diff --git a/cime b/cime
index 4388509869bd..f903115718eb 160000
--- a/cime
+++ b/cime
@@ -1 +1 @@
-Subproject commit 4388509869bd5988d6315e2da65b1a2fbfa604fa
+Subproject commit f903115718ebc30669ce557f511abaef231a1d88
diff --git a/cime_config/config_grids.xml b/cime_config/config_grids.xml
index 7c8eb0ff75bd..3fb5ef619d73 100755
--- a/cime_config/config_grids.xml
+++ b/cime_config/config_grids.xml
@@ -406,6 +406,16 @@
IcoswISC30E3r5
+
+ T62
+ T62
+ RRSwISC6to18E3r5
+ rx1
+ null
+ null
+ RRSwISC6to18E3r5
+
+
TL319
TL319
@@ -526,6 +536,16 @@
ARRM10to60E2r1
+
+ TL319
+ r05
+ IcoswISC30E3r5
+ r05
+ null
+ null
+ IcoswISC30E3r5
+
+
TL319
TL319
@@ -616,6 +636,16 @@
IcosXISC30E3r7
+
+ TL319
+ TL319
+ RRSwISC6to18E3r5
+ JRA025
+ null
+ null
+ RRSwISC6to18E3r5
+
+
TL319
TL319
@@ -1340,6 +1370,16 @@
IcoswISC30E3r5
+
+ ne30np4.pg2
+ ne30np4.pg2
+ RRSwISC6to18E3r5
+ r05
+ null
+ null
+ RRSwISC6to18E3r5
+
+
ne0np4_northamericax4v1
r0125
@@ -1623,6 +1663,26 @@
IcoswISC30E3r5
+
+ ne120np4.pg2
+ r05
+ RRSwISC6to18E3r5
+ r05
+ null
+ null
+ RRSwISC6to18E3r5
+
+
+
+ ne120np4.pg2
+ r025
+ RRSwISC6to18E3r5
+ r025
+ null
+ null
+ RRSwISC6to18E3r5
+
+
ne240np4
ne240np4
@@ -2291,6 +2351,16 @@
IcosXISC30E3r7
+
+ ne30np4.pg2
+ r05
+ RRSwISC6to18E3r5
+ r05
+ null
+ null
+ RRSwISC6to18E3r5
+
+
ne30np4.pg2
r05
@@ -2581,16 +2651,16 @@
96
$DIN_LOC_ROOT/share/domains/domain.lnd.T62_gx1v6.090320.nc
$DIN_LOC_ROOT/share/domains/domain.lnd.T62_gx3v7.090911.nc
- $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oQU480.151209.nc
- $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oQU240.151209.nc
- $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oQU240wLI_mask.160929.nc
- $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oQU120.151209.nc
- $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oEC60to30v3.161222.nc
- $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oEC60to30v3wLI_mask.170328.nc
+ $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oQU480.240513.nc
+ $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oQU240.240513.nc
+ $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oQU240wLI.240513.nc
+ $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oQU120.240513.nc
+ $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oEC60to30v3.240513.nc
+ $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oEC60to30v3wLI.240513.nc
$DIN_LOC_ROOT/share/domains/domain.lnd.T62_ECwISC30to60E1r2.200410.nc
- $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oRRS30to10v3.171129.nc
- $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oRRS30to10v3wLI_mask.171109.nc
- $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oRRS18to6v3.170111.nc
+ $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oRRS30to10v3.240513.nc
+ $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oRRS30to10v3wLI.240513.nc
+ $DIN_LOC_ROOT/share/domains/domain.lnd.T62_oRRS18to6v3.240513.nc
$DIN_LOC_ROOT/share/domains/domain.lnd.T62_oRRS15to5.150722.nc
$DIN_LOC_ROOT/share/domains/domain.lnd.T62_oARRM60to10.180716.nc
$DIN_LOC_ROOT/share/domains/domain.lnd.T62_oARRM60to6.180803.nc
@@ -2601,6 +2671,7 @@
$DIN_LOC_ROOT/share/domains/domain.lnd.T62_SOwISC12to60E2r4.210119.nc
$DIN_LOC_ROOT/share/domains/domain.lnd.T62_ECwISC30to60E2r1.201007.nc
$DIN_LOC_ROOT/share/domains/domain.lnd.T62_IcoswISC30E3r5.231121.nc
+ $DIN_LOC_ROOT/share/domains/domain.lnd.T62_RRSwISC6to18E3r5.240328.nc
T62 is Gaussian grid:
@@ -2657,6 +2728,8 @@
$DIN_LOC_ROOT/share/domains/domain.ocn.TL319_IcoswISC30E3r5.231121.nc
$DIN_LOC_ROOT/share/domains/domain.lnd.TL319_IcosXISC30E3r7.240326.nc
$DIN_LOC_ROOT/share/domains/domain.ocn.TL319_IcosXISC30E3r7.240326.nc
+ $DIN_LOC_ROOT/share/domains/domain.lnd.TL319_RRSwISC6to18E3r5.240328.nc
+ $DIN_LOC_ROOT/share/domains/domain.ocn.TL319_RRSwISC6to18E3r5.240328.nc
$DIN_LOC_ROOT/share/domains/domain.lnd.TL319_oRRS18to6v3.220124.nc
$DIN_LOC_ROOT/share/domains/domain.ocn.TL319_oRRS18to6v3.220124.nc
TL319 is JRA lat/lon grid:
@@ -2768,6 +2841,8 @@
$DIN_LOC_ROOT/share/domains/domain.ocn.ne30pg2_IcoswISC30E3r5.231121.nc
$DIN_LOC_ROOT/share/domains/domain.lnd.ne30pg2_IcosXISC30E3r7.240326.nc
$DIN_LOC_ROOT/share/domains/domain.ocn.ne30pg2_IcosXISC30E3r7.240326.nc
+ $DIN_LOC_ROOT/share/domains/domain.lnd.ne30pg2_RRSwISC6to18E3r5.240328.nc
+ $DIN_LOC_ROOT/share/domains/domain.ocn.ne30pg2_RRSwISC6to18E3r5.240328.nc
$DIN_LOC_ROOT/share/domains/domain.lnd.ne30pg2_gx1v6.190806.nc
$DIN_LOC_ROOT/share/domains/domain.ocn.ne30pg2_gx1v6.190806.nc
ne30np4.pg2 is Spectral Elem 1-deg grid w/ 2x2 FV physics grid per element:
@@ -2839,6 +2914,8 @@
$DIN_LOC_ROOT/share/domains/domain.ocn.ne120pg2_ICOS10.230120.nc
$DIN_LOC_ROOT/share/domains/domain.lnd.ne120pg2_IcoswISC30E3r5.231121.nc
$DIN_LOC_ROOT/share/domains/domain.ocn.ne120pg2_IcoswISC30E3r5.231121.nc
+ $DIN_LOC_ROOT/share/domains/domain.lnd.ne120pg2_RRSwISC6to18E3r5.240328.nc
+ $DIN_LOC_ROOT/share/domains/domain.ocn.ne120pg2_RRSwISC6to18E3r5.240328.nc
$DIN_LOC_ROOT/share/domains/domain.lnd.ne120pg2_gx1v6.190819.nc
$DIN_LOC_ROOT/share/domains/domain.ocn.ne120pg2_gx1v6.190819.nc
ne120np4 is Spectral Elem 1/4-deg grid w/ 2x2 FV physics grid
@@ -3077,6 +3154,13 @@
IcosXISC30E3r7 is a MPAS ocean grid generated with the jigsaw/compass process using a dual mesh that is a subdivided icosahedron, resulting in a nearly uniform resolution of 30 km.:
+
+ 4062533
+ 1
+ $DIN_LOC_ROOT/share/domains/domain.ocn.RRSwISC6to18E3r5.240328.nc
+ RRSwISC6to18E3r5 is a MPAS ocean grid generated with the jigsaw/compass process using a mesh density function that is roughly proportional to the Rossby radius of deformation, with 18 km gridcells at low and 6 km gridcells at high latitudes. Additionally, it has ocean in ice-shelf cavities:
+
+
@@ -3111,6 +3195,8 @@
$DIN_LOC_ROOT/share/domains/domain.lnd.r05_IcoswISC30E3r5.231121.nc
$DIN_LOC_ROOT/share/domains/domain.lnd.r05_IcosXISC30E3r7.240326.nc
$DIN_LOC_ROOT/share/domains/domain.lnd.r05_IcosXISC30E3r7.240326.nc
+ $DIN_LOC_ROOT/share/domains/domain.lnd.r05_RRSwISC6to18E3r5.240328.nc
+ $DIN_LOC_ROOT/share/domains/domain.lnd.r05_RRSwISC6to18E3r5.240328.nc
$DIN_LOC_ROOT/share/domains/domain.lnd.r05_gx1v6.191014.nc
r05 is 1/2 degree river routing grid:
@@ -3144,6 +3230,8 @@
1440
720
$DIN_LOC_ROOT/share/domains/domain.lnd.r025_IcoswISC30E3r5.240129.nc
+ $DIN_LOC_ROOT/share/domains/domain.lnd.r025_RRSwISC6to18E3r5.240402.nc
+ r025 is 1/4 degree river routing grid:
@@ -3601,6 +3689,16 @@
+
+
+
+
+
+
+
+
+
+
@@ -3676,6 +3774,14 @@
+
+
+
+
+
+
+
+
@@ -3775,22 +3881,6 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
@@ -3839,6 +3929,14 @@
+
+
+
+
+
+
+
+
@@ -3846,11 +3944,24 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
@@ -4209,51 +4320,51 @@
-
-
-
-
-
+
+
+
+
+
-
-
-
-
-
+
+
+
+
+
-
-
-
-
-
+
+
+
+
+
-
-
-
-
-
+
+
+
+
+
-
-
-
-
-
+
+
+
+
+
-
-
-
-
-
+
+
+
+
+
@@ -4265,27 +4376,27 @@
-
-
-
-
-
+
+
+
+
+
-
-
-
-
-
+
+
+
+
+
-
-
-
-
-
+
+
+
+
+
@@ -4368,6 +4479,14 @@
+
+
+
+
+
+
+
+
@@ -4512,6 +4631,14 @@
+
+
+
+
+
+
+
+
@@ -4892,6 +5019,10 @@
+
+
+
+
@@ -5002,6 +5133,11 @@
+
+
+
+
+
@@ -5092,6 +5228,11 @@
+
+
+
+
+
@@ -5182,11 +5323,21 @@
+
+
+
+
+
+
+
+
+
+
diff --git a/cime_config/machines/cmake_macros/nvidiagpu_muller-gpu.cmake b/cime_config/machines/cmake_macros/nvidiagpu_muller-gpu.cmake
index 59758d59989c..93c7cdd16b21 100644
--- a/cime_config/machines/cmake_macros/nvidiagpu_muller-gpu.cmake
+++ b/cime_config/machines/cmake_macros/nvidiagpu_muller-gpu.cmake
@@ -6,8 +6,9 @@ if (COMP_NAME STREQUAL gptl)
endif()
string(APPEND CPPDEFS " -DTHRUST_IGNORE_CUB_VERSION_CHECK")
string(APPEND CMAKE_CUDA_FLAGS " -ccbin CC -O2 -arch sm_80 --use_fast_math")
-string(APPEND CMAKE_EXE_LINKER_FLAGS " -acc -gpu=cc70,cc60 -Minfo=accel")
+set(CMAKE_CUDA_ARCHITECTURES "80")
+string(APPEND CMAKE_Fortran_FLAGS " -acc -gpu=cc80 -Minfo=accel")
+string(APPEND CMAKE_EXE_LINKER_FLAGS " -acc -gpu=cc80 -Minfo=accel")
set(SCC "cc")
set(SCXX "CC")
set(SFC "ftn")
-string(APPEND CMAKE_Fortran_FLAGS " -acc -gpu=cc70,cc60 -Minfo=accel")
diff --git a/cime_config/machines/cmake_macros/nvidiagpu_pm-gpu.cmake b/cime_config/machines/cmake_macros/nvidiagpu_pm-gpu.cmake
index 59758d59989c..93c7cdd16b21 100644
--- a/cime_config/machines/cmake_macros/nvidiagpu_pm-gpu.cmake
+++ b/cime_config/machines/cmake_macros/nvidiagpu_pm-gpu.cmake
@@ -6,8 +6,9 @@ if (COMP_NAME STREQUAL gptl)
endif()
string(APPEND CPPDEFS " -DTHRUST_IGNORE_CUB_VERSION_CHECK")
string(APPEND CMAKE_CUDA_FLAGS " -ccbin CC -O2 -arch sm_80 --use_fast_math")
-string(APPEND CMAKE_EXE_LINKER_FLAGS " -acc -gpu=cc70,cc60 -Minfo=accel")
+set(CMAKE_CUDA_ARCHITECTURES "80")
+string(APPEND CMAKE_Fortran_FLAGS " -acc -gpu=cc80 -Minfo=accel")
+string(APPEND CMAKE_EXE_LINKER_FLAGS " -acc -gpu=cc80 -Minfo=accel")
set(SCC "cc")
set(SCXX "CC")
set(SFC "ftn")
-string(APPEND CMAKE_Fortran_FLAGS " -acc -gpu=cc70,cc60 -Minfo=accel")
diff --git a/cime_config/machines/config_machines.xml b/cime_config/machines/config_machines.xml
index 37691bb669da..b2e4ed91a720 100644
--- a/cime_config/machines/config_machines.xml
+++ b/cime_config/machines/config_machines.xml
@@ -2859,34 +2859,45 @@
anaconda3/5.2.0
- intel/18.0.4-443hhug
- intel-mkl/2018.4.274-jwaeshj
- hdf5/1.10.5-3mk3uik
- netcdf/4.7.0-krelxcz
- netcdf-fortran/4.4.5-74lj75q
+ gcc/7.4.0
+ intel/20.0.4-lednsve
+ intel-mkl/2020.4.304-voqlapk
- intel-mpi/2018.4.274-4hmwfl6
- parallel-netcdf/1.11.0-acswzws
+ intel-mpi/2019.9.304-i42whlw
+ hdf5/1.10.7-ugvomvt
+ netcdf-c/4.4.1-blyisdg
+ netcdf-cxx/4.2-gkqc6fq
+ netcdf-fortran/4.4.4-eanrh5t
+ parallel-netcdf/1.11.0-y3nmmej
- mvapich2/2.3.1-verbs-omjz3ck
- parallel-netcdf/1.11.2-7fy6qz3
+ mvapich2/2.3.6-verbs-x4iz7lq
+ hdf5/1.10.7-igh6foh
+ netcdf-c/4.4.1-gei7x7w
+ netcdf-cxx/4.2-db2f5or
+ netcdf-fortran/4.4.4-b4ldb3a
+ parallel-netcdf/1.11.0-kj4jsvt
- gcc/8.2.0-g7hppkz
- intel-mkl/2018.4.274-2amycpi
- hdf5/1.8.16-mz7lmxh
- netcdf/4.4.1-xkjcghm
- netcdf-fortran/4.4.4-mpstomu
+ gcc/8.2.0-xhxgy33
+ intel-mkl/2020.4.304-d6zw4xa
- intel-mpi/2018.4.274-ozfo327
- parallel-netcdf/1.11.0-filvnis
+ intel-mpi/2019.9.304-rxpzd6p
+ hdf5/1.10.7-oy6d2nm
+ netcdf-c/4.4.1-fysjgfx
+ netcdf-cxx/4.2-oaiw2v6
+ netcdf-fortran/4.4.4-kxgkaop
+ parallel-netcdf/1.11.0-fce7akl
- mvapich2/2.3-bebop-3xi4hiu
- parallel-netcdf/1.11.2-hfn33fd
+ mvapich2/2.3-bebop-a66r4jf
+ hdf5/1.10.5-ejeshwh
+ netcdf/4.4.1-ve2zfkw
+ netcdf-cxx/4.2-2rkopdl
+ netcdf-fortran/4.4.4-thtylny
+ parallel-netcdf/1.11.0-kozyofv
$CIME_OUTPUT_ROOT/$CASE/run
@@ -2901,7 +2912,7 @@
$SHELL{dirname $(dirname $(which pnetcdf_version))}
-
+
$SHELL{which h5dump | xargs dirname | xargs dirname}
diff --git a/components/data_comps/drof/src/drof_comp_mod.F90 b/components/data_comps/drof/src/drof_comp_mod.F90
index cb060e02ff84..3be703ec6998 100644
--- a/components/data_comps/drof/src/drof_comp_mod.F90
+++ b/components/data_comps/drof/src/drof_comp_mod.F90
@@ -25,7 +25,11 @@ module drof_comp_mod
use drof_shr_mod , only: rest_file ! namelist input
use drof_shr_mod , only: rest_file_strm ! namelist input
use drof_shr_mod , only: nullstr
-
+#ifdef HAVE_MOAB
+ use seq_comm_mct, only : mrofid ! id of moab rof app
+ use seq_comm_mct, only : mbrof_data ! turn on if the data rof
+ use iso_c_binding
+#endif
!
! !PUBLIC TYPES:
implicit none
@@ -67,6 +71,12 @@ subroutine drof_comp_init(Eclock, x2r, r2x, &
SDROF, gsmap, ggrid, mpicom, compid, my_task, master_task, &
inst_suffix, inst_name, logunit, read_restart)
+#ifdef HAVE_MOAB
+ use iMOAB, only: iMOAB_DefineTagStorage, iMOAB_GetDoubleTagStorage, &
+ iMOAB_SetIntTagStorage, iMOAB_SetDoubleTagStorage, &
+ iMOAB_ResolveSharedEntities, iMOAB_CreateVertices, &
+ iMOAB_GetMeshInfo, iMOAB_UpdateMeshInfo, iMOAB_WriteMesh
+#endif
! !DESCRIPTION: initialize drof model
implicit none
@@ -92,7 +102,19 @@ subroutine drof_comp_init(Eclock, x2r, r2x, &
logical :: exists ! file existance logical
integer(IN) :: nu ! unit number
character(CL) :: calendar ! model calendar
-
+#ifdef HAVE_MOAB
+ character*400 tagname
+ real(R8) latv, lonv
+ integer iv, tagindex, ilat, ilon, ierr !, arrsize, nfields
+ real(R8), allocatable, target :: data(:)
+ integer(IN), pointer :: idata(:) ! temporary
+ real(r8), dimension(:), allocatable :: moab_vert_coords ! temporary
+ !real(R8), allocatable, target :: vtags_zero(:, :)
+
+#ifdef MOABDEBUG
+ character*100 outfile, wopts
+#endif
+#endif
!--- formats ---
character(*), parameter :: F00 = "('(drof_comp_init) ',8a)"
character(*), parameter :: F0L = "('(drof_comp_init) ',a, l2)"
@@ -164,6 +186,121 @@ subroutine drof_comp_init(Eclock, x2r, r2x, &
call t_stopf('drof_initmctdom')
+
+#ifdef HAVE_MOAB
+ ilat = mct_aVect_indexRA(ggrid%data,'lat')
+ ilon = mct_aVect_indexRA(ggrid%data,'lon')
+ allocate(moab_vert_coords(lsize*3))
+ do iv = 1, lsize
+ lonv = ggrid%data%rAttr(ilon, iv) * SHR_CONST_PI/180.
+ latv = ggrid%data%rAttr(ilat, iv) * SHR_CONST_PI/180.
+ moab_vert_coords(3*iv-2)=COS(latv)*COS(lonv)
+ moab_vert_coords(3*iv-1)=COS(latv)*SIN(lonv)
+ moab_vert_coords(3*iv )=SIN(latv)
+ enddo
+
+ ! create the vertices with coordinates from MCT domain
+ ierr = iMOAB_CreateVertices(mrofid, lsize*3, 3, moab_vert_coords)
+ if (ierr .ne. 0) &
+ call shr_sys_abort('Error: fail to create MOAB vertices in rof model')
+
+ tagname='GLOBAL_ID'//C_NULL_CHAR
+ ierr = iMOAB_DefineTagStorage(mrofid, tagname, &
+ 0, & ! dense, integer
+ 1, & ! number of components
+ tagindex )
+ if (ierr .ne. 0) &
+ call shr_sys_abort('Error: fail to retrieve GLOBAL_ID tag ')
+
+ ! get list of global IDs for Dofs
+ call mct_gsMap_orderedPoints(gsMap, my_task, idata)
+
+ ierr = iMOAB_SetIntTagStorage ( mrofid, tagname, lsize, &
+ 0, & ! vertex type
+ idata)
+ if (ierr .ne. 0) &
+ call shr_sys_abort('Error: fail to set GLOBAL_ID tag ')
+
+ ierr = iMOAB_ResolveSharedEntities( mrofid, lsize, idata );
+ if (ierr .ne. 0) &
+ call shr_sys_abort('Error: fail to resolve shared entities')
+
+ deallocate(moab_vert_coords)
+ deallocate(idata)
+
+ ierr = iMOAB_UpdateMeshInfo( mrofid )
+ if (ierr .ne. 0) &
+ call shr_sys_abort('Error: fail to update mesh info ')
+
+ allocate(data(lsize))
+ ierr = iMOAB_DefineTagStorage( mrofid, "area:aream:frac:mask"//C_NULL_CHAR, &
+ 1, & ! dense, double
+ 1, & ! number of components
+ tagindex )
+ if (ierr > 0 ) &
+ call shr_sys_abort('Error: fail to create tag: area:aream:frac:mask' )
+
+ data(:) = ggrid%data%rAttr(mct_aVect_indexRA(ggrid%data,'area'),:)
+ tagname='area'//C_NULL_CHAR
+ ierr = iMOAB_SetDoubleTagStorage ( mrofid, tagname, lsize, &
+ 0, & ! set data on vertices
+ data)
+ if (ierr > 0 ) &
+ call shr_sys_abort('Error: fail to set area tag ')
+
+ ! set the same data for aream (model area) as area
+ ! data(:) = ggrid%data%rAttr(mct_aVect_indexRA(ggrid%data,'aream'),:)
+ tagname='aream'//C_NULL_CHAR
+ ierr = iMOAB_SetDoubleTagStorage ( mrofid, tagname, lsize, &
+ 0, & ! set data on vertices
+ data)
+ if (ierr > 0 ) &
+ call shr_sys_abort('Error: fail to set aream tag ')
+
+ data(:) = ggrid%data%rAttr(mct_aVect_indexRA(ggrid%data,'mask'),:)
+ tagname='mask'//C_NULL_CHAR
+ ierr = iMOAB_SetDoubleTagStorage ( mrofid, tagname, lsize, &
+ 0, & ! set data on vertices
+ data)
+ if (ierr > 0 ) &
+ call shr_sys_abort('Error: fail to set mask tag ')
+
+ data(:) = ggrid%data%rAttr(mct_aVect_indexRA(ggrid%data,'frac'),:)
+ tagname='frac'//C_NULL_CHAR
+ ierr = iMOAB_SetDoubleTagStorage ( mrofid, tagname, lsize, &
+ 0, & ! set data on vertices
+ data)
+ if (ierr > 0 ) &
+ call shr_sys_abort('Error: fail to set frac tag ')
+
+ deallocate(data)
+
+ ! define tags
+ ierr = iMOAB_DefineTagStorage( mrofid, trim(seq_flds_x2r_fields)//C_NULL_CHAR, &
+ 1, & ! dense, double
+ 1, & ! number of components
+ tagindex )
+ if (ierr > 0 ) &
+ call shr_sys_abort('Error: fail to create seq_flds_x2r_fields tags ')
+
+ ierr = iMOAB_DefineTagStorage( mrofid, trim(seq_flds_r2x_fields)//C_NULL_CHAR, &
+ 1, & ! dense, double
+ 1, & ! number of components
+ tagindex )
+ if (ierr > 0 ) &
+ call shr_sys_abort('Error: fail to create seq_flds_r2x_fields tags ')
+ mbrof_data = .true. ! will have effects
+#ifdef MOABDEBUG
+ ! debug test
+ outfile = 'RofDataMesh.h5m'//C_NULL_CHAR
+ wopts = ';PARALLEL=WRITE_PART'//C_NULL_CHAR !
+ ! write out the mesh file to disk
+ ierr = iMOAB_WriteMesh(mrofid, trim(outfile), trim(wopts))
+ if (ierr .ne. 0) then
+ call shr_sys_abort(subname//' ERROR in writing data mesh rof ')
+ endif
+#endif
+#endif
!----------------------------------------------------------------------------
! Initialize MCT attribute vectors
!----------------------------------------------------------------------------
@@ -256,6 +393,13 @@ subroutine drof_comp_run(EClock, x2r, r2x, &
SDROF, gsmap, ggrid, mpicom, compid, my_task, master_task, &
inst_suffix, logunit, case_name)
+#ifdef MOABDEBUG
+ use iMOAB, only: iMOAB_WriteMesh
+#endif
+#ifdef HAVE_MOAB
+ use seq_flds_mod , only: seq_flds_r2x_fields
+ use seq_flds_mod , only: moab_set_tag_from_av
+#endif
! !DESCRIPTION: run method for drof model
implicit none
@@ -285,7 +429,18 @@ subroutine drof_comp_run(EClock, x2r, r2x, &
integer(IN) :: nu ! unit number
integer(IN) :: nflds_r2x
character(len=18) :: date_str
+#ifdef HAVE_MOAB
+ real(R8), allocatable, target :: datam(:)
+ type(mct_list) :: temp_list
+ integer :: size_list, index_list
+ type(mct_string) :: mctOStr !
+ character*400 tagname, mct_field
+#ifdef MOABDEBUG
+ integer :: cur_drof_stepno, ierr
+ character*100 outfile, wopts, lnum
+#endif
+#endif
character(*), parameter :: F00 = "('(drof_comp_run) ',8a)"
character(*), parameter :: F04 = "('(drof_comp_run) ',2a,2i8,'s')"
character(*), parameter :: subName = "(drof_comp_run) "
@@ -384,6 +539,32 @@ subroutine drof_comp_run(EClock, x2r, r2x, &
!----------------------------------------------------------------------------
! Log output for model date
!----------------------------------------------------------------------------
+#ifdef HAVE_MOAB
+ lsize = mct_avect_lsize(r2x) ! is it the same as mct_avect_lsize(avstrm) ?
+ allocate(datam(lsize)) !
+ call mct_list_init(temp_list ,seq_flds_r2x_fields)
+ size_list=mct_list_nitem (temp_list)
+ do index_list = 1, size_list
+ call mct_list_get(mctOStr,index_list,temp_list)
+ mct_field = mct_string_toChar(mctOStr)
+ tagname= trim(mct_field)//C_NULL_CHAR
+ call moab_set_tag_from_av(tagname, r2x, index_list, mrofid, datam, lsize) ! loop over all a2x fields, not just a few
+ enddo
+ call mct_list_clean(temp_list)
+ deallocate(datam) ! maybe we should keep it around, deallocate at the final only?
+
+#ifdef MOABDEBUG
+ call seq_timemgr_EClockGetData( EClock, stepno=cur_drof_stepno )
+ write(lnum,"(I0.2)")cur_drof_stepno
+ outfile = 'drof_comp_run_'//trim(lnum)//'.h5m'//C_NULL_CHAR
+ wopts = 'PARALLEL=WRITE_PART'//C_NULL_CHAR
+ ierr = iMOAB_WriteMesh(mrofid, outfile, wopts)
+ if (ierr > 0 ) then
+ write(logunit,*) 'Failed to write data rof component state '
+ endif
+#endif
+
+#endif
call t_startf('drof_run2')
if (my_task == master_task) then
diff --git a/components/data_comps/drof/src/rof_comp_mct.F90 b/components/data_comps/drof/src/rof_comp_mct.F90
index bafdc6d3f988..7257028e7b52 100644
--- a/components/data_comps/drof/src/rof_comp_mct.F90
+++ b/components/data_comps/drof/src/rof_comp_mct.F90
@@ -16,7 +16,11 @@ module rof_comp_mct
use drof_comp_mod , only: drof_comp_init, drof_comp_run, drof_comp_final
use drof_shr_mod , only: drof_shr_read_namelists
use seq_flds_mod , only: seq_flds_x2r_fields, seq_flds_r2x_fields
-
+#ifdef HAVE_MOAB
+ use seq_comm_mct, only : mrofid ! iMOAB app id for rof
+ use iso_c_binding
+ use iMOAB , only: iMOAB_RegisterApplication
+#endif
! !PUBLIC TYPES:
implicit none
private ! except
@@ -53,6 +57,9 @@ module rof_comp_mct
!===============================================================================
subroutine rof_init_mct( EClock, cdata, x2r, r2x, NLFilename )
+#ifdef HAVE_MOAB
+ use shr_stream_mod, only: shr_stream_getDomainInfo, shr_stream_getFile
+#endif
! !DESCRIPTION: initialize drof model
implicit none
@@ -74,6 +81,16 @@ subroutine rof_init_mct( EClock, cdata, x2r, r2x, NLFilename )
integer(IN) :: shrloglev ! original log level
logical :: read_restart ! start from restart
integer(IN) :: ierr ! error code
+
+ character(CL) :: filePath ! generic file path
+ character(CL) :: fileName ! generic file name
+ character(CS) :: timeName ! domain file: time variable name
+ character(CS) :: lonName ! domain file: lon variable name
+ character(CS) :: latName ! domain file: lat variable name
+ character(CS) :: hgtName ! domain file: hgt variable name
+ character(CS) :: maskName ! domain file: mask variable name
+ character(CS) :: areaName ! domain file: area variable name
+
character(*), parameter :: subName = "(rof_init_mct) "
!-------------------------------------------------------------------------------
@@ -140,11 +157,28 @@ subroutine rof_init_mct( EClock, cdata, x2r, r2x, NLFilename )
! Initialize drof
!----------------------------------------------------------------------------
+#ifdef HAVE_MOAB
+ ierr = iMOAB_RegisterApplication(trim("DROF")//C_NULL_CHAR, mpicom, compid, mrofid)
+ if (ierr .ne. 0) then
+ write(logunit,*) subname,' error in registering data rof comp'
+ call shr_sys_abort(subname//' ERROR in registering data rof comp')
+ endif
+#endif
+
call drof_comp_init(Eclock, x2r, r2x, &
seq_flds_x2r_fields, seq_flds_r2x_fields, &
SDROF, gsmap, ggrid, mpicom, compid, my_task, master_task, &
inst_suffix, inst_name, logunit, read_restart)
-
+#ifdef HAVE_MOAB
+ if (my_task == master_task) then
+ call shr_stream_getDomainInfo(SDROF%stream(1), filePath,fileName,timeName,lonName, &
+ latName,hgtName,maskName,areaName)
+ call shr_stream_getFile(filePath,fileName)
+ ! send path of river domain to MOAB coupler.
+ call seq_infodata_PutData( infodata, rof_domain=fileName)
+ write(logunit,*) ' filename: ', filename
+ endif
+#endif
!----------------------------------------------------------------------------
! Fill infodata that needs to be returned from drof
!----------------------------------------------------------------------------
diff --git a/components/elm/bld/ELMBuildNamelist.pm b/components/elm/bld/ELMBuildNamelist.pm
index 8c16df25743b..67347728e0fd 100755
--- a/components/elm/bld/ELMBuildNamelist.pm
+++ b/components/elm/bld/ELMBuildNamelist.pm
@@ -813,7 +813,7 @@ sub setup_cmdl_fates_mode {
"use_fates_inventory_init", "use_fates_fixed_biogeog", "use_fates_nocomp","use_fates_sp",
"fates_inventory_ctrl_filename","use_fates_logging", "use_fates_tree_damage",
"use_fates_parteh_mode","use_fates_cohort_age_tracking","use_snicar_ad", "use_fates_luh",
- "fluh_timeseries");
+ "fluh_timeseries","fates_history_dimlevel");
foreach my $var ( @list ) {
if ( defined($nl->get_value($var)) ) {
$nl_flags->{$var} = $nl->get_value($var);
@@ -3397,7 +3397,8 @@ sub setup_logic_fates {
add_default($test_files, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, 'use_fates_luh', 'use_fates'=>$nl_flags->{'use_fates'});
add_default($test_files, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, 'fates_paramfile', 'phys'=>$nl_flags->{'phys'});
add_default($test_files, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, 'fluh_timeseries', 'phys'=>$nl_flags->{'phys'});
-
+ add_default($test_files, $nl_flags->{'inputdata_rootdir'}, $definition, $defaults, $nl, 'fates_history_dimlevel','use_fates'=>$nl_flags->{'use_fates'});
+
# For FATES SP mode make sure no-competion, and fixed-biogeography are also set
# And also check for other settings that can't be trigged on as well
my $var = "use_fates_sp";
@@ -3413,6 +3414,10 @@ sub setup_logic_fates {
if ( $nl->get_value('fates_spitfire_mode') > 0 ) {
fatal_error('fates_spitfire_mode can NOT be set to greater than 0 when use_fates_sp is true');
}
+ # hydro isn't currently supported to work when FATES SP mode is active
+ if (&value_is_true( $nl->get_value('use_fates_planthydro') )) {
+ fatal_error('fates sp mode is currently not supported to work with fates hydro');
+ }
}
}
# check that fates landuse change mode has the necessary luh2 landuse timeseries data
diff --git a/components/elm/bld/namelist_files/namelist_defaults.xml b/components/elm/bld/namelist_files/namelist_defaults.xml
index 44cf85c73f3f..2fe8b19aaa87 100644
--- a/components/elm/bld/namelist_files/namelist_defaults.xml
+++ b/components/elm/bld/namelist_files/namelist_defaults.xml
@@ -134,7 +134,7 @@ attributes from the config_cache.xml file (with keys converted to upper-case).
-lnd/clm2/paramdata/fates_params_api.32.0.0_12pft_c231215.nc
+lnd/clm2/paramdata/fates_params_api.35.0.0_12pft_c240326.nc
lnd/clm2/paramdata/CNP_parameters_c131108.nc
@@ -2195,7 +2195,7 @@ this mask will have smb calculated over the entire global land surface
.false.
.true.
.false.
-
+2,2
.true.
.true.
.false.
diff --git a/components/elm/bld/namelist_files/namelist_definition.xml b/components/elm/bld/namelist_files/namelist_definition.xml
index ec3a74cbd965..a6d4e9eff3c0 100644
--- a/components/elm/bld/namelist_files/namelist_definition.xml
+++ b/components/elm/bld/namelist_files/namelist_definition.xml
@@ -395,6 +395,19 @@ Full pathname of unified land use harmonization data file. This causes the land-
types to vary over time.
+
+Setting for what types of FATES history to be allocate and
+calculated at the dynamics timestep (1st integer) and the
+model timestep (2nd integer). This must be consistent with
+hist_fincl*, ie output variables must not be listed if the
+output level is not enabled.
+0 = no fates history variables are calculated or allocated
+1 = only time x space (3d) fates history variables allowed
+2 = multiplexed dimensioned fates history is also allowed
+(Only relevant if FATES is on)
+
+
Toggle to turn on if Kennedy et al plant hydraulics model is used.
@@ -1423,7 +1436,7 @@ Representative concentration pathway for future scenarios [radiative forcing at
+ valid_values="USGS,gx3v7,gx1v6,navy,test,tx0.1v2,tx1v1,T62,TL319,cruncep,oEC60to30v3,oEC60to30v3wLI,ECwISC30to60E1r2,EC30to60E2r2,WC14to60E2r3,WCAtl12to45E2r4,SOwISC12to60E2r4,ECwISC30to60E2r1,oRRS18to6,oRRS18to6v3,oRRS15to5,oARRM60to10,oARRM60to6,ARRM10to60E2r1,oQU480,oQU240,oQU240wLI,oQU120,oRRS30to10v3,oRRS30to10v3wLI,360x720cru,NLDASww3a,NLDAS,tx0.1v2,ICOS10,IcoswISC30E3r5,IcosXISC30E3r7,RRSwISC6to18E3r5">
Land mask description
diff --git a/components/elm/cime_config/testdefs/testmods_dirs/elm/fates_cold_allvars/user_nl_elm b/components/elm/cime_config/testdefs/testmods_dirs/elm/fates_cold_allvars/user_nl_elm
index 2aff9c0b3c23..249764fb277d 100644
--- a/components/elm/cime_config/testdefs/testmods_dirs/elm/fates_cold_allvars/user_nl_elm
+++ b/components/elm/cime_config/testdefs/testmods_dirs/elm/fates_cold_allvars/user_nl_elm
@@ -2,73 +2,57 @@ hist_mfilt = 365
hist_nhtfrq = -24
hist_empty_htapes = .false.
fates_spitfire_mode = 1
-hist_fincl1 = 'FATES_CROWNAREA_PF', 'FATES_CANOPYCROWNAREA_PF',
-'FATES_NCL_AP', 'FATES_NPATCH_AP', 'FATES_VEGC_AP',
-'FATES_SECONDARY_FOREST_FRACTION', 'FATES_WOOD_PRODUCT',
-'FATES_SECONDARY_FOREST_VEGC', 'FATES_SECONDAREA_ANTHRODIST_AP',
-'FATES_SECONDAREA_DIST_AP', 'FATES_STOMATAL_COND_AP', 'FATES_LBLAYER_COND_AP',
-'FATES_NPP_AP', 'FATES_GPP_AP', 'FATES_PARSUN_Z_CLLL', 'FATES_PARSHA_Z_CLLL',
-'FATES_PARSUN_Z_CLLLPF', 'FATES_PARSHA_Z_CLLLPF', 'FATES_PARSUN_Z_CL',
-'FATES_PARSHA_Z_CL', 'FATES_LAISUN_Z_CLLL', 'FATES_LAISHA_Z_CLLL',
-'FATES_LAISUN_Z_CLLLPF', 'FATES_LAISHA_Z_CLLLPF', 'FATES_LAISUN_TOP_CL',
-'FATES_LAISHA_TOP_CL', 'FATES_FABD_SUN_CLLLPF', 'FATES_FABD_SHA_CLLLPF',
-'FATES_FABI_SUN_CLLLPF', 'FATES_FABI_SHA_CLLLPF', 'FATES_FABD_SUN_CLLL',
-'FATES_FABD_SHA_CLLL', 'FATES_FABI_SUN_CLLL', 'FATES_FABI_SHA_CLLL',
-'FATES_PARPROF_DIR_CLLLPF', 'FATES_PARPROF_DIF_CLLLPF','FATES_FABD_SUN_TOPLF_CL',
-'FATES_FABD_SHA_TOPLF_CL', 'FATES_FABI_SUN_TOPLF_CL', 'FATES_FABI_SHA_TOPLF_CL',
-'FATES_NET_C_UPTAKE_CLLL', 'FATES_CROWNAREA_CLLL', 'FATES_NPLANT_CANOPY_SZAP',
-'FATES_NPLANT_USTORY_SZAP', 'FATES_DDBH_CANOPY_SZAP', 'FATES_DDBH_USTORY_SZAP',
-'FATES_MORTALITY_CANOPY_SZAP', 'FATES_MORTALITY_USTORY_SZAP',
-'FATES_NPLANT_SZAPPF', 'FATES_NPP_APPF', 'FATES_VEGC_APPF', 'FATES_GPP_SZPF',
-'FATES_GPP_CANOPY_SZPF', 'FATES_AUTORESP_CANOPY_SZPF', 'FATES_GPP_USTORY_SZPF',
-'FATES_AUTORESP_USTORY_SZPF', 'FATES_NPP_SZPF', 'FATES_LEAF_ALLOC_SZPF',
-'FATES_SEED_ALLOC_SZPF', 'FATES_FROOT_ALLOC_SZPF', 'FATES_BGSAPWOOD_ALLOC_SZPF',
-'FATES_BGSTRUCT_ALLOC_SZPF', 'FATES_AGSAPWOOD_ALLOC_SZPF',
-'FATES_AGSTRUCT_ALLOC_SZPF', 'FATES_STORE_ALLOC_SZPF', 'FATES_DDBH_SZPF',
-'FATES_GROWTHFLUX_SZPF', 'FATES_GROWTHFLUX_FUSION_SZPF',
-'FATES_DDBH_CANOPY_SZPF', 'FATES_DDBH_USTORY_SZPF', 'FATES_BASALAREA_SZPF',
-'FATES_VEGC_ABOVEGROUND_SZPF', 'FATES_NPLANT_SZPF', 'FATES_NPLANT_ACPF',
-'FATES_MORTALITY_BACKGROUND_SZPF', 'FATES_MORTALITY_HYDRAULIC_SZPF',
-'FATES_MORTALITY_CSTARV_SZPF', 'FATES_MORTALITY_IMPACT_SZPF',
-'FATES_MORTALITY_FIRE_SZPF', 'FATES_MORTALITY_CROWNSCORCH_SZPF',
-'FATES_MORTALITY_CAMBIALBURN_SZPF', 'FATES_MORTALITY_TERMINATION_SZPF',
-'FATES_MORTALITY_LOGGING_SZPF', 'FATES_MORTALITY_FREEZING_SZPF',
-'FATES_MORTALITY_SENESCENCE_SZPF', 'FATES_MORTALITY_AGESCEN_SZPF',
-'FATES_MORTALITY_AGESCEN_ACPF', 'FATES_MORTALITY_CANOPY_SZPF',
-'FATES_STOREC_CANOPY_SZPF', 'FATES_LEAFC_CANOPY_SZPF',
-'FATES_NPLANT_CANOPY_SZPF', 'FATES_MORTALITY_USTORY_SZPF',
-'FATES_STOREC_USTORY_SZPF', 'FATES_LEAFC_USTORY_SZPF',
-'FATES_NPLANT_USTORY_SZPF', 'FATES_CWD_ABOVEGROUND_DC',
-'FATES_CWD_BELOWGROUND_DC', 'FATES_CWD_ABOVEGROUND_IN_DC',
-'FATES_CWD_BELOWGROUND_IN_DC', 'FATES_CWD_ABOVEGROUND_OUT_DC',
-'FATES_CWD_BELOWGROUND_OUT_DC', 'FATES_AUTORESP_SZPF', 'FATES_GROWAR_SZPF',
-'FATES_MAINTAR_SZPF', 'FATES_RDARK_SZPF', 'FATES_AGSAPMAINTAR_SZPF',
-'FATES_BGSAPMAINTAR_SZPF', 'FATES_FROOTMAINTAR_SZPF',
-'FATES_YESTCANLEV_CANOPY_SZ', 'FATES_YESTCANLEV_USTORY_SZ',
-'FATES_VEGC_SZ', 'FATES_DEMOTION_RATE_SZ', 'FATES_PROMOTION_RATE_SZ',
-'FATES_SAI_CANOPY_SZ', 'FATES_SAI_USTORY_SZ', 'FATES_NPP_CANOPY_SZ',
-'FATES_NPP_USTORY_SZ', 'FATES_TRIMMING_CANOPY_SZ', 'FATES_TRIMMING_USTORY_SZ',
-'FATES_CROWNAREA_CANOPY_SZ', 'FATES_CROWNAREA_USTORY_SZ',
-'FATES_LEAFCTURN_CANOPY_SZ', 'FATES_FROOTCTURN_CANOPY_SZ',
-'FATES_STORECTURN_CANOPY_SZ', 'FATES_STRUCTCTURN_CANOPY_SZ',
-'FATES_SAPWOODCTURN_CANOPY_SZ', 'FATES_SEED_PROD_CANOPY_SZ',
-'FATES_LEAF_ALLOC_CANOPY_SZ', 'FATES_FROOT_ALLOC_CANOPY_SZ',
-'FATES_SAPWOOD_ALLOC_CANOPY_SZ', 'FATES_STRUCT_ALLOC_CANOPY_SZ',
-'FATES_SEED_ALLOC_CANOPY_SZ', 'FATES_STORE_ALLOC_CANOPY_SZ',
-'FATES_RDARK_CANOPY_SZ', 'FATES_LSTEMMAINTAR_CANOPY_SZ',
-'FATES_CROOTMAINTAR_CANOPY_SZ', 'FATES_FROOTMAINTAR_CANOPY_SZ',
-'FATES_GROWAR_CANOPY_SZ', 'FATES_MAINTAR_CANOPY_SZ',
-'FATES_LEAFCTURN_USTORY_SZ', 'FATES_FROOTCTURN_USTORY_SZ',
-'FATES_STORECTURN_USTORY_SZ', 'FATES_STRUCTCTURN_USTORY_SZ',
-'FATES_SAPWOODCTURN_USTORY_SZ', 'FATES_SEED_PROD_USTORY_SZ',
-'FATES_LEAF_ALLOC_USTORY_SZ', 'FATES_FROOT_ALLOC_USTORY_SZ',
-'FATES_SAPWOOD_ALLOC_USTORY_SZ', 'FATES_STRUCT_ALLOC_USTORY_SZ',
-'FATES_SEED_ALLOC_USTORY_SZ', 'FATES_STORE_ALLOC_USTORY_SZ',
-'FATES_RDARK_USTORY_SZ', 'FATES_LSTEMMAINTAR_USTORY_SZ',
-'FATES_CROOTMAINTAR_USTORY_SZ', 'FATES_FROOTMAINTAR_USTORY_SZ',
-'FATES_GROWAR_USTORY_SZ', 'FATES_MAINTAR_USTORY_SZ', 'FATES_VEGC_SZPF',
-'FATES_LEAFC_SZPF', 'FATES_FROOTC_SZPF', 'FATES_SAPWOODC_SZPF',
-'FATES_STOREC_SZPF', 'FATES_REPROC_SZPF', 'FATES_DROUGHT_STATUS_PF',
-'FATES_DAYSINCE_DROUGHTLEAFOFF_PF', 'FATES_DAYSINCE_DROUGHTLEAFON_PF',
-'FATES_MEANLIQVOL_DROUGHTPHEN_PF', 'FATES_MEANSMP_DROUGHTPHEN_PF',
-'FATES_ELONG_FACTOR_PF'
+fates_history_dimlevel(1) = 2
+fates_history_dimlevel(2) = 2
+use_fates_tree_damage = .true.
+hist_fincl1 = 'FATES_TLONGTERM',
+'FATES_TGROWTH','FATES_SEEDS_IN_GRIDCELL_PF','FATES_SEEDS_OUT_GRIDCELL_PF','FATES_NCL_AP',
+'FATES_NPATCH_AP','FATES_VEGC_AP','FATES_SECONDAREA_ANTHRODIST_AP','FATES_SECONDAREA_DIST_AP',
+'FATES_FUEL_AMOUNT_APFC','FATES_STOREC_TF_USTORY_SZPF','FATES_STOREC_TF_CANOPY_SZPF',
+'FATES_CROWNAREA_CLLL','FATES_ABOVEGROUND_MORT_SZPF',
+'FATES_ABOVEGROUND_PROD_SZPF','FATES_NPLANT_SZAP','FATES_NPLANT_CANOPY_SZAP',
+'FATES_NPLANT_USTORY_SZAP','FATES_DDBH_CANOPY_SZAP','FATES_DDBH_USTORY_SZAP',
+'FATES_MORTALITY_CANOPY_SZAP','FATES_MORTALITY_USTORY_SZAP','FATES_NPLANT_SZAPPF',
+'FATES_NPP_APPF','FATES_VEGC_APPF','FATES_SCORCH_HEIGHT_APPF','FATES_GPP_SZPF',
+'FATES_GPP_CANOPY_SZPF','FATES_AUTORESP_CANOPY_SZPF','FATES_GPP_USTORY_SZPF',
+'FATES_AUTORESP_USTORY_SZPF','FATES_NPP_SZPF','FATES_LEAF_ALLOC_SZPF',
+'FATES_SEED_ALLOC_SZPF','FATES_FROOT_ALLOC_SZPF','FATES_BGSAPWOOD_ALLOC_SZPF',
+'FATES_BGSTRUCT_ALLOC_SZPF','FATES_AGSAPWOOD_ALLOC_SZPF','FATES_AGSTRUCT_ALLOC_SZPF',
+'FATES_STORE_ALLOC_SZPF','FATES_DDBH_SZPF','FATES_GROWTHFLUX_SZPF','FATES_GROWTHFLUX_FUSION_SZPF',
+'FATES_DDBH_CANOPY_SZPF','FATES_DDBH_USTORY_SZPF','FATES_BASALAREA_SZPF','FATES_VEGC_ABOVEGROUND_SZPF',
+'FATES_NPLANT_SZPF','FATES_NPLANT_ACPF','FATES_MORTALITY_BACKGROUND_SZPF','FATES_MORTALITY_HYDRAULIC_SZPF',
+'FATES_MORTALITY_CSTARV_SZPF','FATES_MORTALITY_IMPACT_SZPF','FATES_MORTALITY_FIRE_SZPF',
+'FATES_MORTALITY_CROWNSCORCH_SZPF','FATES_MORTALITY_CAMBIALBURN_SZPF','FATES_MORTALITY_TERMINATION_SZPF',
+'FATES_MORTALITY_LOGGING_SZPF','FATES_MORTALITY_FREEZING_SZPF','FATES_MORTALITY_SENESCENCE_SZPF',
+'FATES_MORTALITY_AGESCEN_SZPF','FATES_MORTALITY_AGESCEN_ACPF','FATES_MORTALITY_CANOPY_SZPF',
+'FATES_M3_MORTALITY_CANOPY_SZPF','FATES_M3_MORTALITY_USTORY_SZPF','FATES_C13DISC_SZPF',
+'FATES_STOREC_CANOPY_SZPF','FATES_LEAFC_CANOPY_SZPF','FATES_LAI_CANOPY_SZPF','FATES_CROWNAREA_CANOPY_SZPF',
+'FATES_CROWNAREA_USTORY_SZPF','FATES_NPLANT_CANOPY_SZPF','FATES_MORTALITY_USTORY_SZPF','FATES_STOREC_USTORY_SZPF',
+'FATES_LEAFC_USTORY_SZPF','FATES_LAI_USTORY_SZPF','FATES_NPLANT_USTORY_SZPF','FATES_CWD_ABOVEGROUND_DC',
+'FATES_CWD_BELOWGROUND_DC','FATES_CWD_ABOVEGROUND_IN_DC','FATES_CWD_BELOWGROUND_IN_DC',
+'FATES_CWD_ABOVEGROUND_OUT_DC','FATES_CWD_BELOWGROUND_OUT_DC','FATES_YESTCANLEV_CANOPY_SZ',
+'FATES_YESTCANLEV_USTORY_SZ','FATES_VEGC_SZ','FATES_DEMOTION_RATE_SZ','FATES_PROMOTION_RATE_SZ',
+'FATES_SAI_CANOPY_SZ','FATES_M3_MORTALITY_CANOPY_SZ','FATES_M3_MORTALITY_USTORY_SZ','FATES_SAI_USTORY_SZ',
+'FATES_NPP_CANOPY_SZ','FATES_NPP_USTORY_SZ','FATES_TRIMMING_CANOPY_SZ','FATES_TRIMMING_USTORY_SZ',
+'FATES_CROWNAREA_CANOPY_SZ','FATES_CROWNAREA_USTORY_SZ','FATES_LEAFCTURN_CANOPY_SZ','FATES_FROOTCTURN_CANOPY_SZ',
+'FATES_STORECTURN_CANOPY_SZ','FATES_STRUCTCTURN_CANOPY_SZ','FATES_SAPWOODCTURN_CANOPY_SZ','FATES_SEED_PROD_CANOPY_SZ',
+'FATES_LEAF_ALLOC_CANOPY_SZ','FATES_FROOT_ALLOC_CANOPY_SZ','FATES_SAPWOOD_ALLOC_CANOPY_SZ','FATES_STRUCT_ALLOC_CANOPY_SZ',
+'FATES_SEED_ALLOC_CANOPY_SZ','FATES_STORE_ALLOC_CANOPY_SZ','FATES_LEAFCTURN_USTORY_SZ','FATES_FROOTCTURN_USTORY_SZ',
+'FATES_STORECTURN_USTORY_SZ','FATES_STRUCTCTURN_USTORY_SZ','FATES_SAPWOODCTURN_USTORY_SZ',
+'FATES_SEED_PROD_USTORY_SZ','FATES_LEAF_ALLOC_USTORY_SZ','FATES_FROOT_ALLOC_USTORY_SZ','FATES_SAPWOOD_ALLOC_USTORY_SZ',
+'FATES_STRUCT_ALLOC_USTORY_SZ','FATES_SEED_ALLOC_USTORY_SZ','FATES_STORE_ALLOC_USTORY_SZ','FATES_CROWNAREA_CANOPY_CD',
+'FATES_CROWNAREA_USTORY_CD','FATES_NPLANT_CDPF','FATES_NPLANT_CANOPY_CDPF','FATES_NPLANT_USTORY_CDPF',
+'FATES_M3_CDPF','FATES_M11_SZPF','FATES_M11_CDPF','FATES_MORTALITY_CDPF','FATES_M3_MORTALITY_CANOPY_CDPF',
+'FATES_M3_MORTALITY_USTORY_CDPF','FATES_M11_MORTALITY_CANOPY_CDPF','FATES_M11_MORTALITY_USTORY_CDPF',
+'FATES_MORTALITY_CANOPY_CDPF','FATES_MORTALITY_USTORY_CDPF','FATES_DDBH_CDPF','FATES_DDBH_CANOPY_CDPF',
+'FATES_DDBH_USTORY_CDPF','FATES_VEGC_SZPF','FATES_LEAFC_SZPF','FATES_FROOTC_SZPF','FATES_SAPWOODC_SZPF',
+'FATES_STOREC_SZPF','FATES_REPROC_SZPF','FATES_NPP_AP','FATES_GPP_AP','FATES_RDARK_USTORY_SZ',
+'FATES_LSTEMMAINTAR_USTORY_SZ','FATES_CROOTMAINTAR_USTORY_SZ','FATES_FROOTMAINTAR_USTORY_SZ','FATES_GROWAR_USTORY_SZ',
+'FATES_MAINTAR_USTORY_SZ','FATES_RDARK_CANOPY_SZ','FATES_CROOTMAINTAR_CANOPY_SZ','FATES_FROOTMAINTAR_CANOPY_SZ',
+'FATES_GROWAR_CANOPY_SZ','FATES_MAINTAR_CANOPY_SZ','FATES_LSTEMMAINTAR_CANOPY_SZ','FATES_AUTORESP_SZPF',
+'FATES_GROWAR_SZPF','FATES_MAINTAR_SZPF','FATES_RDARK_SZPF','FATES_AGSAPMAINTAR_SZPF','FATES_BGSAPMAINTAR_SZPF',
+'FATES_FROOTMAINTAR_SZPF','FATES_PARSUN_CLLL','FATES_PARSHA_CLLL','FATES_PARSUN_CLLLPF','FATES_PARSHA_CLLLPF',
+'FATES_PARSUN_CL','FATES_PARSHA_CL','FATES_LAISUN_CLLL','FATES_LAISHA_CLLL','FATES_LAISUN_CLLLPF',
+'FATES_LAISHA_CLLLPF','FATES_PARPROF_DIR_CLLLPF','FATES_PARPROF_DIF_CLLLPF','FATES_LAISUN_CL','FATES_LAISHA_CL',
+'FATES_PARPROF_DIR_CLLL','FATES_PARPROF_DIF_CLLL','FATES_NET_C_UPTAKE_CLLL','FATES_CROWNFRAC_CLLLPF',
+'FATES_LBLAYER_COND_AP','FATES_STOMATAL_COND_AP'
diff --git a/components/elm/src/biogeochem/CNEcosystemDynBetrMod.F90 b/components/elm/src/biogeochem/CNEcosystemDynBetrMod.F90
index c3f877f48964..7ff65fe8f1f0 100644
--- a/components/elm/src/biogeochem/CNEcosystemDynBetrMod.F90
+++ b/components/elm/src/biogeochem/CNEcosystemDynBetrMod.F90
@@ -298,7 +298,7 @@ subroutine CNEcosystemDynBetr(bounds, &
call RootDynamics(bounds, num_soilc, filter_soilc, num_soilp, filter_soilp, &
canopystate_vars, cnstate_vars, &
- crop_vars, energyflux_vars, soilstate_vars)
+ crop_vars, energyflux_vars, soilstate_vars, dt)
call t_stopf('RootDynamics')
end if
diff --git a/components/elm/src/biogeochem/EcosystemDynMod.F90 b/components/elm/src/biogeochem/EcosystemDynMod.F90
index f814ca9f3678..32dd9cf6afe3 100644
--- a/components/elm/src/biogeochem/EcosystemDynMod.F90
+++ b/components/elm/src/biogeochem/EcosystemDynMod.F90
@@ -639,7 +639,7 @@ subroutine EcosystemDynNoLeaching2(bounds, &
call RootDynamics(bounds, num_soilc, filter_soilc, num_soilp, filter_soilp, &
canopystate_vars, &
- cnstate_vars, crop_vars, energyflux_vars, soilstate_vars)
+ cnstate_vars, crop_vars, energyflux_vars, soilstate_vars, dt)
call t_stop_lnd(event)
end if
diff --git a/components/elm/src/biogeochem/RootDynamicsMod.F90 b/components/elm/src/biogeochem/RootDynamicsMod.F90
index 79f4c1bb67b7..3c78043aa6ac 100644
--- a/components/elm/src/biogeochem/RootDynamicsMod.F90
+++ b/components/elm/src/biogeochem/RootDynamicsMod.F90
@@ -39,7 +39,7 @@ module RootDynamicsMod
!
subroutine RootDynamics(bounds, num_soilc, filter_soilc, num_soilp, filter_soilp, &
canopystate_vars, &
- cnstate_vars, crop_vars, energyflux_vars, soilstate_vars)
+ cnstate_vars, crop_vars, energyflux_vars, soilstate_vars, dt)
!
! !DESCRIPTION:
! This routine determine the fine root distribution
@@ -62,12 +62,12 @@ subroutine RootDynamics(bounds, num_soilc, filter_soilc, num_soilp, filter_soilp
type(crop_type) , intent(in) :: crop_vars
type(energyflux_type) , intent(in) :: energyflux_vars
type(soilstate_type) , intent(inout) :: soilstate_vars
+ real(r8) , intent(in) :: dt ! radiation time step delta t (seconds)
!
! !LOCAL VARIABLES:
integer :: f,c,p,lev,j ! indices
- real(r8) :: dt ! radiation time step delta t (seconds)
real(r8) :: w_limit(bounds%begp:bounds%endp) ! soil water weighting factor
real(r8) :: rswa(bounds%begp:bounds%endp,1:nlevgrnd) ! soil water availability in each soil layer
real(r8) :: rsmn(bounds%begp:bounds%endp,1:nlevgrnd) ! soil nitrogen availability in each soil layer
diff --git a/components/elm/src/cpl/lnd_comp_mct.F90 b/components/elm/src/cpl/lnd_comp_mct.F90
index 6b006b5d9188..e533d5bcbc2b 100644
--- a/components/elm/src/cpl/lnd_comp_mct.F90
+++ b/components/elm/src/cpl/lnd_comp_mct.F90
@@ -846,7 +846,7 @@ subroutine init_moab_land(bounds, LNDID)
use spmdmod , only: masterproc
use iMOAB , only: iMOAB_CreateVertices, iMOAB_WriteMesh, iMOAB_RegisterApplication, &
iMOAB_DefineTagStorage, iMOAB_SetIntTagStorage, iMOAB_SetDoubleTagStorage, &
- iMOAB_ResolveSharedEntities, iMOAB_CreateElements, iMOAB_MergeVertices, iMOAB_UpdateMeshInfo
+ iMOAB_ResolveSharedEntities, iMOAB_CreateElements, iMOAB_UpdateMeshInfo
type(bounds_type) , intent(in) :: bounds
integer , intent(in) :: LNDID ! id of the land app
diff --git a/components/elm/src/external_models/fates b/components/elm/src/external_models/fates
index 42d804ba54d0..b8e4eee5ed46 160000
--- a/components/elm/src/external_models/fates
+++ b/components/elm/src/external_models/fates
@@ -1 +1 @@
-Subproject commit 42d804ba54d0cf013a9737018ff9920e0c9808ea
+Subproject commit b8e4eee5ed46daf5c9e710e9ebbe6d20464adbc8
diff --git a/components/elm/src/main/controlMod.F90 b/components/elm/src/main/controlMod.F90
index 3b8c08be31bd..8f763a9ebd4d 100755
--- a/components/elm/src/main/controlMod.F90
+++ b/components/elm/src/main/controlMod.F90
@@ -263,7 +263,8 @@ subroutine control_init( )
fluh_timeseries, &
fates_parteh_mode, &
fates_seeddisp_cadence, &
- use_fates_tree_damage
+ use_fates_tree_damage, &
+ fates_history_dimlevel
namelist /elm_inparm / use_betr
@@ -818,7 +819,8 @@ subroutine control_spmd()
call mpi_bcast (fates_parteh_mode, 1, MPI_INTEGER, 0, mpicom, ier)
call mpi_bcast (fates_seeddisp_cadence, 1, MPI_INTEGER, 0, mpicom, ier)
call mpi_bcast (use_fates_tree_damage, 1, MPI_LOGICAL, 0, mpicom, ier)
-
+ call mpi_bcast (fates_history_dimlevel, 2, MPI_INTEGER, 0, mpicom, ier)
+
call mpi_bcast (use_betr, 1, MPI_LOGICAL, 0, mpicom, ier)
call mpi_bcast (use_lai_streams, 1, MPI_LOGICAL, 0, mpicom, ier)
diff --git a/components/elm/src/main/elm_varctl.F90 b/components/elm/src/main/elm_varctl.F90
index d5a61da8fdbf..7fa580b19f28 100644
--- a/components/elm/src/main/elm_varctl.F90
+++ b/components/elm/src/main/elm_varctl.F90
@@ -240,7 +240,20 @@ module elm_varctl
integer, public :: fates_seeddisp_cadence = iundef ! 0 => no seed dispersal across gridcells
! 1, 2, 3 => daily, monthly, or yearly seed dispersal
-
+ ! FATES history dimension level
+ ! fates can produce history at either the daily timescale (dynamics)
+ ! or the model step timescale. It can also generate output on the extra dimension
+ ! Performing this output can be expensive, so we allow different history dimension
+ ! levels.
+ ! The first index is output at the model timescale
+ ! The second index is output at the dynamics (daily) timescale
+ ! 0 - no output
+ ! 1 - include only column level means (3D)
+ ! 2 - include output that includes the 4th dimension
+
+ integer, dimension(2), public :: fates_history_dimlevel = (/2,2/)
+
+
!----------------------------------------------------------
! BeTR switches
!----------------------------------------------------------
diff --git a/components/elm/src/main/elmfates_interfaceMod.F90 b/components/elm/src/main/elmfates_interfaceMod.F90
index 9b9fbc6e3903..1b9d6c1ba490 100644
--- a/components/elm/src/main/elmfates_interfaceMod.F90
+++ b/components/elm/src/main/elmfates_interfaceMod.F90
@@ -59,6 +59,7 @@ module ELMFatesInterfaceMod
use elm_varctl , only : use_fates_tree_damage
use elm_varctl , only : nsrest, nsrBranch
use elm_varctl , only : fates_inventory_ctrl_filename
+ use elm_varctl , only : fates_history_dimlevel
use elm_varctl , only : use_lch4
use elm_varctl , only : use_century_decomp
use elm_varcon , only : tfrz
@@ -137,7 +138,7 @@ module ELMFatesInterfaceMod
use FatesHistoryInterfaceMod, only : fates_hist
use FatesRestartInterfaceMod, only : fates_restart_interface_type
use FatesInterfaceTypesMod, only : hlm_num_luh2_states
-
+ use FatesIOVariableKindMod, only : group_dyna_simple, group_dyna_complx
use PRTGenericMod , only : num_elements
use FatesPatchMod , only : fates_patch_type
use FatesDispersalMod , only : lneighbors, dispersal_type, IsItDispersalTime
@@ -284,6 +285,7 @@ module ELMFatesInterfaceMod
public :: ELMFatesGlobals1
public :: ELMFatesGlobals2
public :: ELMFatesTimesteps
+ public :: CrossRefHistoryFields
contains
@@ -427,6 +429,9 @@ subroutine ELMFatesGlobals2()
call set_fates_ctrlparms('parteh_mode',ival=fates_parteh_mode)
call set_fates_ctrlparms('seeddisp_cadence',ival=fates_seeddisp_cadence)
+ call set_fates_ctrlparms('hist_hifrq_dimlevel',ival=fates_history_dimlevel(1))
+ call set_fates_ctrlparms('hist_dynam_dimlevel',ival=fates_history_dimlevel(2))
+
if(use_fates_tree_damage)then
pass_tree_damage = 1
else
@@ -590,6 +595,86 @@ subroutine ELMFatesGlobals2()
return
end subroutine ELMFatesGlobals2
+
+ ! ===================================================================================
+
+ subroutine CrossRefHistoryFields
+
+ ! This routine only needs to be called on the masterproc.
+ ! Here we cross reference the ELM history master
+ ! list and make sure that all fields that start
+ ! with FATES_ have been allocated. If one has
+ ! not, then we give a more constructive error
+ ! message than what is possible in PIO. The user
+ ! most likely needs to increase the history dimension
+ ! level (fates_history_dimlevel)
+
+ use histFileMod, only: getname
+ use histFileMod, only: hist_fincl1,hist_fincl2,hist_fincl3,hist_fincl4
+ use histFileMod, only: hist_fincl5,hist_fincl6
+ use histFileMod, only: max_tapes, max_flds, max_namlen
+
+ integer :: t ! iterator index for history tapes
+ integer :: f ! iterator index for registered history field names
+ integer :: nh ! iterator index for fates registered history
+ logical :: is_fates_field ! Does this start with FATES_ ?
+ logical :: found ! if true, than the history field is either
+ ! not part of the fates set, or was found in
+ ! the fates set
+ character(len=64) :: fincl_name
+ ! This is a copy of the public in histFileMod, copied
+ ! here because it isn't filled at the time of this call
+ character(len=max_namlen+2) :: fincl(max_flds,max_tapes)
+
+ fincl(:,1) = hist_fincl1(:)
+ fincl(:,2) = hist_fincl2(:)
+ fincl(:,3) = hist_fincl3(:)
+ fincl(:,4) = hist_fincl4(:)
+ fincl(:,5) = hist_fincl5(:)
+ fincl(:,6) = hist_fincl6(:)
+
+ do t = 1,max_tapes
+
+ f = 1
+ search_fields: do while (f < max_flds .and. fincl(f,t) /= ' ')
+
+ fincl_name = getname(fincl(f,t))
+ is_fates_field = fincl_name(1:6)=='FATES_'
+
+ if(is_fates_field) then
+ found = .false.
+ do_fates_hist: do nh = 1,fates_hist%num_history_vars()
+ if(trim(fates_hist%hvars(nh)%vname) == &
+ trim(fincl_name)) then
+ found=.true.
+ exit do_fates_hist
+ end if
+ end do do_fates_hist
+
+ if(.not.found)then
+ write(iulog,*) 'the history field: ',trim(fincl_name)
+ write(iulog,*) 'was requested in the namelist, but was'
+ write(iulog,*) 'not found in the list of fates_hist%hvars.'
+ write(iulog,*) 'Most likely, this is because this history variable'
+ write(iulog,*) 'was specified in the user namelist, but the user'
+ write(iulog,*) 'specified a FATES history output dimension level'
+ write(iulog,*) 'that does not contain that variable in its valid set.'
+ write(iulog,*) 'You may have to increase the namelist setting: fates_history_dimlevel'
+ write(iulog,*) 'current fates_history_dimlevel: ',fates_history_dimlevel(:)
+ if (debug) then
+ !if you want to list all fates history variables in registry turn on debug
+ do_fates_hist2: do nh = 1,fates_hist%num_history_vars()
+ write(iulog,*) trim(fates_hist%hvars(nh)%vname)
+ end do do_fates_hist2
+ end if
+ call endrun(msg=errMsg(sourcefile, __LINE__))
+ end if
+ end if
+ f = f + 1
+ end do search_fields
+
+ end do
+ end subroutine CrossRefHistoryFields
! ====================================================================================
@@ -1097,11 +1182,8 @@ subroutine dynamics_driv(this, bounds_clump, top_as_inst, &
! Flush arrays to values defined by %flushval (see registry entry in
! subroutine define_history_vars()
! ---------------------------------------------------------------------------------
- call fates_hist%flush_hvars(nc,upfreq_in=1)
-
- ! Frequency 5 is routine that processes FATES history
- ! on the dynamics (daily) step, but before disturbance
- call fates_hist%flush_hvars(nc,upfreq_in=5)
+ call fates_hist%flush_hvars(nc,upfreq_in=group_dyna_simple)
+ call fates_hist%flush_hvars(nc,upfreq_in=group_dyna_complx)
! ---------------------------------------------------------------------------------
! Part II: Call the FATES model now that input boundary conditions have been
@@ -1791,14 +1873,22 @@ subroutine restart( this, bounds_proc, ncid, flag, &
! ------------------------------------------------------------------------
! Update history IO fields that depend on ecosystem dynamics
! ------------------------------------------------------------------------
- call fates_hist%flush_hvars(nc,upfreq_in=1)
- call fates_hist%flush_hvars(nc,upfreq_in=5)
- do s = 1,this%fates(nc)%nsites
- call fates_hist%zero_site_hvars(this%fates(nc)%sites(s), &
- upfreq_in=1)
- call fates_hist%zero_site_hvars(this%fates(nc)%sites(s), &
- upfreq_in=5)
- end do
+
+ if(fates_history_dimlevel(2)>0) then
+ call fates_hist%flush_hvars(nc,upfreq_in=group_dyna_simple)
+ do s = 1,this%fates(nc)%nsites
+ call fates_hist%zero_site_hvars(this%fates(nc)%sites(s), &
+ upfreq_in=group_dyna_simple)
+ end do
+ if(fates_history_dimlevel(2)>1) then
+ call fates_hist%flush_hvars(nc,upfreq_in=group_dyna_complx)
+ do s = 1,this%fates(nc)%nsites
+ call fates_hist%zero_site_hvars(this%fates(nc)%sites(s), &
+ upfreq_in=group_dyna_complx)
+ end do
+ end if
+ end if
+
call fates_hist%update_history_dyn( nc, &
this%fates(nc)%nsites, &
this%fates(nc)%sites, &
@@ -1971,15 +2061,21 @@ subroutine init_coldstart(this, canopystate_inst, soilstate_inst, frictionvel_in
! ------------------------------------------------------------------------
! Update history IO fields that depend on ecosystem dynamics
! ------------------------------------------------------------------------
-
- call fates_hist%flush_hvars(nc,upfreq_in=1)
- call fates_hist%flush_hvars(nc,upfreq_in=5)
- do s = 1,this%fates(nc)%nsites
- call fates_hist%zero_site_hvars(this%fates(nc)%sites(s), &
- upfreq_in=1)
- call fates_hist%zero_site_hvars(this%fates(nc)%sites(s), &
- upfreq_in=5)
- end do
+ if(fates_history_dimlevel(2)>0) then
+ call fates_hist%flush_hvars(nc,upfreq_in=group_dyna_simple)
+ do s = 1,this%fates(nc)%nsites
+ call fates_hist%zero_site_hvars(this%fates(nc)%sites(s), &
+ upfreq_in=group_dyna_simple)
+ end do
+ if(fates_history_dimlevel(2)>1) then
+ call fates_hist%flush_hvars(nc,upfreq_in=group_dyna_complx)
+ do s = 1,this%fates(nc)%nsites
+ call fates_hist%zero_site_hvars(this%fates(nc)%sites(s), &
+ upfreq_in=group_dyna_complx)
+ end do
+ end if
+ end if
+
call fates_hist%update_history_dyn( nc, &
this%fates(nc)%nsites, &
this%fates(nc)%sites, &
@@ -2762,6 +2858,7 @@ subroutine wrap_update_hifrq_hist(this, bounds_clump )
this%fates(nc)%nsites, &
this%fates(nc)%sites, &
this%fates(nc)%bc_in, &
+ this%fates(nc)%bc_out, &
dtime)
@@ -3068,6 +3165,8 @@ subroutine init_history_io(this,bounds_proc)
call fates_hist%initialize_history_vars()
nvar = fates_hist%num_history_vars()
+ call CrossRefHistoryFields()
+
do ivar = 1, nvar
associate( vname => fates_hist%hvars(ivar)%vname, &
diff --git a/components/elm/src/main/histFileMod.F90 b/components/elm/src/main/histFileMod.F90
index c11eafe96f45..1b86c3a1618a 100644
--- a/components/elm/src/main/histFileMod.F90
+++ b/components/elm/src/main/histFileMod.F90
@@ -148,7 +148,7 @@ module histFileMod
private :: hist_set_snow_field_2d ! Set values in history field dimensioned by levsno
private :: list_index ! Find index of field in exclude list
private :: set_hist_filename ! Determine history dataset filenames
- private :: getname ! Retrieve name portion of input "inname"
+ public :: getname ! Retrieve name portion of input "inname" (PUBLIC for FATES)
private :: getflag ! Retrieve flag
private :: pointer_index ! Track data pointer indices
private :: max_nFields ! The max number of fields on any tape
diff --git a/components/elm/src/main/ncdio_pio.F90.in b/components/elm/src/main/ncdio_pio.F90.in
index 51160c3e8cc4..6fbaba0b7108 100644
--- a/components/elm/src/main/ncdio_pio.F90.in
+++ b/components/elm/src/main/ncdio_pio.F90.in
@@ -1070,7 +1070,7 @@ contains
dim1name, dim2name, dim3name, dim4name, dim5name, &
long_name, standard_name, units, cell_method, missing_value, fill_value, &
imissing_value, ifill_value, switchdim, comment, &
- flag_meanings, flag_values, nvalid_range )
+ flag_meanings, flag_values, nvalid_range, varid )
!
! !DESCRIPTION:
! Define a netcdf variable
@@ -1097,12 +1097,13 @@ contains
logical , intent(in), optional :: switchdim ! true=> permute dim1 and dim2 for output
integer , intent(in), optional :: flag_values(:) ! attribute for int
integer , intent(in), optional :: nvalid_range(2) ! attribute for int
+ integer , intent(out), optional :: varid ! returned var id
!
! !LOCAL VARIABLES:
integer :: n ! indices
integer :: ndims ! dimension counter
integer :: dimid(5) ! dimension ids
- integer :: varid ! variable id
+ integer :: varid_tmp ! variable id (temporary)
integer :: itmp ! temporary
character(len=256) :: str ! temporary
character(len=*),parameter :: subname='ncd_defvar_bygrid' ! subroutine name
@@ -1135,13 +1136,17 @@ contains
end do
end if
- call ncd_defvar_bynf(ncid,varname,xtype,ndims,dimid,varid, &
+ call ncd_defvar_bynf(ncid,varname,xtype,ndims,dimid,varid_tmp, &
long_name=long_name, standard_name=standard_name,units=units, cell_method=cell_method, &
missing_value=missing_value, fill_value=fill_value, &
imissing_value=imissing_value, ifill_value=ifill_value, &
comment=comment, flag_meanings=flag_meanings, &
flag_values=flag_values, nvalid_range=nvalid_range )
+ if (present(varid)) then
+ varid = varid_tmp
+ end if
+
end subroutine ncd_defvar_bygrid
!------------------------------------------------------------------------
diff --git a/components/elm/src/utils/restUtilMod.F90.in b/components/elm/src/utils/restUtilMod.F90.in
index 7bb9dd04093c..47e3bc87a76c 100644
--- a/components/elm/src/utils/restUtilMod.F90.in
+++ b/components/elm/src/utils/restUtilMod.F90.in
@@ -68,7 +68,6 @@ contains
!
! Local variables
integer :: ivalue
- type(var_desc_t) :: vardesc ! local vardesc
integer :: status ! return error code
integer :: varid
integer :: lxtype ! local external type (in case logical variable)
@@ -84,10 +83,10 @@ contains
end if
call ncd_defvar(ncid=ncid, varname=trim(varname), xtype=lxtype, &
- long_name=trim(long_name), units=units)
-
- status = PIO_inq_varid(ncid, trim(varname), vardesc)
- varid = vardesc%varid
+ long_name=trim(long_name), units=units, comment=comment, &
+ fill_value=fill_value, missing_value=missing_value, &
+ ifill_value=ifill_value, imissing_value=imissing_value, &
+ varid=varid)
if (trim(interpinic_flag) == 'interp') then
status = PIO_put_att(ncid, varid, 'interpinic_flag', iflag_interp)
@@ -99,36 +98,25 @@ contains
status = PIO_put_att(ncid, varid, 'interpinic_flag_meanings', &
"1=nearest neighbor, 2=copy directly, 3=skip")
- if (present(comment)) then
- call ncd_putatt(ncid, varid, 'comment', trim(comment))
- end if
- if (present(units)) then
- call ncd_putatt(ncid, varid, 'units', trim(units))
- end if
- if (present(fill_value)) then
- call ncd_putatt(ncid, varid, '_FillValue', fill_value, lxtype)
- else if (lxtype == ncd_double) then
- call ncd_putatt(ncid, varid, '_FillValue', spval, lxtype)
+ if (.not. present(fill_value)) then
+ if (lxtype == ncd_double) then
+ call ncd_putatt(ncid, varid, '_FillValue', spval, lxtype)
+ end if
end if
- if (present(missing_value)) then
- call ncd_putatt(ncid, varid, 'missing_value', missing_value, lxtype)
- else if (lxtype == ncd_double) then
- call ncd_putatt(ncid, varid, 'missing_value', spval, lxtype)
+ if (.not. present(missing_value)) then
+ if (lxtype == ncd_double) then
+ call ncd_putatt(ncid, varid, 'missing_value', spval, lxtype)
+ end if
end if
- if (present(ifill_value)) then
- call ncd_putatt(ncid, varid, '_FillValue', ifill_value, lxtype)
- else if (lxtype == ncd_int) then
- call ncd_putatt(ncid, varid, '_FillValue', ispval, lxtype)
+ if (.not. present(ifill_value)) then
+ if (lxtype == ncd_int) then
+ call ncd_putatt(ncid, varid, '_FillValue', ispval, lxtype)
+ end if
end if
- if (present(imissing_value)) then
- call ncd_putatt(ncid, varid, 'missing_value', imissing_value, lxtype)
- else if (lxtype == ncd_int) then
- call ncd_putatt(ncid, varid, 'missing_value', ispval, lxtype)
- end if
- if ( xtype == ncd_log )then
- status = PIO_put_att(ncid,varid,'flag_values', (/0, 1/) )
- status = PIO_put_att(ncid,varid,'flag_meanings', "FALSE TRUE" )
- status = PIO_put_att(ncid,varid,'valid_range', (/0, 1/) )
+ if (.not. present(imissing_value)) then
+ if (lxtype == ncd_int) then
+ call ncd_putatt(ncid, varid, 'missing_value', ispval, lxtype)
+ end if
end if
else if (flag == 'read' .or. flag == 'write') then
@@ -178,7 +166,6 @@ contains
!
! Local variables
integer :: ivalue
- type(var_desc_t) :: vardesc ! local vardesc
integer :: status ! return error code
integer :: varid
integer :: lxtype ! local external type (in case logical variable)
@@ -195,20 +182,23 @@ contains
if (.not. present(dim1name)) then
call ncd_defvar(ncid=ncid, varname=trim(varname), xtype=lxtype, &
- long_name=trim(long_name), units=units)
+ long_name=trim(long_name), units=units, comment=comment, fill_value=fill_Value, &
+ missing_value=missing_value, ifill_value=ifill_value, imissing_value=imissing_value, &
+ nvalid_range=nvalid_range, varid=varid)
else if (.not. present(dim2name)) then
call ncd_defvar(ncid=ncid, varname=trim(varname), xtype=lxtype, &
dim1name=trim(dim1name), &
- long_name=trim(long_name), units=units)
+ long_name=trim(long_name), units=units, comment=comment, fill_value=fill_Value, &
+ missing_value=missing_value, ifill_value=ifill_value, imissing_value=imissing_value, &
+ nvalid_range=nvalid_range, varid=varid)
else if (present(dim2name)) then
call ncd_defvar(ncid=ncid, varname=trim(varname), xtype=lxtype, &
dim1name=trim(dim1name), dim2name=trim(dim2name), &
- long_name=trim(long_name), units=units)
+ long_name=trim(long_name), units=units, comment=comment, fill_value=fill_Value, &
+ missing_value=missing_value, ifill_value=ifill_value, imissing_value=imissing_value, &
+ nvalid_range=nvalid_range, varid=varid)
end if
- status = PIO_inq_varid(ncid, trim(varname), vardesc)
- varid = vardesc%varid
-
if (trim(interpinic_flag) == 'interp') then
status = PIO_put_att(ncid, varid, 'interpinic_flag', iflag_interp)
else if (trim(interpinic_flag) == 'copy') then
@@ -219,40 +209,25 @@ contains
status = PIO_put_att(ncid, varid, 'interpinic_flag_meanings', &
"1=nearest neighbor, 2=copy directly, 3=skip")
- if (present(comment)) then
- call ncd_putatt(ncid, varid, 'comment', trim(comment))
+ if (.not. present(fill_value)) then
+ if (lxtype == ncd_double) then
+ call ncd_putatt(ncid, varid, '_FillValue', spval, lxtype)
+ end if
end if
- if (present(units)) then
- call ncd_putatt(ncid, varid, 'units', trim(units))
+ if (.not. present(missing_value)) then
+ if (lxtype == ncd_double) then
+ call ncd_putatt(ncid, varid, 'missing_value', spval, lxtype)
+ end if
end if
-
- if (present(fill_value)) then
- call ncd_putatt(ncid, varid, '_FillValue', fill_value, lxtype)
- else if (lxtype == ncd_double) then
- call ncd_putatt(ncid, varid, '_FillValue', spval, lxtype)
- end if
- if (present(missing_value)) then
- call ncd_putatt(ncid, varid, 'missing_value', missing_value, lxtype)
- else if (lxtype == ncd_double) then
- call ncd_putatt(ncid, varid, 'missing_value', spval, lxtype)
- end if
- if (present(ifill_value)) then
- call ncd_putatt(ncid, varid, '_FillValue', ifill_value, lxtype)
- else if (lxtype == ncd_int) then
- call ncd_putatt(ncid, varid, '_FillValue', ispval, lxtype)
+ if (.not. present(ifill_value)) then
+ if (lxtype == ncd_int) then
+ call ncd_putatt(ncid, varid, '_FillValue', ispval, lxtype)
+ end if
end if
- if (present(imissing_value)) then
- call ncd_putatt(ncid, varid, 'missing_value', imissing_value, lxtype)
- else if (lxtype == ncd_int) then
- call ncd_putatt(ncid, varid, 'missing_value', ispval, lxtype)
- end if
- if (present(nvalid_range)) then
- status = PIO_put_att(ncid,varid,'valid_range', nvalid_range )
- end if
- if ( xtype == ncd_log )then
- status = PIO_put_att(ncid,varid,'flag_values', (/0, 1/) )
- status = PIO_put_att(ncid,varid,'flag_meanings', "FALSE TRUE" )
- status = PIO_put_att(ncid,varid,'valid_range', (/0, 1/) )
+ if (.not. present(imissing_value)) then
+ if (lxtype == ncd_int) then
+ call ncd_putatt(ncid, varid, 'missing_value', ispval, lxtype)
+ end if
end if
else if (flag == 'read' .or. flag == 'write') then
@@ -309,7 +284,6 @@ contains
!
! Local variables
integer :: ivalue
- type(var_desc_t) :: vardesc ! local vardesc
integer :: status ! return error code
integer :: varid ! returned var id
integer :: lxtype ! local external type (in case logical variable)
@@ -327,15 +301,16 @@ contains
if (switchdim) then
call ncd_defvar(ncid=ncid, varname=trim(varname), xtype=lxtype, &
dim1name=trim(dim2name), dim2name=trim(dim1name), &
- long_name=trim(long_name), units=units)
+ long_name=trim(long_name), units=units, comment=comment, fill_value=fill_Value, &
+ missing_value=missing_value, ifill_value=ifill_value, imissing_value=imissing_value, &
+ nvalid_range=nvalid_range, varid=varid)
else
call ncd_defvar(ncid=ncid, varname=trim(varname), xtype=lxtype, &
dim1name=trim(dim1name), dim2name=trim(dim2name), &
- long_name=trim(long_name), units=units)
+ long_name=trim(long_name), units=units, comment=comment, fill_value=fill_Value, &
+ missing_value=missing_value, ifill_value=ifill_value, imissing_value=imissing_value, &
+ nvalid_range=nvalid_range, varid=varid)
end if
- status = PIO_inq_varid(ncid, trim(varname), vardesc)
-
- varid = vardesc%varid
if (trim(interpinic_flag) == 'interp') then
status = PIO_put_att(ncid, varid, 'interpinic_flag', iflag_interp)
@@ -348,49 +323,35 @@ contains
"1=>nearest_neighbor 2=>copy 3=>skip")
if (switchdim) then
- status = PIO_put_att(ncid, vardesc%varid, 'switchdim_flag', 1)
+ status = PIO_put_att(ncid, varid, 'switchdim_flag', 1)
else
- status = PIO_put_att(ncid, vardesc%varid, 'switchdim_flag', 0)
+ status = PIO_put_att(ncid, varid, 'switchdim_flag', 0)
end if
- status = PIO_put_att(ncid, vardesc%varid, 'switchdim_flag_values', (/0,1/))
- status = PIO_put_att(ncid, vardesc%varid, 'switchdim_flag_is_0', &
+ status = PIO_put_att(ncid, varid, 'switchdim_flag_values', (/0,1/))
+ status = PIO_put_att(ncid, varid, 'switchdim_flag_is_0', &
"1st and 2nd dims are same as model representation")
- status = PIO_put_att(ncid, vardesc%varid, 'switchdim_flag_is_1', &
+ status = PIO_put_att(ncid, varid, 'switchdim_flag_is_1', &
"1st and 2nd dims are switched from model representation")
- if (present(comment)) then
- call ncd_putatt(ncid, varid, 'comment', trim(comment))
- end if
- if (present(units)) then
- call ncd_putatt(ncid, varid, 'units', trim(units))
- end if
- if (present(fill_value)) then
- call ncd_putatt(ncid, varid, '_FillValue', fill_value, lxtype)
- else if (lxtype == ncd_double) then
- call ncd_putatt(ncid, varid, '_FillValue', spval, lxtype)
- end if
- if (present(missing_value)) then
- call ncd_putatt(ncid, varid, 'missing_value', missing_value, lxtype)
- else if (lxtype == ncd_double) then
- call ncd_putatt(ncid, varid, 'missing_value', spval, lxtype)
- end if
- if (present(ifill_value)) then
- call ncd_putatt(ncid, varid, '_FillValue', ifill_value, lxtype)
- else if (lxtype == ncd_int) then
- call ncd_putatt(ncid, varid, '_FillValue', ispval, lxtype)
- end if
- if (present(imissing_value)) then
- call ncd_putatt(ncid, varid, 'missing_value', imissing_value, lxtype)
- else if (lxtype == ncd_int) then
- call ncd_putatt(ncid, varid, 'missing_value', ispval, lxtype)
- end if
- if (present(nvalid_range)) then
- status = PIO_put_att(ncid,varid,'valid_range', nvalid_range )
- end if
- if ( xtype == ncd_log )then
- status = PIO_put_att(ncid,varid,'flag_values', (/0, 1/) )
- status = PIO_put_att(ncid,varid,'flag_meanings', "FALSE TRUE" )
- status = PIO_put_att(ncid,varid,'valid_range', (/0, 1/) )
+ if (.not. present(fill_value)) then
+ if (lxtype == ncd_double) then
+ call ncd_putatt(ncid, varid, '_FillValue', spval, lxtype)
+ end if
+ end if
+ if (.not. present(missing_value)) then
+ if (lxtype == ncd_double) then
+ call ncd_putatt(ncid, varid, 'missing_value', spval, lxtype)
+ end if
+ end if
+ if (.not. present(ifill_value)) then
+ if (lxtype == ncd_int) then
+ call ncd_putatt(ncid, varid, '_FillValue', ispval, lxtype)
+ end if
+ end if
+ if (.not. present(imissing_value)) then
+ if (lxtype == ncd_int) then
+ call ncd_putatt(ncid, varid, 'missing_value', ispval, lxtype)
+ end if
end if
else
diff --git a/components/mpas-ocean/bld/build-namelist b/components/mpas-ocean/bld/build-namelist
index 2a16bef69e3c..1c55bcb7be1d 100755
--- a/components/mpas-ocean/bld/build-namelist
+++ b/components/mpas-ocean/bld/build-namelist
@@ -1041,7 +1041,7 @@ if ($OCN_FORCING eq 'datm_forced_restoring') {
add_default($nl, 'config_use_surface_salinity_monthly_restoring', 'val'=>".true.");
add_default($nl, 'config_salinity_restoring_constant_piston_velocity', 'val'=>"1.585e-6");
add_default($nl, 'config_salinity_restoring_max_difference', 'val'=>"100.");
- add_default($nl, 'config_salinity_restoring_under_sea_ice', 'val'=>".false.");
+ add_default($nl, 'config_salinity_restoring_under_sea_ice', 'val'=>".true.");
} else {
add_default($nl, 'config_use_activeTracers_surface_restoring');
add_default($nl, 'config_use_surface_salinity_monthly_restoring');
diff --git a/components/mpas-ocean/bld/namelist_files/namelist_defaults_mpaso.xml b/components/mpas-ocean/bld/namelist_files/namelist_defaults_mpaso.xml
index be51b84bf033..c7441c6b2214 100644
--- a/components/mpas-ocean/bld/namelist_files/namelist_defaults_mpaso.xml
+++ b/components/mpas-ocean/bld/namelist_files/namelist_defaults_mpaso.xml
@@ -54,6 +54,7 @@
'00:04:00'
'00:02:00'
'00:01:00'
+'00:05:00'
'split_explicit_ab2'
2
@@ -83,6 +84,7 @@
.true.
.true.
.true.
+.true.
-1.0
.false.
30.0e3
@@ -104,6 +106,7 @@
.true.
.true.
.true.
+.true.
10.0
1000.0
1000.0
@@ -119,6 +122,7 @@
154.0
77.0
38.5
+100.0
.false.
10.0
@@ -149,6 +153,7 @@
4.37e08
5.46e07
6.83e06
+3.2e09
1.0
.false.
0.0
@@ -165,6 +170,7 @@
.false.
.false.
.false.
+.false.
'constant'
400.0
400.0
@@ -203,6 +209,7 @@
.false.
.false.
.false.
+.false.
'EdenGreatbatch'
'constant'
'constant'
@@ -399,6 +406,7 @@
'pressure_only'
'pressure_only'
'pressure_only'
+'pressure_only'
'Jenkins'
.false.
10.0
@@ -417,6 +425,7 @@
4.48e-3
4.48e-3
4.48e-3
+4.48e-3
1e-4
0.011
0.00295
@@ -429,6 +438,7 @@
0.00295
0.00295
0.00295
+0.00295
3.1e-4
8.42e-5
8.42e-5
@@ -440,6 +450,7 @@
8.42e-5
8.42e-5
8.42e-5
+8.42e-5
1.0
0.0
5e-2
@@ -471,6 +482,7 @@
4.48e-3
4.48e-3
4.48e-3
+4.48e-3
1.0e-3
10.0
2.5e-3
@@ -557,6 +569,7 @@
'0000_00:00:05'
'0000_00:00:02.5'
'0000_00:00:01.25'
+'0000_00:00:05'
2
.true.
2
@@ -603,6 +616,7 @@
.false.
.false.
.false.
+.false.
.false.
.false.
.false.
@@ -664,7 +678,7 @@
.false.
.false.
.false.
-'0000-00-01_00:00:00'
+'dt'
1.585e-6
0.5
.false.
@@ -1128,6 +1142,7 @@
.true.
.true.
.true.
+.true.
'0000-00-00_01:00:00'
'mocStreamfunctionOutput'
.true.
@@ -1215,6 +1230,7 @@
.true.
.true.
.true.
+.true.
'dt'
'conservationCheckOutput'
.false.
@@ -1226,6 +1242,7 @@
.true.
.true.
.true.
+.true.
.false.
.true.
.true.
@@ -1235,6 +1252,7 @@
.true.
.true.
.true.
+.true.
.true.
'conservationCheckRestart'
diff --git a/components/mpas-ocean/bld/namelist_files/namelist_definition_mpaso.xml b/components/mpas-ocean/bld/namelist_files/namelist_definition_mpaso.xml
index f45de96a7ac6..f6c2f9c5b765 100644
--- a/components/mpas-ocean/bld/namelist_files/namelist_definition_mpaso.xml
+++ b/components/mpas-ocean/bld/namelist_files/namelist_definition_mpaso.xml
@@ -2769,7 +2769,7 @@ Default: Defined in namelist_defaults.xml
category="tracer_forcing_activeTracers" group="tracer_forcing_activeTracers">
Time interval to compute salinity restoring tendency.
-Valid values: Any valid time stamp or 'dt'
+Valid values: Any valid time stamp e.g. format '0000-00-01_00:00:00' or 'dt'
Default: Defined in namelist_defaults.xml
@@ -2791,7 +2791,7 @@ Default: Defined in namelist_defaults.xml
-Flag to enable salinity restoring under sea ice. The default setting is false, where salinity restoring tapers from full restoring in the open ocean (iceFraction=0.0) to zero restoring below full sea ice coverage (iceFraction=1.0); below partial sea ice coverage, restoring is in proportion to iceFraction. If true, full salinity restoring is used everywhere, regardless of iceFraction value
+Flag to enable salinity restoring under sea ice. The default setting is true, meaning that full salinity restoring is used everywhere, regardless of iceFraction value, except under ice shelves. If false, the salinity restoring tapers from full restoring in the open ocean (iceFraction=0.0) to zero restoring below full sea ice coverage (iceFraction=1.0); below partial sea ice coverage, restoring is in proportion to iceFraction.
Valid values: .true. or .false.
Default: Defined in namelist_defaults.xml
diff --git a/components/mpas-ocean/cime_config/buildnml b/components/mpas-ocean/cime_config/buildnml
index b3cde0620382..4c7ed2c0a311 100755
--- a/components/mpas-ocean/cime_config/buildnml
+++ b/components/mpas-ocean/cime_config/buildnml
@@ -383,6 +383,20 @@ def buildnml(case, caseroot, compname):
ic_date = '20240314'
ic_prefix = 'mpaso.IcosXISC30E3r7.rstFromPiControlSpinup-chrysalis'
+ elif ocn_grid == 'RRSwISC6to18E3r5':
+ decomp_date = '20240404'
+ decomp_prefix = 'partitions/mpas-o.graph.info.'
+ restoring_file = 'sss.PHC2_monthlyClimatology.RRSwISC6to18E3r5.20240327.nc'
+ analysis_mask_file = 'RRSwISC6to18E3r5_mocBasinsAndTransects20210623.nc'
+ ic_date = '20240327'
+ ic_prefix = 'mpaso.RRSwISC6to18E3r5'
+ if ocn_ic_mode == 'spunup':
+ logger.warning("WARNING: The specified compset is requesting ocean ICs spunup from a G-case")
+ logger.warning(" But no file available for this grid.")
+ if ocn_ismf == 'data':
+ data_ismf_file = 'prescribed_ismf_paolo2023.RRSwISC6to18E3r5.20240327.nc'
+
+
#--------------------------------------------------------------------
# Set OCN_FORCING = datm_forced_restoring if restoring file is available
#--------------------------------------------------------------------
@@ -513,7 +527,8 @@ def buildnml(case, caseroot, compname):
lines.append('')
lines.append('')
lines.append('')
lines.append('')
lines.append('')
lines.append(' ')
@@ -1240,7 +1259,8 @@ def buildnml(case, caseroot, compname):
lines.append('')
lines.append(' ')
lines.append(' ')
- if not ocn_grid.startswith("oRRS1"):
+ if not (ocn_grid.startswith("oRRS1") or ocn_grid.startswith("RRSwISC6")):
lines.append(' ')
lines.append(' ')
lines.append(' ')
@@ -1474,7 +1494,8 @@ def buildnml(case, caseroot, compname):
lines.append(' block_ptr % next
diff --git a/components/mpas-ocean/src/mode_init/mpas_ocn_init_ssh_and_landIcePressure.F b/components/mpas-ocean/src/mode_init/mpas_ocn_init_ssh_and_landIcePressure.F
index c85249635a7d..dfa7c6b430b6 100644
--- a/components/mpas-ocean/src/mode_init/mpas_ocn_init_ssh_and_landIcePressure.F
+++ b/components/mpas-ocean/src/mode_init/mpas_ocn_init_ssh_and_landIcePressure.F
@@ -25,9 +25,7 @@ module ocn_init_ssh_and_landIcePressure
use mpas_io_units
use mpas_derived_types
use mpas_pool_routines
- use mpas_constants
- use ocn_constants
use ocn_config
use ocn_diagnostics_variables
diff --git a/components/mpas-ocean/src/mode_init/mpas_ocn_init_vertical_grids.F b/components/mpas-ocean/src/mode_init/mpas_ocn_init_vertical_grids.F
index 2fcbf507f7a3..e28cf5e97fad 100644
--- a/components/mpas-ocean/src/mode_init/mpas_ocn_init_vertical_grids.F
+++ b/components/mpas-ocean/src/mode_init/mpas_ocn_init_vertical_grids.F
@@ -22,11 +22,9 @@ module ocn_init_vertical_grids
use mpas_kind_types
use mpas_derived_types
use mpas_pool_routines
- use mpas_constants
use mpas_timer
use mpas_io
- use ocn_constants
use ocn_config
use ocn_diagnostics_variables
diff --git a/components/mpas-ocean/src/mode_init/shr_const_mod.F b/components/mpas-ocean/src/mode_init/shr_const_mod.F
new file mode 120000
index 000000000000..c471e79113fd
--- /dev/null
+++ b/components/mpas-ocean/src/mode_init/shr_const_mod.F
@@ -0,0 +1 @@
+../../../../share/util/shr_const_mod.F90
\ No newline at end of file
diff --git a/components/mpas-ocean/src/mode_init/shr_kind_mod.F b/components/mpas-ocean/src/mode_init/shr_kind_mod.F
new file mode 120000
index 000000000000..77a61f967b6a
--- /dev/null
+++ b/components/mpas-ocean/src/mode_init/shr_kind_mod.F
@@ -0,0 +1 @@
+../../../../share/util/shr_kind_mod.F90
\ No newline at end of file
diff --git a/components/mpas-ocean/src/shared/mpas_ocn_tracer_surface_restoring.F b/components/mpas-ocean/src/shared/mpas_ocn_tracer_surface_restoring.F
index f47608ef0d9d..cc014b3f2362 100644
--- a/components/mpas-ocean/src/shared/mpas_ocn_tracer_surface_restoring.F
+++ b/components/mpas-ocean/src/shared/mpas_ocn_tracer_surface_restoring.F
@@ -344,7 +344,8 @@ subroutine ocn_get_surfaceSalinityData( streamManager, &
if (config_salinity_restoring_under_sea_ice) then
- ! Simulation has landIceMask AND config_salinity_restoring_under_sea_ice=.true.
+ ! Simulation has landIceMask AND
+ ! config_salinity_restoring_under_sea_ice=.true. (default)
do iCell = 1, nCells
if (landIceMask(iCell)==1) then
! Turn off salinity restoring in this cell
@@ -362,7 +363,7 @@ subroutine ocn_get_surfaceSalinityData( streamManager, &
else ! config_salinity_restoring_under_sea_ice = .false.
- ! Simulation has landIceMask AND config_salinity_restoring_under_sea_ice=.false. (default)
+ ! Simulation has landIceMask AND config_salinity_restoring_under_sea_ice=.false.
do iCell = 1, nCells
if (landIceMask(iCell)==1) then
! Turn off salinity restoring in this cell
@@ -386,7 +387,8 @@ subroutine ocn_get_surfaceSalinityData( streamManager, &
if (config_salinity_restoring_under_sea_ice) then
- ! Simulation has NO landIceMask AND config_salinity_restoring_under_sea_ice=.true.
+ ! Simulation has NO landIceMask AND
+ ! config_salinity_restoring_under_sea_ice=.true. (default)
do iCell = 1, nCells
deltaS = surfaceSalinityMonthlyClimatologyValue(iCell) - activeTracers(indexSalinity,1,iCell)
if (deltaS > config_salinity_restoring_max_difference) deltaS = config_salinity_restoring_max_difference
@@ -398,7 +400,7 @@ subroutine ocn_get_surfaceSalinityData( streamManager, &
else ! config_salinity_restoring_under_sea_ice = .false.
- ! Simulation has NO landIceMask AND config_salinity_restoring_under_sea_ice=.false. (default)
+ ! Simulation has NO landIceMask AND config_salinity_restoring_under_sea_ice=.false.
do iCell = 1, nCells
deltaS = surfaceSalinityMonthlyClimatologyValue(iCell) - activeTracers(indexSalinity,1,iCell)
if (deltaS > config_salinity_restoring_max_difference) deltaS = config_salinity_restoring_max_difference
diff --git a/components/mpas-ocean/src/tracer_groups/Registry_activeTracers.xml b/components/mpas-ocean/src/tracer_groups/Registry_activeTracers.xml
index 3b5344391480..916df21b70cc 100644
--- a/components/mpas-ocean/src/tracer_groups/Registry_activeTracers.xml
+++ b/components/mpas-ocean/src/tracer_groups/Registry_activeTracers.xml
@@ -31,9 +31,9 @@
description="If true, apply monthly salinity restoring using a uniform piston velocity, defined at run-time by config_salinity_restoring_constant_piston_velocity. When false, salinity piston velocity is specified in the input file by salinityPistonVelocity, which may be spatially variable."
possible_values=".true. or .false."
/>
-
-
diff --git a/components/mpas-seaice/bld/namelist_files/namelist_defaults_mpassi.xml b/components/mpas-seaice/bld/namelist_files/namelist_defaults_mpassi.xml
index 1d8330ac86ef..dee59f726e25 100644
--- a/components/mpas-seaice/bld/namelist_files/namelist_defaults_mpassi.xml
+++ b/components/mpas-seaice/bld/namelist_files/namelist_defaults_mpassi.xml
@@ -30,6 +30,7 @@
240.0
120.0
60.0
+900.0
'noleap'
'2000-01-01_00:00:00'
'none'
@@ -89,6 +90,7 @@
75.0
85.0
85.0
+85.0
-60.0
-75.0
-75.0
@@ -103,6 +105,7 @@
-85.0
-85.0
-85.0
+-85.0
'uniform'
0.0
0.0
@@ -166,6 +169,7 @@
1
1
1
+2
true
true
120
diff --git a/components/mpas-seaice/cime_config/buildnml b/components/mpas-seaice/cime_config/buildnml
index 8337fcfc7061..5522d8ac6511 100755
--- a/components/mpas-seaice/cime_config/buildnml
+++ b/components/mpas-seaice/cime_config/buildnml
@@ -318,6 +318,16 @@ def buildnml(case, caseroot, compname):
grid_date = '20240314'
grid_prefix = 'mpassi.IcosXISC30E3r7.rstFromPiControlSpinup-chrysalis'
+ elif ice_grid == 'RRSwISC6to18E3r5':
+ decomp_date = '20240404'
+ decomp_prefix = 'partitions/mpas-seaice.graph.info.'
+ grid_date = '20240327'
+ grid_prefix = 'mpassi.RRSwISC6to18E3r5'
+ data_iceberg_file = 'Iceberg_Climatology_Merino.RRSwISC6to18E3r5.20240327.nc'
+ if ice_ic_mode == 'spunup':
+ logger.warning("WARNING: The specified compset is requesting seaice ICs spunup from a G-case")
+ logger.warning(" But no file available for this grid.")
+
elif ice_grid == 'ICOS10':
grid_date = '211015'
grid_prefix = 'seaice.ICOS10'
@@ -451,7 +461,8 @@ def buildnml(case, caseroot, compname):
lines.append('')
lines.append('CESM1_MOD
CESM1_MOD
RASM_OPTION1
+ RASM_OPTION2
run_coupling
env_run.xml
diff --git a/driver-moab/main/component_type_mod.F90 b/driver-moab/main/component_type_mod.F90
index 85a300356c9a..ebb3ad8f58e3 100644
--- a/driver-moab/main/component_type_mod.F90
+++ b/driver-moab/main/component_type_mod.F90
@@ -508,7 +508,7 @@ subroutine compare_mct_av_moab_tag(comp, attrVect, mct_field, appId, tagname, en
difference = sqrt(differenceg)
iamroot = seq_comm_iamroot(CPLID)
if ( iamroot ) then
- print * , subname, trim(comp%ntype), ' comp, difference on tag ', trim(tagname), ' = ', difference
+ print * , subname, trim(comp%ntype), ' on cpl, difference on tag ', trim(tagname), ' = ', difference
!call shr_sys_abort(subname//'differences between mct and moab values')
endif
deallocate(GlobalIds)
diff --git a/driver-moab/main/cplcomp_exchange_mod.F90 b/driver-moab/main/cplcomp_exchange_mod.F90
index 2fd8bb1f5f60..0e2682192697 100644
--- a/driver-moab/main/cplcomp_exchange_mod.F90
+++ b/driver-moab/main/cplcomp_exchange_mod.F90
@@ -1017,7 +1017,7 @@ subroutine cplcomp_moab_Init(infodata,comp)
integer :: mpigrp_old ! component group pes
integer :: ierr, context_id
character*200 :: appname, outfile, wopts, ropts
- character(CL) :: rtm_mesh
+ character(CL) :: rtm_mesh, rof_domain
character(CL) :: lnd_domain
character(CL) :: ocn_domain
character(CL) :: atm_mesh
@@ -1112,11 +1112,14 @@ subroutine cplcomp_moab_Init(infodata,comp)
else
! we need to read the atm mesh on coupler, from domain file
ierr = iMOAB_LoadMesh(mbaxid, trim(atm_mesh)//C_NULL_CHAR, &
- "PARALLEL=READ_PART;PARTITION_METHOD=SQIJ;NO_CULLING", 0)
+ "PARALLEL=READ_PART;PARTITION_METHOD=SQIJ;VARIABLE=;REPARTITION;NO_CULLING", 0)
if ( ierr /= 0 ) then
write(logunit,*) 'Failed to load atm domain mesh on coupler'
call shr_sys_abort(subname//' ERROR Failed to load atm domain mesh on coupler ')
endif
+ if (seq_comm_iamroot(CPLID)) then
+ write(logunit,'(A)') subname//' load atm domain mesh from file '//trim(atm_mesh)
+ endif
! right now, turn atm_pg_active to true
atm_pg_active = .true. ! FIXME TODO
! need to add global id tag to the app, it will be used in restart
@@ -1290,11 +1293,14 @@ subroutine cplcomp_moab_Init(infodata,comp)
else
! we need to read the ocean mesh on coupler, from domain file
ierr = iMOAB_LoadMesh(mboxid, trim(ocn_domain)//C_NULL_CHAR, &
- "PARALLEL=READ_PART;PARTITION_METHOD=SQIJ;NO_CULLING", 0)
+ "PARALLEL=READ_PART;PARTITION_METHOD=SQIJ;VARIABLE=;NO_CULLING;REPARTITION", 0)
if ( ierr /= 0 ) then
write(logunit,*) 'Failed to load ocean domain mesh on coupler'
call shr_sys_abort(subname//' ERROR Failed to load ocean domain mesh on coupler ')
endif
+ if (seq_comm_iamroot(CPLID)) then
+ write(logunit,'(A)') subname//' load ocn domain mesh from file '//trim(ocn_domain)
+ endif
! need to add global id tag to the app, it will be used in restart
tagtype = 0 ! dense, integer
numco = 1
@@ -1399,11 +1405,14 @@ subroutine cplcomp_moab_Init(infodata,comp)
else
! we need to read the ocean mesh on coupler, from domain file
ierr = iMOAB_LoadMesh(mbofxid, trim(ocn_domain)//C_NULL_CHAR, &
- "PARALLEL=READ_PART;PARTITION_METHOD=SQIJ;NO_CULLING", 0)
+ "PARALLEL=READ_PART;PARTITION_METHOD=SQIJ;VARIABLE=;NO_CULLING;REPARTITION", 0)
if ( ierr /= 0 ) then
write(logunit,*) 'Failed to load second ocean domain mesh on coupler'
call shr_sys_abort(subname//' ERROR Failed to load second ocean domain mesh on coupler ')
endif
+ if (seq_comm_iamroot(CPLID)) then
+ write(logunit,'(A)') subname//' load ocn domain mesh from file for second ocn instance '//trim(ocn_domain)
+ endif
! need to add global id tag to the app, it will be used in restart
tagtype = 0 ! dense, integer
numco = 1
@@ -1445,7 +1454,7 @@ subroutine cplcomp_moab_Init(infodata,comp)
! do not receive the mesh anymore, read it from file, then pair it with mlnid, component land PC mesh
! similar to rof mosart mesh
- ropts = 'PARALLEL=READ_PART;PARTITION_METHOD=SQIJ;VARIABLE='//C_NULL_CHAR
+ ropts = 'PARALLEL=READ_PART;PARTITION_METHOD=SQIJ;VARIABLE=;REPARTITION'//C_NULL_CHAR
call seq_infodata_GetData(infodata,lnd_domain=lnd_domain)
outfile = trim(lnd_domain)//C_NULL_CHAR
nghlay = 0 ! no ghost layers
@@ -1458,6 +1467,9 @@ subroutine cplcomp_moab_Init(infodata,comp)
write(logunit,*) subname,' error in reading land coupler mesh from ', trim(lnd_domain)
call shr_sys_abort(subname//' ERROR in reading land coupler mesh')
endif
+ if (seq_comm_iamroot(CPLID)) then
+ write(logunit,'(A)') subname//' load lnd domain mesh from file '//trim(lnd_domain)
+ endif
! need to add global id tag to the app, it will be used in restart
tagtype = 0 ! dense, integer
numco = 1
@@ -1633,15 +1645,23 @@ subroutine cplcomp_moab_Init(infodata,comp)
ierr = iMOAB_RegisterApplication(trim(appname), mpicom_new, id_join, mbrxid)
! load mesh from scrip file passed from river model
- call seq_infodata_GetData(infodata,rof_mesh=rtm_mesh)
- outfile = trim(rtm_mesh)//C_NULL_CHAR
- ropts = 'PARALLEL=READ_PART;PARTITION_METHOD=RCBZOLTAN'//C_NULL_CHAR
-
+ call seq_infodata_GetData(infodata,rof_mesh=rtm_mesh,rof_domain=rof_domain)
+ if ( trim(rof_domain) == 'none' ) then
+ outfile = trim(rtm_mesh)//C_NULL_CHAR
+ ropts = 'PARALLEL=READ_PART;PARTITION_METHOD=RCBZOLTAN'//C_NULL_CHAR
+ else
+ outfile = trim(rof_domain)//C_NULL_CHAR
+ ropts = 'PARALLEL=READ_PART;PARTITION_METHOD=SQIJ;VARIABLE=;REPARTITION'//C_NULL_CHAR
+ endif
nghlay = 0 ! no ghost layers
ierr = iMOAB_LoadMesh(mbrxid, outfile, ropts, nghlay)
+ if (seq_comm_iamroot(CPLID)) then
+ write(logunit,'(A)') subname//' load rof from file '//trim(outfile)
+ endif
if ( ierr .ne. 0 ) then
call shr_sys_abort( subname//' ERROR: cannot read rof mesh on coupler' )
end if
+
! need to add global id tag to the app, it will be used in restart
tagtype = 0 ! dense, integer
numco = 1
diff --git a/driver-moab/main/prep_ocn_mod.F90 b/driver-moab/main/prep_ocn_mod.F90
index 7514cc40db01..44ec0c6a3c7a 100644
--- a/driver-moab/main/prep_ocn_mod.F90
+++ b/driver-moab/main/prep_ocn_mod.F90
@@ -1985,6 +1985,9 @@ subroutine prep_ocn_mrg_moab(infodata, xao_ox)
outfile = 'OcnCplAftMm'//trim(lnum)//'.h5m'//C_NULL_CHAR
wopts = ';PARALLEL=WRITE_PART'//C_NULL_CHAR !
ierr = iMOAB_WriteMesh(mboxid, trim(outfile), trim(wopts))
+ if (ierr .ne. 0) then
+ call shr_sys_abort(subname//' error in writing ocean after merging')
+ endif
endif
#endif
if (first_time) then
@@ -2846,7 +2849,22 @@ subroutine prep_ocn_calc_r2x_ox(timer)
!---------------------------------------------------------------
! Description
! Create r2x_ox (note that r2x_ox is a local module variable)
- !
+#ifdef MOABDEBUG
+ use iMOAB, only : iMOAB_WriteMesh
+ use seq_comm_mct, only: num_moab_exports ! used to count the steps for moab files
+#endif
+ ! Arguments
+
+ ! Local Variables
+#ifdef MOABDEBUG
+ character*32 :: outfile, wopts, lnum
+ integer :: ierr
+#endif
+#ifdef MOABCOMP
+ character*100 :: tagname, mct_field
+ integer :: ent_type
+ real*8 :: difference
+#endif
! Arguments
character(len=*), intent(in) :: timer
!
@@ -2861,10 +2879,8 @@ subroutine prep_ocn_calc_r2x_ox(timer)
r2x_rx => component_get_c2x_cx(rof(eri))
call seq_map_map(mapper_Rr2o_liq, r2x_rx, r2x_ox(eri), &
fldlist=seq_flds_r2o_liq_fluxes, norm=.false.)
-
call seq_map_map(mapper_Rr2o_ice, r2x_rx, r2x_ox(eri), &
fldlist=seq_flds_r2o_ice_fluxes, norm=.false.)
-
if (flood_present) then
call seq_map_map(mapper_Fr2o, r2x_rx, r2x_ox(eri), &
fldlist='Flrr_flood', norm=.true.)
diff --git a/driver-moab/shr/seq_comm_mct.F90 b/driver-moab/shr/seq_comm_mct.F90
index 10a23b9c5094..44d29e91ad2c 100644
--- a/driver-moab/shr/seq_comm_mct.F90
+++ b/driver-moab/shr/seq_comm_mct.F90
@@ -241,6 +241,7 @@ module seq_comm_mct
integer, public :: mbrmapro ! iMOAB id for read map between river and ocean; it exists on coupler PEs
! similar to intx id, oa, la;
integer, public :: mbrxoid ! iMOAB id for rof migrated to coupler for ocean context (r2o mapping)
+ logical, public :: mbrof_data = .false. ! set to .true. when no rtm mesh is provided, i.e. the run uses a data rof model (rof_domain file instead)
integer, public :: mbintxar ! iMOAB id for intx mesh between atm and river
integer, public :: mbintxlr ! iMOAB id for intx mesh between land and river
integer, public :: mbintxrl ! iMOAB id for intx mesh between river and land
diff --git a/driver-moab/shr/seq_infodata_mod.F90 b/driver-moab/shr/seq_infodata_mod.F90
index 77236e027cc3..2e9838bc69c5 100644
--- a/driver-moab/shr/seq_infodata_mod.F90
+++ b/driver-moab/shr/seq_infodata_mod.F90
@@ -232,6 +232,7 @@ MODULE seq_infodata_mod
integer(SHR_KIND_IN) :: iac_ny ! nx, ny of "2d" grid
character(SHR_KIND_CL) :: lnd_domain ! path to land domain file
character(SHR_KIND_CL) :: rof_mesh ! path to river mesh file
+ character(SHR_KIND_CL) :: rof_domain ! path to river domain file; only for data rof for now
character(SHR_KIND_CL) :: ocn_domain ! path to ocean domain file, used by data ocean models only
character(SHR_KIND_CL) :: atm_mesh ! path to atmosphere domain/mesh file, used by data atm models only
@@ -792,6 +793,7 @@ SUBROUTINE seq_infodata_Init( infodata, nmlfile, ID, pioid, cpl_tag)
infodata%iac_ny = 0
infodata%lnd_domain = 'none'
infodata%rof_mesh = 'none'
+ infodata%rof_domain = 'none'
infodata%ocn_domain = 'none' ! will be used for ocean data models only; will be used as a signal
infodata%atm_mesh = 'none' ! will be used for atmosphere data models only; will be used as a signal
! not sure if it exists always actually
@@ -1037,8 +1039,8 @@ SUBROUTINE seq_infodata_GetData_explicit( infodata, cime_model, case_name, case_
glc_phase, rof_phase, atm_phase, lnd_phase, ocn_phase, ice_phase, &
wav_phase, iac_phase, esp_phase, wav_nx, wav_ny, atm_nx, atm_ny, &
lnd_nx, lnd_ny, rof_nx, rof_ny, ice_nx, ice_ny, ocn_nx, ocn_ny, &
- iac_nx, iac_ny, glc_nx, glc_ny, lnd_domain, rof_mesh, ocn_domain, &
- atm_mesh, eps_frac, &
+ iac_nx, iac_ny, glc_nx, glc_ny, lnd_domain, rof_mesh, rof_domain, &
+ ocn_domain, atm_mesh, eps_frac, &
eps_amask, eps_agrid, eps_aarea, eps_omask, eps_ogrid, eps_oarea, &
reprosum_use_ddpdd, reprosum_allow_infnan, &
reprosum_diffmax, reprosum_recompute, &
@@ -1212,6 +1214,7 @@ SUBROUTINE seq_infodata_GetData_explicit( infodata, cime_model, case_name, case_
integer(SHR_KIND_IN), optional, intent(OUT) :: iac_ny
character(SHR_KIND_CL), optional, intent(OUT) :: lnd_domain
character(SHR_KIND_CL), optional, intent(OUT) :: rof_mesh
+ character(SHR_KIND_CL), optional, intent(OUT) :: rof_domain
character(SHR_KIND_CL), optional, intent(OUT) :: ocn_domain
character(SHR_KIND_CL), optional, intent(OUT) :: atm_mesh
@@ -1401,6 +1404,7 @@ SUBROUTINE seq_infodata_GetData_explicit( infodata, cime_model, case_name, case_
if ( present(iac_ny) ) iac_ny = infodata%iac_ny
if ( present(lnd_domain) ) lnd_domain = infodata%lnd_domain
if ( present(rof_mesh) ) rof_mesh = infodata%rof_mesh
+ if ( present(rof_domain) ) rof_domain = infodata%rof_domain
if ( present(ocn_domain) ) ocn_domain = infodata%ocn_domain
if ( present(atm_mesh) ) atm_mesh = infodata%atm_mesh
@@ -1598,8 +1602,8 @@ SUBROUTINE seq_infodata_PutData_explicit( infodata, cime_model, case_name, case_
wav_phase, iac_phase, esp_phase, wav_nx, wav_ny, atm_nx, atm_ny, &
lnd_nx, lnd_ny, rof_nx, rof_ny, ice_nx, ice_ny, ocn_nx, ocn_ny, &
iac_nx, iac_ny, glc_nx, glc_ny, eps_frac, eps_amask, lnd_domain, &
- rof_mesh, ocn_domain, atm_mesh, eps_agrid, eps_aarea, eps_omask, &
- eps_ogrid, eps_oarea, &
+ rof_mesh, rof_domain, ocn_domain, atm_mesh, eps_agrid, eps_aarea, &
+ eps_omask, eps_ogrid, eps_oarea, &
reprosum_use_ddpdd, reprosum_allow_infnan, &
reprosum_diffmax, reprosum_recompute, &
mct_usealltoall, mct_usevector, glc_valid_input, nlmaps_verbosity)
@@ -1771,6 +1775,7 @@ SUBROUTINE seq_infodata_PutData_explicit( infodata, cime_model, case_name, case_
integer(SHR_KIND_IN), optional, intent(IN) :: iac_ny
character(SHR_KIND_CL), optional, intent(IN) :: lnd_domain
character(SHR_KIND_CL), optional, intent(IN) :: rof_mesh
+ character(SHR_KIND_CL), optional, intent(IN) :: rof_domain
character(SHR_KIND_CL), optional, intent(IN) :: ocn_domain
character(SHR_KIND_CL), optional, intent(IN) :: atm_mesh
@@ -1959,6 +1964,7 @@ SUBROUTINE seq_infodata_PutData_explicit( infodata, cime_model, case_name, case_
if ( present(iac_ny) ) infodata%iac_ny = iac_ny
if ( present(lnd_domain) ) infodata%lnd_domain = lnd_domain
if ( present(rof_mesh) ) infodata%rof_mesh = rof_mesh
+ if ( present(rof_domain) ) infodata%rof_domain = rof_domain
if ( present(ocn_domain) ) infodata%ocn_domain = ocn_domain
if ( present(atm_mesh) ) infodata%atm_mesh = atm_mesh
@@ -2271,6 +2277,7 @@ subroutine seq_infodata_bcast(infodata,mpicom)
call shr_mpi_bcast(infodata%iac_ny, mpicom)
call shr_mpi_bcast(infodata%lnd_domain, mpicom)
call shr_mpi_bcast(infodata%rof_mesh, mpicom)
+ call shr_mpi_bcast(infodata%rof_domain, mpicom)
call shr_mpi_bcast(infodata%ocn_domain, mpicom)
call shr_mpi_bcast(infodata%atm_mesh, mpicom)
call shr_mpi_bcast(infodata%nextsw_cday, mpicom)
@@ -2518,6 +2525,7 @@ subroutine seq_infodata_Exchange(infodata,ID,type)
call shr_mpi_bcast(infodata%rof_ny, mpicom, pebcast=cmppe)
call shr_mpi_bcast(infodata%flood_present, mpicom, pebcast=cmppe)
call shr_mpi_bcast(infodata%rof_mesh, mpicom, pebcast=cmppe)
+ call shr_mpi_bcast(infodata%rof_domain, mpicom, pebcast=cmppe)
! dead_comps is true if it's ever set to true
deads = infodata%dead_comps
call shr_mpi_bcast(deads, mpicom, pebcast=cmppe)
@@ -2990,6 +2998,7 @@ SUBROUTINE seq_infodata_print( infodata )
write(logunit,F0I) subname,'iac_ny = ', infodata%iac_ny
write(logunit,F0I) subname,'lnd_domain = ', infodata%lnd_domain
write(logunit,F0I) subname,'rof_mesh = ', infodata%rof_mesh
+ write(logunit,F0I) subname,'rof_domain = ', infodata%rof_domain
write(logunit,F0I) subname,'ocn_domain = ', infodata%ocn_domain
write(logunit,F0I) subname,'atm_mesh = ', infodata%atm_mesh