diff --git a/do_submit_cycle.sh b/do_submit_cycle.sh index 171aff50..76c04878 100755 --- a/do_submit_cycle.sh +++ b/do_submit_cycle.sh @@ -22,7 +22,7 @@ export KEEPWORKDIR="YES" ############################ # ensure necessary envars are set -envars=("exp_name" "STARTDATE" "ENDDATE" "LANDDAROOT" "LANDDA_INPUTS" "CYCLEDIR" \ +envars=("exp_name" "STARTDATE" "ENDDATE" "LANDDAROOT" "LANDDA_INPUTS" "HOMElandda" \ "LANDDA_EXPTS" "BUILDDIR" "atmos_forc" "OBSDIR" "WORKDIR" \ "OUTDIR" "TEST_BASEDIR" "JEDI_EXECDIR" "JEDI_STATICDIR" "ensemble_size" \ "FCSTHR" "RES" "TPATH" "TSTUB" "cycles_per_job" "ICSDIR" "DA_config" \ @@ -46,7 +46,7 @@ fi # check that modules are loaded in the environment if [[ ! $BASELINE =~ 'hera.internal' ]]; then - ${CYCLEDIR}/module_check.sh + ${HOMElandda}/module_check.sh fi if [[ $? -ne 0 ]]; then @@ -74,37 +74,37 @@ fi ############################ # set executables -if [[ -e ${CYCLEDIR}/exec/vector2tile_converter.exe ]]; then #prefer cmake-built executables - export vec2tileexec=${CYCLEDIR}/exec/vector2tile_converter.exe +if [[ -e ${HOMElandda}/exec/vector2tile_converter.exe ]]; then #prefer cmake-built executables + export vec2tileexec=${HOMElandda}/exec/vector2tile_converter.exe else - export vec2tileexec=${CYCLEDIR}/sorc/vector2tile/vector2tile_converter.exe + export vec2tileexec=${HOMElandda}/sorc/vector2tile/vector2tile_converter.exe fi -if [[ -e ${CYCLEDIR}/exec/tile2tile_converter.exe ]]; then #prefer cmake-built executables - export tile2tileexec=${CYCLEDIR}/exec/tile2tile_converter.exe +if [[ -e ${HOMElandda}/exec/tile2tile_converter.exe ]]; then #prefer cmake-built executables + export tile2tileexec=${HOMElandda}/exec/tile2tile_converter.exe else - export tile2tileexec=${CYCLEDIR}/sorc/tile2tile/tile2tile_converter.exe + export tile2tileexec=${HOMElandda}/sorc/tile2tile/tile2tile_converter.exe fi -if [[ -e ${CYCLEDIR}/exec/ufsLand.exe ]]; then - export LSMexec=${CYCLEDIR}/exec/ufsLand.exe +if [[ -e ${HOMElandda}/exec/ufsLand.exe ]]; then + export LSMexec=${HOMElandda}/exec/ufsLand.exe else - export LSMexec=${CYCLEDIR}/sorc/ufsLand.fd/run/ufsLand.exe + export LSMexec=${HOMElandda}/sorc/ufsLand.fd/run/ufsLand.exe fi -export DADIR=${CYCLEDIR}/sorc/DA_update/ +export DADIR=${HOMElandda}/sorc/DA_update/ export DAscript=${DADIR}/do_landDA.sh export MPIEXEC=`which mpiexec` export LANDDADIR=${DADIR} -export analdate=${CYCLEDIR}/analdates.sh -export incdate=${CYCLEDIR}/incdate.sh +export analdate=${HOMElandda}/analdates.sh +export incdate=${HOMElandda}/incdate.sh -export BUILDDIR=${CYCLEDIR}/sorc/build +export BUILDDIR=${HOMElandda}/sorc/build export INCR_EXECDIR=${DADIR}/add_jedi_incr/exec/ ############################ # read in dates -export logfile=${CYCLEDIR}/cycle.log +export logfile=${HOMElandda}/cycle.log touch $logfile echo "***************************************" >> $logfile echo "cycling from $STARTDATE to $ENDDATE" >> $logfile diff --git a/doc/source/BuildingRunningTesting/BuildRunLandDA.rst b/doc/source/BuildingRunningTesting/BuildRunLandDA.rst index 92f4c0eb..70dae2cf 100644 --- a/doc/source/BuildingRunningTesting/BuildRunLandDA.rst +++ b/doc/source/BuildingRunningTesting/BuildRunLandDA.rst @@ -125,7 +125,6 @@ Users will need to configure certain elements of their experiment in ``land_anal * ``EXP_BASEDIR:`` The full path to the directory where land-DA_workflow was cloned (i.e., ``$LANDDAROOT``) * ``JEDI_INSTALL:`` The full path to the system's ``jedi-bundle`` installation * ``LANDDA_INPUTS:`` The full path to the experiment data. 
See :ref:`Data ` below for information on prestaged data on Level 1 platforms. - * ``OUTDIR:`` The full path to the directory where experiment will write its output. By default, this is set to ``"&EXP_BASEDIR;/landda_expts/DA__test"``, but users can change the ``DA__test`` portion to a name of their choice. If users do not change the name, the new experiment will overwrite data from the previous experiment. .. note:: @@ -193,9 +192,9 @@ Each Land DA experiment includes multiple tasks that must be run in order to sat - Sets up the observation files * - JLANDDA_PREP_BMAT - Sets up the :term:`JEDI` run - * - JLANDDA_RUN_ANA + * - JLANDDA_ANALYSIS - Runs JEDI - * - JLANDDA_RUN_FCST + * - JLANDDA_FORECAST - Runs forecast Users may run these tasks :ref:`using the Rocoto workflow manager ` or :ref:`using a batch script `. @@ -263,18 +262,20 @@ As the experiment progresses, it will generate a number of directories to hold i $LANDDAROOT: Base directory ├── land-DA_workflow(): Home directory of the land DA workflow - ├── com - │ ├── landda () - │ │ └── vX.Y.Z () - │ │ └── DA_ () - │ │ ├── DA: Directory containing the output files of JEDI run - │ │ │ ├── hofx - │ │ │ ├── jedi_incr - │ │ │ └── logs - │ │ └── mem000: Directory containing the output files - │ └── output - │ └── logs - │ └── run_ (): Directory containing the log file of the Rocoto workflow + ├── ptmp () + │ └── test () + │ └── com + │ ├── landda () + │ │ └── vX.Y.Z () + │ │ └── landda.YYYYMMDD (.) + │ │ └── HH () + │ │ ├── DA: Directory containing the output files of JEDI run + │ │ │ ├── hofx + │ │ │ └── jedi_incr + │ │ └── mem000: Directory containing the output files + │ └── output + │ └── logs + │ └── run_ (): Directory containing the log file of the Rocoto workflow └── workdir() └── run_ └── mem000: Working directory @@ -285,7 +286,7 @@ Check for the background and analysis files in the experiment directory: .. code-block:: console - ls -l $LANDDAROOT/com/landda/v1.2.1/run_/mem000/restarts/ + ls -l $LANDDAROOT/ptmp/test/com/landda/v1.2.1/landda.//run_/mem000/restarts/ where: diff --git a/jobs/JLANDDA_ANALYSIS b/jobs/JLANDDA_ANALYSIS new file mode 100755 index 00000000..3318b221 --- /dev/null +++ b/jobs/JLANDDA_ANALYSIS @@ -0,0 +1,103 @@ +#!/bin/bash + +date +export PS4='+ $SECONDS + ' +set -xue +# +#----------------------------------------------------------------------- +# +# Set the NCO standard environment variables (Table 1, pp.4) +# +#----------------------------------------------------------------------- +# +export USHlandda="${HOMElandda}/ush" +export EXEClandda="${HOMElandda}/exec" +export PARMlandda="${HOMElandda}/parm" +export SCRIPTSlandda="${HOMElandda}/scripts" +# +#----------------------------------------------------------------------- +# +# Define job and jobid by default for rocoto +# +#----------------------------------------------------------------------- +# +WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}" +if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then + if [ "${SCHED}" = "slurm" ]; then + job=${SLURM_JOB_NAME} + pid=${SLURM_JOB_ID} + elif [ "${SCHED}" = "pbspro" ]; then + job=${PBS_JOBNAME} + pid=${PBS_JOBID} + else + job="task" + pid=$$ + fi + jobid="${job}.${PDY}${cyc}.${pid}" +fi +# +#----------------------------------------------------------------------- +# +# Create a temp working directory (DATA) and cd into it. 
+# +#----------------------------------------------------------------------- +# +export DATA="${DATA:-${DATAROOT}/${jobid}}" +mkdir -p $DATA +cd $DATA +# +#----------------------------------------------------------------------- +# +# Define NCO environment variables and set COM type definitions. +# +#----------------------------------------------------------------------- +# +export NET="${NET:-landda}" +export RUN="${RUN:-landda}" + +[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT +if [ "${MACHINE}" = "WCOSS2" ]; then + export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}" + export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}" +else + export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}" + export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}" +fi + +mkdir -p ${COMOUT} + +# Create a temporary share directory +export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}${cyc}}" +mkdir -p ${DATA_SHARE} + +# Run setpdy to initialize PDYm and PDYp variables +export cycle="${cycle:-t${cyc}z}" +setpdy.sh +. ./PDY +# +#----------------------------------------------------------------------- +# +# Call the ex-script for this J-job. +# +#----------------------------------------------------------------------- +# +export pgmout="${DATA}/OUTPUT.$$" +env + +${SCRIPTSlandda}/exlandda_analysis.sh +export err=$?; err_chk + +if [ -e "$pgmout" ]; then + cat $pgmout +fi +# +#----------------------------------------------------------------------- +# +# Whether or not working directory DATA should be kept. +# +#----------------------------------------------------------------------- +# +if [ "${KEEPDATA}" = "NO" ]; then + rm -rf ${DATA} +fi +date diff --git a/jobs/JLANDDA_FORECAST b/jobs/JLANDDA_FORECAST new file mode 100755 index 00000000..d5e65880 --- /dev/null +++ b/jobs/JLANDDA_FORECAST @@ -0,0 +1,103 @@ +#!/bin/bash + +date +export PS4='+ $SECONDS + ' +set -xue +# +#----------------------------------------------------------------------- +# +# Set the NCO standard environment variables (Table 1, pp.4) +# +#----------------------------------------------------------------------- +# +export USHlandda="${HOMElandda}/ush" +export EXEClandda="${HOMElandda}/exec" +export PARMlandda="${HOMElandda}/parm" +export SCRIPTSlandda="${HOMElandda}/scripts" +# +#----------------------------------------------------------------------- +# +# Define job and jobid by default for rocoto +# +#----------------------------------------------------------------------- +# +WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}" +if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then + if [ "${SCHED}" = "slurm" ]; then + job=${SLURM_JOB_NAME} + pid=${SLURM_JOB_ID} + elif [ "${SCHED}" = "pbspro" ]; then + job=${PBS_JOBNAME} + pid=${PBS_JOBID} + else + job="task" + pid=$$ + fi + jobid="${job}.${PDY}${cyc}.${pid}" +fi +# +#----------------------------------------------------------------------- +# +# Create a temp working directory (DATA) and cd into it. +# +#----------------------------------------------------------------------- +# +export DATA="${DATA:-${DATAROOT}/${jobid}}" +mkdir -p $DATA +cd $DATA +# +#----------------------------------------------------------------------- +# +# Define NCO environment variables and set COM type definitions. 
+# +#----------------------------------------------------------------------- +# +export NET="${NET:-landda}" +export RUN="${RUN:-landda}" + +[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT +if [ "${MACHINE}" = "WCOSS2" ]; then + export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}" + export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}" +else + export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}" + export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}" +fi + +mkdir -p ${COMOUT} + +# Create a temporary share directory +export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}${cyc}}" +mkdir -p ${DATA_SHARE} + +# Run setpdy to initialize PDYm and PDYp variables +export cycle="${cycle:-t${cyc}z}" +setpdy.sh +. ./PDY +# +#----------------------------------------------------------------------- +# +# Call the ex-script for this J-job. +# +#----------------------------------------------------------------------- +# +export pgmout="${DATA}/OUTPUT.$$" +env + +${SCRIPTSlandda}/exlandda_forecast.sh +export err=$?; err_chk + +if [ -e "$pgmout" ]; then + cat $pgmout +fi +# +#----------------------------------------------------------------------- +# +# Whether or not working directory DATA should be kept. +# +#----------------------------------------------------------------------- +# +if [ "${KEEPDATA}" = "NO" ]; then + rm -rf ${DATA} +fi +date diff --git a/jobs/JLANDDA_PREP_BMAT b/jobs/JLANDDA_PREP_BMAT index 52515cfa..3ae23e6a 100755 --- a/jobs/JLANDDA_PREP_BMAT +++ b/jobs/JLANDDA_PREP_BMAT @@ -1,86 +1,103 @@ -#!/bin/sh - -set -ex - -############################ -# copy restarts to workdir, convert to UFS tile for DA (all members) - -if [[ ${EXP_NAME} == "openloop" ]]; then - do_jedi="NO" +#!/bin/bash + +date +export PS4='+ $SECONDS + ' +set -xue +# +#----------------------------------------------------------------------- +# +# Set the NCO standard environment variables (Table 1, pp.4) +# +#----------------------------------------------------------------------- +# +export USHlandda="${HOMElandda}/ush" +export EXEClandda="${HOMElandda}/exec" +export PARMlandda="${HOMElandda}/parm" +export SCRIPTSlandda="${HOMElandda}/scripts" +# +#----------------------------------------------------------------------- +# +# Define job and jobid by default for rocoto +# +#----------------------------------------------------------------------- +# +WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}" +if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then + if [ "${SCHED}" = "slurm" ]; then + job=${SLURM_JOB_NAME} + pid=${SLURM_JOB_ID} + elif [ "${SCHED}" = "pbspro" ]; then + job=${PBS_JOBNAME} + pid=${PBS_JOBID} + else + job="task" + pid=$$ + fi + jobid="${job}.${PDY}${cyc}.${pid}" +fi +# +#----------------------------------------------------------------------- +# +# Create a temp working directory (DATA) and cd into it. +# +#----------------------------------------------------------------------- +# +export DATA="${DATA:-${DATAROOT}/${jobid}}" +mkdir -p $DATA +cd $DATA +# +#----------------------------------------------------------------------- +# +# Define NCO environment variables and set COM type definitions. 
+# +#----------------------------------------------------------------------- +# +export NET="${NET:-landda}" +export RUN="${RUN:-landda}" + +[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT +if [ "${MACHINE}" = "WCOSS2" ]; then + export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}" + export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}" else - do_jedi="YES" - SAVE_TILE="YES" - LANDDADIR=${CYCLEDIR}/sorc/DA_update + export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}" + export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}" fi -TPATH=${LANDDA_INPUTS}/forcing/${ATMOS_FORC}/orog_files/ -YYYY=${CTIME:0:4} -MM=${CTIME:4:2} -DD=${CTIME:6:2} -HH=${CTIME:8:2} - -mem_ens="mem000" - -MEM_WORKDIR=${WORKDIR}/${mem_ens} -MEM_MODL_OUTDIR=${OUTDIR}/${mem_ens} -RSTRDIR=${MEM_WORKDIR} -JEDIWORKDIR=${WORKDIR}/mem000/jedi -FILEDATE=${YYYY}${MM}${DD}.${HH}0000 - -cd $MEM_WORKDIR - -# load modulefiles -BUILD_VERSION_FILE="${CYCLEDIR}/versions/build.ver_${MACHINE}" -if [ -e ${BUILD_VERSION_FILE} ]; then - . ${BUILD_VERSION_FILE} +mkdir -p ${COMOUT} + +# Create a temporary share directory +export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}${cyc}}" +mkdir -p ${DATA_SHARE} + +# Run setpdy to initialize PDYm and PDYp variables +export cycle="${cycle:-t${cyc}z}" +setpdy.sh +. ./PDY +# +#----------------------------------------------------------------------- +# +# Call the ex-script for this J-job. +# +#----------------------------------------------------------------------- +# +export pgmout="${DATA}/OUTPUT.$$" +env + +${SCRIPTSlandda}/exlandda_prep_bmat.sh +export err=$?; err_chk + +if [ -e "$pgmout" ]; then + cat $pgmout fi -module use modulefiles; module load modules.landda -PYTHON=$(/usr/bin/which python) - -#fv3bundle_vn=psl_develop -#DAtype=letkfoi_snow -#SNOWDEPTHVAR=snwdph -YAML_DA=construct -GFSv17="NO" -B=30 # back ground error std for LETKFOI -cd $JEDIWORKDIR - -################################################ -# 4. CREATE BACKGROUND ENSEMBLE (LETKFOI) -################################################ - -if [[ ${DAtype} == 'letkfoi_snow' ]]; then - - JEDI_EXEC="fv3jedi_letkf.x" - - if [ $GFSv17 == "YES" ]; then - SNOWDEPTHVAR="snodl" - # field overwrite file with GFSv17 variables. - cp ${LANDDADIR}/jedi/fv3-jedi/yaml_files/${fv3bundle_vn}/gfs-land-v17.yaml ${JEDIWORKDIR}/gfs-land-v17.yaml - else - SNOWDEPTHVAR="snwdph" - fi - # FOR LETKFOI, CREATE THE PSEUDO-ENSEMBLE - for ens in pos neg - do - if [ -e $JEDIWORKDIR/mem_${ens} ]; then - rm -r $JEDIWORKDIR/mem_${ens} - fi - mkdir -p $JEDIWORKDIR/mem_${ens} - for tile in 1 2 3 4 5 6 - do - cp ${JEDIWORKDIR}/${FILEDATE}.sfc_data.tile${tile}.nc ${JEDIWORKDIR}/mem_${ens}/${FILEDATE}.sfc_data.tile${tile}.nc - done - cp ${JEDIWORKDIR}/${FILEDATE}.coupler.res ${JEDIWORKDIR}/mem_${ens}/${FILEDATE}.coupler.res - done - - echo 'do_landDA: calling create ensemble' - - # using ioda mods to get a python version with netCDF4 - ${PYTHON} ${LANDDADIR}/letkf_create_ens.py $FILEDATE $SNOWDEPTHVAR $B - if [[ $? != 0 ]]; then - echo "letkf create failed" - exit 10 - fi - +# +#----------------------------------------------------------------------- +# +# Whether or not working directory DATA should be kept. 
+# +#----------------------------------------------------------------------- +# +if [ "${KEEPDATA}" = "NO" ]; then + rm -rf ${DATA} fi +date diff --git a/jobs/JLANDDA_PREP_EXP b/jobs/JLANDDA_PREP_EXP index 4b780e69..af82c244 100755 --- a/jobs/JLANDDA_PREP_EXP +++ b/jobs/JLANDDA_PREP_EXP @@ -1,179 +1,103 @@ -#!/bin/sh - -set -ex - -############################ -# copy restarts to workdir, convert to UFS tile for DA (all members) - -if [[ ${EXP_NAME} == "openloop" ]]; then - do_jedi="NO" +#!/bin/bash + +date +export PS4='+ $SECONDS + ' +set -xue +# +#----------------------------------------------------------------------- +# +# Set the NCO standard environment variables (Table 1, pp.4) +# +#----------------------------------------------------------------------- +# +export USHlandda="${HOMElandda}/ush" +export EXEClandda="${HOMElandda}/exec" +export PARMlandda="${HOMElandda}/parm" +export SCRIPTSlandda="${HOMElandda}/scripts" +# +#----------------------------------------------------------------------- +# +# Define job and jobid by default for rocoto +# +#----------------------------------------------------------------------- +# +WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}" +if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then + if [ "${SCHED}" = "slurm" ]; then + job=${SLURM_JOB_NAME} + pid=${SLURM_JOB_ID} + elif [ "${SCHED}" = "pbspro" ]; then + job=${PBS_JOBNAME} + pid=${PBS_JOBID} + else + job="task" + pid=$$ + fi + jobid="${job}.${PDY}${cyc}.${pid}" +fi +# +#----------------------------------------------------------------------- +# +# Create a temp working directory (DATA) and cd into it. +# +#----------------------------------------------------------------------- +# +export DATA="${DATA:-${DATAROOT}/${jobid}}" +mkdir -p $DATA +cd $DATA +# +#----------------------------------------------------------------------- +# +# Define NCO environment variables and set COM type definitions. +# +#----------------------------------------------------------------------- +# +export NET="${NET:-landda}" +export RUN="${RUN:-landda}" + +[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT +if [ "${MACHINE}" = "WCOSS2" ]; then + export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}" + export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}" else - do_jedi="YES" - SAVE_TILE="YES" - LANDDADIR=${CYCLEDIR}/sorc/DA_update + export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}" + export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}" fi -echo ${LANDDA_INPUTS}, ${ATMOS_FORC} -TPATH=${LANDDA_INPUTS}/forcing/${ATMOS_FORC}/orog_files/ -YYYY=${CTIME:0:4} -MM=${CTIME:4:2} -DD=${CTIME:6:2} -HH=${CTIME:8:2} -YYYP=${PTIME:0:4} -MP=${PTIME:4:2} -DP=${PTIME:6:2} -HP=${PTIME:8:2} -mem_ens="mem000" - -MEM_WORKDIR=${WORKDIR}/${mem_ens} -MEM_MODL_OUTDIR=${OUTDIR}/${mem_ens} -RSTRDIR=${MEM_WORKDIR} -JEDIWORKDIR=${WORKDIR}/mem000/jedi -FILEDATE=${YYYY}${MM}${DD}.${HH}0000 - -if [[ ! -e ${MEM_WORKDIR} ]]; then - mkdir -p ${MEM_WORKDIR} +mkdir -p ${COMOUT} + +# Create a temporary share directory +export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}${cyc}}" +mkdir -p ${DATA_SHARE} + +# Run setpdy to initialize PDYm and PDYp variables +export cycle="${cycle:-t${cyc}z}" +setpdy.sh +. ./PDY +# +#----------------------------------------------------------------------- +# +# Call the ex-script for this J-job. 
+# +#----------------------------------------------------------------------- +# +export pgmout="${DATA}/OUTPUT.$$" +env + +${SCRIPTSlandda}/exlandda_prep_exp.sh +export err=$?; err_chk + +if [ -e "$pgmout" ]; then + cat $pgmout fi -if [[ ! -e ${MEM_MODL_OUTDIR} ]]; then - mkdir -p ${MEM_MODL_OUTDIR} -fi - -mkdir -p $MEM_WORKDIR/modulefiles; cp ${CYCLEDIR}/modulefiles/build_${MACHINE}_intel.lua $MEM_WORKDIR/modulefiles/modules.landda.lua -cd $MEM_WORKDIR - -# load modulefiles -BUILD_VERSION_FILE="${CYCLEDIR}/versions/build.ver_${MACHINE}" -if [ -e ${BUILD_VERSION_FILE} ]; then - . ${BUILD_VERSION_FILE} +# +#----------------------------------------------------------------------- +# +# Whether or not working directory DATA should be kept. +# +#----------------------------------------------------------------------- +# +if [ "${KEEPDATA}" = "NO" ]; then + rm -rf ${DATA} fi - -module use modulefiles; module load modules.landda - -if [[ $do_jedi == "YES" && $ATMOS_FORC == "era5" ]]; then - - # copy restarts into work directory - rst_in=${MEM_MODL_OUTDIR}/restarts/vector/ufs_land_restart_back.${YYYY}-${MM}-${DD}_${HH}-00-00.nc - if [[ ! -e ${rst_in} ]]; then - rst_in=${LANDDA_INPUTS}/restarts/${ATMOS_FORC}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.nc - fi - rst_out=${MEM_WORKDIR}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.nc - cp ${rst_in} ${rst_out} - - echo '************************************************' - echo 'calling vector2tile' - - export MEM_WORKDIR - - # update vec2tile and tile2vec namelists - cp ${CYCLEDIR}/parm/templates/template.vector2tile vector2tile.namelist - - sed -i "s|LANDDA_INPUTS|${LANDDA_INPUTS}|g" vector2tile.namelist - sed -i -e "s/XXYYYY/${YYYY}/g" vector2tile.namelist - sed -i -e "s/XXMM/${MM}/g" vector2tile.namelist - sed -i -e "s/XXDD/${DD}/g" vector2tile.namelist - sed -i -e "s/XXHH/${HH}/g" vector2tile.namelist - sed -i -e "s/XXHH/${HH}/g" vector2tile.namelist - sed -i -e "s/MODEL_FORCING/${ATMOS_FORC}/g" vector2tile.namelist - sed -i -e "s/XXRES/${RES}/g" vector2tile.namelist - sed -i -e "s/XXTSTUB/${TSTUB}/g" vector2tile.namelist - sed -i -e "s#XXTPATH#${TPATH}#g" vector2tile.namelist - - # submit vec2tile - echo '************************************************' - echo 'calling vector2tile' - - ${EXECdir}/vector2tile_converter.exe vector2tile.namelist - if [[ $? != 0 ]]; then - echo "vec2tile failed" - exit - fi -fi # vector2tile for DA - -if [[ $do_jedi == "YES" && $ATMOS_FORC == "gswp3" ]]; then - - echo '************************************************' - echo 'calling tile2tile' - - export MEM_WORKDIR - - # copy restarts into work directory - for tile in 1 2 3 4 5 6 - do - rst_in=${MEM_MODL_OUTDIR}/restarts/tile/ufs_land_restart_back.${YYYY}-${MM}-${DD}_${HH}-00-00.nc - if [[ ! 
-e ${rst_in} ]]; then - rst_in=${LANDDA_INPUTS}/restarts/${ATMOS_FORC}/ufs.cpld.lnd.out.${YYYY}-${MM}-${DD}-00000.tile${tile}.nc - fi - rst_out=${MEM_WORKDIR}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${tile}.nc - cp ${rst_in} ${rst_out} - done - - # update tile2tile namelist - cp ${CYCLEDIR}/parm/templates/template.ufs2jedi ufs2jedi.namelist - - sed -i "s|LANDDA_INPUTS|${LANDDA_INPUTS}|g" ufs2jedi.namelist - sed -i -e "s/XXYYYY/${YYYY}/g" ufs2jedi.namelist - sed -i -e "s/XXMM/${MM}/g" ufs2jedi.namelist - sed -i -e "s/XXDD/${DD}/g" ufs2jedi.namelist - sed -i -e "s/XXHH/${HH}/g" ufs2jedi.namelist - sed -i -e "s/XXHH/${HH}/g" ufs2jedi.namelist - sed -i -e "s/MODEL_FORCING/${ATMOS_FORC}/g" ufs2jedi.namelist - sed -i -e "s/XXRES/${RES}/g" ufs2jedi.namelist - sed -i -e "s/XXTSTUB/${TSTUB}/g" ufs2jedi.namelist - sed -i -e "s#XXTPATH#${TPATH}#g" ufs2jedi.namelist - - # submit tile2tile - ${EXECdir}/tile2tile_converter.exe ufs2jedi.namelist - if [[ $? != 0 ]]; then - echo "tile2tile failed" - exit - fi -fi # tile2tile for DA - -if [[ $do_jedi == "YES" ]]; then - if [[ ! -e ${OUTDIR}/DA ]]; then - mkdir -p ${OUTDIR}/DA/jedi_incr - mkdir -p ${OUTDIR}/DA/logs - mkdir -p ${OUTDIR}/DA/hofx - fi - if [[ ! -e $JEDIWORKDIR ]]; then - mkdir -p $JEDIWORKDIR - fi - cd $JEDIWORKDIR - - if [[ ! -e ${JEDIWORKDIR}/output ]]; then - ln -s ${OUTDIR} ${JEDIWORKDIR}/output - fi - - if [[ $SAVE_TILE == "YES" ]]; then - for tile in 1 2 3 4 5 6 - do - cp ${RSTRDIR}/${FILEDATE}.sfc_data.tile${tile}.nc ${RSTRDIR}/${FILEDATE}.sfc_data_back.tile${tile}.nc - done - fi - - #stage restarts for applying JEDI update (files will get directly updated) - for tile in 1 2 3 4 5 6 - do - ln -fs ${RSTRDIR}/${FILEDATE}.sfc_data.tile${tile}.nc ${JEDIWORKDIR}/${FILEDATE}.sfc_data.tile${tile}.nc - done - - cres_file=${JEDIWORKDIR}/${FILEDATE}.coupler.res - - if [[ -e ${RSTRDIR}/${FILEDATE}.coupler.res ]]; then - ln -sf ${RSTRDIR}/${FILEDATE}.coupler.res $cres_file - else # if not present, need to create coupler.res for JEDI - cp ${LANDDADIR}/template.coupler.res $cres_file - - sed -i -e "s/XXYYYY/${YYYY}/g" $cres_file - sed -i -e "s/XXMM/${MM}/g" $cres_file - sed -i -e "s/XXDD/${DD}/g" $cres_file - sed -i -e "s/XXHH/${HH}/g" $cres_file - - sed -i -e "s/XXYYYP/${YYYP}/g" $cres_file - sed -i -e "s/XXMP/${MP}/g" $cres_file - sed -i -e "s/XXDP/${DP}/g" $cres_file - sed -i -e "s/XXHP/${HP}/g" $cres_file - - fi -fi # do_jedi setup - +date diff --git a/jobs/JLANDDA_PREP_OBS b/jobs/JLANDDA_PREP_OBS index 7ca67a98..5907dfcb 100755 --- a/jobs/JLANDDA_PREP_OBS +++ b/jobs/JLANDDA_PREP_OBS @@ -1,68 +1,103 @@ -#!/bin/sh +#!/bin/bash -set -ex - -############################ -# copy restarts to workdir, convert to UFS tile for DA (all members) +date +export PS4='+ $SECONDS + ' +set -xue +# +#----------------------------------------------------------------------- +# +# Set the NCO standard environment variables (Table 1, pp.4) +# +#----------------------------------------------------------------------- +# +export USHlandda="${HOMElandda}/ush" +export EXEClandda="${HOMElandda}/exec" +export PARMlandda="${HOMElandda}/parm" +export SCRIPTSlandda="${HOMElandda}/scripts" +# +#----------------------------------------------------------------------- +# +# Define job and jobid by default for rocoto +# +#----------------------------------------------------------------------- +# +WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}" +if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then + if [ "${SCHED}" = "slurm" ]; then + job=${SLURM_JOB_NAME} + pid=${SLURM_JOB_ID} + 
elif [ "${SCHED}" = "pbspro" ]; then + job=${PBS_JOBNAME} + pid=${PBS_JOBID} + else + job="task" + pid=$$ + fi + jobid="${job}.${PDY}${cyc}.${pid}" +fi +# +#----------------------------------------------------------------------- +# +# Create a temp working directory (DATA) and cd into it. +# +#----------------------------------------------------------------------- +# +export DATA="${DATA:-${DATAROOT}/${jobid}}" +mkdir -p $DATA +cd $DATA +# +#----------------------------------------------------------------------- +# +# Define NCO environment variables and set COM type definitions. +# +#----------------------------------------------------------------------- +# +export NET="${NET:-landda}" +export RUN="${RUN:-landda}" -if [[ ${EXP_NAME} == "openloop" ]]; then - do_jedi="NO" +[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT +if [ "${MACHINE}" = "WCOSS2" ]; then + export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}" + export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}" else - do_jedi="YES" - SAVE_TILE="YES" - LANDDADIR=${CYCLEDIR}/sorc/DA_update + export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}" + export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}" fi -TPATH=${LANDDA_INPUTS}/forcing/${ATMOS_FORC}/orog_files/ -YYYY=${CTIME:0:4} -MM=${CTIME:4:2} -DD=${CTIME:6:2} -HH=${CTIME:8:2} -YYYP=${PTIME:0:4} -MP=${PTIME:4:2} -DP=${PTIME:6:2} -HP=${PTIME:8:2} - -mem_ens="mem000" +mkdir -p ${COMOUT} -MEM_WORKDIR=${WORKDIR}/${mem_ens} -MEM_MODL_OUTDIR=${OUTDIR}/${mem_ens} -RSTRDIR=${MEM_WORKDIR} -JEDIWORKDIR=${WORKDIR}/mem000/jedi -FILEDATE=${YYYY}${MM}${DD}.${HH}0000 +# Create a temporary share directory +export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}${cyc}}" +mkdir -p ${DATA_SHARE} -cd $JEDIWORKDIR +# Run setpdy to initialize PDYm and PDYp variables +export cycle="${cycle:-t${cyc}z}" +setpdy.sh +. ./PDY +# +#----------------------------------------------------------------------- +# +# Call the ex-script for this J-job. +# +#----------------------------------------------------------------------- +# +export pgmout="${DATA}/OUTPUT.$$" +env -# load modulefiles -#module use modulefiles; module load modules.landda +${SCRIPTSlandda}/exlandda_prep_obs.sh +export err=$?; err_chk -OBSDIR=${LANDDA_INPUTS}/DA -################################################ -# 2. PREPARE OBS FILES -################################################ -for obs in "${OBS_TYPES[@]}"; do - # get the. obs file name - if [ ${obs} == "GTS" ]; then - obsfile=$OBSDIR/snow_depth/GTS/data_proc/${YYYY}${MM}/adpsfc_snow_${YYYY}${MM}${DD}${HH}.nc4 - # GHCN are time-stamped at 18. If assimilating at 00, need to use previous day's obs, so that - # obs are within DA window. 
- elif [ $ATMOS_FORC == "era5" ] && [ ${obs} == "GHCN" ]; then - obsfile=$OBSDIR/snow_depth/GHCN/data_proc/v3/${YYYY}/ghcn_snwd_ioda_${YYYP}${MP}${DP}.nc - elif [ $ATMOS_FORC == "gswp3" ] && [ ${obs} == "GHCN" ]; then - obsfile=$OBSDIR/snow_depth/GHCN/data_proc/v3/${YYYY}/fake_ghcn_snwd_ioda_${YYYP}${MP}${DP}.nc - elif [ ${obs} == "SYNTH" ]; then - obsfile=$OBSDIR/synthetic_noahmp/IODA.synthetic_gswp_obs.${YYYY}${MM}${DD}${HH}.nc - else - echo "do_landDA: Unknown obs type requested ${obs}, exiting" - exit 1 - fi - - # check obs are available - if [[ -e $obsfile ]]; then - echo "do_landDA: $i observations found: $obsfile" - ln -fs $obsfile ${obs}_${YYYY}${MM}${DD}${HH}.nc - else - echo "${obs} observations not found: $obsfile" - # JEDI_TYPES[$ii]="SKIP" - fi -done +if [ -e "$pgmout" ]; then + cat $pgmout +fi +# +#----------------------------------------------------------------------- +# +# Whether or not working directory DATA should be kept. +# +#----------------------------------------------------------------------- +# +if [ "${KEEPDATA}" = "NO" ]; then + rm -rf ${DATA} +fi +date diff --git a/module_check.sh b/module_check.sh index e98ae70f..0db71cca 100755 --- a/module_check.sh +++ b/module_check.sh @@ -11,7 +11,7 @@ fi # check which modules are required and notify the user if they are not currently loaded in the environment. if [[ ( ${MACHINE} == "orion" || ${MACHINE} == "hera" ) && ${USE_SINGULARITY} != "yes" ]]; then - env_mods=($(grep -o 'load("[^"]*")' ${CYCLEDIR}/modulefiles/build_${MACHINE}_intel.lua | sed 's/load("//;s/")//')) + env_mods=($(grep -o 'load("[^"]*")' ${HOMElandda}/modulefiles/build_${MACHINE}_intel.lua | sed 's/load("//;s/")//')) missing_mods=() @@ -23,7 +23,7 @@ if [[ ( ${MACHINE} == "orion" || ${MACHINE} == "hera" ) && ${USE_SINGULARITY} != done if [[ ${#missing_mods[@]} -gt 0 ]]; then - echo "Error: the following modules are not loaded in the current environment: ${missing_mods[@]}. Please load them via 'module use ${CYCLEDIR}/modulefiles; module load landda_${MACHINE}.intel' and then re-launch do_submit_cycle.sh." + echo "Error: the following modules are not loaded in the current environment: ${missing_mods[@]}. Please load them via 'module use ${HOMElandda}/modulefiles; module load landda_${MACHINE}.intel' and then re-launch do_submit_cycle.sh." exit 1 else echo "All modules properly loaded in environment. Continuing!" 
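Note (not part of the diff): the J-jobs above all build their COM input/output paths from the NCO-style variables introduced in this change (COMROOT, NET, model_ver, RUN, PDY, cyc, SLASH_ENSMEM_SUBDIR), which the updated land_analysis YAML files define from PTMP and envir. The minimal bash sketch below shows how those defaults resolve on a non-WCOSS2 machine; the EXP_BASEDIR value and the cycle date/hour are illustrative placeholders only.

#!/bin/bash
# Illustrative sketch: resolve COMOUT the same way the new J-jobs do,
# using the defaults from parm/land_analysis_*.yaml (non-WCOSS2 branch).
EXP_BASEDIR="/path/to/landda_test"   # hypothetical clone location
PTMP="${EXP_BASEDIR}/ptmp"
envir="test"
COMROOT="${PTMP}/${envir}/com"
NET="landda"
RUN="landda"
model_ver="v1.2.1"
PDY="20191221"                        # sample cycle date (YYYYMMDD)
cyc="00"                              # sample cycle hour (HH)
SLASH_ENSMEM_SUBDIR=""
COMOUT="${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}"
echo "${COMOUT}"
# Prints: /path/to/landda_test/ptmp/test/com/landda/v1.2.1/landda.20191221/00

This matches the directory tree added to the documentation (ptmp/<envir>/com/landda/vX.Y.Z/landda.YYYYMMDD/HH) and the restart-file path shown in the updated "Check for the background and analysis files" example.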
diff --git a/modulefiles/build_hera_intel.lua b/modulefiles/build_hera_intel.lua index 366735f5..d70b6f4e 100644 --- a/modulefiles/build_hera_intel.lua +++ b/modulefiles/build_hera_intel.lua @@ -32,6 +32,7 @@ load(pathJoin("sp", sp_ver)) load(pathJoin("w3emc", w3emc_ver)) load(pathJoin("gftl-shared", gftl_shared_ver)) load(pathJoin("mapl", mapl_ver)) +load(pathJoin("prod-util", prod_util_ver)) load("ufs-pyenv") load("atlas") diff --git a/modulefiles/build_orion_intel.lua b/modulefiles/build_orion_intel.lua index b3db0e2e..5d6ea8ae 100644 --- a/modulefiles/build_orion_intel.lua +++ b/modulefiles/build_orion_intel.lua @@ -32,6 +32,7 @@ load(pathJoin("sp", sp_ver)) load(pathJoin("w3emc", w3emc_ver)) load(pathJoin("gftl-shared", gftl_shared_ver)) load(pathJoin("mapl", mapl_ver)) +load(pathJoin("prod-util", prod_util_ver)) load("ufs-pyenv") diff --git a/modulefiles/tasks/hera/task.analysis.lua b/modulefiles/tasks/hera/task.analysis.lua new file mode 100644 index 00000000..0655a281 --- /dev/null +++ b/modulefiles/tasks/hera/task.analysis.lua @@ -0,0 +1,8 @@ +prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack")) +prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack_jedi")) + +load(pathJoin("stack-intel", stack_intel_ver)) +load(pathJoin("stack-intel-oneapi-mpi", stack_impi_ver)) +load(pathJoin("stack-python", stack_python_ver)) + +load(pathJoin("prod-util", prod_util_ver)) diff --git a/modulefiles/tasks/hera/task.forecast.lua b/modulefiles/tasks/hera/task.forecast.lua new file mode 100644 index 00000000..0655a281 --- /dev/null +++ b/modulefiles/tasks/hera/task.forecast.lua @@ -0,0 +1,8 @@ +prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack")) +prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack_jedi")) + +load(pathJoin("stack-intel", stack_intel_ver)) +load(pathJoin("stack-intel-oneapi-mpi", stack_impi_ver)) +load(pathJoin("stack-python", stack_python_ver)) + +load(pathJoin("prod-util", prod_util_ver)) diff --git a/modulefiles/tasks/hera/task.prep_bmat.lua b/modulefiles/tasks/hera/task.prep_bmat.lua new file mode 100644 index 00000000..0655a281 --- /dev/null +++ b/modulefiles/tasks/hera/task.prep_bmat.lua @@ -0,0 +1,8 @@ +prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack")) +prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack_jedi")) + +load(pathJoin("stack-intel", stack_intel_ver)) +load(pathJoin("stack-intel-oneapi-mpi", stack_impi_ver)) +load(pathJoin("stack-python", stack_python_ver)) + +load(pathJoin("prod-util", prod_util_ver)) diff --git a/modulefiles/tasks/hera/task.prep_exp.lua b/modulefiles/tasks/hera/task.prep_exp.lua new file mode 100644 index 00000000..0655a281 --- /dev/null +++ b/modulefiles/tasks/hera/task.prep_exp.lua @@ -0,0 +1,8 @@ +prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack")) +prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack_jedi")) + +load(pathJoin("stack-intel", stack_intel_ver)) +load(pathJoin("stack-intel-oneapi-mpi", stack_impi_ver)) +load(pathJoin("stack-python", stack_python_ver)) + +load(pathJoin("prod-util", prod_util_ver)) diff --git a/modulefiles/tasks/hera/task.prep_obs.lua b/modulefiles/tasks/hera/task.prep_obs.lua new file mode 100644 index 00000000..0655a281 --- /dev/null +++ b/modulefiles/tasks/hera/task.prep_obs.lua @@ -0,0 +1,8 @@ +prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack")) +prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack_jedi")) + +load(pathJoin("stack-intel", stack_intel_ver)) +load(pathJoin("stack-intel-oneapi-mpi", stack_impi_ver)) 
+load(pathJoin("stack-python", stack_python_ver)) + +load(pathJoin("prod-util", prod_util_ver)) diff --git a/modulefiles/tasks/orion/task.analysis.lua b/modulefiles/tasks/orion/task.analysis.lua new file mode 100644 index 00000000..0655a281 --- /dev/null +++ b/modulefiles/tasks/orion/task.analysis.lua @@ -0,0 +1,8 @@ +prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack")) +prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack_jedi")) + +load(pathJoin("stack-intel", stack_intel_ver)) +load(pathJoin("stack-intel-oneapi-mpi", stack_impi_ver)) +load(pathJoin("stack-python", stack_python_ver)) + +load(pathJoin("prod-util", prod_util_ver)) diff --git a/modulefiles/tasks/orion/task.forecast.lua b/modulefiles/tasks/orion/task.forecast.lua new file mode 100644 index 00000000..0655a281 --- /dev/null +++ b/modulefiles/tasks/orion/task.forecast.lua @@ -0,0 +1,8 @@ +prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack")) +prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack_jedi")) + +load(pathJoin("stack-intel", stack_intel_ver)) +load(pathJoin("stack-intel-oneapi-mpi", stack_impi_ver)) +load(pathJoin("stack-python", stack_python_ver)) + +load(pathJoin("prod-util", prod_util_ver)) diff --git a/modulefiles/tasks/orion/task.prep_bmat.lua b/modulefiles/tasks/orion/task.prep_bmat.lua new file mode 100644 index 00000000..0655a281 --- /dev/null +++ b/modulefiles/tasks/orion/task.prep_bmat.lua @@ -0,0 +1,8 @@ +prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack")) +prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack_jedi")) + +load(pathJoin("stack-intel", stack_intel_ver)) +load(pathJoin("stack-intel-oneapi-mpi", stack_impi_ver)) +load(pathJoin("stack-python", stack_python_ver)) + +load(pathJoin("prod-util", prod_util_ver)) diff --git a/modulefiles/tasks/orion/task.prep_exp.lua b/modulefiles/tasks/orion/task.prep_exp.lua new file mode 100644 index 00000000..0655a281 --- /dev/null +++ b/modulefiles/tasks/orion/task.prep_exp.lua @@ -0,0 +1,8 @@ +prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack")) +prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack_jedi")) + +load(pathJoin("stack-intel", stack_intel_ver)) +load(pathJoin("stack-intel-oneapi-mpi", stack_impi_ver)) +load(pathJoin("stack-python", stack_python_ver)) + +load(pathJoin("prod-util", prod_util_ver)) diff --git a/modulefiles/tasks/orion/task.prep_obs.lua b/modulefiles/tasks/orion/task.prep_obs.lua new file mode 100644 index 00000000..0655a281 --- /dev/null +++ b/modulefiles/tasks/orion/task.prep_obs.lua @@ -0,0 +1,8 @@ +prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack")) +prepend_path("MODULEPATH", os.getenv("modulepath_spack_stack_jedi")) + +load(pathJoin("stack-intel", stack_intel_ver)) +load(pathJoin("stack-intel-oneapi-mpi", stack_impi_ver)) +load(pathJoin("stack-python", stack_python_ver)) + +load(pathJoin("prod-util", prod_util_ver)) diff --git a/parm/land_analysis_era5_hera.yaml b/parm/land_analysis_era5_hera.yaml index a5664795..0a4df89f 100644 --- a/parm/land_analysis_era5_hera.yaml +++ b/parm/land_analysis_era5_hera.yaml @@ -8,37 +8,44 @@ workflow: spec: 201912210000 201912210000 24:00:00 entities: MACHINE: "hera" + SCHED: "slurm" ACCOUNT: "nems" EXP_NAME: "LETKF" - NET: "landda" - model_ver: "v1.2.1" EXP_BASEDIR: "/scratch2/NAGAPE/epic/{USER}/landda_test" JEDI_INSTALL: "/scratch2/NAGAPE/epic/UFS_Land-DA/jedi" LANDDA_INPUTS: "/scratch2/NAGAPE/epic/UFS_Land-DA/inputs" FORCING: "era5" RES: "96" FCSTHR: "24" - NPROCS_ANA: "6" - NPROCS_FCST: "6" + NPROCS_ANALYSIS: 
"6" + NPROCS_FORECAST: "6" OBS_TYPES: "GHCN" fv3bundle_vn: "psl_develop" DAtype: "letkfoi_snow" SNOWDEPTHVAR: "snwdph" TSTUB: "oro_C96.mx100" + NET: "landda" + envir: "test" + model_ver: "v1.2.1" + HOMElandda: "&EXP_BASEDIR;/land-DA_workflow" + PTMP: "&EXP_BASEDIR;/ptmp" + COMROOT: "&PTMP;/&envir;/com" + DATAROOT: "&PTMP;/&envir;/tmp" + KEEPDATA: "YES" WORKDIR: "&EXP_BASEDIR;/workdir/run_&FORCING;" - CYCLEDIR: "&EXP_BASEDIR;/land-DA_workflow" - EXECdir: "&CYCLEDIR;/exec" - OUTDIR: "&EXP_BASEDIR;/com/&NET;/&model_ver;/run_&FORCING;" - LOGDIR: "&EXP_BASEDIR;/com/output/logs/run_&FORCING;" + LOGDIR: "&COMROOT;/output/logs/run_&FORCING;" PATHRT: "&EXP_BASEDIR;" - CTIME: "@Y@m@d@H" - PTIME: "@Y@m@d@H" - NTIME: "@Y@m@d@H" + PDY: "@Y@m@d" + cyc: "@H" + SLASH_ENSMEM_SUBDIR: "" + PTIME: "@Y@m@d@H" + NTIME: "@Y@m@d@H" log: "&LOGDIR;/workflow.log" tasks: - task_prepexp: + task_prep_exp: envars: MACHINE: "&MACHINE;" + SCHED: "&SCHED;" ACCOUNT: "&ACCOUNT;" EXP_NAME: "&EXP_NAME;" LANDDA_INPUTS: "&LANDDA_INPUTS;" @@ -46,35 +53,44 @@ workflow: RES: "&RES;" TSTUB: "&TSTUB;" WORKDIR: "&WORKDIR;" - CYCLEDIR: "&CYCLEDIR;" - EXECdir: "&EXECdir;" - OUTDIR: "&OUTDIR;" - CTIME: "&CTIME;" + model_ver: "&model_ver;" + HOMElandda: "&HOMElandda;" + COMROOT: "&COMROOT;" + DATAROOT: "&DATAROOT;" + KEEPDATA: "&KEEPDATA;" + PDY: "&PDY;" + cyc: "&cyc;" + SLASH_ENSMEM_SUBDIR: "&SLASH_ENSMEM_SUBDIR;" PTIME: "&PTIME;" account: "&ACCOUNT;" - command: "&CYCLEDIR;/jobs/JLANDDA_PREP_EXP" - jobname: prepexp + command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "prep_exp" "&HOMElandda;" "&MACHINE;"' + jobname: prep_exp cores: 1 walltime: 00:02:00 queue: batch join: "&LOGDIR;/prep_exp.log" - task_prepobs: + task_prep_obs: envars: OBS_TYPES: "&OBS_TYPES;" MACHINE: "&MACHINE;" + SCHED: "&SCHED;" ACCOUNT: "&ACCOUNT;" EXP_NAME: "&EXP_NAME;" LANDDA_INPUTS: "&LANDDA_INPUTS;" ATMOS_FORC: "&FORCING;" WORKDIR: "&WORKDIR;" - CYCLEDIR: "&CYCLEDIR;" - EXECdir: "&EXECdir;" - OUTDIR: "&OUTDIR;" - CTIME: "&CTIME;" + model_ver: "&model_ver;" + HOMElandda: "&HOMElandda;" + COMROOT: "&COMROOT;" + DATAROOT: "&DATAROOT;" + KEEPDATA: "&KEEPDATA;" + PDY: "&PDY;" + cyc: "&cyc;" + SLASH_ENSMEM_SUBDIR: "&SLASH_ENSMEM_SUBDIR;" PTIME: "&PTIME;" account: "&ACCOUNT;" - command: "&CYCLEDIR;/jobs/JLANDDA_PREP_OBS" - jobname: prepobs + command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "prep_obs" "&HOMElandda;" "&MACHINE;"' + jobname: prep_obs cores: 1 walltime: 00:02:00 queue: batch @@ -82,26 +98,31 @@ workflow: dependency: taskdep: attrs: - task: prepexp - task_prepbmat: + task: prep_exp + task_prep_bmat: envars: MACHINE: "&MACHINE;" + SCHED: "&SCHED;" ACCOUNT: "&ACCOUNT;" EXP_NAME: "&EXP_NAME;" LANDDA_INPUTS: "&LANDDA_INPUTS;" ATMOS_FORC: "&FORCING;" WORKDIR: "&WORKDIR;" - CYCLEDIR: "&CYCLEDIR;" - EXECdir: "&EXECdir;" - OUTDIR: "&OUTDIR;" - CTIME: "&CTIME;" + model_ver: "&model_ver;" + HOMElandda: "&HOMElandda;" + COMROOT: "&COMROOT;" + DATAROOT: "&DATAROOT;" + KEEPDATA: "&KEEPDATA;" + PDY: "&PDY;" + cyc: "&cyc;" + SLASH_ENSMEM_SUBDIR: "&SLASH_ENSMEM_SUBDIR;" PTIME: "&PTIME;" fv3bundle_vn: "&fv3bundle_vn;" DAtype: "&DAtype;" SNOWDEPTHVAR: "&SNOWDEPTHVAR;" account: "&ACCOUNT;" - command: "&CYCLEDIR;/jobs/JLANDDA_PREP_BMAT" - jobname: prepbmat + command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "prep_bmat" "&HOMElandda;" "&MACHINE;"' + jobname: prep_bmat cores: 1 walltime: 00:02:00 queue: batch @@ -109,11 +130,12 @@ workflow: dependency: taskdep: attrs: - task: prepobs - task_runana: + task: prep_obs + task_analysis: envars: OBS_TYPES: 
"&OBS_TYPES;" MACHINE: "&MACHINE;" + SCHED: "&SCHED;" ACCOUNT: "&ACCOUNT;" EXP_NAME: "&EXP_NAME;" LANDDA_INPUTS: "&LANDDA_INPUTS;" @@ -121,32 +143,37 @@ workflow: RES: "&RES;" TSTUB: "&TSTUB;" WORKDIR: "&WORKDIR;" - CYCLEDIR: "&CYCLEDIR;" - EXECdir: "&EXECdir;" - OUTDIR: "&OUTDIR;" - CTIME: "&CTIME;" + model_ver: "&model_ver;" + HOMElandda: "&HOMElandda;" + COMROOT: "&COMROOT;" + DATAROOT: "&DATAROOT;" + KEEPDATA: "&KEEPDATA;" + PDY: "&PDY;" + cyc: "&cyc;" + SLASH_ENSMEM_SUBDIR: "&SLASH_ENSMEM_SUBDIR;" PTIME: "&PTIME;" NTIME: "&NTIME;" fv3bundle_vn: "&fv3bundle_vn;" DAtype: "&DAtype;" SNOWDEPTHVAR: "&SNOWDEPTHVAR;" - NPROC_JEDI: "&NPROCS_ANA;" + NPROC_JEDI: "&NPROCS_ANALYSIS;" JEDI_INSTALL: "&JEDI_INSTALL;" account: "&ACCOUNT;" - command: "&CYCLEDIR;/jobs/JLANDDA_RUN_ANA" - jobname: runana - nodes: "1:ppn=&NPROCS_ANA;" + command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "analysis" "&HOMElandda;" "&MACHINE;"' + jobname: analysis + nodes: "1:ppn=&NPROCS_ANALYSIS;" walltime: 00:15:00 queue: batch - join: "&LOGDIR;/run_ana.log" + join: "&LOGDIR;/analysis.log" dependency: taskdep: attrs: - task: prepbmat - task_runfcst: + task: prep_bmat + task_forecast: envars: OBS_TYPES: "&OBS_TYPES;" MACHINE: "&MACHINE;" + SCHED: "&SCHED;" ACCOUNT: "&ACCOUNT;" EXP_NAME: "&EXP_NAME;" LANDDA_INPUTS: "&LANDDA_INPUTS;" @@ -154,10 +181,15 @@ workflow: RES: "&RES;" TSTUB: "&TSTUB;" WORKDIR: "&WORKDIR;" - CYCLEDIR: "&CYCLEDIR;" - EXECdir: "&EXECdir;" - OUTDIR: "&OUTDIR;" - CTIME: "&CTIME;" + model_ver: "&model_ver;" + HOMElandda: "&HOMElandda;" + COMROOT: "&COMROOT;" + DATAROOT: "&DATAROOT;" + KEEPDATA: "&KEEPDATA;" + LOGDIR: "&LOGDIR;" + PDY: "&PDY;" + cyc: "&cyc;" + SLASH_ENSMEM_SUBDIR: "&SLASH_ENSMEM_SUBDIR;" PTIME: "&PTIME;" NTIME: "&NTIME;" fv3bundle_vn: "&fv3bundle_vn;" @@ -166,13 +198,13 @@ workflow: JEDI_INSTALL: "&JEDI_INSTALL;" FCSTHR: "&FCSTHR;" account: "&ACCOUNT;" - command: "&CYCLEDIR;/jobs/JLANDDA_RUN_FCST" - jobname: runfcst - nodes: "1:ppn=&NPROCS_FCST;" + command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "forecast" "&HOMElandda;" "&MACHINE;"' + jobname: forecast + nodes: "1:ppn=&NPROCS_FORECAST;" walltime: 00:30:00 queue: batch - join: "&LOGDIR;/run_fcst.log" + join: "&LOGDIR;/forecast.log" dependency: taskdep: attrs: - task: runana + task: analysis diff --git a/parm/land_analysis_era5_orion.yaml b/parm/land_analysis_era5_orion.yaml index a676ed84..5b049c86 100644 --- a/parm/land_analysis_era5_orion.yaml +++ b/parm/land_analysis_era5_orion.yaml @@ -8,37 +8,44 @@ workflow: spec: 201912210000 201912210000 24:00:00 entities: MACHINE: "orion" + SCHED: "slurm" ACCOUNT: "epic" EXP_NAME: "LETKF" - NET: "landda" - model_ver: "v1.2.1" EXP_BASEDIR: "/work/noaa/epic/{USER}/landda_test" JEDI_INSTALL: "/work/noaa/epic/UFS_Land-DA/jedi" LANDDA_INPUTS: "/work/noaa/epic/UFS_Land-DA/inputs" FORCING: "era5" RES: "96" FCSTHR: "24" - NPROCS_ANA: "6" - NPROCS_FCST: "6" + NPROCS_ANALYSIS: "6" + NPROCS_FORECAST: "6" OBS_TYPES: "GHCN" fv3bundle_vn: "psl_develop" DAtype: "letkfoi_snow" SNOWDEPTHVAR: "snwdph" TSTUB: "oro_C96.mx100" + NET: "landda" + envir: "test" + model_ver: "v1.2.1" + HOMElandda: "&EXP_BASEDIR;/land-DA_workflow" + PTMP: "&EXP_BASEDIR;/ptmp" + COMROOT: "&PTMP;/&envir;/com" + DATAROOT: "&PTMP;/&envir;/tmp" + KEEPDATA: "YES" WORKDIR: "&EXP_BASEDIR;/workdir/run_&FORCING;" - CYCLEDIR: "&EXP_BASEDIR;/land-DA_workflow" - EXECdir: "&CYCLEDIR;/exec" - OUTDIR: "&EXP_BASEDIR;/com/&NET;/&model_ver;/run_&FORCING;" - LOGDIR: "&EXP_BASEDIR;/com/output/logs/run_&FORCING;" + LOGDIR: 
"&COMROOT;/output/logs/run_&FORCING;" PATHRT: "&EXP_BASEDIR;" - CTIME: "@Y@m@d@H" - PTIME: "@Y@m@d@H" - NTIME: "@Y@m@d@H" + PDY: "@Y@m@d" + cyc: "@H" + SLASH_ENSMEM_SUBDIR: "" + PTIME: "@Y@m@d@H" + NTIME: "@Y@m@d@H" log: "&LOGDIR;/workflow.log" tasks: - task_prepexp: + task_prep_exp: envars: MACHINE: "&MACHINE;" + SCHED: "&SCHED;" ACCOUNT: "&ACCOUNT;" EXP_NAME: "&EXP_NAME;" LANDDA_INPUTS: "&LANDDA_INPUTS;" @@ -46,35 +53,44 @@ workflow: RES: "&RES;" TSTUB: "&TSTUB;" WORKDIR: "&WORKDIR;" - CYCLEDIR: "&CYCLEDIR;" - EXECdir: "&EXECdir;" - OUTDIR: "&OUTDIR;" - CTIME: "&CTIME;" + model_ver: "&model_ver;" + HOMElandda: "&HOMElandda;" + COMROOT: "&COMROOT;" + DATAROOT: "&DATAROOT;" + KEEPDATA: "&KEEPDATA;" + PDY: "&PDY;" + cyc: "&cyc;" + SLASH_ENSMEM_SUBDIR: "&SLASH_ENSMEM_SUBDIR;" PTIME: "&PTIME;" account: "&ACCOUNT;" - command: "&CYCLEDIR;/jobs/JLANDDA_PREP_EXP" - jobname: prepexp + command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "prep_exp" "&HOMElandda;" "&MACHINE;"' + jobname: prep_exp cores: 1 walltime: 00:02:00 queue: batch join: "&LOGDIR;/prep_exp.log" - task_prepobs: + task_prep_obs: envars: OBS_TYPES: "&OBS_TYPES;" MACHINE: "&MACHINE;" + SCHED: "&SCHED;" ACCOUNT: "&ACCOUNT;" EXP_NAME: "&EXP_NAME;" LANDDA_INPUTS: "&LANDDA_INPUTS;" ATMOS_FORC: "&FORCING;" WORKDIR: "&WORKDIR;" - CYCLEDIR: "&CYCLEDIR;" - EXECdir: "&EXECdir;" - OUTDIR: "&OUTDIR;" - CTIME: "&CTIME;" + model_ver: "&model_ver;" + HOMElandda: "&HOMElandda;" + COMROOT: "&COMROOT;" + DATAROOT: "&DATAROOT;" + KEEPDATA: "&KEEPDATA;" + PDY: "&PDY;" + cyc: "&cyc;" + SLASH_ENSMEM_SUBDIR: "&SLASH_ENSMEM_SUBDIR;" PTIME: "&PTIME;" account: "&ACCOUNT;" - command: "&CYCLEDIR;/jobs/JLANDDA_PREP_OBS" - jobname: prepobs + command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "prep_obs" "&HOMElandda;" "&MACHINE;"' + jobname: prep_obs cores: 1 walltime: 00:02:00 queue: batch @@ -82,26 +98,31 @@ workflow: dependency: taskdep: attrs: - task: prepexp - task_prepbmat: + task: prep_exp + task_prep_bmat: envars: MACHINE: "&MACHINE;" + SCHED: "&SCHED;" ACCOUNT: "&ACCOUNT;" EXP_NAME: "&EXP_NAME;" LANDDA_INPUTS: "&LANDDA_INPUTS;" ATMOS_FORC: "&FORCING;" WORKDIR: "&WORKDIR;" - CYCLEDIR: "&CYCLEDIR;" - EXECdir: "&EXECdir;" - OUTDIR: "&OUTDIR;" - CTIME: "&CTIME;" + model_ver: "&model_ver;" + HOMElandda: "&HOMElandda;" + COMROOT: "&COMROOT;" + DATAROOT: "&DATAROOT;" + KEEPDATA: "&KEEPDATA;" + PDY: "&PDY;" + cyc: "&cyc;" + SLASH_ENSMEM_SUBDIR: "&SLASH_ENSMEM_SUBDIR;" PTIME: "&PTIME;" fv3bundle_vn: "&fv3bundle_vn;" DAtype: "&DAtype;" SNOWDEPTHVAR: "&SNOWDEPTHVAR;" account: "&ACCOUNT;" - command: "&CYCLEDIR;/jobs/JLANDDA_PREP_BMAT" - jobname: prepbmat + command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "prep_bmat" "&HOMElandda;" "&MACHINE;"' + jobname: prep_bmat cores: 1 walltime: 00:02:00 queue: batch @@ -109,11 +130,12 @@ workflow: dependency: taskdep: attrs: - task: prepobs - task_runana: + task: prep_obs + task_analysis: envars: OBS_TYPES: "&OBS_TYPES;" MACHINE: "&MACHINE;" + SCHED: "&SCHED;" ACCOUNT: "&ACCOUNT;" EXP_NAME: "&EXP_NAME;" LANDDA_INPUTS: "&LANDDA_INPUTS;" @@ -121,32 +143,37 @@ workflow: RES: "&RES;" TSTUB: "&TSTUB;" WORKDIR: "&WORKDIR;" - CYCLEDIR: "&CYCLEDIR;" - EXECdir: "&EXECdir;" - OUTDIR: "&OUTDIR;" - CTIME: "&CTIME;" + model_ver: "&model_ver;" + HOMElandda: "&HOMElandda;" + COMROOT: "&COMROOT;" + DATAROOT: "&DATAROOT;" + KEEPDATA: "&KEEPDATA;" + PDY: "&PDY;" + cyc: "&cyc;" + SLASH_ENSMEM_SUBDIR: "&SLASH_ENSMEM_SUBDIR;" PTIME: "&PTIME;" NTIME: "&NTIME;" fv3bundle_vn: "&fv3bundle_vn;" DAtype: "&DAtype;" SNOWDEPTHVAR: 
"&SNOWDEPTHVAR;" - NPROC_JEDI: "&NPROCS_ANA;" + NPROC_JEDI: "&NPROCS_ANALYSIS;" JEDI_INSTALL: "&JEDI_INSTALL;" account: "&ACCOUNT;" - command: "&CYCLEDIR;/jobs/JLANDDA_RUN_ANA" - jobname: runana - nodes: "1:ppn=&NPROCS_ANA;" + command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "analysis" "&HOMElandda;" "&MACHINE;"' + jobname: analysis + nodes: "1:ppn=&NPROCS_ANALYSIS;" walltime: 00:15:00 queue: batch - join: "&LOGDIR;/run_ana.log" + join: "&LOGDIR;/analysis.log" dependency: taskdep: attrs: - task: prepbmat - task_runfcst: + task: prep_bmat + task_forecast: envars: OBS_TYPES: "&OBS_TYPES;" MACHINE: "&MACHINE;" + SCHED: "&SCHED;" ACCOUNT: "&ACCOUNT;" EXP_NAME: "&EXP_NAME;" LANDDA_INPUTS: "&LANDDA_INPUTS;" @@ -154,10 +181,15 @@ workflow: RES: "&RES;" TSTUB: "&TSTUB;" WORKDIR: "&WORKDIR;" - CYCLEDIR: "&CYCLEDIR;" - EXECdir: "&EXECdir;" - OUTDIR: "&OUTDIR;" - CTIME: "&CTIME;" + model_ver: "&model_ver;" + HOMElandda: "&HOMElandda;" + COMROOT: "&COMROOT;" + DATAROOT: "&DATAROOT;" + KEEPDATA: "&KEEPDATA;" + LOGDIR: "&LOGDIR;" + PDY: "&PDY;" + cyc: "&cyc;" + SLASH_ENSMEM_SUBDIR: "&SLASH_ENSMEM_SUBDIR;" PTIME: "&PTIME;" NTIME: "&NTIME;" fv3bundle_vn: "&fv3bundle_vn;" @@ -166,13 +198,13 @@ workflow: JEDI_INSTALL: "&JEDI_INSTALL;" FCSTHR: "&FCSTHR;" account: "&ACCOUNT;" - command: "&CYCLEDIR;/jobs/JLANDDA_RUN_FCST" - jobname: runfcst - nodes: "1:ppn=&NPROCS_FCST;" + command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "forecast" "&HOMElandda;" "&MACHINE;"' + jobname: forecast + nodes: "1:ppn=&NPROCS_FORECAST;" walltime: 00:30:00 queue: batch - join: "&LOGDIR;/run_fcst.log" + join: "&LOGDIR;/forecast.log" dependency: taskdep: attrs: - task: runana + task: analysis diff --git a/parm/land_analysis_gswp3_hera.yaml b/parm/land_analysis_gswp3_hera.yaml index d061a01f..c4d42f57 100644 --- a/parm/land_analysis_gswp3_hera.yaml +++ b/parm/land_analysis_gswp3_hera.yaml @@ -8,37 +8,44 @@ workflow: spec: 200001030000 200001030000 24:00:00 entities: MACHINE: "hera" - ACCOUNT: "epic" + SCHED: "slurm" + ACCOUNT: "nems" EXP_NAME: "LETKF" - NET: "landda" - model_ver: "v1.2.1" EXP_BASEDIR: "/scratch2/NAGAPE/epic/{USER}/landda_test" JEDI_INSTALL: "/scratch2/NAGAPE/epic/UFS_Land-DA/jedi" LANDDA_INPUTS: "/scratch2/NAGAPE/epic/UFS_Land-DA/inputs" FORCING: "gswp3" RES: "96" FCSTHR: "24" - NPROCS_ANA: "6" - NPROCS_FCST: "6" + NPROCS_ANALYSIS: "6" + NPROCS_FORECAST: "6" OBS_TYPES: "GHCN" fv3bundle_vn: "psl_develop" DAtype: "letkfoi_snow" SNOWDEPTHVAR: "snwdph" TSTUB: "oro_C96.mx100" + NET: "landda" + envir: "test" + model_ver: "v1.2.1" + HOMElandda: "&EXP_BASEDIR;/land-DA_workflow" + PTMP: "&EXP_BASEDIR;/ptmp" + COMROOT: "&PTMP;/&envir;/com" + DATAROOT: "&PTMP;/&envir;/tmp" + KEEPDATA: "YES" WORKDIR: "&EXP_BASEDIR;/workdir/run_&FORCING;" - CYCLEDIR: "&EXP_BASEDIR;/land-DA_workflow" - EXECdir: "&CYCLEDIR;/exec" - OUTDIR: "&EXP_BASEDIR;/com/&NET;/&model_ver;/run_&FORCING;" - LOGDIR: "&EXP_BASEDIR;/com/output/logs/run_&FORCING;" + LOGDIR: "&COMROOT;/output/logs/run_&FORCING;" PATHRT: "&EXP_BASEDIR;" - CTIME: "@Y@m@d@H" + PDY: "@Y@m@d" + cyc: "@H" + SLASH_ENSMEM_SUBDIR: "" PTIME: "@Y@m@d@H" NTIME: "@Y@m@d@H" log: "&LOGDIR;/workflow.log" tasks: - task_prepexp: + task_prep_exp: envars: MACHINE: "&MACHINE;" + SCHED: "&SCHED;" ACCOUNT: "&ACCOUNT;" EXP_NAME: "&EXP_NAME;" LANDDA_INPUTS: "&LANDDA_INPUTS;" @@ -46,35 +53,44 @@ workflow: RES: "&RES;" TSTUB: "&TSTUB;" WORKDIR: "&WORKDIR;" - CYCLEDIR: "&CYCLEDIR;" - EXECdir: "&EXECdir;" - OUTDIR: "&OUTDIR;" - CTIME: "&CTIME;" + model_ver: "&model_ver;" + HOMElandda: "&HOMElandda;" 
+ COMROOT: "&COMROOT;" + DATAROOT: "&DATAROOT;" + KEEPDATA: "&KEEPDATA;" + PDY: "&PDY;" + cyc: "&cyc;" + SLASH_ENSMEM_SUBDIR: "&SLASH_ENSMEM_SUBDIR;" PTIME: "&PTIME;" account: "&ACCOUNT;" - command: "&CYCLEDIR;/jobs/JLANDDA_PREP_EXP" - jobname: prepexp + command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "prep_exp" "&HOMElandda;" "&MACHINE;"' + jobname: prep_exp cores: 1 walltime: 00:02:00 queue: batch join: "&LOGDIR;/prep_exp.log" - task_prepobs: + task_prep_obs: envars: OBS_TYPES: "&OBS_TYPES;" MACHINE: "&MACHINE;" + SCHED: "&SCHED;" ACCOUNT: "&ACCOUNT;" EXP_NAME: "&EXP_NAME;" LANDDA_INPUTS: "&LANDDA_INPUTS;" ATMOS_FORC: "&FORCING;" WORKDIR: "&WORKDIR;" - CYCLEDIR: "&CYCLEDIR;" - EXECdir: "&EXECdir;" - OUTDIR: "&OUTDIR;" - CTIME: "&CTIME;" + model_ver: "&model_ver;" + HOMElandda: "&HOMElandda;" + COMROOT: "&COMROOT;" + DATAROOT: "&DATAROOT;" + KEEPDATA: "&KEEPDATA;" + PDY: "&PDY;" + cyc: "&cyc;" + SLASH_ENSMEM_SUBDIR: "&SLASH_ENSMEM_SUBDIR;" PTIME: "&PTIME;" account: "&ACCOUNT;" - command: "&CYCLEDIR;/jobs/JLANDDA_PREP_OBS" - jobname: prepobs + command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "prep_obs" "&HOMElandda;" "&MACHINE;"' + jobname: prep_obs cores: 1 walltime: 00:02:00 queue: batch @@ -82,26 +98,31 @@ workflow: dependency: taskdep: attrs: - task: prepexp - task_prepbmat: + task: prep_exp + task_prep_bmat: envars: MACHINE: "&MACHINE;" + SCHED: "&SCHED;" ACCOUNT: "&ACCOUNT;" EXP_NAME: "&EXP_NAME;" LANDDA_INPUTS: "&LANDDA_INPUTS;" ATMOS_FORC: "&FORCING;" WORKDIR: "&WORKDIR;" - CYCLEDIR: "&CYCLEDIR;" - EXECdir: "&EXECdir;" - OUTDIR: "&OUTDIR;" - CTIME: "&CTIME;" + model_ver: "&model_ver;" + HOMElandda: "&HOMElandda;" + COMROOT: "&COMROOT;" + DATAROOT: "&DATAROOT;" + KEEPDATA: "&KEEPDATA;" + PDY: "&PDY;" + cyc: "&cyc;" + SLASH_ENSMEM_SUBDIR: "&SLASH_ENSMEM_SUBDIR;" PTIME: "&PTIME;" fv3bundle_vn: "&fv3bundle_vn;" DAtype: "&DAtype;" SNOWDEPTHVAR: "&SNOWDEPTHVAR;" account: "&ACCOUNT;" - command: "&CYCLEDIR;/jobs/JLANDDA_PREP_BMAT" - jobname: prepbmat + command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "prep_bmat" "&HOMElandda;" "&MACHINE;"' + jobname: prep_bmat cores: 1 walltime: 00:02:00 queue: batch @@ -109,11 +130,12 @@ workflow: dependency: taskdep: attrs: - task: prepobs - task_runana: + task: prep_obs + task_analysis: envars: OBS_TYPES: "&OBS_TYPES;" MACHINE: "&MACHINE;" + SCHED: "&SCHED;" ACCOUNT: "&ACCOUNT;" EXP_NAME: "&EXP_NAME;" LANDDA_INPUTS: "&LANDDA_INPUTS;" @@ -121,32 +143,37 @@ workflow: RES: "&RES;" TSTUB: "&TSTUB;" WORKDIR: "&WORKDIR;" - CYCLEDIR: "&CYCLEDIR;" - EXECdir: "&EXECdir;" - OUTDIR: "&OUTDIR;" - CTIME: "&CTIME;" + model_ver: "&model_ver;" + HOMElandda: "&HOMElandda;" + COMROOT: "&COMROOT;" + DATAROOT: "&DATAROOT;" + KEEPDATA: "&KEEPDATA;" + PDY: "&PDY;" + cyc: "&cyc;" + SLASH_ENSMEM_SUBDIR: "&SLASH_ENSMEM_SUBDIR;" PTIME: "&PTIME;" NTIME: "&NTIME;" fv3bundle_vn: "&fv3bundle_vn;" DAtype: "&DAtype;" SNOWDEPTHVAR: "&SNOWDEPTHVAR;" - NPROC_JEDI: "&NPROCS_ANA;" + NPROC_JEDI: "&NPROCS_ANALYSIS;" JEDI_INSTALL: "&JEDI_INSTALL;" account: "&ACCOUNT;" - command: "&CYCLEDIR;/jobs/JLANDDA_RUN_ANA" - jobname: runana - nodes: "1:ppn=&NPROCS_ANA;" + command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "analysis" "&HOMElandda;" "&MACHINE;"' + jobname: analysis + nodes: "1:ppn=&NPROCS_ANALYSIS;" walltime: 00:15:00 queue: batch - join: "&LOGDIR;/run_ana.log" + join: "&LOGDIR;/analysis.log" dependency: taskdep: attrs: - task: prepbmat - task_runfcst: + task: prep_bmat + task_forecast: envars: OBS_TYPES: "&OBS_TYPES;" MACHINE: "&MACHINE;" + SCHED: 
"&SCHED;" ACCOUNT: "&ACCOUNT;" EXP_NAME: "&EXP_NAME;" LANDDA_INPUTS: "&LANDDA_INPUTS;" @@ -154,10 +181,15 @@ workflow: RES: "&RES;" TSTUB: "&TSTUB;" WORKDIR: "&WORKDIR;" - CYCLEDIR: "&CYCLEDIR;" - EXECdir: "&EXECdir;" - OUTDIR: "&OUTDIR;" - CTIME: "&CTIME;" + model_ver: "&model_ver;" + HOMElandda: "&HOMElandda;" + COMROOT: "&COMROOT;" + DATAROOT: "&DATAROOT;" + KEEPDATA: "&KEEPDATA;" + LOGDIR: "&LOGDIR;" + PDY: "&PDY;" + cyc: "&cyc;" + SLASH_ENSMEM_SUBDIR: "&SLASH_ENSMEM_SUBDIR;" PTIME: "&PTIME;" NTIME: "&NTIME;" fv3bundle_vn: "&fv3bundle_vn;" @@ -166,13 +198,13 @@ workflow: JEDI_INSTALL: "&JEDI_INSTALL;" FCSTHR: "&FCSTHR;" account: "&ACCOUNT;" - command: "&CYCLEDIR;/jobs/JLANDDA_RUN_FCST" - jobname: runfcst - nodes: "1:ppn=&NPROCS_FCST;" - walltime: 00:30:00 + command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "forecast" "&HOMElandda;" "&MACHINE;"' + jobname: forecast + nodes: "1:ppn=&NPROCS_FORECAST;" + walltime: 00:45:00 queue: batch - join: "&LOGDIR;/run_fcst.log" + join: "&LOGDIR;/forecast.log" dependency: taskdep: attrs: - task: runana + task: analysis diff --git a/parm/land_analysis_gswp3_orion.yaml b/parm/land_analysis_gswp3_orion.yaml index c06e091e..51c09e43 100644 --- a/parm/land_analysis_gswp3_orion.yaml +++ b/parm/land_analysis_gswp3_orion.yaml @@ -8,37 +8,44 @@ workflow: spec: 200001030000 200001030000 24:00:00 entities: MACHINE: "orion" + SCHED: "slurm" ACCOUNT: "epic" EXP_NAME: "LETKF" - NET: "landda" - model_ver: "v1.2.1" EXP_BASEDIR: "/work/noaa/epic/{USER}/landda_test" JEDI_INSTALL: "/work/noaa/epic/UFS_Land-DA/jedi" LANDDA_INPUTS: "/work/noaa/epic/UFS_Land-DA/inputs" FORCING: "gswp3" RES: "96" FCSTHR: "24" - NPROCS_ANA: "6" - NPROCS_FCST: "6" + NPROCS_ANALYSIS: "6" + NPROCS_FORECAST: "6" OBS_TYPES: "GHCN" fv3bundle_vn: "psl_develop" DAtype: "letkfoi_snow" SNOWDEPTHVAR: "snwdph" TSTUB: "oro_C96.mx100" + NET: "landda" + envir: "test" + model_ver: "v1.2.1" + HOMElandda: "&EXP_BASEDIR;/land-DA_workflow" + PTMP: "&EXP_BASEDIR;/ptmp" + COMROOT: "&PTMP;/&envir;/com" + DATAROOT: "&PTMP;/&envir;/tmp" + KEEPDATA: "YES" WORKDIR: "&EXP_BASEDIR;/workdir/run_&FORCING;" - CYCLEDIR: "&EXP_BASEDIR;/land-DA_workflow" - EXECdir: "&CYCLEDIR;/exec" - OUTDIR: "&EXP_BASEDIR;/com/&NET;/&model_ver;/run_&FORCING;" - LOGDIR: "&EXP_BASEDIR;/com/output/logs/run_&FORCING;" + LOGDIR: "&COMROOT;/output/logs/run_&FORCING;" PATHRT: "&EXP_BASEDIR;" - CTIME: "@Y@m@d@H" + PDY: "@Y@m@d" + cyc: "@H" + SLASH_ENSMEM_SUBDIR: "" PTIME: "@Y@m@d@H" NTIME: "@Y@m@d@H" log: "&LOGDIR;/workflow.log" tasks: - task_prepexp: + task_prep_exp: envars: MACHINE: "&MACHINE;" + SCHED: "&SCHED;" ACCOUNT: "&ACCOUNT;" EXP_NAME: "&EXP_NAME;" LANDDA_INPUTS: "&LANDDA_INPUTS;" @@ -46,35 +53,44 @@ workflow: RES: "&RES;" TSTUB: "&TSTUB;" WORKDIR: "&WORKDIR;" - CYCLEDIR: "&CYCLEDIR;" - EXECdir: "&EXECdir;" - OUTDIR: "&OUTDIR;" - CTIME: "&CTIME;" + model_ver: "&model_ver;" + HOMElandda: "&HOMElandda;" + COMROOT: "&COMROOT;" + DATAROOT: "&DATAROOT;" + KEEPDATA: "&KEEPDATA;" + PDY: "&PDY;" + cyc: "&cyc;" + SLASH_ENSMEM_SUBDIR: "&SLASH_ENSMEM_SUBDIR;" PTIME: "&PTIME;" account: "&ACCOUNT;" - command: "&CYCLEDIR;/jobs/JLANDDA_PREP_EXP" - jobname: prepexp + command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "prep_exp" "&HOMElandda;" "&MACHINE;"' + jobname: prep_exp cores: 1 walltime: 00:02:00 queue: batch join: "&LOGDIR;/prep_exp.log" - task_prepobs: + task_prep_obs: envars: OBS_TYPES: "&OBS_TYPES;" MACHINE: "&MACHINE;" + SCHED: "&SCHED;" ACCOUNT: "&ACCOUNT;" EXP_NAME: "&EXP_NAME;" LANDDA_INPUTS: "&LANDDA_INPUTS;" ATMOS_FORC: 
"&FORCING;" WORKDIR: "&WORKDIR;" - CYCLEDIR: "&CYCLEDIR;" - EXECdir: "&EXECdir;" - OUTDIR: "&OUTDIR;" - CTIME: "&CTIME;" + model_ver: "&model_ver;" + HOMElandda: "&HOMElandda;" + COMROOT: "&COMROOT;" + DATAROOT: "&DATAROOT;" + KEEPDATA: "&KEEPDATA;" + PDY: "&PDY;" + cyc: "&cyc;" + SLASH_ENSMEM_SUBDIR: "&SLASH_ENSMEM_SUBDIR;" PTIME: "&PTIME;" account: "&ACCOUNT;" - command: "&CYCLEDIR;/jobs/JLANDDA_PREP_OBS" - jobname: prepobs + command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "prep_obs" "&HOMElandda;" "&MACHINE;"' + jobname: prep_obs cores: 1 walltime: 00:02:00 queue: batch @@ -82,26 +98,31 @@ workflow: dependency: taskdep: attrs: - task: prepexp - task_prepbmat: + task: prep_exp + task_prep_bmat: envars: MACHINE: "&MACHINE;" + SCHED: "&SCHED;" ACCOUNT: "&ACCOUNT;" EXP_NAME: "&EXP_NAME;" LANDDA_INPUTS: "&LANDDA_INPUTS;" ATMOS_FORC: "&FORCING;" WORKDIR: "&WORKDIR;" - CYCLEDIR: "&CYCLEDIR;" - EXECdir: "&EXECdir;" - OUTDIR: "&OUTDIR;" - CTIME: "&CTIME;" + model_ver: "&model_ver;" + HOMElandda: "&HOMElandda;" + COMROOT: "&COMROOT;" + DATAROOT: "&DATAROOT;" + KEEPDATA: "&KEEPDATA;" + PDY: "&PDY;" + cyc: "&cyc;" + SLASH_ENSMEM_SUBDIR: "&SLASH_ENSMEM_SUBDIR;" PTIME: "&PTIME;" fv3bundle_vn: "&fv3bundle_vn;" DAtype: "&DAtype;" SNOWDEPTHVAR: "&SNOWDEPTHVAR;" account: "&ACCOUNT;" - command: "&CYCLEDIR;/jobs/JLANDDA_PREP_BMAT" - jobname: prepbmat + command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "prep_bmat" "&HOMElandda;" "&MACHINE;"' + jobname: prep_bmat cores: 1 walltime: 00:02:00 queue: batch @@ -109,11 +130,12 @@ workflow: dependency: taskdep: attrs: - task: prepobs - task_runana: + task: prep_obs + task_analysis: envars: OBS_TYPES: "&OBS_TYPES;" MACHINE: "&MACHINE;" + SCHED: "&SCHED;" ACCOUNT: "&ACCOUNT;" EXP_NAME: "&EXP_NAME;" LANDDA_INPUTS: "&LANDDA_INPUTS;" @@ -121,32 +143,37 @@ workflow: RES: "&RES;" TSTUB: "&TSTUB;" WORKDIR: "&WORKDIR;" - CYCLEDIR: "&CYCLEDIR;" - EXECdir: "&EXECdir;" - OUTDIR: "&OUTDIR;" - CTIME: "&CTIME;" + model_ver: "&model_ver;" + HOMElandda: "&HOMElandda;" + COMROOT: "&COMROOT;" + DATAROOT: "&DATAROOT;" + KEEPDATA: "&KEEPDATA;" + PDY: "&PDY;" + cyc: "&cyc;" + SLASH_ENSMEM_SUBDIR: "&SLASH_ENSMEM_SUBDIR;" PTIME: "&PTIME;" NTIME: "&NTIME;" fv3bundle_vn: "&fv3bundle_vn;" DAtype: "&DAtype;" SNOWDEPTHVAR: "&SNOWDEPTHVAR;" - NPROC_JEDI: "&NPROCS_ANA;" + NPROC_JEDI: "&NPROCS_ANALYSIS;" JEDI_INSTALL: "&JEDI_INSTALL;" account: "&ACCOUNT;" - command: "&CYCLEDIR;/jobs/JLANDDA_RUN_ANA" - jobname: runana - nodes: "1:ppn=&NPROCS_ANA;" + command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "analysis" "&HOMElandda;" "&MACHINE;"' + jobname: analysis + nodes: "1:ppn=&NPROCS_ANALYSIS;" walltime: 00:15:00 queue: batch - join: "&LOGDIR;/run_ana.log" + join: "&LOGDIR;/analysis.log" dependency: taskdep: attrs: - task: prepbmat - task_runfcst: + task: prep_bmat + task_forecast: envars: OBS_TYPES: "&OBS_TYPES;" MACHINE: "&MACHINE;" + SCHED: "&SCHED;" ACCOUNT: "&ACCOUNT;" EXP_NAME: "&EXP_NAME;" LANDDA_INPUTS: "&LANDDA_INPUTS;" @@ -154,10 +181,15 @@ workflow: RES: "&RES;" TSTUB: "&TSTUB;" WORKDIR: "&WORKDIR;" - CYCLEDIR: "&CYCLEDIR;" - EXECdir: "&EXECdir;" - OUTDIR: "&OUTDIR;" - CTIME: "&CTIME;" + model_ver: "&model_ver;" + HOMElandda: "&HOMElandda;" + COMROOT: "&COMROOT;" + DATAROOT: "&DATAROOT;" + KEEPDATA: "&KEEPDATA;" + LOGDIR: "&LOGDIR;" + PDY: "&PDY;" + cyc: "&cyc;" + SLASH_ENSMEM_SUBDIR: "&SLASH_ENSMEM_SUBDIR;" PTIME: "&PTIME;" NTIME: "&NTIME;" fv3bundle_vn: "&fv3bundle_vn;" @@ -166,13 +198,13 @@ workflow: JEDI_INSTALL: "&JEDI_INSTALL;" FCSTHR: "&FCSTHR;" account: 
"&ACCOUNT;" - command: "&CYCLEDIR;/jobs/JLANDDA_RUN_FCST" - jobname: runfcst - nodes: "1:ppn=&NPROCS_FCST;" - walltime: 00:45:00 + command: '&HOMElandda;/parm/task_load_modules_run_jjob.sh "forecast" "&HOMElandda;" "&MACHINE;"' + jobname: forecast + nodes: "1:ppn=&NPROCS_FORECAST;" + walltime: 00:30:00 queue: batch - join: "&LOGDIR;/run_fcst.log" + join: "&LOGDIR;/forecast.log" dependency: taskdep: attrs: - task: runana + task: analysis diff --git a/parm/land_analysis_test.xml b/parm/land_analysis_test.xml deleted file mode 100644 index 3bf6d006..00000000 --- a/parm/land_analysis_test.xml +++ /dev/null @@ -1,380 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - @Y@m@d@H"> - @Y@m@d@H"> - @Y@m@d@H"> -]> - - 201912210000 201912210000 24:00:00 - &LOG;/workflow.log - - &ACCOUNT; - 1 - batch - 00:02:00 - &CYCLEDIR;/jobs/JLANDDA_PREP_EXP - prepexp - - MACHINE - &MACHINE; - - - EXP_NAME - &EXP_NAME; - - - LANDDA_INPUTS - &LANDDA_INPUTS; - - - ATMOS_FORC - &FORCING; - - - RES - &RES; - - - TSTUB - &TSTUB; - - - WORKDIR - &WORKDIR; - - - CYCLEDIR - &CYCLEDIR; - - - EXECdir - &EXECdir; - - - OUTDIR - &OUTDIR; - - - CTIME - &CTIME; - - - PTIME - &PTIME; - - - - &ACCOUNT; - 1 - batch - 00:02:00 - &CYCLEDIR;/jobs/JLANDDA_PREP_OBS - prepobs - - OBS_TYPES - &OBS_TYPES; - - - MACHINE - &MACHINE; - - - EXP_NAME - &EXP_NAME; - - - LANDDA_INPUTS - &LANDDA_INPUTS; - - - ATMOS_FORC - &FORCING; - - - WORKDIR - &WORKDIR; - - - CYCLEDIR - &CYCLEDIR; - - - EXECdir - &EXECdir; - - - OUTDIR - &OUTDIR; - - - CTIME - &CTIME; - - - PTIME - &PTIME; - - - - - - - &ACCOUNT; - 1 - batch - 00:02:00 - &CYCLEDIR;/jobs/JLANDDA_PREP_BMAT - prepbmat - - MACHINE - &MACHINE; - - - EXP_NAME - &EXP_NAME; - - - LANDDA_INPUTS - &LANDDA_INPUTS; - - - ATMOS_FORC - &FORCING; - - - WORKDIR - &WORKDIR; - - - CYCLEDIR - &CYCLEDIR; - - - EXECdir - &EXECdir; - - - OUTDIR - &OUTDIR; - - - CTIME - &CTIME; - - - PTIME - &PTIME; - - - fv3bundle_vn - &fv3bundle_vn; - - - DAtype - &DAtype; - - - SNOWDEPTHVAR - &SNOWDEPTHVAR; - - - - - - - &ACCOUNT; - 1:ppn=&NPROCS_ANA; - batch - 00:15:00 - &CYCLEDIR;/jobs/JLANDDA_RUN_ANA - runana - - OBS_TYPES - &OBS_TYPES; - - - MACHINE - &MACHINE; - - - EXP_NAME - &EXP_NAME; - - - LANDDA_INPUTS - &LANDDA_INPUTS; - - - ATMOS_FORC - &FORCING; - - - RES - &RES; - - - TSTUB - &TSTUB; - - - WORKDIR - &WORKDIR; - - - CYCLEDIR - &CYCLEDIR; - - - EXECdir - &EXECdir; - - - OUTDIR - &OUTDIR; - - - CTIME - &CTIME; - - - PTIME - &PTIME; - - - NTIME - &NTIME; - - - fv3bundle_vn - &fv3bundle_vn; - - - DAtype - &DAtype; - - - SNOWDEPTHVAR - &SNOWDEPTHVAR; - - - NPROC_JEDI - &NPROCS_ANA; - - - JEDI_INSTALL - &JEDI_INSTALL; - - - - - - - &ACCOUNT; - 1:ppn=&NPROCS_FCST; - batch - 00:30:00 - &CYCLEDIR;/jobs/JLANDDA_RUN_FCST - runfcst - - OBS_TYPES - &OBS_TYPES; - - - MACHINE - &MACHINE; - - - EXP_NAME - &EXP_NAME; - - - LANDDA_INPUTS - &LANDDA_INPUTS; - - - ATMOS_FORC - &FORCING; - - - RES - &RES; - - - TSTUB - &TSTUB; - - - WORKDIR - &WORKDIR; - - - CYCLEDIR - &CYCLEDIR; - - - EXECdir - &EXECdir; - - - OUTDIR - &OUTDIR; - - - CTIME - &CTIME; - - - PTIME - &PTIME; - - - NTIME - &NTIME; - - - fv3bundle_vn - &fv3bundle_vn; - - - DAtype - &DAtype; - - - SNOWDEPTHVAR - &SNOWDEPTHVAR; - - - JEDI_INSTALL - &JEDI_INSTALL; - - - FCSTHR - &FCSTHR; - - - - - - diff --git a/parm/run_without_rocoto.sh b/parm/run_without_rocoto.sh old mode 100644 new mode 100755 index 4e8a3bb6..87c22ef7 --- a/parm/run_without_rocoto.sh +++ b/parm/run_without_rocoto.sh @@ -11,46 +11,52 @@ export MACHINE="orion" +export SCHED="slurm" export ACCOUNT="epic" 
export FORCING="era5" -export NET="landda" -export model_ver="v1.2.1" if [ "${MACHINE}" = "hera" ]; then export EXP_BASEDIR="/scratch2/NAGAPE/epic/{USER}/landda_test" export JEDI_INSTALL="/scratch2/NAGAPE/epic/UFS_Land-DA/jedi" export LANDDA_INPUTS="/scratch2/NAGAPE/epic/UFS_Land-DA/inputs" elif [ "${MACHINE}" = "orion" ]; then - export EXP_BASEDIR="/work/noaa/epic/{USER}/landda_test" + export EXP_BASEDIR="/work/noaa/epic/chjeon/landda_test" export JEDI_INSTALL="/work/noaa/epic/UFS_Land-DA/jedi" export LANDDA_INPUTS="/work/noaa/epic/UFS_Land-DA/inputs" fi export RES="96" export FCSTHR="24" -export NPROCS_ANA="6" -export NPROCS_FCST="6" +export NPROCS_ANALYSIS="6" +export NPROCS_FORECAST="6" export OBS_TYPES="GHCN" export fv3bundle_vn="psl_develop" export DAtype="letkfoi_snow" export SNOWDEPTHVAR="snwdph" export TSTUB="oro_C96.mx100" +export NET="landda" +export envir="test" +export model_ver="v1.2.1" +export HOMElandda="${EXP_BASEDIR}/land-DA_workflow" +export PTMP="${EXP_BASEDIR}/ptmp" +export COMROOT="${PTMP}/${envir}/com" +export DATAROOT="${PTMP}/${envir}/tmp" +export KEEPDATA="YES" export WORKDIR="${EXP_BASEDIR}/workdir/run_&FORCING;" -export CYCLEDIR="${EXP_BASEDIR}/land-DA_workflow" -export EXECdir="${CYCLEDIR}/exec" -export OUTDIR="${EXP_BASEDIR}/com/${NET}/${model_ver}/run_${FORCING}" export LOGDIR="${EXP_BASEDIR}/com/output/logs" export PATHRT="${EXP_BASEDIR}" - +export SLASH_ENSMEM_SUBDIR="" export ATMOS_FORC="${FORCING}" -export NPROC_JEDI="${NPROCS_ANA}" +export NPROC_JEDI="${NPROCS_ANALYSIS}" if [ "${FORCING}" = "era5" ]; then - export CTIME="2019122100" + export PDY="20191221" + export cyc="00" export PTIME="2019122000" export NTIME="2019122200" elif [ "${FORCING}" = "gswp3" ]; then - export CTIME="2000010300" + export PDY="20000103" + export cyc="00" export PTIME="2000010200" export NTIME="2000010400" fi @@ -58,7 +64,7 @@ fi # Call J-job scripts # echo " ... PREP_EXP running ... " -${CYCLEDIR}/jobs/JLANDDA_PREP_EXP +${HOMElandda}/parm/task_load_modules_run_jjob.sh "prep_exp" "${HOMElandda}" "${MACHINE}" export err=$? if [ $err = 0 ]; then echo " === PREP_EXP completed successfully === " @@ -68,7 +74,7 @@ else fi echo " ... PREP_OBS running ... " -${CYCLEDIR}/jobs/JLANDDA_PREP_OBS +${HOMElandda}/parm/task_load_modules_run_jjob.sh "prep_obs" "${HOMElandda}" "${MACHINE}" export err=$? if [ $err = 0 ]; then echo " === PREP_OBS completed successfully === " @@ -78,7 +84,7 @@ else fi echo " ... PREP_BMAT running ... " -${CYCLEDIR}/jobs/JLANDDA_PREP_BMAT +${HOMElandda}/parm/task_load_modules_run_jjob.sh "prep_bmat" "${HOMElandda}" "${MACHINE}" export err=$? if [ $err = 0 ]; then echo " === PREP_BMAT completed successfully === " @@ -87,23 +93,23 @@ else exit 3 fi -echo " ... RUN_ANA running ... " -${CYCLEDIR}/jobs/JLANDDA_RUN_ANA +echo " ... ANALYSIS running ... " +${HOMElandda}/parm/task_load_modules_run_jjob.sh "analysis" "${HOMElandda}" "${MACHINE}" export err=$? if [ $err = 0 ]; then - echo " === RUN_ANA completed successfully === " + echo " === Task ANALYSIS completed successfully === " else - echo " ERROR: RUN_ANA failed !!! " + echo " ERROR: ANALYSIS failed !!! " exit 4 fi -echo " ... RUN_FCST running ... " -${CYCLEDIR}/jobs/JLANDDA_RUN_FCST +echo " ... FORECAST running ... " +${HOMElandda}/parm/task_load_modules_run_jjob.sh "forecast" "${HOMElandda}" "${MACHINE}" export err=$? if [ $err = 0 ]; then - echo " === RUN_FCST completed successfully === " + echo " === Task FORECAST completed successfully === " else - echo " ERROR: RUN_FCST failed !!! 
" + echo " ERROR: FORECAST failed !!! " exit 5 fi diff --git a/parm/task_load_modules_run_jjob.sh b/parm/task_load_modules_run_jjob.sh new file mode 100755 index 00000000..7622fc85 --- /dev/null +++ b/parm/task_load_modules_run_jjob.sh @@ -0,0 +1,51 @@ +#!/bin/bash + +set -xue + +if [ "$#" -ne 3 ]; then + echo "Incorrect number of arguments specified: + Number of arguments specified: $# + +Usage: task_load_modules_run_jjob.sh task_name home_dir machine_name jjob_fn + +where the arguments are defined as follows: + task_name: + Task name for which this script will load modules and launch the J-job. + + home_dir: + Full path to the pachage home directory. + + machine_name: + Machine name in lowercase: e.g. hera/orion" +fi + +task_name="$1" +home_dir="$2" +machine_name="$3" + +machine="${machine_name,,}" +task_name_upper="${task_name^^}" + +module purge + +# Source version file for run +ver_fp="${home_dir}/versions/run.ver_${machine}" +if [ -f ${ver_fp} ]; then + . ${ver_fp} +else + echo "FATAL ERROR: version file does not exist !!!" +fi +module_dp="${home_dir}/modulefiles/tasks/${machine}" +module use "${module_dp}" + +# Load module file for a specific task +task_module_fn="task.${task_name}" +if [ -f "${module_dp}/${task_module_fn}.lua" ]; then + module load "${task_module_fn}" + module list +else + echo "FATAL ERROR: task module file does not exist !!!" +fi + +# Run J-job script +${home_dir}/jobs/JLANDDA_${task_name_upper} diff --git a/release.environment b/release.environment index 7eed494d..9c5f01ab 100644 --- a/release.environment +++ b/release.environment @@ -3,12 +3,12 @@ #Set defaults export LANDDAROOT=${LANDDAROOT:-`dirname $PWD`} export LANDDA_INPUTS=${LANDDA_INPUTS:-${LANDDAROOT}/inputs} -export CYCLEDIR=$(pwd) +export HOMElandda=$(pwd) export LANDDA_EXPTS=${LANDDA_EXPTS:-${LANDDAROOT}/landda_expts} if [[ ! $BASELINE =~ 'hera.internal' ]]; then export PYTHON=`/usr/bin/which python` fi -export BUILDDIR=${BUILDDIR:-${CYCLEDIR}/sorc/build} +export BUILDDIR=${BUILDDIR:-${HOMElandda}/sorc/build} #Change some variables if working with a container if [[ ${USE_SINGULARITY} =~ yes ]]; then @@ -20,7 +20,7 @@ if [[ ${USE_SINGULARITY} =~ yes ]]; then #Scripts that launch containerized versions of the executables are in $PWD/singularity/bin They should be called #from the host system to be run (e.g. 
mpiexec -n 6 $BUILDDIR/bin/fv3jedi_letkf.x ) export BUILDDIR=$PWD/sorc/build - export JEDI_EXECDIR=${CYCLEDIR}/sorc/build/bin + export JEDI_EXECDIR=${HOMElandda}/sorc/build/bin module try-load singularity export SINGULARITYBIN=`/usr/bin/which singularity` sed -i 's/singularity exec/${SINGULARITYBIN} exec/g' run_container_executable.sh diff --git a/jobs/JLANDDA_RUN_ANA b/scripts/exlandda_analysis.sh similarity index 93% rename from jobs/JLANDDA_RUN_ANA rename to scripts/exlandda_analysis.sh index 84b4896e..53ec35ed 100755 --- a/jobs/JLANDDA_RUN_ANA +++ b/scripts/exlandda_analysis.sh @@ -10,14 +10,14 @@ if [[ ${EXP_NAME} == "openloop" ]]; then else do_jedi="YES" SAVE_TILE="YES" - LANDDADIR=${CYCLEDIR}/sorc/DA_update + LANDDADIR=${HOMElandda}/sorc/DA_update fi TPATH=${LANDDA_INPUTS}/forcing/${ATMOS_FORC}/orog_files/ -YYYY=${CTIME:0:4} -MM=${CTIME:4:2} -DD=${CTIME:6:2} -HH=${CTIME:8:2} +YYYY=${PDY:0:4} +MM=${PDY:4:2} +DD=${PDY:6:2} +HH=${cyc} YYYP=${PTIME:0:4} MP=${PTIME:4:2} DP=${PTIME:6:2} @@ -26,22 +26,22 @@ HP=${PTIME:8:2} mem_ens="mem000" MEM_WORKDIR=${WORKDIR}/${mem_ens} -MEM_MODL_OUTDIR=${OUTDIR}/${mem_ens} +MEM_MODL_OUTDIR=${COMOUT}/${mem_ens} RSTRDIR=${MEM_WORKDIR} JEDIWORKDIR=${WORKDIR}/mem000/jedi FILEDATE=${YYYY}${MM}${DD}.${HH}0000 JEDI_STATICDIR=${JEDI_INSTALL}/jedi-bundle/fv3-jedi/test/Data JEDI_EXECDIR=${JEDI_INSTALL}/build/bin JEDI_EXEC=$JEDI_EXECDIR/fv3jedi_letkf.x -LOGDIR=${OUTDIR}/DA/logs -apply_incr_EXEC=${EXECdir}/apply_incr.exe +LOGDIR=${COMOUT}/DA/logs +apply_incr_EXEC=${EXEClandda}/apply_incr.exe SAVE_INCR="YES" KEEPJEDIDIR="YES" cd $MEM_WORKDIR # load modulefiles -BUILD_VERSION_FILE="${CYCLEDIR}/versions/build.ver_${MACHINE}" +BUILD_VERSION_FILE="${HOMElandda}/versions/build.ver_${MACHINE}" if [ -e ${BUILD_VERSION_FILE} ]; then . 
${BUILD_VERSION_FILE} fi @@ -206,7 +206,7 @@ fi # keep increments if [ $SAVE_INCR == "YES" ] && [ $do_DA == "YES" ]; then - cp ${JEDIWORKDIR}/${FILEDATE}.xainc.sfc_data.tile*.nc ${OUTDIR}/DA/jedi_incr/ + cp ${JEDIWORKDIR}/${FILEDATE}.xainc.sfc_data.tile*.nc ${COMOUT}/DA/jedi_incr/ fi # clean up diff --git a/jobs/JLANDDA_RUN_FCST b/scripts/exlandda_forecast.sh similarity index 88% rename from jobs/JLANDDA_RUN_FCST rename to scripts/exlandda_forecast.sh index bc1d0cce..e78c7b70 100755 --- a/jobs/JLANDDA_RUN_FCST +++ b/scripts/exlandda_forecast.sh @@ -10,15 +10,15 @@ if [[ ${EXP_NAME} == "openloop" ]]; then else do_jedi="YES" SAVE_TILE="YES" - LANDDADIR=${CYCLEDIR}/sorc/DA_update + LANDDADIR=${HOMElandda}/sorc/DA_update fi MACHINE_ID=${MACHINE} TPATH=${LANDDA_INPUTS}/forcing/${ATMOS_FORC}/orog_files/ -YYYY=${CTIME:0:4} -MM=${CTIME:4:2} -DD=${CTIME:6:2} -HH=${CTIME:8:2} +YYYY=${PDY:0:4} +MM=${PDY:4:2} +DD=${PDY:6:2} +HH=${cyc} YYYP=${PTIME:0:4} MP=${PTIME:4:2} DP=${PTIME:6:2} @@ -30,15 +30,15 @@ nHH=${NTIME:8:2} mem_ens="mem000" MEM_WORKDIR=${WORKDIR}/${mem_ens} -MEM_MODL_OUTDIR=${OUTDIR}/${mem_ens} +MEM_MODL_OUTDIR=${COMOUT}/${mem_ens} RSTRDIR=${MEM_WORKDIR} JEDIWORKDIR=${WORKDIR}/mem000/jedi FILEDATE=${YYYY}${MM}${DD}.${HH}0000 JEDI_STATICDIR=${JEDI_INSTALL}/jedi-bundle/fv3-jedi/test/Data JEDI_EXECDIR=${JEDI_INSTALL}/build/bin JEDI_EXEC=$JEDI_EXECDIR/fv3jedi_letkf.x -LOGDIR=${OUTDIR}/DA/logs -apply_incr_EXEC=${EXECdir}/apply_incr.exe +LOGDIR=${COMOUT}/DA/logs +apply_incr_EXEC=${EXEClandda}/apply_incr.exe SAVE_INCR="YES" KEEPJEDIDIR="YES" FREQ=$((${FCSTHR}*3600)) @@ -48,7 +48,7 @@ RHH=$((${FCSTHR}%24)) cd $MEM_WORKDIR # load modulefiles -BUILD_VERSION_FILE="${CYCLEDIR}/versions/build.ver_${MACHINE}" +BUILD_VERSION_FILE="${HOMElandda}/versions/build.ver_${MACHINE}" if [ -e ${BUILD_VERSION_FILE} ]; then . ${BUILD_VERSION_FILE} fi @@ -68,7 +68,7 @@ if [[ $do_jedi == "YES" && ${ATMOS_FORC} == "era5" ]]; then echo '************************************************' echo 'calling tile2vector' - cp ${CYCLEDIR}/parm/templates/template.tile2vector tile2vector.namelist + cp ${HOMElandda}/parm/templates/template.tile2vector tile2vector.namelist sed -i "s|LANDDA_INPUTS|${LANDDA_INPUTS}|g" tile2vector.namelist sed -i -e "s/XXYYYY/${YYYY}/g" tile2vector.namelist @@ -80,7 +80,7 @@ if [[ $do_jedi == "YES" && ${ATMOS_FORC} == "era5" ]]; then sed -i -e "s/XXTSTUB/${TSTUB}/g" tile2vector.namelist sed -i -e "s#XXTPATH#${TPATH}#g" tile2vector.namelist - ${EXECdir}/vector2tile_converter.exe tile2vector.namelist + ${EXEClandda}/vector2tile_converter.exe tile2vector.namelist if [[ $? != 0 ]]; then echo "tile2vector failed" exit @@ -96,7 +96,7 @@ if [[ $do_jedi == "YES" && ${ATMOS_FORC} == "gswp3" ]]; then echo '************************************************' echo 'calling tile2tile' - cp ${CYCLEDIR}/parm/templates/template.jedi2ufs jedi2ufs.namelist + cp ${HOMElandda}/parm/templates/template.jedi2ufs jedi2ufs.namelist sed -i "s|LANDDA_INPUTS|${LANDDA_INPUTS}|g" jedi2ufs.namelist sed -i -e "s/XXYYYY/${YYYY}/g" jedi2ufs.namelist @@ -108,7 +108,7 @@ if [[ $do_jedi == "YES" && ${ATMOS_FORC} == "gswp3" ]]; then sed -i -e "s/XXTSTUB/${TSTUB}/g" jedi2ufs.namelist sed -i -e "s#XXTPATH#${TPATH}#g" jedi2ufs.namelist - ${EXECdir}/tile2tile_converter.exe jedi2ufs.namelist + ${EXEClandda}/tile2tile_converter.exe jedi2ufs.namelist if [[ $? 
!= 0 ]]; then echo "tile2tile failed" exit @@ -133,7 +133,7 @@ if [[ $do_jedi == "YES" && ${ATMOS_FORC} == "era5" ]]; then echo 'running the forecast model' # update model namelist - cp ${CYCLEDIR}/parm/templates/template.ufs-noahMP.namelist.${ATMOS_FORC} ufs-land.namelist + cp ${HOMElandda}/parm/templates/template.ufs-noahMP.namelist.${ATMOS_FORC} ufs-land.namelist sed -i "s|LANDDA_INPUTS|${LANDDA_INPUTS}|g" ufs-land.namelist sed -i -e "s/XXYYYY/${YYYY}/g" ufs-land.namelist @@ -149,7 +149,7 @@ if [[ $do_jedi == "YES" && ${ATMOS_FORC} == "era5" ]]; then nt=$SLURM_NTASKS - ${MPIEXEC} -n 1 ${EXECdir}/ufsLand.exe + ${MPIEXEC} -n 1 ${EXEClandda}/ufsLand.exe fi # no error codes on exit from model, check for restart below instead @@ -160,11 +160,11 @@ if [[ $do_jedi == "YES" && ${ATMOS_FORC} == "gswp3" ]]; then TEST_NAME=datm_cdeps_lnd_gswp3 TEST_NAME_RST=datm_cdeps_lnd_gswp3_rst - PATHRT=${CYCLEDIR}/sorc/ufs_model.fd/tests + PATHRT=${HOMElandda}/sorc/ufs_model.fd/tests RT_COMPILER=${RT_COMPILER:-intel} ATOL="1e-7" - cp $CYCLEDIR/$TEST_NAME_RST ${PATHRT}/tests/$TEST_NAME_RST + cp $HOMElandda/$TEST_NAME_RST ${PATHRT}/tests/$TEST_NAME_RST source ${PATHRT}/rt_utils.sh source ${PATHRT}/default_vars.sh source ${PATHRT}/tests/$TEST_NAME_RST @@ -198,8 +198,8 @@ if [[ $do_jedi == "YES" && ${ATMOS_FORC} == "gswp3" ]]; then export layout_y=1 # FV3 executable: - cp ${EXECdir}/ufs_model ./ufs_model - cp ${CYCLEDIR}/fv3_run ./fv3_run + cp ${EXEClandda}/ufs_model ./ufs_model + cp ${HOMElandda}/fv3_run ./fv3_run if [[ $DATM_CDEPS = 'true' ]] || [[ $FV3 = 'true' ]] || [[ $S2S = 'true' ]]; then if [[ $HAFS = 'false' ]] || [[ $FV3 = 'true' && $HAFS = 'true' ]]; then @@ -247,11 +247,6 @@ fi ############################ # check model ouput (all members) -#mem_ens="mem000" - -#MEM_WORKDIR=${WORKDIR}/${mem_ens} -#MEM_MODL_OUTDIR=${OUTDIR}/${mem_ens} - if [[ ${ATMOS_FORC} == "era5" ]]; then if [[ -e ${MEM_WORKDIR}/ufs_land_restart.${nYYYY}-${nMM}-${nDD}_${nHH}-00-00.nc ]]; then cp ${MEM_WORKDIR}/ufs_land_restart.${nYYYY}-${nMM}-${nDD}_${nHH}-00-00.nc ${MEM_MODL_OUTDIR}/restarts/vector/ufs_land_restart_back.${nYYYY}-${nMM}-${nDD}_${nHH}-00-00.nc @@ -261,6 +256,6 @@ fi if [[ ${ATMOS_FORC} == "gswp3" ]]; then for tile in 1 2 3 4 5 6 do - cp ${OUTDIR}/${mem_ens}/noahmp/${TEST_NAME_RST}/ufs.cpld.lnd.out.${nYYYY}-${nMM}-${nDD}-00000.tile${tile}.nc ${MEM_MODL_OUTDIR}/restarts/tile/ufs_land_restart_back.${nYYYY}-${nMM}-${nDD}_${nHH}-00-00.tile${tile}.nc + cp ${COMOUT}/${mem_ens}/noahmp/${TEST_NAME_RST}/ufs.cpld.lnd.out.${nYYYY}-${nMM}-${nDD}-00000.tile${tile}.nc ${MEM_MODL_OUTDIR}/restarts/tile/ufs_land_restart_back.${nYYYY}-${nMM}-${nDD}_${nHH}-00-00.tile${tile}.nc done fi diff --git a/scripts/exlandda_prep_bmat.sh b/scripts/exlandda_prep_bmat.sh new file mode 100755 index 00000000..cfc85ebf --- /dev/null +++ b/scripts/exlandda_prep_bmat.sh @@ -0,0 +1,86 @@ +#!/bin/sh + +set -ex + +############################ +# copy restarts to workdir, convert to UFS tile for DA (all members) + +if [[ ${EXP_NAME} == "openloop" ]]; then + do_jedi="NO" +else + do_jedi="YES" + SAVE_TILE="YES" + LANDDADIR=${HOMElandda}/sorc/DA_update +fi + +TPATH=${LANDDA_INPUTS}/forcing/${ATMOS_FORC}/orog_files/ +YYYY=${PDY:0:4} +MM=${PDY:4:2} +DD=${PDY:6:2} +HH=${cyc} + +mem_ens="mem000" + +MEM_WORKDIR=${WORKDIR}/${mem_ens} +MEM_MODL_OUTDIR=${COMOUT}/${mem_ens} +RSTRDIR=${MEM_WORKDIR} +JEDIWORKDIR=${WORKDIR}/mem000/jedi +FILEDATE=${YYYY}${MM}${DD}.${HH}0000 + +cd $MEM_WORKDIR + +# load modulefiles 
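+# Note (editor's descriptive comment): the build.ver_${MACHINE} file sourced below only
+# exports *_ver version variables (see the versions/ changes at the end of this patch);
+# the modules themselves come from the modules.landda file that exlandda_prep_exp.sh
+# stages into $MEM_WORKDIR/modulefiles, which is why this script first cd's to $MEM_WORKDIR.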
+BUILD_VERSION_FILE="${HOMElandda}/versions/build.ver_${MACHINE}" +if [ -e ${BUILD_VERSION_FILE} ]; then + . ${BUILD_VERSION_FILE} +fi +module use modulefiles; module load modules.landda +PYTHON=$(/usr/bin/which python) + +#fv3bundle_vn=psl_develop +#DAtype=letkfoi_snow +#SNOWDEPTHVAR=snwdph +YAML_DA=construct +GFSv17="NO" +B=30 # back ground error std for LETKFOI +cd $JEDIWORKDIR + +################################################ +# 4. CREATE BACKGROUND ENSEMBLE (LETKFOI) +################################################ + +if [[ ${DAtype} == 'letkfoi_snow' ]]; then + + JEDI_EXEC="fv3jedi_letkf.x" + + if [ $GFSv17 == "YES" ]; then + SNOWDEPTHVAR="snodl" + # field overwrite file with GFSv17 variables. + cp ${LANDDADIR}/jedi/fv3-jedi/yaml_files/${fv3bundle_vn}/gfs-land-v17.yaml ${JEDIWORKDIR}/gfs-land-v17.yaml + else + SNOWDEPTHVAR="snwdph" + fi + # FOR LETKFOI, CREATE THE PSEUDO-ENSEMBLE + for ens in pos neg + do + if [ -e $JEDIWORKDIR/mem_${ens} ]; then + rm -r $JEDIWORKDIR/mem_${ens} + fi + mkdir -p $JEDIWORKDIR/mem_${ens} + for tile in 1 2 3 4 5 6 + do + cp ${JEDIWORKDIR}/${FILEDATE}.sfc_data.tile${tile}.nc ${JEDIWORKDIR}/mem_${ens}/${FILEDATE}.sfc_data.tile${tile}.nc + done + cp ${JEDIWORKDIR}/${FILEDATE}.coupler.res ${JEDIWORKDIR}/mem_${ens}/${FILEDATE}.coupler.res + done + + echo 'do_landDA: calling create ensemble' + + # using ioda mods to get a python version with netCDF4 + ${PYTHON} ${LANDDADIR}/letkf_create_ens.py $FILEDATE $SNOWDEPTHVAR $B + if [[ $? != 0 ]]; then + echo "letkf create failed" + exit 10 + fi + +fi diff --git a/scripts/exlandda_prep_exp.sh b/scripts/exlandda_prep_exp.sh new file mode 100755 index 00000000..39cc41df --- /dev/null +++ b/scripts/exlandda_prep_exp.sh @@ -0,0 +1,179 @@ +#!/bin/sh + +set -ex + +############################ +# copy restarts to workdir, convert to UFS tile for DA (all members) + +if [[ ${EXP_NAME} == "openloop" ]]; then + do_jedi="NO" +else + do_jedi="YES" + SAVE_TILE="YES" + LANDDADIR=${HOMElandda}/sorc/DA_update +fi + +echo ${LANDDA_INPUTS}, ${ATMOS_FORC} +TPATH=${LANDDA_INPUTS}/forcing/${ATMOS_FORC}/orog_files/ +YYYY=${PDY:0:4} +MM=${PDY:4:2} +DD=${PDY:6:2} +HH=${cyc} +YYYP=${PTIME:0:4} +MP=${PTIME:4:2} +DP=${PTIME:6:2} +HP=${PTIME:8:2} +mem_ens="mem000" + +MEM_WORKDIR=${WORKDIR}/${mem_ens} +MEM_MODL_OUTDIR=${COMOUT}/${mem_ens} +RSTRDIR=${MEM_WORKDIR} +JEDIWORKDIR=${WORKDIR}/mem000/jedi +FILEDATE=${YYYY}${MM}${DD}.${HH}0000 + +if [[ ! -e ${MEM_WORKDIR} ]]; then + mkdir -p ${MEM_WORKDIR} +fi +if [[ ! -e ${MEM_MODL_OUTDIR} ]]; then + mkdir -p ${MEM_MODL_OUTDIR} +fi + +mkdir -p $MEM_WORKDIR/modulefiles; cp ${HOMElandda}/modulefiles/build_${MACHINE}_intel.lua $MEM_WORKDIR/modulefiles/modules.landda.lua +cd $MEM_WORKDIR + +# load modulefiles +BUILD_VERSION_FILE="${HOMElandda}/versions/build.ver_${MACHINE}" +if [ -e ${BUILD_VERSION_FILE} ]; then + . ${BUILD_VERSION_FILE} +fi + +module use modulefiles; module load modules.landda + +if [[ $do_jedi == "YES" && $ATMOS_FORC == "era5" ]]; then + + # copy restarts into work directory + rst_in=${MEM_MODL_OUTDIR}/restarts/vector/ufs_land_restart_back.${YYYY}-${MM}-${DD}_${HH}-00-00.nc + if [[ ! 
-e ${rst_in} ]]; then + rst_in=${LANDDA_INPUTS}/restarts/${ATMOS_FORC}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.nc + fi + rst_out=${MEM_WORKDIR}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.nc + cp ${rst_in} ${rst_out} + + echo '************************************************' + echo 'calling vector2tile' + + export MEM_WORKDIR + + # update vec2tile and tile2vec namelists + cp ${HOMElandda}/parm/templates/template.vector2tile vector2tile.namelist + + sed -i "s|LANDDA_INPUTS|${LANDDA_INPUTS}|g" vector2tile.namelist + sed -i -e "s/XXYYYY/${YYYY}/g" vector2tile.namelist + sed -i -e "s/XXMM/${MM}/g" vector2tile.namelist + sed -i -e "s/XXDD/${DD}/g" vector2tile.namelist + sed -i -e "s/XXHH/${HH}/g" vector2tile.namelist + sed -i -e "s/XXHH/${HH}/g" vector2tile.namelist + sed -i -e "s/MODEL_FORCING/${ATMOS_FORC}/g" vector2tile.namelist + sed -i -e "s/XXRES/${RES}/g" vector2tile.namelist + sed -i -e "s/XXTSTUB/${TSTUB}/g" vector2tile.namelist + sed -i -e "s#XXTPATH#${TPATH}#g" vector2tile.namelist + + # submit vec2tile + echo '************************************************' + echo 'calling vector2tile' + + ${EXEClandda}/vector2tile_converter.exe vector2tile.namelist + if [[ $? != 0 ]]; then + echo "vec2tile failed" + exit + fi +fi # vector2tile for DA + +if [[ $do_jedi == "YES" && $ATMOS_FORC == "gswp3" ]]; then + + echo '************************************************' + echo 'calling tile2tile' + + export MEM_WORKDIR + + # copy restarts into work directory + for tile in 1 2 3 4 5 6 + do + rst_in=${MEM_MODL_OUTDIR}/restarts/tile/ufs_land_restart_back.${YYYY}-${MM}-${DD}_${HH}-00-00.nc + if [[ ! -e ${rst_in} ]]; then + rst_in=${LANDDA_INPUTS}/restarts/${ATMOS_FORC}/ufs.cpld.lnd.out.${YYYY}-${MM}-${DD}-00000.tile${tile}.nc + fi + rst_out=${MEM_WORKDIR}/ufs_land_restart.${YYYY}-${MM}-${DD}_${HH}-00-00.tile${tile}.nc + cp ${rst_in} ${rst_out} + done + + # update tile2tile namelist + cp ${HOMElandda}/parm/templates/template.ufs2jedi ufs2jedi.namelist + + sed -i "s|LANDDA_INPUTS|${LANDDA_INPUTS}|g" ufs2jedi.namelist + sed -i -e "s/XXYYYY/${YYYY}/g" ufs2jedi.namelist + sed -i -e "s/XXMM/${MM}/g" ufs2jedi.namelist + sed -i -e "s/XXDD/${DD}/g" ufs2jedi.namelist + sed -i -e "s/XXHH/${HH}/g" ufs2jedi.namelist + sed -i -e "s/XXHH/${HH}/g" ufs2jedi.namelist + sed -i -e "s/MODEL_FORCING/${ATMOS_FORC}/g" ufs2jedi.namelist + sed -i -e "s/XXRES/${RES}/g" ufs2jedi.namelist + sed -i -e "s/XXTSTUB/${TSTUB}/g" ufs2jedi.namelist + sed -i -e "s#XXTPATH#${TPATH}#g" ufs2jedi.namelist + + # submit tile2tile + ${EXEClandda}/tile2tile_converter.exe ufs2jedi.namelist + if [[ $? != 0 ]]; then + echo "tile2tile failed" + exit + fi +fi # tile2tile for DA + +if [[ $do_jedi == "YES" ]]; then + if [[ ! -e ${COMOUT}/DA ]]; then + mkdir -p ${COMOUT}/DA/jedi_incr + mkdir -p ${COMOUT}/DA/logs + mkdir -p ${COMOUT}/DA/hofx + fi + if [[ ! -e $JEDIWORKDIR ]]; then + mkdir -p $JEDIWORKDIR + fi + cd $JEDIWORKDIR + + if [[ ! 
-e ${JEDIWORKDIR}/output ]]; then + ln -s ${COMOUT} ${JEDIWORKDIR}/output + fi + + if [[ $SAVE_TILE == "YES" ]]; then + for tile in 1 2 3 4 5 6 + do + cp ${RSTRDIR}/${FILEDATE}.sfc_data.tile${tile}.nc ${RSTRDIR}/${FILEDATE}.sfc_data_back.tile${tile}.nc + done + fi + + #stage restarts for applying JEDI update (files will get directly updated) + for tile in 1 2 3 4 5 6 + do + ln -fs ${RSTRDIR}/${FILEDATE}.sfc_data.tile${tile}.nc ${JEDIWORKDIR}/${FILEDATE}.sfc_data.tile${tile}.nc + done + + cres_file=${JEDIWORKDIR}/${FILEDATE}.coupler.res + + if [[ -e ${RSTRDIR}/${FILEDATE}.coupler.res ]]; then + ln -sf ${RSTRDIR}/${FILEDATE}.coupler.res $cres_file + else # if not present, need to create coupler.res for JEDI + cp ${LANDDADIR}/template.coupler.res $cres_file + + sed -i -e "s/XXYYYY/${YYYY}/g" $cres_file + sed -i -e "s/XXMM/${MM}/g" $cres_file + sed -i -e "s/XXDD/${DD}/g" $cres_file + sed -i -e "s/XXHH/${HH}/g" $cres_file + + sed -i -e "s/XXYYYP/${YYYP}/g" $cres_file + sed -i -e "s/XXMP/${MP}/g" $cres_file + sed -i -e "s/XXDP/${DP}/g" $cres_file + sed -i -e "s/XXHP/${HP}/g" $cres_file + + fi +fi # do_jedi setup + diff --git a/scripts/exlandda_prep_obs.sh b/scripts/exlandda_prep_obs.sh new file mode 100755 index 00000000..dad81427 --- /dev/null +++ b/scripts/exlandda_prep_obs.sh @@ -0,0 +1,68 @@ +#!/bin/sh + +set -ex + +############################ +# copy restarts to workdir, convert to UFS tile for DA (all members) + +if [[ ${EXP_NAME} == "openloop" ]]; then + do_jedi="NO" +else + do_jedi="YES" + SAVE_TILE="YES" + LANDDADIR=${HOMElandda}/sorc/DA_update +fi + +TPATH=${LANDDA_INPUTS}/forcing/${ATMOS_FORC}/orog_files/ +YYYY=${PDY:0:4} +MM=${PDY:4:2} +DD=${PDY:6:2} +HH=${cyc} +YYYP=${PTIME:0:4} +MP=${PTIME:4:2} +DP=${PTIME:6:2} +HP=${PTIME:8:2} + +mem_ens="mem000" + +MEM_WORKDIR=${WORKDIR}/${mem_ens} +MEM_MODL_OUTDIR=${COMOUT}/${mem_ens} +RSTRDIR=${MEM_WORKDIR} +JEDIWORKDIR=${WORKDIR}/mem000/jedi +FILEDATE=${YYYY}${MM}${DD}.${HH}0000 + +cd $JEDIWORKDIR + +# load modulefiles +#module use modulefiles; module load modules.landda + +OBSDIR=${LANDDA_INPUTS}/DA +################################################ +# 2. PREPARE OBS FILES +################################################ +for obs in "${OBS_TYPES[@]}"; do + # get the. obs file name + if [ ${obs} == "GTS" ]; then + obsfile=$OBSDIR/snow_depth/GTS/data_proc/${YYYY}${MM}/adpsfc_snow_${YYYY}${MM}${DD}${HH}.nc4 + # GHCN are time-stamped at 18. If assimilating at 00, need to use previous day's obs, so that + # obs are within DA window. 
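+  # Illustrative example (values taken from the gswp3 test configuration in this patch,
+  # not computed by this script): with PDY=20000103, cyc=00, and PTIME=2000010200, the
+  # gswp3/GHCN branch below resolves to
+  # ${OBSDIR}/snow_depth/GHCN/data_proc/v3/2000/fake_ghcn_snwd_ioda_20000102.nc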
+ elif [ $ATMOS_FORC == "era5" ] && [ ${obs} == "GHCN" ]; then + obsfile=$OBSDIR/snow_depth/GHCN/data_proc/v3/${YYYY}/ghcn_snwd_ioda_${YYYP}${MP}${DP}.nc + elif [ $ATMOS_FORC == "gswp3" ] && [ ${obs} == "GHCN" ]; then + obsfile=$OBSDIR/snow_depth/GHCN/data_proc/v3/${YYYY}/fake_ghcn_snwd_ioda_${YYYP}${MP}${DP}.nc + elif [ ${obs} == "SYNTH" ]; then + obsfile=$OBSDIR/synthetic_noahmp/IODA.synthetic_gswp_obs.${YYYY}${MM}${DD}${HH}.nc + else + echo "do_landDA: Unknown obs type requested ${obs}, exiting" + exit 1 + fi + + # check obs are available + if [[ -e $obsfile ]]; then + echo "do_landDA: $i observations found: $obsfile" + ln -fs $obsfile ${obs}_${YYYY}${MM}${DD}${HH}.nc + else + echo "${obs} observations not found: $obsfile" + # JEDI_TYPES[$ii]="SKIP" + fi +done diff --git a/settings_DA_test b/settings_DA_test index a8bab44c..103f8655 100644 --- a/settings_DA_test +++ b/settings_DA_test @@ -13,7 +13,7 @@ # if calling from submit_cycle.sh, do not change these three: JEDIWORKDIR=${WORKDIR}/jedi/ -LANDDADIR=${CYCLEDIR}/sorc/DA_update/ # directory where do_landDA.sh script is +LANDDADIR=${HOMElandda}/sorc/DA_update/ # directory where do_landDA.sh script is RSTRDIR=${MEM_WORKDIR} ############################ diff --git a/submit_cycle.sh b/submit_cycle.sh index c4f38dfc..a9dcd1e2 100755 --- a/submit_cycle.sh +++ b/submit_cycle.sh @@ -21,7 +21,7 @@ while [ $date_count -lt $cycles_per_job ]; do if [ $THISDATE -ge $ENDDATE ]; then echo "All done, at date ${THISDATE}" >> $logfile - cd $CYCLEDIR + cd $HOMElandda if [ $KEEPWORKDIR == "NO" ]; then rm -rf $WORKDIR fi @@ -83,7 +83,7 @@ while [ $date_count -lt $cycles_per_job ]; do export MEM_WORKDIR # update vec2tile and tile2vec namelists - cp ${CYCLEDIR}/parm/templates/template.vector2tile vector2tile.namelist + cp ${HOMElandda}/parm/templates/template.vector2tile vector2tile.namelist sed -i "s|LANDDA_INPUTS|${LANDDA_INPUTS}|g" vector2tile.namelist sed -i -e "s/XXYYYY/${YYYY}/g" vector2tile.namelist @@ -101,7 +101,7 @@ while [ $date_count -lt $cycles_per_job ]; do echo 'calling vector2tile' if [[ $BASELINE =~ 'hera.internal' ]]; then - source ${CYCLEDIR}/land_mods + source ${HOMElandda}/land_mods fi $vec2tileexec vector2tile.namelist if [[ $? != 0 ]]; then @@ -130,7 +130,7 @@ while [ $date_count -lt $cycles_per_job ]; do done # update tile2tile namelist - cp ${CYCLEDIR}/parm/templates/template.ufs2jedi ufs2jedi.namelist + cp ${HOMElandda}/parm/templates/template.ufs2jedi ufs2jedi.namelist sed -i "s|LANDDA_INPUTS|${LANDDA_INPUTS}|g" ufs2jedi.namelist sed -i -e "s/XXYYYY/${YYYY}/g" ufs2jedi.namelist @@ -146,7 +146,7 @@ while [ $date_count -lt $cycles_per_job ]; do # submit tile2tile if [[ $BASELINE =~ 'hera.internal' ]]; then - source ${CYCLEDIR}/land_mods + source ${HOMElandda}/land_mods fi $tile2tileexec ufs2jedi.namelist if [[ $? != 0 ]]; then @@ -167,8 +167,8 @@ while [ $date_count -lt $cycles_per_job ]; do cd $WORKDIR export THISDATE - export EXECdir="${CYCLEDIR}/exec" - $DAscript ${CYCLEDIR}/$DA_config + export EXECdir="${HOMElandda}/exec" + $DAscript ${HOMElandda}/$DA_config if [[ $? 
!= 0 ]]; then echo "land DA script failed" exit @@ -191,10 +191,10 @@ while [ $date_count -lt $cycles_per_job ]; do echo 'calling tile2vector' if [[ $BASELINE =~ 'hera.internal' ]]; then - source ${CYCLEDIR}/land_mods + source ${HOMElandda}/land_mods fi - cp ${CYCLEDIR}/parm/templates/template.tile2vector tile2vector.namelist + cp ${HOMElandda}/parm/templates/template.tile2vector tile2vector.namelist sed -i "s|LANDDA_INPUTS|${LANDDA_INPUTS}|g" tile2vector.namelist sed -i -e "s/XXYYYY/${YYYY}/g" tile2vector.namelist @@ -222,10 +222,10 @@ while [ $date_count -lt $cycles_per_job ]; do echo 'calling tile2tile' if [[ $BASELINE =~ 'hera.internal' ]]; then - source ${CYCLEDIR}/land_mods + source ${HOMElandda}/land_mods fi - cp ${CYCLEDIR}/parm/templates/template.jedi2ufs jedi2ufs.namelist + cp ${HOMElandda}/parm/templates/template.jedi2ufs jedi2ufs.namelist sed -i "s|LANDDA_INPUTS|${LANDDA_INPUTS}|g" jedi2ufs.namelist sed -i -e "s/XXYYYY/${YYYY}/g" jedi2ufs.namelist @@ -261,7 +261,7 @@ while [ $date_count -lt $cycles_per_job ]; do set -x # update model namelist - cp ${CYCLEDIR}/parm/templates/template.ufs-noahMP.namelist.${atmos_forc} ufs-land.namelist + cp ${HOMElandda}/parm/templates/template.ufs-noahMP.namelist.${atmos_forc} ufs-land.namelist sed -i "s|LANDDA_INPUTS|${LANDDA_INPUTS}|g" ufs-land.namelist sed -i -e "s/XXYYYY/${YYYY}/g" ufs-land.namelist @@ -277,7 +277,7 @@ while [ $date_count -lt $cycles_per_job ]; do nt=$SLURM_NTASKS if [[ $BASELINE =~ 'hera.internal' ]]; then - source ${CYCLEDIR}/land_mods + source ${HOMElandda}/land_mods fi if [[ $BASELINE =~ 'hera.internal' ]]; then @@ -296,11 +296,11 @@ while [ $date_count -lt $cycles_per_job ]; do TEST_NAME=datm_cdeps_lnd_gswp3 TEST_NAME_RST=datm_cdeps_lnd_gswp3_rst - PATHRT=${CYCLEDIR}/sorc/ufs_model.fd/tests + PATHRT=${HOMElandda}/sorc/ufs_model.fd/tests RT_COMPILER=${RT_COMPILER:-intel} ATOL="1e-7" - cp $CYCLEDIR/$TEST_NAME_RST ${PATHRT}/tests/$TEST_NAME_RST + cp $HOMElandda/$TEST_NAME_RST ${PATHRT}/tests/$TEST_NAME_RST source ${PATHRT}/detect_machine.sh source ${PATHRT}/rt_utils.sh source ${PATHRT}/default_vars.sh @@ -335,8 +335,8 @@ while [ $date_count -lt $cycles_per_job ]; do export layout_y=1 # FV3 executable: - cp ${CYCLEDIR}/exec/ufs_model . - cp ${CYCLEDIR}/fv3_run ./fv3_run + cp ${HOMElandda}/exec/ufs_model . 
+ cp ${HOMElandda}/fv3_run ./fv3_run if [[ $DATM_CDEPS = 'true' ]] || [[ $FV3 = 'true' ]] || [[ $S2S = 'true' ]]; then if [[ $HAFS = 'false' ]] || [[ $FV3 = 'true' && $HAFS = 'true' ]]; then @@ -412,7 +412,7 @@ done # date_count -lt cycles_per_job if [ $THISDATE -lt $ENDDATE ]; then echo "STARTDATE=${THISDATE}" > ${analdate} echo "ENDDATE=${ENDDATE}" >> ${analdate} - cd ${CYCLEDIR} - sbatch ${CYCLEDIR}/submit_cycle.sh + cd ${HOMElandda} + sbatch ${HOMElandda}/submit_cycle.sh fi diff --git a/versions/build.ver_hera b/versions/build.ver_hera index ade8f887..572e5edc 100644 --- a/versions/build.ver_hera +++ b/versions/build.ver_hera @@ -20,6 +20,7 @@ export mapl_ver="2.22.0-esmf-8.3.0b09" export netcdf_c_ver="4.9.0" export netcdf_fortran_ver="4.6.0" export pio_ver="2.5.9" +export prod_util_ver="1.2.2" export sp_ver="2.3.3" export stack_impi_ver="2021.5.1" export stack_intel_ver="2021.5.0" diff --git a/versions/build.ver_orion b/versions/build.ver_orion index d78ddc63..b2ebab8a 100644 --- a/versions/build.ver_orion +++ b/versions/build.ver_orion @@ -20,6 +20,7 @@ export mapl_ver="2.22.0-esmf-8.3.0b09" export netcdf_c_ver="4.9.0" export netcdf_fortran_ver="4.6.0" export pio_ver="2.5.9" +export prod_util_ver="1.2.2" export sp_ver="2.3.3" export stack_impi_ver="2021.5.1" export stack_intel_ver="2022.0.2"
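
# ---------------------------------------------------------------------------
# Illustrative usage (editor's sketch, not part of the patch): after this change,
# each J-job is launched through the new wrapper rather than executed directly,
# both by Rocoto (via the "command:" entries in the land_analysis_*.yaml files
# above) and by parm/run_without_rocoto.sh. Assuming HOMElandda points at the
# cloned land-DA_workflow directory and MACHINE is a supported platform name
# (e.g. hera or orion):
#
#   ${HOMElandda}/parm/task_load_modules_run_jjob.sh "prep_exp"  "${HOMElandda}" "${MACHINE}"
#   ${HOMElandda}/parm/task_load_modules_run_jjob.sh "prep_obs"  "${HOMElandda}" "${MACHINE}"
#   ${HOMElandda}/parm/task_load_modules_run_jjob.sh "prep_bmat" "${HOMElandda}" "${MACHINE}"
#   ${HOMElandda}/parm/task_load_modules_run_jjob.sh "analysis"  "${HOMElandda}" "${MACHINE}"
#   ${HOMElandda}/parm/task_load_modules_run_jjob.sh "forecast"  "${HOMElandda}" "${MACHINE}"
#
# The wrapper sources versions/run.ver_${MACHINE}, loads
# modulefiles/tasks/${MACHINE}/task.<task_name>, and then runs jobs/JLANDDA_<TASK_NAME>.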