From 9a61cb018d6dcb52488ce6692d3509172d895533 Mon Sep 17 00:00:00 2001 From: Andrew Collard Date: Tue, 20 Aug 2024 21:19:52 +0000 Subject: [PATCH 01/22] Add GSI script files into workflow scripts directory --- .gitignore | 6 - jobs/JGDAS_ATMOS_ANALYSIS_DIAG | 156 ++++ jobs/JGLOBAL_ATMOS_ANALYSIS | 193 ++++ jobs/JGLOBAL_ATMOS_ANALYSIS_CALC | 167 ++++ scripts/exglobal_atmos_analysis.sh | 1107 +++++++++++++++++++++++ scripts/exglobal_atmos_analysis_calc.sh | 214 +++++ scripts/exglobal_diag.sh | 295 ++++++ 7 files changed, 2132 insertions(+), 6 deletions(-) create mode 100755 jobs/JGDAS_ATMOS_ANALYSIS_DIAG create mode 100755 jobs/JGLOBAL_ATMOS_ANALYSIS create mode 100755 jobs/JGLOBAL_ATMOS_ANALYSIS_CALC create mode 100755 scripts/exglobal_atmos_analysis.sh create mode 100755 scripts/exglobal_atmos_analysis_calc.sh create mode 100755 scripts/exglobal_diag.sh diff --git a/.gitignore b/.gitignore index 8cb483c5a0..0f38550c05 100644 --- a/.gitignore +++ b/.gitignore @@ -94,7 +94,6 @@ sorc/wafs_setmissing.fd # Ignore scripts from externals #------------------------------ # jobs symlinks -jobs/JGDAS_ATMOS_ANALYSIS_DIAG jobs/JGDAS_ATMOS_CHGRES_FORENKF jobs/JGDAS_ATMOS_GLDAS jobs/JGDAS_ATMOS_VERFOZN @@ -114,8 +113,6 @@ jobs/JGFS_ATMOS_WAFS_BLENDING_0P25 jobs/JGFS_ATMOS_WAFS_GCIP jobs/JGFS_ATMOS_WAFS_GRIB2 jobs/JGFS_ATMOS_WAFS_GRIB2_0P25 -jobs/JGLOBAL_ATMOS_ANALYSIS -jobs/JGLOBAL_ATMOS_ANALYSIS_CALC jobs/JGLOBAL_ATMOS_NCEPPOST jobs/JGLOBAL_ATMOS_POST_MANAGER # scripts symlinks @@ -140,10 +137,7 @@ scripts/exgfs_atmos_wafs_gcip.sh scripts/exgfs_atmos_wafs_grib.sh scripts/exgfs_atmos_wafs_grib2.sh scripts/exgfs_atmos_wafs_grib2_0p25.sh -scripts/exglobal_atmos_analysis.sh -scripts/exglobal_atmos_analysis_calc.sh scripts/exglobal_atmos_pmgr.sh -scripts/exglobal_diag.sh # ush symlinks ush/calcanl_gfs.py ush/calcinc_gfs.py diff --git a/jobs/JGDAS_ATMOS_ANALYSIS_DIAG b/jobs/JGDAS_ATMOS_ANALYSIS_DIAG new file mode 100755 index 0000000000..b6e5c1b2de --- /dev/null +++ b/jobs/JGDAS_ATMOS_ANALYSIS_DIAG @@ -0,0 +1,156 @@ +#!/bin/bash +set -x + +export RUN_ENVIR=${RUN_ENVIR:-"nco"} +export PS4='$SECONDS + ' +date + + +############################# +# Source relevant config files +############################# +export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} +configs="base anal analdiag" +config_path=${EXPDIR:-$NWROOT/gfs.${gfs_ver}/parm/config} +for config in $configs; do + . $config_path/config.$config + status=$? + [[ $status -ne 0 ]] && exit $status +done + + +########################################## +# Source machine runtime environment +########################################## +. $HOMEgfs/env/${machine}.env anal +status=$? +[[ $status -ne 0 ]] && exit $status + + +############################################## +# Obtain unique process id (pid) and make temp directory +############################################## +export pid=${pid:-$$} +export outid=${outid:-"LL$job"} + +export DATA=${DATA:-${DATAROOT}/${jobid:?}} +mkdir -p $DATA +cd $DATA + + +############################################## +# Run setpdy and initialize PDY variables +############################################## +export cycle="t${cyc}z" +setpdy.sh +. 
./PDY + + +############################################## +# Determine Job Output Name on System +############################################## +export pgmout="OUTPUT.${pid}" +export pgmerr=errfile + + +############################################## +# Set variables used in the script +############################################## +export CDATE=${CDATE:-${PDY}${cyc}} +export CDUMP=${CDUMP:-${RUN:-"gfs"}} +export COMPONENT=${COMPONENT:-atmos} +export DO_CALC_ANALYSIS=${DO_CALC_ANALYSIS:-"YES"} + + +############################################## +# Begin JOB SPECIFIC work +############################################## + +GDATE=$($NDATE -$assim_freq $CDATE) +gPDY=$(echo $GDATE | cut -c1-8) +gcyc=$(echo $GDATE | cut -c9-10) +GDUMP=${GDUMP:-"gdas"} + +export OPREFIX="${CDUMP}.t${cyc}z." +export GPREFIX="${GDUMP}.t${gcyc}z." +export APREFIX="${CDUMP}.t${cyc}z." +export GSUFFIX=${GSUFFIX:-$SUFFIX} +export ASUFFIX=${ASUFFIX:-$SUFFIX} + + +if [ $RUN_ENVIR = "nco" -o ${ROTDIR_DUMP:-NO} = "YES" ]; then + export COMIN=${COMIN:-$ROTDIR/$RUN.$PDY/$cyc/$COMPONENT} + export COMOUT=${COMOUT:-$ROTDIR/$RUN.$PDY/$cyc/$COMPONENT} + export COMIN_OBS=${COMIN_OBS:-$(compath.py ${envir}/obsproc/${obsproc_ver})/$RUN.$PDY/$cyc/$COMPONENT} + export COMIN_GES_OBS=${COMIN_GES_OBS:-$(compath.py ${envir}/obsproc/${obsproc_ver})/$GDUMP.$gPDY/$gcyc/$COMPONENT} +else + export COMOUT="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" + export COMIN_OBS="$DMPDIR/$CDUMP.$PDY/$cyc/$COMPONENT" + export COMIN_GES_OBS="$DMPDIR/$GDUMP.$gPDY/$gcyc/$COMPONENT" +fi +mkdir -m 775 -p $COMOUT +# COMIN_GES and COMIN_GES_ENS are used in script +export COMIN_GES="$ROTDIR/$GDUMP.$gPDY/$gcyc/$COMPONENT" +export COMIN_GES_ENS="$ROTDIR/enkfgdas.$gPDY/$gcyc/$COMPONENT" + + +export ATMGES="$COMIN_GES/${GPREFIX}atmf006${GSUFFIX}" +if [ ! -f $ATMGES ]; then + echo "FATAL ERROR: FILE MISSING: ATMGES = $ATMGES" + exit 1 +fi + + +if [ $DOHYBVAR = "YES" ]; then + export ATMGES_ENSMEAN="$COMIN_GES_ENS/${GPREFIX}atmf006.ensmean$GSUFFIX" + if [ ! -f $ATMGES_ENSMEAN ]; then + echo "FATAL ERROR: FILE MISSING: ATMGES_ENSMEAN = $ATMGES_ENSMEAN" + exit 2 + fi +fi + + + +# Update surface fields with global_cycle +export DOGCYCLE=${DOGCYCLE:-"YES"} + + +# Generate Gaussian surface analysis +export DOGAUSFCANL=${DOGAUSFCANL:-"YES"} + + +############################################################### +# Run relevant script +env +echo "HAS BEGUN on $(hostname)" +$LOGSCRIPT + + +${ANALDIAGSH:-$SCRgfs/exglobal_diag.sh} +status=$? +[[ $status -ne 0 ]] && exit $status + + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [ -e "$pgmout" ] ; then + cat $pgmout +fi + + +echo "ENDED NORMALLY." 
+ + +########################################## +# Remove the Temporary working directory +########################################## +cd $DATAROOT +[[ $KEEPDATA = "NO" ]] && rm -rf $DATA + +date +exit 0 diff --git a/jobs/JGLOBAL_ATMOS_ANALYSIS b/jobs/JGLOBAL_ATMOS_ANALYSIS new file mode 100755 index 0000000000..cafa8f3c92 --- /dev/null +++ b/jobs/JGLOBAL_ATMOS_ANALYSIS @@ -0,0 +1,193 @@ +#!/bin/bash +set -x + +export RUN_ENVIR=${RUN_ENVIR:-"nco"} +export PS4='$SECONDS + ' +date + + +############################# +# Source relevant config files +############################# +export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} +configs="base anal" +config_path=${EXPDIR:-$NWROOT/gfs.${gfs_ver}/parm/config} +for config in $configs; do + . $config_path/config.$config + status=$? + [[ $status -ne 0 ]] && exit $status +done + + +########################################## +# Source machine runtime environment +########################################## +. $HOMEgfs/env/${machine}.env anal +status=$? +[[ $status -ne 0 ]] && exit $status + + +############################################## +# Obtain unique process id (pid) and make temp directory +############################################## +export pid=${pid:-$$} +export outid=${outid:-"LL$job"} + +export DATA=${DATA:-${DATAROOT}/${jobid:?}} +mkdir -p $DATA +cd $DATA + + +############################################## +# Run setpdy and initialize PDY variables +############################################## +export cycle="t${cyc}z" +setpdy.sh +. ./PDY + + +############################################## +# Determine Job Output Name on System +############################################## +export pgmout="OUTPUT.${pid}" +export pgmerr=errfile + + +############################################## +# Set variables used in the script +############################################## +export CDATE=${CDATE:-${PDY}${cyc}} +export CDUMP=${CDUMP:-${RUN:-"gfs"}} +export COMPONENT=${COMPONENT:-atmos} +export DO_CALC_ANALYSIS=${DO_CALC_ANALYSIS:-"YES"} + + +############################################## +# Begin JOB SPECIFIC work +############################################## + +GDATE=$($NDATE -$assim_freq $CDATE) +gPDY=$(echo $GDATE | cut -c1-8) +gcyc=$(echo $GDATE | cut -c9-10) +GDUMP=${GDUMP:-"gdas"} + +export OPREFIX="${CDUMP}.t${cyc}z." +export GPREFIX="${GDUMP}.t${gcyc}z." +export APREFIX="${CDUMP}.t${cyc}z." +export GSUFFIX=${GSUFFIX:-$SUFFIX} +export ASUFFIX=${ASUFFIX:-$SUFFIX} + + +if [ $RUN_ENVIR = "nco" -o ${ROTDIR_DUMP:-NO} = "YES" ]; then + export COMIN=${COMIN:-$ROTDIR/$RUN.$PDY/$cyc/$COMPONENT} + export COMOUT=${COMOUT:-$ROTDIR/$RUN.$PDY/$cyc/$COMPONENT} + export COMIN_OBS=${COMIN_OBS:-$(compath.py ${envir}/obsproc/${obsproc_ver})/$RUN.$PDY/$cyc/$COMPONENT} + export COMIN_GES_OBS=${COMIN_GES_OBS:-$(compath.py ${envir}/obsproc/${obsproc_ver})/$GDUMP.$gPDY/$gcyc/$COMPONENT} +else + export COMOUT="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" + export COMIN_OBS="$DMPDIR/$CDUMP.$PDY/$cyc/$COMPONENT" + export COMIN_GES_OBS="$DMPDIR/$GDUMP.$gPDY/$gcyc/$COMPONENT" +fi +mkdir -m 775 -p $COMOUT +# COMIN_GES and COMIN_GES_ENS are used in script +export COMIN_GES="$ROTDIR/$GDUMP.$gPDY/$gcyc/$COMPONENT" +export COMIN_GES_ENS="$ROTDIR/enkfgdas.$gPDY/$gcyc/$COMPONENT" + + +export ATMGES="$COMIN_GES/${GPREFIX}atmf006${GSUFFIX}" +if [ ! -f $ATMGES ]; then + echo "FATAL ERROR: FILE MISSING: ATMGES = $ATMGES" + exit 1 +fi + + +# Get LEVS +if [ ${GSUFFIX} = ".nc" ]; then + export LEVS=$($NCLEN $ATMGES pfull) + status=$? 
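+ # netCDF guess: getncdimlen returns the length of the named dimension,
+ # so 'pfull' here gives the number of model layers (LEVS).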
+else + export LEVS=$($NEMSIOGET $ATMGES dimz | awk '{print $2}') + status=$? +fi +[[ $status -ne 0 ]] && exit $status + + +if [ $DOHYBVAR = "YES" ]; then + export ATMGES_ENSMEAN="$COMIN_GES_ENS/${GPREFIX}atmf006.ensmean$GSUFFIX" + if [ ! -f $ATMGES_ENSMEAN ]; then + echo "FATAL ERROR: FILE MISSING: ATMGES_ENSMEAN = $ATMGES_ENSMEAN" + exit 2 + fi +fi + + +# Link observational data +export PREPQC="${COMIN_OBS}/${OPREFIX}prepbufr" +if [ ! -f $PREPQC ]; then + echo "WARNING: Global PREPBUFR FILE $PREPQC MISSING" +fi +export PREPQCPF="${COMIN_OBS}/${OPREFIX}prepbufr.acft_profiles" +export TCVITL="${COMOUT}/${OPREFIX}syndata.tcvitals.tm00" +[[ $DONST = "YES" ]] && export NSSTBF="${COMIN_OBS}/${OPREFIX}nsstbufr" + + +# Update surface fields with global_cycle +export DOGCYCLE=${DOGCYCLE:-"YES"} + + +# Generate Gaussian surface analysis +export DOGAUSFCANL=${DOGAUSFCANL:-"YES"} + + +# Copy fix file for obsproc +if [ $RUN = "gfs" ]; then + mkdir -p $ROTDIR/fix + cp $FIXgsi/prepobs_errtable.global $ROTDIR/fix/ +fi + + +############################################################### +# Run relevant script +env +echo "HAS BEGUN on $(hostname)" +$LOGSCRIPT + + +${ANALYSISSH:-$SCRgfs/exglobal_atmos_analysis.sh} +status=$? +[[ $status -ne 0 ]] && exit $status + + +############################################## +# Send Alerts +############################################## +if [ $SENDDBN = YES -a $RUN = gdas ] ; then + $DBNROOT/bin/dbn_alert MODEL GDAS_MSC_abias $job $COMOUT/${APREFIX}abias + $DBNROOT/bin/dbn_alert MODEL GDAS_MSC_abias_pc $job $COMOUT/${APREFIX}abias_pc + $DBNROOT/bin/dbn_alert MODEL GDAS_MSC_abias_air $job $COMOUT/${APREFIX}abias_air +fi + + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [ -e "$pgmout" ] ; then + cat $pgmout +fi + + +echo "ENDED NORMALLY." + + +########################################## +# Remove the Temporary working directory +########################################## +cd $DATAROOT +[[ $KEEPDATA = "NO" ]] && rm -rf $DATA + +date +exit 0 diff --git a/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC b/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC new file mode 100755 index 0000000000..c8dd111bbe --- /dev/null +++ b/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC @@ -0,0 +1,167 @@ +#!/bin/bash +set -x + +export RUN_ENVIR=${RUN_ENVIR:-"nco"} +export PS4='$SECONDS + ' +date + + +############################# +# Source relevant config files +############################# +export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} +configs="base anal analcalc" +config_path=${EXPDIR:-$NWROOT/gfs.${gfs_ver}/parm/config} +for config in $configs; do + . $config_path/config.$config + status=$? + [[ $status -ne 0 ]] && exit $status +done + + +########################################## +# Source machine runtime environment +########################################## +. $HOMEgfs/env/${machine}.env anal +status=$? +[[ $status -ne 0 ]] && exit $status + + +############################################## +# Obtain unique process id (pid) and make temp directory +############################################## +export pid=${pid:-$$} +export outid=${outid:-"LL$job"} + +export DATA=${DATA:-${DATAROOT}/${jobid:?}} +mkdir -p $DATA +cd $DATA + + +############################################## +# Run setpdy and initialize PDY variables +############################################## +export cycle="t${cyc}z" +setpdy.sh +. 
./PDY + + +############################################## +# Determine Job Output Name on System +############################################## +export pgmout="OUTPUT.${pid}" +export pgmerr=errfile + + +############################################## +# Set variables used in the script +############################################## +export CDATE=${CDATE:-${PDY}${cyc}} +export CDUMP=${CDUMP:-${RUN:-"gfs"}} +export COMPONENT=${COMPONENT:-atmos} +export DO_CALC_ANALYSIS=${DO_CALC_ANALYSIS:-"YES"} + + +############################################## +# Begin JOB SPECIFIC work +############################################## + +GDATE=$($NDATE -$assim_freq $CDATE) +gPDY=$(echo $GDATE | cut -c1-8) +gcyc=$(echo $GDATE | cut -c9-10) +GDUMP=${GDUMP:-"gdas"} + +export OPREFIX="${CDUMP}.t${cyc}z." +export GPREFIX="${GDUMP}.t${gcyc}z." +export APREFIX="${CDUMP}.t${cyc}z." +export GSUFFIX=${GSUFFIX:-$SUFFIX} +export ASUFFIX=${ASUFFIX:-$SUFFIX} + + +if [ $RUN_ENVIR = "nco" -o ${ROTDIR_DUMP:-NO} = "YES" ]; then + export COMIN=${COMIN:-$ROTDIR/$RUN.$PDY/$cyc/$COMPONENT} + export COMOUT=${COMOUT:-$ROTDIR/$RUN.$PDY/$cyc/$COMPONENT} + export COMIN_OBS=${COMIN_OBS:-$(compath.py ${envir}/obsproc/${obsproc_ver})/$RUN.$PDY/$cyc/$COMPONENT} + export COMIN_GES_OBS=${COMIN_GES_OBS:-$(compath.py ${envir}/obsproc/${obsproc_ver})/$GDUMP.$gPDY/$gcyc/$COMPONENT} +else + export COMOUT="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" + export COMIN_OBS="$DMPDIR/$CDUMP.$PDY/$cyc/$COMPONENT" + export COMIN_GES_OBS="$DMPDIR/$GDUMP.$gPDY/$gcyc/$COMPONENT" +fi +mkdir -m 775 -p $COMOUT +# COMIN_GES and COMIN_GES_ENS are used in script +export COMIN_GES="$ROTDIR/$GDUMP.$gPDY/$gcyc/$COMPONENT" +export COMIN_GES_ENS="$ROTDIR/enkfgdas.$gPDY/$gcyc/$COMPONENT" + + +export ATMGES="$COMIN_GES/${GPREFIX}atmf006${GSUFFIX}" +if [ ! -f $ATMGES ]; then + echo "FATAL ERROR: FILE MISSING: ATMGES = $ATMGES" + exit 1 +fi + + +# Get LEVS +if [ ${GSUFFIX} = ".nc" ]; then + export LEVS=$($NCLEN $ATMGES pfull) + status=$? +else + export LEVS=$($NEMSIOGET $ATMGES dimz | awk '{print $2}') + status=$? +fi +[[ $status -ne 0 ]] && exit $status + + +if [ $DOHYBVAR = "YES" ]; then + export ATMGES_ENSMEAN="$COMIN_GES_ENS/${GPREFIX}atmf006.ensmean$GSUFFIX" + if [ ! -f $ATMGES_ENSMEAN ]; then + echo "FATAL ERROR: FILE MISSING: ATMGES_ENSMEAN = $ATMGES_ENSMEAN" + exit 2 + fi +fi + + + +# Update surface fields with global_cycle +export DOGCYCLE=${DOGCYCLE:-"YES"} + + +# Generate Gaussian surface analysis +export DOGAUSFCANL=${DOGAUSFCANL:-"YES"} + + +############################################################### +# Run relevant script +env +echo "HAS BEGUN on $(hostname)" +$LOGSCRIPT + + +${ANALCALCSH:-$SCRgfs/exglobal_atmos_analysis_calc.sh} +status=$? +[[ $status -ne 0 ]] && exit $status + + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [ -e "$pgmout" ] ; then + cat $pgmout +fi + + +echo "ENDED NORMALLY." 
+ + +########################################## +# Remove the Temporary working directory +########################################## +cd $DATAROOT +[[ $KEEPDATA = "NO" ]] && rm -rf $DATA + +date +exit 0 diff --git a/scripts/exglobal_atmos_analysis.sh b/scripts/exglobal_atmos_analysis.sh new file mode 100755 index 0000000000..ca841e6e57 --- /dev/null +++ b/scripts/exglobal_atmos_analysis.sh @@ -0,0 +1,1107 @@ +#!/bin/bash +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: exglobal_atmos_analysis.sh +# Script description: Makes a global model upper air analysis with GSI +# +# Author: Rahul Mahajan Org: NCEP/EMC Date: 2017-03-02 +# +# Abstract: This script makes a global model analysis using the GSI +# +# $Id$ +# +# Attributes: +# Language: POSIX shell +# Machine: WCOSS-Cray / Theia +# +################################################################################ + +# Set environment. +export VERBOSE=${VERBOSE:-"YES"} +if [ $VERBOSE = "YES" ]; then + echo $(date) EXECUTING $0 $* >&2 + set -x +fi + +# Directories. +pwd=$(pwd) + +# Base variables +CDATE=${CDATE:-"2001010100"} +CDUMP=${CDUMP:-"gdas"} +GDUMP=${GDUMP:-"gdas"} + +# Derived base variables +GDATE=$($NDATE -$assim_freq $CDATE) +BDATE=$($NDATE -3 $CDATE) +PDY=$(echo $CDATE | cut -c1-8) +cyc=$(echo $CDATE | cut -c9-10) +bPDY=$(echo $BDATE | cut -c1-8) +bcyc=$(echo $BDATE | cut -c9-10) + +# Utilities +export NCP=${NCP:-"/bin/cp"} +export NMV=${NMV:-"/bin/mv"} +export NLN=${NLN:-"/bin/ln -sf"} +export CHGRP_CMD=${CHGRP_CMD:-"chgrp ${group_name:-rstprod}"} +export NCLEN=${NCLEN:-$HOMEgfs/ush/getncdimlen} +COMPRESS=${COMPRESS:-gzip} +UNCOMPRESS=${UNCOMPRESS:-gunzip} +APRUNCFP=${APRUNCFP:-""} +APRUN_GSI=${APRUN_GSI:-${APRUN:-""}} +NTHREADS_GSI=${NTHREADS_GSI:-${NTHREADS:-1}} + +# Surface cycle related parameters +DOGCYCLE=${DOGCYCLE:-"NO"} +CYCLESH=${CYCLESH:-$HOMEgfs/ush/global_cycle.sh} +export CYCLEXEC=${CYCLEXEC:-$HOMEgfs/exec/global_cycle} +NTHREADS_CYCLE=${NTHREADS_CYCLE:-24} +APRUN_CYCLE=${APRUN_CYCLE:-${APRUN:-""}} +export SNOW_NUDGE_COEFF=${SNOW_NUDGE_COEFF:-'-2.'} +export CYCLVARS=${CYCLVARS:-""} +export FHOUR=${FHOUR:-0} +export DELTSFC=${DELTSFC:-6} +export FIXgsm=${FIXgsm:-$HOMEgfs/fix/fix_am} +export FIXfv3=${FIXfv3:-$HOMEgfs/fix/fix_fv3_gmted2010} + +DOGAUSFCANL=${DOGAUSFCANL-"NO"} +GAUSFCANLSH=${GAUSFCANLSH:-$HOMEgfs/ush/gaussian_sfcanl.sh} +export GAUSFCANLEXE=${GAUSFCANLEXE:-$HOMEgfs/exec/gaussian_sfcanl.exe} +NTHREADS_GAUSFCANL=${NTHREADS_GAUSFCANL:-1} +APRUN_GAUSFCANL=${APRUN_GAUSFCANL:-${APRUN:-""}} + +# FV3 specific info (required for global_cycle) +export CASE=${CASE:-"C384"} +ntiles=${ntiles:-6} + +# Microphysics in the model; 99:ZC, 11:GFDLMP +export imp_physics=${imp_physics:-99} +lupp=${lupp:-".true."} +cnvw_option=${cnvw_option:-".false."} + +# Observation usage options +cao_check=${cao_check:-".true."} +ta2tb=${ta2tb:-".true."} + +# Diagnostic files options +lobsdiag_forenkf=${lobsdiag_forenkf:-".false."} +netcdf_diag=${netcdf_diag:-".true."} +binary_diag=${binary_diag:-".false."} + +# IAU +DOIAU=${DOIAU:-"NO"} +export IAUFHRS=${IAUFHRS:-"6"} + +# Dependent Scripts and Executables +GSIEXEC=${GSIEXEC:-$HOMEgfs/exec/gsi.x} +export NTHREADS_CALCINC=${NTHREADS_CALCINC:-1} +export APRUN_CALCINC=${APRUN_CALCINC:-${APRUN:-""}} +export APRUN_CALCANL=${APRUN_CALCANL:-${APRUN:-""}} +export APRUN_CHGRES=${APRUN_CALCANL:-${APRUN:-""}} +export CALCINCEXEC=${CALCINCEXEC:-$HOMEgfs/exec/calc_increment_ens.x} +export 
CALCINCNCEXEC=${CALCINCNCEXEC:-$HOMEgfs/exec/calc_increment_ens_ncio.x} +export CALCANLEXEC=${CALCANLEXEC:-$HOMEgfs/exec/calc_analysis.x} +export CHGRESNCEXEC=${CHGRESNCEXEC:-$HOMEgfs/exec/enkf_chgres_recenter_nc.x} +export CHGRESINCEXEC=${CHGRESINCEXEC:-$HOMEgfs/exec/interp_inc.x} +CHGRESEXEC=${CHGRESEXEC:-$HOMEgfs/exec/enkf_chgres_recenter.x} +export NTHREADS_CHGRES=${NTHREADS_CHGRES:-24} +CALCINCPY=${CALCINCPY:-$HOMEgfs/ush/calcinc_gfs.py} +CALCANLPY=${CALCANLPY:-$HOMEgfs/ush/calcanl_gfs.py} + +# OPS flags +RUN=${RUN:-""} +SENDECF=${SENDECF:-"NO"} +SENDDBN=${SENDDBN:-"NO"} +RUN_GETGES=${RUN_GETGES:-"NO"} +GETGESSH=${GETGESSH:-"getges.sh"} +export gesenvir=${gesenvir:-$envir} + +# Observations +OPREFIX=${OPREFIX:-""} +OSUFFIX=${OSUFFIX:-""} +PREPQC=${PREPQC:-${COMIN_OBS}/${OPREFIX}prepbufr${OSUFFIX}} +PREPQCPF=${PREPQCPF:-${COMIN_OBS}/${OPREFIX}prepbufr.acft_profiles${OSUFFIX}} +NSSTBF=${NSSTBF:-${COMIN_OBS}/${OPREFIX}nsstbufr${OSUFFIX}} +SATWND=${SATWND:-${COMIN_OBS}/${OPREFIX}satwnd.tm00.bufr_d${OSUFFIX}} +OSCATBF=${OSCATBF:-${COMIN_OBS}/${OPREFIX}oscatw.tm00.bufr_d${OSUFFIX}} +RAPIDSCATBF=${RAPIDSCATBF:-${COMIN_OBS}/${OPREFIX}rapidscatw.tm00.bufr_d${OSUFFIX}} +GSNDBF=${GSNDBF:-${COMIN_OBS}/${OPREFIX}goesnd.tm00.bufr_d${OSUFFIX}} +GSNDBF1=${GSNDBF1:-${COMIN_OBS}/${OPREFIX}goesfv.tm00.bufr_d${OSUFFIX}} +B1HRS2=${B1HRS2:-${COMIN_OBS}/${OPREFIX}1bhrs2.tm00.bufr_d${OSUFFIX}} +B1MSU=${B1MSU:-${COMIN_OBS}/${OPREFIX}1bmsu.tm00.bufr_d${OSUFFIX}} +B1HRS3=${B1HRS3:-${COMIN_OBS}/${OPREFIX}1bhrs3.tm00.bufr_d${OSUFFIX}} +B1HRS4=${B1HRS4:-${COMIN_OBS}/${OPREFIX}1bhrs4.tm00.bufr_d${OSUFFIX}} +B1AMUA=${B1AMUA:-${COMIN_OBS}/${OPREFIX}1bamua.tm00.bufr_d${OSUFFIX}} +B1AMUB=${B1AMUB:-${COMIN_OBS}/${OPREFIX}1bamub.tm00.bufr_d${OSUFFIX}} +B1MHS=${B1MHS:-${COMIN_OBS}/${OPREFIX}1bmhs.tm00.bufr_d${OSUFFIX}} +ESHRS3=${ESHRS3:-${COMIN_OBS}/${OPREFIX}eshrs3.tm00.bufr_d${OSUFFIX}} +ESAMUA=${ESAMUA:-${COMIN_OBS}/${OPREFIX}esamua.tm00.bufr_d${OSUFFIX}} +ESAMUB=${ESAMUB:-${COMIN_OBS}/${OPREFIX}esamub.tm00.bufr_d${OSUFFIX}} +ESMHS=${ESMHS:-${COMIN_OBS}/${OPREFIX}esmhs.tm00.bufr_d${OSUFFIX}} +HRS3DB=${HRS3DB:-${COMIN_OBS}/${OPREFIX}hrs3db.tm00.bufr_d${OSUFFIX}} +AMUADB=${AMUADB:-${COMIN_OBS}/${OPREFIX}amuadb.tm00.bufr_d${OSUFFIX}} +AMUBDB=${AMUBDB:-${COMIN_OBS}/${OPREFIX}amubdb.tm00.bufr_d${OSUFFIX}} +MHSDB=${MHSDB:-${COMIN_OBS}/${OPREFIX}mhsdb.tm00.bufr_d${OSUFFIX}} +AIRSBF=${AIRSBF:-${COMIN_OBS}/${OPREFIX}airsev.tm00.bufr_d${OSUFFIX}} +IASIBF=${IASIBF:-${COMIN_OBS}/${OPREFIX}mtiasi.tm00.bufr_d${OSUFFIX}} +ESIASI=${ESIASI:-${COMIN_OBS}/${OPREFIX}esiasi.tm00.bufr_d${OSUFFIX}} +IASIDB=${IASIDB:-${COMIN_OBS}/${OPREFIX}iasidb.tm00.bufr_d${OSUFFIX}} +AMSREBF=${AMSREBF:-${COMIN_OBS}/${OPREFIX}amsre.tm00.bufr_d${OSUFFIX}} +AMSR2BF=${AMSR2BF:-${COMIN_OBS}/${OPREFIX}amsr2.tm00.bufr_d${OSUFFIX}} +#GMI1CRBF=${GMI1CRBF:-${COMIN_OBS}/${OPREFIX}gmi1cr.tm00.bufr_d${OSUFFIX}} # GMI temporarily disabled due to array overflow. 
+SAPHIRBF=${SAPHIRBF:-${COMIN_OBS}/${OPREFIX}saphir.tm00.bufr_d${OSUFFIX}} +SEVIRIBF=${SEVIRIBF:-${COMIN_OBS}/${OPREFIX}sevcsr.tm00.bufr_d${OSUFFIX}} +AHIBF=${AHIBF:-${COMIN_OBS}/${OPREFIX}ahicsr.tm00.bufr_d${OSUFFIX}} +SSTVIIRS=${SSTVIIRS:-${COMIN_OBS}/${OPREFIX}sstvcw.tm00.bufr_d${OSUFFIX}} +ABIBF=${ABIBF:-${COMIN_OBS}/${OPREFIX}gsrcsr.tm00.bufr_d${OSUFFIX}} +CRISBF=${CRISBF:-${COMIN_OBS}/${OPREFIX}cris.tm00.bufr_d${OSUFFIX}} +ESCRIS=${ESCRIS:-${COMIN_OBS}/${OPREFIX}escris.tm00.bufr_d${OSUFFIX}} +CRISDB=${CRISDB:-${COMIN_OBS}/${OPREFIX}crisdb.tm00.bufr_d${OSUFFIX}} +CRISFSBF=${CRISFSBF:-${COMIN_OBS}/${OPREFIX}crisf4.tm00.bufr_d${OSUFFIX}} +ESCRISFS=${ESCRISFS:-${COMIN_OBS}/${OPREFIX}escrsf.tm00.bufr_d${OSUFFIX}} +CRISFSDB=${CRISFSDB:-${COMIN_OBS}/${OPREFIX}crsfdb.tm00.bufr_d${OSUFFIX}} +ATMSBF=${ATMSBF:-${COMIN_OBS}/${OPREFIX}atms.tm00.bufr_d${OSUFFIX}} +ESATMS=${ESATMS:-${COMIN_OBS}/${OPREFIX}esatms.tm00.bufr_d${OSUFFIX}} +ATMSDB=${ATMSDB:-${COMIN_OBS}/${OPREFIX}atmsdb.tm00.bufr_d${OSUFFIX}} +SSMITBF=${SSMITBF:-${COMIN_OBS}/${OPREFIX}ssmit.tm00.bufr_d${OSUFFIX}} +SSMISBF=${SSMISBF:-${COMIN_OBS}/${OPREFIX}ssmisu.tm00.bufr_d${OSUFFIX}} +SBUVBF=${SBUVBF:-${COMIN_OBS}/${OPREFIX}osbuv8.tm00.bufr_d${OSUFFIX}} +OMPSNPBF=${OMPSNPBF:-${COMIN_OBS}/${OPREFIX}ompsn8.tm00.bufr_d${OSUFFIX}} +OMPSTCBF=${OMPSTCBF:-${COMIN_OBS}/${OPREFIX}ompst8.tm00.bufr_d${OSUFFIX}} +OMPSLPBF=${OMPSLPBF:-${COMIN_OBS}/${OPREFIX}ompslp.tm00.bufr_d${OSUFFIX}} +GOMEBF=${GOMEBF:-${COMIN_OBS}/${OPREFIX}gome.tm00.bufr_d${OSUFFIX}} +OMIBF=${OMIBF:-${COMIN_OBS}/${OPREFIX}omi.tm00.bufr_d${OSUFFIX}} +MLSBF=${MLSBF:-${COMIN_OBS}/${OPREFIX}mls.tm00.bufr_d${OSUFFIX}} +SMIPCP=${SMIPCP:-${COMIN_OBS}/${OPREFIX}spssmi.tm00.bufr_d${OSUFFIX}} +TMIPCP=${TMIPCP:-${COMIN_OBS}/${OPREFIX}sptrmm.tm00.bufr_d${OSUFFIX}} +GPSROBF=${GPSROBF:-${COMIN_OBS}/${OPREFIX}gpsro.tm00.bufr_d${OSUFFIX}} +TCVITL=${TCVITL:-${COMIN_OBS}/${OPREFIX}syndata.tcvitals.tm00} +B1AVHAM=${B1AVHAM:-${COMIN_OBS}/${OPREFIX}avcsam.tm00.bufr_d${OSUFFIX}} +B1AVHPM=${B1AVHPM:-${COMIN_OBS}/${OPREFIX}avcspm.tm00.bufr_d${OSUFFIX}} +HDOB=${HDOB:-${COMIN_OBS}/${OPREFIX}hdob.tm00.bufr_d${OSUFFIX}} + +# Guess files +GPREFIX=${GPREFIX:-""} +GSUFFIX=${GSUFFIX:-$SUFFIX} +SFCG03=${SFCG03:-${COMIN_GES}/${GPREFIX}sfcf003${GSUFFIX}} +SFCG04=${SFCG04:-${COMIN_GES}/${GPREFIX}sfcf004${GSUFFIX}} +SFCG05=${SFCG05:-${COMIN_GES}/${GPREFIX}sfcf005${GSUFFIX}} +SFCGES=${SFCGES:-${COMIN_GES}/${GPREFIX}sfcf006${GSUFFIX}} +SFCG07=${SFCG07:-${COMIN_GES}/${GPREFIX}sfcf007${GSUFFIX}} +SFCG08=${SFCG08:-${COMIN_GES}/${GPREFIX}sfcf008${GSUFFIX}} +SFCG09=${SFCG09:-${COMIN_GES}/${GPREFIX}sfcf009${GSUFFIX}} +ATMG03=${ATMG03:-${COMIN_GES}/${GPREFIX}atmf003${GSUFFIX}} +ATMG04=${ATMG04:-${COMIN_GES}/${GPREFIX}atmf004${GSUFFIX}} +ATMG05=${ATMG05:-${COMIN_GES}/${GPREFIX}atmf005${GSUFFIX}} +ATMGES=${ATMGES:-${COMIN_GES}/${GPREFIX}atmf006${GSUFFIX}} +ATMG07=${ATMG07:-${COMIN_GES}/${GPREFIX}atmf007${GSUFFIX}} +ATMG08=${ATMG08:-${COMIN_GES}/${GPREFIX}atmf008${GSUFFIX}} +ATMG09=${ATMG09:-${COMIN_GES}/${GPREFIX}atmf009${GSUFFIX}} +GBIAS=${GBIAS:-${COMIN_GES}/${GPREFIX}abias} +GBIASPC=${GBIASPC:-${COMIN_GES}/${GPREFIX}abias_pc} +GBIASAIR=${GBIASAIR:-${COMIN_GES}/${GPREFIX}abias_air} +GRADSTAT=${GRADSTAT:-${COMIN_GES}/${GPREFIX}radstat} + +# Analysis files +export APREFIX=${APREFIX:-""} +export ASUFFIX=${ASUFFIX:-$SUFFIX} +SFCANL=${SFCANL:-${COMOUT}/${APREFIX}sfcanl${ASUFFIX}} +DTFANL=${DTFANL:-${COMOUT}/${APREFIX}dtfanl.nc} +ATMANL=${ATMANL:-${COMOUT}/${APREFIX}atmanl${ASUFFIX}} +ABIAS=${ABIAS:-${COMOUT}/${APREFIX}abias} 
+ABIASPC=${ABIASPC:-${COMOUT}/${APREFIX}abias_pc} +ABIASAIR=${ABIASAIR:-${COMOUT}/${APREFIX}abias_air} +ABIASe=${ABIASe:-${COMOUT}/${APREFIX}abias_int} +RADSTAT=${RADSTAT:-${COMOUT}/${APREFIX}radstat} +GSISTAT=${GSISTAT:-${COMOUT}/${APREFIX}gsistat} +PCPSTAT=${PCPSTAT:-${COMOUT}/${APREFIX}pcpstat} +CNVSTAT=${CNVSTAT:-${COMOUT}/${APREFIX}cnvstat} +OZNSTAT=${OZNSTAT:-${COMOUT}/${APREFIX}oznstat} + +# Increment files +ATMINC=${ATMINC:-${COMOUT}/${APREFIX}atminc.nc} + +# Obs diag +RUN_SELECT=${RUN_SELECT:-"NO"} +USE_SELECT=${USE_SELECT:-"NO"} +USE_RADSTAT=${USE_RADSTAT:-"YES"} +SELECT_OBS=${SELECT_OBS:-${COMOUT}/${APREFIX}obsinput} +GENDIAG=${GENDIAG:-"YES"} +DIAG_SUFFIX=${DIAG_SUFFIX:-""} +if [ $netcdf_diag = ".true." ] ; then + DIAG_SUFFIX="${DIAG_SUFFIX}.nc4" +fi +DIAG_COMPRESS=${DIAG_COMPRESS:-"YES"} +DIAG_TARBALL=${DIAG_TARBALL:-"YES"} +USE_CFP=${USE_CFP:-"NO"} +CFP_MP=${CFP_MP:-"NO"} +nm="" +if [ $CFP_MP = "YES" ]; then + nm=0 +fi +DIAG_DIR=${DIAG_DIR:-${COMOUT}/gsidiags} + +# Set script / GSI control parameters +DOHYBVAR=${DOHYBVAR:-"NO"} +NMEM_ENKF=${NMEM_ENKF:-0} +export DONST=${DONST:-"NO"} +NST_GSI=${NST_GSI:-0} +NSTINFO=${NSTINFO:-0} +ZSEA1=${ZSEA1:-0} +ZSEA2=${ZSEA2:-0} +FAC_DTL=${FAC_DTL:-1} +FAC_TSL=${FAC_TSL:-1} +TZR_QC=${TZR_QC:-1} +USE_READIN_ANL_SFCMASK=${USE_READIN_ANL_SFCMASK:-.false.} +SMOOTH_ENKF=${SMOOTH_ENKF:-"YES"} +export DOIAU=${DOIAU:-"NO"} +DO_CALC_INCREMENT=${DO_CALC_INCREMENT:-"NO"} +DO_CALC_ANALYSIS=${DO_CALC_ANALYSIS:-"NO"} +export INCREMENTS_TO_ZERO=${INCREMENTS_TO_ZERO:-"'NONE'"} +USE_CORRELATED_OBERRS=${USE_CORRELATED_OBERRS:-"YES"} + +# Get header information from Guess files +if [ ${SUFFIX} = ".nc" ]; then + LONB=${LONB:-$($NCLEN $ATMGES grid_xt)} # get LONB + LATB=${LATB:-$($NCLEN $ATMGES grid_yt)} # get LATB + LEVS=${LEVS:-$($NCLEN $ATMGES pfull)} # get LEVS + JCAP=${JCAP:--9999} # there is no jcap in these files +else + LONB=${LONB:-$($NEMSIOGET $ATMGES dimx | grep -i "dimx" | awk -F"= " '{print $2}' | awk -F" " '{print $1}')} # 'get LONB + LATB=${LATB:-$($NEMSIOGET $ATMGES dimy | grep -i "dimy" | awk -F"= " '{print $2}' | awk -F" " '{print $1}')} # 'get LATB + LEVS=${LEVS:-$($NEMSIOGET $ATMGES dimz | grep -i "dimz" | awk -F"= " '{print $2}' | awk -F" " '{print $1}')} # 'get LEVS + JCAP=${JCAP:-$($NEMSIOGET $ATMGES jcap | grep -i "jcap" | awk -F"= " '{print $2}' | awk -F" " '{print $1}')} # 'get JCAP +fi +[ $JCAP -eq -9999 -a $LATB -ne -9999 ] && JCAP=$((LATB-2)) +[ $LONB -eq -9999 -o $LATB -eq -9999 -o $LEVS -eq -9999 -o $JCAP -eq -9999 ] && exit -9999 + +# Get header information from Ensemble Guess files +if [ $DOHYBVAR = "YES" ]; then + SFCGES_ENSMEAN=${SFCGES_ENSMEAN:-${COMIN_GES_ENS}/${GPREFIX}sfcf006.ensmean${GSUFFIX}} + export ATMGES_ENSMEAN=${ATMGES_ENSMEAN:-${COMIN_GES_ENS}/${GPREFIX}atmf006.ensmean${GSUFFIX}} + if [ ${SUFFIX} = ".nc" ]; then + LONB_ENKF=${LONB_ENKF:-$($NCLEN $ATMGES_ENSMEAN grid_xt)} # get LONB_ENKF + LATB_ENKF=${LATB_ENKF:-$($NCLEN $ATMGES_ENSMEAN grid_yt)} # get LATB_ENFK + LEVS_ENKF=${LEVS_ENKF:-$($NCLEN $ATMGES_ENSMEAN pfull)} # get LATB_ENFK + JCAP_ENKF=${JCAP_ENKF:--9999} # again, no jcap in the netcdf files + else + LONB_ENKF=${LONB_ENKF:-$($NEMSIOGET $ATMGES_ENSMEAN dimx | grep -i "dimx" | awk -F"= " '{print $2}' | awk -F" " '{print $1}')} # 'get LONB_ENKF + LATB_ENKF=${LATB_ENKF:-$($NEMSIOGET $ATMGES_ENSMEAN dimy | grep -i "dimy" | awk -F"= " '{print $2}' | awk -F" " '{print $1}')} # 'get LATB_ENKF + LEVS_ENKF=${LEVS_ENKF:-$($NEMSIOGET $ATMGES_ENSMEAN dimz | grep -i "dimz" | awk -F"= " '{print $2}' | awk -F" " 
'{print $1}')} # 'get LEVS_ENKF + JCAP_ENKF=${JCAP_ENKF:-$($NEMSIOGET $ATMGES_ENSMEAN jcap | grep -i "jcap" | awk -F"= " '{print $2}' | awk -F" " '{print $1}')} # 'get JCAP_ENKF + fi + NLON_ENKF=${NLON_ENKF:-$LONB_ENKF} + NLAT_ENKF=${NLAT_ENKF:-$(($LATB_ENKF+2))} + [ $JCAP_ENKF -eq -9999 -a $LATB_ENKF -ne -9999 ] && JCAP_ENKF=$((LATB_ENKF-2)) + [ $LONB_ENKF -eq -9999 -o $LATB_ENKF -eq -9999 -o $LEVS_ENKF -eq -9999 -o $JCAP_ENKF -eq -9999 ] && exit -9999 +else + LONB_ENKF=0 # just for if statement later +fi + +# Get dimension information based on CASE +res=$(echo $CASE | cut -c2-) +JCAP_CASE=$((res*2-2)) +LATB_CASE=$((res*2)) +LONB_CASE=$((res*4)) + +# Set analysis resolution information +if [ $DOHYBVAR = "YES" ]; then + JCAP_A=${JCAP_A:-${JCAP_ENKF:-$JCAP}} + LONA=${LONA:-${LONB_ENKF:-$LONB}} + LATA=${LATA:-${LATB_ENKF:-$LATB}} +else + JCAP_A=${JCAP_A:-$JCAP} + LONA=${LONA:-$LONB} + LATA=${LATA:-$LATB} +fi +NLON_A=${NLON_A:-$LONA} +NLAT_A=${NLAT_A:-$(($LATA+2))} + +DELTIM=${DELTIM:-$((3600/($JCAP_A/20)))} + +# logic for netCDF I/O +if [ ${SUFFIX} = ".nc" ]; then + # GSI namelist options to use netCDF background + use_gfs_nemsio=".false." + use_gfs_ncio=".true." +else + # GSI namelist options to use NEMSIO background + use_gfs_nemsio=".true." + use_gfs_ncio=".false." +fi + +# determine if writing or calculating increment +if [ $DO_CALC_INCREMENT = "YES" ]; then + write_fv3_increment=".false." +else + write_fv3_increment=".true." + WRITE_INCR_ZERO="incvars_to_zero= $INCREMENTS_TO_ZERO," + WRITE_ZERO_STRAT="incvars_zero_strat= $INCVARS_ZERO_STRAT," + WRITE_STRAT_EFOLD="incvars_efold= $INCVARS_EFOLD," +fi + +# GSI Fix files +RTMFIX=${RTMFIX:-${CRTM_FIX}} +BERROR=${BERROR:-${FIXgsi}/Big_Endian/global_berror.l${LEVS}y${NLAT_A}.f77} +SATANGL=${SATANGL:-${FIXgsi}/global_satangbias.txt} +SATINFO=${SATINFO:-${FIXgsi}/global_satinfo.txt} +RADCLOUDINFO=${RADCLOUDINFO:-${FIXgsi}/cloudy_radiance_info.txt} +ATMSFILTER=${ATMSFILTER:-${FIXgsi}/atms_beamwidth.txt} +ANAVINFO=${ANAVINFO:-${FIXgsi}/global_anavinfo.l${LEVS}.txt} +CONVINFO=${CONVINFO:-${FIXgsi}/global_convinfo.txt} +vqcdat=${vqcdat:-${FIXgsi}/vqctp001.dat} +INSITUINFO=${INSITUINFO:-${FIXgsi}/global_insituinfo.txt} +OZINFO=${OZINFO:-${FIXgsi}/global_ozinfo.txt} +PCPINFO=${PCPINFO:-${FIXgsi}/global_pcpinfo.txt} +AEROINFO=${AEROINFO:-${FIXgsi}/global_aeroinfo.txt} +SCANINFO=${SCANINFO:-${FIXgsi}/global_scaninfo.txt} +HYBENSINFO=${HYBENSINFO:-${FIXgsi}/global_hybens_info.l${LEVS}.txt} +OBERROR=${OBERROR:-${FIXgsi}/prepobs_errtable.global} + +# GSI namelist +SETUP=${SETUP:-""} +GRIDOPTS=${GRIDOPTS:-""} +BKGVERR=${BKGVERR:-""} +ANBKGERR=${ANBKGERR:-""} +JCOPTS=${JCOPTS:-""} +STRONGOPTS=${STRONGOPTS:-""} +OBSQC=${OBSQC:-""} +OBSINPUT=${OBSINPUT:-""} +SUPERRAD=${SUPERRAD:-""} +SINGLEOB=${SINGLEOB:-""} +LAGDATA=${LAGDATA:-""} +HYBRID_ENSEMBLE=${HYBRID_ENSEMBLE:-""} +RAPIDREFRESH_CLDSURF=${RAPIDREFRESH_CLDSURF:-""} +CHEM=${CHEM:-""} +NST=${NST:-""} + +#uGSI Namelist parameters +lrun_subdirs=${lrun_subdirs:-".true."} +if [ $DOHYBVAR = "YES" ]; then + l_hyb_ens=.true. + export l4densvar=${l4densvar:-".false."} + export lwrite4danl=${lwrite4danl:-".false."} +else + l_hyb_ens=.false. + export l4densvar=.false. + export lwrite4danl=.false. +fi + +# Set 4D-EnVar specific variables +if [ $DOHYBVAR = "YES" -a $l4densvar = ".true." -a $lwrite4danl = ".true." 
]; then + ATMA03=${ATMA03:-${COMOUT}/${APREFIX}atma003${ASUFFIX}} + ATMI03=${ATMI03:-${COMOUT}/${APREFIX}atmi003.nc} + ATMA04=${ATMA04:-${COMOUT}/${APREFIX}atma004${ASUFFIX}} + ATMI04=${ATMI04:-${COMOUT}/${APREFIX}atmi004.nc} + ATMA05=${ATMA05:-${COMOUT}/${APREFIX}atma005${ASUFFIX}} + ATMI05=${ATMI05:-${COMOUT}/${APREFIX}atmi005.nc} + ATMA07=${ATMA07:-${COMOUT}/${APREFIX}atma007${ASUFFIX}} + ATMI07=${ATMI07:-${COMOUT}/${APREFIX}atmi007.nc} + ATMA08=${ATMA08:-${COMOUT}/${APREFIX}atma008${ASUFFIX}} + ATMI08=${ATMI08:-${COMOUT}/${APREFIX}atmi008.nc} + ATMA09=${ATMA09:-${COMOUT}/${APREFIX}atma009${ASUFFIX}} + ATMI09=${ATMI09:-${COMOUT}/${APREFIX}atmi009.nc} +fi + +################################################################################ +# Preprocessing +mkdata=NO +if [ ! -d $DATA ]; then + mkdata=YES + mkdir -p $DATA +fi + +cd $DATA || exit 99 + +############################################################## +# Fixed files +$NLN $BERROR berror_stats +$NLN $SATANGL satbias_angle +$NLN $SATINFO satinfo +$NLN $RADCLOUDINFO cloudy_radiance_info.txt +$NLN $ATMSFILTER atms_beamwidth.txt +$NLN $ANAVINFO anavinfo +$NLN $CONVINFO convinfo +$NLN $vqcdat vqctp001.dat +$NLN $INSITUINFO insituinfo +$NLN $OZINFO ozinfo +$NLN $PCPINFO pcpinfo +$NLN $AEROINFO aeroinfo +$NLN $SCANINFO scaninfo +$NLN $HYBENSINFO hybens_info +$NLN $OBERROR errtable + +#If using correlated error, link to the covariance files +if [ $USE_CORRELATED_OBERRS == "YES" ]; then + if grep -q "Rcov" $ANAVINFO ; then + if ls ${FIXgsi}/Rcov* 1> /dev/null 2>&1; then + $NLN ${FIXgsi}/Rcov* $DATA + echo "using correlated obs error" + else + echo "FATAL ERROR: Satellite error covariance files (Rcov) are missing." + echo "Check for the required Rcov files in " $ANAVINFO + exit 1 + fi + else + echo "FATAL ERROR: Satellite error covariance info missing in " $ANAVINFO + exit 1 + fi + +# Correlated error utlizes mkl lapack. Found it necesary to fix the +# number of mkl threads to ensure reproducible results independent +# of the job configuration. 
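+# (A single MKL thread fixes the order of the LAPACK floating-point
+# reductions, which is what keeps the correlated-error results
+# reproducible across different job layouts.)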
+ export MKL_NUM_THREADS=1 + +else + echo "not using correlated obs error" +fi + +############################################################## +# CRTM Spectral and Transmittance coefficients +mkdir -p crtm_coeffs +for file in $(awk '{if($1!~"!"){print $1}}' satinfo | sort | uniq); do + $NLN $RTMFIX/${file}.SpcCoeff.bin ./crtm_coeffs/${file}.SpcCoeff.bin + $NLN $RTMFIX/${file}.TauCoeff.bin ./crtm_coeffs/${file}.TauCoeff.bin +done +$NLN $RTMFIX/amsua_metop-a_v2.SpcCoeff.bin ./crtm_coeffs/amsua_metop-a_v2.SpcCoeff.bin + +$NLN $RTMFIX/Nalli.IRwater.EmisCoeff.bin ./crtm_coeffs/Nalli.IRwater.EmisCoeff.bin +$NLN $RTMFIX/NPOESS.IRice.EmisCoeff.bin ./crtm_coeffs/NPOESS.IRice.EmisCoeff.bin +$NLN $RTMFIX/NPOESS.IRland.EmisCoeff.bin ./crtm_coeffs/NPOESS.IRland.EmisCoeff.bin +$NLN $RTMFIX/NPOESS.IRsnow.EmisCoeff.bin ./crtm_coeffs/NPOESS.IRsnow.EmisCoeff.bin +$NLN $RTMFIX/NPOESS.VISice.EmisCoeff.bin ./crtm_coeffs/NPOESS.VISice.EmisCoeff.bin +$NLN $RTMFIX/NPOESS.VISland.EmisCoeff.bin ./crtm_coeffs/NPOESS.VISland.EmisCoeff.bin +$NLN $RTMFIX/NPOESS.VISsnow.EmisCoeff.bin ./crtm_coeffs/NPOESS.VISsnow.EmisCoeff.bin +$NLN $RTMFIX/NPOESS.VISwater.EmisCoeff.bin ./crtm_coeffs/NPOESS.VISwater.EmisCoeff.bin +$NLN $RTMFIX/FASTEM6.MWwater.EmisCoeff.bin ./crtm_coeffs/FASTEM6.MWwater.EmisCoeff.bin +$NLN $RTMFIX/AerosolCoeff.bin ./crtm_coeffs/AerosolCoeff.bin +$NLN $RTMFIX/CloudCoeff.GFDLFV3.-109z-1.bin ./crtm_coeffs/CloudCoeff.bin +#$NLN $RTMFIX/CloudCoeff.bin ./crtm_coeffs/CloudCoeff.bin + + +############################################################## +# Observational data +$NLN $PREPQC prepbufr +$NLN $PREPQCPF prepbufr_profl +$NLN $SATWND satwndbufr +$NLN $OSCATBF oscatbufr +$NLN $RAPIDSCATBF rapidscatbufr +$NLN $GSNDBF gsndrbufr +$NLN $GSNDBF1 gsnd1bufr +$NLN $B1MSU msubufr +$NLN $B1AMUA amsuabufr +$NLN $B1AMUB amsubbufr +$NLN $B1MHS mhsbufr +$NLN $ESAMUA amsuabufrears +$NLN $ESAMUB amsubbufrears +#$NLN $ESMHS mhsbufrears +$NLN $AMUADB amsuabufr_db +$NLN $AMUBDB amsubbufr_db +#$NLN $MHSDB mhsbufr_db +$NLN $SBUVBF sbuvbufr +$NLN $OMPSNPBF ompsnpbufr +$NLN $OMPSLPBF ompslpbufr +$NLN $OMPSTCBF ompstcbufr +$NLN $GOMEBF gomebufr +$NLN $OMIBF omibufr +$NLN $MLSBF mlsbufr +$NLN $SMIPCP ssmirrbufr +$NLN $TMIPCP tmirrbufr +$NLN $AIRSBF airsbufr +$NLN $IASIBF iasibufr +$NLN $ESIASI iasibufrears +$NLN $IASIDB iasibufr_db +$NLN $AMSREBF amsrebufr +$NLN $AMSR2BF amsr2bufr +#$NLN $GMI1CRBF gmibufr # GMI temporarily disabled due to array overflow. 
+$NLN $SAPHIRBF saphirbufr +$NLN $SEVIRIBF seviribufr +$NLN $CRISBF crisbufr +$NLN $ESCRIS crisbufrears +$NLN $CRISDB crisbufr_db +$NLN $CRISFSBF crisfsbufr +$NLN $ESCRISFS crisfsbufrears +$NLN $CRISFSDB crisfsbufr_db +$NLN $ATMSBF atmsbufr +$NLN $ESATMS atmsbufrears +$NLN $ATMSDB atmsbufr_db +$NLN $SSMITBF ssmitbufr +$NLN $SSMISBF ssmisbufr +$NLN $GPSROBF gpsrobufr +$NLN $TCVITL tcvitl +$NLN $B1AVHAM avhambufr +$NLN $B1AVHPM avhpmbufr +$NLN $AHIBF ahibufr +$NLN $ABIBF abibufr +$NLN $HDOB hdobbufr +$NLN $SSTVIIRS sstviirs + +[[ $DONST = "YES" ]] && $NLN $NSSTBF nsstbufr + +############################################################## +# Required bias guess files +$NLN $GBIAS satbias_in +$NLN $GBIASPC satbias_pc +$NLN $GBIASAIR aircftbias_in +$NLN $GRADSTAT radstat.gdas + +############################################################## +# Required model guess files +$NLN $ATMG03 sigf03 +$NLN $ATMGES sigf06 +$NLN $ATMG09 sigf09 + +$NLN $SFCG03 sfcf03 +$NLN $SFCGES sfcf06 +$NLN $SFCG09 sfcf09 + +# Link hourly backgrounds (if present) +if [ -f $ATMG04 -a -f $ATMG05 -a -f $ATMG07 -a -f $ATMG08 ]; then + nhr_obsbin=1 +fi + +[[ -f $ATMG04 ]] && $NLN $ATMG04 sigf04 +[[ -f $ATMG05 ]] && $NLN $ATMG05 sigf05 +[[ -f $ATMG07 ]] && $NLN $ATMG07 sigf07 +[[ -f $ATMG08 ]] && $NLN $ATMG08 sigf08 + +[[ -f $SFCG04 ]] && $NLN $SFCG04 sfcf04 +[[ -f $SFCG05 ]] && $NLN $SFCG05 sfcf05 +[[ -f $SFCG07 ]] && $NLN $SFCG07 sfcf07 +[[ -f $SFCG08 ]] && $NLN $SFCG08 sfcf08 + +if [ $DOHYBVAR = "YES" ]; then + + # Link ensemble members + mkdir -p ensemble_data + + ENKF_SUFFIX="s" + [[ $SMOOTH_ENKF = "NO" ]] && ENKF_SUFFIX="" + + fhrs="06" + if [ $l4densvar = ".true." ]; then + fhrs="03 04 05 06 07 08 09" + fi + + for imem in $(seq 1 $NMEM_ENKF); do + memchar="mem"$(printf %03i $imem) + for fhr in $fhrs; do + $NLN ${COMIN_GES_ENS}/$memchar/${GPREFIX}atmf0${fhr}${ENKF_SUFFIX}${GSUFFIX} ./ensemble_data/sigf${fhr}_ens_$memchar + if [ $cnvw_option = ".true." ]; then + $NLN ${COMIN_GES_ENS}/$memchar/${GPREFIX}sfcf0${fhr}${GSUFFIX} ./ensemble_data/sfcf${fhr}_ens_$memchar + fi + done + done + +fi + +############################################################## +# Handle inconsistent surface mask between background, ensemble and analysis grids +# This needs re-visiting in the context of NSST; especially references to JCAP* +if [ $JCAP -ne $JCAP_A ]; then + if [ $DOHYBVAR = "YES" -a $JCAP_A = $JCAP_ENKF ]; then + if [ -e $SFCGES_ENSMEAN ]; then + USE_READIN_ANL_SFCMASK=.true. + $NLN $SFCGES_ENSMEAN sfcf06_anlgrid + else + echo "Warning: Inconsistent sfc mask between analysis and ensemble grids, GSI will interpolate" + fi + else + echo "Warning: Inconsistent sfc mask between analysis and background grids, GSI will interpolate" + fi +fi + +############################################################## +# Diagnostic files +# if requested, link GSI diagnostic file directories for use later +if [ $GENDIAG = "YES" ] ; then + if [ $lrun_subdirs = ".true." ] ; then + if [ -d $DIAG_DIR ]; then + rm -rf $DIAG_DIR + fi + npe_m1="$(($npe_gsi-1))" + for pe in $(seq 0 $npe_m1); do + pedir="dir."$(printf %04i $pe) + mkdir -p $DIAG_DIR/$pedir + $NLN $DIAG_DIR/$pedir $pedir + done + else + err_exit "FATAL ERROR: lrun_subdirs must be true. lrun_subdirs=$lrun_subdirs" + fi +fi + +############################################################## +# Output files +# $SFCANL is no longer created here since global_cycle is not called +$NLN $ATMANL siganl +$NLN $ATMINC siginc.nc +if [ $DOHYBVAR = "YES" -a $l4densvar = ".true." -a $lwrite4danl = ".true." 
]; then + $NLN $ATMA03 siga03 + $NLN $ATMI03 sigi03.nc + $NLN $ATMA04 siga04 + $NLN $ATMI04 sigi04.nc + $NLN $ATMA05 siga05 + $NLN $ATMI05 sigi05.nc + $NLN $ATMA07 siga07 + $NLN $ATMI07 sigi07.nc + $NLN $ATMA08 siga08 + $NLN $ATMI08 sigi08.nc + $NLN $ATMA09 siga09 + $NLN $ATMI09 sigi09.nc +fi +$NLN $ABIAS satbias_out +$NLN $ABIASPC satbias_pc.out +$NLN $ABIASAIR aircftbias_out + +if [ $DONST = "YES" ]; then + $NLN $DTFANL dtfanl +fi + +# If requested, link (and if tarred, de-tar obsinput.tar) into obs_input.* files +if [ $USE_SELECT = "YES" ]; then + rm obs_input.* + nl=$(file $SELECT_OBS | cut -d: -f2 | grep tar | wc -l) + if [ $nl -eq 1 ]; then + rm obsinput.tar + $NLN $SELECT_OBS obsinput.tar + tar -xvf obsinput.tar + rm obsinput.tar + else + for filetop in $(ls $SELECT_OBS/obs_input.*); do + fileloc=$(basename $filetop) + $NLN $filetop $fileloc + done + fi +fi + +############################################################## +# If requested, copy and de-tar guess radstat file +if [ $USE_RADSTAT = "YES" ]; then + if [ $USE_CFP = "YES" ]; then + [[ -f $DATA/unzip.sh ]] && rm $DATA/unzip.sh + [[ -f $DATA/mp_unzip.sh ]] && rm $DATA/mp_unzip.sh + cat > $DATA/unzip.sh << EOFunzip +#!/bin/sh + diag_file=\$1 + diag_suffix=\$2 + fname=\$(echo \$diag_file | cut -d'.' -f1) + fdate=\$(echo \$diag_file | cut -d'.' -f2) + $UNCOMPRESS \$diag_file + fnameges=\$(echo \$fname | sed 's/_ges//g') + $NMV \$fname.\$fdate\$diag_suffix \$fnameges +EOFunzip + chmod 755 $DATA/unzip.sh + fi + + listdiag=$(tar xvf radstat.gdas | cut -d' ' -f2 | grep _ges) + for type in $listdiag; do + diag_file=$(echo $type | cut -d',' -f1) + if [ $USE_CFP = "YES" ] ; then + echo "$nm $DATA/unzip.sh $diag_file $DIAG_SUFFIX" | tee -a $DATA/mp_unzip.sh + if [ ${CFP_MP:-"NO"} = "YES" ]; then + nm=$((nm+1)) + fi + else + fname=$(echo $diag_file | cut -d'.' -f1) + date=$(echo $diag_file | cut -d'.' -f2) + $UNCOMPRESS $diag_file + fnameges=$(echo $fname|sed 's/_ges//g') + $NMV $fname.$date$DIAG_SUFFIX $fnameges + fi + done + + if [ $USE_CFP = "YES" ] ; then + chmod 755 $DATA/mp_unzip.sh + ncmd=$(cat $DATA/mp_unzip.sh | wc -l) + if [ $ncmd -gt 0 ]; then + ncmd_max=$((ncmd < npe_node_max ? ncmd : npe_node_max)) + APRUNCFP_UNZIP=$(eval echo $APRUNCFP) + $APRUNCFP_UNZIP $DATA/mp_unzip.sh + export err=$?; err_chk + fi + fi +fi # if [ $USE_RADSTAT = "YES" ] + +############################################################## +# GSI Namelist options +if [ $DOHYBVAR = "YES" ]; then + HYBRID_ENSEMBLE="n_ens=$NMEM_ENKF,jcap_ens=$JCAP_ENKF,nlat_ens=$NLAT_ENKF,nlon_ens=$NLON_ENKF,jcap_ens_test=$JCAP_ENKF,$HYBRID_ENSEMBLE" + if [ $l4densvar = ".true." 
]; then + SETUP="niter(1)=50,niter(2)=150,niter_no_qc(1)=25,niter_no_qc(2)=0,thin4d=.true.,ens_nstarthr=3,l4densvar=$l4densvar,lwrite4danl=$lwrite4danl,$SETUP" + JCOPTS="ljc4tlevs=.true.,$JCOPTS" + STRONGOPTS="tlnmc_option=3,$STRONGOPTS" + OBSQC="c_varqc=0.04,$OBSQC" + fi +fi + +if [ $DONST = "YES" ]; then + NST="nstinfo=$NSTINFO,fac_dtl=$FAC_DTL,fac_tsl=$FAC_TSL,zsea1=$ZSEA1,zsea2=$ZSEA2,$NST" +fi + +############################################################## +# Create global_gsi namelist +cat > gsiparm.anl << EOF +&SETUP + miter=2, + niter(1)=100,niter(2)=100, + niter_no_qc(1)=50,niter_no_qc(2)=0, + write_diag(1)=.true.,write_diag(2)=.false.,write_diag(3)=.true., + qoption=2, + gencode=${IGEN:-0},deltim=$DELTIM, + factqmin=0.5,factqmax=0.0002, + iguess=-1, + tzr_qc=$TZR_QC, + oneobtest=.false.,retrieval=.false.,l_foto=.false., + use_pbl=.false.,use_compress=.true.,nsig_ext=45.,gpstop=50.,commgpstop=45.,commgpserrinf=1.0, + use_gfs_nemsio=${use_gfs_nemsio},use_gfs_ncio=${use_gfs_ncio},sfcnst_comb=.true., + use_readin_anl_sfcmask=${USE_READIN_ANL_SFCMASK}, + lrun_subdirs=$lrun_subdirs, + crtm_coeffs_path='./crtm_coeffs/', + newpc4pred=.true.,adp_anglebc=.true.,angord=4,passive_bc=.true.,use_edges=.false., + diag_precon=.true.,step_start=1.e-3,emiss_bc=.true.,nhr_obsbin=${nhr_obsbin:-3}, + cwoption=3,imp_physics=$imp_physics,lupp=$lupp,cnvw_option=$cnvw_option,cao_check=${cao_check}, + netcdf_diag=$netcdf_diag,binary_diag=$binary_diag, + lobsdiag_forenkf=$lobsdiag_forenkf, + write_fv3_incr=$write_fv3_increment, + nhr_anal=${IAUFHRS}, + ta2tb=${ta2tb}, + $WRITE_INCR_ZERO + $WRITE_ZERO_STRAT + $WRITE_STRAT_EFOLD + $SETUP +/ +&GRIDOPTS + JCAP_B=$JCAP,JCAP=$JCAP_A,NLAT=$NLAT_A,NLON=$NLON_A,nsig=$LEVS, + regional=.false.,nlayers(63)=3,nlayers(64)=6, + $GRIDOPTS +/ +&BKGERR + vs=0.7, + hzscl=1.7,0.8,0.5, + hswgt=0.45,0.3,0.25, + bw=0.0,norsp=4, + bkgv_flowdep=.true.,bkgv_rewgtfct=1.5, + bkgv_write=.false., + cwcoveqqcov=.false., + $BKGVERR +/ +&ANBKGERR + anisotropic=.false., + $ANBKGERR +/ +&JCOPTS + ljcdfi=.false.,alphajc=0.0,ljcpdry=.true.,bamp_jcpdry=5.0e7, + $JCOPTS +/ +&STRONGOPTS + tlnmc_option=2,nstrong=1,nvmodes_keep=8,period_max=6.,period_width=1.5, + $STRONGOPTS +/ +&OBSQC + dfact=0.75,dfact1=3.0,noiqc=.true.,oberrflg=.false.,c_varqc=0.02, + use_poq7=.true.,qc_noirjaco3_pole=.true.,vqc=.false.,nvqc=.true., + aircraft_t_bc=.true.,biaspredt=1.0e5,upd_aircraft=.true.,cleanup_tail=.true., + tcp_width=70.0,tcp_ermax=7.35, + $OBSQC +/ +&OBS_INPUT + dmesh(1)=145.0,dmesh(2)=150.0,dmesh(3)=100.0,dmesh(4)=50.0,time_window_max=3.0, + $OBSINPUT +/ +OBS_INPUT:: +! 
dfile dtype dplat dsis dval dthin dsfcalc + prepbufr ps null ps 0.0 0 0 + prepbufr t null t 0.0 0 0 + prepbufr_profl t null t 0.0 0 0 + hdobbufr t null t 0.0 0 0 + prepbufr q null q 0.0 0 0 + prepbufr_profl q null q 0.0 0 0 + hdobbufr q null q 0.0 0 0 + prepbufr pw null pw 0.0 0 0 + prepbufr uv null uv 0.0 0 0 + prepbufr_profl uv null uv 0.0 0 0 + satwndbufr uv null uv 0.0 0 0 + hdobbufr uv null uv 0.0 0 0 + prepbufr spd null spd 0.0 0 0 + hdobbufr spd null spd 0.0 0 0 + prepbufr dw null dw 0.0 0 0 + radarbufr rw null rw 0.0 0 0 + nsstbufr sst nsst sst 0.0 0 0 + gpsrobufr gps_bnd null gps 0.0 0 0 + ssmirrbufr pcp_ssmi dmsp pcp_ssmi 0.0 -1 0 + tmirrbufr pcp_tmi trmm pcp_tmi 0.0 -1 0 + sbuvbufr sbuv2 n16 sbuv8_n16 0.0 0 0 + sbuvbufr sbuv2 n17 sbuv8_n17 0.0 0 0 + sbuvbufr sbuv2 n18 sbuv8_n18 0.0 0 0 + gimgrbufr goes_img g11 imgr_g11 0.0 1 0 + gimgrbufr goes_img g12 imgr_g12 0.0 1 0 + airsbufr airs aqua airs_aqua 0.0 1 1 + amsuabufr amsua n15 amsua_n15 0.0 1 1 + amsuabufr amsua n18 amsua_n18 0.0 1 1 + amsuabufr amsua metop-a amsua_metop-a 0.0 1 1 + airsbufr amsua aqua amsua_aqua 0.0 1 1 + amsubbufr amsub n17 amsub_n17 0.0 1 1 + mhsbufr mhs n18 mhs_n18 0.0 1 1 + mhsbufr mhs metop-a mhs_metop-a 0.0 1 1 + ssmitbufr ssmi f15 ssmi_f15 0.0 1 0 + amsrebufr amsre_low aqua amsre_aqua 0.0 1 0 + amsrebufr amsre_mid aqua amsre_aqua 0.0 1 0 + amsrebufr amsre_hig aqua amsre_aqua 0.0 1 0 + ssmisbufr ssmis f16 ssmis_f16 0.0 1 0 + ssmisbufr ssmis f17 ssmis_f17 0.0 1 0 + ssmisbufr ssmis f18 ssmis_f18 0.0 1 0 + gsnd1bufr sndrd1 g12 sndrD1_g12 0.0 1 0 + gsnd1bufr sndrd2 g12 sndrD2_g12 0.0 1 0 + gsnd1bufr sndrd3 g12 sndrD3_g12 0.0 1 0 + gsnd1bufr sndrd4 g12 sndrD4_g12 0.0 1 0 + gsnd1bufr sndrd1 g11 sndrD1_g11 0.0 1 0 + gsnd1bufr sndrd2 g11 sndrD2_g11 0.0 1 0 + gsnd1bufr sndrd3 g11 sndrD3_g11 0.0 1 0 + gsnd1bufr sndrd4 g11 sndrD4_g11 0.0 1 0 + gsnd1bufr sndrd1 g13 sndrD1_g13 0.0 1 0 + gsnd1bufr sndrd2 g13 sndrD2_g13 0.0 1 0 + gsnd1bufr sndrd3 g13 sndrD3_g13 0.0 1 0 + gsnd1bufr sndrd4 g13 sndrD4_g13 0.0 1 0 + iasibufr iasi metop-a iasi_metop-a 0.0 1 1 + gomebufr gome metop-a gome_metop-a 0.0 2 0 + omibufr omi aura omi_aura 0.0 2 0 + sbuvbufr sbuv2 n19 sbuv8_n19 0.0 0 0 + amsuabufr amsua n19 amsua_n19 0.0 1 1 + mhsbufr mhs n19 mhs_n19 0.0 1 1 + tcvitl tcp null tcp 0.0 0 0 + seviribufr seviri m08 seviri_m08 0.0 1 0 + seviribufr seviri m09 seviri_m09 0.0 1 0 + seviribufr seviri m10 seviri_m10 0.0 1 0 + seviribufr seviri m11 seviri_m11 0.0 1 0 + amsuabufr amsua metop-b amsua_metop-b 0.0 1 1 + mhsbufr mhs metop-b mhs_metop-b 0.0 1 1 + iasibufr iasi metop-b iasi_metop-b 0.0 1 1 + gomebufr gome metop-b gome_metop-b 0.0 2 0 + atmsbufr atms npp atms_npp 0.0 1 1 + atmsbufr atms n20 atms_n20 0.0 1 1 + crisbufr cris npp cris_npp 0.0 1 0 + crisfsbufr cris-fsr npp cris-fsr_npp 0.0 1 0 + crisfsbufr cris-fsr n20 cris-fsr_n20 0.0 1 0 + gsnd1bufr sndrd1 g14 sndrD1_g14 0.0 1 0 + gsnd1bufr sndrd2 g14 sndrD2_g14 0.0 1 0 + gsnd1bufr sndrd3 g14 sndrD3_g14 0.0 1 0 + gsnd1bufr sndrd4 g14 sndrD4_g14 0.0 1 0 + gsnd1bufr sndrd1 g15 sndrD1_g15 0.0 1 0 + gsnd1bufr sndrd2 g15 sndrD2_g15 0.0 1 0 + gsnd1bufr sndrd3 g15 sndrD3_g15 0.0 1 0 + gsnd1bufr sndrd4 g15 sndrD4_g15 0.0 1 0 + oscatbufr uv null uv 0.0 0 0 + mlsbufr mls30 aura mls30_aura 0.0 0 0 + avhambufr avhrr metop-a avhrr3_metop-a 0.0 4 0 + avhpmbufr avhrr n18 avhrr3_n18 0.0 4 0 + avhambufr avhrr metop-b avhrr3_metop-b 0.0 4 0 + avhambufr avhrr metop-c avhrr3_metop-c 0.0 4 0 + avhpmbufr avhrr n19 avhrr3_n19 0.0 4 0 + amsr2bufr amsr2 gcom-w1 amsr2_gcom-w1 0.0 3 0 + gmibufr gmi gpm gmi_gpm 
0.0 1 0 + saphirbufr saphir meghat saphir_meghat 0.0 3 0 + ahibufr ahi himawari8 ahi_himawari8 0.0 1 0 + abibufr abi g16 abi_g16 0.0 1 0 + abibufr abi g17 abi_g17 0.0 1 0 + abibufr abi g18 abi_g18 0.0 1 0 + rapidscatbufr uv null uv 0.0 0 0 + ompsnpbufr ompsnp npp ompsnp_npp 0.0 0 0 + ompslpbufr ompslp npp ompslp_npp 0.0 0 0 + ompstcbufr ompstc8 npp ompstc8_npp 0.0 2 0 + ompsnpbufr ompsnp n20 ompsnp_n20 0.0 0 0 + ompstcbufr ompstc8 n20 ompstc8_n20 0.0 2 0 + amsuabufr amsua metop-c amsua_metop-c 0.0 1 1 + mhsbufr mhs metop-c mhs_metop-c 0.0 1 1 + iasibufr iasi metop-c iasi_metop-c 0.0 1 1 + sstviirs viirs-m npp viirs-m_npp 0.0 4 0 + sstviirs viirs-m j1 viirs-m_j1 0.0 4 0 + ahibufr ahi himawari9 ahi_himawari9 0.0 1 0 + sstviirs viirs-m j2 viirs-m_j2 0.0 4 0 + atmsbufr atms n21 atms_n21 0.0 1 0 + crisfsbufr cris-fsr n21 cris-fsr_n21 0.0 1 0 + ompsnpbufr ompsnp n21 ompsnp_n21 0.0 0 0 + ompstcbufr ompstc8 n21 ompstc8_n21 0.0 2 0 + gomebufr gome metop-c gome_metop-c 0.0 2 0 +:: +&SUPEROB_RADAR + $SUPERRAD +/ +&LAG_DATA + $LAGDATA +/ +&HYBRID_ENSEMBLE + l_hyb_ens=$l_hyb_ens, + generate_ens=.false., + beta_s0=0.125,readin_beta=.false., + s_ens_h=800.,s_ens_v=-0.8,readin_localization=.true., + aniso_a_en=.false.,oz_univ_static=.false.,uv_hyb_ens=.true., + ensemble_path='./ensemble_data/', + ens_fast_read=.true., + $HYBRID_ENSEMBLE +/ +&RAPIDREFRESH_CLDSURF + dfi_radar_latent_heat_time_period=30.0, + $RAPIDREFRESH_CLDSURF +/ +&CHEM + $CHEM +/ +&SINGLEOB_TEST + maginnov=0.1,magoberr=0.1,oneob_type='t', + oblat=45.,oblon=180.,obpres=1000.,obdattim=$CDATE, + obhourset=0., + $SINGLEOB +/ +&NST + nst_gsi=$NST_GSI, + $NST +/ +EOF +cat gsiparm.anl + +############################################################## +# Run gsi analysis + +export OMP_NUM_THREADS=$NTHREADS_GSI +export pgm=$GSIEXEC +. prep_step + +$NCP $GSIEXEC $DATA +$APRUN_GSI ${DATA}/$(basename $GSIEXEC) 1>&1 2>&2 +export err=$?; err_chk + + +############################################################## +# If full analysis field written, calculate analysis increment +# here before releasing FV3 forecast +if [ $DO_CALC_INCREMENT = "YES" ]; then + $CALCINCPY + export err=$?; err_chk +fi + +############################################################## +# Update surface fields in the FV3 restart's using global_cycle +if [ $DOGCYCLE = "YES" ]; then + + mkdir -p $COMOUT/RESTART + + # Global cycle requires these files + export FNTSFA=${FNTSFA:-$COMIN_OBS/${OPREFIX}rtgssthr.grb} + export FNACNA=${FNACNA:-$COMIN/${OPREFIX}seaice.5min.blend.grb} + export FNSNOA=${FNSNOA:-$COMIN/${OPREFIX}snogrb_t${JCAP_CASE}.${LONB_CASE}.${LATB_CASE}} + [[ ! -f $FNSNOA ]] && export FNSNOA="$COMIN/${OPREFIX}snogrb_t1534.3072.1536" + FNSNOG=${FNSNOG:-$COMIN_GES/${GPREFIX}snogrb_t${JCAP_CASE}.${LONB_CASE}.${LATB_CASE}} + [[ ! 
-f $FNSNOG ]] && FNSNOG="$COMIN_GES/${GPREFIX}snogrb_t1534.3072.1536" + + # Set CYCLVARS by checking grib date of current snogrb vs that of prev cycle + if [ $RUN_GETGES = "YES" ]; then + snoprv=$($GETGESSH -q -t snogrb_$JCAP_CASE -e $gesenvir -n $GDUMP -v $GDATE) + else + snoprv=${snoprv:-$FNSNOG} + fi + + if [ $($WGRIB -4yr $FNSNOA 2>/dev/null | grep -i snowc | awk -F: '{print $3}' | awk -F= '{print $2}') -le \ + $($WGRIB -4yr $snoprv 2>/dev/null | grep -i snowc | awk -F: '{print $3}' | awk -F= '{print $2}') ] ; then + export FNSNOA=" " + export CYCLVARS="FSNOL=99999.,FSNOS=99999.," + else + export SNOW_NUDGE_COEFF=${SNOW_NUDGE_COEFF:-0.} + export CYCLVARS="FSNOL=${SNOW_NUDGE_COEFF},$CYCLVARS" + fi + + if [ $DONST = "YES" ]; then + export GSI_FILE=${GSI_FILE:-$COMOUT/${APREFIX}dtfanl.nc} + else + export GSI_FILE="NULL" + fi + + if [ $DOIAU = "YES" ]; then + # update surface restarts at the beginning of the window, if IAU + # For now assume/hold dtfanl.nc valid at beginning of window + for n in $(seq 1 $ntiles); do + $NLN $COMIN_GES/RESTART/$bPDY.${bcyc}0000.sfc_data.tile${n}.nc $DATA/fnbgsi.00$n + $NLN $COMOUT/RESTART/$bPDY.${bcyc}0000.sfcanl_data.tile${n}.nc $DATA/fnbgso.00$n + $NLN $FIXfv3/$CASE/${CASE}_grid.tile${n}.nc $DATA/fngrid.00$n + $NLN $FIXfv3/$CASE/${CASE}_oro_data.tile${n}.nc $DATA/fnorog.00$n + done + + export APRUNCY=$APRUN_CYCLE + export OMP_NUM_THREADS_CY=$NTHREADS_CYCLE + export MAX_TASKS_CY=$ntiles + + $CYCLESH + export err=$?; err_chk + fi + # update surface restarts at middle of window + for n in $(seq 1 $ntiles); do + $NLN $COMIN_GES/RESTART/$PDY.${cyc}0000.sfc_data.tile${n}.nc $DATA/fnbgsi.00$n + $NLN $COMOUT/RESTART/$PDY.${cyc}0000.sfcanl_data.tile${n}.nc $DATA/fnbgso.00$n + $NLN $FIXfv3/$CASE/${CASE}_grid.tile${n}.nc $DATA/fngrid.00$n + $NLN $FIXfv3/$CASE/${CASE}_oro_data.tile${n}.nc $DATA/fnorog.00$n + done + + export APRUNCY=$APRUN_CYCLE + export OMP_NUM_THREADS_CY=$NTHREADS_CYCLE + export MAX_TASKS_CY=$ntiles + + $CYCLESH + export err=$?; err_chk +fi + + +############################################################## +# For eupd +if [ -s satbias_out.int ]; then + $NCP satbias_out.int $ABIASe +else + $NCP satbias_in $ABIASe +fi + +# Cat runtime output files. +cat fort.2* > $GSISTAT + +# If requested, create obsinput tarball from obs_input.* files +if [ $RUN_SELECT = "YES" ]; then + echo $(date) START tar obs_input >&2 + [[ -s obsinput.tar ]] && rm obsinput.tar + $NLN $SELECT_OBS obsinput.tar + ${CHGRP_CMD} obs_input.* + tar -cvf obsinput.tar obs_input.* + chmod 750 $SELECT_OBS + ${CHGRP_CMD} $SELECT_OBS + rm obsinput.tar + echo $(date) END tar obs_input >&2 +fi + +################################################################################ +# Send alerts +if [ $SENDDBN = "YES" ]; then + if [ $RUN = "gfs" ]; then + $DBNROOT/bin/dbn_alert MODEL GFS_abias $job $ABIAS + fi +fi + +################################################################################ +# Postprocessing +cd $pwd +[[ $mkdata = "YES" ]] && rm -rf $DATA + +############################################################## +# Add this statement to release the forecast job once the +# atmopsheric analysis and updated surface RESTARTS are +# available. 
Do not release forecast when RUN=enkf +############################################################## +if [ $SENDECF = "YES" -a "$RUN" != "enkf" ]; then + ecflow_client --event release_fcst +fi +echo "$CDUMP $CDATE atminc and tiled sfcanl done at `date`" > $COMOUT/${APREFIX}loginc.txt + +################################################################################ +set +x +if [ $VERBOSE = "YES" ]; then + echo $(date) EXITING $0 with return code $err >&2 +fi +exit $err + +################################################################################ diff --git a/scripts/exglobal_atmos_analysis_calc.sh b/scripts/exglobal_atmos_analysis_calc.sh new file mode 100755 index 0000000000..197cf120b0 --- /dev/null +++ b/scripts/exglobal_atmos_analysis_calc.sh @@ -0,0 +1,214 @@ +#!/bin/bash +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: exglobal_atmos_analysis_calc.sh +# Script description: Runs non-diagnostic file tasks after GSI analysis is performed +# +# Author: Cory Martin Org: NCEP/EMC Date: 2020-03-03 +# +# Abstract: This script wraps up analysis-related tasks after GSI exits successfully +# +# $Id$ +# +# Attributes: +# Language: POSIX shell +# Machine: WCOSS-Dell / Hera +# +################################################################################ + +# Set environment. +export VERBOSE=${VERBOSE:-"YES"} +if [ $VERBOSE = "YES" ]; then + echo $(date) EXECUTING $0 $* >&2 + set -x +fi + +# Directories. +pwd=$(pwd) +export FIXgsm=${FIXgsm:-$HOMEgfs/fix/fix_am} + +# Base variables +CDATE=${CDATE:-"2001010100"} +CDUMP=${CDUMP:-"gdas"} +GDUMP=${GDUMP:-"gdas"} + +# Derived base variables +GDATE=$($NDATE -$assim_freq $CDATE) +BDATE=$($NDATE -3 $CDATE) +PDY=$(echo $CDATE | cut -c1-8) +cyc=$(echo $CDATE | cut -c9-10) +bPDY=$(echo $BDATE | cut -c1-8) +bcyc=$(echo $BDATE | cut -c9-10) + +# Utilities +export NCP=${NCP:-"/bin/cp"} +export NMV=${NMV:-"/bin/mv"} +export NLN=${NLN:-"/bin/ln -sf"} +export CHGRP_CMD=${CHGRP_CMD:-"chgrp ${group_name:-rstprod}"} +export NCLEN=${NCLEN:-$HOMEgfs/ush/getncdimlen} +COMPRESS=${COMPRESS:-gzip} +UNCOMPRESS=${UNCOMPRESS:-gunzip} +APRUNCFP=${APRUNCFP:-""} + +# Diagnostic files options +netcdf_diag=${netcdf_diag:-".true."} +binary_diag=${binary_diag:-".false."} + +# IAU +DOIAU=${DOIAU:-"NO"} +export IAUFHRS=${IAUFHRS:-"6"} + +# Dependent Scripts and Executables +export NTHREADS_CALCINC=${NTHREADS_CALCINC:-1} +export APRUN_CALCINC=${APRUN_CALCINC:-${APRUN:-""}} +export APRUN_CALCANL=${APRUN_CALCANL:-${APRUN:-""}} +export APRUN_CHGRES=${APRUN_CALCANL:-${APRUN:-""}} + +export CALCANLEXEC=${CALCANLEXEC:-$HOMEgfs/exec/calc_analysis.x} +export CHGRESNCEXEC=${CHGRESNCEXEC:-$HOMEgfs/exec/enkf_chgres_recenter_nc.x} +export CHGRESINCEXEC=${CHGRESINCEXEC:-$HOMEgfs/exec/interp_inc.x} +export NTHREADS_CHGRES=${NTHREADS_CHGRES:-1} +CALCINCPY=${CALCINCPY:-$HOMEgfs/ush/calcinc_gfs.py} +CALCANLPY=${CALCANLPY:-$HOMEgfs/ush/calcanl_gfs.py} + +DOGAUSFCANL=${DOGAUSFCANL-"NO"} +GAUSFCANLSH=${GAUSFCANLSH:-$HOMEgfs/ush/gaussian_sfcanl.sh} +export GAUSFCANLEXE=${GAUSFCANLEXE:-$HOMEgfs/exec/gaussian_sfcanl.exe} +NTHREADS_GAUSFCANL=${NTHREADS_GAUSFCANL:-1} +APRUN_GAUSFCANL=${APRUN_GAUSFCANL:-${APRUN:-""}} + +# OPS flags +RUN=${RUN:-""} +SENDECF=${SENDECF:-"NO"} +SENDDBN=${SENDDBN:-"NO"} + +# Guess files +GPREFIX=${GPREFIX:-""} +GSUFFIX=${GSUFFIX:-$SUFFIX} +ATMG03=${ATMG03:-${COMIN_GES}/${GPREFIX}atmf003${GSUFFIX}} +ATMG04=${ATMG04:-${COMIN_GES}/${GPREFIX}atmf004${GSUFFIX}} 
+ATMG05=${ATMG05:-${COMIN_GES}/${GPREFIX}atmf005${GSUFFIX}} +ATMGES=${ATMGES:-${COMIN_GES}/${GPREFIX}atmf006${GSUFFIX}} +ATMG07=${ATMG07:-${COMIN_GES}/${GPREFIX}atmf007${GSUFFIX}} +ATMG08=${ATMG08:-${COMIN_GES}/${GPREFIX}atmf008${GSUFFIX}} +ATMG09=${ATMG09:-${COMIN_GES}/${GPREFIX}atmf009${GSUFFIX}} + +# Analysis files +export APREFIX=${APREFIX:-""} +export ASUFFIX=${ASUFFIX:-$SUFFIX} +SFCANL=${SFCANL:-${COMOUT}/${APREFIX}sfcanl${ASUFFIX}} +DTFANL=${DTFANL:-${COMOUT}/${APREFIX}dtfanl.nc} +ATMANL=${ATMANL:-${COMOUT}/${APREFIX}atmanl${ASUFFIX}} + +# Increment files +ATMINC=${ATMINC:-${COMOUT}/${APREFIX}atminc.nc} + +# Set script / GSI control parameters +DOHYBVAR=${DOHYBVAR:-"NO"} +lrun_subdirs=${lrun_subdirs:-".true."} +if [ $DOHYBVAR = "YES" ]; then + l_hyb_ens=.true. + export l4densvar=${l4densvar:-".false."} + export lwrite4danl=${lwrite4danl:-".false."} +else + l_hyb_ens=.false. + export l4densvar=.false. + export lwrite4danl=.false. +fi + +# Set 4D-EnVar specific variables +if [ $DOHYBVAR = "YES" -a $l4densvar = ".true." -a $lwrite4danl = ".true." ]; then + ATMA03=${ATMA03:-${COMOUT}/${APREFIX}atma003${ASUFFIX}} + ATMI03=${ATMI03:-${COMOUT}/${APREFIX}atmi003.nc} + ATMA04=${ATMA04:-${COMOUT}/${APREFIX}atma004${ASUFFIX}} + ATMI04=${ATMI04:-${COMOUT}/${APREFIX}atmi004.nc} + ATMA05=${ATMA05:-${COMOUT}/${APREFIX}atma005${ASUFFIX}} + ATMI05=${ATMI05:-${COMOUT}/${APREFIX}atmi005.nc} + ATMA07=${ATMA07:-${COMOUT}/${APREFIX}atma007${ASUFFIX}} + ATMI07=${ATMI07:-${COMOUT}/${APREFIX}atmi007.nc} + ATMA08=${ATMA08:-${COMOUT}/${APREFIX}atma008${ASUFFIX}} + ATMI08=${ATMI08:-${COMOUT}/${APREFIX}atmi008.nc} + ATMA09=${ATMA09:-${COMOUT}/${APREFIX}atma009${ASUFFIX}} + ATMI09=${ATMI09:-${COMOUT}/${APREFIX}atmi009.nc} +fi + +################################################################################ +################################################################################ +# Preprocessing +mkdata=NO +if [ ! -d $DATA ]; then + mkdata=YES + mkdir -p $DATA +fi + +cd $DATA || exit 99 + +################################################################################ +# Clean the run-directory +rm -rf dir.* + +############################################################## +# If analysis increment is written by GSI, produce an analysis file here +if [ $DO_CALC_ANALYSIS == "YES" ]; then + # link analysis and increment files + $NLN $ATMANL siganl + $NLN $ATMINC siginc.nc + if [ $DOHYBVAR = "YES" -a $l4densvar = ".true." -a $lwrite4danl = ".true." 
]; then + $NLN $ATMA03 siga03 + $NLN $ATMI03 sigi03.nc + $NLN $ATMA04 siga04 + $NLN $ATMI04 sigi04.nc + $NLN $ATMA05 siga05 + $NLN $ATMI05 sigi05.nc + $NLN $ATMA07 siga07 + $NLN $ATMI07 sigi07.nc + $NLN $ATMA08 siga08 + $NLN $ATMI08 sigi08.nc + $NLN $ATMA09 siga09 + $NLN $ATMI09 sigi09.nc + fi + # link guess files + $NLN $ATMG03 sigf03 + $NLN $ATMGES sigf06 + $NLN $ATMG09 sigf09 + + [[ -f $ATMG04 ]] && $NLN $ATMG04 sigf04 + [[ -f $ATMG05 ]] && $NLN $ATMG05 sigf05 + [[ -f $ATMG07 ]] && $NLN $ATMG07 sigf07 + [[ -f $ATMG08 ]] && $NLN $ATMG08 sigf08 + + # Link hourly backgrounds (if present) + if [ -f $ATMG04 -a -f $ATMG05 -a -f $ATMG07 -a -f $ATMG08 ]; then + nhr_obsbin=1 + fi + + $CALCANLPY + export err=$?; err_chk +else + echo "Neither increment nor analysis are generated by external utils" +fi + +############################################################## +# Create gaussian grid surface analysis file at middle of window +if [ $DOGAUSFCANL = "YES" ]; then + export APRUNSFC=$APRUN_GAUSFCANL + export OMP_NUM_THREADS_SFC=$NTHREADS_GAUSFCANL + + $GAUSFCANLSH + export err=$?; err_chk +fi + +echo "$CDUMP $CDATE atmanl and sfcanl done at `date`" > $COMOUT/${APREFIX}loganl.txt + +################################################################################ +# Postprocessing +cd $pwd +[[ $mkdata = "YES" ]] && rm -rf $DATA + +set +x +if [ $VERBOSE = "YES" ]; then + echo $(date) EXITING $0 with return code $err >&2 +fi +exit $err + diff --git a/scripts/exglobal_diag.sh b/scripts/exglobal_diag.sh new file mode 100755 index 0000000000..3ed2a5bf7b --- /dev/null +++ b/scripts/exglobal_diag.sh @@ -0,0 +1,295 @@ +#!/bin/bash +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: exglobal_diag.sh +# Script description: Creates diagnostic files after GSI analysis is performed +# +# Author: Cory Martin Org: NCEP/EMC Date: 2020-03-03 +# +# Abstract: This script creates GSI diagnostic files after GSI exits successfully +# +# $Id$ +# +# Attributes: +# Language: POSIX shell +# Machine: WCOSS-Dell / Hera +# +################################################################################ + +# Set environment. +export VERBOSE=${VERBOSE:-"YES"} +if [[ "$VERBOSE" = "YES" ]]; then + echo $(date) EXECUTING $0 $* >&2 + set -x +fi + +# Directories. 
+pwd=$(pwd) + +# Base variables +CDATE=${CDATE:-"2001010100"} +CDUMP=${CDUMP:-"gdas"} +GDUMP=${GDUMP:-"gdas"} + +# Derived base variables +GDATE=$($NDATE -$assim_freq $CDATE) +BDATE=$($NDATE -3 $CDATE) +PDY=$(echo $CDATE | cut -c1-8) +cyc=$(echo $CDATE | cut -c9-10) +bPDY=$(echo $BDATE | cut -c1-8) +bcyc=$(echo $BDATE | cut -c9-10) + +# Utilities +export NCP=${NCP:-"/bin/cp"} +export NMV=${NMV:-"/bin/mv"} +export NLN=${NLN:-"/bin/ln -sf"} +export CHGRP_CMD=${CHGRP_CMD:-"chgrp ${group_name:-rstprod}"} +export NCLEN=${NCLEN:-$HOMEgfs/ush/getncdimlen} +export CATEXEC=${CATEXEC:-$HOMEgfs/exec/ncdiag_cat_serial.x} +COMPRESS=${COMPRESS:-gzip} +UNCOMPRESS=${UNCOMPRESS:-gunzip} +APRUNCFP=${APRUNCFP:-""} + +# Diagnostic files options +netcdf_diag=${netcdf_diag:-".true."} +binary_diag=${binary_diag:-".false."} + +# OPS flags +RUN=${RUN:-""} +SENDECF=${SENDECF:-"NO"} +SENDDBN=${SENDDBN:-"NO"} + +# Guess files + +# Analysis files +export APREFIX=${APREFIX:-""} +export ASUFFIX=${ASUFFIX:-$SUFFIX} +RADSTAT=${RADSTAT:-${COMOUT}/${APREFIX}radstat} +PCPSTAT=${PCPSTAT:-${COMOUT}/${APREFIX}pcpstat} +CNVSTAT=${CNVSTAT:-${COMOUT}/${APREFIX}cnvstat} +OZNSTAT=${OZNSTAT:-${COMOUT}/${APREFIX}oznstat} + +# Remove stat file if file already exists +[[ -s $RADSTAT ]] && rm -f $RADSTAT +[[ -s $PCPSTAT ]] && rm -f $PCPSTAT +[[ -s $CNVSTAT ]] && rm -f $CNVSTAT +[[ -s $OZNSTAT ]] && rm -f $OZNSTAT + +# Obs diag +GENDIAG=${GENDIAG:-"YES"} +DIAG_SUFFIX=${DIAG_SUFFIX:-""} +if [ $netcdf_diag = ".true." ] ; then + DIAG_SUFFIX="${DIAG_SUFFIX}.nc4" +fi +DIAG_COMPRESS=${DIAG_COMPRESS:-"YES"} +DIAG_TARBALL=${DIAG_TARBALL:-"YES"} +USE_CFP=${USE_CFP:-"NO"} +CFP_MP=${CFP_MP:-"NO"} +nm="" +if [ $CFP_MP = "YES" ]; then + nm=0 +fi +DIAG_DIR=${DIAG_DIR:-${COMOUT}/gsidiags} +REMOVE_DIAG_DIR=${REMOVE_DIAG_DIR:-"NO"} + +# Set script / GSI control parameters +lrun_subdirs=${lrun_subdirs:-".true."} + + +################################################################################ +# If requested, generate diagnostic files +if [ $GENDIAG = "YES" ] ; then + if [ $lrun_subdirs = ".true." ] ; then + for pe in $DIAG_DIR/dir.*; do + pedir="$(basename -- $pe)" + $NLN $pe $DATA/$pedir + done + else + err_exit "***FATAL ERROR*** lrun_subdirs must be true. Abort job" + fi + + # Set up lists and variables for various types of diagnostic files. 
+ ntype=3 + + diagtype[0]="conv conv_gps conv_ps conv_pw conv_q conv_sst conv_t conv_tcp conv_uv conv_spd" + diagtype[1]="pcp_ssmi_dmsp pcp_tmi_trmm" + diagtype[2]="sbuv2_n16 sbuv2_n17 sbuv2_n18 sbuv2_n19 gome_metop-a gome_metop-b omi_aura mls30_aura ompsnp_npp ompstc8_npp ompstc8_n20 ompsnp_n20 ompstc8_n21 ompsnp_n21 ompslp_npp gome_metop-c" + diagtype[3]="hirs2_n14 msu_n14 sndr_g08 sndr_g11 sndr_g12 sndr_g13 sndr_g08_prep sndr_g11_prep sndr_g12_prep sndr_g13_prep sndrd1_g11 sndrd2_g11 sndrd3_g11 sndrd4_g11 sndrd1_g12 sndrd2_g12 sndrd3_g12 sndrd4_g12 sndrd1_g13 sndrd2_g13 sndrd3_g13 sndrd4_g13 sndrd1_g14 sndrd2_g14 sndrd3_g14 sndrd4_g14 sndrd1_g15 sndrd2_g15 sndrd3_g15 sndrd4_g15 hirs3_n15 hirs3_n16 hirs3_n17 amsua_n15 amsua_n16 amsua_n17 amsub_n15 amsub_n16 amsub_n17 hsb_aqua airs_aqua amsua_aqua imgr_g08 imgr_g11 imgr_g12 imgr_g14 imgr_g15 ssmi_f13 ssmi_f15 hirs4_n18 hirs4_metop-a amsua_n18 amsua_metop-a mhs_n18 mhs_metop-a amsre_low_aqua amsre_mid_aqua amsre_hig_aqua ssmis_f16 ssmis_f17 ssmis_f18 ssmis_f19 ssmis_f20 iasi_metop-a hirs4_n19 amsua_n19 mhs_n19 seviri_m08 seviri_m09 seviri_m10 seviri_m11 cris_npp cris-fsr_npp cris-fsr_n20 atms_npp atms_n20 hirs4_metop-b amsua_metop-b mhs_metop-b iasi_metop-b avhrr_metop-b avhrr_n18 avhrr_n19 avhrr_metop-a amsr2_gcom-w1 gmi_gpm saphir_meghat ahi_himawari8 abi_g16 abi_g17 amsua_metop-c mhs_metop-c iasi_metop-c avhrr_metop-c viirs-m_npp viirs-m_j1 abi_g18 ahi_himawari9 viirs-m_j2 cris-fsr_n21 atms_n21" + + diaglist[0]=listcnv + diaglist[1]=listpcp + diaglist[2]=listozn + diaglist[3]=listrad + + diagfile[0]=$CNVSTAT + diagfile[1]=$PCPSTAT + diagfile[2]=$OZNSTAT + diagfile[3]=$RADSTAT + + numfile[0]=0 + numfile[1]=0 + numfile[2]=0 + numfile[3]=0 + + # Set diagnostic file prefix based on lrun_subdirs variable + if [ $lrun_subdirs = ".true." ]; then + prefix=" dir.*/" + else + prefix="pe*" + fi + + if [ $USE_CFP = "YES" ]; then + [[ -f $DATA/diag.sh ]] && rm $DATA/diag.sh + [[ -f $DATA/mp_diag.sh ]] && rm $DATA/mp_diag.sh + cat > $DATA/diag.sh << EOFdiag +#!/bin/sh +lrun_subdirs=\$1 +binary_diag=\$2 +type=\$3 +loop=\$4 +string=\$5 +CDATE=\$6 +DIAG_COMPRESS=\$7 +DIAG_SUFFIX=\$8 +if [ \$lrun_subdirs = ".true." ]; then + prefix=" dir.*/" +else + prefix="pe*" +fi +file=diag_\${type}_\${string}.\${CDATE}\${DIAG_SUFFIX} +if [ \$binary_diag = ".true." ]; then + cat \${prefix}\${type}_\${loop}* > \$file +else + $CATEXEC -o \$file \${prefix}\${type}_\${loop}* +fi +if [ \$DIAG_COMPRESS = "YES" ]; then + $COMPRESS \$file +fi +EOFdiag + chmod 755 $DATA/diag.sh + fi + + # Collect diagnostic files as a function of loop and type. + # Loop over first and last outer loops to generate innovation + # diagnostic files for indicated observation types (groups) + # + # NOTE: Since we set miter=2 in GSI namelist SETUP, outer + # loop 03 will contain innovations with respect to + # the analysis. Creation of o-a innovation files + # is triggered by write_diag(3)=.true. The setting + # write_diag(1)=.true. turns on creation of o-g + # innovation files. 
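For reference, a minimal self-contained sketch of the file naming the loop below produces; the date and observation type are hypothetical examples, not values set by this script:

    CDATE=2024082012              # hypothetical cycle date
    type=conv_uv                  # hypothetical obs type from diagtype[0]
    DIAG_SUFFIX=".nc4"            # as set above when netcdf_diag=.true.
    for loop in 01 03; do
      case $loop in
        01) string=ges;;          # o-g innovations (write_diag(1)=.true.)
        03) string=anl;;          # o-a innovations (write_diag(3)=.true.)
      esac
      echo "diag_${type}_${string}.${CDATE}${DIAG_SUFFIX}"
    done
    # prints: diag_conv_uv_ges.2024082012.nc4
    #         diag_conv_uv_anl.2024082012.nc4
    # each file is additionally gzipped to *.gz when DIAG_COMPRESS=YES
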
+ + loops="01 03" + for loop in $loops; do + case $loop in + 01) string=ges;; + 03) string=anl;; + *) string=$loop;; + esac + echo $(date) START loop $string >&2 + n=-1 + while [ $((n+=1)) -le $ntype ] ;do + for type in $(echo ${diagtype[n]}); do + count=$(ls ${prefix}${type}_${loop}* 2>/dev/null | wc -l) + if [ $count -gt 1 ]; then + if [ $USE_CFP = "YES" ]; then + echo "$nm $DATA/diag.sh $lrun_subdirs $binary_diag $type $loop $string $CDATE $DIAG_COMPRESS $DIAG_SUFFIX" | tee -a $DATA/mp_diag.sh + if [ ${CFP_MP:-"NO"} = "YES" ]; then + nm=$((nm+1)) + fi + else + if [ $binary_diag = ".true." ]; then + cat ${prefix}${type}_${loop}* > diag_${type}_${string}.${CDATE}${DIAG_SUFFIX} + else + $CATEXEC -o diag_${type}_${string}.${CDATE}${DIAG_SUFFIX} ${prefix}${type}_${loop}* + fi + fi + echo "diag_${type}_${string}.${CDATE}*" >> ${diaglist[n]} + numfile[n]=$(expr ${numfile[n]} + 1) + elif [ $count -eq 1 ]; then + cat ${prefix}${type}_${loop}* > diag_${type}_${string}.${CDATE}${DIAG_SUFFIX} + if [ $DIAG_COMPRESS = "YES" ]; then + $COMPRESS diag_${type}_${string}.${CDATE}${DIAG_SUFFIX} + fi + echo "diag_${type}_${string}.${CDATE}*" >> ${diaglist[n]} + numfile[n]=$(expr ${numfile[n]} + 1) + fi + done + done + echo $(date) END loop $string >&2 + done + + # We should already be in $DATA, but extra cd to be sure. + cd $DATA + + # If requested, compress diagnostic files + if [ $DIAG_COMPRESS = "YES" -a $USE_CFP = "NO" ]; then + echo $(date) START $COMPRESS diagnostic files >&2 + for file in $(ls diag_*${CDATE}${DIAG_SUFFIX}); do + $COMPRESS $file + done + echo $(date) END $COMPRESS diagnostic files >&2 + fi + + if [ $USE_CFP = "YES" ] ; then + chmod 755 $DATA/mp_diag.sh + ncmd=$(cat $DATA/mp_diag.sh | wc -l) + if [ $ncmd -gt 0 ]; then + ncmd_max=$((ncmd < npe_node_max ? ncmd : npe_node_max)) + APRUNCFP_DIAG=$(eval echo $APRUNCFP) + $APRUNCFP_DIAG $DATA/mp_diag.sh + export err=$?; err_chk + fi + fi + + # Restrict diagnostic files containing rstprod data + rlist="conv_gps conv_ps conv_pw conv_q conv_sst conv_t conv_uv saphir" + for rtype in $rlist; do + ${CHGRP_CMD} *${rtype}* + done + + # If requested, create diagnostic file tarballs + if [ $DIAG_TARBALL = "YES" ]; then + echo $(date) START tar diagnostic files >&2 + n=-1 + while [ $((n+=1)) -le $ntype ] ;do + TAROPTS="-uvf" + if [ ! -s ${diagfile[n]} ]; then + TAROPTS="-cvf" + fi + if [ ${numfile[n]} -gt 0 ]; then + tar $TAROPTS ${diagfile[n]} $(cat ${diaglist[n]}) + export err=$?; err_chk + fi + done + + # Restrict CNVSTAT + chmod 750 $CNVSTAT + ${CHGRP_CMD} $CNVSTAT + + # Restrict RADSTAT + chmod 750 $RADSTAT + ${CHGRP_CMD} $RADSTAT + + echo $(date) END tar diagnostic files >&2 + fi +fi # End diagnostic file generation block - if [ $GENDIAG = "YES" ] + +################################################################################ +# Postprocessing +# If no processing error, remove $DIAG_DIR +if [[ "$REMOVE_DIAG_DIR" = "YES" && "$err" = "0" ]]; then + rm -rf $DIAG_DIR +fi + +cd $pwd +[[ $mkdata = "YES" ]] && rm -rf $DATA + +set +x +if [[ "$VERBOSE" = "YES" ]]; then + echo $(date) EXITING $0 with return code $err >&2 +fi +exit $err + From b3ce33491676bbcfce1ae35a8d5758242e70d222 Mon Sep 17 00:00:00 2001 From: Andrew Collard Date: Tue, 20 Aug 2024 21:47:55 +0000 Subject: [PATCH 02/22] Check out GSI develop --- sorc/checkout.sh | 2 ++ 1 file changed, 2 insertions(+) diff --git a/sorc/checkout.sh b/sorc/checkout.sh index d7bef93330..6a8a6ad816 100755 --- a/sorc/checkout.sh +++ b/sorc/checkout.sh @@ -35,6 +35,8 @@ fi echo gsi checkout ... if [[ ! 
-d gsi.fd ]] ; then rm -f ${topdir}/checkout-gsi.log +# git clone --recursive --branch gfsda.v16.3.12 https://github.com/NOAA-EMC/GSI.git gsi.fd >> ${topdir}/checkout-gsi.log 2>&1 +# Check out develop for now git clone --recursive --branch gfsda.v16.3.12 https://github.com/NOAA-EMC/GSI.git gsi.fd >> ${topdir}/checkout-gsi.log 2>&1 cd gsi.fd git submodule update --init From 6d1ab29b1fb736090b0626fbec5112905477e828 Mon Sep 17 00:00:00 2001 From: Andrew Collard Date: Tue, 20 Aug 2024 21:53:35 +0000 Subject: [PATCH 03/22] Check out GSI develop --- sorc/checkout.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/checkout.sh b/sorc/checkout.sh index 6a8a6ad816..2f568a0595 100755 --- a/sorc/checkout.sh +++ b/sorc/checkout.sh @@ -37,7 +37,7 @@ if [[ ! -d gsi.fd ]] ; then rm -f ${topdir}/checkout-gsi.log # git clone --recursive --branch gfsda.v16.3.12 https://github.com/NOAA-EMC/GSI.git gsi.fd >> ${topdir}/checkout-gsi.log 2>&1 # Check out develop for now - git clone --recursive --branch gfsda.v16.3.12 https://github.com/NOAA-EMC/GSI.git gsi.fd >> ${topdir}/checkout-gsi.log 2>&1 + git clone --recursive https://github.com/NOAA-EMC/GSI.git gsi.fd >> ${topdir}/checkout-gsi.log 2>&1 cd gsi.fd git submodule update --init cd ${topdir} From bc683d8ba8c31a18ab59b45db34e44013b302d64 Mon Sep 17 00:00:00 2001 From: Andrew Collard Date: Wed, 21 Aug 2024 15:56:11 +0000 Subject: [PATCH 04/22] Add Enkf scripts and jobs --- .gitignore | 23 -- jobs/JGDAS_ATMOS_CHGRES_FORENKF | 132 ++++++++ jobs/JGDAS_ATMOS_VERFOZN | 120 ++++++++ jobs/JGDAS_ATMOS_VERFRAD | 136 +++++++++ jobs/JGDAS_ATMOS_VMINMON | 104 +++++++ jobs/JGDAS_ENKF_DIAG | 182 +++++++++++ jobs/JGDAS_ENKF_ECEN | 138 +++++++++ jobs/JGDAS_ENKF_FCST | 140 +++++++++ jobs/JGDAS_ENKF_POST | 112 +++++++ jobs/JGDAS_ENKF_SELECT_OBS | 192 ++++++++++++ jobs/JGDAS_ENKF_SFC | 139 +++++++++ jobs/JGDAS_ENKF_UPDATE | 123 ++++++++ jobs/JGFS_ATMOS_VMINMON | 102 +++++++ scripts/exgdas_atmos_chgres_forenkf.sh | 211 +++++++++++++ scripts/exgdas_atmos_verfozn.sh | 100 ++++++ scripts/exgdas_atmos_verfrad.sh | 228 ++++++++++++++ scripts/exgdas_atmos_vminmon.sh | 131 ++++++++ scripts/exgdas_enkf_ecen.sh | 376 +++++++++++++++++++++++ scripts/exgdas_enkf_fcst.sh | 235 ++++++++++++++ scripts/exgdas_enkf_post.sh | 169 +++++++++++ scripts/exgdas_enkf_select_obs.sh | 127 ++++++++ scripts/exgdas_enkf_sfc.sh | 203 +++++++++++++ scripts/exgdas_enkf_update.sh | 405 +++++++++++++++++++++++++ scripts/exgfs_atmos_vminmon.sh | 127 ++++++++ 24 files changed, 3932 insertions(+), 23 deletions(-) create mode 100755 jobs/JGDAS_ATMOS_CHGRES_FORENKF create mode 100755 jobs/JGDAS_ATMOS_VERFOZN create mode 100755 jobs/JGDAS_ATMOS_VERFRAD create mode 100755 jobs/JGDAS_ATMOS_VMINMON create mode 100755 jobs/JGDAS_ENKF_DIAG create mode 100755 jobs/JGDAS_ENKF_ECEN create mode 100755 jobs/JGDAS_ENKF_FCST create mode 100755 jobs/JGDAS_ENKF_POST create mode 100755 jobs/JGDAS_ENKF_SELECT_OBS create mode 100755 jobs/JGDAS_ENKF_SFC create mode 100755 jobs/JGDAS_ENKF_UPDATE create mode 100755 jobs/JGFS_ATMOS_VMINMON create mode 100755 scripts/exgdas_atmos_chgres_forenkf.sh create mode 100755 scripts/exgdas_atmos_verfozn.sh create mode 100755 scripts/exgdas_atmos_verfrad.sh create mode 100755 scripts/exgdas_atmos_vminmon.sh create mode 100755 scripts/exgdas_enkf_ecen.sh create mode 100755 scripts/exgdas_enkf_fcst.sh create mode 100755 scripts/exgdas_enkf_post.sh create mode 100755 scripts/exgdas_enkf_select_obs.sh create mode 100755 scripts/exgdas_enkf_sfc.sh create mode 100755 
scripts/exgdas_enkf_update.sh create mode 100755 scripts/exgfs_atmos_vminmon.sh diff --git a/.gitignore b/.gitignore index 0f38550c05..27cf8b2f8b 100644 --- a/.gitignore +++ b/.gitignore @@ -94,19 +94,7 @@ sorc/wafs_setmissing.fd # Ignore scripts from externals #------------------------------ # jobs symlinks -jobs/JGDAS_ATMOS_CHGRES_FORENKF jobs/JGDAS_ATMOS_GLDAS -jobs/JGDAS_ATMOS_VERFOZN -jobs/JGDAS_ATMOS_VERFRAD -jobs/JGDAS_ATMOS_VMINMON -jobs/JGDAS_ENKF_DIAG -jobs/JGDAS_ENKF_ECEN -jobs/JGDAS_ENKF_FCST -jobs/JGDAS_ENKF_POST -jobs/JGDAS_ENKF_SELECT_OBS -jobs/JGDAS_ENKF_SFC -jobs/JGDAS_ENKF_UPDATE -jobs/JGFS_ATMOS_VMINMON jobs/JGFS_ATMOS_WAFS jobs/JGFS_ATMOS_WAFS_BLENDING jobs/JGFS_ATMOS_WAFS_BLENDING_0P25 @@ -117,20 +105,9 @@ jobs/JGLOBAL_ATMOS_NCEPPOST jobs/JGLOBAL_ATMOS_POST_MANAGER # scripts symlinks scripts/exemcsfc_global_sfc_prep.sh -scripts/exgdas_atmos_chgres_forenkf.sh scripts/exgdas_atmos_gldas.sh scripts/exgdas_atmos_nceppost.sh -scripts/exgdas_atmos_verfozn.sh -scripts/exgdas_atmos_verfrad.sh -scripts/exgdas_atmos_vminmon.sh -scripts/exgdas_enkf_ecen.sh -scripts/exgdas_enkf_fcst.sh -scripts/exgdas_enkf_post.sh -scripts/exgdas_enkf_select_obs.sh -scripts/exgdas_enkf_sfc.sh -scripts/exgdas_enkf_update.sh scripts/exgfs_atmos_nceppost.sh -scripts/exgfs_atmos_vminmon.sh scripts/exgfs_atmos_wafs_blending.sh scripts/exgfs_atmos_wafs_blending_0p25.sh scripts/exgfs_atmos_wafs_gcip.sh diff --git a/jobs/JGDAS_ATMOS_CHGRES_FORENKF b/jobs/JGDAS_ATMOS_CHGRES_FORENKF new file mode 100755 index 0000000000..cf69bdd770 --- /dev/null +++ b/jobs/JGDAS_ATMOS_CHGRES_FORENKF @@ -0,0 +1,132 @@ +#!/bin/bash +set -x + +export RUN_ENVIR=${RUN_ENVIR:-"nco"} +export PS4='$SECONDS + ' +date + + +############################# +# Source relevant config files +############################# +export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} +configs="base anal echgres" +config_path=${EXPDIR:-$NWROOT/gfs.${gfs_ver}/parm/config} +for config in $configs; do + . $config_path/config.$config + status=$? + [[ $status -ne 0 ]] && exit $status +done + + +########################################## +# Source machine runtime environment +########################################## +. $HOMEgfs/env/${machine}.env anal +status=$? +[[ $status -ne 0 ]] && exit $status + + +############################################## +# Obtain unique process id (pid) and make temp directory +############################################## +export pid=${pid:-$$} +export outid=${outid:-"LL$job"} + +export DATA=${DATA:-${DATAROOT}/${jobid:?}} +mkdir -p $DATA +cd $DATA + + +############################################## +# Run setpdy and initialize PDY variables +############################################## +export cycle="t${cyc}z" +setpdy.sh +. ./PDY + + +############################################## +# Determine Job Output Name on System +############################################## +export pgmout="OUTPUT.${pid}" +export pgmerr=errfile + + +############################################## +# Set variables used in the script +############################################## +export CDATE=${CDATE:-${PDY}${cyc}} +export CDUMP=${CDUMP:-${RUN:-"gfs"}} +export COMPONENT=${COMPONENT:-atmos} +export DO_CALC_ANALYSIS=${DO_CALC_ANALYSIS:-"YES"} + + +############################################## +# Begin JOB SPECIFIC work +############################################## + +GDATE=$($NDATE -$assim_freq $CDATE) +gPDY=$(echo $GDATE | cut -c1-8) +gcyc=$(echo $GDATE | cut -c9-10) +GDUMP=${GDUMP:-"gdas"} + +export OPREFIX="${CDUMP}.t${cyc}z." 
+export GPREFIX="${GDUMP}.t${gcyc}z." +export APREFIX="${CDUMP}.t${cyc}z." +export GSUFFIX=${GSUFFIX:-$SUFFIX} +export ASUFFIX=${ASUFFIX:-$SUFFIX} + + +if [ $RUN_ENVIR = "nco" -o ${ROTDIR_DUMP:-NO} = "YES" ]; then + export COMIN=${COMIN:-$ROTDIR/$RUN.$PDY/$cyc/$COMPONENT} + export COMOUT=${COMOUT:-$ROTDIR/$RUN.$PDY/$cyc/$COMPONENT} + export COMOUT_ENS=${COMOUT_ENS:-$ROTDIR/enkfgdas.$PDY/$cyc/$COMPONENT} + export COMIN_OBS=${COMIN_OBS:-$(compath.py ${envir}/obsproc/${obsproc_ver})/$RUN.$PDY/$cyc/$COMPONENT} + export COMIN_GES_OBS=${COMIN_GES_OBS:-$(compath.py ${envir}/obsproc/${obsproc_ver})/$GDUMP.$gPDY/$gcyc/$COMPONENT} +else + export COMOUT="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" + export COMOUT_ENS="$ROTDIR/enkfgdas.$PDY/$cyc/$COMPONENT" + export COMIN_OBS="$DMPDIR/$CDUMP.$PDY/$cyc/$COMPONENT" + export COMIN_GES_OBS="$DMPDIR/$GDUMP.$gPDY/$gcyc/$COMPONENT" +fi +mkdir -m 775 -p $COMOUT +# COMIN_GES and COMIN_GES_ENS are used in script +export COMIN_GES="$ROTDIR/$GDUMP.$gPDY/$gcyc/$COMPONENT" +export COMIN_GES_ENS="$ROTDIR/enkfgdas.$gPDY/$gcyc/$COMPONENT" + +############################################################### +# Run relevant script +env +echo "HAS BEGUN on $(hostname)" +$LOGSCRIPT + + +${CHGRESFCSTSH:-$SCRgfs/exgdas_atmos_chgres_forenkf.sh} +status=$? +[[ $status -ne 0 ]] && exit $status + + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [ -e "$pgmout" ] ; then + cat $pgmout +fi + + +echo "ENDED NORMALLY." + + +########################################## +# Remove the Temporary working directory +########################################## +cd $DATAROOT +[[ $KEEPDATA = "NO" ]] && rm -rf $DATA + +date +exit 0 diff --git a/jobs/JGDAS_ATMOS_VERFOZN b/jobs/JGDAS_ATMOS_VERFOZN new file mode 100755 index 0000000000..9071beaf30 --- /dev/null +++ b/jobs/JGDAS_ATMOS_VERFOZN @@ -0,0 +1,120 @@ +#!/bin/sh +############################################################# +# Set up environment for GDAS Ozone Monitor job +############################################################# +set -xa +echo `date` $0 `date -u` begin +export PS4='$SECONDS + ' + +############################### +# Specify NET, RUN, and COMPONENT name +############################## +export NET=${NET:-gfs} +export RUN=${RUN:-gdas} +export COMPONENT=${COMPONENT:-atmos} + +########################################################### +# obtain unique process id (pid) and make temp directories +########################################################### +export pid=$$ +export outid=${outid:-"LL$job"} +export DATA=${DATA:-${DATAROOT}/${jobid:?}} + +export OZNMON_SUFFIX=${OZNMON_SUFFIX:-${NET}} + +mkdir -p ${DATA} +cd ${DATA} + + +#################################### +# Determine Job Output Name on System +#################################### +export pgmout="OUTPUT.${pid}" +export pgmerr=errfile +export cycle=t${cyc}z + + +############################################## +# Run setpdy and initialize PDY variables +############################################## +setpdy.sh +. 
./PDY + + +#--------------------------------------------- +# Specify Execution Areas +# +export HOMEgfs_ozn=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} +export HOMEgdas_ozn=${HOMEgfs_ozn:-${NWROOT}/gfs.${gfs_ver}} +export PARMgdas_ozn=${PARMgfs_ozn:-$HOMEgfs_ozn/parm/mon} +export SCRgdas_ozn=${SCRgfs_ozn:-$HOMEgfs_ozn/scripts} +export FIXgdas_ozn=${FIXgfs_ozn:-$HOMEgfs_ozn/fix/gdas} + +export HOMEoznmon=${HOMEoznmon:-${HOMEgfs_ozn}} +export EXECoznmon=${EXECoznmon:-$HOMEoznmon/exec} +export FIXoznmon=${FIXoznmon:-${HOMEoznmon}/fix} +export USHoznmon=${USHoznmon:-$HOMEoznmon/ush} + + +#----------------------------------- +# source the parm file +# +. ${PARMgdas_ozn}/gdas_oznmon.parm + + +############################################# +# determine PDY and cyc for previous cycle +############################################# + +cdate=`${NDATE} -6 ${PDY}${cyc}` +echo 'pdate = ${pdate}' + +export P_PDY=`echo ${cdate} | cut -c1-8` +export p_cyc=`echo ${cdate} | cut -c9-10` + +#--------------------------------------------- +# OZN_TANKDIR - WHERE OUTPUT DATA WILL RESIDE +# +export OZN_TANKDIR=${OZN_TANKDIR:-$(compath.py ${envir}/${NET}/${gfs_ver})} +export TANKverf_ozn=${TANKverf_ozn:-${OZN_TANKDIR}/${RUN}.${PDY}/${cyc}/${COMPONENT}/oznmon} +export TANKverf_oznM1=${TANKverf_oznM1:-${OZN_TANKDIR}/${RUN}.${P_PDY}/${p_cyc}/${COMPONENT}/oznmon} +export COM_IN=${COM_IN:-$(compath.py ${envir}/${NET}/${gfs_ver})} +export COMIN=${COMIN:-${COM_IN}/${RUN}.${PDY}/${cyc}/${COMPONENT}} + +if [[ ! -d ${TANKverf_ozn} ]]; then + mkdir -p -m 775 ${TANKverf_ozn} +fi + +#--------------------------------------- +# set up validation file +# +if [[ ${VALIDATE_DATA} -eq 1 ]]; then + export ozn_val_file=${ozn_val_file:-${FIXgdas_ozn}/gdas_oznmon_base.tar} +fi + +#--------------------------------------- +# Set necessary environment variables +# +export OZN_AREA=${OZN_AREA:-glb} +export oznstat=${oznstat:-$COMIN/gdas.t${cyc}z.oznstat} + + +#------------------------------------------------------- +# Execute the script. +# +${OZNMONSH:-${SCRgdas_ozn}/exgdas_atmos_verfozn.sh} ${PDY} ${cyc} +err=$? 
+[[ $err -ne 0 ]] && exit $err + + +################################ +# Remove the Working Directory +################################ +KEEPDATA=${KEEPDATA:-YES} +cd $DATAROOT +if [ ${KEEPDATA} = NO ] ; then + rm -rf $DATA +fi + +date + diff --git a/jobs/JGDAS_ATMOS_VERFRAD b/jobs/JGDAS_ATMOS_VERFRAD new file mode 100755 index 0000000000..39c7b6661f --- /dev/null +++ b/jobs/JGDAS_ATMOS_VERFRAD @@ -0,0 +1,136 @@ +#!/bin/sh +############################################################# +# Set up environment for GDAS Radiance Monitor job +############################################################# +set -xa +echo `date` $0 `date -u` begin +export PS4='$SECONDS + ' + +############################### +# Specify NET, RUN, and COMPONENT name +############################## +export NET=${NET:-gfs} +export RUN=${RUN:-gdas} +export COMPONENT=${COMPONENT:-atmos} + +########################################################### +# obtain unique process id (pid) and make temp directories +########################################################### +export pid=$$ +export outid=${outid:-"LL$job"} +export RAD_DATA_IN=${RAD_DATA_IN:-${DATAROOT}/${jobid:?}} + +export RADMON_SUFFIX=${RADMON_SUFFIX:-${RUN}} +export CYCLE_INTERVAL=${CYCLE_INTERVAL:-6} + +mkdir -p $RAD_DATA_IN +cd $RAD_DATA_IN + +#################################### +# Determine Job Output Name on System +#################################### +export pgmout="OUTPUT.${pid}" +export pgmerr=errfile +export cycle=t${cyc}z + +############################################## +# Specify Execution Areas +############################################## +export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} +export SCRgfs=${SCRgfs:-$HOMEgfs/scripts} + +export FIXgdas=${FIXgdas:-$HOMEgfs/fix/gdas} +export PARMmon=${PARMmon:-$HOMEgfs/parm/mon} + +export HOMEradmon=${HOMEradmon:-${HOMEgfs}} +export EXECradmon=${EXECradmon:-$HOMEradmon/exec} +export FIXradmon=${FIXradmon:-${FIXgfs}} +export USHradmon=${USHradmon:-$HOMEradmon/ush} + + +################################### +# source the parm file +################################### +parm_file=${parm_file:-${PARMmon}/da_mon.parm} +. ${parm_file} + + +############################################# +# Run setpdy and initialize PDY variables +############################################# +if [[ $MY_MACHINE != "HERA" && $MY_MACHINE != "hera" ]]; then + setpdy.sh + . ./PDY +fi + +############################################# +# determine PDY and cyc for previous cycle +############################################# + +cdate=`${NDATE} -6 ${PDY}${cyc}` +echo 'pdate = ${pdate}' + +export P_PDY=`echo ${cdate} | cut -c1-8` +export p_cyc=`echo ${cdate} | cut -c9-10` + +############################################# +# COMOUT - WHERE GSI OUTPUT RESIDES +# TANKverf - WHERE OUTPUT DATA WILL RESIDE +############################################# +export TANKverf=${TANKverf:-$(compath.py ${envir}/${NET}/${gfs_ver})} +export TANKverf_rad=${TANKverf_rad:-${TANKverf}/${RUN}.${PDY}/${cyc}/$COMPONENT/radmon} +export TANKverf_radM1=${TANKverf_radM1:-${TANKverf}/${RUN}.${P_PDY}/${p_cyc}/$COMPONENT/radmon} +export COM_IN=${COM_IN:-$(compath.py ${envir}/${NET}/${gfs_ver})} +export COMIN=${COMIN:-${COM_IN}/${RUN}.${PDY}/${cyc}/$COMPONENT} + +################################ +# backwards compatibility for +# gfs v15 which doesn't have +# a $COMPONENT in output path +################################ +if [[ ! 
-d ${COMIN} ]]; then + export COMIN=${COM_IN}/${RUN}.${PDY}/${cyc} +fi + + +mkdir -p -m 775 $TANKverf_rad + +env + +######################################## +# Set necessary environment variables +######################################## +export RAD_AREA=${RAD_AREA:-glb} + +export biascr=${biascr:-$COMIN/gdas.t${cyc}z.abias} +export radstat=${radstat:-$COMIN/gdas.t${cyc}z.radstat} + +echo " " +echo "JOB HAS STARTED" +echo " " + + +######################################################## +# Execute the script. +${RADMONSH:-${SCRgfs}/exgdas_atmos_verfrad.sh} ${PDY} ${cyc} +err=$? + +if [[ $err -ne 0 ]] ; then + exit $err +else + echo " " + echo "JOB HAS COMPLETED NORMALLY" + echo " " +fi + +################################ +# Remove the Working Directory +################################ +KEEPDATA=${KEEPDATA:-YES} +cd $DATAROOT +if [ ${KEEPDATA} = NO ] ; then + rm -rf $RAD_DATA_IN +fi + +date + diff --git a/jobs/JGDAS_ATMOS_VMINMON b/jobs/JGDAS_ATMOS_VMINMON new file mode 100755 index 0000000000..d3ce40fafc --- /dev/null +++ b/jobs/JGDAS_ATMOS_VMINMON @@ -0,0 +1,104 @@ +#!/bin/sh +########################################################### +# GDAS Minimization Monitor (MinMon) job +########################################################### +set -xa +echo `date` $0 `date -u` begin +export PS4='$SECONDS + ' + +############################### +# Specify NET, RUN, and COMPONENT name +############################## +export NET=${NET:-gfs} +export RUN=${RUN:-gdas} +export COMPONENT=${COMPONENT:-atmos} + +########################################################### +# obtain unique process id (pid) and make temp directories +########################################################### +export pid=$$ +export outid=${outid:-"LL$job"} + +export DATA=${DATA:-${DATAROOT}/${jobid:?}} +mkdir -p $DATA +cd $DATA + + +########################################################### +# obtain unique process id (pid) and make temp directories +########################################################### +export MINMON_SUFFIX=${MINMON_SUFFIX:-${NET}} +export m_job=${m_job:-${MINMON_SUFFIX}_mmDE} + + +############################################## +# Specify Package Areas +############################################## +export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} +export SCRgfs=${SCRgfs:-$HOMEgfs/scripts} + +export M_FIXgdas=${M_FIXgdas:-$HOMEgfs/fix/gdas} + +export HOMEminmon=${HOMEminmon:-${HOMEgfs}} +export EXECminmon=${EXECminmon:-$HOMEminmon/exec} +export USHminmon=${USHminmon:-$HOMEminmon/ush} + + +############################################# +# Run setpdy and initialize PDY variables +############################################# +export cycle=t${cyc}z +setpdy.sh +. 
./PDY + + +############################################# +# determine PDY and cyc for previous cycle +############################################# + +cdate=`${NDATE} -6 ${PDY}${cyc}` +echo 'pdate = ${pdate}' + +export P_PDY=`echo ${cdate} | cut -c1-8` +export p_cyc=`echo ${cdate} | cut -c9-10` + + +############################################# +# TANKverf - WHERE OUTPUT DATA WILL RESIDE +############################################# +export COM_IN=${COM_IN:-$(compath.py ${envir}/${NET}/${gfs_ver})} + +export M_TANKverf=${M_TANKverf:-${COM_IN}/${RUN}.${PDY}/${cyc}/${COMPONENT}/minmon} +export M_TANKverfM1=${M_TANKverfM1:-${COM_IN}/${RUN}.${P_PDY}/${p_cyc}/${COMPONENT}/minmon} + +export COMIN=${COMIN:-$COM_IN/${RUN}.${PDY}/${cyc}/$COMPONENT} + +mkdir -p -m 775 $M_TANKverf + + + +######################################## +# Set necessary environment variables +######################################## +export CYCLE_INTERVAL=6 +export gsistat=${gsistat:-${COMIN}/gdas.t${cyc}z.gsistat} + + +######################################################## +# Execute the script. +${GMONSH:-$SCRgfs/exgdas_atmos_vminmon.sh} ${PDY} ${cyc} +err=$? +[[ $err -ne 0 ]] && exit $err + + +################################ +# Remove the Working Directory +################################ +KEEPDATA=${KEEPDATA:-NO} +cd ${DATAROOT} + +if [ ${KEEPDATA} = NO ] ; then + rm -rf ${DATA} +fi + + diff --git a/jobs/JGDAS_ENKF_DIAG b/jobs/JGDAS_ENKF_DIAG new file mode 100755 index 0000000000..1c26192569 --- /dev/null +++ b/jobs/JGDAS_ENKF_DIAG @@ -0,0 +1,182 @@ +#!/bin/bash +set -x + +export RUN_ENVIR=${RUN_ENVIR:-"nco"} +export PS4='$SECONDS + ' +date + + +############################# +# Source relevant config files +############################# +export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} +configs="base anal eobs analdiag ediag" +config_path=${EXPDIR:-$NWROOT/gfs.${gfs_ver}/parm/config} +for config in $configs; do + . $config_path/config.$config + status=$? + [[ $status -ne 0 ]] && exit $status +done + + +########################################## +# Source machine runtime environment +########################################## +. $HOMEgfs/env/${machine}.env eobs +status=$? +[[ $status -ne 0 ]] && exit $status + + +############################################## +# Obtain unique process id (pid) and make temp directory +############################################## +export pid=${pid:-$$} +export outid=${outid:-"LL$job"} +export DATA=${DATA:-${DATAROOT}/${jobid:?}} +mkdir -p $DATA +cd $DATA + + +############################################## +# Run setpdy and initialize PDY variables +############################################## +export cycle="t${cyc}z" +setpdy.sh +. ./PDY + + +############################################## +# Determine Job Output Name on System +############################################## +export pgmout="OUTPUT.${pid}" +export pgmerr=errfile + + +############################################## +# Set variables used in the script +############################################## +export CDATE=${CDATE:-${PDY}${cyc}} +export CDUMP=${CDUMP:-${RUN:-"gdas"}} +export COMPONENT=${COMPONENT:-atmos} + + +############################################## +# Begin JOB SPECIFIC work +############################################## + +GDATE=$($NDATE -$assim_freq $CDATE) +gPDY=$(echo $GDATE | cut -c1-8) +gcyc=$(echo $GDATE | cut -c9-10) +GDUMP=${GDUMP:-"gdas"} + +export CASE=$CASE_ENKF +export CDUMP_OBS=${CDUMP_OBS:-$CDUMP} + +export OPREFIX="${CDUMP_OBS}.t${cyc}z." +export APREFIX="${CDUMP}.t${cyc}z." 
+export GPREFIX="${GDUMP}.t${gcyc}z." +export GSUFFIX="${GSUFFIX:-".ensmean${SUFFIX}"}" +export ASUFFIX="${ASUFFIX:-"${SUFFIX}"}" + +if [ $RUN_ENVIR = "nco" -o ${ROTDIR_DUMP:-NO} = "YES" ]; then + export COMIN_OBS=${COMIN_OBS:-$(compath.py ${envir}/obsproc/${obsproc_ver})/$RUN.$PDY/$cyc/$COMPONENT} + export COMIN_GES_OBS=${COMIN_GES_OBS:-$(compath.py ${envir}/obsproc/${obsproc_ver})/$GDUMP.$gPDY/$gcyc/$COMPONENT} +else + export COMIN_OBS="$DMPDIR/$CDUMP.$PDY/$cyc/$COMPONENT" + export COMIN_GES_OBS="$DMPDIR/$GDUMP.$gPDY/$gcyc/$COMPONENT" +fi + +# COMIN_GES, COMIN_ANL COMIN_GES_ENS, and COMOUT are used in script +COMIN_GES_CTL="$ROTDIR/gdas.$gPDY/$gcyc/$COMPONENT" +export COMIN_ANL="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" +export COMIN_GES_ENS="$ROTDIR/enkfgdas.$gPDY/$gcyc/$COMPONENT" +export COMIN_GES=$COMIN_GES_ENS +export COMOUT="$ROTDIR/enkf$CDUMP.$PDY/$cyc/$COMPONENT" + + +export ATMGES_ENSMEAN="$COMIN_GES_ENS/${GPREFIX}atmf006$GSUFFIX" +if [ ! -f $ATMGES_ENSMEAN ]; then + echo "FATAL ERROR: FILE MISSING: ATMGES_ENSMEAN = $ATMGES_ENSMEAN" + exit 1 +fi + + +# Link observational data +export PREPQC="$COMIN_OBS/${OPREFIX}prepbufr" +if [ ! -f $PREPQC ]; then + echo "WARNING: Global PREPBUFR FILE $PREPQC MISSING" +fi +export PREPQCPF="$COMIN_OBS/${OPREFIX}prepbufr.acft_profiles" +export TCVITL="$COMIN_ANL/${OPREFIX}syndata.tcvitals.tm00" +[[ $DONST = "YES" ]] && export NSSTBF="$COMIN_OBS/${OPREFIX}nsstbufr" + +# Guess Bias correction coefficients related to control +export GBIAS=${COMIN_GES_CTL}/${GPREFIX}abias +export GBIASPC=${COMIN_GES_CTL}/${GPREFIX}abias_pc +export GBIASAIR=${COMIN_GES_CTL}/${GPREFIX}abias_air +export GRADSTAT=${COMIN_GES_CTL}/${GPREFIX}radstat + +# Bias correction coefficients related to ensemble mean +export ABIAS="$COMOUT/${APREFIX}abias.ensmean" +export ABIASPC="$COMOUT/${APREFIX}abias_pc.ensmean" +export ABIASAIR="$COMOUT/${APREFIX}abias_air.ensmean" +export ABIASe="$COMOUT/${APREFIX}abias_int.ensmean" + +# Diagnostics related to ensemble mean +export GSISTAT="$COMOUT/${APREFIX}gsistat.ensmean" +export CNVSTAT="$COMOUT/${APREFIX}cnvstat.ensmean" +export OZNSTAT="$COMOUT/${APREFIX}oznstat.ensmean" +export RADSTAT="$COMOUT/${APREFIX}radstat.ensmean" + +# Select observations based on ensemble mean +export RUN_SELECT="YES" +export USE_SELECT="NO" +export SELECT_OBS="$COMOUT/${APREFIX}obsinput.ensmean" + +export DIAG_SUFFIX="_ensmean" +export DIAG_COMPRESS="NO" + +# GSI namelist options specific to eobs +export SETUP_INVOBS="passive_bc=.false.,$SETUP_INVOBS" + +# Ensure clean stat tarballs for ensemble mean +for fstat in $CNVSTAT $OZNSTAT $RADSTAT; do + [[ -f $fstat ]] && rm -f $fstat +done + + +############################################################### +# Run relevant script +env +echo "HAS BEGUN on $(hostname)" +$LOGSCRIPT + + +${ANALDIAGSH:-$SCRgfs/exglobal_diag.sh} +status=$? +[[ $status -ne 0 ]] && exit $status + + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [ -e "$pgmout" ] ; then + cat $pgmout +fi + + +echo "ENDED NORMALLY." 
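As an aside on the SETUP_INVOBS export used earlier in this job, the eobs-specific setting is simply prepended to whatever comma-separated GSI namelist overrides are already defined. A minimal sketch, with a purely hypothetical inherited value:

    SETUP_INVOBS="niter(1)=50,"                        # hypothetical value inherited from config
    SETUP_INVOBS="passive_bc=.false.,$SETUP_INVOBS"    # eobs-specific setting prepended, as above
    echo "$SETUP_INVOBS"                               # -> passive_bc=.false.,niter(1)=50,
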
+ + +########################################## +# Remove the Temporary working directory +########################################## +cd $DATAROOT +[[ $KEEPDATA = "NO" ]] && rm -rf $DATA + +date +exit 0 diff --git a/jobs/JGDAS_ENKF_ECEN b/jobs/JGDAS_ENKF_ECEN new file mode 100755 index 0000000000..f52abfe420 --- /dev/null +++ b/jobs/JGDAS_ENKF_ECEN @@ -0,0 +1,138 @@ +#!/bin/bash +set -x + +export RUN_ENVIR=${RUN_ENVIR:-"nco"} +export PS4='$SECONDS + ' +date + + +############################# +# Source relevant config files +############################# +export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} +configs="base ecen" +config_path=${EXPDIR:-$NWROOT/gfs.${gfs_ver}/parm/config} +for config in $configs; do + . $config_path/config.$config + status=$? + [[ $status -ne 0 ]] && exit $status +done + + +########################################## +# Source machine runtime environment +########################################## +. $HOMEgfs/env/${machine}.env ecen +status=$? +[[ $status -ne 0 ]] && exit $status + + +############################################## +# Obtain unique process id (pid) and make temp directory +############################################## +export pid=${pid:-$$} +export outid=${outid:-"LL$job"} +export DATA=${DATA:-${DATAROOT}/${jobid:?}} +mkdir -p $DATA +cd $DATA + + +############################################## +# Run setpdy and initialize PDY variables +############################################## +export cycle="t${cyc}z" +setpdy.sh +. ./PDY + + +############################################## +# Determine Job Output Name on System +############################################## +export pgmout="OUTPUT.${pid}" +export pgmerr=errfile + + +############################################## +# Set variables used in the script +############################################## +export CDATE=${CDATE:-${PDY}${cyc}} +export CDUMP=${CDUMP:-${RUN:-"gdas"}} +export COMPONENT=${COMPONENT:-atmos} + + +############################################## +# Begin JOB SPECIFIC work +############################################## + +GDATE=$($NDATE -$assim_freq $CDATE) +gPDY=$(echo $GDATE | cut -c1-8) +gcyc=$(echo $GDATE | cut -c9-10) +GDUMP=${GDUMP:-"gdas"} + +export CASE=$CASE_ENKF + + +EUPD_CYC=$(echo ${EUPD_CYC:-"gdas"} | tr a-z A-Z) +if [ $EUPD_CYC = "GFS" ]; then + CDUMP_ENKF="gfs" +else + CDUMP_ENKF=$CDUMP +fi + +export OPREFIX="${CDUMP}.t${cyc}z." +export APREFIX="${CDUMP}.t${cyc}z." +export APREFIX_ENKF="${CDUMP_ENKF}.t${cyc}z." +export GPREFIX="${CDUMP}.t${gcyc}z." +export GSUFFIX=${GSUFFIX:-$SUFFIX} +export ASUFFIX=${ASUFFIX:-$SUFFIX} + +if [ $RUN_ENVIR = "nco" -o ${ROTDIR_DUMP:-NO} = "YES" ]; then + export COMIN_OBS=${COMIN_OBS:-$(compath.py ${envir}/obsproc/${obsproc_ver})/$RUN.$PDY/$cyc/$COMPONENT} + export COMIN_GES_OBS=${COMIN_GES_OBS:-$(compath.py ${envir}/obsproc/${obsproc_ver})/$GDUMP.$gPDY/$gcyc/$COMPONENT} +else + export COMIN_OBS="$DMPDIR/$CDUMP.$PDY/$cyc/$COMPONENT" + export COMIN_GES_OBS="$DMPDIR/$GDUMP.$gPDY/$gcyc/$COMPONENT" +fi + +# COMIN, COMIN_ENS and COMIN_GES_ENS are used in script +export COMIN="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" +export COMIN_ENS="$ROTDIR/enkf$CDUMP_ENKF.$PDY/$cyc/$COMPONENT" +export COMOUT_ENS="$ROTDIR/enkf$CDUMP.$PDY/$cyc/$COMPONENT" +export COMIN_GES_ENS="$ROTDIR/enkf$CDUMP.$gPDY/$gcyc/$COMPONENT" + + +############################################################### +# Run relevant script +env +echo "HAS BEGUN on $(hostname)" +$LOGSCRIPT + + +${ENKFRECENSH:-$SCRgfs/exgdas_enkf_ecen.sh} +status=$? 
+[[ $status -ne 0 ]] && exit $status + + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [ -e "$pgmout" ] ; then + cat $pgmout +fi + + +echo "ENDED NORMALLY." + + +########################################## +# Remove the Temporary working directory +########################################## +cd $DATAROOT +[[ $KEEPDATA = "NO" ]] && rm -rf $DATA + +date +exit 0 diff --git a/jobs/JGDAS_ENKF_FCST b/jobs/JGDAS_ENKF_FCST new file mode 100755 index 0000000000..88d61c0870 --- /dev/null +++ b/jobs/JGDAS_ENKF_FCST @@ -0,0 +1,140 @@ +#!/bin/bash +set -x + +export RUN_ENVIR=${RUN_ENVIR:-"nco"} +export PS4='$SECONDS + ' +date + + +############################# +# Source relevant config files +############################# +export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} +config_path=${EXPDIR:-$NWROOT/gfs.${gfs_ver}/parm/config} +configs="base fcst efcs" +for config in $configs; do + . $config_path/config.$config + status=$? + [[ $status -ne 0 ]] && exit $status +done + + +########################################## +# Source machine runtime environment +########################################## +. $HOMEgfs/env/${machine}.env efcs +status=$? +[[ $status -ne 0 ]] && exit $status + + +############################################## +# Obtain unique process id (pid) and make temp directory +############################################## +export pid=${pid:-$$} +export outid=${outid:-"LL$job"} +export DATA=${DATA:-${DATAROOT}/${jobid:?}} +mkdir -p $DATA +cd $DATA + + +############################################## +# Run setpdy and initialize PDY variables +############################################## +export cycle="t${cyc}z" +setpdy.sh +. ./PDY + + +############################################## +# Determine Job Output Name on System +############################################## +export pgmout="OUTPUT.${pid}" +export pgmerr=errfile + + +############################################## +# Set variables used in the script +############################################## +export CDATE=${CDATE:-${PDY}${cyc}} +export CDUMP=${CDUMP:-${RUN:-"gdas"}} +export COMPONENT=${COMPONENT:-atmos} + + +############################################## +# Begin JOB SPECIFIC work +############################################## + +export CASE=$CASE_ENKF + +# COMOUT is used in script +export COMOUT="$ROTDIR/enkf$CDUMP.$PDY/$cyc/$COMPONENT" + + +# Forecast length for EnKF forecast +export FHMIN=$FHMIN_ENKF +export FHOUT=$FHOUT_ENKF +export FHMAX=$FHMAX_ENKF + + +# Get ENSBEG/ENSEND from ENSGRP and NMEM_EFCSGRP +export ENSEND=$((NMEM_EFCSGRP * 10#${ENSGRP})) +export ENSBEG=$((ENSEND - NMEM_EFCSGRP + 1)) + + +############################################################### +# Run relevant script +env +echo "HAS BEGUN on $(hostname)" +$LOGSCRIPT + + +${ENKFFCSTSH:-$SCRgfs/exgdas_enkf_fcst.sh} +status=$? +[[ $status -ne 0 ]] && exit $status + + +# Double check the status of members in ENSGRP +EFCSGRP=$COMOUT/efcs.grp${ENSGRP} +npass=0 +if [ -f $EFCSGRP ]; then + npass=$(grep "PASS" $EFCSGRP | wc -l) +fi +echo "$npass/$NMEM_EFCSGRP members successfull in efcs.grp$ENSGRP" +if [ $npass -ne $NMEM_EFCSGRP ]; then + echo "FATAL ERROR: Failed members in group $ENSGRP, ABORT!" 
+ cat $EFCSGRP + exit 99 +fi + + +############################################## +# Send Alerts +############################################## +if [ $SENDDBN = YES ] ; then + $DBNROOT/bin/dbn_alert MODEL ENKF1_MSC_fcsstat $job $EFCSGRP +fi + + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [ -e "$pgmout" ] ; then + cat $pgmout +fi + + +echo "ENDED NORMALLY." + + +########################################## +# Remove the Temporary working directory +########################################## +cd $DATAROOT +[[ $KEEPDATA = "NO" ]] && rm -rf $DATA + +date +exit 0 diff --git a/jobs/JGDAS_ENKF_POST b/jobs/JGDAS_ENKF_POST new file mode 100755 index 0000000000..7309305c0f --- /dev/null +++ b/jobs/JGDAS_ENKF_POST @@ -0,0 +1,112 @@ +#!/bin/bash +set -x + +export RUN_ENVIR=${RUN_ENVIR:-"nco"} +export PS4='$SECONDS + ' +date + + +############################# +# Source relevant config files +############################# +export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} +configs="base epos" +config_path=${EXPDIR:-$NWROOT/gfs.${gfs_ver}/parm/config} +for config in $configs; do + . $config_path/config.$config + status=$? + [[ $status -ne 0 ]] && exit $status +done + + +########################################## +# Source machine runtime environment +########################################## +. $HOMEgfs/env/${machine}.env epos +status=$? +[[ $status -ne 0 ]] && exit $status + + +############################################## +# Obtain unique process id (pid) and make temp directory +############################################## +export pid=${pid:-$$} +export outid=${outid:-"LL$job"} +export DATA=${DATA:-${DATAROOT}/${jobid:?}} +mkdir -p $DATA +cd $DATA + + +############################################## +# Run setpdy and initialize PDY variables +############################################## +export cycle="t${cyc}z" +setpdy.sh +. ./PDY + + +############################################## +# Determine Job Output Name on System +############################################## +export pgmout="OUTPUT.${pid}" +export pgmerr=errfile + + +############################################## +# Set variables used in the script +############################################## +export CDATE=${CDATE:-${PDY}${cyc}} +export CDUMP=${CDUMP:-${RUN:-"gdas"}} +export COMPONENT=${COMPONENT:-atmos} + + +############################################## +# Begin JOB SPECIFIC work +############################################## +export GFS_NCIO=${GFS_NCIO:-"YES"} + +export PREFIX="${CDUMP}.t${cyc}z." + +# COMIN, COMOUT are used in script +export COMIN="$ROTDIR/enkf$CDUMP.$PDY/$cyc/$COMPONENT" +export COMOUT="$ROTDIR/enkf$CDUMP.$PDY/$cyc/$COMPONENT" + + +export LEVS=$((LEVS-1)) + + +############################################################### +# Run relevant script +env +echo "HAS BEGUN on $(hostname)" +$LOGSCRIPT + + +${ENKFPOSTSH:-$SCRgfs/exgdas_enkf_post.sh} +status=$? +[[ $status -ne 0 ]] && exit $status + + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [ -e "$pgmout" ] ; then + cat $pgmout +fi + + +echo "ENDED NORMALLY." 
+ + +########################################## +# Remove the Temporary working directory +########################################## +cd $DATAROOT +[[ $KEEPDATA = "NO" ]] && rm -rf $DATA + +date +exit 0 diff --git a/jobs/JGDAS_ENKF_SELECT_OBS b/jobs/JGDAS_ENKF_SELECT_OBS new file mode 100755 index 0000000000..919eec5bc5 --- /dev/null +++ b/jobs/JGDAS_ENKF_SELECT_OBS @@ -0,0 +1,192 @@ +#!/bin/bash +set -x + +export RUN_ENVIR=${RUN_ENVIR:-"nco"} +export PS4='$SECONDS + ' +date + + +############################# +# Source relevant config files +############################# +export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} +configs="base anal eobs" +config_path=${EXPDIR:-$NWROOT/gfs.${gfs_ver}/parm/config} +for config in $configs; do + . $config_path/config.$config + status=$? + [[ $status -ne 0 ]] && exit $status +done + + +########################################## +# Source machine runtime environment +########################################## +. $HOMEgfs/env/${machine}.env eobs +status=$? +[[ $status -ne 0 ]] && exit $status + + +############################################## +# Obtain unique process id (pid) and make temp directory +############################################## +export pid=${pid:-$$} +export outid=${outid:-"LL$job"} +export DATA=${DATA:-${DATAROOT}/${jobid:?}} +mkdir -p $DATA +cd $DATA + + +############################################## +# Run setpdy and initialize PDY variables +############################################## +export cycle="t${cyc}z" +setpdy.sh +. ./PDY + + +############################################## +# Determine Job Output Name on System +############################################## +export pgmout="OUTPUT.${pid}" +export pgmerr=errfile + + +############################################## +# Set variables used in the script +############################################## +export CDATE=${CDATE:-${PDY}${cyc}} +export CDUMP=${CDUMP:-${RUN:-"gdas"}} +export COMPONENT=${COMPONENT:-atmos} + + +############################################## +# Begin JOB SPECIFIC work +############################################## + +GDATE=$($NDATE -$assim_freq $CDATE) +gPDY=$(echo $GDATE | cut -c1-8) +gcyc=$(echo $GDATE | cut -c9-10) +GDUMP=${GDUMP:-"gdas"} + +export CASE=$CASE_ENKF +export CDUMP_OBS=${CDUMP_OBS:-$CDUMP} + +export OPREFIX="${CDUMP_OBS}.t${cyc}z." +export APREFIX="${CDUMP}.t${cyc}z." +export GPREFIX="${GDUMP}.t${gcyc}z." +export GSUFFIX="${GSUFFIX:-".ensmean${SUFFIX}"}" +export ASUFFIX="${ASUFFIX:-"${SUFFIX}"}" + +if [ $RUN_ENVIR = "nco" -o ${ROTDIR_DUMP:-NO} = "YES" ]; then + export COMIN_OBS=${COMIN_OBS:-$(compath.py ${envir}/obsproc/${obsproc_ver})/$RUN.$PDY/$cyc/$COMPONENT} + export COMIN_GES_OBS=${COMIN_GES_OBS:-$(compath.py ${envir}/obsproc/${obsproc_ver})/$GDUMP.$gPDY/$gcyc/$COMPONENT} +else + export COMIN_OBS="$DMPDIR/$CDUMP.$PDY/$cyc/$COMPONENT" + export COMIN_GES_OBS="$DMPDIR/$GDUMP.$gPDY/$gcyc/$COMPONENT" +fi + +# COMIN_GES, COMIN_ANL COMIN_GES_ENS, and COMOUT are used in script +COMIN_GES_CTL="$ROTDIR/gdas.$gPDY/$gcyc/$COMPONENT" +export COMIN_ANL="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" +export COMIN_GES_ENS="$ROTDIR/enkfgdas.$gPDY/$gcyc/$COMPONENT" +export COMIN_GES=$COMIN_GES_ENS +export COMOUT="$ROTDIR/enkf$CDUMP.$PDY/$cyc/$COMPONENT" + + +export ATMGES_ENSMEAN="$COMIN_GES_ENS/${GPREFIX}atmf006$GSUFFIX" +if [ ! 
-f $ATMGES_ENSMEAN ]; then + echo "FATAL ERROR: FILE MISSING: ATMGES_ENSMEAN = $ATMGES_ENSMEAN" + exit 1 +fi + +export LEVS=$($NCDUMP -h $ATMGES_ENSMEAN | grep -i "pfull" | head -1 | awk -F" = " '{print $2}' | awk -F" " '{print $1}') # get LEVS +status=$? +[[ $status -ne 0 ]] && exit $status + +# Link observational data +export PREPQC="$COMIN_OBS/${OPREFIX}prepbufr" +if [ ! -f $PREPQC ]; then + echo "WARNING: Global PREPBUFR FILE $PREPQC MISSING" +fi +export PREPQCPF="$COMIN_OBS/${OPREFIX}prepbufr.acft_profiles" +export TCVITL="$COMIN_ANL/${OPREFIX}syndata.tcvitals.tm00" +[[ $DONST = "YES" ]] && export NSSTBF="$COMIN_OBS/${OPREFIX}nsstbufr" + +# Guess Bias correction coefficients related to control +export GBIAS=${COMIN_GES_CTL}/${GPREFIX}abias +export GBIASPC=${COMIN_GES_CTL}/${GPREFIX}abias_pc +export GBIASAIR=${COMIN_GES_CTL}/${GPREFIX}abias_air +export GRADSTAT=${COMIN_GES_CTL}/${GPREFIX}radstat + +# Bias correction coefficients related to ensemble mean +export ABIAS="$COMOUT/${APREFIX}abias.ensmean" +export ABIASPC="$COMOUT/${APREFIX}abias_pc.ensmean" +export ABIASAIR="$COMOUT/${APREFIX}abias_air.ensmean" +export ABIASe="$COMOUT/${APREFIX}abias_int.ensmean" + +# Diagnostics related to ensemble mean +export GSISTAT="$COMOUT/${APREFIX}gsistat.ensmean" +export CNVSTAT="$COMOUT/${APREFIX}cnvstat.ensmean" +export OZNSTAT="$COMOUT/${APREFIX}oznstat.ensmean" +export RADSTAT="$COMOUT/${APREFIX}radstat.ensmean" + +# Select observations based on ensemble mean +export RUN_SELECT="YES" +export USE_SELECT="NO" +export SELECT_OBS="$COMOUT/${APREFIX}obsinput.ensmean" + +export DIAG_SUFFIX="_ensmean" + +# GSI namelist options specific to eobs +export SETUP_INVOBS="passive_bc=.false.,$SETUP_INVOBS" + +# Ensure clean stat tarballs for ensemble mean +for fstat in $CNVSTAT $OZNSTAT $RADSTAT; do + [[ -f $fstat ]] && rm -f $fstat +done + + +############################################################### +# Run relevant script +env +echo "HAS BEGUN on $(hostname)" +$LOGSCRIPT + + +${INVOBSSH:-$SCRgfs/exgdas_enkf_select_obs.sh} +status=$? +[[ $status -ne 0 ]] && exit $status + + +############################################## +# Send Alerts +############################################## +if [ $SENDDBN = YES ] ; then + $DBNROOT/bin/dbn_alert MODEL ENKF1_MSC_gsistat $job $GSISTAT +fi + + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [ -e "$pgmout" ] ; then + cat $pgmout +fi + + +echo "ENDED NORMALLY." + + +########################################## +# Remove the Temporary working directory +########################################## +cd $DATAROOT +[[ $KEEPDATA = "NO" ]] && rm -rf $DATA + +date +exit 0 diff --git a/jobs/JGDAS_ENKF_SFC b/jobs/JGDAS_ENKF_SFC new file mode 100755 index 0000000000..a0383f2cf0 --- /dev/null +++ b/jobs/JGDAS_ENKF_SFC @@ -0,0 +1,139 @@ +#!/bin/bash +set -x + +export RUN_ENVIR=${RUN_ENVIR:-"nco"} +export PS4='$SECONDS + ' +date + + +############################# +# Source relevant config files +############################# +export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} +configs="base esfc" +config_path=${EXPDIR:-$NWROOT/gfs.${gfs_ver}/parm/config} +for config in $configs; do + . $config_path/config.$config + status=$? 
+ [[ $status -ne 0 ]] && exit $status +done + + +########################################## +# Source machine runtime environment +########################################## +. $HOMEgfs/env/${machine}.env esfc +status=$? +[[ $status -ne 0 ]] && exit $status + + +############################################## +# Obtain unique process id (pid) and make temp directory +############################################## +export pid=${pid:-$$} +export outid=${outid:-"LL$job"} +export DATA=${DATA:-${DATAROOT}/${jobid:?}} +mkdir -p $DATA +cd $DATA + + +############################################## +# Run setpdy and initialize PDY variables +############################################## +export cycle="t${cyc}z" +setpdy.sh +. ./PDY + + +############################################## +# Determine Job Output Name on System +############################################## +export pgmout="OUTPUT.${pid}" +export pgmerr=errfile + + +############################################## +# Set variables used in the script +############################################## +export CDATE=${CDATE:-${PDY}${cyc}} +export CDUMP=${CDUMP:-${RUN:-"gdas"}} +export COMPONENT=${COMPONENT:-atmos} + + +############################################## +# Begin JOB SPECIFIC work +############################################## + +GDATE=$($NDATE -$assim_freq $CDATE) +gPDY=$(echo $GDATE | cut -c1-8) +gcyc=$(echo $GDATE | cut -c9-10) +GDUMP=${GDUMP:-"gdas"} + +export CASE=$CASE_ENKF + + +EUPD_CYC=$(echo ${EUPD_CYC:-"gdas"} | tr a-z A-Z) +if [ $EUPD_CYC = "GFS" ]; then + CDUMP_ENKF="gfs" +else + CDUMP_ENKF=$CDUMP +fi + +export OPREFIX="${CDUMP}.t${cyc}z." +export APREFIX="${CDUMP}.t${cyc}z." +export APREFIX_ENKF="${CDUMP_ENKF}.t${cyc}z." +export GPREFIX="${CDUMP}.t${gcyc}z." +export GSUFFIX=${GSUFFIX:-$SUFFIX} +export ASUFFIX=${ASUFFIX:-$SUFFIX} + +if [ $RUN_ENVIR = "nco" -o ${ROTDIR_DUMP:-NO} = "YES" ]; then + export COMIN_OBS=${COMIN_OBS:-$(compath.py ${envir}/obsproc/${obsproc_ver})/$RUN.$PDY/$cyc/$COMPONENT} + export COMIN_GES_OBS=${COMIN_GES_OBS:-$(compath.py ${envir}/obsproc/${obsproc_ver})/$GDUMP.$gPDY/$gcyc/$COMPONENT} +else + export COMIN_OBS="$DMPDIR/$CDUMP.$PDY/$cyc/$COMPONENT" + export COMIN_GES_OBS="$DMPDIR/$GDUMP.$gPDY/$gcyc/$COMPONENT" +fi + +# COMIN, COMIN_ENS and COMIN_GES_ENS are used in script +export COMIN="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" +export COMIN_GES="$ROTDIR/$CDUMP.$gPDY/$gcyc/$COMPONENT" +export COMIN_ENS="$ROTDIR/enkf$CDUMP_ENKF.$PDY/$cyc/$COMPONENT" +export COMOUT_ENS="$ROTDIR/enkf$CDUMP.$PDY/$cyc/$COMPONENT" +export COMIN_GES_ENS="$ROTDIR/enkf$CDUMP.$gPDY/$gcyc/$COMPONENT" + + +############################################################### +# Run relevant script +env +echo "HAS BEGUN on $(hostname)" +$LOGSCRIPT + + +${ENKFRESFCSH:-$SCRgfs/exgdas_enkf_sfc.sh} +status=$? +[[ $status -ne 0 ]] && exit $status + + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [ -e "$pgmout" ] ; then + cat $pgmout +fi + + +echo "ENDED NORMALLY." 
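For clarity on the previous-cycle variables derived earlier in this job, a small worked example; the dates are hypothetical and the snippet assumes the prod_util ndate utility is available as $NDATE, as it is in these jobs:

    CDATE=2024082100                        # hypothetical current cycle
    assim_freq=6
    GDATE=$($NDATE -$assim_freq $CDATE)     # -> 2024082018 (offset by -6 hours)
    gPDY=$(echo $GDATE | cut -c1-8)         # -> 20240820
    gcyc=$(echo $GDATE | cut -c9-10)        # -> 18
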
+ + +########################################## +# Remove the Temporary working directory +########################################## +cd $DATAROOT +[[ $KEEPDATA = "NO" ]] && rm -rf $DATA + +date +exit 0 diff --git a/jobs/JGDAS_ENKF_UPDATE b/jobs/JGDAS_ENKF_UPDATE new file mode 100755 index 0000000000..07bf37f7f8 --- /dev/null +++ b/jobs/JGDAS_ENKF_UPDATE @@ -0,0 +1,123 @@ +#!/bin/bash +set -x + +export RUN_ENVIR=${RUN_ENVIR:-"nco"} +export PS4='$SECONDS + ' +date + + +############################# +# Source relevant config files +############################# +export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} +configs="base anal eupd" +config_path=${EXPDIR:-$NWROOT/gfs.${gfs_ver}/parm/config} +for config in $configs; do + . $config_path/config.$config + status=$? + [[ $status -ne 0 ]] && exit $status +done + + +########################################## +# Source machine runtime environment +########################################## +. $HOMEgfs/env/${machine}.env eupd +status=$? +[[ $status -ne 0 ]] && exit $status + + +############################################## +# Obtain unique process id (pid) and make temp directory +############################################## +export pid=${pid:-$$} +export outid=${outid:-"LL$job"} +export DATA=${DATA:-${DATAROOT}/${jobid:?}} +mkdir -p $DATA +cd $DATA + + +############################################## +# Run setpdy and initialize PDY variables +############################################## +export cycle="t${cyc}z" +setpdy.sh +. ./PDY + + +############################################## +# Determine Job Output Name on System +############################################## +export pgmout="OUTPUT.${pid}" +export pgmerr=errfile + + +############################################## +# Set variables used in the script +############################################## +export CDATE=${CDATE:-${PDY}${cyc}} +export CDUMP=${CDUMP:-${RUN:-"gdas"}} +export COMPONENT=${COMPONENT:-atmos} + + +############################################## +# Begin JOB SPECIFIC work +############################################## + +GDATE=$($NDATE -$assim_freq $CDATE) +gPDY=$(echo $GDATE | cut -c1-8) +gcyc=$(echo $GDATE | cut -c9-10) + +export APREFIX="${CDUMP}.t${cyc}z." +export GPREFIX="gdas.t${gcyc}z." +export ASUFFIX=${ASUFFIX:-$SUFFIX} +export GSUFFIX=${GSUFFIX:-$SUFFIX} + + +# COMIN_GES_ENS and COMOUT_ANL_ENS are used in script +export COMIN_GES_ENS="$ROTDIR/enkfgdas.$gPDY/$gcyc/$COMPONENT" +export COMOUT_ANL_ENS="$ROTDIR/enkf$CDUMP.$PDY/$cyc/$COMPONENT" + + +############################################################### +# Run relevant script +env +echo "HAS BEGUN on $(hostname)" +$LOGSCRIPT + +${ENKFUPDSH:-$SCRgfs/exgdas_enkf_update.sh} +status=$? +[[ $status -ne 0 ]] && exit $status + + +############################################## +# Send Alerts +############################################## +if [ $SENDDBN = YES ] ; then + $DBNROOT/bin/dbn_alert MODEL ENKF1_MSC_enkfstat $job $COMOUT_ANL_ENS/${APREFIX}enkfstat +fi + + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [ -e "$pgmout" ] ; then + cat $pgmout +fi + + +echo "ENDED NORMALLY." 
+
+
+##########################################
+# Remove the Temporary working directory
+##########################################
+cd $DATAROOT
+[[ $KEEPDATA = "NO" ]] && rm -rf $DATA
+
+date
+exit 0
diff --git a/jobs/JGFS_ATMOS_VMINMON b/jobs/JGFS_ATMOS_VMINMON
new file mode 100755
index 0000000000..3b43b385c3
--- /dev/null
+++ b/jobs/JGFS_ATMOS_VMINMON
@@ -0,0 +1,102 @@
+#!/bin/sh
+###########################################################
+# GFS Minimization Monitor (MinMon) job
+###########################################################
+set -xa
+echo `date` $0 `date -u` begin
+export PS4='$SECONDS + '
+
+##############################
+# Specify NET and RUN name
+##############################
+export NET=${NET:-gfs}
+export RUN=${RUN:-gfs}
+export COMPONENT=${COMPONENT:-atmos}
+
+
+###########################################################
+# obtain unique process id (pid) and make temp directories
+###########################################################
+export pid=$$
+export outid=${outid:-"LL$job"}
+export DATA=${DATA:-${DATAROOT}/${jobid:?}}
+mkdir -p $DATA
+cd $DATA
+
+
+###########################################################
+# specify the MinMon suffix and job name
+###########################################################
+export MINMON_SUFFIX=${MINMON_SUFFIX:-GFS}
+export m_job=${m_job:-${MINMON_SUFFIX}_mmDE}
+
+
+##############################################
+# Specify Package Areas
+##############################################
+export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}}
+export SCRgfs=${SCRgfs:-$HOMEgfs/scripts}
+export M_FIXgfs=${M_FIXgfs:-$HOMEgfs/fix/product}
+
+export HOMEminmon=${HOMEminmon:-${HOMEgfs}}
+export EXECminmon=${EXECminmon:-$HOMEminmon/exec}
+export USHminmon=${USHminmon:-$HOMEminmon/ush}
+
+
+#############################################
+# Run setpdy and initialize PDY variables
+#############################################
+export cycle=t${cyc}z
+setpdy.sh
+. ./PDY
+
+
+#############################################
+# determine PDY and cyc for previous cycle
+#############################################
+
+cdate=`${NDATE} -6 ${PDY}${cyc}`
+echo "cdate = ${cdate}"
+
+export P_PDY=`echo ${cdate} | cut -c1-8`
+export p_cyc=`echo ${cdate} | cut -c9-10`
+
+
+#############################################
+# TANKverf - WHERE OUTPUT DATA WILL RESIDE
+#############################################
+export COM_IN=${COM_IN:-$(compath.py ${envir}/${NET}/${gfs_ver})}
+
+export M_TANKverf=${M_TANKverf:-${COM_IN}/${RUN}.${PDY}/${cyc}/${COMPONENT}/minmon}
+export M_TANKverfM1=${M_TANKverfM1:-${COM_IN}/${RUN}.${P_PDY}/${p_cyc}/${COMPONENT}/minmon}
+
+export COMIN=${COMIN:-$COM_IN/${RUN}.${PDY}/${cyc}/$COMPONENT}
+
+mkdir -p -m 775 $M_TANKverf
+
+
+########################################
+# Set necessary environment variables
+########################################
+export CYCLE_INTERVAL=6
+export gsistat=${gsistat:-${COMIN}/gfs.t${cyc}z.gsistat}
+
+
+########################################################
+# Execute the script.
+${GMONSH:-$SCRgfs/exgfs_atmos_vminmon.sh} ${PDY} ${cyc}
+err=$?
+[[ $err -ne 0 ]] && exit $err + + +################################ +# Remove the Working Directory +################################ +KEEPDATA=${KEEPDATA:-NO} +cd ${DATAROOT} + +if [ ${KEEPDATA} = NO ] ; then + rm -rf ${DATA} +fi + + diff --git a/scripts/exgdas_atmos_chgres_forenkf.sh b/scripts/exgdas_atmos_chgres_forenkf.sh new file mode 100755 index 0000000000..805b8be6d0 --- /dev/null +++ b/scripts/exgdas_atmos_chgres_forenkf.sh @@ -0,0 +1,211 @@ +#!/bin/bash +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: exgdas_atmos_chgres_forenkf.sh +# Script description: Runs chgres on full-resolution forecast for EnKF recentering +# +# Author: Cory Martin Org: NCEP/EMC Date: 2020-06-08 +# +# Abstract: This script runs chgres on full-resolution forecast for later +# use in the EnKF recentering step +# +# $Id$ +# +# Attributes: +# Language: POSIX shell +# Machine: WCOSS-Dell / Hera +# +################################################################################ + +# Set environment. +export VERBOSE=${VERBOSE:-"YES"} +if [ $VERBOSE = "YES" ]; then + echo $(date) EXECUTING $0 $* >&2 + set -x +fi + +# Directories. +pwd=$(pwd) +export FIXgsm=${FIXgsm:-$HOMEgfs/fix/fix_am} + +# Base variables +CDATE=${CDATE:-"2001010100"} +CDUMP=${CDUMP:-"gdas"} +GDUMP=${GDUMP:-"gdas"} + +# Derived base variables +GDATE=$($NDATE -$assim_freq $CDATE) +BDATE=$($NDATE -3 $CDATE) +PDY=$(echo $CDATE | cut -c1-8) +cyc=$(echo $CDATE | cut -c9-10) +bPDY=$(echo $BDATE | cut -c1-8) +bcyc=$(echo $BDATE | cut -c9-10) + +# Utilities +export NCP=${NCP:-"/bin/cp"} +export NMV=${NMV:-"/bin/mv"} +export NLN=${NLN:-"/bin/ln -sf"} +export CHGRP_CMD=${CHGRP_CMD:-"chgrp ${group_name:-rstprod}"} +export NCLEN=${NCLEN:-$HOMEgfs/ush/getncdimlen} + +# IAU +DOIAU=${DOIAU:-"NO"} +export IAUFHRS=${IAUFHRS:-"6"} + +# Dependent Scripts and Executables +export APRUN_CHGRES=${APRUN_CHGRES:-${APRUN:-""}} +export CHGRESNCEXEC=${CHGRESNCEXEC:-$HOMEgfs/exec/enkf_chgres_recenter_nc.x} +export NTHREADS_CHGRES=${NTHREADS_CHGRES:-1} +APRUNCFP=${APRUNCFP:-""} + +# OPS flags +RUN=${RUN:-""} +SENDECF=${SENDECF:-"NO"} +SENDDBN=${SENDDBN:-"NO"} + +# level info file +SIGLEVEL=${SIGLEVEL:-${FIXgsm}/global_hyblev.l${LEVS}.txt} + +# forecast files +APREFIX=${APREFIX:-""} +ASUFFIX=${ASUFFIX:-$SUFFIX} +# at full resolution +ATMF03=${ATMF03:-${COMOUT}/${APREFIX}atmf003${ASUFFIX}} +ATMF04=${ATMF04:-${COMOUT}/${APREFIX}atmf004${ASUFFIX}} +ATMF05=${ATMF05:-${COMOUT}/${APREFIX}atmf005${ASUFFIX}} +ATMF06=${ATMF06:-${COMOUT}/${APREFIX}atmf006${ASUFFIX}} +ATMF07=${ATMF07:-${COMOUT}/${APREFIX}atmf007${ASUFFIX}} +ATMF08=${ATMF08:-${COMOUT}/${APREFIX}atmf008${ASUFFIX}} +ATMF09=${ATMF09:-${COMOUT}/${APREFIX}atmf009${ASUFFIX}} +# at ensemble resolution +ATMF03ENS=${ATMF03ENS:-${COMOUT}/${APREFIX}atmf003.ensres${ASUFFIX}} +ATMF04ENS=${ATMF04ENS:-${COMOUT}/${APREFIX}atmf004.ensres${ASUFFIX}} +ATMF05ENS=${ATMF05ENS:-${COMOUT}/${APREFIX}atmf005.ensres${ASUFFIX}} +ATMF06ENS=${ATMF06ENS:-${COMOUT}/${APREFIX}atmf006.ensres${ASUFFIX}} +ATMF07ENS=${ATMF07ENS:-${COMOUT}/${APREFIX}atmf007.ensres${ASUFFIX}} +ATMF08ENS=${ATMF08ENS:-${COMOUT}/${APREFIX}atmf008.ensres${ASUFFIX}} +ATMF09ENS=${ATMF09ENS:-${COMOUT}/${APREFIX}atmf009.ensres${ASUFFIX}} +ATMFCST_ENSRES=${ATMFCST_ENSRES:-${COMOUT_ENS}/mem001/${APREFIX}atmf006${ASUFFIX}} + +# Set script / GSI control parameters +DOHYBVAR=${DOHYBVAR:-"NO"} +lrun_subdirs=${lrun_subdirs:-".true."} +USE_CFP=${USE_CFP:-"NO"} +CFP_MP=${CFP_MP:-"NO"} +nm="" +if [ 
$CFP_MP = "YES" ]; then + nm=0 +fi +if [ $DOHYBVAR = "YES" ]; then + l_hyb_ens=.true. + export l4densvar=${l4densvar:-".false."} + export lwrite4danl=${lwrite4danl:-".false."} +else + echo "DOHYBVAR != YES, this script will exit without regridding deterministic forecast" + exit 0 +fi + +################################################################################ +################################################################################ +# Preprocessing +mkdata=NO +if [ ! -d $DATA ]; then + mkdata=YES + mkdir -p $DATA +fi + +cd $DATA || exit 99 + +############################################################## +# get resolution information +LONB_ENKF=${LONB_ENKF:-$($NCLEN $ATMFCST_ENSRES grid_xt)} # get LONB_ENKF +LATB_ENKF=${LATB_ENKF:-$($NCLEN $ATMFCST_ENSRES grid_yt)} # get LATB_ENFK +LEVS_ENKF=${LEVS_ENKF:-$($NCLEN $ATMFCST_ENSRES pfull)} # get LATB_ENFK + +############################################################## +# If analysis increment is written by GSI, regrid forecasts to increment resolution +if [ $DO_CALC_ANALYSIS == "YES" ]; then + $NLN $ATMF06 fcst.06 + $NLN $ATMF06ENS fcst.ensres.06 + $NLN $ATMFCST_ENSRES atmens_fcst + if [ $DOHYBVAR = "YES" -a $l4densvar = ".true." -a $lwrite4danl = ".true." ]; then + $NLN $ATMF03 fcst.03 + $NLN $ATMF03ENS fcst.ensres.03 + $NLN $ATMF04 fcst.04 + $NLN $ATMF04ENS fcst.ensres.04 + $NLN $ATMF05 fcst.05 + $NLN $ATMF05ENS fcst.ensres.05 + $NLN $ATMF07 fcst.07 + $NLN $ATMF07ENS fcst.ensres.07 + $NLN $ATMF08 fcst.08 + $NLN $ATMF08ENS fcst.ensres.08 + $NLN $ATMF09 fcst.09 + $NLN $ATMF09ENS fcst.ensres.09 + fi + export OMP_NUM_THREADS=$NTHREADS_CHGRES + SIGLEVEL=${SIGLEVEL:-${FIXgsm}/global_hyblev.l${LEVS_ENKF}.txt} + + if [ $USE_CFP = "YES" ]; then + [[ -f $DATA/mp_chgres.sh ]] && rm $DATA/mp_chgres.sh + fi + + nfhrs=`echo $IAUFHRS_ENKF | sed 's/,/ /g'` + for FHR in $nfhrs; do + echo "Regridding deterministic forecast for forecast hour $FHR" + rm -f chgres_nc_gauss0$FHR.nml +cat > chgres_nc_gauss0$FHR.nml << EOF +&chgres_setup +i_output=$LONB_ENKF +j_output=$LATB_ENKF +input_file="fcst.0$FHR" +output_file="fcst.ensres.0$FHR" +terrain_file="atmens_fcst" +ref_file="atmens_fcst" +/ +EOF + if [ $USE_CFP = "YES" ]; then + echo "$nm $APRUN_CHGRES $CHGRESNCEXEC chgres_nc_gauss0$FHR.nml" | tee -a $DATA/mp_chgres.sh + if [ ${CFP_MP:-"NO"} = "YES" ]; then + nm=$((nm+1)) + fi + else + + export pgm=$CHGRESNCEXEC + . prep_step + + $APRUN_CHGRES $CHGRESNCEXEC chgres_nc_gauss0$FHR.nml + export err=$?; err_chk + fi + done + + if [ $USE_CFP = "YES" ]; then + chmod 755 $DATA/mp_chgres.sh + ncmd=$(cat $DATA/mp_chgres.sh | wc -l) + if [ $ncmd -gt 0 ]; then + ncmd_max=$((ncmd < npe_node_max ? ncmd : npe_node_max)) + APRUNCFP_CHGRES=$(eval echo $APRUNCFP) + + export pgm=$CHGRESNCEXEC + . 
prep_step + + $APRUNCFP_CHGRES $DATA/mp_chgres.sh + export err=$?; err_chk + fi + fi + +else + echo "DO_CALC_ANALYSIS != YES, doing nothing" +fi + + +################################################################################ +# Postprocessing +cd $pwd +[[ $mkdata = "YES" ]] && rm -rf $DATA + +set +x +if [ $VERBOSE = "YES" ]; then + echo $(date) EXITING $0 with return code $err >&2 +fi +exit $err diff --git a/scripts/exgdas_atmos_verfozn.sh b/scripts/exgdas_atmos_verfozn.sh new file mode 100755 index 0000000000..939b03a3d4 --- /dev/null +++ b/scripts/exgdas_atmos_verfozn.sh @@ -0,0 +1,100 @@ +#/bin/sh + +set -ax + +################################################################################ +# exgdas_vrfyozn.sh +# +# This script runs the data extract/validation portion of the Ozone Monitor +# (OznMon) DA package. +# +################################################################################ +export scr=exgdas_vrfyozn.sh + +err=0 + +#------------------------------------------------------------------------------- +# Set environment +# +export RUN_ENVIR=${RUN_ENVIR:-nco} +export NET=${NET:-gfs} +export RUN=${RUN:-gdas} +export envir=${envir:-prod} +export COMPONENT=${COMPONENT:-atmos} + +# Command line arguments +export PDY=${1:-${PDY:?}} +export cyc=${2:-${cyc:?}} + + +# Other variables +export SATYPE_FILE=${SATYPE_FILE:-$FIXgdas_ozn/gdas_oznmon_satype.txt} +export PDATE=${PDY}${cyc} +export DO_DATA_RPT=${DO_DATA_RPT:-1} +export NCP=${NCP:-/bin/cp} + + +#----------------------------------------------------------------- +# ensure work and TANK dirs exist, verify oznstat is available +# +export OZN_WORK_DIR=${OZN_WORK_DIR:-$(pwd)} + +if [[ ! -d ${OZN_WORK_DIR} ]]; then + mkdir $OZN_WORK_DIR +fi +cd $OZN_WORK_DIR + +if [[ ! -d ${TANKverf_ozn} ]]; then + mkdir -p $TANKverf_ozn +fi + +if [[ -s ${oznstat} ]]; then + echo ${oznstat} is available +fi + + + +data_available=0 + +if [[ -s ${oznstat} ]]; then + data_available=1 + + #------------------------------------------------------------------ + # Copy data files file to local data directory. + # Untar oznstat file. + #------------------------------------------------------------------ + + $NCP $oznstat ./oznstat.$PDATE + + tar -xvf oznstat.$PDATE + rm oznstat.$PDATE + + netcdf=0 + count=`ls diag* | grep ".nc4" | wc -l` + if [ $count -gt 0 ] ; then + netcdf=1 + for filenc4 in `ls diag*nc4.gz`; do + file=`echo $filenc4 | cut -d'.' -f1-2`.gz + mv $filenc4 $file + done + fi + + export OZNMON_NETCDF=${netcdf} + + ${HOMEoznmon}/ush/ozn_xtrct.sh + err=$? + +else + # oznstat file not found + err=1 +fi + + +if [[ "$VERBOSE" = "YES" ]]; then + echo "end exgdas_vrfyozn.sh, exit value = ${err}" +fi + + +set +x +exit ${err} + diff --git a/scripts/exgdas_atmos_verfrad.sh b/scripts/exgdas_atmos_verfrad.sh new file mode 100755 index 0000000000..7aec25432f --- /dev/null +++ b/scripts/exgdas_atmos_verfrad.sh @@ -0,0 +1,228 @@ +#/bin/sh +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: exgdas_vrfyrad.sh +# Script description: Runs data extract/validation for global radiance diag data +# +# Author: Ed Safford Org: NP23 Date: 2012-01-18 +# +# Abstract: This script runs the data extract/validation portion of the +# RadMon package. 
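+#           In brief, as coded below: the cycle's radstat and abias files are
+#           copied locally, the radstat tarball is untarred, the diag files are
+#           renamed and uncompressed, and the RadMon ush child scripts (angle,
+#           bcoef, bcor, time) extract data into $TANKverf_rad.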
+# +# Condition codes +# 0 - no problem encountered +# >0 - some problem encountered +# +################################################################################ +scr=exgdas_vrfyrad.sh +echo "${scr} HAS STARTED" + +export VERBOSE=${VERBOSE:-"NO"} +if [[ "$VERBOSE" = "YES" ]] +then + set -x +fi + + +export RUN_ENVIR=${RUN_ENVIR:-nco} +export NET=${NET:-gfs} +export RUN=${RUN:-gdas} +export envir=${envir:-prod} +export COMPONENT=${COMPONENT:-atmos} + +# Command line arguments +export PDY=${1:-${PDY:?}} +export cyc=${2:-${cyc:?}} + +# Directories +export COM_IN=${COM_IN:-$(compath.py ${envir}/${NET}/${gfs_ver})} +export COMIN=${COMIN:-$COM_IN/${RUN}.${PDY}/${cyc}/$COMPONENT} + + +# Filenames +export biascr=${biascr:-$COMIN/gdas.t${cyc}z.abias} +export radstat=${radstat:-$COMIN/gdas.t${cyc}z.radstat} +export satype_file=${satype_file:-${FIXgdas}/gdas_radmon_satype.txt} + +# Other variables +export RAD_AREA=${RAD_AREA:-glb} +export MAKE_CTL=${MAKE_CTL:-1} +export MAKE_DATA=${MAKE_DATA:-1} +export USE_ANL=${USE_ANL:-1} +export PDATE=${PDY}${cyc} +export DO_DIAG_RPT=${DO_DIAG_RPT:-1} +export DO_DATA_RPT=${DO_DATA_RPT:-1} +export USE_MAIL=${USE_MAIL:-0} +export MAIL_TO=${MAIL_TO:-" "} +export MAIL_CC=${MAIL_CC:-" "} +export NCP=${NCP:-/bin/cp} + +########################################################################### +# ensure TANK dir exists, verify radstat and biascr are available +# +if [[ ! -d ${TANKverf_rad} ]]; then + mkdir -p $TANKverf_rad +fi + +if [[ "$VERBOSE" = "YES" ]]; then + if [[ -s ${radstat} ]]; then + echo ${radstat} is available + fi + if [[ -s ${biascr} ]]; then + echo ${biascr} is available + fi +fi +##################################################################### + +data_available=0 +if [[ -s ${radstat} && -s ${biascr} ]]; then + data_available=1 + + #------------------------------------------------------------------ + # Copy data files file to local data directory. + # Untar radstat file. + #------------------------------------------------------------------ + + $NCP $biascr ./biascr.$PDATE + $NCP $radstat ./radstat.$PDATE + + tar -xvf radstat.$PDATE + rm radstat.$PDATE + + #------------------------------------------------------------------ + # SATYPE is the list of expected satellite/instrument sources + # in the radstat file. It should be stored in the $TANKverf + # directory. If it isn't there then use the $FIXgdas copy. In all + # cases write it back out to the radmon.$PDY directory. Add any + # new sources to the list before writing back out. + #------------------------------------------------------------------ + + radstat_satype=`ls d*ges* | awk -F_ '{ print $2 "_" $3 }'` + if [[ "$VERBOSE" = "YES" ]]; then + echo $radstat_satype + fi + + echo satype_file = $satype_file + + #------------------------------------------------------------------ + # Get previous cycle's date, and look for the satype_file. Using + # the previous cycle will get us the previous day's directory if + # the cycle being processed is 00z. + #------------------------------------------------------------------ + if [[ $cyc = "00" ]]; then + use_tankdir=${TANKverf_radM1} + else + use_tankdir=${TANKverf_rad} + fi + + echo satype_file = $satype_file + export SATYPE=`cat ${satype_file}` + + + #------------------------------------------------------------- + # Update the SATYPE if any new sat/instrument was + # found in $radstat_satype. Write the SATYPE contents back + # to $TANKverf/radmon.$PDY. 
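+   # Note: the block below only records additions in $new_satype and sets
+   # satype_changes; SATYPE itself remains as read from $satype_file.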
+ #------------------------------------------------------------- + satype_changes=0 + new_satype=$SATYPE + for type in ${radstat_satype}; do + test=`echo $SATYPE | grep $type | wc -l` + + if [[ $test -eq 0 ]]; then + if [[ "$VERBOSE" = "YES" ]]; then + echo "Found $type in radstat file but not in SATYPE list. Adding it now." + fi + satype_changes=1 + new_satype="$new_satype $type" + fi + done + + + #------------------------------------------------------------------ + # Rename the diag files and uncompress + #------------------------------------------------------------------ + netcdf=0 + + for type in ${SATYPE}; do + + if [[ netcdf -eq 0 && -e diag_${type}_ges.${PDATE}.nc4.${Z} ]]; then + netcdf=1 + fi + + mv diag_${type}_ges.${PDATE}*.${Z} ${type}.${Z} + ${UNCOMPRESS} ./${type}.${Z} + + if [[ $USE_ANL -eq 1 ]]; then + mv diag_${type}_anl.${PDATE}*.${Z} ${type}_anl.${Z} + ${UNCOMPRESS} ./${type}_anl.${Z} + fi + done + + export RADMON_NETCDF=$netcdf + + + #------------------------------------------------------------------ + # Run the child sccripts. + #------------------------------------------------------------------ + ${USHradmon}/radmon_verf_angle.sh ${PDATE} + rc_angle=$? + + ${USHradmon}/radmon_verf_bcoef.sh ${PDATE} + rc_bcoef=$? + + ${USHradmon}/radmon_verf_bcor.sh ${PDATE} + rc_bcor=$? + + ${USHradmon}/radmon_verf_time.sh ${PDATE} + rc_time=$? + + #-------------------------------------- + # optionally run clean_tankdir script + # + if [[ ${CLEAN_TANKVERF} -eq 1 ]]; then + ${USHradmon}/clean_tankdir.sh glb 60 + rc_clean_tankdir=$? + echo "rc_clean_tankdir = $rc_clean_tankdir" + fi + +fi + + + +##################################################################### +# Postprocessing + +err=0 +if [[ ${data_available} -ne 1 ]]; then + err=1 +elif [[ $rc_angle -ne 0 ]]; then + err=$rc_angle +elif [[ $rc_bcoef -ne 0 ]]; then + err=$rc_bcoef +elif [[ $rc_bcor -ne 0 ]]; then + err=$rc_bcor +elif [[ $rc_time -ne 0 ]]; then + err=$rc_time +fi + +##################################################################### +# Restrict select sensors and satellites +export CHGRP_CMD=${CHGRP_CMD:-"chgrp ${group_name:-rstprod}"} +rlist="saphir" +for rtype in $rlist; do + ${CHGRP_CMD} $TANKverf_rad/*${rtype}* +done + + +if [[ "$VERBOSE" = "YES" ]]; then + echo "end exgdas_vrfyrad.sh, exit value = ${err}" +fi + +echo "${scr} HAS ENDED" + + +set +x +exit ${err} + diff --git a/scripts/exgdas_atmos_vminmon.sh b/scripts/exgdas_atmos_vminmon.sh new file mode 100755 index 0000000000..f5087e41d5 --- /dev/null +++ b/scripts/exgdas_atmos_vminmon.sh @@ -0,0 +1,131 @@ +#/bin/sh +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: exgdas_vrfminmon.sh +# Script description: Runs data extract/validation for GSI normalization diag data +# +# Author: Ed Safford Org: NP23 Date: 2015-04-10 +# +# Abstract: This script runs the data extract/validation portion of the +# MinMon package. 
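+#           In brief, as coded below: the cycle's gsistat file is read and the
+#           minmon_xtrct_costs/gnorms/reduct.pl child scripts extract cost,
+#           gnorm, and reduction data for the MinMon time series.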
+# +# Condition codes +# 0 - no problem encountered +# >0 - some problem encountered +# +################################################################################ + + +######################################## +# Set environment +######################################## +export VERBOSE=${VERBOSE:-"NO"} +if [[ "$VERBOSE" = "YES" ]] +then + set -x +fi + +export scr=exgdas_vrfyminmon.sh + + +export RUN_ENVIR=${RUN_ENVIR:-nco} +export NET=${NET:-gfs} +export RUN=${RUN:-gdas} +export envir=${envir:-prod} + +######################################## +# Command line arguments +######################################## +export PDY=${1:-${PDY:?}} +export cyc=${2:-${cyc:?}} + +######################################## +# Directories +######################################## +export DATA=${DATA:-$(pwd)} + + +######################################## +# Filenames +######################################## +gsistat=${gsistat:-$COMIN/gdas.t${cyc}z.gsistat} +export mm_gnormfile=${gnormfile:-${M_FIXgdas}/gdas_minmon_gnorm.txt} +export mm_costfile=${costfile:-${M_FIXgdas}/gdas_minmon_cost.txt} + +######################################## +# Other variables +######################################## +export MINMON_SUFFIX=${MINMON_SUFFIX:-GDAS} +export PDATE=${PDY}${cyc} +export NCP=${NCP:-/bin/cp} +export pgm=exgdas_vrfminmon.sh + +if [[ ! -d ${DATA} ]]; then + mkdir $DATA +fi +cd $DATA + +###################################################################### + +data_available=0 + +if [[ -s ${gsistat} ]]; then + + data_available=1 + + #----------------------------------------------------------------------- + # Copy the $MINMON_SUFFIX.gnorm_data.txt file to the working directory + # It's ok if it doesn't exist; we'll create a new one if needed. + # + # Note: The logic below is to accomodate two different data storage + # methods. Some parallels (and formerly ops) dump all MinMon data for + # a given day in the same directory (if condition). Ops now separates + # data into ${cyc} subdirectories (elif condition). + #----------------------------------------------------------------------- + if [[ -s ${M_TANKverf}/gnorm_data.txt ]]; then + $NCP ${M_TANKverf}/gnorm_data.txt gnorm_data.txt + elif [[ -s ${M_TANKverfM1}/gnorm_data.txt ]]; then + $NCP ${M_TANKverfM1}/gnorm_data.txt gnorm_data.txt + fi + + + #------------------------------------------------------------------ + # Run the child sccripts. + #------------------------------------------------------------------ + ${USHminmon}/minmon_xtrct_costs.pl ${MINMON_SUFFIX} ${PDY} ${cyc} ${gsistat} dummy + rc_costs=$? + echo "rc_costs = $rc_costs" + + ${USHminmon}/minmon_xtrct_gnorms.pl ${MINMON_SUFFIX} ${PDY} ${cyc} ${gsistat} dummy + rc_gnorms=$? + echo "rc_gnorms = $rc_gnorms" + + ${USHminmon}/minmon_xtrct_reduct.pl ${MINMON_SUFFIX} ${PDY} ${cyc} ${gsistat} dummy + rc_reduct=$? 
+ echo "rc_reduct = $rc_reduct" + +fi + +##################################################################### +# Postprocessing + +err=0 +if [[ ${data_available} -ne 1 ]]; then + err=1 +elif [[ $rc_costs -ne 0 ]]; then + err=$rc_costs +elif [[ $rc_gnorms -ne 0 ]]; then + err=$rc_gnorms +elif [[ $rc_reduct -ne 0 ]]; then + err=$rc_reduct +fi + +if [[ "$VERBOSE" = "YES" ]]; then + echo "end exgdas_vrfminmon.sh, exit value = ${err}" +fi + + +set +x +exit ${err} + diff --git a/scripts/exgdas_enkf_ecen.sh b/scripts/exgdas_enkf_ecen.sh new file mode 100755 index 0000000000..41dffd94f2 --- /dev/null +++ b/scripts/exgdas_enkf_ecen.sh @@ -0,0 +1,376 @@ +#!/bin/bash +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: exgdas_enkf_ecen.sh +# Script description: recenter ensemble around hi-res deterministic analysis +# +# Author: Rahul Mahajan Org: NCEP/EMC Date: 2017-03-02 +# +# Abstract: This script recenters ensemble around hi-res deterministic analysis +# +# $Id$ +# +# Attributes: +# Language: POSIX shell +# Machine: WCOSS-Cray/Theia +# +################################################################################ + +# Set environment. +VERBOSE=${VERBOSE:-"YES"} +if [ $VERBOSE = "YES" ]; then + echo $(date) EXECUTING $0 $* >&2 + set -x +fi + +# Directories. +pwd=$(pwd) + +# Base variables +CDATE=${CDATE:-"2010010100"} +DONST=${DONST:-"NO"} +export CASE=${CASE:-384} +ntiles=${ntiles:-6} + +# Utilities +NCP=${NCP:-"/bin/cp -p"} +NLN=${NLN:-"/bin/ln -sf"} +NCLEN=${NCLEN:-$HOMEgfs/ush/getncdimlen} + +# Scripts + +# Executables. +GETATMENSMEANEXEC=${GETATMENSMEANEXEC:-$HOMEgfs/exec/getsigensmeanp_smooth.x} +GETSFCENSMEANEXEC=${GETSFCENSMEANEXEC:-$HOMEgfs/exec/getsfcensmeanp.x} +RECENATMEXEC=${RECENATMEXEC:-$HOMEgfs/exec/recentersigp.x} +CALCINCNEMSEXEC=${CALCINCNEMSEXEC:-$HOMEgfs/exec/calc_increment_ens.x} +CALCINCNCEXEC=${CALCINCEXEC:-$HOMEgfs/exec/calc_increment_ens_ncio.x} + +# Files. +OPREFIX=${OPREFIX:-""} +OSUFFIX=${OSUFFIX:-""} +APREFIX=${APREFIX:-""} +APREFIX_ENKF=${APREFIX_ENKF:-$APREFIX} +ASUFFIX=${ASUFFIX:-$SUFFIX} +GPREFIX=${GPREFIX:-""} +GSUFFIX=${GSUFFIX:-$SUFFIX} + +# Variables +NMEM_ENKF=${NMEM_ENKF:-80} +imp_physics=${imp_physics:-99} +INCREMENTS_TO_ZERO=${INCREMENTS_TO_ZERO:-"'NONE'"} +DOIAU=${DOIAU_ENKF:-"NO"} +FHMIN=${FHMIN_ECEN:-3} +FHMAX=${FHMAX_ECEN:-9} +FHOUT=${FHOUT_ECEN:-3} +FHSFC=${FHSFC_ECEN:-$FHMIN} +DO_CALC_INCREMENT=${DO_CALC_INCREMENT:-"NO"} + + +# global_chgres stuff +CHGRESNEMS=${CHGRESNEMS:-$HOMEgfs/exec/enkf_chgres_recenter.x} +CHGRESNC=${CHGRESNC:-$HOMEgfs/exec/enkf_chgres_recenter_nc.x} +NTHREADS_CHGRES=${NTHREADS_CHGRES:-24} +APRUN_CHGRES=${APRUN_CHGRES:-""} + +# global_cycle stuff +CYCLESH=${CYCLESH:-$HOMEgfs/ush/global_cycle.sh} +export CYCLEXEC=${CYCLEXEC:-$HOMEgfs/exec/global_cycle} +APRUN_CYCLE=${APRUN_CYCLE:-${APRUN:-""}} +NTHREADS_CYCLE=${NTHREADS_CYCLE:-${NTHREADS:-1}} +export FIXfv3=${FIXfv3:-$HOMEgfs/fix/fix_fv3_gmted2010} +export FIXgsm=${FIXgsm:-$HOMEgfs/fix/fix_am} +export CYCLVARS=${CYCLVARS:-"FSNOL=-2.,FSNOS=99999.,"} +export FHOUR=${FHOUR:-0} +export DELTSFC=${DELTSFC:-6} + + +RECENTER_ENKF=${RECENTER_ENKF:-"YES"} +SMOOTH_ENKF=${SMOOTH_ENKF:-"YES"} + +APRUN_ECEN=${APRUN_ECEN:-${APRUN:-""}} +NTHREADS_ECEN=${NTHREADS_ECEN:-${NTHREADS:-1}} +APRUN_CALCINC=${APRUN_CALCINC:-${APRUN:-""}} +NTHREADS_CALCINC=${NTHREADS_CALCINC:-${NTHREADS:-1}} + +################################################################################ +# Preprocessing +mkdata=NO +if [ ! 
-d $DATA ]; then + mkdata=YES + mkdir -p $DATA +fi +cd $DATA || exit 99 + +ENKF_SUFFIX="s" +[[ $SMOOTH_ENKF = "NO" ]] && ENKF_SUFFIX="" + +################################################################################ +# Link ensemble member guess, analysis and increment files +for FHR in $(seq $FHMIN $FHOUT $FHMAX); do + +for imem in $(seq 1 $NMEM_ENKF); do + memchar="mem"$(printf %03i $imem) + $NLN $COMIN_GES_ENS/$memchar/${GPREFIX}atmf00${FHR}${ENKF_SUFFIX}$GSUFFIX ./atmges_$memchar + if [ $DO_CALC_INCREMENT = "YES" ]; then + if [ $FHR -eq 6 ]; then + $NLN $COMIN_ENS/$memchar/${APREFIX_ENKF}atmanl$ASUFFIX ./atmanl_$memchar + else + $NLN $COMIN_ENS/$memchar/${APREFIX_ENKF}atma00${FHR}$ASUFFIX ./atmanl_$memchar + fi + fi + mkdir -p $COMOUT_ENS/$memchar + if [ $FHR -eq 6 ]; then + $NLN $COMOUT_ENS/$memchar/${APREFIX}atminc.nc ./atminc_$memchar + else + $NLN $COMOUT_ENS/$memchar/${APREFIX}atmi00${FHR}.nc ./atminc_$memchar + fi + if [[ $RECENTER_ENKF = "YES" ]]; then + if [ $DO_CALC_INCREMENT = "YES" ]; then + if [ $FHR -eq 6 ]; then + $NLN $COMOUT_ENS/$memchar/${APREFIX}ratmanl$ASUFFIX ./ratmanl_$memchar + else + $NLN $COMOUT_ENS/$memchar/${APREFIX}ratma00${FHR}$ASUFFIX ./ratmanl_$memchar + fi + else + if [ $FHR -eq 6 ]; then + $NLN $COMOUT_ENS/$memchar/${APREFIX}ratminc$ASUFFIX ./ratminc_$memchar + else + $NLN $COMOUT_ENS/$memchar/${APREFIX}ratmi00${FHR}$ASUFFIX ./ratminc_$memchar + fi + fi + fi +done + +if [ $DO_CALC_INCREMENT = "YES" ]; then + # Link ensemble mean analysis + if [ $FHR -eq 6 ]; then + $NLN $COMIN_ENS/${APREFIX_ENKF}atmanl.ensmean$ASUFFIX ./atmanl_ensmean + else + $NLN $COMIN_ENS/${APREFIX_ENKF}atma00${FHR}.ensmean$ASUFFIX ./atmanl_ensmean + fi + + # Compute ensemble mean analysis + DATAPATH="./" + ATMANLNAME="atmanl" + ATMANLMEANNAME="atmanl_ensmean" + + export OMP_NUM_THREADS=$NTHREADS_ECEN + export pgm=$GETATMENSMEANEXEC + . prep_step + + $NCP $GETATMENSMEANEXEC $DATA + $APRUN_ECEN ${DATA}/$(basename $GETATMENSMEANEXEC) $DATAPATH $ATMANLMEANNAME $ATMANLNAME $NMEM_ENKF + export err=$?; err_chk +else + # Link ensemble mean increment + if [ $FHR -eq 6 ]; then + $NLN $COMIN_ENS/${APREFIX_ENKF}atminc.ensmean$ASUFFIX ./atminc_ensmean + else + $NLN $COMIN_ENS/${APREFIX_ENKF}atmi00${FHR}.ensmean$ASUFFIX ./atminc_ensmean + fi + + # Compute ensemble mean increment + DATAPATH="./" + ATMINCNAME="atminc" + ATMINCMEANNAME="atminc_ensmean" + + export OMP_NUM_THREADS=$NTHREADS_ECEN + export pgm=$GETATMENSMEANEXEC + . prep_step + + $NCP $GETATMENSMEANEXEC $DATA + $APRUN_ECEN ${DATA}/$(basename $GETATMENSMEANEXEC) $DATAPATH $ATMINCMEANNAME $ATMINCNAME $NMEM_ENKF + export err=$?; err_chk + + # If available, link to ensemble mean guess. Otherwise, compute ensemble mean guess + if [ -s $COMIN_GES_ENS/${GPREFIX}atmf00${FHR}.ensmean$GSUFFIX ]; then + $NLN $COMIN_GES_ENS/${GPREFIX}atmf00${FHR}.ensmean$GSUFFIX ./atmges_ensmean + else + DATAPATH="./" + ATMGESNAME="atmges" + ATMGESMEANNAME="atmges_ensmean" + + export OMP_NUM_THREADS=$NTHREADS_ECEN + export pgm=$GETATMENSMEANEXEC + . 
prep_step + + $NCP $GETATMENSMEANEXEC $DATA + $APRUN_ECEN ${DATA}/$(basename $GETATMENSMEANEXEC) $DATAPATH $ATMGESMEANNAME $ATMGESNAME $NMEM_ENKF + export err=$?; err_chk + fi +fi + +if [ ${SUFFIX} = ".nc" ]; then + if [ $DO_CALC_INCREMENT = "YES" ]; then + LONB_ENKF=${LONB_ENKF:-$($NCLEN atmanl_ensmean grid_xt)} # get LONB + LATB_ENKF=${LATB_ENKF:-$($NCLEN atmanl_ensmean grid_yt)} # get LATB + LEVS_ENKF=${LEVS_ENKF:-$($NCLEN atmanl_ensmean pfull)} # get LEVS + else + LONB_ENKF=${LONB_ENKF:-$($NCLEN atminc_ensmean lon)} # get LONB + LATB_ENKF=${LATB_ENKF:-$($NCLEN atminc_ensmean lat)} # get LATB + LEVS_ENKF=${LEVS_ENKF:-$($NCLEN atminc_ensmean lev)} # get LEVS + fi + JCAP_ENKF=${JCAP_ENKF:--9999} # there is no jcap in these files +else + LONB_ENKF=${LONB_ENKF:-$($NEMSIOGET atmanl_ensmean dimx | awk '{print $2}')} + LATB_ENKF=${LATB_ENKF:-$($NEMSIOGET atmanl_ensmean dimy | awk '{print $2}')} + LEVS_ENKF=${LEVS_ENKF:-$($NEMSIOGET atmanl_ensmean dimz | awk '{print $2}')} + JCAP_ENKF=${JCAP_ENKF:-$($NEMSIOGET atmanl_ensmean jcap | awk '{print $2}')} +fi +[ $JCAP_ENKF -eq -9999 -a $LATB_ENKF -ne -9999 ] && JCAP_ENKF=$((LATB_ENKF-2)) +[ $LONB_ENKF -eq -9999 -o $LATB_ENKF -eq -9999 -o $LEVS_ENKF -eq -9999 -o $JCAP_ENKF -eq -9999 ] && exit -9999 + +################################################################################ +# This is to give the user the option to recenter, default is YES +if [ $RECENTER_ENKF = "YES" ]; then + + # GSI EnVar analysis + if [ $FHR -eq 6 ]; then + ATMANL_GSI=$COMIN/${APREFIX}atmanl$ASUFFIX + ATMANL_GSI_ENSRES=$COMIN/${APREFIX}atmanl.ensres$ASUFFIX + else + ATMANL_GSI=$COMIN/${APREFIX}atma00${FHR}$ASUFFIX + ATMANL_GSI_ENSRES=$COMIN/${APREFIX}atma00${FHR}.ensres$ASUFFIX + fi + + # if we already have a ensemble resolution GSI analysis then just link to it + if [ -f $ATMANL_GSI_ENSRES ]; then + + $NLN $ATMANL_GSI_ENSRES atmanl_gsi_ensres + + else + + $NLN $ATMANL_GSI atmanl_gsi + $NLN $ATMANL_GSI_ENSRES atmanl_gsi_ensres + SIGLEVEL=${SIGLEVEL:-${FIXgsm}/global_hyblev.l${LEVS}.txt} + if [ ${SUFFIX} = ".nc" ]; then + $NLN $CHGRESNC chgres.x + chgresnml=chgres_nc_gauss.nml + nmltitle=chgres + else + $NLN $CHGRESNEMS chgres.x + chgresnml=fort.43 + nmltitle=nam + fi + + export OMP_NUM_THREADS=$NTHREADS_CHGRES + + [[ -f $chgresnml ]] && rm -f $chgresnml + cat > $chgresnml << EOF +&${nmltitle}_setup + i_output=$LONB_ENKF + j_output=$LATB_ENKF + input_file="atmanl_gsi" + output_file="atmanl_gsi_ensres" + terrain_file="atmanl_ensmean" + vcoord_file="$SIGLEVEL" +/ +EOF + cat $chgresnml + $APRUN_CHGRES ./chgres.x + export err=$?; err_chk + fi + + if [ $DO_CALC_INCREMENT = "YES" ]; then + ################################################################################ + # Recenter ensemble member atmospheric analyses about hires analysis + + FILENAMEIN="atmanl" + FILENAME_MEANIN="atmanl_ensmean" # EnKF ensemble mean analysis + FILENAME_MEANOUT="atmanl_gsi_ensres" # recenter around GSI analysis at ensemble resolution + FILENAMEOUT="ratmanl" + + export OMP_NUM_THREADS=$NTHREADS_ECEN + export pgm=$RECENATMEXEC + . 
prep_step + + $NCP $RECENATMEXEC $DATA + $APRUN_ECEN ${DATA}/$(basename $RECENATMEXEC) $FILENAMEIN $FILENAME_MEANIN $FILENAME_MEANOUT $FILENAMEOUT $NMEM_ENKF + export err=$?; err_chk + else + ################################################################################ + # Recenter ensemble member atmospheric increments about hires analysis + + FILENAMEIN="atminc" + FILENAME_INCMEANIN="atminc_ensmean" # EnKF ensemble mean increment + FILENAME_GESMEANIN="atmges_ensmean" # EnKF ensemble mean guess + FILENAME_GSIDET="atmanl_gsi_ensres" # recenter around GSI analysis at ensemble resolution + FILENAMEOUT="ratminc" + + export OMP_NUM_THREADS=$NTHREADS_ECEN + + # make the small namelist file for incvars_to_zero + + [[ -f recenter.nml ]] && rm recenter.nml + cat > recenter.nml << EOF +&recenter + incvars_to_zero = $INCREMENTS_TO_ZERO +/ +EOF +cat recenter.nml + + export pgm=$RECENATMEXEC + . prep_step + + $NCP $RECENATMEXEC $DATA + $APRUN_ECEN ${DATA}/$(basename $RECENATMEXEC) $FILENAMEIN $FILENAME_INCMEANIN $FILENAME_GSIDET $FILENAMEOUT $NMEM_ENKF $FILENAME_GESMEANIN + export err=$?; err_chk + fi +fi + +################################################################################ +# Calculate ensemble analysis increment +if [ $DO_CALC_INCREMENT = "YES" ]; then + if [ $RECENTER_ENKF = "YES" ]; then + ATMANLNAME='ratmanl' + else + ATMANLNAME='atmanl' + fi + + export OMP_NUM_THREADS=$NTHREADS_CALCINC + if [ ${SUFFIX} = ".nc" ]; then + CALCINCEXEC=$CALCINCNCEXEC + else + CALCINCEXEC=$CALCINCNEMSEXEC + fi + + export pgm=$CALCINCEXEC + . prep_step + + $NCP $CALCINCEXEC $DATA + + rm calc_increment.nml + cat > calc_increment.nml << EOF +&setup + datapath = './' + analysis_filename = '$ATMANLNAME' + firstguess_filename = 'atmges' + increment_filename = 'atminc' + debug = .false. + nens = $NMEM_ENKF + imp_physics = $imp_physics +/ +&zeroinc + incvars_to_zero = $INCREMENTS_TO_ZERO +/ +EOF +cat calc_increment.nml + + $APRUN_CALCINC ${DATA}/$(basename $CALCINCEXEC) + export err=$?; err_chk +fi +done # loop over analysis times in window + +################################################################################ + +################################################################################ +# Postprocessing +cd $pwd +[[ $mkdata = "YES" ]] && rm -rf $DATA +set +x +if [ $VERBOSE = "YES" ]; then + echo $(date) EXITING $0 with return code $err >&2 +fi +exit $err diff --git a/scripts/exgdas_enkf_fcst.sh b/scripts/exgdas_enkf_fcst.sh new file mode 100755 index 0000000000..3263115057 --- /dev/null +++ b/scripts/exgdas_enkf_fcst.sh @@ -0,0 +1,235 @@ +#!/bin/bash +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: exgdas_enkf_fcst.sh +# Script description: Run ensemble forecasts +# +# Author: Rahul Mahajan Org: NCEP/EMC Date: 2017-03-02 +# +# Abstract: This script runs ensemble forecasts serially one-after-another +# +# $Id$ +# +# Attributes: +# Language: POSIX shell +# Machine: WCOSS-Cray/Theia +# +#### +################################################################################ + +# Set environment. +export VERBOSE=${VERBOSE:-"YES"} +if [ $VERBOSE = "YES" ] ; then + echo $(date) EXECUTING $0 $* >&2 + set -x +fi + +# Directories. +pwd=$(pwd) +export FIX_DIR=${FIX_DIR:-$HOMEgfs/fix} +export FIX_AM=${FIX_AM:-$FIX_DIR/fix_am} + +# Utilities +export NCP=${NCP:-"/bin/cp -p"} +export NMV=${NMV:-"/bin/mv"} +export NLN=${NLN:-"/bin/ln -sf"} + +# Scripts. 
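+# FORECASTSH is the single-member forecast driver; the member loop further down
+# re-invokes it once per member with MEMBER and DATA reset. The default below
+# can be overridden by the calling job.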
+FORECASTSH=${FORECASTSH:-$HOMEgfs/scripts/exglobal_forecast.sh} + +# Enemble group, begin and end +ENSGRP=${ENSGRP:-1} +ENSBEG=${ENSBEG:-1} +ENSEND=${ENSEND:-1} + +# Model builds +export FCSTEXECDIR=${FCSTEXECDIR:-$HOMEgfs/sorc/fv3gfs.fd/BUILD/bin} +export FCSTEXEC=${FCSTEXEC:-fv3gfs.x} + +# Get DA specific diag table. +export PARM_FV3DIAG=${PARM_FV3DIAG:-$HOMEgfs/parm/parm_fv3diag} +export DIAG_TABLE=${DIAG_TABLE_ENKF:-${DIAG_TABLE:-$PARM_FV3DIAG/diag_table_da}} + +# Cycling and forecast hour specific parameters +export CDATE=${CDATE:-"2001010100"} +export CDUMP=${CDUMP:-"gdas"} + +# Re-run failed members, or entire group +RERUN_EFCSGRP=${RERUN_EFCSGRP:-"YES"} + +# Recenter flag and increment file prefix +RECENTER_ENKF=${RECENTER_ENKF:-"YES"} +export PREFIX_ATMINC=${PREFIX_ATMINC:-""} + +# Ops related stuff +SENDECF=${SENDECF:-"NO"} +SENDDBN=${SENDDBN:-"NO"} +GSUFFIX=${GSUFFIX:-$SUFFIX} + +################################################################################ +# Preprocessing +mkdata=NO +if [ ! -d $DATA ]; then + mkdata=YES + mkdir -p $DATA +fi +cd $DATA || exit 99 +DATATOP=$DATA + +################################################################################ +# Set output data +cymd=$(echo $CDATE | cut -c1-8) +chh=$(echo $CDATE | cut -c9-10) +EFCSGRP=$COMOUT/efcs.grp${ENSGRP} +if [ -f $EFCSGRP ]; then + if [ $RERUN_EFCSGRP = "YES" ]; then + rm -f $EFCSGRP + else + echo "RERUN_EFCSGRP = $RERUN_EFCSGRP, will re-run FAILED members only!" + $NMV $EFCSGRP ${EFCSGRP}.fail + fi +fi + +################################################################################ +# Set namelist/model config options common to all members once + +# There are many many model namelist options +# Some are resolution (CASE) dependent, some depend on the model configuration +# and will need to be added here before $FORECASTSH is called +# For now assume that +# 1. the ensemble and the deterministic are same resolution +# 2. 
the ensemble runs with the same configuration as the deterministic + +# Model config option for Ensemble +export TYPE=${TYPE_ENKF:-${TYPE:-nh}} # choices: nh, hydro +export MONO=${MONO_ENKF:-${MONO:-non-mono}} # choices: mono, non-mono + +# fv_core_nml +export CASE=${CASE_ENKF:-${CASE:-C768}} +export layout_x=${layout_x_ENKF:-${layout_x:-8}} +export layout_y=${layout_y_ENKF:-${layout_y:-16}} +export LEVS=${LEVS_ENKF:-${LEVS:-64}} + +# nggps_diag_nml +export FHOUT=${FHOUT_ENKF:-3} + +# model_configure +export DELTIM=${DELTIM_ENKF:-${DELTIM:-225}} +export FHMAX=${FHMAX_ENKF:-9} +export restart_interval=${restart_interval_ENKF:-${restart_interval:-6}} + +# gfs_physics_nml +export FHSWR=${FHSWR_ENKF:-${FHSWR:-3600.}} +export FHLWR=${FHLWR_ENKF:-${FHLWR:-3600.}} +export IEMS=${IEMS_ENKF:-${IEMS:-1}} +export ISOL=${ISOL_ENKF:-${ISOL:-2}} +export IAER=${IAER_ENKF:-${IAER:-111}} +export ICO2=${ICO2_ENKF:-${ICO2:-2}} +export cdmbgwd=${cdmbgwd_ENKF:-${cdmbgwd:-"3.5,0.25"}} +export dspheat=${dspheat_ENKF:-${dspheat:-".true."}} +export shal_cnv=${shal_cnv_ENKF:-${shal_cnv:-".true."}} +export FHZER=${FHZER_ENKF:-${FHZER:-6}} +export FHCYC=${FHCYC_ENKF:-${FHCYC:-6}} + +# Set PREFIX_ATMINC to r when recentering on +if [ $RECENTER_ENKF = "YES" ]; then + export PREFIX_ATMINC="r" +fi + +# APRUN for different executables +export APRUN_FV3=${APRUN_FV3:-${APRUN:-""}} +export NTHREADS_FV3=${NTHREADS_FV3:-${NTHREADS:-1}} + +################################################################################ +# Run forecast for ensemble member +rc=0 +for imem in $(seq $ENSBEG $ENSEND); do + + cd $DATATOP + + cmem=$(printf %03i $imem) + memchar="mem$cmem" + + echo "Processing MEMBER: $cmem" + + ra=0 + + skip_mem="NO" + if [ -f ${EFCSGRP}.fail ]; then + memstat=$(cat ${EFCSGRP}.fail | grep "MEMBER $cmem" | grep "PASS" | wc -l) + [[ $memstat -eq 1 ]] && skip_mem="YES" + fi + + if [ $skip_mem = "NO" ]; then + + ra=0 + + export MEMBER=$imem + export DATA=$DATATOP/$memchar + if [ -d $DATA ]; then rm -rf $DATA; fi + mkdir -p $DATA + $FORECASTSH + ra=$? + + # Notify a member forecast failed and abort + if [ $ra -ne 0 ]; then + err_exit "FATAL ERROR: forecast of member $cmem FAILED. 
Aborting job" + fi + + ((rc+=ra)) + + fi + + if [ $SENDDBN = YES ]; then + fhr=$FHOUT + while [ $fhr -le $FHMAX ]; do + FH3=$(printf %03i $fhr) + if [ $(expr $fhr % 3) -eq 0 ]; then + $DBNROOT/bin/dbn_alert MODEL GFS_ENKF $job $COMOUT/$memchar/${CDUMP}.t${cyc}z.sfcf${FH3}${GSUFFIX} + fi + fhr=$((fhr+FHOUT)) + done + fi + + cd $DATATOP + + if [ -s $EFCSGRP ]; then + $NCP $EFCSGRP log_old + fi + [[ -f log ]] && rm log + [[ -f log_new ]] && rm log_new + if [ $ra -ne 0 ]; then + echo "MEMBER $cmem : FAIL" > log + else + echo "MEMBER $cmem : PASS" > log + fi + if [ -s log_old ] ; then + cat log_old log > log_new + else + cat log > log_new + fi + $NCP log_new $EFCSGRP + +done + +################################################################################ +# Echo status of ensemble group +cd $DATATOP +echo "Status of ensemble members in group $ENSGRP:" +cat $EFCSGRP +[[ -f ${EFCSGRP}.fail ]] && rm ${EFCSGRP}.fail + +################################################################################ +# If any members failed, error out +export err=$rc; err_chk + +################################################################################ +# Postprocessing +cd $pwd +[[ $mkdata = "YES" ]] && rm -rf $DATATOP +set +x +if [ $VERBOSE = "YES" ] ; then + echo $(date) EXITING $0 with return code $err >&2 +fi +exit $err diff --git a/scripts/exgdas_enkf_post.sh b/scripts/exgdas_enkf_post.sh new file mode 100755 index 0000000000..ebf877b3cc --- /dev/null +++ b/scripts/exgdas_enkf_post.sh @@ -0,0 +1,169 @@ +#!/bin/bash +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: exgdas_enkf_post.sh +# Script description: Global ensemble forecast post processing +# +# Author: Rahul Mahajan Org: NCEP/EMC Date: 2017-03-02 +# +# Abstract: This script post-processes global ensemble forecast output +# +# $Id$ +# +# Attributes: +# Language: POSIX shell +# Machine: WCOSS-Cray/Theia +# +################################################################################ + +# Set environment. +VERBOSE=${VERBOSE:-"YES"} +if [ $VERBOSE = "YES" ]; then + echo $(date) EXECUTING $0 $* >&2 + set -x +fi + +# Directories. +pwd=$(pwd) + +# Utilities +NCP=${NCP:-"/bin/cp"} +NLN=${NLN:-"/bin/ln -sf"} + +APRUN_EPOS=${APRUN_EPOS:-${APRUN:-""}} +NTHREADS_EPOS=${NTHREADS_EPOS:-1} + +# Ops stuff +SENDDBN=${SENDDBN:-"NO"} + +# Fix files +LEVS=${LEVS:-64} +HYBENSMOOTH=${HYBENSMOOTH:-$FIXgsi/global_hybens_smoothinfo.l${LEVS}.txt} + +# Executables. +GETATMENSMEANEXEC=${GETATMENSMEANEXEC:-$HOMEgfs/exec/getsigensmeanp_smooth.x} +GETSFCENSMEANEXEC=${GETSFCENSMEANEXEC:-$HOMEgfs/exec/getsfcensmeanp.x} + +# Other variables. +PREFIX=${PREFIX:-""} +SUFFIX=${SUFFIX:-""} +FHMIN=${FHMIN_EPOS:-3} +FHMAX=${FHMAX_EPOS:-9} +FHOUT=${FHOUT_EPOS:-3} +NMEM_ENKF=${NMEM_ENKF:-80} +SMOOTH_ENKF=${SMOOTH_ENKF:-"NO"} +ENKF_SPREAD=${ENKF_SPREAD:-"NO"} + +################################################################################ +# Preprocessing +mkdata=NO +if [ ! 
-d $DATA ]; then
+   mkdata=YES
+   mkdir -p $DATA
+fi
+cd $DATA || exit 99
+
+ENKF_SUFFIX="s"
+[[ $SMOOTH_ENKF = "NO" ]] && ENKF_SUFFIX=""
+
+################################################################################
+# Copy executables to working directory
+$NCP $GETSFCENSMEANEXEC $DATA
+$NCP $GETATMENSMEANEXEC $DATA
+
+export OMP_NUM_THREADS=$NTHREADS_EPOS
+
+################################################################################
+# Forecast ensemble member files
+for imem in $(seq 1 $NMEM_ENKF); do
+   memchar="mem"$(printf %03i $imem)
+   for fhr in $(seq $FHMIN $FHOUT $FHMAX); do
+      fhrchar=$(printf %03i $fhr)
+      $NLN $COMIN/$memchar/${PREFIX}sfcf$fhrchar${SUFFIX} sfcf${fhrchar}_$memchar
+      $NLN $COMIN/$memchar/${PREFIX}atmf$fhrchar${SUFFIX} atmf${fhrchar}_$memchar
+   done
+done
+
+# Forecast ensemble mean and smoothed files
+for fhr in $(seq $FHMIN $FHOUT $FHMAX); do
+   fhrchar=$(printf %03i $fhr)
+   $NLN $COMOUT/${PREFIX}sfcf${fhrchar}.ensmean${SUFFIX} sfcf${fhrchar}.ensmean
+   $NLN $COMOUT/${PREFIX}atmf${fhrchar}.ensmean${SUFFIX} atmf${fhrchar}.ensmean
+   if [ $SMOOTH_ENKF = "YES" ]; then
+      for imem in $(seq 1 $NMEM_ENKF); do
+         memchar="mem"$(printf %03i $imem)
+         $NLN $COMOUT/$memchar/${PREFIX}atmf${fhrchar}${ENKF_SUFFIX}${SUFFIX} atmf${fhrchar}${ENKF_SUFFIX}_$memchar
+      done
+   fi
+   [[ $ENKF_SPREAD = "YES" ]] && $NLN $COMOUT/${PREFIX}atmf${fhrchar}.ensspread${SUFFIX} atmf${fhrchar}.ensspread
+done
+
+################################################################################
+# Generate ensemble mean surface and atmospheric files
+
+[[ $SMOOTH_ENKF = "YES" ]] && $NCP $HYBENSMOOTH ./hybens_smoothinfo
+
+rc=0
+for fhr in $(seq $FHMIN $FHOUT $FHMAX); do
+   fhrchar=$(printf %03i $fhr)
+
+   export pgm=$GETSFCENSMEANEXEC
+   . prep_step
+
+   $APRUN_EPOS ${DATA}/$(basename $GETSFCENSMEANEXEC) ./ sfcf${fhrchar}.ensmean sfcf${fhrchar} $NMEM_ENKF
+   ra=$?
+   ((rc+=ra))
+
+   export pgm=$GETATMENSMEANEXEC
+   . prep_step
+
+   if [ $ENKF_SPREAD = "YES" ]; then
+      $APRUN_EPOS ${DATA}/$(basename $GETATMENSMEANEXEC) ./ atmf${fhrchar}.ensmean atmf${fhrchar} $NMEM_ENKF atmf${fhrchar}.ensspread
+   else
+      $APRUN_EPOS ${DATA}/$(basename $GETATMENSMEANEXEC) ./ atmf${fhrchar}.ensmean atmf${fhrchar} $NMEM_ENKF
+   fi
+   ra=$?
+   ((rc+=ra))
+done
+export err=$rc; err_chk
+
+################################################################################
+# If smoothing is on but smoothed output is missing, fall back to copying the
+# unsmoothed ensemble atmospheric files to the smoothed names
+if [ $SMOOTH_ENKF = "YES" ]; then
+   for fhr in $(seq $FHMIN $FHOUT $FHMAX); do
+      fhrchar=$(printf %03i $fhr)
+      if [ ! -s atmf${fhrchar}${ENKF_SUFFIX}_mem001 ]; then
+         echo WARNING! 
no smoothed ensemble member for fhour = $fhrchar >&2 + for imem in $(seq 1 $NMEM_ENKF); do + memchar="mem"$(printf %03i $imem) + $NCP atmf${fhrchar}_$memchar atmf${fhrchar}${ENKF_SUFFIX}_$memchar + done + fi + done +fi + +################################################################################ +# Send DBN alerts +if [ $SENDDBN = "YES" ]; then + + for fhr in $(seq $FHMIN $FHOUT $FHMAX); do + fhrchar=$(printf %03i $fhr) + if [ $(expr $fhr % 3) -eq 0 ]; then + if [ -s ./sfcf${fhrchar}.ensmean ]; then + $DBNROOT/bin/dbn_alert MODEL GFS_ENKF $job $COMOUT/${PREFIX}sfcf${fhrchar}.ensmean${SUFFIX} + fi + fi + done + +fi + +################################################################################ +# Postprocessing +cd $pwd +[[ $mkdata = "YES" ]] && rm -rf $DATA +set +x +if [ $VERBOSE = "YES" ]; then + echo $(date) EXITING $0 with return code $err >&2 +fi +exit $err diff --git a/scripts/exgdas_enkf_select_obs.sh b/scripts/exgdas_enkf_select_obs.sh new file mode 100755 index 0000000000..4a0f0d3ee3 --- /dev/null +++ b/scripts/exgdas_enkf_select_obs.sh @@ -0,0 +1,127 @@ +#!/bin/bash +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: exgdas_enkf_select_obs.sh +# Script description: Compute global_gsi innovations +# +# Author: Rahul Mahajan Org: NCEP/EMC Date: 2017-03-02 +# +# Abstract: This script computes global_gsi innovations +# +# $Id$ +# +# Attributes: +# Language: POSIX shell +# Machine: WCOSS-Cray/Theia +# +################################################################################ + +# Set environment. +export VERBOSE=${VERBOSE:-"YES"} +if [ $VERBOSE = "YES" ]; then + echo $(date) EXECUTING $0 $* >&2 + set -x +fi + +# Directories. +pwd=$(pwd) + +# Utilities +export NLN=${NLN:-"/bin/ln -sf"} + +# Scripts. +ANALYSISSH=${ANALYSISSH:-$HOMEgfs/scripts/exglobal_atmos_analysis.sh} + +# Prefix and Suffix Variables. +export APREFIX=${APREFIX:-""} +export ASUFFIX=${ASUFFIX:-$SUFFIX} + +# Select obs +export RUN_SELECT=${RUN_SELECT:-"YES"} +export USE_SELECT=${USE_SELECT:-"NO"} +export SELECT_OBS=${SELECT_OBS:-$COMOUT/${APREFIX}obsinput} + +# Observation Operator GSI namelist initialization +SETUP_INVOBS=${SETUP_INVOBS:-""} +GRIDOPTS_INVOBS=${GRIDOPTS_INVOBS:-""} +BKGVERR_INVOBS=${BKGVERR_INVOBS:-""} +ANBKGERR_INVOBS=${ANBKGERR_INVOBS:-""} +JCOPTS_INVOBS=${JCOPTS_INVOBS:-""} +STRONGOPTS_INVOBS=${STRONGOPTS_INVOBS:-""} +OBSQC_INVOBS=${OBSQC_INVOBS:-""} +OBSINPUT_INVOBS=${OBSINPUT_INVOBS:-""} +SUPERRAD_INVOBS=${SUPERRAD_INVOBS:-""} +SINGLEOB_INVOBS=${SINGLEOB_INVOBS:-""} +LAGDATA_INVOBS=${LAGDATA_INVOBS:-""} +HYBRID_ENSEMBLE_INVOBS=${HYBRID_ENSEMBLE_INVOBS:-""} +RAPIDREFRESH_CLDSURF_INVOBS=${RAPIDREFRESH_CLDSURF_INVOBS:-""} +CHEM_INVOBS=${CHEM_INVOBS:-""} + +################################################################################ +# Preprocessing +mkdata=NO +if [ ! -d $DATA ]; then + mkdata=YES + mkdir -p $DATA +fi +cd $DATA || exit 8 + +[[ ! -d $COMOUT ]] && mkdir -p $COMOUT + +################################################################################ +# ObsInput file from ensemble mean +rm -f obs*input* +$NLN $SELECT_OBS obsinput.tar + +# Whether to save or skip obs +if [ $RUN_SELECT = "YES" -a $USE_SELECT = "NO" ]; then + lread_obs_save=".true." + lread_obs_skip=".false." +elif [ $RUN_SELECT = "NO" -a $USE_SELECT = "YES" ]; then + lread_obs_save=".false." + lread_obs_skip=".true." 
+fi + +################################################################################ +# Innovation Specific setup for ANALYSISSH +export DIAG_SUFFIX=${DIAG_SUFFIX:-""} +export DIAG_COMPRESS=${DIAG_COMPRESS:-"NO"} +export DIAG_TARBALL=${DIAG_TARBALL:-"YES"} +export DOHYBVAR="NO" +export DO_CALC_INCREMENT="NO" +export DO_CALC_ANALYSIS="NO" +export USE_CORRELATED_OBERRS="NO" +export write_fv3_increment=".false." + +# GSI Namelist options for observation operator only +export SETUP="miter=0,niter=1,lread_obs_save=$lread_obs_save,lread_obs_skip=$lread_obs_skip,lwrite_predterms=.true.,lwrite_peakwt=.true.,reduce_diag=.true.,$SETUP_INVOBS" +export GRIDOPTS="$GRIDOPTS_INVOBS" +export BKGVERR="bkgv_flowdep=.false.,$BKGVERR_INVOBS" +export ANBKGERR="$ANBKGERR_INVOBS" +export JCOPTS="$JCOPTS_INVOBS" +export STRONGOPTS="tlnmc_option=0,nstrong=0,nvmodes_keep=0,baldiag_full=.false.,baldiag_inc=.false.,$STRONGOPTS_INVOBS" +export OBSQC="$OBSQC_INVOBS" +export OBSINPUT="$OBSINPUT_INVOBS" +export SUPERRAD="$SUPERRAD_INVOBS" +export SINGLEOB="$SINGLEOB_INVOBS" +export LAGDATA="$LAGDATA_INVOBS" +export HYBRID_ENSEMBLE="" +export RAPIDREFRESH_CLDSURF="$RAPIDREFRESH_CLDSURF_INVOBS" +export CHEM="$CHEM_INVOBS" + +################################################################################ +# Execute GSI as a forward operator + +$ANALYSISSH +export err=$?; err_chk + +################################################################################ +# Postprocessing +cd $pwd +[[ $mkdata = "YES" ]] && rm -rf $DATA +set +x +if [ $VERBOSE = "YES" ]; then + echo $(date) EXITING $0 with return code $err >&2 +fi +exit $err diff --git a/scripts/exgdas_enkf_sfc.sh b/scripts/exgdas_enkf_sfc.sh new file mode 100755 index 0000000000..38cc38ed03 --- /dev/null +++ b/scripts/exgdas_enkf_sfc.sh @@ -0,0 +1,203 @@ +#!/bin/bash +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: exgdas_enkf_sfc.sh +# Script description: generate ensemble surface analyses on tiles +# +# Author: Rahul Mahajan Org: NCEP/EMC Date: 2017-03-02 +# +# Abstract: This script generates ensemble surface analyses on tiles +# +# $Id$ +# +# Attributes: +# Language: POSIX shell +# Machine: WCOSS-Cray/Theia +# +################################################################################ + +# Set environment. +VERBOSE=${VERBOSE:-"YES"} +if [ $VERBOSE = "YES" ]; then + echo $(date) EXECUTING $0 $* >&2 + set -x +fi + +# Directories. +pwd=$(pwd) + +# Base variables +CDATE=${CDATE:-"2010010100"} +DONST=${DONST:-"NO"} +DOSFCANL_ENKF=${DOSFCANL_ENKF:-"YES"} +export CASE=${CASE:-384} +ntiles=${ntiles:-6} + +# Utilities +NCP=${NCP:-"/bin/cp -p"} +NLN=${NLN:-"/bin/ln -sf"} +NCLEN=${NCLEN:-$HOMEgfs/ush/getncdimlen} + +# Scripts + +# Executables. + +# Files. 
+OPREFIX=${OPREFIX:-""} +OSUFFIX=${OSUFFIX:-""} +APREFIX=${APREFIX:-""} +APREFIX_ENKF=${APREFIX_ENKF:-$APREFIX} +ASUFFIX=${ASUFFIX:-$SUFFIX} +GPREFIX=${GPREFIX:-""} +GSUFFIX=${GSUFFIX:-$SUFFIX} + +# Variables +NMEM_ENKF=${NMEM_ENKF:-80} +DOIAU=${DOIAU_ENKF:-"NO"} + +# Global_cycle stuff +CYCLESH=${CYCLESH:-$HOMEgfs/ush/global_cycle.sh} +export CYCLEXEC=${CYCLEXEC:-$HOMEgfs/exec/global_cycle} +APRUN_CYCLE=${APRUN_CYCLE:-${APRUN:-""}} +NTHREADS_CYCLE=${NTHREADS_CYCLE:-${NTHREADS:-1}} +export FIXfv3=${FIXfv3:-$HOMEgfs/fix/fix_fv3_gmted2010} +export FIXgsm=${FIXgsm:-$HOMEgfs/fix/fix_am} +export CYCLVARS=${CYCLVARS:-"FSNOL=-2.,FSNOS=99999.,"} +export FHOUR=${FHOUR:-0} +export DELTSFC=${DELTSFC:-6} + +APRUN_ESFC=${APRUN_ESFC:-${APRUN:-""}} +NTHREADS_ESFC=${NTHREADS_ESFC:-${NTHREADS:-1}} + + +################################################################################ +# Preprocessing +mkdata=NO +if [ ! -d $DATA ]; then + mkdata=YES + mkdir -p $DATA +fi +cd $DATA || exit 99 + + +################################################################################ +# Update surface fields in the FV3 restart's using global_cycle. + +PDY=$(echo $CDATE | cut -c1-8) +cyc=$(echo $CDATE | cut -c9-10) + +GDATE=$($NDATE -$assim_freq $CDATE) +gPDY=$(echo $GDATE | cut -c1-8) +gcyc=$(echo $GDATE | cut -c9-10) +GDUMP=${GDUMP:-"gdas"} + +BDATE=$($NDATE -3 $CDATE) +bPDY=$(echo $BDATE | cut -c1-8) +bcyc=$(echo $BDATE | cut -c9-10) + +# Get dimension information based on CASE +res=$(echo $CASE | cut -c2-) +JCAP_CASE=$((res*2-2)) +LATB_CASE=$((res*2)) +LONB_CASE=$((res*4)) + +# Global cycle requires these files +export FNTSFA=${FNTSFA:-' '} +export FNACNA=${FNACNA:-$COMIN/${OPREFIX}seaice.5min.blend.grb} +export FNSNOA=${FNSNOA:-$COMIN/${OPREFIX}snogrb_t${JCAP_CASE}.${LONB_CASE}.${LATB_CASE}} +[[ ! -f $FNSNOA ]] && export FNSNOA="$COMIN/${OPREFIX}snogrb_t1534.3072.1536" +FNSNOG=${FNSNOG:-$COMIN_GES/${GPREFIX}snogrb_t${JCAP_CASE}.${LONB_CASE}.${LATB_CASE}} +[[ ! -f $FNSNOG ]] && FNSNOG="$COMIN_GES/${GPREFIX}snogrb_t1534.3072.1536" + +# Set CYCLVARS by checking grib date of current snogrb vs that of prev cycle +if [ ${RUN_GETGES:-"NO"} = "YES" ]; then + snoprv=$($GETGESSH -q -t snogrb_$JCAP_CASE -e $gesenvir -n $GDUMP -v $GDATE) +else + snoprv=${snoprv:-$FNSNOG} +fi + +if [ $($WGRIB -4yr $FNSNOA 2>/dev/null | grep -i snowc | awk -F: '{print $3}' | awk -F= '{print $2}') -le \ + $($WGRIB -4yr $snoprv 2>/dev/null | grep -i snowc | awk -F: '{print $3}' | awk -F= '{print $2}') ] ; then + export FNSNOA=" " + export CYCLVARS="FSNOL=99999.,FSNOS=99999.," +else + export SNOW_NUDGE_COEFF=${SNOW_NUDGE_COEFF:-0.} + export CYCLVARS="FSNOL=${SNOW_NUDGE_COEFF},$CYCLVARS" +fi + +if [ $DONST = "YES" ]; then + export GSI_FILE=${GSI_FILE:-$COMIN/${APREFIX}dtfanl.nc} +else + export GSI_FILE="NULL" +fi + +export APRUNCY=${APRUN_CYCLE:-$APRUN_ESFC} +export OMP_NUM_THREADS_CY=${NTHREADS_CYCLE:-$NTHREADS_ESFC} +export MAX_TASKS_CY=$NMEM_ENKF + +if [ $DOIAU = "YES" ]; then + # Update surface restarts at beginning of window when IAU is ON + # For now assume/hold dtfanl.nc is valid at beginning of window. 
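+  # Flow of the loop below: for each tile, link every member's background
+  # sfc_data restart as fnbgsi.NNN and its output sfcanl_data restart as
+  # fnbgso.NNN, then run global_cycle once across all members
+  # (MAX_TASKS_CY=$NMEM_ENKF covers the member dimension).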
+ + for n in $(seq 1 $ntiles); do + + export TILE_NUM=$n + + for imem in $(seq 1 $NMEM_ENKF); do + + cmem=$(printf %03i $imem) + memchar="mem$cmem" + + [[ $TILE_NUM -eq 1 ]] && mkdir -p $COMOUT_ENS/$memchar/RESTART + + $NLN $COMIN_GES_ENS/$memchar/RESTART/$bPDY.${bcyc}0000.sfc_data.tile${n}.nc $DATA/fnbgsi.$cmem + $NLN $COMOUT_ENS/$memchar/RESTART/$bPDY.${bcyc}0000.sfcanl_data.tile${n}.nc $DATA/fnbgso.$cmem + $NLN $FIXfv3/$CASE/${CASE}_grid.tile${n}.nc $DATA/fngrid.$cmem + $NLN $FIXfv3/$CASE/${CASE}_oro_data.tile${n}.nc $DATA/fnorog.$cmem + + done + + $CYCLESH + export err=$?; err_chk + + done + +fi + +if [ $DOSFCANL_ENKF = "YES" ]; then + for n in $(seq 1 $ntiles); do + + export TILE_NUM=$n + + for imem in $(seq 1 $NMEM_ENKF); do + + cmem=$(printf %03i $imem) + memchar="mem$cmem" + + [[ $TILE_NUM -eq 1 ]] && mkdir -p $COMOUT_ENS/$memchar/RESTART + + $NLN $COMIN_GES_ENS/$memchar/RESTART/$PDY.${cyc}0000.sfc_data.tile${n}.nc $DATA/fnbgsi.$cmem + $NLN $COMOUT_ENS/$memchar/RESTART/$PDY.${cyc}0000.sfcanl_data.tile${n}.nc $DATA/fnbgso.$cmem + $NLN $FIXfv3/$CASE/${CASE}_grid.tile${n}.nc $DATA/fngrid.$cmem + $NLN $FIXfv3/$CASE/${CASE}_oro_data.tile${n}.nc $DATA/fnorog.$cmem + + done + + $CYCLESH + export err=$?; err_chk + + done +fi + +################################################################################ + +################################################################################ +# Postprocessing +cd $pwd +[[ $mkdata = "YES" ]] && rm -rf $DATA +set +x +if [ $VERBOSE = "YES" ]; then + echo $(date) EXITING $0 with return code $err >&2 +fi +exit $err diff --git a/scripts/exgdas_enkf_update.sh b/scripts/exgdas_enkf_update.sh new file mode 100755 index 0000000000..16fa92b1c9 --- /dev/null +++ b/scripts/exgdas_enkf_update.sh @@ -0,0 +1,405 @@ +#!/bin/bash +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: exgdas_enkf_update.sh +# Script description: Make global_enkf update +# +# Author: Rahul Mahajan Org: NCEP/EMC Date: 2017-03-02 +# +# Abstract: This script runs the global_enkf update +# +# $Id$ +# +# Attributes: +# Language: POSIX shell +# Machine: WCOSS-Cray/Theia +# +################################################################################ + +# Set environment. +VERBOSE=${VERBOSE:-"YES"} +if [ $VERBOSE = "YES" ] ; then + echo $(date) EXECUTING $0 $* >&2 + set -x +fi + +# Directories. +pwd=$(pwd) + +# Utilities +NCP=${NCP:-"/bin/cp -p"} +NLN=${NLN:-"/bin/ln -sf"} +NCLEN=${NCLEN:-$HOMEgfs/ush/getncdimlen} +USE_CFP=${USE_CFP:-"NO"} +CFP_MP=${CFP_MP:-"NO"} +nm="" +if [ $CFP_MP = "YES" ]; then + nm=0 +fi +APRUNCFP=${APRUNCFP:-""} +APRUN_ENKF=${APRUN_ENKF:-${APRUN:-""}} +NTHREADS_ENKF=${NTHREADS_ENKF:-${NTHREADS:-1}} + +# Executables +ENKFEXEC=${ENKFEXEC:-$HOMEgfs/exec/enkf.x} + +# Cycling and forecast hour specific parameters +CDATE=${CDATE:-"2001010100"} + +# Filenames. 
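+# (Illustrative note; the prefix shown is hypothetical.)  With APREFIX set to
+# something like "gdas.t00z.", the diagnostic tarballs handled below become
+# gdas.t00z.cnvstat, gdas.t00z.oznstat and gdas.t00z.radstat (one per member,
+# plus a ".ensmean" copy), and GBIASe resolves to gdas.t00z.abias_int.ensmean,
+# the ensemble-mean bias-correction file linked in as satbias_in further down.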
+GPREFIX=${GPREFIX:-""} +GSUFFIX=${GSUFFIX:-$SUFFIX} +APREFIX=${APREFIX:-""} +ASUFFIX=${ASUFFIX:-$SUFFIX} + +SMOOTH_ENKF=${SMOOTH_ENKF:-"YES"} + +GBIASe=${GBIASe:-${APREFIX}abias_int.ensmean} +CNVSTAT=${CNVSTAT:-${APREFIX}cnvstat} +OZNSTAT=${OZNSTAT:-${APREFIX}oznstat} +RADSTAT=${RADSTAT:-${APREFIX}radstat} +ENKFSTAT=${ENKFSTAT:-${APREFIX}enkfstat} + +# Namelist parameters +USE_CORRELATED_OBERRS=${USE_CORRELATED_OBERRS:-"NO"} +NMEM_ENKF=${NMEM_ENKF:-80} +NAM_ENKF=${NAM_ENKF:-""} +SATOBS_ENKF=${SATOBS_ENKF:-""} +OZOBS_ENKF=${OZOBS_ENKF:-""} +use_correlated_oberrs=${use_correlated_oberrs:-".false."} +if [ $USE_CORRELATED_OBERRS == "YES" ]; then + use_correlated_oberrs=".true." +fi +imp_physics=${imp_physics:-"99"} +lupp=${lupp:-".true."} +corrlength=${corrlength:-1250} +lnsigcutoff=${lnsigcutoff:-2.5} +analpertwt=${analpertwt:-0.85} +readin_localization_enkf=${readin_localization_enkf:-".true."} +reducedgrid=${reducedgrid:-".true."} +letkf_flag=${letkf_flag:-".false."} +getkf=${getkf:-".false."} +denkf=${denkf:-".false."} +nobsl_max=${nobsl_max:-10000} +lobsdiag_forenkf=${lobsdiag_forenkf:-".false."} +write_spread_diag=${write_spread_diag:-".false."} +cnvw_option=${cnvw_option:-".false."} +netcdf_diag=${netcdf_diag:-".true."} +modelspace_vloc=${modelspace_vloc:-".false."} # if true, 'vlocal_eig.dat' is needed +IAUFHRS_ENKF=${IAUFHRS_ENKF:-6} +DO_CALC_INCREMENT=${DO_CALC_INCREMENT:-"NO"} +INCREMENTS_TO_ZERO=${INCREMENTS_TO_ZERO:-"'NONE'"} + +################################################################################ +ATMGES_ENSMEAN=$COMIN_GES_ENS/${GPREFIX}atmf006.ensmean${GSUFFIX} +if [ $SUFFIX = ".nc" ]; then + LONB_ENKF=${LONB_ENKF:-$($NCLEN $ATMGES_ENSMEAN grid_xt)} # get LONB_ENKF + LATB_ENKF=${LATB_ENKF:-$($NCLEN $ATMGES_ENSMEAN grid_yt)} # get LATB_ENFK + LEVS_ENKF=${LEVS_ENKF:-$($NCLEN $ATMGES_ENSMEAN pfull)} # get LEVS_ENFK + use_gfs_ncio=".true." + use_gfs_nemsio=".false." + paranc=${paranc:-".true."} + if [ $DO_CALC_INCREMENT = "YES" ]; then + write_fv3_incr=".false." + else + write_fv3_incr=".true." + WRITE_INCR_ZERO="incvars_to_zero= $INCREMENTS_TO_ZERO," + fi +else + LEVS_ENKF=${LEVS_ENKF:-$($NEMSIOGET $ATMGES_ENSMEAN dimz | awk '{print $2}')} + LATB_ENKF=${LATB_ENKF:-$($NEMSIOGET $ATMGES_ENSMEAN dimy | awk '{print $2}')} + LONB_ENKF=${LONB_ENKF:-$($NEMSIOGET $ATMGES_ENSMEAN dimx | awk '{print $2}')} + use_gfs_ncio=".false." + use_gfs_nemsio=".true." + paranc=${paranc:-".false."} +fi +LATA_ENKF=${LATA_ENKF:-$LATB_ENKF} +LONA_ENKF=${LONA_ENKF:-$LONB_ENKF} + +SATANGL=${SATANGL:-${FIXgsi}/global_satangbias.txt} +SATINFO=${SATINFO:-${FIXgsi}/global_satinfo.txt} +CONVINFO=${CONVINFO:-${FIXgsi}/global_convinfo.txt} +OZINFO=${OZINFO:-${FIXgsi}/global_ozinfo.txt} +SCANINFO=${SCANINFO:-${FIXgsi}/global_scaninfo.txt} +HYBENSINFO=${HYBENSINFO:-${FIXgsi}/global_hybens_info.l${LEVS_ENKF}.txt} +ANAVINFO=${ANAVINFO:-${FIXgsi}/global_anavinfo.l${LEVS_ENKF}.txt} +VLOCALEIG=${VLOCALEIG:-${FIXgsi}/vlocal_eig_l${LEVS_ENKF}.dat} + +ENKF_SUFFIX="s" +[[ $SMOOTH_ENKF = "NO" ]] && ENKF_SUFFIX="" + +################################################################################ +# Preprocessing +mkdata=NO +if [ ! 
-d $DATA ]; then + mkdata=YES + mkdir -p $DATA +fi +cd $DATA || exit 99 + +################################################################################ +# Fixed files +$NLN $SATANGL satbias_angle +$NLN $SATINFO satinfo +$NLN $SCANINFO scaninfo +$NLN $CONVINFO convinfo +$NLN $OZINFO ozinfo +$NLN $HYBENSINFO hybens_info +$NLN $ANAVINFO anavinfo +$NLN $VLOCALEIG vlocal_eig.dat + +# Bias correction coefficients based on the ensemble mean +$NLN $COMOUT_ANL_ENS/$GBIASe satbias_in + +################################################################################ + +if [ $USE_CFP = "YES" ]; then + [[ -f $DATA/untar.sh ]] && rm $DATA/untar.sh + [[ -f $DATA/mp_untar.sh ]] && rm $DATA/mp_untar.sh + set +x + cat > $DATA/untar.sh << EOFuntar +#!/bin/sh +memchar=\$1 +flist="$CNVSTAT $OZNSTAT $RADSTAT" +for ftype in \$flist; do + if [ \$memchar = "ensmean" ]; then + fname=$COMOUT_ANL_ENS/\${ftype}.ensmean + else + fname=$COMOUT_ANL_ENS/\$memchar/\$ftype + fi + tar -xvf \$fname +done +EOFuntar + set -x + chmod 755 $DATA/untar.sh +fi + +################################################################################ +# Ensemble guess, observational data and analyses/increments + +flist="$CNVSTAT $OZNSTAT $RADSTAT" +if [ $USE_CFP = "YES" ]; then + echo "$nm $DATA/untar.sh ensmean" | tee -a $DATA/mp_untar.sh + if [ ${CFP_MP:-"NO"} = "YES" ]; then + nm=$((nm+1)) + fi +else + for ftype in $flist; do + fname=$COMOUT_ANL_ENS/${ftype}.ensmean + tar -xvf $fname + done +fi +nfhrs=`echo $IAUFHRS_ENKF | sed 's/,/ /g'` +for imem in $(seq 1 $NMEM_ENKF); do + memchar="mem"$(printf %03i $imem) + if [ $lobsdiag_forenkf = ".false." ]; then + if [ $USE_CFP = "YES" ]; then + echo "$nm $DATA/untar.sh $memchar" | tee -a $DATA/mp_untar.sh + if [ ${CFP_MP:-"NO"} = "YES" ]; then + nm=$((nm+1)) + fi + else + for ftype in $flist; do + fname=$COMOUT_ANL_ENS/$memchar/$ftype + tar -xvf $fname + done + fi + fi + mkdir -p $COMOUT_ANL_ENS/$memchar + for FHR in $nfhrs; do + $NLN $COMIN_GES_ENS/$memchar/${GPREFIX}atmf00${FHR}${ENKF_SUFFIX}${GSUFFIX} sfg_${CDATE}_fhr0${FHR}_${memchar} + if [ $cnvw_option = ".true." ]; then + $NLN $COMIN_GES_ENS/$memchar/${GPREFIX}sfcf00${FHR}${GSUFFIX} sfgsfc_${CDATE}_fhr0${FHR}_${memchar} + fi + if [ $FHR -eq 6 ]; then + if [ $DO_CALC_INCREMENT = "YES" ]; then + $NLN $COMOUT_ANL_ENS/$memchar/${APREFIX}atmanl${ASUFFIX} sanl_${CDATE}_fhr0${FHR}_${memchar} + else + $NLN $COMOUT_ANL_ENS/$memchar/${APREFIX}atminc${ASUFFIX} incr_${CDATE}_fhr0${FHR}_${memchar} + fi + else + if [ $DO_CALC_INCREMENT = "YES" ]; then + $NLN $COMOUT_ANL_ENS/$memchar/${APREFIX}atma00${FHR}${ASUFFIX} sanl_${CDATE}_fhr0${FHR}_${memchar} + else + $NLN $COMOUT_ANL_ENS/$memchar/${APREFIX}atmi00${FHR}${ASUFFIX} incr_${CDATE}_fhr0${FHR}_${memchar} + fi + fi + done +done + +# Ensemble mean guess +for FHR in $nfhrs; do + $NLN $COMIN_GES_ENS/${GPREFIX}atmf00${FHR}.ensmean${GSUFFIX} sfg_${CDATE}_fhr0${FHR}_ensmean + if [ $cnvw_option = ".true." ]; then + $NLN $COMIN_GES_ENS/${GPREFIX}sfcf00${FHR}.ensmean${GSUFFIX} sfgsfc_${CDATE}_fhr0${FHR}_ensmean + fi +done + +if [ $USE_CFP = "YES" ]; then + chmod 755 $DATA/mp_untar.sh + ncmd=$(cat $DATA/mp_untar.sh | wc -l) + if [ $ncmd -gt 0 ]; then + ncmd_max=$((ncmd < npe_node_max ? 
ncmd : npe_node_max)) + APRUNCFP=$(eval echo $APRUNCFP) + $APRUNCFP $DATA/mp_untar.sh + export err=$?; err_chk + fi +fi + +################################################################################ +# Create global_enkf namelist +cat > enkf.nml << EOFnml +&nam_enkf + datestring="$CDATE",datapath="$DATA/", + analpertwtnh=${analpertwt},analpertwtsh=${analpertwt},analpertwttr=${analpertwt}, + covinflatemax=1.e2,covinflatemin=1,pseudo_rh=.true.,iassim_order=0, + corrlengthnh=${corrlength},corrlengthsh=${corrlength},corrlengthtr=${corrlength}, + lnsigcutoffnh=${lnsigcutoff},lnsigcutoffsh=${lnsigcutoff},lnsigcutofftr=${lnsigcutoff}, + lnsigcutoffpsnh=${lnsigcutoff},lnsigcutoffpssh=${lnsigcutoff},lnsigcutoffpstr=${lnsigcutoff}, + lnsigcutoffsatnh=${lnsigcutoff},lnsigcutoffsatsh=${lnsigcutoff},lnsigcutoffsattr=${lnsigcutoff}, + obtimelnh=1.e30,obtimelsh=1.e30,obtimeltr=1.e30, + saterrfact=1.0,numiter=0, + sprd_tol=1.e30,paoverpb_thresh=0.98, + nlons=$LONA_ENKF,nlats=$LATA_ENKF,nlevs=$LEVS_ENKF,nanals=$NMEM_ENKF, + deterministic=.true.,sortinc=.true.,lupd_satbiasc=.false., + reducedgrid=${reducedgrid},readin_localization=${readin_localization_enkf}., + use_gfs_nemsio=${use_gfs_nemsio},use_gfs_ncio=${use_gfs_ncio},imp_physics=$imp_physics,lupp=$lupp, + univaroz=.false.,adp_anglebc=.true.,angord=4,use_edges=.false.,emiss_bc=.true., + letkf_flag=${letkf_flag},nobsl_max=${nobsl_max},denkf=${denkf},getkf=${getkf}., + nhr_anal=${IAUFHRS_ENKF},nhr_state=${IAUFHRS_ENKF},use_qsatensmean=.true., + lobsdiag_forenkf=$lobsdiag_forenkf, + write_spread_diag=$write_spread_diag, + modelspace_vloc=$modelspace_vloc, + use_correlated_oberrs=${use_correlated_oberrs}, + netcdf_diag=$netcdf_diag,cnvw_option=$cnvw_option, + paranc=$paranc,write_fv3_incr=$write_fv3_incr, + $WRITE_INCR_ZERO + $NAM_ENKF +/ +&satobs_enkf + sattypes_rad(1) = 'amsua_n15', dsis(1) = 'amsua_n15', + sattypes_rad(2) = 'amsua_n18', dsis(2) = 'amsua_n18', + sattypes_rad(3) = 'amsua_n19', dsis(3) = 'amsua_n19', + sattypes_rad(4) = 'amsub_n16', dsis(4) = 'amsub_n16', + sattypes_rad(5) = 'amsub_n17', dsis(5) = 'amsub_n17', + sattypes_rad(6) = 'amsua_aqua', dsis(6) = 'amsua_aqua', + sattypes_rad(7) = 'amsua_metop-a', dsis(7) = 'amsua_metop-a', + sattypes_rad(8) = 'airs_aqua', dsis(8) = 'airs_aqua', + sattypes_rad(9) = 'hirs3_n17', dsis(9) = 'hirs3_n17', + sattypes_rad(10)= 'hirs4_n19', dsis(10)= 'hirs4_n19', + sattypes_rad(11)= 'hirs4_metop-a', dsis(11)= 'hirs4_metop-a', + sattypes_rad(12)= 'mhs_n18', dsis(12)= 'mhs_n18', + sattypes_rad(13)= 'mhs_n19', dsis(13)= 'mhs_n19', + sattypes_rad(14)= 'mhs_metop-a', dsis(14)= 'mhs_metop-a', + sattypes_rad(15)= 'goes_img_g11', dsis(15)= 'imgr_g11', + sattypes_rad(16)= 'goes_img_g12', dsis(16)= 'imgr_g12', + sattypes_rad(17)= 'goes_img_g13', dsis(17)= 'imgr_g13', + sattypes_rad(18)= 'goes_img_g14', dsis(18)= 'imgr_g14', + sattypes_rad(19)= 'goes_img_g15', dsis(19)= 'imgr_g15', + sattypes_rad(20)= 'avhrr_n18', dsis(20)= 'avhrr3_n18', + sattypes_rad(21)= 'avhrr_metop-a', dsis(21)= 'avhrr3_metop-a', + sattypes_rad(22)= 'avhrr_n19', dsis(22)= 'avhrr3_n19', + sattypes_rad(23)= 'amsre_aqua', dsis(23)= 'amsre_aqua', + sattypes_rad(24)= 'ssmis_f16', dsis(24)= 'ssmis_f16', + sattypes_rad(25)= 'ssmis_f17', dsis(25)= 'ssmis_f17', + sattypes_rad(26)= 'ssmis_f18', dsis(26)= 'ssmis_f18', + sattypes_rad(27)= 'ssmis_f19', dsis(27)= 'ssmis_f19', + sattypes_rad(28)= 'ssmis_f20', dsis(28)= 'ssmis_f20', + sattypes_rad(29)= 'sndrd1_g11', dsis(29)= 'sndrD1_g11', + sattypes_rad(30)= 'sndrd2_g11', dsis(30)= 'sndrD2_g11', + 
sattypes_rad(31)= 'sndrd3_g11', dsis(31)= 'sndrD3_g11', + sattypes_rad(32)= 'sndrd4_g11', dsis(32)= 'sndrD4_g11', + sattypes_rad(33)= 'sndrd1_g12', dsis(33)= 'sndrD1_g12', + sattypes_rad(34)= 'sndrd2_g12', dsis(34)= 'sndrD2_g12', + sattypes_rad(35)= 'sndrd3_g12', dsis(35)= 'sndrD3_g12', + sattypes_rad(36)= 'sndrd4_g12', dsis(36)= 'sndrD4_g12', + sattypes_rad(37)= 'sndrd1_g13', dsis(37)= 'sndrD1_g13', + sattypes_rad(38)= 'sndrd2_g13', dsis(38)= 'sndrD2_g13', + sattypes_rad(39)= 'sndrd3_g13', dsis(39)= 'sndrD3_g13', + sattypes_rad(40)= 'sndrd4_g13', dsis(40)= 'sndrD4_g13', + sattypes_rad(41)= 'sndrd1_g14', dsis(41)= 'sndrD1_g14', + sattypes_rad(42)= 'sndrd2_g14', dsis(42)= 'sndrD2_g14', + sattypes_rad(43)= 'sndrd3_g14', dsis(43)= 'sndrD3_g14', + sattypes_rad(44)= 'sndrd4_g14', dsis(44)= 'sndrD4_g14', + sattypes_rad(45)= 'sndrd1_g15', dsis(45)= 'sndrD1_g15', + sattypes_rad(46)= 'sndrd2_g15', dsis(46)= 'sndrD2_g15', + sattypes_rad(47)= 'sndrd3_g15', dsis(47)= 'sndrD3_g15', + sattypes_rad(48)= 'sndrd4_g15', dsis(48)= 'sndrD4_g15', + sattypes_rad(49)= 'iasi_metop-a', dsis(49)= 'iasi_metop-a', + sattypes_rad(50)= 'seviri_m08', dsis(50)= 'seviri_m08', + sattypes_rad(51)= 'seviri_m09', dsis(51)= 'seviri_m09', + sattypes_rad(52)= 'seviri_m10', dsis(52)= 'seviri_m10', + sattypes_rad(53)= 'seviri_m11', dsis(53)= 'seviri_m11', + sattypes_rad(54)= 'amsua_metop-b', dsis(54)= 'amsua_metop-b', + sattypes_rad(55)= 'hirs4_metop-b', dsis(55)= 'hirs4_metop-b', + sattypes_rad(56)= 'mhs_metop-b', dsis(56)= 'mhs_metop-b', + sattypes_rad(57)= 'iasi_metop-b', dsis(57)= 'iasi_metop-b', + sattypes_rad(58)= 'avhrr_metop-b', dsis(58)= 'avhrr3_metop-b', + sattypes_rad(59)= 'atms_npp', dsis(59)= 'atms_npp', + sattypes_rad(60)= 'atms_n20', dsis(60)= 'atms_n20', + sattypes_rad(61)= 'cris_npp', dsis(61)= 'cris_npp', + sattypes_rad(62)= 'cris-fsr_npp', dsis(62)= 'cris-fsr_npp', + sattypes_rad(63)= 'cris-fsr_n20', dsis(63)= 'cris-fsr_n20', + sattypes_rad(64)= 'gmi_gpm', dsis(64)= 'gmi_gpm', + sattypes_rad(65)= 'saphir_meghat', dsis(65)= 'saphir_meghat', + sattypes_rad(66)= 'amsua_metop-c', dsis(66)= 'amsua_metop-c', + sattypes_rad(67)= 'mhs_metop-c', dsis(67)= 'mhs_metop-c', + sattypes_rad(68)= 'ahi_himawari8', dsis(68)= 'ahi_himawari8', + sattypes_rad(69)= 'abi_g16', dsis(69)= 'abi_g16', + sattypes_rad(70)= 'abi_g17', dsis(70)= 'abi_g17', + sattypes_rad(71)= 'iasi_metop-c', dsis(71)= 'iasi_metop-c', + sattypes_rad(72)= 'viirs-m_npp', dsis(72)= 'viirs-m_npp', + sattypes_rad(73)= 'viirs-m_j1', dsis(73)= 'viirs-m_j1', + sattypes_rad(74)= 'avhrr_metop-c', dsis(74)= 'avhrr3_metop-c', + sattypes_rad(75)= 'abi_g18', dsis(75)= 'abi_g18', + sattypes_rad(76)= 'ahi_himawari9', dsis(76)= 'ahi_himawari9', + sattypes_rad(77)= 'viirs-m_j2', dsis(77)= 'viirs-m_j2', + sattypes_rad(78)= 'atms_n21', dsis(78)= 'atms_n21', + sattypes_rad(79)= 'cris-fsr_n21', dsis(79)= 'cris-fsr_n21', + $SATOBS_ENKF +/ +&ozobs_enkf + sattypes_oz(1) = 'sbuv2_n16', + sattypes_oz(2) = 'sbuv2_n17', + sattypes_oz(3) = 'sbuv2_n18', + sattypes_oz(4) = 'sbuv2_n19', + sattypes_oz(5) = 'omi_aura', + sattypes_oz(6) = 'gome_metop-a', + sattypes_oz(7) = 'gome_metop-b', + sattypes_oz(8) = 'mls30_aura', + sattypes_oz(9) = 'ompsnp_npp', + sattypes_oz(10) = 'ompstc8_npp', + sattypes_oz(11) = 'ompstc8_n20', + sattypes_oz(12) = 'ompsnp_n20', + sattypes_oz(13) = 'ompslp_npp', + sattypes_oz(14) = 'ompstc8_n21', + sattypes_oz(15) = 'ompsnp_n21', + sattypes_oz(16) = 'gome_metop-c', + $OZOBS_ENKF +/ +EOFnml + 
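+# Optional sanity check (a minimal sketch): confirm that the grid dimensions
+# and member count were substituted into &nam_enkf above before launching the
+# (expensive) EnKF executable.
+if [ $VERBOSE = "YES" ]; then
+   grep -E "nanals|nlons|nlats|nlevs|nhr_anal" enkf.nml
+fi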
+################################################################################ +# Run enkf update + +export OMP_NUM_THREADS=$NTHREADS_ENKF +export pgm=$ENKFEXEC +. prep_step + +$NCP $ENKFEXEC $DATA +$APRUN_ENKF ${DATA}/$(basename $ENKFEXEC) 1>stdout 2>stderr +export err=$?; err_chk + +# Cat runtime output files. +cat stdout stderr > $COMOUT_ANL_ENS/$ENKFSTAT + +################################################################################ +# Postprocessing +cd $pwd +[[ $mkdata = "YES" ]] && rm -rf $DATA +set +x +if [ $VERBOSE = "YES" ]; then + echo $(date) EXITING $0 with return code $err >&2 +fi +exit $err diff --git a/scripts/exgfs_atmos_vminmon.sh b/scripts/exgfs_atmos_vminmon.sh new file mode 100755 index 0000000000..eb0eac23c5 --- /dev/null +++ b/scripts/exgfs_atmos_vminmon.sh @@ -0,0 +1,127 @@ +#/bin/sh +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: exgfs_vrfminmon.sh +# Script description: Runs data extract/validation for GSI normalization diag data +# +# Author: Ed Safford Org: NP23 Date: 2015-04-10 +# +# Abstract: This script runs the data extract/validation portion of the +# MinMon package. +# +# Condition codes +# 0 - no problem encountered +# >0 - some problem encountered +# +################################################################################ + + +######################################## +# Set environment +######################################## +export VERBOSE=${VERBOSE:-"NO"} +if [[ "$VERBOSE" = "YES" ]] +then + set -x +fi + +export scr=exgfs_vrfyminmon.sh + +export RUN_ENVIR=${RUN_ENVIR:-nco} +export NET=${NET:-gfs} +export RUN=${RUN:-gfs} +export envir=${envir:-prod} + +######################################## +# Command line arguments +######################################## +export PDY=${1:-${PDY:?}} +export cyc=${2:-${cyc:?}} + +######################################## +# Directories +######################################## +export DATA=${DATA:-$(pwd)} + + +######################################## +# Filenames +######################################## +gsistat=${gsistat:-$COMIN/gfs.t${cyc}z.gsistat} +export mm_gnormfile=${gnormfile:-${M_FIXgfs}/gfs_minmon_gnorm.txt} +export mm_costfile=${costfile:-${M_FIXgfs}/gfs_minmon_cost.txt} + +######################################## +# Other variables +######################################## +export MINMON_SUFFIX=${MINMON_SUFFIX:-GFS} +export PDATE=${PDY}${cyc} +export NCP=${NCP:-/bin/cp} +export pgm=exgfs_vrfminmon.sh + + + +if [[ ! -d ${DATA} ]]; then + mkdir $DATA +fi +cd $DATA + +###################################################################### + +data_available=0 + +if [[ -s ${gsistat} ]]; then + + data_available=1 + + #------------------------------------------------------------------ + # Copy the $MINMON_SUFFIX.gnorm_data.txt file to the working directory + # It's ok if it doesn't exist; we'll create a new one if needed. + #------------------------------------------------------------------ + if [[ -s ${M_TANKverf}/gnorm_data.txt ]]; then + $NCP ${M_TANKverf}/gnorm_data.txt gnorm_data.txt + elif [[ -s ${M_TANKverfM1}/gnorm_data.txt ]]; then + $NCP ${M_TANKverfM1}/gnorm_data.txt gnorm_data.txt + fi + + + #------------------------------------------------------------------ + # Run the child sccripts. + #------------------------------------------------------------------ + ${USHminmon}/minmon_xtrct_costs.pl ${MINMON_SUFFIX} ${PDY} ${cyc} ${gsistat} dummy + rc_costs=$? 
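+   # Each extract step's return code is captured the same way (rc_costs,
+   # rc_gnorms, rc_reduct below); the first nonzero one becomes this job's
+   # exit value in the postprocessing block at the end of the script.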
+ echo "rc_costs = $rc_costs" + + ${USHminmon}/minmon_xtrct_gnorms.pl ${MINMON_SUFFIX} ${PDY} ${cyc} ${gsistat} dummy + rc_gnorms=$? + echo "rc_gnorms = $rc_gnorms" + + ${USHminmon}/minmon_xtrct_reduct.pl ${MINMON_SUFFIX} ${PDY} ${cyc} ${gsistat} dummy + rc_reduct=$? + echo "rc_reduct = $rc_reduct" + +fi + +##################################################################### +# Postprocessing + +err=0 +if [[ ${data_available} -ne 1 ]]; then + err=1 +elif [[ $rc_costs -ne 0 ]]; then + err=$rc_costs +elif [[ $rc_gnorms -ne 0 ]]; then + err=$rc_gnorms +elif [[ $rc_reduct -ne 0 ]]; then + err=$rc_reduct +fi + +if [[ "$VERBOSE" = "YES" ]]; then + echo "end exgfs_vrfminmon.sh, exit value = ${err}" +fi + + +set +x +exit ${err} + From 0792e7ba003217d3bcdb2226752bb3f414978e92 Mon Sep 17 00:00:00 2001 From: Andrew Collard Date: Wed, 21 Aug 2024 20:54:23 +0000 Subject: [PATCH 05/22] Turn off some links --- sorc/link_fv3gfs.sh | 27 --------------------------- versions/wcoss2.ver | 2 +- 2 files changed, 1 insertion(+), 28 deletions(-) diff --git a/sorc/link_fv3gfs.sh b/sorc/link_fv3gfs.sh index b654c96a62..35cdf6612d 100755 --- a/sorc/link_fv3gfs.sh +++ b/sorc/link_fv3gfs.sh @@ -110,33 +110,6 @@ if [ -d ${pwd}/gfs_wafs.fd ]; then $LINK ../sorc/gfs_wafs.fd/fix/* . fi - -#------------------------------ -#--add GSI/EnKF file -#------------------------------ -cd ${pwd}/../jobs ||exit 8 - $LINK ../sorc/gsi.fd/jobs/JGLOBAL_ATMOS_ANALYSIS . - $LINK ../sorc/gsi.fd/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC . - $LINK ../sorc/gsi.fd/jobs/JGDAS_ATMOS_ANALYSIS_DIAG . - $LINK ../sorc/gsi.fd/jobs/JGDAS_ENKF_SELECT_OBS . - $LINK ../sorc/gsi.fd/jobs/JGDAS_ENKF_DIAG . - $LINK ../sorc/gsi.fd/jobs/JGDAS_ENKF_UPDATE . - $LINK ../sorc/gsi.fd/jobs/JGDAS_ENKF_ECEN . - $LINK ../sorc/gsi.fd/jobs/JGDAS_ENKF_SFC . - $LINK ../sorc/gsi.fd/jobs/JGDAS_ENKF_FCST . - $LINK ../sorc/gsi.fd/jobs/JGDAS_ENKF_POST . - $LINK ../sorc/gsi.fd/jobs/JGDAS_ATMOS_CHGRES_FORENKF . -cd ${pwd}/../scripts ||exit 8 - $LINK ../sorc/gsi.fd/scripts/exglobal_atmos_analysis.sh . - $LINK ../sorc/gsi.fd/scripts/exglobal_atmos_analysis_calc.sh . - $LINK ../sorc/gsi.fd/scripts/exglobal_diag.sh . - $LINK ../sorc/gsi.fd/scripts/exgdas_enkf_select_obs.sh . - $LINK ../sorc/gsi.fd/scripts/exgdas_enkf_update.sh . - $LINK ../sorc/gsi.fd/scripts/exgdas_enkf_ecen.sh . - $LINK ../sorc/gsi.fd/scripts/exgdas_enkf_sfc.sh . - $LINK ../sorc/gsi.fd/scripts/exgdas_enkf_fcst.sh . - $LINK ../sorc/gsi.fd/scripts/exgdas_enkf_post.sh . - $LINK ../sorc/gsi.fd/scripts/exgdas_atmos_chgres_forenkf.sh . 
cd ${pwd}/../fix ||exit 8 [[ -d fix_gsi ]] && rm -rf fix_gsi $LINK ../sorc/gsi.fd/fix fix_gsi diff --git a/versions/wcoss2.ver b/versions/wcoss2.ver index 55bfd23d67..674a750aa5 100644 --- a/versions/wcoss2.ver +++ b/versions/wcoss2.ver @@ -3,7 +3,7 @@ export prod_envir_ver=${prod_envir_ver:-2.0.4} # Allow override from ops ecflow export prod_util_ver=${prod_util_ver:-2.0.9} # Allow override from ops ecflow export obsproc_run_ver=1.2.0 -export prepobs_run_ver=1.0.1 +export prepobs_run_ver=1.1.0 export tracker_ver=v1.1.15.5 export fit_ver="newm.1.5" From dbc2d374b4d9607376e56123aa68d6d01d16eb40 Mon Sep 17 00:00:00 2001 From: Andrew Collard Date: Thu, 22 Aug 2024 14:52:52 +0000 Subject: [PATCH 06/22] Add former gsi.fd scripts to ush --- .gitignore | 4 - ush/calcanl_gfs.py | 375 +++++++++++++++++++++++++++++++++++++++++++++ ush/calcinc_gfs.py | 90 +++++++++++ ush/getncdimlen | 17 ++ ush/gsi_utils.py | 138 +++++++++++++++++ 5 files changed, 620 insertions(+), 4 deletions(-) create mode 100755 ush/calcanl_gfs.py create mode 100755 ush/calcinc_gfs.py create mode 100755 ush/getncdimlen create mode 100644 ush/gsi_utils.py diff --git a/.gitignore b/.gitignore index 27cf8b2f8b..727e608cbe 100644 --- a/.gitignore +++ b/.gitignore @@ -116,8 +116,6 @@ scripts/exgfs_atmos_wafs_grib2.sh scripts/exgfs_atmos_wafs_grib2_0p25.sh scripts/exglobal_atmos_pmgr.sh # ush symlinks -ush/calcanl_gfs.py -ush/calcinc_gfs.py ush/emcsfc_ice_blend.sh ush/emcsfc_snow.sh ush/fix_precip.sh @@ -128,7 +126,6 @@ ush/fv3gfs_dwn_nems.sh ush/fv3gfs_filter_topo.sh ush/fv3gfs_make_grid.sh ush/fv3gfs_make_orog.sh -ush/getncdimlen ush/gfs_nceppost.sh ush/gfs_transfer.sh ush/gldas_archive.sh @@ -141,7 +138,6 @@ ush/global_chgres.sh ush/global_chgres_driver.sh ush/global_cycle.sh ush/global_cycle_driver.sh -ush/gsi_utils.py ush/link_crtm_fix.sh ush/minmon_xtrct_costs.pl ush/minmon_xtrct_gnorms.pl diff --git a/ush/calcanl_gfs.py b/ush/calcanl_gfs.py new file mode 100755 index 0000000000..69f282cf41 --- /dev/null +++ b/ush/calcanl_gfs.py @@ -0,0 +1,375 @@ +#!/usr/bin/env python +# calcanl_gfs.py +# cory.r.martin@noaa.gov +# 2019-10-11 +# script to run executables to produce netCDF analysis +# on GFS gaussian grid for downstream users +import os +import shutil +import subprocess +import sys +import gsi_utils +from collections import OrderedDict +import datetime + + +# function to calculate analysis from a given increment file and background +def calcanl_gfs(DoIAU, l4DEnsVar, Write4Danl, ComOut, APrefix, ASuffix, + ComIn_Ges, GPrefix, GSuffix, + FixDir, atmges_ens_mean, RunDir, NThreads, NEMSGet, IAUHrs, + ExecCMD, ExecCMDMPI, ExecAnl, ExecChgresInc, Cdump): + print('calcanl_gfs beginning at: ',datetime.datetime.utcnow()) + + IAUHH = IAUHrs + if Cdump == "gfs": + IAUHH = list(map(int,'6')) + else: + IAUHH = IAUHrs + + ######## copy and link files + if DoIAU and l4DEnsVar and Write4Danl: + for fh in IAUHH: + if fh == 6: + # for full res analysis + CalcAnlDir = RunDir+'/calcanl_'+format(fh, '02') + if not os.path.exists(CalcAnlDir): + gsi_utils.make_dir(CalcAnlDir) + gsi_utils.copy_file(ExecAnl, CalcAnlDir+'/calc_anl.x') + gsi_utils.link_file(RunDir+'/siginc.nc', CalcAnlDir+'/siginc.nc.06') + gsi_utils.link_file(RunDir+'/sigf06', CalcAnlDir+'/ges.06') + gsi_utils.link_file(RunDir+'/siganl', CalcAnlDir+'/anl.06') + gsi_utils.copy_file(ExecChgresInc, CalcAnlDir+'/chgres_inc.x') + # for ensemble res analysis + if Cdump == "gdas": + CalcAnlDir = RunDir+'/calcanl_ensres_'+format(fh, '02') + if not os.path.exists(CalcAnlDir): + 
gsi_utils.make_dir(CalcAnlDir) + gsi_utils.copy_file(ExecAnl, CalcAnlDir+'/calc_anl.x') + gsi_utils.link_file(RunDir+'/siginc.nc', CalcAnlDir+'/siginc.nc.06') + gsi_utils.link_file(ComOut+'/'+APrefix+'atmanl.ensres'+ASuffix, CalcAnlDir+'/anl.ensres.06') + gsi_utils.link_file(ComIn_Ges+'/'+GPrefix+'atmf006.ensres'+GSuffix, CalcAnlDir+'/ges.ensres.06') + gsi_utils.link_file(RunDir+'/sigf06', CalcAnlDir+'/ges.06') + else: + if os.path.isfile('sigi'+format(fh, '02')+'.nc'): + # for full res analysis + CalcAnlDir = RunDir+'/calcanl_'+format(fh, '02') + CalcAnlDir6 = RunDir+'/calcanl_'+format(6, '02') + if not os.path.exists(CalcAnlDir): + gsi_utils.make_dir(CalcAnlDir) + if not os.path.exists(CalcAnlDir6): + gsi_utils.make_dir(CalcAnlDir6) + gsi_utils.link_file(ComOut+'/'+APrefix+'atma'+format(fh, '03')+ASuffix, + CalcAnlDir6+'/anl.'+format(fh, '02')) + gsi_utils.link_file(RunDir+'/siga'+format(fh, '02'), + CalcAnlDir6+'/anl.'+format(fh, '02')) + gsi_utils.link_file(RunDir+'/sigi'+format(fh, '02')+'.nc', + CalcAnlDir+'/siginc.nc.'+format(fh, '02')) + gsi_utils.link_file(CalcAnlDir6+'/inc.fullres.'+format(fh, '02'), + CalcAnlDir+'/inc.fullres.'+format(fh, '02')) + gsi_utils.link_file(RunDir+'/sigf'+format(fh, '02'), + CalcAnlDir6+'/ges.'+format(fh, '02')) + gsi_utils.link_file(RunDir+'/sigf'+format(fh, '02'), + CalcAnlDir+'/ges.'+format(fh, '02')) + gsi_utils.copy_file(ExecChgresInc, CalcAnlDir+'/chgres_inc.x') + # for ensemble res analysis + CalcAnlDir = RunDir+'/calcanl_ensres_'+format(fh, '02') + CalcAnlDir6 = RunDir+'/calcanl_ensres_'+format(6, '02') + if not os.path.exists(CalcAnlDir): + gsi_utils.make_dir(CalcAnlDir) + if not os.path.exists(CalcAnlDir6): + gsi_utils.make_dir(CalcAnlDir6) + gsi_utils.link_file(ComOut+'/'+APrefix+'atma'+format(fh, '03')+'.ensres'+ASuffix, + CalcAnlDir6+'/anl.ensres.'+format(fh, '02')) + gsi_utils.link_file(RunDir+'/sigi'+format(fh, '02')+'.nc', + CalcAnlDir6+'/siginc.nc.'+format(fh, '02')) + gsi_utils.link_file(ComIn_Ges+'/'+GPrefix+'atmf'+format(fh, '03')+'.ensres'+GSuffix, + CalcAnlDir6+'/ges.ensres.'+format(fh, '02')) + + + else: + # for full res analysis + CalcAnlDir = RunDir+'/calcanl_'+format(6, '02') + if not os.path.exists(CalcAnlDir): + gsi_utils.make_dir(CalcAnlDir) + gsi_utils.copy_file(ExecAnl, CalcAnlDir+'/calc_anl.x') + gsi_utils.link_file(RunDir+'/siginc.nc', CalcAnlDir+'/siginc.nc.06') + gsi_utils.link_file(RunDir+'/sigf06', CalcAnlDir+'/ges.06') + gsi_utils.link_file(RunDir+'/siganl', CalcAnlDir+'/anl.06') + gsi_utils.copy_file(ExecChgresInc, CalcAnlDir+'/chgres_inc.x') + # for ensemble res analysis + CalcAnlDir = RunDir+'/calcanl_ensres_'+format(6, '02') + if not os.path.exists(CalcAnlDir): + gsi_utils.make_dir(CalcAnlDir) + gsi_utils.copy_file(ExecAnl, CalcAnlDir+'/calc_anl.x') + gsi_utils.link_file(RunDir+'/siginc.nc', CalcAnlDir+'/siginc.nc.06') + gsi_utils.link_file(ComOut+'/'+APrefix+'atmanl.ensres'+ASuffix, CalcAnlDir+'/anl.ensres.06') + gsi_utils.link_file(ComIn_Ges+'/'+GPrefix+'atmf006.ensres'+GSuffix, CalcAnlDir+'/ges.ensres.06') + + ######## get dimension information from background and increment files + AnlDims = gsi_utils.get_ncdims('siginc.nc') + if ASuffix == ".nc": + GesDims = gsi_utils.get_ncdims('sigf06') + else: + GesDims = gsi_utils.get_nemsdims('sigf06',NEMSGet) + + levs = AnlDims['lev'] + LonA = AnlDims['lon'] + LatA = AnlDims['lat'] + LonB = GesDims['grid_xt'] + LatB = GesDims['grid_yt'] + + # vertical coordinate info + levs2 = levs + 1 + siglevel = FixDir+'/global_hyblev.l'+str(levs2)+'.txt' + + ####### determine how 
many forecast hours to process + nFH=0 + for fh in IAUHH: + # first check to see if increment file exists + CalcAnlDir = RunDir+'/calcanl_'+format(fh, '02') + if (os.path.isfile(CalcAnlDir+'/siginc.nc.'+format(fh, '02'))): + print('will process increment file: '+CalcAnlDir+'/siginc.nc.'+format(fh, '02')) + nFH+=1 + else: + print('Increment file: '+CalcAnlDir+'/siginc.nc.'+format(fh, '02')+' does not exist. Skipping.') + + sys.stdout.flush() + ######## need to gather information about runtime environment + ExecCMD = ExecCMD.replace("$ncmd","1") + os.environ['OMP_NUM_THREADS'] = str(NThreads) + os.environ['ncmd'] = str(nFH) + ExecCMDMPI1 = ExecCMDMPI.replace("$ncmd",str(1)) + ExecCMDMPI = ExecCMDMPI.replace("$ncmd",str(nFH)) + ExecCMDLevs = ExecCMDMPI.replace("$ncmd",str(levs)) + ExecCMDMPI10 = ExecCMDMPI.replace("$ncmd",str(10)) + + # are we using mpirun with lsf, srun, or aprun with Cray? + launcher = ExecCMDMPI.split(' ')[0] + if launcher == 'mpirun': + hostfile = os.getenv('LSB_DJOB_HOSTFILE','') + with open(hostfile) as f: + hosts_tmp = f.readlines() + hosts_tmp = [x.strip() for x in hosts_tmp] + hosts = [] + [hosts.append(x) for x in hosts_tmp if x not in hosts] + nhosts = len(hosts) + ExecCMDMPI_host = 'mpirun -np '+str(nFH)+' --hostfile hosts' + tasks = int(os.getenv('LSB_DJOB_NUMPROC',1)) + if levs > tasks: + ExecCMDMPILevs_host = 'mpirun -np '+str(tasks)+' --hostfile hosts' + ExecCMDMPILevs_nohost = 'mpirun -np '+str(tasks) + else: + ExecCMDMPILevs_host = 'mpirun -np '+str(levs)+' --hostfile hosts' + ExecCMDMPILevs_nohost = 'mpirun -np '+str(levs) + ExecCMDMPI1_host = 'mpirun -np 1 --hostfile hosts' + ExecCMDMPI10_host = 'mpirun -np 10 --hostfile hosts' + elif launcher == 'mpiexec': + hostfile = os.getenv('PBS_NODEFILE','') + with open(hostfile) as f: + hosts_tmp = f.readlines() + hosts_tmp = [x.strip() for x in hosts_tmp] + hosts = [] + [hosts.append(x) for x in hosts_tmp if x not in hosts] + nhosts = len(hosts) + ExecCMDMPI_host = 'mpiexec -l -n '+str(nFH) + tasks = int(os.getenv('ntasks',1)) + print('nhosts,tasks=', nhosts, tasks) + if levs > tasks: + ExecCMDMPILevs_host = 'mpiexec -l -n '+str(tasks) + ExecCMDMPILevs_nohost = 'mpiexec -l -n '+str(tasks) + else: + ExecCMDMPILevs_host = 'mpiexec -l -n '+str(levs) + ExecCMDMPILevs_nohost = 'mpiexec -l -n '+str(levs) + ExecCMDMPI1_host = 'mpiexec -l -n 1 --cpu-bind depth --depth '+str(NThreads) + ExecCMDMPI10_host = 'mpiexec -l -n 10 --cpu-bind depth --depth '+str(NThreads) + elif launcher == 'srun': + nodes = os.getenv('SLURM_JOB_NODELIST','') + hosts_tmp = subprocess.check_output('scontrol show hostnames '+nodes, shell=True) + if (sys.version_info > (3, 0)): + hosts_tmp = hosts_tmp.decode('utf-8') + hosts_tmp = str(hosts_tmp).splitlines() + hosts_tmp = [x.strip() for x in hosts_tmp] + else: + hosts_tmp = hosts_tmp.strip() + hosts_tmp = str(hosts_tmp).splitlines() + hosts_tmp = [x.strip() for x in hosts_tmp] + hosts = [] + [hosts.append(x) for x in hosts_tmp if x not in hosts] + nhosts = len(hosts) + ExecCMDMPI_host = 'srun -n '+str(nFH)+' --verbose --export=ALL -c 1 --distribution=arbitrary --cpu-bind=cores' + # need to account for when fewer than LEVS tasks are available + tasks = int(os.getenv('SLURM_NPROCS',1)) + if levs > tasks: + ExecCMDMPILevs_host = 'srun -n '+str(tasks)+' --verbose --export=ALL -c 1 --distribution=arbitrary --cpu-bind=cores' + ExecCMDMPILevs_nohost = 'srun -n '+str(tasks)+' --verbose --export=ALL' + else: + ExecCMDMPILevs_host = 'srun -n '+str(levs)+' --verbose --export=ALL -c 1 --distribution=arbitrary 
--cpu-bind=cores' + ExecCMDMPILevs_nohost = 'srun -n '+str(levs)+' --verbose --export=ALL' + ExecCMDMPI1_host = 'srun -n 1 --verbose --export=ALL -c 1 --distribution=arbitrary --cpu-bind=cores' + ExecCMDMPI10_host = 'srun -n 10 --verbose --export=ALL -c 1 --distribution=arbitrary --cpu-bind=cores' + elif launcher == 'aprun': + hostfile = os.getenv('LSB_DJOB_HOSTFILE','') + with open(hostfile) as f: + hosts_tmp = f.readlines() + hosts_tmp = [x.strip() for x in hosts_tmp] + hosts = [] + [hosts.append(x) for x in hosts_tmp if x not in hosts] + nhosts = len(hosts) + ExecCMDMPI_host = 'aprun -l hosts -d '+str(NThreads)+' -n '+str(nFH) + ExecCMDMPILevs_host = 'aprun -l hosts -d '+str(NThreads)+' -n '+str(levs) + ExecCMDMPILevs_nohost = 'aprun -d '+str(NThreads)+' -n '+str(levs) + ExecCMDMPI1_host = 'aprun -l hosts -d '+str(NThreads)+' -n 1' + ExecCMDMPI10_host = 'aprun -l hosts -d '+str(NThreads)+' -n 10' + else: + print('unknown MPI launcher. Failure.') + sys.exit(1) + + ####### generate the full resolution analysis + ihost = 0 + ### interpolate increment to full background resolution + for fh in IAUHH: + # first check to see if increment file exists + CalcAnlDir = RunDir+'/calcanl_'+format(fh, '02') + if (os.path.isfile(CalcAnlDir+'/siginc.nc.'+format(fh, '02'))): + print('Interpolating increment for f'+format(fh, '03')) + # set up the namelist + namelist = OrderedDict() + namelist["setup"] = {"lon_out": LonB, + "lat_out": LatB, + "lev": levs, + "infile": "'siginc.nc."+format(fh, '02')+"'", + "outfile": "'inc.fullres."+format(fh, '02')+"'", + } + gsi_utils.write_nml(namelist, CalcAnlDir+'/fort.43') + + if ihost >= nhosts: + ihost = 0 + with open(CalcAnlDir+'/hosts', 'w') as hostfile: + hostfile.write(hosts[ihost]+'\n') + if launcher == 'srun': # need to write host per task not per node for slurm + # For xjet, each instance of chgres_inc must run on two nodes each + if os.getenv('SLURM_JOB_PARTITION','') == 'xjet': + for a in range(0,4): + hostfile.write(hosts[ihost]+'\n') + ihost+=1 + for a in range(0,5): + hostfile.write(hosts[ihost]+'\n') + for a in range(0,9): # need 9 more of the same host for the 10 tasks for chgres_inc + hostfile.write(hosts[ihost]+'\n') + if launcher == 'srun': + os.environ['SLURM_HOSTFILE'] = CalcAnlDir+'/hosts' + print('interp_inc', fh, namelist) + job = subprocess.Popen(ExecCMDMPI10_host+' '+CalcAnlDir+'/chgres_inc.x', shell=True, cwd=CalcAnlDir) + print(ExecCMDMPI10_host+' '+CalcAnlDir+'/chgres_inc.x submitted on '+hosts[ihost]) + sys.stdout.flush() + ec = job.wait() + if ec != 0: + print('Error with chgres_inc.x at forecast hour: f'+format(fh, '03')) + print('Error with chgres_inc.x, exit code='+str(ec)) + print(locals()) + sys.exit(ec) + ihost+=1 + else: + print('f'+format(fh, '03')+' is in $IAUFHRS but increment file is missing. 
Skipping.') + + #### generate analysis from interpolated increment + CalcAnlDir6 = RunDir+'/calcanl_'+format(6, '02') + # set up the namelist + namelist = OrderedDict() + namelist["setup"] = {"datapath": "'./'", + "analysis_filename": "'anl'", + "firstguess_filename": "'ges'", + "increment_filename": "'inc.fullres'", + "fhr": 6, + } + + gsi_utils.write_nml(namelist, CalcAnlDir6+'/calc_analysis.nml') + + # run the executable + if ihost >= nhosts-1: + ihost = 0 + if launcher == 'srun': + del os.environ['SLURM_HOSTFILE'] + print('fullres_calc_anl', namelist) + fullres_anl_job = subprocess.Popen(ExecCMDMPILevs_nohost+' '+CalcAnlDir6+'/calc_anl.x', shell=True, cwd=CalcAnlDir6) + print(ExecCMDMPILevs_nohost+' '+CalcAnlDir6+'/calc_anl.x submitted') + + sys.stdout.flush() + exit_fullres = fullres_anl_job.wait() + sys.stdout.flush() + if exit_fullres != 0: + print('Error with calc_analysis.x for deterministic resolution, exit code='+str(exit_fullres)) + print(locals()) + sys.exit(exit_fullres) + + + ######## compute determinstic analysis on ensemble resolution + if Cdump == "gdas": + chgres_jobs = [] + for fh in IAUHH: + # first check to see if guess file exists + CalcAnlDir6 = RunDir+'/calcanl_ensres_06' + print(CalcAnlDir6+'/ges.ensres.'+format(fh, '02')) + if (os.path.isfile(CalcAnlDir6+'/ges.ensres.'+format(fh, '02'))): + print('Calculating analysis on ensemble resolution for f'+format(fh, '03')) + ######## generate ensres analysis from interpolated background + # set up the namelist + namelist = OrderedDict() + namelist["setup"] = {"datapath": "'./'", + "analysis_filename": "'anl.ensres'", + "firstguess_filename": "'ges.ensres'", + "increment_filename": "'siginc.nc'", + "fhr": fh, + } + + gsi_utils.write_nml(namelist, CalcAnlDir6+'/calc_analysis.nml') + + # run the executable + if ihost > nhosts-1: + ihost = 0 + print('ensres_calc_anl', namelist) + ensres_anl_job = subprocess.Popen(ExecCMDMPILevs_nohost+' '+CalcAnlDir6+'/calc_anl.x', shell=True, cwd=CalcAnlDir6) + print(ExecCMDMPILevs_nohost+' '+CalcAnlDir6+'/calc_anl.x submitted') + + sys.stdout.flush() + ####### check on analysis steps + exit_ensres = ensres_anl_job.wait() + if exit_ensres != 0: + print('Error with calc_analysis.x for ensemble resolution, exit code='+str(exit_ensres)) + print(locals()) + sys.exit(exit_ensres) + else: + print('f'+format(fh, '03')+' is in $IAUFHRS but ensemble resolution guess file is missing. 
Skipping.') + + print('calcanl_gfs successfully completed at: ',datetime.datetime.utcnow()) + print(locals()) + +# run the function if this script is called from the command line +if __name__ == '__main__': + DoIAU = gsi_utils.isTrue(os.getenv('DOIAU', 'NO')) + l4DEnsVar = gsi_utils.isTrue(os.getenv('l4densvar', 'NO')) + Write4Danl = gsi_utils.isTrue(os.getenv('lwrite4danl', 'NO')) + ComIn_Ges = os.getenv('COMIN_GES', './') + GPrefix = os.getenv('GPREFIX', './') + GSuffix = os.getenv('GSUFFIX', './') + ComOut = os.getenv('COMOUT', './') + APrefix = os.getenv('APREFIX', '') + ASuffix= os.getenv('ASUFFIX', '') + NThreads = os.getenv('NTHREADS_CHGRES', 1) + FixDir = os.getenv('FIXgsm', './') + atmges_ens_mean = os.getenv('ATMGES_ENSMEAN', './atmges_ensmean') + RunDir = os.getenv('DATA', './') + ExecCMD = os.getenv('APRUN_CALCANL', '') + ExecCMDMPI = os.getenv('APRUN_CALCINC', '') + ExecAnl = os.getenv('CALCANLEXEC', './calc_analysis.x') + ExecChgresInc = os.getenv('CHGRESINCEXEC', './interp_inc.x') + NEMSGet = os.getenv('NEMSIOGET','nemsio_get') + IAUHrs = list(map(int,os.getenv('IAUFHRS','6').split(','))) + Cdump = os.getenv('CDUMP', 'gdas') + + print(locals()) + calcanl_gfs(DoIAU, l4DEnsVar, Write4Danl, ComOut, APrefix, ASuffix, + ComIn_Ges, GPrefix, GSuffix, + FixDir, atmges_ens_mean, RunDir, NThreads, NEMSGet, IAUHrs, + ExecCMD, ExecCMDMPI, ExecAnl, ExecChgresInc, + Cdump) diff --git a/ush/calcinc_gfs.py b/ush/calcinc_gfs.py new file mode 100755 index 0000000000..0306d9f39f --- /dev/null +++ b/ush/calcinc_gfs.py @@ -0,0 +1,90 @@ +#!/usr/bin/env python +# calcinc_gfs.py +# cory.r.martin@noaa.gov +# 2019-10-10 +# script to run calc_increment_ens.x to produce +# increment from background and analysis file difference +import os +import shutil +import subprocess +import sys +import gsi_utils +from collections import OrderedDict + +# main function +def calcinc_gfs(DoIAU, l4DEnsVar, Write4Danl, ComOut, APrefix, ASuffix, IAUHrs, + NThreads, IMP_Physics, Inc2Zero, RunDir, Exec, ExecCMD): + # run the calc_increment_ens executable + + # copy and link files + if DoIAU and l4DEnsVar and Write4Danl: + nFH=0 + for fh in IAUHrs: + nFH+=1 + if fh == 6: + gsi_utils.link_file('sigf06', 'atmges_mem'+format(nFH, '03')) + gsi_utils.link_file('siganl', 'atmanl_mem'+format(nFH, '03')) + gsi_utils.link_file(ComOut+'/'+APrefix+'atminc.nc', 'atminc_mem'+format(nFH, '03')) + else: + gsi_utils.link_file('sigf'+format(fh, '02'), 'atmges_mem'+format(nFH, '03')) + gsi_utils.link_file('siga'+format(fh, '02'), 'atmanl_mem'+format(nFH, '03')) + gsi_utils.link_file(ComOut+'/'+APrefix+'atmi'+format(fh, '03')+'.nc', 'atminc_mem'+format(nFH, '03')) + else: + nFH=1 + gsi_utils.link_file('sigf06', 'atmges_mem001') + gsi_utils.link_file('siganl', 'atmanl_mem001') + gsi_utils.link_file(ComOut+'/'+APrefix+'atminc', 'atminc_mem001') + os.environ['OMP_NUM_THREADS'] = str(NThreads) + os.environ['ncmd'] = str(nFH) + shutil.copy(Exec,RunDir+'/calc_inc.x') + ExecCMD = ExecCMD.replace("$ncmd",str(nFH)) + + # set up the namelist + namelist = OrderedDict() + namelist["setup"] = {"datapath": "'./'", + "analysis_filename": "'atmanl'", + "firstguess_filename": "'atmges'", + "increment_filename": "'atminc'", + "debug": ".false.", + "nens": str(nFH), + "imp_physics": str(IMP_Physics)} + + namelist["zeroinc"] = {"incvars_to_zero": Inc2Zero} + + gsi_utils.write_nml(namelist, RunDir+'/calc_increment.nml') + + # run the executable + try: + err = subprocess.check_call(ExecCMD+' '+RunDir+'/calc_inc.x', shell=True) + print(locals()) + except 
subprocess.CalledProcessError as e: + print('Error with calc_inc.x, exit code='+str(e.returncode)) + print(locals()) + sys.exit(e.returncode) + +# run the function if this script is called from the command line +if __name__ == '__main__': + DoIAU = gsi_utils.isTrue(os.getenv('DOIAU', 'NO')) + l4DEnsVar = gsi_utils.isTrue(os.getenv('l4densvar', 'NO')) + Write4Danl = gsi_utils.isTrue(os.getenv('lwrite4danl', 'NO')) + ComOut = os.getenv('COMOUT', './') + APrefix = os.getenv('APREFIX', '') + ASuffix= os.getenv('ASUFFIX', '') + NThreads = os.getenv('NTHREADS_CALCINC', 1) + IMP_Physics = os.getenv('imp_physics', 11) + RunDir = os.getenv('DATA', './') + ExecNC = os.getenv('CALCINCNCEXEC', './calc_increment_ens_ncio.x') + ExecNEMS = os.getenv('CALCINCEXEC', './calc_increment_ens.x') + Inc2Zero = os.getenv('INCREMENTS_TO_ZERO', '"NONE"') + ExecCMD = os.getenv('APRUN_CALCINC', '') + IAUHrs = list(map(int,os.getenv('IAUFHRS','6').split(','))) + + # determine if the analysis is in netCDF or NEMSIO + if ASuffix == ".nc": + Exec = ExecNC + else: + Exec = ExecNEMS + + print(locals()) + calcinc_gfs(DoIAU, l4DEnsVar, Write4Danl, ComOut, APrefix, ASuffix, IAUHrs, + NThreads, IMP_Physics, Inc2Zero, RunDir, Exec, ExecCMD) diff --git a/ush/getncdimlen b/ush/getncdimlen new file mode 100755 index 0000000000..5d230f6cc3 --- /dev/null +++ b/ush/getncdimlen @@ -0,0 +1,17 @@ +#!/usr/bin/env python +# getncdimlen +# cory.r.martin@noaa.gov +# 2019-10-17 +# script to return length of requested dimension +# for specified netCDF file +import argparse +import gsi_utils + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description='Get length of dimension specified from a FV3GFS netCDF file') + parser.add_argument('ncfile', help='path to input netCDF file', type=str) + parser.add_argument('dimname', help='name of dimension (ex: grid_xt)', type=str) + args = parser.parse_args() + FileDims = gsi_utils.get_ncdims(args.ncfile) + print(FileDims[args.dimname]) diff --git a/ush/gsi_utils.py b/ush/gsi_utils.py new file mode 100644 index 0000000000..79c6e627e0 --- /dev/null +++ b/ush/gsi_utils.py @@ -0,0 +1,138 @@ +### gsi_utils.py +### a collection of functions, classes, etc. +### used for the GSI global analysis + +def isTrue(str_in): + """ isTrue(str_in) + - function to translate shell variables to python logical variables + + input: str_in - string (should be like 'YES', 'TRUE', etc.) 
+ returns: status (logical True or False) + + """ + str_in = str_in.upper() + if str_in in ['YES','.TRUE.']: + status = True + else: + status = False + return status + +def link_file(from_file, to_file): + """ link_file(from_file, to_file) + - function to check if a path exists, and if not, make a symlink + input: from_file - string path + to_file - string path + """ + import os + if not os.path.exists(to_file): + if not os.path.islink(to_file): + os.symlink(from_file, to_file) + else: + print(to_file+" exists, unlinking.") + os.unlink(to_file) + os.symlink(from_file, to_file) + print("ln -s "+from_file+" "+to_file) + +def copy_file(from_file, to_file): + import shutil + shutil.copy(from_file, to_file) + print("cp "+from_file+" "+to_file) + +def make_dir(directory): + import os + os.makedirs(directory) + print("mkdir -p "+directory) + +def write_nml(nml_dict, nml_file): + """ write_nml(nml_dict, nml_file) + - function to write out namelist dictionary nml_dict to file nml_file + input: nml_dict - dictionary of dictionaries + first dictionary is &nml, second is nmlvar='value' + NOTE: this shoudl be an OrderedDict or else it might fail + nml_file - string path to write namelist file to + """ + nfile = open(nml_file, 'w') + + for nml, nmlvars in nml_dict.items(): + nfile.write('&'+nml+'\n') + for var, val in nmlvars.items(): + nfile.write(' '+str(var)+' = '+str(val)+'\n') + nfile.write('/\n\n') + nfile.close() + + +def get_ncdims(ncfile): + """ get_ncdims(ncfile) + - function to return dictionary of netCDF file dimensions and their lengths + input: ncfile - string to path to netCDF file + output: ncdims - dictionary where key is the name of a dimension and the + value is the length of that dimension + + ex: ncdims['pfull'] = 127 + """ + try: + import netCDF4 as nc + except ImportError: + print("Python Error!") + print("netCDF4 Python module not available. Do you have the proper Python available in your environment?") + print("Hera: module use -a /contrib/modulefiles && module load anaconda/2.3.0") + print("Dell: module load python/3.6.3") + print(" ") + ncf = nc.Dataset(ncfile) + ncdims = {} + for d in ncf.dimensions.keys(): + ncdims[d] = int(len(ncf.dimensions[d])) + ncf.close() + + return ncdims + +def get_nemsdims(nemsfile,nemsexe): + """ get_nemsdims(nemsfile,nemsexe) + - function to return dictionary of NEMSIO file dimensions for use + input: nemsfile - string to path nemsio file + nemsexe - string to path nemsio_get executable + output: nemsdims - dictionary where key is the name of a dimension and the + value is the length of that dimension + ex: nemsdims['pfull'] = 127 + """ + import subprocess + ncdims = { + 'dimx': 'grid_xt', + 'dimy': 'grid_yt', + 'dimz': 'pfull', + } + nemsdims = {} + for dim in ['dimx','dimy','dimz']: + out = subprocess.Popen([nemsexe,nemsfile,dim],stdout=subprocess.PIPE,stderr=subprocess.STDOUT) + stdout, stderr = out.communicate() + nemsdims[ncdims[dim]] = int(stdout.split(' ')[-1].rstrip()) + return nemsdims + +def get_timeinfo(ncfile): + """ get_timeinfo(ncfile) + - function to return datetime objects of initialized time and valid time + input: ncfile - string to path to netCDF file + returns: inittime, validtime - datetime objects + nfhour - integer forecast hour + """ + try: + import netCDF4 as nc + except ImportError: + print("Python Error!") + print("netCDF4 Python module not available. 
Do you have the proper Python available in your environment?") + print("Hera: module use -a /contrib/modulefiles && module load anaconda/2.3.0") + print("Dell: module load python/3.6.3") + print(" ") + import datetime as dt + import re + ncf = nc.Dataset(ncfile) + time_units = ncf['time'].units + date_str = time_units.split('since ')[1] + date_str = re.sub("[^0-9]", "", date_str) + initstr = date_str[0:10] + inittime = dt.datetime.strptime(initstr,"%Y%m%d%H") + nfhour = int(ncf['time'][0]) + validtime = inittime + dt.timedelta(hours=nfhour) + ncf.close() + + return inittime, validtime, nfhour From 3ddc2a76cfce770d738dc054457884b1582c577a Mon Sep 17 00:00:00 2001 From: Andrew Collard Date: Thu, 22 Aug 2024 16:36:09 +0000 Subject: [PATCH 07/22] Add external gsi_utils --- sorc/build_all.sh | 15 +++++++++++++++ sorc/build_gsi_utils.sh | 28 ++++++++++++++++++++++++++++ sorc/checkout.sh | 12 ++++++++++++ sorc/link_fv3gfs.sh | 19 +++++++++++++++---- 4 files changed, 70 insertions(+), 4 deletions(-) create mode 100755 sorc/build_gsi_utils.sh diff --git a/sorc/build_all.sh b/sorc/build_all.sh index 1c3ba8d00a..8433a2fd2a 100755 --- a/sorc/build_all.sh +++ b/sorc/build_all.sh @@ -83,6 +83,21 @@ fi ((err+=$rc)) } +#------------------------------------ +# build gsi_utils +#------------------------------------ +$Build_gsi && { +echo " .... Building gsi .... " +./build_gsi_utils.sh > $logs_dir/build_gsi_utils.log 2>&1 +rc=$? +if [[ $rc -ne 0 ]] ; then + echo "Fatal error in building gsi_utils." + echo "The log file is in $logs_dir/build_gsi_utils.log" +fi +((err+=$rc)) +} + +#------------------------------------ #------------------------------------ # build ncep_post #------------------------------------ diff --git a/sorc/build_gsi_utils.sh b/sorc/build_gsi_utils.sh new file mode 100755 index 0000000000..58c64e6e4a --- /dev/null +++ b/sorc/build_gsi_utils.sh @@ -0,0 +1,28 @@ +#! /usr/bin/env bash +set -eux + +OPTIND=1 +while getopts ":j:dv" option; do + case "${option}" in + d) BUILD_TYPE="Debug";; + j) BUILD_JOBS="${OPTARG}";; + v) BUILD_VERBOSE="YES";; + :) + echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" + usage + ;; + *) + echo "[${BASH_SOURCE[0]}]: Unrecognized option: ${option}" + usage + ;; + esac +done +shift $((OPTIND-1)) + +BUILD_TYPE=${BUILD_TYPE:-"Release"} \ +BUILD_VERBOSE=${BUILD_VERBOSE:-"NO"} \ +BUILD_JOBS=${BUILD_JOBS:-8} \ +UTIL_OPTS="-DBUILD_UTIL_ENKF_GFS=ON -DBUILD_UTIL_NCIO=ON" \ +"./gsi_utils.fd/ush/build.sh" + +exit diff --git a/sorc/checkout.sh b/sorc/checkout.sh index 2f568a0595..7c99468368 100755 --- a/sorc/checkout.sh +++ b/sorc/checkout.sh @@ -45,6 +45,18 @@ else echo 'Skip. Directory gsi.fd already exists.' fi +echo gsi_utils checkout ... +if [[ ! -d gsi_utils.fd ]] ; then + rm -f ${topdir}/checkout-gsi.log +# Check out develop for now + git clone --recursive https://github.com/NOAA-EMC/GSI-Utils.git gsi_utils.fd >> ${topdir}/checkout-gsi_utils.log 2>&1 + cd gsi_utils.fd + git submodule update --init + cd ${topdir} +else + echo 'Skip. Directory gsi_utils.fd already exists.' +fi + echo gldas checkout ... if [[ ! -d gldas.fd ]] ; then rm -f ${topdir}/checkout-gldas.log diff --git a/sorc/link_fv3gfs.sh b/sorc/link_fv3gfs.sh index 35cdf6612d..a7db5ff25f 100755 --- a/sorc/link_fv3gfs.sh +++ b/sorc/link_fv3gfs.sh @@ -195,14 +195,25 @@ for ufs_utilsexe in \ $LINK ../sorc/ufs_utils.fd/exec/$ufs_utilsexe . 
done -for gsiexe in calc_analysis.x calc_increment_ens_ncio.x calc_increment_ens.x \ - getsfcensmeanp.x getsigensmeanp_smooth.x getsigensstatp.x enkf.x gsi.x \ - interp_inc.x ncdiag_cat_serial.x oznmon_horiz.x oznmon_time.x radmon_angle.x \ - radmon_bcoef.x radmon_bcor.x radmon_time.x recentersigp.x;do +#for gsiexe in calc_analysis.x calc_increment_ens_ncio.x calc_increment_ens.x \ +# getsfcensmeanp.x getsigensmeanp_smooth.x getsigensstatp.x enkf.x gsi.x \ +# interp_inc.x ncdiag_cat_serial.x oznmon_horiz.x oznmon_time.x radmon_angle.x \ +# radmon_bcoef.x radmon_bcor.x radmon_time.x recentersigp.x;do +# [[ -s $gsiexe ]] && rm -f $gsiexe +# $LINK ../sorc/gsi.fd/exec/$gsiexe . +#done + +for gsiexe in enkf.x gsi.x; do [[ -s $gsiexe ]] && rm -f $gsiexe $LINK ../sorc/gsi.fd/exec/$gsiexe . done +for gsiexe in calc_analysis.x calc_increment_ens_ncio.x calc_increment_ens.x \ + getsfcensmeanp.x getsigensmeanp_smooth.x getsigensstatp.x interp_inc.x; do + [[ -s $gsiexe ]] && rm -f $gsiexe + $LINK ../sorc/gsi_utils.fd/install/bin/$gsiexe . +done + for gldasexe in gdas2gldas gldas2gdas gldas_forcing gldas_model gldas_post gldas_rst; do [[ -s $gldasexe ]] && rm -f $gldasexe $LINK ../sorc/gldas.fd/exec/$gldasexe . From dde6c68014e0a7e9a411a4c277f4f82930e905f8 Mon Sep 17 00:00:00 2001 From: Andrew Collard Date: Thu, 22 Aug 2024 17:27:12 +0000 Subject: [PATCH 08/22] Update some links --- sorc/link_fv3gfs.sh | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/sorc/link_fv3gfs.sh b/sorc/link_fv3gfs.sh index a7db5ff25f..0a951e1415 100755 --- a/sorc/link_fv3gfs.sh +++ b/sorc/link_fv3gfs.sh @@ -114,10 +114,11 @@ cd ${pwd}/../fix ||exit 8 [[ -d fix_gsi ]] && rm -rf fix_gsi $LINK ../sorc/gsi.fd/fix fix_gsi cd ${pwd}/../ush ||exit 8 - $LINK ../sorc/gsi.fd/ush/gsi_utils.py . - $LINK ../sorc/gsi.fd/ush/calcanl_gfs.py . - $LINK ../sorc/gsi.fd/ush/calcinc_gfs.py . - $LINK ../sorc/gsi.fd/ush/getncdimlen . +# The following are now explicitly in the global-workflow repository +# $LINK ../sorc/gsi.fd/ush/gsi_utils.py . +# $LINK ../sorc/gsi.fd/ush/calcanl_gfs.py . +# $LINK ../sorc/gsi.fd/ush/calcinc_gfs.py . +# $LINK ../sorc/gsi.fd/ush/getncdimlen . 
#------------------------------ From 644a64fb586dc3ac7deafac94247fb1fb8b6b9eb Mon Sep 17 00:00:00 2001 From: Andrew Collard Date: Wed, 30 Oct 2024 15:43:06 +0000 Subject: [PATCH 09/22] Update gsi_utils version number and add ncdiag --- ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_calc.ecf | 1 + scripts/exglobal_diag.sh | 3 ++- sorc/checkout.sh | 4 ++-- versions/wcoss2.ver | 1 + 4 files changed, 6 insertions(+), 3 deletions(-) diff --git a/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_calc.ecf b/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_calc.ecf index fa1e87a0eb..3cc3192ae0 100755 --- a/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_calc.ecf +++ b/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_calc.ecf @@ -29,6 +29,7 @@ module load cray-pals/${cray_pals_ver} module load python/${python_ver} module load hdf5/${hdf5_ver} module load netcdf/${netcdf_ver} +module load ncdiag/${ncdiag_ver} module list diff --git a/scripts/exglobal_diag.sh b/scripts/exglobal_diag.sh index 3ed2a5bf7b..5922016d41 100755 --- a/scripts/exglobal_diag.sh +++ b/scripts/exglobal_diag.sh @@ -46,7 +46,8 @@ export NMV=${NMV:-"/bin/mv"} export NLN=${NLN:-"/bin/ln -sf"} export CHGRP_CMD=${CHGRP_CMD:-"chgrp ${group_name:-rstprod}"} export NCLEN=${NCLEN:-$HOMEgfs/ush/getncdimlen} -export CATEXEC=${CATEXEC:-$HOMEgfs/exec/ncdiag_cat_serial.x} +#export CATEXEC=${CATEXEC:-$HOMEgfs/exec/ncdiag_cat_serial.x} +export CATEXEC=${CATEXEC:-${ncdiag_ROOT:-${gsi_ncdiag_ROOT}}/bin/ncdiag_cat_serial.x} COMPRESS=${COMPRESS:-gzip} UNCOMPRESS=${UNCOMPRESS:-gunzip} APRUNCFP=${APRUNCFP:-""} diff --git a/sorc/checkout.sh b/sorc/checkout.sh index 7c99468368..a588fabb0d 100755 --- a/sorc/checkout.sh +++ b/sorc/checkout.sh @@ -48,10 +48,10 @@ fi echo gsi_utils checkout ... if [[ ! -d gsi_utils.fd ]] ; then rm -f ${topdir}/checkout-gsi.log -# Check out develop for now +# Check out the version before the changes for Thompson microphysics were introduced. git clone --recursive https://github.com/NOAA-EMC/GSI-Utils.git gsi_utils.fd >> ${topdir}/checkout-gsi_utils.log 2>&1 cd gsi_utils.fd - git submodule update --init + git checkout 4d4daa2def14fd45537b94926f10825b633411ae cd ${topdir} else echo 'Skip. Directory gsi_utils.fd already exists.' diff --git a/versions/wcoss2.ver b/versions/wcoss2.ver index 674a750aa5..395bc76132 100644 --- a/versions/wcoss2.ver +++ b/versions/wcoss2.ver @@ -4,6 +4,7 @@ export prod_util_ver=${prod_util_ver:-2.0.9} # Allow override from ops ecflow export obsproc_run_ver=1.2.0 export prepobs_run_ver=1.1.0 +export ncdiag_run_ver=1.0.0 export tracker_ver=v1.1.15.5 export fit_ver="newm.1.5" From 57a11cd46f08e914b991033cb528e8d35edfb349 Mon Sep 17 00:00:00 2001 From: Andrew Collard Date: Wed, 30 Oct 2024 17:19:32 +0000 Subject: [PATCH 10/22] Use an earlier version of gsi_utils --- modulefiles/module_base.wcoss2.lua | 1 + sorc/checkout.sh | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/modulefiles/module_base.wcoss2.lua b/modulefiles/module_base.wcoss2.lua index f559affc3e..4fa2c3f504 100755 --- a/modulefiles/module_base.wcoss2.lua +++ b/modulefiles/module_base.wcoss2.lua @@ -32,6 +32,7 @@ load(pathJoin("util_shared", os.getenv("util_shared_ver"))) load(pathJoin("crtm", os.getenv("crtm_ver"))) load(pathJoin("g2tmpl", os.getenv("g2tmpl_ver"))) load(pathJoin("wgrib2", os.getenv("wgrib2_ver"))) +load(pathJoin("ncdiag", os.getenv("ncdiag_ver"))) prepend_path("MODULEPATH", pathJoin("/lfs/h2/emc/global/save/emc.global/git/prepobs/v" .. 
os.getenv("prepobs_run_ver"), "modulefiles")) load(pathJoin("prepobs", os.getenv("prepobs_run_ver"))) diff --git a/sorc/checkout.sh b/sorc/checkout.sh index a588fabb0d..90df3cc532 100755 --- a/sorc/checkout.sh +++ b/sorc/checkout.sh @@ -48,10 +48,10 @@ fi echo gsi_utils checkout ... if [[ ! -d gsi_utils.fd ]] ; then rm -f ${topdir}/checkout-gsi.log -# Check out the version before the changes for Thompson microphysics were introduced. +# Check out a version before the changes for Thompson microphysics were introduced. git clone --recursive https://github.com/NOAA-EMC/GSI-Utils.git gsi_utils.fd >> ${topdir}/checkout-gsi_utils.log 2>&1 cd gsi_utils.fd - git checkout 4d4daa2def14fd45537b94926f10825b633411ae + git checkout 4c39529b48836277df3ad388a3b99184ace0abdc cd ${topdir} else echo 'Skip. Directory gsi_utils.fd already exists.' From e07187bfac344009ca3a6a271ed8c52ef234103f Mon Sep 17 00:00:00 2001 From: Andrew Collard Date: Wed, 30 Oct 2024 18:37:56 +0000 Subject: [PATCH 11/22] Remove use_qsatensmean ref. GSI#602 --- scripts/exgdas_enkf_update.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/exgdas_enkf_update.sh b/scripts/exgdas_enkf_update.sh index 16fa92b1c9..974753c672 100755 --- a/scripts/exgdas_enkf_update.sh +++ b/scripts/exgdas_enkf_update.sh @@ -267,7 +267,7 @@ cat > enkf.nml << EOFnml use_gfs_nemsio=${use_gfs_nemsio},use_gfs_ncio=${use_gfs_ncio},imp_physics=$imp_physics,lupp=$lupp, univaroz=.false.,adp_anglebc=.true.,angord=4,use_edges=.false.,emiss_bc=.true., letkf_flag=${letkf_flag},nobsl_max=${nobsl_max},denkf=${denkf},getkf=${getkf}., - nhr_anal=${IAUFHRS_ENKF},nhr_state=${IAUFHRS_ENKF},use_qsatensmean=.true., + nhr_anal=${IAUFHRS_ENKF},nhr_state=${IAUFHRS_ENKF}, lobsdiag_forenkf=$lobsdiag_forenkf, write_spread_diag=$write_spread_diag, modelspace_vloc=$modelspace_vloc, From 93abb693960e60ddc6c0ab82f7995c159d3fa92c Mon Sep 17 00:00:00 2001 From: Andrew Collard Date: Wed, 30 Oct 2024 18:39:22 +0000 Subject: [PATCH 12/22] Slight change to version of GSI_utils --- sorc/checkout.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/checkout.sh b/sorc/checkout.sh index 90df3cc532..2b3362c977 100755 --- a/sorc/checkout.sh +++ b/sorc/checkout.sh @@ -51,7 +51,7 @@ if [[ ! -d gsi_utils.fd ]] ; then # Check out a version before the changes for Thompson microphysics were introduced. git clone --recursive https://github.com/NOAA-EMC/GSI-Utils.git gsi_utils.fd >> ${topdir}/checkout-gsi_utils.log 2>&1 cd gsi_utils.fd - git checkout 4c39529b48836277df3ad388a3b99184ace0abdc + git checkout 10c2535c07cf96dbb1fdbaa48ab6261387e0a19f cd ${topdir} else echo 'Skip. Directory gsi_utils.fd already exists.' From f1c5459aeb4ef63798a210e4cdb8a75f5e32ccbc Mon Sep 17 00:00:00 2001 From: Andrew Collard Date: Wed, 30 Oct 2024 20:59:12 +0000 Subject: [PATCH 13/22] Another version number change --- sorc/checkout.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sorc/checkout.sh b/sorc/checkout.sh index 2b3362c977..9190cd380c 100755 --- a/sorc/checkout.sh +++ b/sorc/checkout.sh @@ -49,9 +49,9 @@ echo gsi_utils checkout ... if [[ ! -d gsi_utils.fd ]] ; then rm -f ${topdir}/checkout-gsi.log # Check out a version before the changes for Thompson microphysics were introduced. 
- git clone --recursive https://github.com/NOAA-EMC/GSI-Utils.git gsi_utils.fd >> ${topdir}/checkout-gsi_utils.log 2>&1 + git clone https://github.com/NOAA-EMC/GSI-Utils.git gsi_utils.fd >> ${topdir}/checkout-gsi_utils.log 2>&1 cd gsi_utils.fd - git checkout 10c2535c07cf96dbb1fdbaa48ab6261387e0a19f + git checkout 2a15d3b514cb05a9c1343e437f134375ad260369 cd ${topdir} else echo 'Skip. Directory gsi_utils.fd already exists.' From 38fe6cf31913c42e8a5d04a1fb47ec6a209da6df Mon Sep 17 00:00:00 2001 From: Andrew Collard Date: Wed, 30 Oct 2024 21:14:23 +0000 Subject: [PATCH 14/22] Fix some links --- sorc/link_fv3gfs.sh | 78 ++++++++++++++++++++++----------------------- 1 file changed, 39 insertions(+), 39 deletions(-) diff --git a/sorc/link_fv3gfs.sh b/sorc/link_fv3gfs.sh index 0a951e1415..bf8746d7f7 100755 --- a/sorc/link_fv3gfs.sh +++ b/sorc/link_fv3gfs.sh @@ -124,43 +124,43 @@ cd ${pwd}/../ush ||exit 8 #------------------------------ #--add DA Monitor file #------------------------------ -cd ${pwd}/../fix ||exit 8 - [[ -d gdas ]] && rm -rf gdas - mkdir -p gdas - cd gdas - $LINK ../../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gdas/fix/gdas_minmon_cost.txt . - $LINK ../../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gdas/fix/gdas_minmon_gnorm.txt . - $LINK ../../sorc/gsi.fd/util/Ozone_Monitor/nwprod/gdas_oznmon/fix/gdas_oznmon_base.tar . - $LINK ../../sorc/gsi.fd/util/Ozone_Monitor/nwprod/gdas_oznmon/fix/gdas_oznmon_satype.txt . - $LINK ../../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_base.tar . - $LINK ../../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_satype.txt . - $LINK ../../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_scaninfo.txt . -cd ${pwd}/../jobs ||exit 8 - $LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gdas/jobs/JGDAS_ATMOS_VMINMON . - $LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gfs/jobs/JGFS_ATMOS_VMINMON . - $LINK ../sorc/gsi.fd/util/Ozone_Monitor/nwprod/gdas_oznmon/jobs/JGDAS_ATMOS_VERFOZN . - $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon/jobs/JGDAS_ATMOS_VERFRAD . -cd ${pwd}/../parm ||exit 8 - [[ -d mon ]] && rm -rf mon - mkdir -p mon - cd mon - $LINK ../../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon/parm/gdas_radmon.parm da_mon.parm - $LINK ../../sorc/gsi.fd/util/Ozone_Monitor/nwprod/gdas_oznmon/parm/gdas_oznmon.parm . -cd ${pwd}/../scripts ||exit 8 - $LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gdas/scripts/exgdas_atmos_vminmon.sh . - $LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gfs/scripts/exgfs_atmos_vminmon.sh . - $LINK ../sorc/gsi.fd/util/Ozone_Monitor/nwprod/gdas_oznmon/scripts/exgdas_atmos_verfozn.sh . - $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon/scripts/exgdas_atmos_verfrad.sh . -cd ${pwd}/../ush ||exit 8 - $LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/minmon_shared/ush/minmon_xtrct_costs.pl . - $LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/minmon_shared/ush/minmon_xtrct_gnorms.pl . - $LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/minmon_shared/ush/minmon_xtrct_reduct.pl . - $LINK ../sorc/gsi.fd/util/Ozone_Monitor/nwprod/oznmon_shared/ush/ozn_xtrct.sh . - $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_err_rpt.sh . - $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_verf_angle.sh . - $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_verf_bcoef.sh . 
- $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_verf_bcor.sh . - $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_verf_time.sh . +#cd ${pwd}/../fix ||exit 8 +# [[ -d gdas ]] && rm -rf gdas +# mkdir -p gdas +# cd gdas +# $LINK ../../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gdas/fix/gdas_minmon_cost.txt . +# $LINK ../../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gdas/fix/gdas_minmon_gnorm.txt . +# $LINK ../../sorc/gsi.fd/util/Ozone_Monitor/nwprod/gdas_oznmon/fix/gdas_oznmon_base.tar . +# $LINK ../../sorc/gsi.fd/util/Ozone_Monitor/nwprod/gdas_oznmon/fix/gdas_oznmon_satype.txt . +# $LINK ../../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_base.tar . +# $LINK ../../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_satype.txt . +# $LINK ../../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_scaninfo.txt . +#cd ${pwd}/../jobs ||exit 8 +# $LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gdas/jobs/JGDAS_ATMOS_VMINMON . +# $LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gfs/jobs/JGFS_ATMOS_VMINMON . +# $LINK ../sorc/gsi.fd/util/Ozone_Monitor/nwprod/gdas_oznmon/jobs/JGDAS_ATMOS_VERFOZN . +# $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon/jobs/JGDAS_ATMOS_VERFRAD . +#cd ${pwd}/../parm ||exit 8 +# [[ -d mon ]] && rm -rf mon +# mkdir -p mon +# cd mon +# $LINK ../../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon/parm/gdas_radmon.parm da_mon.parm +# $LINK ../../sorc/gsi.fd/util/Ozone_Monitor/nwprod/gdas_oznmon/parm/gdas_oznmon.parm . +#cd ${pwd}/../scripts ||exit 8 +# $LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gdas/scripts/exgdas_atmos_vminmon.sh . +# $LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gfs/scripts/exgfs_atmos_vminmon.sh . +# $LINK ../sorc/gsi.fd/util/Ozone_Monitor/nwprod/gdas_oznmon/scripts/exgdas_atmos_verfozn.sh . +# $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon/scripts/exgdas_atmos_verfrad.sh . +#cd ${pwd}/../ush ||exit 8 +# $LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/minmon_shared/ush/minmon_xtrct_costs.pl . +# $LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/minmon_shared/ush/minmon_xtrct_gnorms.pl . +# $LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/minmon_shared/ush/minmon_xtrct_reduct.pl . +# $LINK ../sorc/gsi.fd/util/Ozone_Monitor/nwprod/oznmon_shared/ush/ozn_xtrct.sh . +# $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_err_rpt.sh . +# $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_verf_angle.sh . +# $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_verf_bcoef.sh . +# $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_verf_bcor.sh . +# $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_verf_time.sh . #------------------------------ @@ -201,10 +201,10 @@ done # interp_inc.x ncdiag_cat_serial.x oznmon_horiz.x oznmon_time.x radmon_angle.x \ # radmon_bcoef.x radmon_bcor.x radmon_time.x recentersigp.x;do # [[ -s $gsiexe ]] && rm -f $gsiexe -# $LINK ../sorc/gsi.fd/exec/$gsiexe . +# $LINK ../sorc/gsi_utils.fd/install/bin/$gsiexe . #done -for gsiexe in enkf.x gsi.x; do +for gsiexe in enkf.x gsi.x recentersigp.x; do [[ -s $gsiexe ]] && rm -f $gsiexe $LINK ../sorc/gsi.fd/exec/$gsiexe . 
done From 66745c450e4b31b05d8709ef7054cf2e14dcde15 Mon Sep 17 00:00:00 2001 From: Andrew Collard Date: Wed, 30 Oct 2024 21:22:10 +0000 Subject: [PATCH 15/22] Another link fix --- sorc/link_fv3gfs.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sorc/link_fv3gfs.sh b/sorc/link_fv3gfs.sh index bf8746d7f7..e3ab1a9c57 100755 --- a/sorc/link_fv3gfs.sh +++ b/sorc/link_fv3gfs.sh @@ -204,13 +204,13 @@ done # $LINK ../sorc/gsi_utils.fd/install/bin/$gsiexe . #done -for gsiexe in enkf.x gsi.x recentersigp.x; do +for gsiexe in enkf.x gsi.x; do [[ -s $gsiexe ]] && rm -f $gsiexe $LINK ../sorc/gsi.fd/exec/$gsiexe . done for gsiexe in calc_analysis.x calc_increment_ens_ncio.x calc_increment_ens.x \ - getsfcensmeanp.x getsigensmeanp_smooth.x getsigensstatp.x interp_inc.x; do + getsfcensmeanp.x getsigensmeanp_smooth.x getsigensstatp.x interp_inc.x recentersigp.x; do [[ -s $gsiexe ]] && rm -f $gsiexe $LINK ../sorc/gsi_utils.fd/install/bin/$gsiexe . done From b7cee3b64e5b993b559528a6fadae74f83f1364e Mon Sep 17 00:00:00 2001 From: Andrew Collard Date: Thu, 31 Oct 2024 19:41:15 +0000 Subject: [PATCH 16/22] Fix small bug in archive and change default ANAVINFO --- scripts/exgdas_enkf_update.sh | 4 +++- scripts/exglobal_atmos_analysis.sh | 4 +++- ush/hpssarch_gen.sh | 6 ++++-- 3 files changed, 10 insertions(+), 4 deletions(-) diff --git a/scripts/exgdas_enkf_update.sh b/scripts/exgdas_enkf_update.sh index 974753c672..e164b4123b 100755 --- a/scripts/exgdas_enkf_update.sh +++ b/scripts/exgdas_enkf_update.sh @@ -123,7 +123,9 @@ CONVINFO=${CONVINFO:-${FIXgsi}/global_convinfo.txt} OZINFO=${OZINFO:-${FIXgsi}/global_ozinfo.txt} SCANINFO=${SCANINFO:-${FIXgsi}/global_scaninfo.txt} HYBENSINFO=${HYBENSINFO:-${FIXgsi}/global_hybens_info.l${LEVS_ENKF}.txt} -ANAVINFO=${ANAVINFO:-${FIXgsi}/global_anavinfo.l${LEVS_ENKF}.txt} +#ANAVINFO=${ANAVINFO:-${FIXgsi}/global_anavinfo.l${LEVS_ENKF}.txt} +# Use the following for v16 workflow +ANAVINFO=${ANAVINFO:-${FIXgsi}/global_anavinfo_allhydro.l${LEVS_ENKF}.txt} VLOCALEIG=${VLOCALEIG:-${FIXgsi}/vlocal_eig_l${LEVS_ENKF}.dat} ENKF_SUFFIX="s" diff --git a/scripts/exglobal_atmos_analysis.sh b/scripts/exglobal_atmos_analysis.sh index ca841e6e57..2cbac85acc 100755 --- a/scripts/exglobal_atmos_analysis.sh +++ b/scripts/exglobal_atmos_analysis.sh @@ -348,7 +348,9 @@ SATANGL=${SATANGL:-${FIXgsi}/global_satangbias.txt} SATINFO=${SATINFO:-${FIXgsi}/global_satinfo.txt} RADCLOUDINFO=${RADCLOUDINFO:-${FIXgsi}/cloudy_radiance_info.txt} ATMSFILTER=${ATMSFILTER:-${FIXgsi}/atms_beamwidth.txt} -ANAVINFO=${ANAVINFO:-${FIXgsi}/global_anavinfo.l${LEVS}.txt} +#ANAVINFO=${ANAVINFO:-${FIXgsi}/global_anavinfo.l${LEVS}.txt} +# Use the following for v16 +ANAVINFO=${ANAVINFO:-${FIXgsi}/global_anavinfo_allhydro.l${LEVS}.txt} CONVINFO=${CONVINFO:-${FIXgsi}/global_convinfo.txt} vqcdat=${vqcdat:-${FIXgsi}/vqctp001.dat} INSITUINFO=${INSITUINFO:-${FIXgsi}/global_insituinfo.txt} diff --git a/ush/hpssarch_gen.sh b/ush/hpssarch_gen.sh index 0184683a44..9d22b88ccd 100755 --- a/ush/hpssarch_gen.sh +++ b/ush/hpssarch_gen.sh @@ -77,7 +77,8 @@ if [ $type = "gfs" ]; then echo "${dirname}${head}gsistat " >>gfsa.txt echo "${dirname}${head}nsstbufr " >>gfsa.txt echo "${dirname}${head}prepbufr " >>gfsa.txt - echo "${dirname}${head}prepbufr_pre-qc " >>gfsa.txt + # prepbufr_pre-qc is not always there + [ -f ${dirname}${head}prepbufr_pre-qc ] && echo "${dirname}${head}prepbufr_pre-qc " >>gfsa.txt echo "${dirname}${head}prepbufr.acft_profiles " >>gfsa.txt echo "${dirname}${head}pgrb2.0p25.anl " >>gfsa.txt echo 
"${dirname}${head}pgrb2.0p25.anl.idx " >>gfsa.txt @@ -299,7 +300,8 @@ if [ $type = "gdas" ]; then fi echo "${dirname}${head}nsstbufr " >>gdas_restarta.txt echo "${dirname}${head}prepbufr " >>gdas_restarta.txt - echo "${dirname}${head}prepbufr_pre-qc " >>gdas_restarta.txt + # prepbufr_pre-qc is not always there + [ -f ${dirname}${head}prepbufr_pre-qc ] && echo "${dirname}${head}prepbufr_pre-qc " >>gdas_restarta.txt echo "${dirname}${head}prepbufr.acft_profiles " >>gdas_restarta.txt echo "${dirname}${head}abias " >>gdas_restarta.txt echo "${dirname}${head}abias_air " >>gdas_restarta.txt From 5b1297e0a4e1316925f2308fa136dbd67b23c754 Mon Sep 17 00:00:00 2001 From: Andrew Collard Date: Thu, 31 Oct 2024 19:52:32 +0000 Subject: [PATCH 17/22] Turn on optconv option - see GSI #375 --- scripts/exglobal_atmos_analysis.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/exglobal_atmos_analysis.sh b/scripts/exglobal_atmos_analysis.sh index 2cbac85acc..c44356d97e 100755 --- a/scripts/exglobal_atmos_analysis.sh +++ b/scripts/exglobal_atmos_analysis.sh @@ -83,6 +83,7 @@ cnvw_option=${cnvw_option:-".false."} # Observation usage options cao_check=${cao_check:-".true."} ta2tb=${ta2tb:-".true."} +optconv=${optconv:-0.06} # Diagnostic files options lobsdiag_forenkf=${lobsdiag_forenkf:-".false."} @@ -767,7 +768,7 @@ cat > gsiparm.anl << EOF lobsdiag_forenkf=$lobsdiag_forenkf, write_fv3_incr=$write_fv3_increment, nhr_anal=${IAUFHRS}, - ta2tb=${ta2tb}, + ta2tb=${ta2tb},optconv=${optconv}, $WRITE_INCR_ZERO $WRITE_ZERO_STRAT $WRITE_STRAT_EFOLD From 399cac01a6db047faa488eaac3c9158b6b5dd503 Mon Sep 17 00:00:00 2001 From: Andrew Collard Date: Fri, 1 Nov 2024 14:41:46 +0000 Subject: [PATCH 18/22] Restore observation monitoring capability --- .gitignore | 4 - scripts/exgdas_atmos_verfozn.sh | 84 ++---- scripts/exgdas_atmos_verfrad.sh | 207 +++++--------- scripts/exglobal_atmos_vminmon.sh | 76 +++++ sorc/build_all.sh | 15 + sorc/build_gsi_monitor.sh | 28 ++ sorc/checkout.sh | 15 +- sorc/link_fv3gfs.sh | 89 +++--- ush/minmon_xtrct_costs.pl | 230 ++++++++++++++++ ush/minmon_xtrct_gnorms.pl | 441 ++++++++++++++++++++++++++++++ ush/minmon_xtrct_reduct.pl | 87 ++++++ ush/ozn_xtrct.sh | 254 +++++++++++++++++ 12 files changed, 1275 insertions(+), 255 deletions(-) create mode 100755 scripts/exglobal_atmos_vminmon.sh create mode 100755 sorc/build_gsi_monitor.sh create mode 100755 ush/minmon_xtrct_costs.pl create mode 100755 ush/minmon_xtrct_gnorms.pl create mode 100755 ush/minmon_xtrct_reduct.pl create mode 100755 ush/ozn_xtrct.sh diff --git a/.gitignore b/.gitignore index 727e608cbe..3bb2af9b44 100644 --- a/.gitignore +++ b/.gitignore @@ -139,12 +139,8 @@ ush/global_chgres_driver.sh ush/global_cycle.sh ush/global_cycle_driver.sh ush/link_crtm_fix.sh -ush/minmon_xtrct_costs.pl -ush/minmon_xtrct_gnorms.pl -ush/minmon_xtrct_reduct.pl ush/mkwfsgbl.sh ush/mod_icec.sh -ush/ozn_xtrct.sh ush/radmon_ck_stdout.sh ush/radmon_err_rpt.sh ush/radmon_verf_angle.sh diff --git a/scripts/exgdas_atmos_verfozn.sh b/scripts/exgdas_atmos_verfozn.sh index 939b03a3d4..e681fc55c5 100755 --- a/scripts/exgdas_atmos_verfozn.sh +++ b/scripts/exgdas_atmos_verfozn.sh @@ -1,87 +1,44 @@ -#/bin/sh +#! /usr/bin/env bash -set -ax +source "${USHgfs}/preamble.sh" ################################################################################ -# exgdas_vrfyozn.sh +# exgdas_atmos_verfozn.sh # # This script runs the data extract/validation portion of the Ozone Monitor -# (OznMon) DA package. +# (OznMon) DA package. 
# ################################################################################ -export scr=exgdas_vrfyozn.sh - err=0 -#------------------------------------------------------------------------------- -# Set environment -# -export RUN_ENVIR=${RUN_ENVIR:-nco} -export NET=${NET:-gfs} -export RUN=${RUN:-gdas} -export envir=${envir:-prod} -export COMPONENT=${COMPONENT:-atmos} - -# Command line arguments -export PDY=${1:-${PDY:?}} -export cyc=${2:-${cyc:?}} - - -# Other variables -export SATYPE_FILE=${SATYPE_FILE:-$FIXgdas_ozn/gdas_oznmon_satype.txt} -export PDATE=${PDY}${cyc} -export DO_DATA_RPT=${DO_DATA_RPT:-1} -export NCP=${NCP:-/bin/cp} - - -#----------------------------------------------------------------- -# ensure work and TANK dirs exist, verify oznstat is available -# -export OZN_WORK_DIR=${OZN_WORK_DIR:-$(pwd)} - -if [[ ! -d ${OZN_WORK_DIR} ]]; then - mkdir $OZN_WORK_DIR -fi -cd $OZN_WORK_DIR - -if [[ ! -d ${TANKverf_ozn} ]]; then - mkdir -p $TANKverf_ozn -fi - -if [[ -s ${oznstat} ]]; then - echo ${oznstat} is available -fi - - - data_available=0 if [[ -s ${oznstat} ]]; then - data_available=1 + data_available=1 #------------------------------------------------------------------ - # Copy data files file to local data directory. - # Untar oznstat file. + # Copy data files file to local data directory. + # Untar oznstat file. #------------------------------------------------------------------ - $NCP $oznstat ./oznstat.$PDATE + ${NCP} "${oznstat}" "./oznstat.${PDY}${cyc}" - tar -xvf oznstat.$PDATE - rm oznstat.$PDATE + tar -xvf "oznstat.${PDY}${cyc}" + rm "oznstat.${PDY}${cyc}" netcdf=0 - count=`ls diag* | grep ".nc4" | wc -l` - if [ $count -gt 0 ] ; then + count=$(ls diag* | grep ".nc4" | wc -l) + if [ "${count}" -gt 0 ] ; then netcdf=1 - for filenc4 in `ls diag*nc4.gz`; do - file=`echo $filenc4 | cut -d'.' -f1-2`.gz - mv $filenc4 $file + for filenc4 in $(ls diag*nc4.gz); do + file=$(echo "${filenc4}" | cut -d'.' -f1-2).gz + mv "${filenc4}" "${file}" done fi - + export OZNMON_NETCDF=${netcdf} - ${HOMEoznmon}/ush/ozn_xtrct.sh + "${USHgfs}/ozn_xtrct.sh" err=$? else @@ -89,12 +46,5 @@ else err=1 fi - -if [[ "$VERBOSE" = "YES" ]]; then - echo "end exgdas_vrfyozn.sh, exit value = ${err}" -fi - - -set +x exit ${err} diff --git a/scripts/exgdas_atmos_verfrad.sh b/scripts/exgdas_atmos_verfrad.sh index 7aec25432f..bad8715acd 100755 --- a/scripts/exgdas_atmos_verfrad.sh +++ b/scripts/exgdas_atmos_verfrad.sh @@ -1,145 +1,90 @@ -#/bin/sh +#! /usr/bin/env bash + +source "${USHgfs}/preamble.sh" + ################################################################################ #### UNIX Script Documentation Block # . . -# Script name: exgdas_vrfyrad.sh +# Script name: exgdas_atmos_verfrad.sh # Script description: Runs data extract/validation for global radiance diag data # # Author: Ed Safford Org: NP23 Date: 2012-01-18 # -# Abstract: This script runs the data extract/validation portion of the -# RadMon package. +# Abstract: This script runs the data extract/validation portion of the +# RadMon package. 
# # Condition codes # 0 - no problem encountered # >0 - some problem encountered # ################################################################################ -scr=exgdas_vrfyrad.sh -echo "${scr} HAS STARTED" - -export VERBOSE=${VERBOSE:-"NO"} -if [[ "$VERBOSE" = "YES" ]] -then - set -x -fi - - -export RUN_ENVIR=${RUN_ENVIR:-nco} -export NET=${NET:-gfs} -export RUN=${RUN:-gdas} -export envir=${envir:-prod} -export COMPONENT=${COMPONENT:-atmos} - -# Command line arguments -export PDY=${1:-${PDY:?}} -export cyc=${2:-${cyc:?}} - -# Directories -export COM_IN=${COM_IN:-$(compath.py ${envir}/${NET}/${gfs_ver})} -export COMIN=${COMIN:-$COM_IN/${RUN}.${PDY}/${cyc}/$COMPONENT} - - -# Filenames -export biascr=${biascr:-$COMIN/gdas.t${cyc}z.abias} -export radstat=${radstat:-$COMIN/gdas.t${cyc}z.radstat} -export satype_file=${satype_file:-${FIXgdas}/gdas_radmon_satype.txt} - -# Other variables -export RAD_AREA=${RAD_AREA:-glb} -export MAKE_CTL=${MAKE_CTL:-1} -export MAKE_DATA=${MAKE_DATA:-1} -export USE_ANL=${USE_ANL:-1} -export PDATE=${PDY}${cyc} -export DO_DIAG_RPT=${DO_DIAG_RPT:-1} -export DO_DATA_RPT=${DO_DATA_RPT:-1} -export USE_MAIL=${USE_MAIL:-0} -export MAIL_TO=${MAIL_TO:-" "} -export MAIL_CC=${MAIL_CC:-" "} -export NCP=${NCP:-/bin/cp} - -########################################################################### -# ensure TANK dir exists, verify radstat and biascr are available -# -if [[ ! -d ${TANKverf_rad} ]]; then - mkdir -p $TANKverf_rad -fi - -if [[ "$VERBOSE" = "YES" ]]; then - if [[ -s ${radstat} ]]; then - echo ${radstat} is available - fi - if [[ -s ${biascr} ]]; then - echo ${biascr} is available - fi -fi -##################################################################### data_available=0 + if [[ -s ${radstat} && -s ${biascr} ]]; then - data_available=1 + data_available=1 #------------------------------------------------------------------ - # Copy data files file to local data directory. - # Untar radstat file. + # Copy data files file to local data directory. + # Untar radstat file. #------------------------------------------------------------------ - $NCP $biascr ./biascr.$PDATE - $NCP $radstat ./radstat.$PDATE + ${NCP} "${biascr}" "./biascr.${PDY}${cyc}" + ${NCP} "${radstat}" "./radstat.${PDY}${cyc}" - tar -xvf radstat.$PDATE - rm radstat.$PDATE + tar -xvf "radstat.${PDY}${cyc}" + rm "radstat.${PDY}${cyc}" #------------------------------------------------------------------ # SATYPE is the list of expected satellite/instrument sources - # in the radstat file. It should be stored in the $TANKverf - # directory. If it isn't there then use the $FIXgdas copy. In all - # cases write it back out to the radmon.$PDY directory. Add any + # in the radstat file. It should be stored in the $TANKverf + # directory. If it isn't there then use the gdas fix copy. In all + # cases write it back out to the radmon.$PDY directory. Add any # new sources to the list before writing back out. #------------------------------------------------------------------ - radstat_satype=`ls d*ges* | awk -F_ '{ print $2 "_" $3 }'` - if [[ "$VERBOSE" = "YES" ]]; then - echo $radstat_satype + radstat_satype=$(ls d*ges* | awk -F_ '{ print $2 "_" $3 }') + if [[ "${VERBOSE}" = "YES" ]]; then + echo "${radstat_satype}" fi - echo satype_file = $satype_file - + echo satype_file = "${satype_file}" + #------------------------------------------------------------------ - # Get previous cycle's date, and look for the satype_file. 
Using - # the previous cycle will get us the previous day's directory if + # Get previous cycle's date, and look for the satype_file. Using + # the previous cycle will get us the previous day's directory if # the cycle being processed is 00z. #------------------------------------------------------------------ - if [[ $cyc = "00" ]]; then + if [[ ${cyc} = "00" ]]; then use_tankdir=${TANKverf_radM1} else use_tankdir=${TANKverf_rad} fi - echo satype_file = $satype_file - export SATYPE=`cat ${satype_file}` - + echo satype_file = "${satype_file}" + export SATYPE=$(cat "${satype_file}") + #------------------------------------------------------------- - # Update the SATYPE if any new sat/instrument was - # found in $radstat_satype. Write the SATYPE contents back + # Update the SATYPE if any new sat/instrument was + # found in $radstat_satype. Write the SATYPE contents back # to $TANKverf/radmon.$PDY. #------------------------------------------------------------- satype_changes=0 - new_satype=$SATYPE + new_satype=${SATYPE} for type in ${radstat_satype}; do - test=`echo $SATYPE | grep $type | wc -l` + type_count=$(echo "${SATYPE}" | grep "${type}" | wc -l) - if [[ $test -eq 0 ]]; then - if [[ "$VERBOSE" = "YES" ]]; then - echo "Found $type in radstat file but not in SATYPE list. Adding it now." + if (( type_count == 0 )); then + if [[ "${VERBOSE}" = "YES" ]]; then + echo "Found ${type} in radstat file but not in SATYPE list. Adding it now." fi satype_changes=1 - new_satype="$new_satype $type" + new_satype="${new_satype} ${type}" fi done - + #------------------------------------------------------------------ # Rename the diag files and uncompress #------------------------------------------------------------------ @@ -147,46 +92,45 @@ if [[ -s ${radstat} && -s ${biascr} ]]; then for type in ${SATYPE}; do - if [[ netcdf -eq 0 && -e diag_${type}_ges.${PDATE}.nc4.${Z} ]]; then + if (( netcdf == 0 )) && [[ -e "diag_${type}_ges.${PDY}${cyc}.nc4.${Z}" ]]; then netcdf=1 fi - - mv diag_${type}_ges.${PDATE}*.${Z} ${type}.${Z} - ${UNCOMPRESS} ./${type}.${Z} - - if [[ $USE_ANL -eq 1 ]]; then - mv diag_${type}_anl.${PDATE}*.${Z} ${type}_anl.${Z} - ${UNCOMPRESS} ./${type}_anl.${Z} + + if [[ $(find . -maxdepth 1 -type f -name "diag_${type}_ges.${PDY}${cyc}*.${Z}" | wc -l) -gt 0 ]]; then + mv "diag_${type}_ges.${PDY}${cyc}"*".${Z}" "${type}.${Z}" + ${UNCOMPRESS} "./${type}.${Z}" + else + echo "WARNING: diag_${type}_ges.${PDY}${cyc}*.${Z} not available, skipping" + fi + + if [[ ${USE_ANL} -eq 1 ]]; then + if [[ $(find . -maxdepth 1 -type f -name "diag_${type}_anl.${PDY}${cyc}*.${Z}" | wc -l) -gt 0 ]]; then + mv "diag_${type}_anl.${PDY}${cyc}"*".${Z}" "${type}_anl.${Z}" + ${UNCOMPRESS} "./${type}_anl.${Z}" + else + echo "WARNING: diag_${type}_anl.${PDY}${cyc}*.${Z} not available, skipping" + fi fi done - export RADMON_NETCDF=$netcdf + export RADMON_NETCDF=${netcdf} #------------------------------------------------------------------ - # Run the child sccripts. + # Run the child scripts. #------------------------------------------------------------------ - ${USHradmon}/radmon_verf_angle.sh ${PDATE} + "${USHgfs}/radmon_verf_angle.sh" rc_angle=$? - ${USHradmon}/radmon_verf_bcoef.sh ${PDATE} + "${USHgfs}/radmon_verf_bcoef.sh" rc_bcoef=$? - ${USHradmon}/radmon_verf_bcor.sh ${PDATE} + "${USHgfs}/radmon_verf_bcor.sh" rc_bcor=$? - ${USHradmon}/radmon_verf_time.sh ${PDATE} + "${USHgfs}/radmon_verf_time.sh" rc_time=$? 
- #-------------------------------------- - # optionally run clean_tankdir script - # - if [[ ${CLEAN_TANKVERF} -eq 1 ]]; then - ${USHradmon}/clean_tankdir.sh glb 60 - rc_clean_tankdir=$? - echo "rc_clean_tankdir = $rc_clean_tankdir" - fi - fi @@ -197,32 +141,25 @@ fi err=0 if [[ ${data_available} -ne 1 ]]; then err=1 -elif [[ $rc_angle -ne 0 ]]; then - err=$rc_angle -elif [[ $rc_bcoef -ne 0 ]]; then - err=$rc_bcoef -elif [[ $rc_bcor -ne 0 ]]; then - err=$rc_bcor -elif [[ $rc_time -ne 0 ]]; then - err=$rc_time +elif [[ ${rc_angle} -ne 0 ]]; then + err=${rc_angle} +elif [[ ${rc_bcoef} -ne 0 ]]; then + err=${rc_bcoef} +elif [[ ${rc_bcor} -ne 0 ]]; then + err=${rc_bcor} +elif [[ ${rc_time} -ne 0 ]]; then + err=${rc_time} fi ##################################################################### # Restrict select sensors and satellites export CHGRP_CMD=${CHGRP_CMD:-"chgrp ${group_name:-rstprod}"} rlist="saphir" -for rtype in $rlist; do - ${CHGRP_CMD} $TANKverf_rad/*${rtype}* +for rtype in ${rlist}; do + if compgen -G "${TANKverf_rad}/"*"${rtype}"* > /dev/null; then + ${CHGRP_CMD} "${TANKverf_rad}/"*"${rtype}"* + fi done - -if [[ "$VERBOSE" = "YES" ]]; then - echo "end exgdas_vrfyrad.sh, exit value = ${err}" -fi - -echo "${scr} HAS ENDED" - - -set +x exit ${err} diff --git a/scripts/exglobal_atmos_vminmon.sh b/scripts/exglobal_atmos_vminmon.sh new file mode 100755 index 0000000000..b4307c8af9 --- /dev/null +++ b/scripts/exglobal_atmos_vminmon.sh @@ -0,0 +1,76 @@ +#! /usr/bin/env bash + +source "${USHgfs}/preamble.sh" + +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: exglobal_atmos_vminmon.sh +# Script description: Runs data extract/validation for GSI normalization diag data +# +# Author: Ed Safford Org: NP23 Date: 2015-04-10 +# +# Abstract: This script runs the data extract/validation portion of the +# MinMon package. +# +# Condition codes +# 0 - no problem encountered +# >0 - some problem encountered +# +################################################################################ + +data_available=0 + +if [[ -s ${gsistat} ]]; then + + data_available=1 + + #----------------------------------------------------------------------- + # Copy the $MINMON_SUFFIX.gnorm_data.txt file to the working directory + # It's ok if it doesn't exist; we'll create a new one if needed. + # + # Note: The logic below is to accomodate two different data storage + # methods. Some parallels (and formerly ops) dump all MinMon data for + # a given day in the same directory (if condition). Ops now separates + # data into ${cyc} subdirectories (elif condition). + #----------------------------------------------------------------------- + if [[ -s ${M_TANKverf}/gnorm_data.txt ]]; then + ${NCP} "${M_TANKverf}/gnorm_data.txt" gnorm_data.txt + elif [[ -s ${M_TANKverfM1}/gnorm_data.txt ]]; then + ${NCP} "${M_TANKverfM1}/gnorm_data.txt" gnorm_data.txt + fi + + + #------------------------------------------------------------------ + # Run the child sccripts. + #------------------------------------------------------------------ + "${USHgfs}/minmon_xtrct_costs.pl" "${MINMON_SUFFIX}" "${PDY}" "${cyc}" "${gsistat}" + rc_costs=$? + echo "rc_costs = ${rc_costs}" + + "${USHgfs}/minmon_xtrct_gnorms.pl" "${MINMON_SUFFIX}" "${PDY}" "${cyc}" "${gsistat}" + rc_gnorms=$? + echo "rc_gnorms = ${rc_gnorms}" + + "${USHgfs}/minmon_xtrct_reduct.pl" "${MINMON_SUFFIX}" "${PDY}" "${cyc}" "${gsistat}" + rc_reduct=$? 
+ echo "rc_reduct = ${rc_reduct}" + +fi + +##################################################################### +# Postprocessing + +err=0 +if [[ ${data_available} -ne 1 ]]; then + err=1 +elif [[ ${rc_costs} -ne 0 ]]; then + err=${rc_costs} +elif [[ ${rc_gnorms} -ne 0 ]]; then + err=${rc_gnorms} +elif [[ ${rc_reduct} -ne 0 ]]; then + err=${rc_reduct} +fi + +exit "${err}" + diff --git a/sorc/build_all.sh b/sorc/build_all.sh index 8433a2fd2a..bd35a3be72 100755 --- a/sorc/build_all.sh +++ b/sorc/build_all.sh @@ -97,6 +97,21 @@ fi ((err+=$rc)) } +#------------------------------------ +# build gsi_monitor +#------------------------------------ +$Build_gsi && { +echo " .... Building gsi .... " +./build_gsi_monitor.sh > $logs_dir/build_gsi_monitor.log 2>&1 +rc=$? +if [[ $rc -ne 0 ]] ; then + echo "Fatal error in building gsi_monitor." + echo "The log file is in $logs_dir/build_gsi_monitor.log" +fi +((err+=$rc)) +} + +#------------------------------------ #------------------------------------ #------------------------------------ # build ncep_post diff --git a/sorc/build_gsi_monitor.sh b/sorc/build_gsi_monitor.sh new file mode 100755 index 0000000000..e917bd03df --- /dev/null +++ b/sorc/build_gsi_monitor.sh @@ -0,0 +1,28 @@ +#! /usr/bin/env bash +set -eux + +OPTIND=1 +while getopts ":j:dv" option; do + case "${option}" in + d) BUILD_TYPE="Debug";; + j) BUILD_JOBS="${OPTARG}";; + v) BUILD_VERBOSE="YES";; + :) + echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" + usage + ;; + *) + echo "[${BASH_SOURCE[0]}]: Unrecognized option: ${option}" + usage + ;; + esac +done +shift $((OPTIND-1)) + +BUILD_TYPE=${BUILD_TYPE:-"Release"} \ +BUILD_VERBOSE=${BUILD_VERBOSE:-"NO"} \ +BUILD_JOBS=${BUILD_JOBS:-8} \ +UTIL_OPTS="-DBUILD_UTIL_ENKF_GFS=ON -DBUILD_UTIL_NCIO=ON" \ +"./gsi_monitor.fd/ush/build.sh" + +exit diff --git a/sorc/checkout.sh b/sorc/checkout.sh index dccb95db05..658014b9ce 100755 --- a/sorc/checkout.sh +++ b/sorc/checkout.sh @@ -47,7 +47,7 @@ fi echo gsi_utils checkout ... if [[ ! -d gsi_utils.fd ]] ; then - rm -f ${topdir}/checkout-gsi.log + rm -f ${topdir}/checkout-gsi_utils.log # Check out a version before the changes for Thompson microphysics were introduced. git clone https://github.com/NOAA-EMC/GSI-Utils.git gsi_utils.fd >> ${topdir}/checkout-gsi_utils.log 2>&1 cd gsi_utils.fd @@ -57,6 +57,19 @@ else echo 'Skip. Directory gsi_utils.fd already exists.' fi +echo gsi_monitor checkout ... +if [[ ! -d gsi_monitor.fd ]] ; then + rm -f ${topdir}/checkout-gsi_monitor.log +# Check out a version before the changes for Thompson microphysics were introduced. + git clone https://github.com/NOAA-EMC/GSI-Monitor.git gsi_monitor.fd >> ${topdir}/checkout-gsi_monitor.log 2>&1 + cd gsi_monitor.fd + git checkout 94588d63ca636269474bf865603e0ccfeb4dc049 + cd ${topdir} +else + echo 'Skip. Directory gsi_monitor.fd already exists.' +fi + + echo gldas checkout ... if [[ ! -d gldas.fd ]] ; then rm -f ${topdir}/checkout-gldas.log diff --git a/sorc/link_fv3gfs.sh b/sorc/link_fv3gfs.sh index e3ab1a9c57..ced1a237fc 100755 --- a/sorc/link_fv3gfs.sh +++ b/sorc/link_fv3gfs.sh @@ -124,43 +124,38 @@ cd ${pwd}/../ush ||exit 8 #------------------------------ #--add DA Monitor file #------------------------------ -#cd ${pwd}/../fix ||exit 8 -# [[ -d gdas ]] && rm -rf gdas -# mkdir -p gdas -# cd gdas -# $LINK ../../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gdas/fix/gdas_minmon_cost.txt . -# $LINK ../../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gdas/fix/gdas_minmon_gnorm.txt . 
-# $LINK ../../sorc/gsi.fd/util/Ozone_Monitor/nwprod/gdas_oznmon/fix/gdas_oznmon_base.tar . -# $LINK ../../sorc/gsi.fd/util/Ozone_Monitor/nwprod/gdas_oznmon/fix/gdas_oznmon_satype.txt . -# $LINK ../../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_base.tar . -# $LINK ../../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_satype.txt . -# $LINK ../../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_scaninfo.txt . -#cd ${pwd}/../jobs ||exit 8 -# $LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gdas/jobs/JGDAS_ATMOS_VMINMON . -# $LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gfs/jobs/JGFS_ATMOS_VMINMON . -# $LINK ../sorc/gsi.fd/util/Ozone_Monitor/nwprod/gdas_oznmon/jobs/JGDAS_ATMOS_VERFOZN . -# $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon/jobs/JGDAS_ATMOS_VERFRAD . -#cd ${pwd}/../parm ||exit 8 -# [[ -d mon ]] && rm -rf mon -# mkdir -p mon -# cd mon -# $LINK ../../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon/parm/gdas_radmon.parm da_mon.parm -# $LINK ../../sorc/gsi.fd/util/Ozone_Monitor/nwprod/gdas_oznmon/parm/gdas_oznmon.parm . +cd ${pwd}/../fix ||exit 8 + [[ -d gdas ]] && rm -rf gdas + mkdir -p gdas + cd gdas + $LINK ../../sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gdas/fix/gdas_minmon_cost.txt . + $LINK ../../sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gdas/fix/gdas_minmon_gnorm.txt . + $LINK ../../sorc/gsi_monitor.fd/src/Ozone_Monitor/nwprod/gdas_oznmon/fix/gdas_oznmon_base.tar . + $LINK ../../sorc/gsi_monitor.fd/src/Ozone_Monitor/nwprod/gdas_oznmon/fix/gdas_oznmon_satype.txt . + $LINK ../../sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_base.tar . + $LINK ../../sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_satype.txt . + $LINK ../../sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_scaninfo.txt . +cd ${pwd}/../parm ||exit 8 + [[ -d mon ]] && rm -rf mon + mkdir -p mon + cd mon + $LINK ../../sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/parm/gdas_radmon.parm da_mon.parm + $LINK ../../sorc/gsi_monitor.fd/src/Ozone_Monitor/nwprod/gdas_oznmon/parm/gdas_oznmon.parm . #cd ${pwd}/../scripts ||exit 8 -# $LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gdas/scripts/exgdas_atmos_vminmon.sh . -# $LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gfs/scripts/exgfs_atmos_vminmon.sh . -# $LINK ../sorc/gsi.fd/util/Ozone_Monitor/nwprod/gdas_oznmon/scripts/exgdas_atmos_verfozn.sh . -# $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon/scripts/exgdas_atmos_verfrad.sh . +# $LINK ../sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gdas/scripts/exgdas_atmos_vminmon.sh . +# $LINK ../sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gfs/scripts/exgfs_atmos_vminmon.sh . +# $LINK ../sorc/gsi_monitor.fd/src/Ozone_Monitor/nwprod/gdas_oznmon/scripts/exgdas_atmos_verfozn.sh . +# $LINK ../sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/scripts/exgdas_atmos_verfrad.sh . #cd ${pwd}/../ush ||exit 8 -# $LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/minmon_shared/ush/minmon_xtrct_costs.pl . -# $LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/minmon_shared/ush/minmon_xtrct_gnorms.pl . -# $LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/minmon_shared/ush/minmon_xtrct_reduct.pl . -# $LINK ../sorc/gsi.fd/util/Ozone_Monitor/nwprod/oznmon_shared/ush/ozn_xtrct.sh . -# $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_err_rpt.sh . 
-# $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_verf_angle.sh . -# $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_verf_bcoef.sh . -# $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_verf_bcor.sh . -# $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_verf_time.sh . +#C $LINK ../sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/minmon_shared/ush/minmon_xtrct_costs.pl . +#C $LINK ../sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/minmon_shared/ush/minmon_xtrct_gnorms.pl . +#C $LINK ../sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/minmon_shared/ush/minmon_xtrct_reduct.pl . +#C $LINK ../sorc/gsi_monitor.fd/src/Ozone_Monitor/nwprod/oznmon_shared/ush/ozn_xtrct.sh . + $LINK ../sorc/gsi_monitor.fd/src/Radiance_Monitor/data_extract/ush/radmon_err_rpt.sh . +#C $LINK ../sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_verf_angle.sh . +#C $LINK ../sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_verf_bcoef.sh . +#C $LINK ../sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_verf_bcor.sh . +#C $LINK ../sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_verf_time.sh . #------------------------------ @@ -196,13 +191,11 @@ for ufs_utilsexe in \ $LINK ../sorc/ufs_utils.fd/exec/$ufs_utilsexe . done -#for gsiexe in calc_analysis.x calc_increment_ens_ncio.x calc_increment_ens.x \ -# getsfcensmeanp.x getsigensmeanp_smooth.x getsigensstatp.x enkf.x gsi.x \ -# interp_inc.x ncdiag_cat_serial.x oznmon_horiz.x oznmon_time.x radmon_angle.x \ -# radmon_bcoef.x radmon_bcor.x radmon_time.x recentersigp.x;do -# [[ -s $gsiexe ]] && rm -f $gsiexe -# $LINK ../sorc/gsi_utils.fd/install/bin/$gsiexe . -#done +for monexe in oznmon_horiz.x oznmon_time.x radmon_angle.x \ + radmon_bcoef.x radmon_bcor.x radmon_time.x;do + [[ -s $monexe ]] && rm -f $monexe + $LINK ../sorc/gsi_monitor.fd/install/bin/$monexe . 
+done for gsiexe in enkf.x gsi.x; do [[ -s $gsiexe ]] && rm -f $gsiexe @@ -256,22 +249,22 @@ cd ${pwd}/../sorc || exit 8 $SLINK gsi.fd/src/ncdiag ncdiag_cat.fd [[ -d oznmon_horiz.fd ]] && rm -rf oznmon_horiz.fd - $SLINK gsi.fd/util/Ozone_Monitor/nwprod/oznmon_shared/sorc/oznmon_horiz.fd oznmon_horiz.fd + $SLINK gsi_monitor.fd/src/Ozone_Monitor/nwprod/oznmon_shared/sorc/oznmon_horiz.fd oznmon_horiz.fd [[ -d oznmon_time.fd ]] && rm -rf oznmon_time.fd - $SLINK gsi.fd/util/Ozone_Monitor/nwprod/oznmon_shared/sorc/oznmon_time.fd oznmon_time.fd + $SLINK gsi_monitor.fd/src/Ozone_Monitor/nwprod/oznmon_shared/sorc/oznmon_time.fd oznmon_time.fd [[ -d radmon_angle.fd ]] && rm -rf radmon_angle.fd - $SLINK gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radang.fd radmon_angle.fd + $SLINK gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radang.fd radmon_angle.fd [[ -d radmon_bcoef.fd ]] && rm -rf radmon_bcoef.fd - $SLINK gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radbcoef.fd radmon_bcoef.fd + $SLINK gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radbcoef.fd radmon_bcoef.fd [[ -d radmon_bcor.fd ]] && rm -rf radmon_bcor.fd - $SLINK gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radbcor.fd radmon_bcor.fd + $SLINK gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radbcor.fd radmon_bcor.fd [[ -d radmon_time.fd ]] && rm -rf radmon_time.fd - $SLINK gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radtime.fd radmon_time.fd + $SLINK gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radtime.fd radmon_time.fd [[ -d recentersigp.fd ]] && rm -rf recentersigp.fd $SLINK gsi.fd/util/EnKF/gfs/src/recentersigp.fd recentersigp.fd diff --git a/ush/minmon_xtrct_costs.pl b/ush/minmon_xtrct_costs.pl new file mode 100755 index 0000000000..c56ac3bdad --- /dev/null +++ b/ush/minmon_xtrct_costs.pl @@ -0,0 +1,230 @@ +#!/usr/bin/env perl + +#--------------------------------------------------------------------------- +# minmon_xtrct_costs.pl +# +# Extract cost data from gsistat file and load into cost +# and cost term files. 
+#--------------------------------------------------------------------------- + +use strict; +use warnings; + +#---------------------------------------------- +# subroutine to trim white space from strings +#---------------------------------------------- +sub trim { my $s = shift; $s =~ s/^\s+|\s+$//g; return $s }; + + +#--------------------------- +# +# Main routine begins here +# +#--------------------------- + +if ($#ARGV != 3 ) { + print "usage: minmon_xtrct_costs.pl SUFFIX PDY cyc infile\n"; + exit; +} +my $suffix = $ARGV[0]; + +my $pdy = $ARGV[1]; +my $cyc = $ARGV[2]; +my $infile = $ARGV[3]; + +my $use_costterms = 0; +my $no_data = 0.00; + +my $scr = "minmon_xtrct_costs.pl"; +print "$scr has started\n"; + + +my $rc = 0; +my $cdate = sprintf '%s%s', $pdy, $cyc; + +if( (-e $infile) ) { + + my $found_cost = 0; + my $found_costterms = 0; + my @cost_array; + my @jb_array; + my @jo_array; + my @jc_array; + my @jl_array; + my @term_array; + my @all_cost_terms; + + my $cost_target; + my $cost_number; + my $costterms_target; + my $jb_number = 5; + my $jo_number = 6; + my $jc_number = 7; + my $jl_number = 8; + + my $costfile = $ENV{"mm_costfile"}; + + if( (-e $costfile) ) { + open( COSTFILE, "<${costfile}" ) or die "Can't open ${costfile}: $!\n"; + my $line; + + while( $line = <COSTFILE> ) { + if( $line =~ /cost_target/ ) { + my @termsline = split( /:/, $line ); + $cost_target = $termsline[1]; + } elsif( $line =~ /cost_number/ ) { + my @termsline = split( /:/, $line ); + $cost_number = $termsline[1]; + } elsif( $line =~ /costterms_target/ ){ + my @termsline = split( /:/, $line ); + $costterms_target = $termsline[1]; + } + } + close( COSTFILE ); + } else { + $rc = 2; + } + + #------------------------------------------------------------------------ + # Open the infile and search for the $costterms_target and $cost_target + # strings. If found, parse out the cost information and push into + # holding arrays. + #------------------------------------------------------------------------ + if( $rc == 0 ) { + open( INFILE, "<${infile}" ) or die "Can't open ${infile}: $!\n"; + + my $line; + my $term_ctr=0; + + while( $line = <INFILE> ) { + + if( $line =~ /$costterms_target/ ) { + my @termsline = split( / +/, $line ); + push( @jb_array, $termsline[$jb_number] ); + push( @jo_array, $termsline[$jo_number] ); + push( @jc_array, $termsline[$jc_number] ); + push( @jl_array, $termsline[$jl_number] ); + $use_costterms = 1; + } + + if( $line =~ /$cost_target/ ) { + my @costline = split( / +/, $line ); + push( @cost_array, $costline[$cost_number] ); + } + + if( $term_ctr > 0 ) { + my @termline = split( / +/, $line ); + + if ( $term_ctr < 10 ) { + push( @term_array, trim($termline[1]) ); + push( @term_array, trim($termline[2]) ); + push( @term_array, trim($termline[3]) ); + $term_ctr++; + } else { + push( @term_array, trim($termline[1]) ); + push( @term_array, trim($termline[2]) ); + $term_ctr = 0; + } + + }elsif ( $line =~ "J=" && $line !~ "EJ=" ) { + my @termline = split( / +/, $line ); + push( @term_array, trim($termline[2]) ); + push( @term_array, trim($termline[3]) ); + push( @term_array, trim($termline[4]) ); + $term_ctr = 1; + } + } + + close( INFILE ); + + + #---------------------------------------------- + # move cost_array into all_costs by iteration + #---------------------------------------------- + my @all_costs; + for my $i (0 .. 
$#cost_array) { + my $iterline; + if( $use_costterms == 1 ){ + $iterline = sprintf ' %d,%e,%e,%e,%e,%e%s', + $i, $cost_array[$i], $jb_array[$i], $jo_array[$i], + $jc_array[$i], $jl_array[$i], "\n"; + } + else { + $iterline = sprintf ' %d,%e,%e,%e,%e,%e%s', + $i, $cost_array[$i], $no_data, $no_data, + $no_data, $no_data, "\n"; + } + + push( @all_costs, $iterline ); + } + + #--------------------------------------------------- + # move term_array into all_cost_terms by iteration + #--------------------------------------------------- + if( @term_array > 0 ) { + my $nterms = 32; + my $max_iter = ($#term_array+1)/$nterms; + my $niter = $max_iter -1; + + for my $iter (0 .. $niter ) { + my $step = $iter * $nterms; + my $iterline = sprintf '%d, %e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e%s', + $iter, $term_array[$step], $term_array[$step+1], $term_array[$step+2], + $term_array[$step+3], $term_array[$step+4], $term_array[$step+5], + $term_array[$step+6], $term_array[$step+7], $term_array[$step+8], + $term_array[$step+9], $term_array[$step+10], $term_array[$step+11], + $term_array[$step+12], $term_array[$step+13], $term_array[$step+14], + $term_array[$step+15], $term_array[$step+16], $term_array[$step+17], + $term_array[$step+18], $term_array[$step+19], $term_array[$step+20], + $term_array[$step+21], $term_array[$step+22], $term_array[$step+23], + $term_array[$step+24], $term_array[$step+25], $term_array[$step+26], + $term_array[$step+27], $term_array[$step+28], $term_array[$step+29], + $term_array[$step+30], $term_array[$step+31], "\n"; + push( @all_cost_terms, $iterline ); + } + } + + #------------------------------------------ + # write all_costs array to costs.txt file + #------------------------------------------ + my $filename2 = "${cdate}.costs.txt"; + if( @all_costs > 0 ) { + open( OUTFILE, ">$filename2" ) or die "Can't open ${filename2}: $!\n"; + print OUTFILE @all_costs; + close( OUTFILE ); + } + + #----------------------------------------------------- + # write all_cost_terms array to costs_terms.txt file + #----------------------------------------------------- + my $filename3 = "${cdate}.cost_terms.txt"; + if( @all_cost_terms > 0 ) { + open( OUTFILE, ">$filename3" ) or die "Can't open ${filename3}: $!\n"; + print OUTFILE @all_cost_terms; + close( OUTFILE ); + } + + #-------------------------- + # move files to $M_TANKverf + #-------------------------- + my $tankdir = $ENV{"M_TANKverf"}; + if(! -d $tankdir) { + system( "mkdir -p $tankdir" ); + } + + if( -e $filename2 ) { + my $newfile2 = "${tankdir}/${filename2}"; + system("cp -f $filename2 $newfile2"); + } + if( -e $filename3 ) { + my $newfile3 = "${tankdir}/${filename3}"; + system("cp -f $filename3 $newfile3"); + } + + } # $rc still == 0 after reading gmon_cost.txt +} +else { # $infile does not exist + $rc = 1; +} + +print "$scr has ended, return code = $rc \n" diff --git a/ush/minmon_xtrct_gnorms.pl b/ush/minmon_xtrct_gnorms.pl new file mode 100755 index 0000000000..ac83c08cd3 --- /dev/null +++ b/ush/minmon_xtrct_gnorms.pl @@ -0,0 +1,441 @@ +#!/usr/bin/env perl + +use strict; +use warnings; +use List::MoreUtils 'true'; +use List::MoreUtils 'first_index'; +use List::MoreUtils 'last_index'; + +#--------------------------------------------------------------------------- +# minmon_xtrct_gnorms.pl +# +# Update the gnorm_data.txt file with data from a new cycle. Add +# this new data to the last line of the gnorm_data.txt file. 
+# +# Note: If the gnorm_data.txt file does not exist, it will be created. +# +# The gnorm_data.txt file is plotted directly by the javascript on +# the GSI stats page. +#--------------------------------------------------------------------------- +sub updateGnormData { + my $cycle = $_[0]; + my $igrad = $_[1]; + my $fgnorm = $_[2]; + my $avg_gnorm = $_[3]; + my $min_gnorm = $_[4]; + my $max_gnorm = $_[5]; + my $suffix = $_[6]; + + my $rc = 0; + my @filearray; + + my $gdfile = "gnorm_data.txt"; + + my $outfile = "new_gnorm_data.txt"; + my $yr = substr( $cycle, 0, 4); + my $mon = substr( $cycle, 4, 2); + my $day = substr( $cycle, 6, 2); + my $hr = substr( $cycle, 8, 2); + + my $newln = sprintf ' %04d,%02d,%02d,%02d,%e,%e,%e,%e,%e%s', + $yr, $mon, $day, $hr, $igrad, $fgnorm, + $avg_gnorm, $min_gnorm, $max_gnorm, "\n"; + + #------------------------------------------------------------- + # attempt to locate the latest $gdfile and copy it locally + # + if( -e $gdfile ) { + open( INFILE, "<${gdfile}" ) or die "Can't open ${gdfile}: $!\n"; + + @filearray = <INFILE>; + +# This is the mechanism that limits the data to 30 days worth. Should I +# keep it or let the transfer script(s) truncate? 6/12/16 -- I'm going to keep +# it. I can add this as a later change once I add a user mechanism to vary the +# amount of data plotted (on the fly). + + my $cyc_interval = $ENV{'CYCLE_INTERVAL'}; + if( $cyc_interval eq "" ) { + $cyc_interval = 6; + } + + my $max_cyc = 119; # default 30 days worth of data = 120 cycles + # If CYCLE_INTERVAL is other than "" or 6 + # then set the $max_cyc using that interval + if( $cyc_interval != 6 && $cyc_interval != 0 ) { + my $cyc_per_day = 24 / $cyc_interval; + $max_cyc = (30 * $cyc_per_day) - 1; + } + + while( $#filearray > $max_cyc ) { + shift( @filearray ); + } + close( INFILE ); + } + + # Here is the problem Russ encountered after re-running the MinMon: + # If the cycle time in $newln is the same as an existing record in + # *.gnorm_data.txt then we end up with 2+ rows for the same cycle time. + # In that case $newln should replace the first existing line + # in @filearray and all other lines that might match should be deleted. + # Else when the cycle time doesn't already exist (the expected condition) + # it should be pushed into @filearray. + + # algorithm: + # ========= + # Establish $count of matches on "$yr,$mon,$day,$hr" + # if $count > 0 + # while $count > 1 + # get last_index and remove with splice + # replace first_index with $newln + # else + # push $newln + # + my $srch_strng = "$yr,$mon,$day,$hr"; + my $count = true { /$srch_strng/ } @filearray; + + if( $count > 0 ) { + while( $count > 1 ) { + my $l_index = last_index { /$srch_strng/ } @filearray; + splice @filearray, $l_index, 1; + $count = true { /$srch_strng/ } @filearray; + } + my $f_index = first_index { /$srch_strng/ } @filearray; + splice @filearray, $f_index, 1, $newln; + } + else { + push( @filearray, $newln ); + } + + open( OUTFILE, ">$outfile" ) or die "Can't open ${$outfile}: $!\n"; + print OUTFILE @filearray; + close( OUTFILE ); + + system("cp -f $outfile $gdfile"); + +} + +#--------------------------------------------------------------------------- +# makeErrMsg +# +# Apply a gross check on the final value of the gnorm for a specific +# cycle. If the final_gnorm value is greater than the gross_check value +# then put that in the error message file. Also check for resets or a +# premature halt, and journal those events to the error message file too. 
+# +# Note to self: reset_iter array is passed by reference +#--------------------------------------------------------------------------- +sub makeErrMsg { + my $suffix = $_[0]; + my $cycle = $_[1]; + my $final_gnorm = $_[2]; + my $stop_flag = $_[3]; + my $stop_iter = $_[4]; + my $reset_flag = $_[5]; + my $reset_iter = $_[6]; #reset iteration array + my $infile = $_[7]; + my $gross_check = $_[8]; + + my $mail_msg =""; + my $out_file = "${cycle}.errmsg.txt"; + + + if( $stop_flag > 0 ) { + my $stop_msg = " Gnorm check detected premature iteration stop: suffix = $suffix, cycle = $cycle, iteration = $stop_iter"; + $mail_msg .= $stop_msg; + } + + if( $reset_flag > 0 ) { + my $ctr=0; + my $reset_msg = "\n Gnorm check detected $reset_flag reset(s): suffix = $suffix, cycle = $cycle"; + $mail_msg .= $reset_msg; + $mail_msg .= "\n"; + $mail_msg .= " Reset(s) detected in iteration(s): @{$reset_iter}[$ctr] \n"; + + my $arr_size = @{$reset_iter}; + for( $ctr=1; $ctr < $arr_size; $ctr++ ) { + $mail_msg .= " @{$reset_iter}[$ctr]\n"; + } + } + + if( $final_gnorm >= $gross_check ){ + my $gnorm_msg = " Final gnorm gross check failure: suffix = $suffix, cycle = $cycle, final gnorm = $final_gnorm "; + + $mail_msg .= $gnorm_msg; + } + + if( length $mail_msg > 0 ){ + my $file_msg = " File source for report is: $infile"; + $mail_msg .= $file_msg; + } + + if( length $mail_msg > 0 ){ + my $mail_link = "http://www.emc.ncep.noaa.gov/gmb/gdas/gsi_stat/index.html?src=$suffix&typ=gnorm&cyc=$cycle"; + open( OUTFILE, ">$out_file" ) or die "Can't open ${$out_file}: $!\n"; + print OUTFILE $mail_msg; + print OUTFILE "\n\n $mail_link"; + close( OUTFILE ); + } +} + + +#--------------------------------------------------------------------------- +# +# Main routine begins here +# +#--------------------------------------------------------------------------- + +if ($#ARGV != 3 ) { + print "usage: minmon_xtrct_gnorms.pl SUFFIX pdy cyc infile \n"; + exit; +} + + +my $suffix = $ARGV[0]; +my $pdy = $ARGV[1]; +my $cyc = $ARGV[2]; +my $infile = $ARGV[3]; + + +my $scr = "minmon_xtrct_gnorms.pl"; +print "$scr Has Started\n"; + +# +# This needs to be redesigned to get the gnorm value from the gsistat file +# using the line that starts "cost,grad,step,b,step?:". The line formerly +# used for the gnorm and reduction values may not be available if the the +# verbose output flag is set to FALSE. 
+# +# So, using the grad value on that line: +# gnorm[i] = (grad[i]**2)/(grad[0]**2) +# reduct[i] = sqrt(gnorm) + +my $igrad_target; +my $igrad_number; +my $expected_gnorms; +my $gross_check_val; + +my $rc = 0; +my $cdate = sprintf '%s%s', $pdy, $cyc; + +my $gnormfile = $ENV{"mm_gnormfile"}; + + +if( (-e $gnormfile) ) { + open( GNORMFILE, "<${gnormfile}" ) or die "Can't open ${gnormfile}: $!\n"; + my $line; + + while( $line = <GNORMFILE> ) { + if( $line =~ /igrad_target/ ) { + my @termsline = split( /:/, $line ); + $igrad_target = $termsline[1]; + } elsif( $line =~ /igrad_number/ ) { + my @termsline = split( /:/, $line ); + $igrad_number = $termsline[1]; + } elsif( $line =~ /expected_gnorms/ ){ + my @termsline = split( /:/, $line ); + $expected_gnorms = $termsline[1]; + } elsif( $line =~ /gross_check_val/ ){ + my @termsline = split( /:/, $line ); + $gross_check_val = $termsline[1]; + } + } + close( GNORMFILE ); +} else { + $rc = 4; +} + +if( $rc == 0 ) { + if( (-e $infile) ) { + open( INFILE, "<${infile}" ) or die "Can't open ${infile}: $!\n"; + + my $found_igrad = 0; + my $final_gnorm = 0.0; + my $igrad = 0.0; + my $header = 4; + my $header2 = 0; + my @gnorm_array; + my @last_10_gnorm; + + my $reset_flag = 0; + my $stop_flag = 0; + my $warn_str = "WARNING"; + my $stop_str = "Stopping"; + my $stop_iter = ""; + my $reset_str = "Reset"; + my @reset_iter; # reset iteration array + + my $stop_iter_flag = 0; + my $reset_iter_flag = 0; + my $line; + while( $line = <INFILE> ) { + + ############################################## + # if the reset_iter_flag is 1 then record the + # current outer & inner iteration number + ############################################## + if( $reset_iter_flag == 1 ) { + if( $line =~ /${igrad_target}/ ) { + my @iterline = split( / +/, $line ); + my $iter_str = $iterline[2] . "," . $iterline[3]; + push( @reset_iter, $iter_str); + $reset_iter_flag = 0; + } + } + + + if( $line =~ /${igrad_target}/ ) { + my @gradline = split( / +/, $line ); + + my $grad = $gradline[$igrad_number]; + + if( $found_igrad == 0 ){ + $igrad = $grad; + $found_igrad = 1; + } + + my $igrad_sqr = $igrad**2; + my $grad_sqr = $grad**2; + my $gnorm = $grad_sqr/$igrad_sqr; + + push( @gnorm_array, $gnorm ); + } + + + if( $line =~ /${warn_str}/ ) { + if( $line =~ /${stop_str}/ ) { + $stop_flag++; + $stop_iter_flag=1; + } + elsif( $line =~ /${reset_str}/ ){ + $reset_flag++; + $reset_iter_flag = 1; + } + } + + } + close( INFILE ); + + ######################################################################## + # If the stop_flag is >0 then record the last outer & inner + # iteration number. The trick is that it's the last iteration in the + # log file and we just passed it when we hit the stop warning message, + # so we have to reopen the file and get the last iteration number. + ######################################################################## + if( $stop_flag > 0 ) { + open( INFILE, "<${infile}" ) or die "Can't open ${infile}: $!\n"; + + my @lines = reverse <INFILE>; + foreach $line (@lines) { + if( $line =~ /${igrad_target}/ ){ + my @iterline = split( / +/, $line ); + $stop_iter = $iterline[2] . "," . $iterline[3]; + last; + } + } + close( INFILE ); + } + + + my @all_gnorm = @gnorm_array; + + ############################################################################## + ## + ## If the iterations were halted due to error then the @all_gnorm array won't + ## be the expected size. In that case we need to pad the array out with + ## RMISS values so GrADS won't choke when it tries to read the data file. 
+ ## + ## Note that we're padding @all_gnorm. The @gnorm_array is examined below + ## and we don't want to pad that and mess up the min/max calculation. + ## + ############################################################################### + my $arr_size = @all_gnorm; + + if( $arr_size < $expected_gnorms ) { + for( my $ctr = $arr_size; $ctr < $expected_gnorms; $ctr++ ) { + push( @all_gnorm, -999.0 ); + } + } + + my $sum_10_gnorm = 0.0; + my $min_gnorm = 9999999.0; + my $max_gnorm = -9999999.0; + my $avg_gnorm = 0.0; + + for( my $ctr = 9; $ctr >= 0; $ctr-- ) { + my $new_gnorm = pop( @gnorm_array ); + $sum_10_gnorm = $sum_10_gnorm + $new_gnorm; + if( $new_gnorm > $max_gnorm ) { + $max_gnorm = $new_gnorm; + } + if( $new_gnorm < $min_gnorm ) { + $min_gnorm = $new_gnorm; + } + if( $ctr == 9 ) { + $final_gnorm = $new_gnorm; + } + } + + $avg_gnorm = $sum_10_gnorm / 10; + + + ##################################################################### + # Update the gnorm_data.txt file with information on the + # initial gradient, final gnorm, and avg/min/max for the last 10 + # iterations. + ##################################################################### + updateGnormData( $cdate,$igrad,$final_gnorm,$avg_gnorm,$min_gnorm,$max_gnorm,$suffix ); + + + ##################################################################### + # Call makeErrMsg to build the error message file to record any + # abnormalities in the minimization. This file can be mailed by + # a calling script. + ##################################################################### + makeErrMsg( $suffix, $cdate, $final_gnorm, $stop_flag, $stop_iter, $reset_flag, \@reset_iter, $infile, $gross_check_val ); + + + ######################################################### + # write to GrADS ready output data file + # + # Note: this uses pack to achieve the same results as + # an unformatted binary Fortran file. + ######################################################### + my $filename2 = "${cdate}.gnorms.ieee_d"; + + open( OUTFILE, ">$filename2" ) or die "Can't open ${filename2}: $!\n"; + binmode OUTFILE; + + print OUTFILE pack( 'f*', @all_gnorm); + + close( OUTFILE ); + + #-------------------------- + # move files to $M_TANKverf + #-------------------------- + my $tankdir = $ENV{"M_TANKverf"}; + if(! -d $tankdir) { + system( "mkdir -p $tankdir" ); + } + + if( -e $filename2 ) { + system("cp -f $filename2 ${tankdir}/."); + } + + my $gdfile = "gnorm_data.txt"; + if( -e $gdfile ) { + system("cp -f $gdfile ${tankdir}/."); + } + + my $errmsg = "${cdate}.errmsg.txt"; + if( -e $errmsg ) { + system("cp -f $errmsg ${tankdir}/."); + } + + } # $rc still == 0 after reading gmon_gnorm.txt + +}else { # $infile does not exist + $rc = 3; +} + +print "$scr has ended, return code = $rc \n" diff --git a/ush/minmon_xtrct_reduct.pl b/ush/minmon_xtrct_reduct.pl new file mode 100755 index 0000000000..cc5da86af8 --- /dev/null +++ b/ush/minmon_xtrct_reduct.pl @@ -0,0 +1,87 @@ +#!/usr/bin/env perl + +use strict; + +#--------------------------------------------------------------------------- +# minmon_xtrct_reduct.pl +# +# Extract the reduction stats for a GSI minimization run and store in +# reduction.ieee_d files ready for GrADS use. 
+#---------------------------------------------------------------------------
+
+if ($#ARGV != 3 ) {
+   print "usage: minmon_xtrct_reduct.pl SUFFIX pdy cyc infile\n";
+   print "       suffix is data source identifier\n";
+   print "       pdy is YYYYMMDD of the cycle to be processed\n";
+   print "       cyc is HH of the cycle to be processed\n";
+   print "       infile is the data file containing the reduction stats\n";
+   exit;
+}
+my $suffix = $ARGV[0];
+my $pdy = $ARGV[1];
+my $cyc = $ARGV[2];
+my $infile = $ARGV[3];
+
+my $scr = "minmon_xtrct_reduct.pl";
+print "$scr has started\n";
+
+my $rc = 0;
+my $cdate = sprintf '%s%s', $pdy, $cyc;
+my $initial_gradient = -999.0;
+my $iter_gradient;
+
+if( (-e $infile) ) {
+
+   my $reduct_target = "cost,grad,step,b,step?";
+   my $gradient_num = 5;
+   my $reduct;
+
+   open( INFILE, "<${infile}" ) or die "Can't open ${infile}: $!\n";
+
+   my @reduct_array;
+
+   while( my $line = <INFILE> ) {
+      if( $line =~ /$reduct_target/ ) {
+         my @reduct_ln = split( / +/, $line );
+         $iter_gradient = $reduct_ln[$gradient_num];
+         if( $initial_gradient == -999.0 ){
+            $initial_gradient = $iter_gradient;
+         }
+
+         $reduct = $iter_gradient / $initial_gradient;
+
+         push( @reduct_array, $reduct );
+      }
+   }
+
+   close( INFILE );
+
+
+   #################################
+   #  write reduct_array to outfile
+   #################################
+   my $outfile = "${cdate}.reduction.ieee_d";
+   open( OUTFILE, ">$outfile" ) or die "Can't open ${outfile}: $!\n";
+   binmode OUTFILE;
+
+   print OUTFILE pack( 'f*', @reduct_array);
+   close( OUTFILE );
+
+   #----------------------------
+   #  copy outfile to $M_TANKverf
+   #----------------------------
+   my $tankdir = $ENV{"M_TANKverf"};
+   if(! -d $tankdir) {
+      system( "mkdir -p $tankdir" );
+   }
+
+   if( -e $outfile ) {
+      my $newfile = "${tankdir}/${outfile}";
+      system("cp -f $outfile $newfile");
+   }
+
+} else {   # $infile does not exist
+   $rc = 5;
+}
+
+print "$scr has ended, return code = $rc \n"
diff --git a/ush/ozn_xtrct.sh b/ush/ozn_xtrct.sh
new file mode 100755
index 0000000000..0c623bf03c
--- /dev/null
+++ b/ush/ozn_xtrct.sh
@@ -0,0 +1,254 @@
+#! /usr/bin/env bash
+
+source "${USHgfs}/preamble.sh"
+
+#------------------------------------------------------------------
+#  ozn_xtrct.sh
+#
+#  This script performs the data extraction from the oznstat
+#  diagnostic files.  The resulting data (*.ieee_d) files, GrADS
+#  control files and stdout files will be moved to the
+#  $TANKverf_ozn.
+#
+#  Calling scripts must define:
+#     $TANKverf_ozn
+#     $PDY
+#     $cyc
+#
+#  Return values are
+#     0 = normal
+#     2 = unable to generate satype list; may indicate no diag
+#         files found in oznstat file
+#------------------------------------------------------------------
+
+#--------------------------------------------------
+#  check_diag_files
+#
+#  Compare $satype (which contains the contents of
+#  gdas_oznmon_satype.txt to $avail_satype which is
+#  determined by the contents of the oznstat file.
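+#
+#  Editor's note -- hypothetical example for illustration: if ${satype}
+#  lists "sbuv2_n19 omi_aura ompsnp_npp" but diag files were only found
+#  for "sbuv2_n19 omi_aura", the function below appends a line like
+#     missing diag file -- diag_ompsnp_npp_ges.2024082000.gz not found
+#  to the bad_diag.2024082000 file.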
+# Report any missing diag files in a file named +# bad_diag.$PDY$cyc +# +check_diag_files() { + pdate=$1 + found_satype=$2 + avail_satype=$3 + + out_file="bad_diag.${pdate}" + + echo ""; echo ""; echo "--> check_diag_files" + + for type in ${found_satype}; do + len_check=$(echo "${avail_satype}" | grep "${type}" | wc -c) + + if [[ ${len_check} -le 1 ]]; then + echo "missing diag file -- diag_${type}_ges.${pdate}.gz not found" >> "./${out_file}" + fi + done + + echo "<-- check_diag_files"; echo ""; echo "" +} + + +iret=0 +export NCP=${NCP:-/bin/cp} +VALIDATE_DATA=${VALIDATE_DATA:-0} +nregion=${nregion:-6} +DO_DATA_RPT=${DO_DATA_RPT:-0} + +netcdf_boolean=".false." +if [[ ${OZNMON_NETCDF} -eq 1 ]]; then + netcdf_boolean=".true." +fi + +OZNMON_NEW_HDR=${OZNMON_NEW_HDR:-0} +new_hdr="F" +if [[ ${OZNMON_NEW_HDR} -eq 1 ]]; then + new_hdr="T" +fi + +#------------------------------------------------------------------ +# if VALIDATE_DATA then locate and untar base file +# +validate=".FALSE." +if [[ ${VALIDATE_DATA} -eq 1 ]]; then + if [[ ! -e ${ozn_val_file} && ! -h ${ozn_val_file} ]]; then + echo "WARNING: VALIDATE_DATA set to 1, but unable to locate ${ozn_val_file}" + echo " Setting VALIDATE_DATA to 0/OFF" + VALIDATE_DATA=0 + else + validate=".TRUE." + val_file=$(basename "${ozn_val_file}") + ${NCP} "${ozn_val_file}" "${val_file}" + tar -xvf "${val_file}" + fi +fi +echo "VALIDATE_DATA, validate = ${VALIDATE_DATA}, ${validate} " + + + +#------------------------------------------------------------------ +# ozn_ptype here is the processing type which is intended to be "ges" +# or "anl". Default is "ges". +# +ozn_ptype=${ozn_ptype:-"ges anl"} + + +#--------------------------------------------------------------------------- +# Build satype list from the available diag files. +# +# An empty satype list means there are no diag files to process. That's +# a problem, reported by an iret value of 2 +# + +avail_satype=$(ls -1 d*ges* | sed -e 's/_/ /g;s/\./ /' | gawk '{ print $2 "_" $3 }') + +if [[ ${DO_DATA_RPT} -eq 1 ]]; then + if [[ -e ${SATYPE_FILE} ]]; then + satype=$(cat "${SATYPE_FILE}") + check_diag_files "${PDY}${cyc}" "${satype}" "${avail_satype}" + else + echo "WARNING: missing ${SATYPE_FILE}" + fi +fi + +len_satype=$(echo -n "${satype}" | wc -c) + +if [[ ${len_satype} -le 1 ]]; then + satype=${avail_satype} +fi + +echo "${satype}" + + +len_satype=$(echo -n "${satype}" | wc -c) + +if [[ ${DO_DATA_RPT} -eq 1 && ${len_satype} -lt 1 ]]; then + iret=2 + +else + + #-------------------------------------------------------------------- + # Copy extraction programs to working directory + # + ${NCP} "${EXECgfs}/oznmon_time.x" ./oznmon_time.x + if [[ ! -e oznmon_time.x ]]; then + iret=2 + exit ${iret} + fi + ${NCP} "${EXECgfs}/oznmon_horiz.x" ./oznmon_horiz.x + if [[ ! 
-e oznmon_horiz.x ]]; then + iret=3 + exit ${iret} + fi + + + #--------------------------------------------------------------------------- + # Outer loop over $ozn_ptype (default values 'ges', 'anl') + # + for ptype in ${ozn_ptype}; do + + iyy="${PDY:0:4}" + imm="${PDY:4:2}" + idd="${PDY:6:2}" + ihh=${cyc} + + for type in ${avail_satype}; do + if [[ -f "diag_${type}_${ptype}.${PDY}${cyc}.gz" ]]; then + mv "diag_${type}_${ptype}.${PDY}${cyc}.gz" "${type}.${ptype}.gz" + gunzip "./${type}.${ptype}.gz" + + echo "processing ptype, type: ${ptype}, ${type}" + rm -f input + +cat << EOF > input + &INPUT + satname='${type}', + iyy=${iyy}, + imm=${imm}, + idd=${idd}, + ihh=${ihh}, + idhh=-720, + incr=6, + nregion=${nregion}, + region(1)='global', rlonmin(1)=-180.0,rlonmax(1)=180.0,rlatmin(1)=-90.0,rlatmax(1)= 90.0, + region(2)='70N-90N', rlonmin(2)=-180.0,rlonmax(2)=180.0,rlatmin(2)= 70.0,rlatmax(2)= 90.0, + region(3)='20N-70N', rlonmin(3)=-180.0,rlonmax(3)=180.0,rlatmin(3)= 20.0,rlatmax(3)= 70.0, + region(4)='20S-20N', rlonmin(4)=-180.0,rlonmax(4)=180.0,rlatmin(4)=-20.0,rlatmax(4)= 20.0, + region(5)='20S-70S', rlonmin(5)=-180.0,rlonmax(5)=180.0,rlatmin(5)=-70.0,rlatmax(5)=-20.0, + region(6)='70S-90S', rlonmin(6)=-180.0,rlonmax(6)=180.0,rlatmin(6)=-90.0,rlatmax(6)=-70.0, + validate=${validate}, + new_hdr=${new_hdr}, + ptype=${ptype}, + netcdf=${netcdf_boolean} + / +EOF + + + echo "oznmon_time.x HAS STARTED ${type}" + + ./oznmon_time.x < input > "stdout.time.${type}.${ptype}" + + echo "oznmon_time.x HAS ENDED ${type}" + + if [[ ! -d ${TANKverf_ozn}/time ]]; then + mkdir -p "${TANKverf_ozn}/time" + fi + ${NCP} "${type}.${ptype}.ctl" "${TANKverf_ozn}/time/" + ${NCP} "${type}.${ptype}.${PDY}${cyc}.ieee_d" "${TANKverf_ozn}/time/" + + ${NCP} bad* "${TANKverf_ozn}/time/" + + rm -f input + +cat << EOF > input + &INPUT + satname='${type}', + iyy=${iyy}, + imm=${imm}, + idd=${idd}, + ihh=${ihh}, + idhh=-18, + incr=6, + new_hdr=${new_hdr}, + ptype=${ptype}, + netcdf=${netcdf_boolean} + / +EOF + + echo "oznmon_horiz.x HAS STARTED ${type}" + + ./oznmon_horiz.x < input > "stdout.horiz.${type}.${ptype}" + + echo "oznmon_horiz.x HAS ENDED ${type}" + + if [[ ! 
-d ${TANKverf_ozn}/horiz ]]; then + mkdir -p "${TANKverf_ozn}/horiz" + fi + ${NCP} "${type}.${ptype}.ctl" "${TANKverf_ozn}/horiz/" + + ${COMPRESS} "${type}.${ptype}.${PDY}${cyc}.ieee_d" + ${NCP} "${type}.${ptype}.${PDY}${cyc}.ieee_d.${Z}" "${TANKverf_ozn}/horiz/" + + + echo "finished processing ptype, type: ${ptype}, ${type}" + + else + echo "diag file for ${type}.${ptype} not found" + fi + + done # type in satype + + done # ptype in $ozn_ptype + + tar -cvf stdout.horiz.tar stdout.horiz* + ${COMPRESS} stdout.horiz.tar + ${NCP} "stdout.horiz.tar.${Z}" "${TANKverf_ozn}/horiz/" + + tar -cvf stdout.time.tar stdout.time* + ${COMPRESS} stdout.time.tar + ${NCP} "stdout.time.tar.${Z}" "${TANKverf_ozn}/time/" +fi + +exit ${iret} From 52431aeb338630ee111d24e43c6a7ffd2a4e7d0f Mon Sep 17 00:00:00 2001 From: Andrew Collard Date: Fri, 1 Nov 2024 15:01:57 +0000 Subject: [PATCH 19/22] More radmon updates --- .gitignore | 4 - sorc/link_fv3gfs.sh | 2 +- ush/radmon_verf_angle.sh | 225 ++++++++++++++++ ush/radmon_verf_bcoef.sh | 221 ++++++++++++++++ ush/radmon_verf_bcor.sh | 218 ++++++++++++++++ ush/radmon_verf_time.sh | 550 +++++++++++++++++++++++++++++++++++++++ 6 files changed, 1215 insertions(+), 5 deletions(-) create mode 100755 ush/radmon_verf_angle.sh create mode 100755 ush/radmon_verf_bcoef.sh create mode 100755 ush/radmon_verf_bcor.sh create mode 100755 ush/radmon_verf_time.sh diff --git a/.gitignore b/.gitignore index 3bb2af9b44..a8a15b5d20 100644 --- a/.gitignore +++ b/.gitignore @@ -143,10 +143,6 @@ ush/mkwfsgbl.sh ush/mod_icec.sh ush/radmon_ck_stdout.sh ush/radmon_err_rpt.sh -ush/radmon_verf_angle.sh -ush/radmon_verf_bcoef.sh -ush/radmon_verf_bcor.sh -ush/radmon_verf_time.sh ush/trim_rh.sh ush/wafs_blending.sh ush/wafs_grib2.regrid.sh diff --git a/sorc/link_fv3gfs.sh b/sorc/link_fv3gfs.sh index ced1a237fc..504a7110f5 100755 --- a/sorc/link_fv3gfs.sh +++ b/sorc/link_fv3gfs.sh @@ -146,7 +146,7 @@ cd ${pwd}/../parm ||exit 8 # $LINK ../sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gfs/scripts/exgfs_atmos_vminmon.sh . # $LINK ../sorc/gsi_monitor.fd/src/Ozone_Monitor/nwprod/gdas_oznmon/scripts/exgdas_atmos_verfozn.sh . # $LINK ../sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/scripts/exgdas_atmos_verfrad.sh . -#cd ${pwd}/../ush ||exit 8 +cd ${pwd}/../ush ||exit 8 #C $LINK ../sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/minmon_shared/ush/minmon_xtrct_costs.pl . #C $LINK ../sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/minmon_shared/ush/minmon_xtrct_gnorms.pl . #C $LINK ../sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/minmon_shared/ush/minmon_xtrct_reduct.pl . diff --git a/ush/radmon_verf_angle.sh b/ush/radmon_verf_angle.sh new file mode 100755 index 0000000000..3dff2a6f98 --- /dev/null +++ b/ush/radmon_verf_angle.sh @@ -0,0 +1,225 @@ +#! /usr/bin/env bash + +source "${USHgfs}/preamble.sh" + +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: radmon_verf_angle.sh +# Script description: Extract angle dependent data from radiance +# diagnostic files. +# +# Author: Ed Safford Org: NP23 Date: 2012-02-02 +# +# Abstract: This script extracts angle dependent data from radiance +# diagnostic files (which are an output from GSI runs), +# storing the extracted data in small binary files. +# +# This script is a child script of exgdas_vrfyrad.sh.sms. 
The parent +# script opens and uncompresses the radiance diagnostic file and copies +# other supporting files into a temporary working directory. +# +# +# Usage: radmon_verf_angle.sh +# +# Input script positional parameters: +# PDYcyc processing date +# yyyymmddcc format; required +# +# Imported Shell Variables: +# RADMON_SUFFIX data source suffix +# defauls to opr +# RAD_AREA global or regional flag +# defaults to global +# TANKverf_rad data repository +# defaults to current directory +# SATYPE list of satellite/instrument sources +# defaults to none +# VERBOSE Verbose flag (YES or NO) +# defaults to NO +# LITTLE_ENDIAN flag to indicate LE machine +# defaults to 0 (big endian) +# USE_ANL use analysis files as inputs in addition to +# the ges files. Default is 0 (ges only) +# +# Modules and files referenced: +# scripts : +# +# programs : $NCP +# $angle_exec +# +# fixed data : $scaninfo +# +# input data : $data_file +# +# output data: $angle_file +# $angle_ctl +# $pgmout +# +# Remarks: +# +# Condition codes +# 0 - no problem encountered +# >0 - some problem encountered +# +#################################################################### + +# Command line arguments. +RAD_AREA=${RAD_AREA:-glb} +REGIONAL_RR=${REGIONAL_RR:-0} # rapid refresh model flag +rgnHH=${rgnHH:-} +rgnTM=${rgnTM:-} + +echo " REGIONAL_RR, rgnHH, rgnTM = ${REGIONAL_RR}, ${rgnHH}, ${rgnTM}" +netcdf_boolean=".false." +if [[ ${RADMON_NETCDF} -eq 1 ]]; then + netcdf_boolean=".true." +fi +echo " RADMON_NETCDF, netcdf_boolean = ${RADMON_NETCDF}, ${netcdf_boolean}" + +which prep_step +which startmsg + +# File names +touch "${pgmout}" + +# Other variables +SATYPE=${SATYPE:-} +VERBOSE=${VERBOSE:-NO} +LITTLE_ENDIAN=${LITTLE_ENDIAN:-0} +USE_ANL=${USE_ANL:-0} + + +if [[ ${USE_ANL} -eq 1 ]]; then + gesanl="ges anl" +else + gesanl="ges" +fi + +err=0 +angle_exec=radmon_angle.x +shared_scaninfo="${shared_scaninfo:-${PARMgfs}/monitor/gdas_radmon_scaninfo.txt}" +scaninfo=scaninfo.txt + +#-------------------------------------------------------------------- +# Copy extraction program and supporting files to working directory + +${NCP} "${EXECgfs}/${angle_exec}" ./ +${NCP} "${shared_scaninfo}" ./${scaninfo} + +if [[ ! -s ./${angle_exec} || ! -s ./${scaninfo} ]]; then + err=2 +else +#-------------------------------------------------------------------- +# Run program for given time + + export pgm=${angle_exec} + + iyy="${PDY:0:4}" + imm="${PDY:4:2}" + idd="${PDY:6:2}" + ihh=${cyc} + + ctr=0 + fail=0 + touch "./errfile" + + for type in ${SATYPE}; do + + if [[ ! 
-s ${type} ]]; then + echo "ZERO SIZED: ${type}" + continue + fi + + for dtype in ${gesanl}; do + + echo "pgm = ${pgm}" + echo "pgmout = ${pgmout}" + prep_step + + ctr=$((ctr + 1)) + + if [[ ${dtype} == "anl" ]]; then + data_file="${type}_anl.${PDY}${cyc}.ieee_d" + ctl_file=${type}_anl.ctl + angl_ctl=angle.${ctl_file} + else + data_file="${type}.${PDY}${cyc}.ieee_d" + ctl_file=${type}.ctl + angl_ctl=angle.${ctl_file} + fi + + angl_file="" + if [[ ${REGIONAL_RR} -eq 1 ]]; then + angl_file=${rgnHH}.${data_file}.${rgnTM} + fi + + + if [[ -f input ]]; then rm input; fi + + nchanl=-999 +cat << EOF > input + &INPUT + satname='${type}', + iyy=${iyy}, + imm=${imm}, + idd=${idd}, + ihh=${ihh}, + idhh=-720, + incr=${CYCLE_INTERVAL}, + nchanl=${nchanl}, + suffix='${RADMON_SUFFIX}', + gesanl='${dtype}', + little_endian=${LITTLE_ENDIAN}, + rad_area='${RAD_AREA}', + netcdf=${netcdf_boolean}, + / +EOF + + startmsg + ./${angle_exec} < input >> "${pgmout}" 2>>errfile + export err=$?; err_chk + if [[ ${err} -ne 0 ]]; then + fail=$(( fail + 1 )) + fi + + if [[ -s ${angl_file} ]]; then + ${COMPRESS} -f "${angl_file}" + fi + + if [[ -s ${angl_ctl} ]]; then + ${COMPRESS} -f "${angl_ctl}" + fi + + + done # for dtype in ${gesanl} loop + + done # for type in ${SATYPE} loop + + + "${USHgfs}/rstprod.sh" + + tar_file=radmon_angle.tar + if compgen -G "angle*.ieee_d*" > /dev/null || compgen -G "angle*.ctl*" > /dev/null; then + tar -cf "${tar_file}" angle*.ieee_d* angle*.ctl* + ${COMPRESS} ${tar_file} + mv "${tar_file}.${Z}" "${TANKverf_rad}/." + + if [[ ${RAD_AREA} = "rgn" ]]; then + cwd=$(pwd) + cd "${TANKverf_rad}" + tar -xf "${tar_file}.${Z}" + rm "${tar_file}.${Z}" + cd "${cwd}" + fi + fi + + if [[ ${ctr} -gt 0 && ${fail} -eq ${ctr} || ${fail} -gt ${ctr} ]]; then + err=3 + fi +fi + +################################################################################ +# Post processing + +exit ${err} diff --git a/ush/radmon_verf_bcoef.sh b/ush/radmon_verf_bcoef.sh new file mode 100755 index 0000000000..4274436154 --- /dev/null +++ b/ush/radmon_verf_bcoef.sh @@ -0,0 +1,221 @@ +#! /usr/bin/env bash + +source "${USHgfs}/preamble.sh" + +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: radmon_verf_bcoef.sh +# Script description: Extract bias correction coefficients data from radiance +# diagnostic files. +# +# Author: Ed Safford Org: NP23 Date: 2012-02-02 +# +# Abstract: This script extracts bias correction coefficient related data from +# radiance diagnostic files (which are an output from GSI runs), +# storing the extracted data in small binary files. +# +# This script is a child script of exgdas_vrfyrad.sh.sms. The parent +# script opens and uncompresses the radiance diagnostic file and copies +# other supporting files into a temporary working directory. +# +# +# Usage: radmon_verf_bcoef.sh +# +# Input script positional parameters: +# PDYcyc processing date +# yyyymmddcc format; required +# +# Imported Shell Variables: +# RADMON_SUFFIX data source suffix +# defauls to opr +# EXECgfs executable directory +# RAD_AREA global or regional flag +# defaults to global +# TANKverf_rad data repository +# SATYPE list of satellite/instrument sources +# defaults to none +# LITTLE_ENDIAN flag for LE machine +# defaults to 0 (big endian) +# USE_ANL use analysis files as inputs in addition to +# the ges files. 
Default is 0 (ges only) +# +# Modules and files referenced: +# scripts : +# +# programs : $NCP +# $bcoef_exec +# +# fixed data : $biascr +# +# input data : $data_file +# +# output data: $bcoef_file +# $bcoef_ctl +# $pgmout +# +# Remarks: +# +# Condition codes +# 0 - no problem encountered +# >0 - some problem encountered +# +#################################################################### + +netcdf_boolean=".false." +if [[ ${RADMON_NETCDF} -eq 1 ]]; then + netcdf_boolean=".true." +fi +echo " RADMON_NETCDF, netcdf_boolean = ${RADMON_NETCDF}, ${netcdf_boolean}" + +# File names +touch "${pgmout}" + +# Other variables +RAD_AREA=${RAD_AREA:-glb} +REGIONAL_RR=${REGIONAL_RR:-0} +rgnHH=${rgnHH:-} +rgnTM=${rgnTM:-} +SATYPE=${SATYPE:-} +LITTLE_ENDIAN=${LITTLE_ENDIAN:-0} +USE_ANL=${USE_ANL:-0} + + +err=0 +bcoef_exec=radmon_bcoef.x + +if [[ ${USE_ANL} -eq 1 ]]; then + gesanl="ges anl" +else + gesanl="ges" +fi + +#-------------------------------------------------------------------- +# Copy extraction program and supporting files to working directory + +${NCP} "${EXECgfs}/${bcoef_exec}" ./${bcoef_exec} +${NCP} "${biascr}" ./biascr.txt + +if [[ ! -s ./${bcoef_exec} || ! -s ./biascr.txt ]]; then + err=4 +else + + +#-------------------------------------------------------------------- +# Run program for given time + + export pgm=${bcoef_exec} + + iyy="${PDY:0:4}" + imm="${PDY:4:2}" + idd="${PDY:6:2}" + ihh=${cyc} + + ctr=0 + fail=0 + + nchanl=-999 + npredr=5 + + for type in ${SATYPE}; do + + if [[ ! -s ${type} ]]; then + echo "ZERO SIZED: ${type}" + continue + fi + + for dtype in ${gesanl}; do + + prep_step + + ctr=$(( ctr + 1 )) + + if [[ ${dtype} == "anl" ]]; then + data_file="${type}_anl.${PDY}${cyc}.ieee_d" + ctl_file=${type}_anl.ctl + bcoef_ctl=bcoef.${ctl_file} + else + data_file="${type}.${PDY}${cyc}.ieee_d" + ctl_file=${type}.ctl + bcoef_ctl=bcoef.${ctl_file} + fi + + if [[ ${REGIONAL_RR} -eq 1 ]]; then + bcoef_file=${rgnHH}.bcoef.${data_file}.${rgnTM} + else + bcoef_file=bcoef.${data_file} + fi + + + if [[ -f input ]]; then rm input; fi + + +cat << EOF > input + &INPUT + satname='${type}', + npredr=${npredr}, + nchanl=${nchanl}, + iyy=${iyy}, + imm=${imm}, + idd=${idd}, + ihh=${ihh}, + idhh=-720, + incr=${CYCLE_INTERVAL}, + suffix='${RADMON_SUFFIX}', + gesanl='${dtype}', + little_endian=${LITTLE_ENDIAN}, + netcdf=${netcdf_boolean}, + / +EOF + startmsg + ./${bcoef_exec} < input >>"${pgmout}" 2>>errfile + export err=$?; err_chk + if [[ ${err} -ne 0 ]]; then + fail=$(( fail + 1 )) + fi + + +#------------------------------------------------------------------- +# move data, control, and stdout files to $TANKverf_rad and compress +# + + if [[ -s ${bcoef_file} ]]; then + ${COMPRESS} "${bcoef_file}" + fi + + if [[ -s ${bcoef_ctl} ]]; then + ${COMPRESS} "${bcoef_ctl}" + fi + + + done # dtype in $gesanl loop + done # type in $SATYPE loop + + + "${USHgfs}/rstprod.sh" + + if compgen -G "bcoef*.ieee_d*" > /dev/null || compgen -G "bcoef*.ctl*" > /dev/null; then + tar_file=radmon_bcoef.tar + tar -cf ${tar_file} bcoef*.ieee_d* bcoef*.ctl* + ${COMPRESS} ${tar_file} + mv "${tar_file}.${Z}" "${TANKverf_rad}" + + if [[ ${RAD_AREA} = "rgn" ]]; then + cwd=$(pwd) + cd "${TANKverf_rad}" + tar -xf "${tar_file}.${Z}" + rm "${tar_file}.${Z}" + cd "${cwd}" + fi + fi + + if [[ ${ctr} -gt 0 && ${fail} -eq ${ctr} || ${fail} -gt ${ctr} ]]; then + err=5 + fi +fi + + +################################################################################ +# Post processing + +exit ${err} diff --git a/ush/radmon_verf_bcor.sh 
b/ush/radmon_verf_bcor.sh new file mode 100755 index 0000000000..ea0a7842e6 --- /dev/null +++ b/ush/radmon_verf_bcor.sh @@ -0,0 +1,218 @@ +#! /usr/bin/env bash + +source "${USHgfs}/preamble.sh" + +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: radmon_verf_bcor.sh +# Script description: Extract bias correction data from radiance diagnostic +# files. +# +# Author: Ed Safford Org: NP23 Date: 2012-02-02 +# +# Abstract: This script extracts bias correction related data from radiance +# diagnostic files (which are an output from GSI runs), storing the +# extracted data in small binary files. +# +# This script is a child script of exgdas_vrfyrad.sh.sms. The parent +# script opens and uncompresses the radiance diagnostic file and copies +# other supporting files into a temporary working directory. +# +# +# Usage: radmon_verf_bcor.sh +# +# Input script positional parameters: +# PDYcyc processing date +# yyyymmddcc format; required +# +# Imported Shell Variables: +# RADMON_SUFFIX data source suffix +# defauls to opr +# EXECgfs executable directory +# defaults to current directory +# RAD_AREA global or regional flag +# defaults to global +# TANKverf_rad data repository +# defaults to current directory +# SATYPE list of satellite/instrument sources +# defaults to none +# LITTLE_ENDIAN flag for little endian machine +# defaults to 0 (big endian) +# USE_ANL use analysis files as inputs in addition to +# the ges files. Default is 0 (ges only) +# +# Modules and files referenced: +# scripts : +# +# programs : $NCP +# $bcor_exec +# +# fixed data : none +# +# input data : $data_file +# +# output data: $bcor_file +# $bcor_ctl +# $pgmout +# +# Remarks: +# +# Condition codes +# 0 - no problem encountered +# >0 - some problem encountered +# +#################################################################### + +# File names +touch "${pgmout}" + +# Other variables +RAD_AREA=${RAD_AREA:-glb} +SATYPE=${SATYPE:-} +LITTLE_ENDIAN=${LITTLE_ENDIAN:-0} +USE_ANL=${USE_ANL:-0} + +bcor_exec=radmon_bcor.x +err=0 + +netcdf_boolean=".false." +if [[ ${RADMON_NETCDF} -eq 1 ]]; then + netcdf_boolean=".true." +fi + +if [[ ${USE_ANL} -eq 1 ]]; then + gesanl="ges anl" +else + gesanl="ges" +fi + + +#-------------------------------------------------------------------- +# Copy extraction program to working directory + +${NCP} "${EXECgfs}/${bcor_exec}" ./${bcor_exec} + +if [[ ! 
-s ./${bcor_exec} ]]; then + err=6 +else + + +#-------------------------------------------------------------------- +# Run program for given time + + export pgm=${bcor_exec} + + iyy="${PDY:0:4}" + imm="${PDY:4:2}" + idd="${PDY:6:2}" + ihh=${cyc} + + ctr=0 + fail=0 + touch "./errfile" + + for type in ${SATYPE}; do + + for dtype in ${gesanl}; do + + prep_step + + ctr=$(( ctr + 1 )) + + if [[ ${dtype} == "anl" ]]; then + data_file="${type}_anl.${PDY}${cyc}.ieee_d" + bcor_file=bcor.${data_file} + ctl_file=${type}_anl.ctl + bcor_ctl=bcor.${ctl_file} + stdout_file=stdout.${type}_anl + bcor_stdout=bcor.${stdout_file} + input_file=${type}_anl + else + data_file="${type}.${PDY}${cyc}.ieee_d" + bcor_file=bcor.${data_file} + ctl_file=${type}.ctl + bcor_ctl=bcor.${ctl_file} + stdout_file=stdout.${type} + bcor_stdout=bcor.${stdout_file} + input_file=${type} + fi + + if [[ -f input ]]; then rm input; fi + + # Check for 0 length input file here and avoid running + # the executable if $input_file doesn't exist or is 0 bytes + # + if [[ -s "${input_file}" ]]; then + nchanl=-999 + +cat << EOF > input + &INPUT + satname='${type}', + iyy=${iyy}, + imm=${imm}, + idd=${idd}, + ihh=${ihh}, + idhh=-720, + incr=6, + nchanl=${nchanl}, + suffix='${RADMON_SUFFIX}', + gesanl='${dtype}', + little_endian=${LITTLE_ENDIAN}, + rad_area='${RAD_AREA}', + netcdf=${netcdf_boolean}, + / +EOF + + startmsg + ./${bcor_exec} < input >> "${pgmout}" 2>>errfile + export err=$?; err_chk + if [[ $? -ne 0 ]]; then + fail=$(( fail + 1 )) + fi + + +#------------------------------------------------------------------- +# move data, control, and stdout files to $TANKverf_rad and compress +# + + if [[ -s ${bcor_file} ]]; then + ${COMPRESS} "${bcor_file}" + fi + + if [[ -s ${bcor_ctl} ]]; then + ${COMPRESS} "${bcor_ctl}" + fi + + fi + done # dtype in $gesanl loop + done # type in $SATYPE loop + + + "${USHgfs}/rstprod.sh" + tar_file=radmon_bcor.tar + + if compgen -G "bcor*.ieee_d*" > /dev/null || compgen -G "bcor*.ctl*" > /dev/null; then + tar -cf "${tar_file}" bcor*.ieee_d* bcor*.ctl* + ${COMPRESS} ${tar_file} + mv "${tar_file}.${Z}" "${TANKverf_rad}/." + + if [[ ${RAD_AREA} = "rgn" ]]; then + cwd=$(pwd) + cd "${TANKverf_rad}" + tar -xf "${tar_file}.${Z}" + rm "${tar_file}.${Z}" + cd "${cwd}" + fi + fi + + if [[ ${ctr} -gt 0 && ${fail} -eq ${ctr} || ${fail} -gt ${ctr} ]]; then + err=7 + fi +fi + +################################################################################ +# Post processing + +exit ${err} + diff --git a/ush/radmon_verf_time.sh b/ush/radmon_verf_time.sh new file mode 100755 index 0000000000..0e935826dd --- /dev/null +++ b/ush/radmon_verf_time.sh @@ -0,0 +1,550 @@ +#! /usr/bin/env bash + +source "${USHgfs}/preamble.sh" + +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: radmon_verf_time.sh +# Script description: Extract time data from radiance diagnostic files, +# perform data integrity checks. +# +# Author: Ed Safford Org: NP23 Date: 2012-02-02 +# +# Abstract: This script extracts time related data from radiance diagnostic +# files (which are an output from GSI runs), storing the extracted +# data in small binary files. Data integrity checks are performed +# on the data and mail messages are sent if potential errors are +# detected. +# +# This script is a child script of exgdas_vrfyrad.sh.sms. 
The parent +# script opens and uncompresses the radiance diagnostic file and copies +# other supporting files into a temporary working directory. +# +# +# Usage: radmon_verf_time.sh ${PDY}${cyc} +# +# Input script positional parameters: +# PDYcyc processing date +# yyyymmddcc format; required +# +# Imported Shell Variables: +# DO_DATA_RPT switch to build the data report +# defaults to 1 (on) +# RADMON_SUFFIX data source suffix +# defauls to opr +# RAD_AREA global or regional flag +# defaults to global +# TANKverf_rad data repository +# SATYPE list of satellite/instrument sources +# defaults to none +# VERBOSE Verbose flag (YES or NO) +# defaults to NO +# LITTLE_ENDIAN flag for little endian machine +# defaults to 0 (big endian) +# USE_ANL use analysis files as inputs in addition to +# the ges files. Default is 0 (ges only) +# +# Modules and files referenced: +# scripts : +# +# programs : $NCP +# $time_exec +# +# fixed data : gdas_radmon_base.tar +# +# input data : $data_file +# +# output data: $time_file +# $time_ctl +# $pgmout +# $bad_pen +# $bad_chan +# $report +# $diag_report +# +# +# Remarks: +# +# Condition codes +# 0 - no problem encountered +# >0 - some problem encountered +# +#################################################################### + +# File names + +radmon_err_rpt=${radmon_err_rpt:-${USHgfs}/radmon_err_rpt.sh} +base_file=${base_file:-${PARMgfs}/monitor/gdas_radmon_base.tar} +report=report.txt +disclaimer=disclaimer.txt + +diag_report=diag_report.txt +diag_hdr=diag_hdr.txt +diag=diag.txt + +obs_err=obs_err.txt +obs_hdr=obs_hdr.txt +pen_err=pen_err.txt +pen_hdr=pen_hdr.txt + +chan_err=chan_err.txt +chan_hdr=chan_hdr.txt +count_hdr=count_hdr.txt +count_err=count_err.txt + +netcdf_boolean=".false." +if [[ ${RADMON_NETCDF} -eq 1 ]]; then + netcdf_boolean=".true." +fi + +DO_DATA_RPT=${DO_DATA_RPT:-1} +RADMON_SUFFIX=${RADMON_SUFFIX:-opr} +RAD_AREA=${RAD_AREA:-glb} +REGIONAL_RR=${REGIONAL_RR:-0} +rgnHH=${rgnHH:-} +rgnTM=${rgnTM:-} +SATYPE=${SATYPE:-} +VERBOSE=${VERBOSE:-NO} +LITTLE_ENDIAN=${LITTLE_ENDIAN:-0} + +time_exec=radmon_time.x +USE_ANL=${USE_ANL:-0} +err=0 + +if [[ ${USE_ANL} -eq 1 ]]; then + gesanl="ges anl" +else + gesanl="ges" +fi + + +#-------------------------------------------------------------------- +# Copy extraction program and base files to working directory +#------------------------------------------------------------------- +${NCP} "${EXECgfs}/${time_exec}" ./ +if [[ ! -s ./${time_exec} ]]; then + err=8 +fi + +iyy="${PDY:0:4}" +imm="${PDY:4:2}" +idd="${PDY:6:2}" +ihh=${cyc} + +local_base="local_base" +if [[ ${DO_DATA_RPT} -eq 1 ]]; then + + if [[ -e ${base_file}.${Z} ]]; then + ${NCP} "${base_file}.${Z}" "./${local_base}.${Z}" + ${UNCOMPRESS} "${local_base}.${Z}" + else + ${NCP} "${base_file}" ./${local_base} + fi + + if [[ ! -s ./${local_base} ]]; then + echo "RED LIGHT: local_base file not found" + else + echo "Confirming local_base file is good = ${local_base}" + tar -xf ./${local_base} + echo "local_base is untarred" + fi +fi + +if [[ ${err} -eq 0 ]]; then + ctr=0 + fail=0 + + export pgm=${time_exec} +#-------------------------------------------------------------------- +# Loop over each entry in SATYPE +#-------------------------------------------------------------------- + for type in ${SATYPE}; do + + if [[ ! 
-s ${type} ]]; then + echo "ZERO SIZED: ${type}" + continue + fi + + ctr=$(( ctr + 1 )) + + for dtype in ${gesanl}; do + + if [[ -f input ]]; then rm input; fi + + if [[ ${dtype} == "anl" ]]; then + data_file="${type}_anl.${PDY}${cyc}.ieee_d" + ctl_file=${type}_anl.ctl + time_ctl=time.${ctl_file} + else + data_file="${type}.${PDY}${cyc}.ieee_d" + ctl_file=${type}.ctl + time_ctl=time.${ctl_file} + fi + + if [[ ${REGIONAL_RR} -eq 1 ]]; then + time_file=${rgnHH}.time.${data_file}.${rgnTM} + else + time_file=time.${data_file} + fi + +#-------------------------------------------------------------------- +# Run program for given satellite/instrument +#-------------------------------------------------------------------- + nchanl=-999 +cat << EOF > input + &INPUT + satname='${type}', + iyy=${iyy}, + imm=${imm}, + idd=${idd}, + ihh=${ihh}, + idhh=-720, + incr=${CYCLE_INTERVAL}, + nchanl=${nchanl}, + suffix='${RADMON_SUFFIX}', + gesanl='${dtype}', + little_endian=${LITTLE_ENDIAN}, + rad_area='${RAD_AREA}', + netcdf=${netcdf_boolean}, + / +EOF + + ./${time_exec} < input >> stdout."${type}" 2>>errfile + + if [[ ${err} -ne 0 ]]; then + fail=$(( fail + 1 )) + fi + +#------------------------------------------------------------------- +# move data, control, and stdout files to $TANKverf_rad and compress +#------------------------------------------------------------------- + cat "stdout.${type}" >> stdout.time + + if [[ -s ${time_file} ]]; then + ${COMPRESS} "${time_file}" + fi + + if [[ -s ${time_ctl} ]]; then + ${COMPRESS} "${time_ctl}" + fi + + done + done + + + "${USHgfs}/rstprod.sh" + + if compgen -G "time*.ieee_d*" > /dev/null || compgen -G "time*.ctl*" > /dev/null; then + tar_file=radmon_time.tar + tar -cf "${tar_file}" time*.ieee_d* time*.ctl* + ${COMPRESS} ${tar_file} + mv "${tar_file}.${Z}" "${TANKverf_rad}/." + + if [[ ${RAD_AREA} = "rgn" ]]; then + cwd=$(pwd) + cd "${TANKverf_rad}" + tar -xf "${tar_file}.${Z}" + rm "${tar_file}.${Z}" + cd "${cwd}" + fi + fi + + if [[ ${ctr} -gt 0 && ${fail} -eq ${ctr} || ${fail} -gt ${ctr} ]]; then + echo "fail, ctr = ${fail}, ${ctr}" + err=10 + fi + +fi + + + +#################################################################### +#------------------------------------------------------------------- +# Begin error analysis and reporting +#------------------------------------------------------------------- +#################################################################### + +if [[ ${DO_DATA_RPT} -eq 1 ]]; then + +#--------------------------- +# build report disclaimer +# + cat << EOF > ${disclaimer} + + +*********************** WARNING *************************** +THIS IS AN AUTOMATED EMAIL. REPLIES TO SENDER WILL NOT BE +RECEIVED. 
PLEASE DIRECT REPLIES TO edward.safford@noaa.gov +*********************** WARNING *************************** +EOF + + +#------------------------------------------------------------------- +# Check for missing diag files +# + tmp_satype="./tmp_satype.txt" + echo "${SATYPE}" > ${tmp_satype} + "${USHgfs}/radmon_diag_ck.sh" --rad "${radstat}" --sat "${tmp_satype}" --out "${diag}" + + if [[ -s ${diag} ]]; then + cat << EOF > ${diag_hdr} + + Problem Reading Diagnostic File + + + Problems were encountered reading the diagnostic file for + the following sources: + +EOF + + cat ${diag_hdr} >> ${diag_report} + cat ${diag} >> ${diag_report} + + echo >> ${diag_report} + + rm ${diag_hdr} + fi + +#------------------------------------------------------------------- +# move warning notification to TANKverf +# + if [[ -s ${diag} ]]; then + lines=$(wc -l <${diag}) + echo "lines in diag = ${lines}" + + if [[ ${lines} -gt 0 ]]; then + cat ${diag_report} + cp ${diag} "${TANKverf_rad}/bad_diag.${PDY}${cyc}" + else + rm ${diag_report} + fi + fi + + + + #---------------------------------------------------------------- + # Identify bad_pen and bad_chan files for this cycle and + # previous cycle + + bad_pen=bad_pen.${PDY}${cyc} + bad_chan=bad_chan.${PDY}${cyc} + low_count=low_count.${PDY}${cyc} + + qdate=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") + pday="${qdate:0:8}" + + prev_bad_pen=bad_pen.${qdate} + prev_bad_chan=bad_chan.${qdate} + prev_low_count=low_count.${qdate} + + prev_bad_pen=${TANKverf_radM1}/${prev_bad_pen} + prev_bad_chan=${TANKverf_radM1}/${prev_bad_chan} + prev_low_count=${TANKverf_radM1}/${prev_low_count} + + if [[ -s ${bad_pen} ]]; then + echo "pad_pen = ${bad_pen}" + fi + if [[ -s ${prev_bad_pen} ]]; then + echo "prev_pad_pen = ${prev_bad_pen}" + fi + + if [[ -s ${bad_chan} ]]; then + echo "bad_chan = ${bad_chan}" + fi + if [[ -s ${prev_bad_chan} ]]; then + echo "prev_bad_chan = ${prev_bad_chan}" + fi + if [[ -s ${low_count} ]]; then + echo "low_count = ${low_count}" + fi + if [[ -s ${prev_low_count} ]]; then + echo "prev_low_count = ${prev_low_count}" + fi + + do_pen=0 + do_chan=0 + do_cnt=0 + + if [[ -s ${bad_pen} && -s ${prev_bad_pen} ]]; then + do_pen=1 + fi + + if [[ -s ${low_count} && -s ${prev_low_count} ]]; then + do_cnt=1 + fi + + #-------------------------------------------------------------------- + # avoid doing the bad_chan report for REGIONAL_RR sources -- because + # they run hourly they often have 0 count channels for off-hour runs. 
+ # + if [[ -s ${bad_chan} && -s ${prev_bad_chan} && REGIONAL_RR -eq 0 ]]; then + do_chan=1 + fi + + #-------------------------------------------------------------------- + # Remove extra spaces in new bad_pen & low_count files + # + if [[ -s ${bad_pen} ]]; then + gawk '{$1=$1}1' "${bad_pen}" > tmp.bad_pen + mv -f tmp.bad_pen "${bad_pen}" + fi + if [[ -s ${low_count} ]]; then + gawk '{$1=$1}1' "${low_count}" > tmp.low_count + mv -f tmp.low_count "${low_count}" + fi + + echo " do_pen, do_chan, do_cnt = ${do_pen}, ${do_chan}, ${do_cnt}" + echo " diag_report = ${diag_report} " + if [[ ${do_pen} -eq 1 || ${do_chan} -eq 1 || ${do_cnt} -eq 1 || -s ${diag_report} ]]; then + + if [[ ${do_pen} -eq 1 ]]; then + + echo "calling radmon_err_rpt for pen" + ${radmon_err_rpt} "${prev_bad_pen}" "${bad_pen}" pen "${qdate}" \ + "${PDY}${cyc}" ${diag_report} ${pen_err} + fi + + if [[ ${do_chan} -eq 1 ]]; then + + echo "calling radmon_err_rpt for chan" + ${radmon_err_rpt} "${prev_bad_chan}" "${bad_chan}" chan "${qdate}" \ + "${PDY}${cyc}" ${diag_report} ${chan_err} + fi + + if [[ ${do_cnt} -eq 1 ]]; then + + echo "calling radmon_err_rpt for cnt" + ${radmon_err_rpt} "${prev_low_count}" "${low_count}" cnt "${qdate}" \ + "${PDY}${cyc}" ${diag_report} ${count_err} + fi + + #------------------------------------------------------------------- + # put together the unified error report with any obs, chan, and + # penalty problems and mail it + + if [[ -s ${obs_err} || -s ${pen_err} || -s ${chan_err} || -s ${count_err} || -s ${diag_report} ]]; then + + echo DOING ERROR REPORTING + + + cat << EOF > ${report} +Radiance Monitor warning report + + Net: ${RADMON_SUFFIX} + Run: ${RUN} + Cycle: ${PDY}${cyc} + +EOF + + if [[ -s ${diag_report} ]]; then + echo OUTPUTING DIAG_REPORT + cat ${diag_report} >> ${report} + fi + + if [[ -s ${chan_err} ]]; then + + echo OUTPUTING CHAN_ERR + + cat << EOF > ${chan_hdr} + + The following channels report 0 observational counts over the past two cycles: + + Satellite/Instrument Channel + ==================== ======= + +EOF + + cat ${chan_hdr} >> ${report} + cat ${chan_err} >> ${report} + + fi + + if [[ -s ${count_err} ]]; then + + cat << EOF > ${count_hdr} + + + + The following channels report abnormally low observational counts in the latest 2 cycles: + +Satellite/Instrument Obs Count Avg Count +==================== ========= ========= + +EOF + + cat ${count_hdr} >> ${report} + cat ${count_err} >> ${report} + fi + + + if [[ -s ${pen_err} ]]; then + + cat << EOF > ${pen_hdr} + + + Penalty values outside of the established normal range were found + for these sensor/channel/regions in the past two cycles: + + Questionable Penalty Values + ============ ======= ====== Cycle Penalty Bound + ----- ------- ----- +EOF + cat ${pen_hdr} >> ${report} + cat ${pen_err} >> ${report} + rm -f ${pen_hdr} + rm -f ${pen_err} + fi + + echo >> ${report} + cat ${disclaimer} >> ${report} + echo >> ${report} + fi + + #------------------------------------------------------------------- + # dump report to log file + # + if [[ -s ${report} ]]; then + lines=$(wc -l <${report}) + if [[ ${lines} -gt 2 ]]; then + cat ${report} + + ${NCP} ${report} "${TANKverf_rad}/warning.${PDY}${cyc}" + fi + fi + + + fi + + #------------------------------------------------------------------- + # copy new bad_pen, bad_chan, and low_count files to $TANKverf_rad + # + if [[ -s ${bad_chan} ]]; then + mv "${bad_chan}" "${TANKverf_rad}/." + fi + + if [[ -s ${bad_pen} ]]; then + mv "${bad_pen}" "${TANKverf_rad}/." 
+ fi + + if [[ -s ${low_count} ]]; then + mv "${low_count}" "${TANKverf_rad}/." + fi + + +fi + + for type in ${SATYPE}; do + rm -f "stdout.${type}" + done + +################################################################################ +#------------------------------------------------------------------- +# end error reporting section +#------------------------------------------------------------------- +################################################################################ + +################################################################################ +# Post processing + +exit ${err} From e70b34002082f4514219cea656f6e984c0e5be9c Mon Sep 17 00:00:00 2001 From: Andrew Collard Date: Sat, 2 Nov 2024 21:47:28 +0000 Subject: [PATCH 20/22] Add monitor scripts from global-workflow #f159d39 --- jobs/JGDAS_ATMOS_VERFOZN | 70 +++------- jobs/JGDAS_ATMOS_VERFRAD | 91 +++++-------- jobs/JGDAS_ATMOS_VMINMON | 59 +++------ jobs/JGFS_ATMOS_VMINMON | 56 +++----- scripts/exgdas_atmos_verfozn.sh | 69 ++++++++-- scripts/exgdas_atmos_verfrad.sh | 183 ++++++++++++++++--------- scripts/exgdas_atmos_vminmon.sh | 20 +-- scripts/exgfs_atmos_vminmon.sh | 19 +-- scripts/exglobal_atmos_vminmon.sh | 76 ----------- ush/jjob_header.sh | 115 ++++++++++++++++ ush/minmon_xtrct_costs.pl | 7 +- ush/minmon_xtrct_gnorms.pl | 7 +- ush/minmon_xtrct_reduct.pl | 8 +- ush/ozn_xtrct.sh | 87 ++++++------ ush/preamble.sh | 92 +++++++++++++ ush/radmon_diag_ck.sh | 175 ++++++++++++++++++++++++ ush/radmon_verf_angle.sh | 82 +++++++----- ush/radmon_verf_bcoef.sh | 78 ++++++----- ush/radmon_verf_bcor.sh | 68 +++++----- ush/radmon_verf_time.sh | 213 ++++++++++++++++-------------- ush/rstprod.sh | 19 +++ 21 files changed, 975 insertions(+), 619 deletions(-) delete mode 100755 scripts/exglobal_atmos_vminmon.sh create mode 100644 ush/jjob_header.sh create mode 100644 ush/preamble.sh create mode 100755 ush/radmon_diag_ck.sh create mode 100755 ush/rstprod.sh diff --git a/jobs/JGDAS_ATMOS_VERFOZN b/jobs/JGDAS_ATMOS_VERFOZN index 9071beaf30..e16cb6d548 100755 --- a/jobs/JGDAS_ATMOS_VERFOZN +++ b/jobs/JGDAS_ATMOS_VERFOZN @@ -1,59 +1,32 @@ -#!/bin/sh +#! /usr/bin/env bash + ############################################################# # Set up environment for GDAS Ozone Monitor job ############################################################# -set -xa -echo `date` $0 `date -u` begin -export PS4='$SECONDS + ' +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "vrfy" -c "base vrfy" + ############################### # Specify NET, RUN, and COMPONENT name ############################## -export NET=${NET:-gfs} -export RUN=${RUN:-gdas} -export COMPONENT=${COMPONENT:-atmos} - -########################################################### -# obtain unique process id (pid) and make temp directories -########################################################### -export pid=$$ -export outid=${outid:-"LL$job"} -export DATA=${DATA:-${DATAROOT}/${jobid:?}} +export COMPONENT="atmos" export OZNMON_SUFFIX=${OZNMON_SUFFIX:-${NET}} -mkdir -p ${DATA} -cd ${DATA} - - -#################################### -# Determine Job Output Name on System -#################################### -export pgmout="OUTPUT.${pid}" -export pgmerr=errfile -export cycle=t${cyc}z - - -############################################## -# Run setpdy and initialize PDY variables -############################################## -setpdy.sh -. 
./PDY - - #--------------------------------------------- # Specify Execution Areas # export HOMEgfs_ozn=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} export HOMEgdas_ozn=${HOMEgfs_ozn:-${NWROOT}/gfs.${gfs_ver}} -export PARMgdas_ozn=${PARMgfs_ozn:-$HOMEgfs_ozn/parm/mon} -export SCRgdas_ozn=${SCRgfs_ozn:-$HOMEgfs_ozn/scripts} -export FIXgdas_ozn=${FIXgfs_ozn:-$HOMEgfs_ozn/fix/gdas} +export PARMgdas_ozn=${PARMgfs_ozn:-${HOMEgfs_ozn}/parm/mon} +export SCRgdas_ozn=${SCRgfs_ozn:-${HOMEgfs_ozn}/scripts} +export FIXgdas_ozn=${FIXgfs_ozn:-${HOMEgfs_ozn}/fix/gdas} export HOMEoznmon=${HOMEoznmon:-${HOMEgfs_ozn}} -export EXECoznmon=${EXECoznmon:-$HOMEoznmon/exec} +export EXECoznmon=${EXECoznmon:-${HOMEoznmon}/exec} export FIXoznmon=${FIXoznmon:-${HOMEoznmon}/fix} -export USHoznmon=${USHoznmon:-$HOMEoznmon/ush} +export USHoznmon=${USHoznmon:-${HOMEoznmon}/ush} #----------------------------------- @@ -66,11 +39,11 @@ export USHoznmon=${USHoznmon:-$HOMEoznmon/ush} # determine PDY and cyc for previous cycle ############################################# -cdate=`${NDATE} -6 ${PDY}${cyc}` +cdate=$(${NDATE} -6 ${PDY}${cyc}) echo 'pdate = ${pdate}' -export P_PDY=`echo ${cdate} | cut -c1-8` -export p_cyc=`echo ${cdate} | cut -c9-10` +export P_PDY=$(echo ${cdate} | cut -c1-8) +export p_cyc=$(echo ${cdate} | cut -c9-10) #--------------------------------------------- # OZN_TANKDIR - WHERE OUTPUT DATA WILL RESIDE @@ -84,7 +57,7 @@ export COMIN=${COMIN:-${COM_IN}/${RUN}.${PDY}/${cyc}/${COMPONENT}} if [[ ! -d ${TANKverf_ozn} ]]; then mkdir -p -m 775 ${TANKverf_ozn} fi - + #--------------------------------------- # set up validation file # @@ -96,7 +69,7 @@ fi # Set necessary environment variables # export OZN_AREA=${OZN_AREA:-glb} -export oznstat=${oznstat:-$COMIN/gdas.t${cyc}z.oznstat} +export oznstat=${oznstat:-${COMIN}/gdas.t${cyc}z.oznstat} #------------------------------------------------------- @@ -104,17 +77,16 @@ export oznstat=${oznstat:-$COMIN/gdas.t${cyc}z.oznstat} # ${OZNMONSH:-${SCRgdas_ozn}/exgdas_atmos_verfozn.sh} ${PDY} ${cyc} err=$? -[[ $err -ne 0 ]] && exit $err +[[ ${err} -ne 0 ]] && exit ${err} ################################ # Remove the Working Directory ################################ -KEEPDATA=${KEEPDATA:-YES} -cd $DATAROOT +KEEPDATA=${KEEPDATA:-NO} +cd ${DATAROOT} if [ ${KEEPDATA} = NO ] ; then - rm -rf $DATA + rm -rf ${DATA} fi -date - +exit 0 diff --git a/jobs/JGDAS_ATMOS_VERFRAD b/jobs/JGDAS_ATMOS_VERFRAD index 39c7b6661f..300e6eff1a 100755 --- a/jobs/JGDAS_ATMOS_VERFRAD +++ b/jobs/JGDAS_ATMOS_VERFRAD @@ -1,51 +1,34 @@ -#!/bin/sh +#! 
/usr/bin/env bash + ############################################################# # Set up environment for GDAS Radiance Monitor job ############################################################# -set -xa -echo `date` $0 `date -u` begin -export PS4='$SECONDS + ' - -############################### -# Specify NET, RUN, and COMPONENT name -############################## -export NET=${NET:-gfs} -export RUN=${RUN:-gdas} -export COMPONENT=${COMPONENT:-atmos} - -########################################################### -# obtain unique process id (pid) and make temp directories -########################################################### -export pid=$$ -export outid=${outid:-"LL$job"} -export RAD_DATA_IN=${RAD_DATA_IN:-${DATAROOT}/${jobid:?}} +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "vrfy" -c "base vrfy" + +export COMPONENT="atmos" + +export RAD_DATA_IN=${DATA} export RADMON_SUFFIX=${RADMON_SUFFIX:-${RUN}} export CYCLE_INTERVAL=${CYCLE_INTERVAL:-6} -mkdir -p $RAD_DATA_IN -cd $RAD_DATA_IN - -#################################### -# Determine Job Output Name on System -#################################### -export pgmout="OUTPUT.${pid}" -export pgmerr=errfile -export cycle=t${cyc}z +mkdir -p ${RAD_DATA_IN} +cd ${RAD_DATA_IN} ############################################## # Specify Execution Areas ############################################## export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export SCRgfs=${SCRgfs:-$HOMEgfs/scripts} +export SCRgfs=${SCRgfs:-${HOMEgfs}/scripts} -export FIXgdas=${FIXgdas:-$HOMEgfs/fix/gdas} -export PARMmon=${PARMmon:-$HOMEgfs/parm/mon} +export FIXgdas=${FIXgdas:-${HOMEgfs}/fix/gdas} +export PARMmon=${PARMmon:-${HOMEgfs}/parm/mon} export HOMEradmon=${HOMEradmon:-${HOMEgfs}} -export EXECradmon=${EXECradmon:-$HOMEradmon/exec} +export EXECradmon=${EXECradmon:-${HOMEradmon}/exec} export FIXradmon=${FIXradmon:-${FIXgfs}} -export USHradmon=${USHradmon:-$HOMEradmon/ush} +export USHradmon=${USHradmon:-${HOMEradmon}/ush} ################################### @@ -55,55 +38,45 @@ parm_file=${parm_file:-${PARMmon}/da_mon.parm} . ${parm_file} -############################################# -# Run setpdy and initialize PDY variables -############################################# -if [[ $MY_MACHINE != "HERA" && $MY_MACHINE != "hera" ]]; then - setpdy.sh - . 
./PDY -fi - ############################################# # determine PDY and cyc for previous cycle ############################################# -cdate=`${NDATE} -6 ${PDY}${cyc}` +cdate=$(${NDATE} -6 ${PDY}${cyc}) echo 'pdate = ${pdate}' -export P_PDY=`echo ${cdate} | cut -c1-8` -export p_cyc=`echo ${cdate} | cut -c9-10` +export P_PDY=$(echo ${cdate} | cut -c1-8) +export p_cyc=$(echo ${cdate} | cut -c9-10) ############################################# # COMOUT - WHERE GSI OUTPUT RESIDES # TANKverf - WHERE OUTPUT DATA WILL RESIDE ############################################# export TANKverf=${TANKverf:-$(compath.py ${envir}/${NET}/${gfs_ver})} -export TANKverf_rad=${TANKverf_rad:-${TANKverf}/${RUN}.${PDY}/${cyc}/$COMPONENT/radmon} -export TANKverf_radM1=${TANKverf_radM1:-${TANKverf}/${RUN}.${P_PDY}/${p_cyc}/$COMPONENT/radmon} +export TANKverf_rad=${TANKverf_rad:-${TANKverf}/${RUN}.${PDY}/${cyc}/${COMPONENT}/radmon} +export TANKverf_radM1=${TANKverf_radM1:-${TANKverf}/${RUN}.${P_PDY}/${p_cyc}/${COMPONENT}/radmon} export COM_IN=${COM_IN:-$(compath.py ${envir}/${NET}/${gfs_ver})} -export COMIN=${COMIN:-${COM_IN}/${RUN}.${PDY}/${cyc}/$COMPONENT} +export COMIN=${COMIN:-${COM_IN}/${RUN}.${PDY}/${cyc}/${COMPONENT}} ################################ # backwards compatibility for -# gfs v15 which doesn't have -# a $COMPONENT in output path +# gfs v15 which doesn't have +# a $COMPONENT in output path ################################ if [[ ! -d ${COMIN} ]]; then export COMIN=${COM_IN}/${RUN}.${PDY}/${cyc} fi -mkdir -p -m 775 $TANKverf_rad - -env +mkdir -p -m 775 ${TANKverf_rad} ######################################## # Set necessary environment variables ######################################## export RAD_AREA=${RAD_AREA:-glb} -export biascr=${biascr:-$COMIN/gdas.t${cyc}z.abias} -export radstat=${radstat:-$COMIN/gdas.t${cyc}z.radstat} +export biascr=${biascr:-${COMIN}/gdas.t${cyc}z.abias} +export radstat=${radstat:-${COMIN}/gdas.t${cyc}z.radstat} echo " " echo "JOB HAS STARTED" @@ -115,8 +88,8 @@ echo " " ${RADMONSH:-${SCRgfs}/exgdas_atmos_verfrad.sh} ${PDY} ${cyc} err=$? -if [[ $err -ne 0 ]] ; then - exit $err +if [[ ${err} -ne 0 ]] ; then + exit ${err} else echo " " echo "JOB HAS COMPLETED NORMALLY" @@ -127,10 +100,8 @@ fi # Remove the Working Directory ################################ KEEPDATA=${KEEPDATA:-YES} -cd $DATAROOT -if [ ${KEEPDATA} = NO ] ; then - rm -rf $RAD_DATA_IN +cd ${DATAROOT} +if [ ${KEEPDATA} = NO ] ; then + rm -rf ${RAD_DATA_IN} fi -date - diff --git a/jobs/JGDAS_ATMOS_VMINMON b/jobs/JGDAS_ATMOS_VMINMON index d3ce40fafc..67f50f5c1c 100755 --- a/jobs/JGDAS_ATMOS_VMINMON +++ b/jobs/JGDAS_ATMOS_VMINMON @@ -1,32 +1,16 @@ -#!/bin/sh +#! 
/usr/bin/env bash + ########################################################### # GDAS Minimization Monitor (MinMon) job ########################################################### -set -xa -echo `date` $0 `date -u` begin -export PS4='$SECONDS + ' +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "vrfy" -c "base vrfy" -############################### -# Specify NET, RUN, and COMPONENT name -############################## -export NET=${NET:-gfs} -export RUN=${RUN:-gdas} -export COMPONENT=${COMPONENT:-atmos} +export COMPONENT="atmos" ########################################################### # obtain unique process id (pid) and make temp directories ########################################################### -export pid=$$ -export outid=${outid:-"LL$job"} - -export DATA=${DATA:-${DATAROOT}/${jobid:?}} -mkdir -p $DATA -cd $DATA - - -########################################################### -# obtain unique process id (pid) and make temp directories -########################################################### export MINMON_SUFFIX=${MINMON_SUFFIX:-${NET}} export m_job=${m_job:-${MINMON_SUFFIX}_mmDE} @@ -35,32 +19,24 @@ export m_job=${m_job:-${MINMON_SUFFIX}_mmDE} # Specify Package Areas ############################################## export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export SCRgfs=${SCRgfs:-$HOMEgfs/scripts} +export SCRgfs=${SCRgfs:-${HOMEgfs}/scripts} -export M_FIXgdas=${M_FIXgdas:-$HOMEgfs/fix/gdas} +export M_FIXgdas=${M_FIXgdas:-${HOMEgfs}/fix/gdas} export HOMEminmon=${HOMEminmon:-${HOMEgfs}} -export EXECminmon=${EXECminmon:-$HOMEminmon/exec} -export USHminmon=${USHminmon:-$HOMEminmon/ush} - - -############################################# -# Run setpdy and initialize PDY variables -############################################# -export cycle=t${cyc}z -setpdy.sh -. ./PDY +export EXECminmon=${EXECminmon:-${HOMEminmon}/exec} +export USHminmon=${USHminmon:-${HOMEminmon}/ush} ############################################# # determine PDY and cyc for previous cycle ############################################# -cdate=`${NDATE} -6 ${PDY}${cyc}` +cdate=$(${NDATE} -6 ${PDY}${cyc}) echo 'pdate = ${pdate}' -export P_PDY=`echo ${cdate} | cut -c1-8` -export p_cyc=`echo ${cdate} | cut -c9-10` +export P_PDY=$(echo ${cdate} | cut -c1-8) +export p_cyc=$(echo ${cdate} | cut -c9-10) ############################################# @@ -71,9 +47,9 @@ export COM_IN=${COM_IN:-$(compath.py ${envir}/${NET}/${gfs_ver})} export M_TANKverf=${M_TANKverf:-${COM_IN}/${RUN}.${PDY}/${cyc}/${COMPONENT}/minmon} export M_TANKverfM1=${M_TANKverfM1:-${COM_IN}/${RUN}.${P_PDY}/${p_cyc}/${COMPONENT}/minmon} -export COMIN=${COMIN:-$COM_IN/${RUN}.${PDY}/${cyc}/$COMPONENT} +export COMIN=${COMIN:-${COM_IN}/${RUN}.${PDY}/${cyc}/${COMPONENT}} -mkdir -p -m 775 $M_TANKverf +mkdir -p -m 775 ${M_TANKverf} @@ -86,9 +62,9 @@ export gsistat=${gsistat:-${COMIN}/gdas.t${cyc}z.gsistat} ######################################################## # Execute the script. -${GMONSH:-$SCRgfs/exgdas_atmos_vminmon.sh} ${PDY} ${cyc} +${GMONSH:-${SCRgfs}/exgdas_atmos_vminmon.sh} ${PDY} ${cyc} err=$? -[[ $err -ne 0 ]] && exit $err +[[ ${err} -ne 0 ]] && exit ${err} ################################ @@ -96,9 +72,8 @@ err=$? 
################################ KEEPDATA=${KEEPDATA:-NO} cd ${DATAROOT} - if [ ${KEEPDATA} = NO ] ; then rm -rf ${DATA} fi - +exit 0 diff --git a/jobs/JGFS_ATMOS_VMINMON b/jobs/JGFS_ATMOS_VMINMON index 3b43b385c3..01f2d3516c 100755 --- a/jobs/JGFS_ATMOS_VMINMON +++ b/jobs/JGFS_ATMOS_VMINMON @@ -1,32 +1,21 @@ -#!/bin/sh +#! /usr/bin/env bash + ########################################################### # GFS Minimization Monitor (MinMon) job ########################################################### -set -xa -echo `date` $0 `date -u` begin -export PS4='$SECONDS + ' +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "vrfy" -c "base vrfy" + ############################### # Specify NET and RUN name ############################## -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} -export COMPONENT=${COMPONENT:-atmos} +export COMPONENT="atmos" ########################################################### # obtain unique process id (pid) and make temp directories ########################################################### -export pid=$$ -export outid=${outid:-"LL$job"} -export DATA=${DATA:-${DATAROOT}/${jobid:?}} -mkdir -p $DATA -cd $DATA - - -########################################################### -# obtain unique process id (pid) and make temp directories -########################################################### export MINMON_SUFFIX=${MINMON_SUFFIX:-GFS} export m_job=${m_job:-${MINMON_SUFFIX}_mmDE} @@ -34,32 +23,23 @@ export m_job=${m_job:-${MINMON_SUFFIX}_mmDE} ############################################## # Specify Package Areas ############################################## -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export SCRgfs=${SCRgfs:-$HOMEgfs/scripts} -export M_FIXgfs=${M_FIXgfs:-$HOMEgfs/fix/product} +export SCRgfs=${SCRgfs:-${HOMEgfs}/scripts} +export M_FIXgfs=${M_FIXgfs:-${HOMEgfs}/fix/product} export HOMEminmon=${HOMEminmon:-${HOMEgfs}} -export EXECminmon=${EXECminmon:-$HOMEminmon/exec} -export USHminmon=${USHminmon:-$HOMEminmon/ush} - - -############################################# -# Run setpdy and initialize PDY variables -############################################# -export cycle=t${cyc}z -setpdy.sh -. ./PDY +export EXECminmon=${EXECminmon:-${HOMEminmon}/exec} +export USHminmon=${USHminmon:-${HOMEminmon}/ush} ############################################# # determine PDY and cyc for previous cycle ############################################# -cdate=`${NDATE} -6 ${PDY}${cyc}` +cdate=$(${NDATE} -6 ${PDY}${cyc}) echo 'pdate = ${pdate}' -export P_PDY=`echo ${cdate} | cut -c1-8` -export p_cyc=`echo ${cdate} | cut -c9-10` +export P_PDY=$(echo ${cdate} | cut -c1-8) +export p_cyc=$(echo ${cdate} | cut -c9-10) ############################################# @@ -70,10 +50,10 @@ export COM_IN=${COM_IN:-$(compath.py ${envir}/${NET}/${gfs_ver})} M_TANKverf=${M_TANKverf:-${COM_IN}/${RUN}.${PDY}/${cyc}/${COMPONENT}/minmon} export M_TANKverfM1=${M_TANKverfM1:-${COM_IN}/${RUN}.${P_PDY}/${p_cyc}/${COMPONENT}/minmon} -export COMIN=${COMIN:-$COM_IN/${RUN}.${PDY}/${cyc}/$COMPONENT} +export COMIN=${COMIN:-${COM_IN}/${RUN}.${PDY}/${cyc}/${COMPONENT}} + +mkdir -p -m 775 ${M_TANKverf} -mkdir -p -m 775 $M_TANKverf - ######################################## # Set necessary environment variables @@ -84,9 +64,9 @@ export gsistat=${gsistat:-${COMIN}/gfs.t${cyc}z.gsistat} ######################################################## # Execute the script. 
-${GMONSH:-$SCRgfs/exgfs_atmos_vminmon.sh} ${PDY} ${cyc} +${GMONSH:-${SCRgfs}/exgfs_atmos_vminmon.sh} ${PDY} ${cyc} err=$? -[[ $err -ne 0 ]] && exit $err +[[ ${err} -ne 0 ]] && exit ${err} ################################ diff --git a/scripts/exgdas_atmos_verfozn.sh b/scripts/exgdas_atmos_verfozn.sh index e681fc55c5..e9a1900085 100755 --- a/scripts/exgdas_atmos_verfozn.sh +++ b/scripts/exgdas_atmos_verfozn.sh @@ -1,44 +1,85 @@ #! /usr/bin/env bash -source "${USHgfs}/preamble.sh" +source "$HOMEgfs/ush/preamble.sh" ################################################################################ -# exgdas_atmos_verfozn.sh +# exgdas_vrfyozn.sh # # This script runs the data extract/validation portion of the Ozone Monitor -# (OznMon) DA package. +# (OznMon) DA package. # ################################################################################ err=0 +#------------------------------------------------------------------------------- +# Set environment +# +export RUN_ENVIR=${RUN_ENVIR:-nco} +export NET=${NET:-gfs} +export RUN=${RUN:-gdas} +export envir=${envir:-prod} +export COMPONENT=${COMPONENT:-atmos} + +# Command line arguments +export PDY=${1:-${PDY:?}} +export cyc=${2:-${cyc:?}} + + +# Other variables +export SATYPE_FILE=${SATYPE_FILE:-$FIXgdas_ozn/gdas_oznmon_satype.txt} +export PDATE=${PDY}${cyc} +export DO_DATA_RPT=${DO_DATA_RPT:-1} +export NCP=${NCP:-/bin/cp} + + +#----------------------------------------------------------------- +# ensure work and TANK dirs exist, verify oznstat is available +# +export OZN_WORK_DIR=${OZN_WORK_DIR:-$(pwd)} + +if [[ ! -d ${OZN_WORK_DIR} ]]; then + mkdir $OZN_WORK_DIR +fi +cd $OZN_WORK_DIR + +if [[ ! -d ${TANKverf_ozn} ]]; then + mkdir -p $TANKverf_ozn +fi + +if [[ -s ${oznstat} ]]; then + echo ${oznstat} is available +fi + + + data_available=0 if [[ -s ${oznstat} ]]; then - data_available=1 + data_available=1 #------------------------------------------------------------------ - # Copy data files file to local data directory. - # Untar oznstat file. + # Copy data files file to local data directory. + # Untar oznstat file. #------------------------------------------------------------------ - ${NCP} "${oznstat}" "./oznstat.${PDY}${cyc}" + $NCP $oznstat ./oznstat.$PDATE - tar -xvf "oznstat.${PDY}${cyc}" - rm "oznstat.${PDY}${cyc}" + tar -xvf oznstat.$PDATE + rm oznstat.$PDATE netcdf=0 count=$(ls diag* | grep ".nc4" | wc -l) - if [ "${count}" -gt 0 ] ; then + if [ $count -gt 0 ] ; then netcdf=1 for filenc4 in $(ls diag*nc4.gz); do - file=$(echo "${filenc4}" | cut -d'.' -f1-2).gz - mv "${filenc4}" "${file}" + file=$(echo $filenc4 | cut -d'.' -f1-2).gz + mv $filenc4 $file done fi - + export OZNMON_NETCDF=${netcdf} - "${USHgfs}/ozn_xtrct.sh" + ${HOMEoznmon}/ush/ozn_xtrct.sh err=$? else diff --git a/scripts/exgdas_atmos_verfrad.sh b/scripts/exgdas_atmos_verfrad.sh index bad8715acd..b9cfa701cd 100755 --- a/scripts/exgdas_atmos_verfrad.sh +++ b/scripts/exgdas_atmos_verfrad.sh @@ -1,17 +1,17 @@ #! /usr/bin/env bash -source "${USHgfs}/preamble.sh" +source "$HOMEgfs/ush/preamble.sh" ################################################################################ #### UNIX Script Documentation Block # . . -# Script name: exgdas_atmos_verfrad.sh +# Script name: exgdas_vrfyrad.sh # Script description: Runs data extract/validation for global radiance diag data # # Author: Ed Safford Org: NP23 Date: 2012-01-18 # -# Abstract: This script runs the data extract/validation portion of the -# RadMon package. 
+# Abstract: This script runs the data extract/validation portion of the +# RadMon package. # # Condition codes # 0 - no problem encountered @@ -19,72 +19,120 @@ source "${USHgfs}/preamble.sh" # ################################################################################ -data_available=0 +export VERBOSE=${VERBOSE:-YES} + +export RUN_ENVIR=${RUN_ENVIR:-nco} +export NET=${NET:-gfs} +export RUN=${RUN:-gdas} +export envir=${envir:-prod} +export COMPONENT=${COMPONENT:-atmos} + +# Command line arguments +export PDY=${1:-${PDY:?}} +export cyc=${2:-${cyc:?}} + +# Directories +export COM_IN=${COM_IN:-$(compath.py ${envir}/${NET}/${gfs_ver})} +export COMIN=${COMIN:-$COM_IN/${RUN}.${PDY}/${cyc}/$COMPONENT} + + +# Filenames +export biascr=${biascr:-$COMIN/gdas.t${cyc}z.abias} +export radstat=${radstat:-$COMIN/gdas.t${cyc}z.radstat} +export satype_file=${satype_file:-${FIXgdas}/gdas_radmon_satype.txt} + +# Other variables +export RAD_AREA=${RAD_AREA:-glb} +export MAKE_CTL=${MAKE_CTL:-1} +export MAKE_DATA=${MAKE_DATA:-1} +export USE_ANL=${USE_ANL:-1} +export PDATE=${PDY}${cyc} +export DO_DIAG_RPT=${DO_DIAG_RPT:-1} +export DO_DATA_RPT=${DO_DATA_RPT:-1} +export NCP=${NCP:-/bin/cp} + +########################################################################### +# ensure TANK dir exists, verify radstat and biascr are available +# +if [[ ! -d ${TANKverf_rad} ]]; then + mkdir -p $TANKverf_rad +fi + +if [[ "$VERBOSE" = "YES" ]]; then + if [[ -s ${radstat} ]]; then + echo ${radstat} is available + fi + if [[ -s ${biascr} ]]; then + echo ${biascr} is available + fi +fi +##################################################################### +data_available=0 if [[ -s ${radstat} && -s ${biascr} ]]; then - data_available=1 + data_available=1 #------------------------------------------------------------------ - # Copy data files file to local data directory. - # Untar radstat file. + # Copy data files file to local data directory. + # Untar radstat file. #------------------------------------------------------------------ - ${NCP} "${biascr}" "./biascr.${PDY}${cyc}" - ${NCP} "${radstat}" "./radstat.${PDY}${cyc}" + $NCP $biascr ./biascr.$PDATE + $NCP $radstat ./radstat.$PDATE - tar -xvf "radstat.${PDY}${cyc}" - rm "radstat.${PDY}${cyc}" + tar -xvf radstat.$PDATE + rm radstat.$PDATE #------------------------------------------------------------------ # SATYPE is the list of expected satellite/instrument sources - # in the radstat file. It should be stored in the $TANKverf - # directory. If it isn't there then use the gdas fix copy. In all - # cases write it back out to the radmon.$PDY directory. Add any + # in the radstat file. It should be stored in the $TANKverf + # directory. If it isn't there then use the $FIXgdas copy. In all + # cases write it back out to the radmon.$PDY directory. Add any # new sources to the list before writing back out. #------------------------------------------------------------------ radstat_satype=$(ls d*ges* | awk -F_ '{ print $2 "_" $3 }') - if [[ "${VERBOSE}" = "YES" ]]; then - echo "${radstat_satype}" + if [[ "$VERBOSE" = "YES" ]]; then + echo $radstat_satype fi - echo satype_file = "${satype_file}" - + echo satype_file = $satype_file + #------------------------------------------------------------------ - # Get previous cycle's date, and look for the satype_file. Using - # the previous cycle will get us the previous day's directory if + # Get previous cycle's date, and look for the satype_file. 
Using + # the previous cycle will get us the previous day's directory if # the cycle being processed is 00z. #------------------------------------------------------------------ - if [[ ${cyc} = "00" ]]; then + if [[ $cyc = "00" ]]; then use_tankdir=${TANKverf_radM1} else use_tankdir=${TANKverf_rad} fi - echo satype_file = "${satype_file}" - export SATYPE=$(cat "${satype_file}") - + echo satype_file = $satype_file + export SATYPE=$(cat ${satype_file}) + #------------------------------------------------------------- - # Update the SATYPE if any new sat/instrument was - # found in $radstat_satype. Write the SATYPE contents back + # Update the SATYPE if any new sat/instrument was + # found in $radstat_satype. Write the SATYPE contents back # to $TANKverf/radmon.$PDY. #------------------------------------------------------------- satype_changes=0 - new_satype=${SATYPE} + new_satype=$SATYPE for type in ${radstat_satype}; do - type_count=$(echo "${SATYPE}" | grep "${type}" | wc -l) + test=$(echo $SATYPE | grep $type | wc -l) - if (( type_count == 0 )); then - if [[ "${VERBOSE}" = "YES" ]]; then - echo "Found ${type} in radstat file but not in SATYPE list. Adding it now." + if [[ $test -eq 0 ]]; then + if [[ "$VERBOSE" = "YES" ]]; then + echo "Found $type in radstat file but not in SATYPE list. Adding it now." fi satype_changes=1 - new_satype="${new_satype} ${type}" + new_satype="$new_satype $type" fi done - + #------------------------------------------------------------------ # Rename the diag files and uncompress #------------------------------------------------------------------ @@ -92,45 +140,54 @@ if [[ -s ${radstat} && -s ${biascr} ]]; then for type in ${SATYPE}; do - if (( netcdf == 0 )) && [[ -e "diag_${type}_ges.${PDY}${cyc}.nc4.${Z}" ]]; then + if [[ netcdf -eq 0 && -e diag_${type}_ges.${PDATE}.nc4.${Z} ]]; then netcdf=1 fi - - if [[ $(find . -maxdepth 1 -type f -name "diag_${type}_ges.${PDY}${cyc}*.${Z}" | wc -l) -gt 0 ]]; then - mv "diag_${type}_ges.${PDY}${cyc}"*".${Z}" "${type}.${Z}" - ${UNCOMPRESS} "./${type}.${Z}" + + if [[ $(find . -maxdepth 1 -type f -name "diag_${type}_ges.${PDATE}*.${Z}" | wc -l) -gt 0 ]]; then + mv diag_${type}_ges.${PDATE}*.${Z} ${type}.${Z} + ${UNCOMPRESS} ./${type}.${Z} else - echo "WARNING: diag_${type}_ges.${PDY}${cyc}*.${Z} not available, skipping" + echo "WARNING: diag_${type}_ges.${PDATE}*.${Z} not available, skipping" fi - - if [[ ${USE_ANL} -eq 1 ]]; then - if [[ $(find . -maxdepth 1 -type f -name "diag_${type}_anl.${PDY}${cyc}*.${Z}" | wc -l) -gt 0 ]]; then - mv "diag_${type}_anl.${PDY}${cyc}"*".${Z}" "${type}_anl.${Z}" - ${UNCOMPRESS} "./${type}_anl.${Z}" + + if [[ $USE_ANL -eq 1 ]]; then + if [[ $(find . -maxdepth 1 -type f -name "diag_${type}_anl.${PDATE}*.${Z}" | wc -l) -gt 0 ]]; then + mv diag_${type}_anl.${PDATE}*.${Z} ${type}_anl.${Z} + ${UNCOMPRESS} ./${type}_anl.${Z} else - echo "WARNING: diag_${type}_anl.${PDY}${cyc}*.${Z} not available, skipping" + echo "WARNING: diag_${type}_anl.${PDATE}*.${Z} not available, skipping" fi fi done - export RADMON_NETCDF=${netcdf} + export RADMON_NETCDF=$netcdf #------------------------------------------------------------------ - # Run the child scripts. + # Run the child sccripts. #------------------------------------------------------------------ - "${USHgfs}/radmon_verf_angle.sh" + ${USHradmon}/radmon_verf_angle.sh ${PDATE} rc_angle=$? - "${USHgfs}/radmon_verf_bcoef.sh" + ${USHradmon}/radmon_verf_bcoef.sh ${PDATE} rc_bcoef=$? 
- "${USHgfs}/radmon_verf_bcor.sh" + ${USHradmon}/radmon_verf_bcor.sh "${PDATE}" rc_bcor=$? - "${USHgfs}/radmon_verf_time.sh" + ${USHradmon}/radmon_verf_time.sh "${PDATE}" rc_time=$? + #-------------------------------------- + # optionally run clean_tankdir script + # + if [[ ${CLEAN_TANKVERF:-0} -eq 1 ]]; then + "${USHradmon}/clean_tankdir.sh" glb 60 + rc_clean_tankdir=$? + echo "rc_clean_tankdir = $rc_clean_tankdir" + fi + fi @@ -141,23 +198,23 @@ fi err=0 if [[ ${data_available} -ne 1 ]]; then err=1 -elif [[ ${rc_angle} -ne 0 ]]; then - err=${rc_angle} -elif [[ ${rc_bcoef} -ne 0 ]]; then - err=${rc_bcoef} -elif [[ ${rc_bcor} -ne 0 ]]; then - err=${rc_bcor} -elif [[ ${rc_time} -ne 0 ]]; then - err=${rc_time} +elif [[ $rc_angle -ne 0 ]]; then + err=$rc_angle +elif [[ $rc_bcoef -ne 0 ]]; then + err=$rc_bcoef +elif [[ $rc_bcor -ne 0 ]]; then + err=$rc_bcor +elif [[ $rc_time -ne 0 ]]; then + err=$rc_time fi ##################################################################### # Restrict select sensors and satellites export CHGRP_CMD=${CHGRP_CMD:-"chgrp ${group_name:-rstprod}"} rlist="saphir" -for rtype in ${rlist}; do - if compgen -G "${TANKverf_rad}/"*"${rtype}"* > /dev/null; then - ${CHGRP_CMD} "${TANKverf_rad}/"*"${rtype}"* +for rtype in $rlist; do + if compgen -G "$TANKverf_rad/*${rtype}*" > /dev/null; then + ${CHGRP_CMD} "${TANKverf_rad}"/*${rtype}* fi done diff --git a/scripts/exgdas_atmos_vminmon.sh b/scripts/exgdas_atmos_vminmon.sh index f5087e41d5..5d54174bf7 100755 --- a/scripts/exgdas_atmos_vminmon.sh +++ b/scripts/exgdas_atmos_vminmon.sh @@ -1,4 +1,7 @@ -#/bin/sh +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" + ################################################################################ #### UNIX Script Documentation Block # . . @@ -20,15 +23,6 @@ ######################################## # Set environment ######################################## -export VERBOSE=${VERBOSE:-"NO"} -if [[ "$VERBOSE" = "YES" ]] -then - set -x -fi - -export scr=exgdas_vrfyminmon.sh - - export RUN_ENVIR=${RUN_ENVIR:-nco} export NET=${NET:-gfs} export RUN=${RUN:-gdas} @@ -121,11 +115,5 @@ elif [[ $rc_reduct -ne 0 ]]; then err=$rc_reduct fi -if [[ "$VERBOSE" = "YES" ]]; then - echo "end exgdas_vrfminmon.sh, exit value = ${err}" -fi - - -set +x exit ${err} diff --git a/scripts/exgfs_atmos_vminmon.sh b/scripts/exgfs_atmos_vminmon.sh index eb0eac23c5..4311878a03 100755 --- a/scripts/exgfs_atmos_vminmon.sh +++ b/scripts/exgfs_atmos_vminmon.sh @@ -1,4 +1,7 @@ -#/bin/sh +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" + ################################################################################ #### UNIX Script Documentation Block # . . @@ -20,14 +23,6 @@ ######################################## # Set environment ######################################## -export VERBOSE=${VERBOSE:-"NO"} -if [[ "$VERBOSE" = "YES" ]] -then - set -x -fi - -export scr=exgfs_vrfyminmon.sh - export RUN_ENVIR=${RUN_ENVIR:-nco} export NET=${NET:-gfs} export RUN=${RUN:-gfs} @@ -117,11 +112,5 @@ elif [[ $rc_reduct -ne 0 ]]; then err=$rc_reduct fi -if [[ "$VERBOSE" = "YES" ]]; then - echo "end exgfs_vrfminmon.sh, exit value = ${err}" -fi - - -set +x exit ${err} diff --git a/scripts/exglobal_atmos_vminmon.sh b/scripts/exglobal_atmos_vminmon.sh deleted file mode 100755 index b4307c8af9..0000000000 --- a/scripts/exglobal_atmos_vminmon.sh +++ /dev/null @@ -1,76 +0,0 @@ -#! 
/usr/bin/env bash - -source "${USHgfs}/preamble.sh" - -################################################################################ -#### UNIX Script Documentation Block -# . . -# Script name: exglobal_atmos_vminmon.sh -# Script description: Runs data extract/validation for GSI normalization diag data -# -# Author: Ed Safford Org: NP23 Date: 2015-04-10 -# -# Abstract: This script runs the data extract/validation portion of the -# MinMon package. -# -# Condition codes -# 0 - no problem encountered -# >0 - some problem encountered -# -################################################################################ - -data_available=0 - -if [[ -s ${gsistat} ]]; then - - data_available=1 - - #----------------------------------------------------------------------- - # Copy the $MINMON_SUFFIX.gnorm_data.txt file to the working directory - # It's ok if it doesn't exist; we'll create a new one if needed. - # - # Note: The logic below is to accomodate two different data storage - # methods. Some parallels (and formerly ops) dump all MinMon data for - # a given day in the same directory (if condition). Ops now separates - # data into ${cyc} subdirectories (elif condition). - #----------------------------------------------------------------------- - if [[ -s ${M_TANKverf}/gnorm_data.txt ]]; then - ${NCP} "${M_TANKverf}/gnorm_data.txt" gnorm_data.txt - elif [[ -s ${M_TANKverfM1}/gnorm_data.txt ]]; then - ${NCP} "${M_TANKverfM1}/gnorm_data.txt" gnorm_data.txt - fi - - - #------------------------------------------------------------------ - # Run the child sccripts. - #------------------------------------------------------------------ - "${USHgfs}/minmon_xtrct_costs.pl" "${MINMON_SUFFIX}" "${PDY}" "${cyc}" "${gsistat}" - rc_costs=$? - echo "rc_costs = ${rc_costs}" - - "${USHgfs}/minmon_xtrct_gnorms.pl" "${MINMON_SUFFIX}" "${PDY}" "${cyc}" "${gsistat}" - rc_gnorms=$? - echo "rc_gnorms = ${rc_gnorms}" - - "${USHgfs}/minmon_xtrct_reduct.pl" "${MINMON_SUFFIX}" "${PDY}" "${cyc}" "${gsistat}" - rc_reduct=$? - echo "rc_reduct = ${rc_reduct}" - -fi - -##################################################################### -# Postprocessing - -err=0 -if [[ ${data_available} -ne 1 ]]; then - err=1 -elif [[ ${rc_costs} -ne 0 ]]; then - err=${rc_costs} -elif [[ ${rc_gnorms} -ne 0 ]]; then - err=${rc_gnorms} -elif [[ ${rc_reduct} -ne 0 ]]; then - err=${rc_reduct} -fi - -exit "${err}" - diff --git a/ush/jjob_header.sh b/ush/jjob_header.sh new file mode 100644 index 0000000000..45fa6402ae --- /dev/null +++ b/ush/jjob_header.sh @@ -0,0 +1,115 @@ +#! /usr/bin/env bash +# +# Universal header for global j-jobs +# +# Sets up and completes actions common to all j-jobs: +# - Creates and moves to $DATA after removing any +# existing one unless $WIPE_DATA is set to "NO" +# - Runs `setpdy.sh` +# - Sources configs provided as arguments +# - Sources machine environment script +# - Defines a few other variables +# +# The job name for the environment files should be passed +# in using the `-e` option (required). Any config files +# to be sourced should be passed in as an argument to +# the `-c` option. For example: +# ``` +# jjob_header.sh -e "fcst" -c "base fcst" +# ``` +# Will source `config.base` and `config.fcst`, then pass +# `fcst` to the ${machine}.env script. 
+# +# Script requires the following variables to already be +# defined in the environment: +# - $HOMEgfs +# - $DATAROOT (unless $DATA is overriden) +# - $jobid +# - $PDY +# - $cyc +# - $machine +# +# Additionally, there are a couple of optional settings that +# can be set before calling the script: +# - $EXPDIR : Override the default $EXPDIR +# [default: ${HOMEgfs}/parm/config] +# - $DATA : Override the default $DATA location +# [default: ${DATAROOT}/${jobid}] +# - $WIPE_DATA : Set whether to delete any existing $DATA +# [default: "YES"] +# - $pid : Override the default process id +# [default: $$] +# + +OPTIND=1 +while getopts "c:e:" option; do + case "${option}" in + c) read -ra configs <<< "${OPTARG}" ;; + e) env_job=${OPTARG} ;; + :) + echo "FATAL [${BASH_SOURCE[0]}]: ${option} requires an argument" + exit 1 + ;; + *) + echo "FATAL [${BASH_SOURCE[0]}]: Unrecognized option: ${option}" + exit 1 + ;; + esac +done +shift $((OPTIND-1)) + +if [[ -z ${env_job} ]]; then + echo "FATAL [${BASH_SOURCE[0]}]: Must specify a job name with -e" + exit 1 +fi + +############################################## +# make temp directory +############################################## +export DATA=${DATA:-"${DATAROOT}/${jobid}"} +if [[ ${WIPE_DATA:-YES} == "YES" ]]; then + rm -rf "${DATA}" +fi +mkdir -p "${DATA}" +cd "${DATA}" || ( echo "FATAL [${BASH_SOURCE[0]}]: ${DATA} does not exist"; exit 1 ) + + +############################################## +# Run setpdy and initialize PDY variables +############################################## +export cycle="t${cyc}z" +setpdy.sh +source ./PDY + + +############################################## +# Determine Job Output Name on System +############################################## +export pid="${pid:-$$}" +export pgmout="OUTPUT.${pid}" +export pgmerr=errfile + + +############################# +# Source relevant config files +############################# +export EXPDIR="${EXPDIR:-${HOMEgfs}/parm/config}" +for config in "${configs[@]:-''}"; do + source "${EXPDIR}/config.${config}" + status=$? + if (( status != 0 )); then + echo "FATAL [${BASH_SOURCE[0]}]: Unable to load config config.${config}" + exit "${status}" + fi +done + + +########################################## +# Source machine runtime environment +########################################## +source "${HOMEgfs}/env/${machine}.env" "${env_job}" +status=$? +if (( status != 0 )); then + echo "FATAL [${BASH_SOURCE[0]}]: Error while sourcing machine environment ${machine}.env for job ${env_job}" + exit "${status}" +fi diff --git a/ush/minmon_xtrct_costs.pl b/ush/minmon_xtrct_costs.pl index c56ac3bdad..1b5d490102 100755 --- a/ush/minmon_xtrct_costs.pl +++ b/ush/minmon_xtrct_costs.pl @@ -22,8 +22,8 @@ # #--------------------------- -if ($#ARGV != 3 ) { - print "usage: minmon_xtrct_costs.pl SUFFIX PDY cyc infile\n"; +if ($#ARGV != 4 ) { + print "usage: minmon_xtrct_costs.pl SUFFIX PDY cyc infile jlogfile\n"; exit; } my $suffix = $ARGV[0]; @@ -31,6 +31,7 @@ my $pdy = $ARGV[1]; my $cyc = $ARGV[2]; my $infile = $ARGV[3]; +my $jlogfile = $ARGV[4]; my $use_costterms = 0; my $no_data = 0.00; @@ -207,7 +208,7 @@ #-------------------------- # move files to $M_TANKverf #-------------------------- - my $tankdir = $ENV{"M_TANKverf"}; + my $tankdir = $ENV{"M_TANKverfM0"}; if(! 
-d $tankdir) { system( "mkdir -p $tankdir" ); } diff --git a/ush/minmon_xtrct_gnorms.pl b/ush/minmon_xtrct_gnorms.pl index ac83c08cd3..ecd44232da 100755 --- a/ush/minmon_xtrct_gnorms.pl +++ b/ush/minmon_xtrct_gnorms.pl @@ -185,8 +185,8 @@ sub updateGnormData { # #--------------------------------------------------------------------------- -if ($#ARGV != 3 ) { - print "usage: minmon_xtrct_gnorms.pl SUFFIX pdy cyc infile \n"; +if ($#ARGV != 4 ) { + print "usage: minmon_xtrct_gnorms.pl SUFFIX pdy cyc infile jlogfile\n"; exit; } @@ -195,6 +195,7 @@ sub updateGnormData { my $pdy = $ARGV[1]; my $cyc = $ARGV[2]; my $infile = $ARGV[3]; +my $jlogfile = $ARGV[4]; my $scr = "minmon_xtrct_gnorms.pl"; @@ -413,7 +414,7 @@ sub updateGnormData { #-------------------------- # move files to $M_TANKverf #-------------------------- - my $tankdir = $ENV{"M_TANKverf"}; + my $tankdir = $ENV{"M_TANKverfM0"}; if(! -d $tankdir) { system( "mkdir -p $tankdir" ); } diff --git a/ush/minmon_xtrct_reduct.pl b/ush/minmon_xtrct_reduct.pl index cc5da86af8..f6037d3f32 100755 --- a/ush/minmon_xtrct_reduct.pl +++ b/ush/minmon_xtrct_reduct.pl @@ -9,18 +9,20 @@ # reduction.ieee_d files ready for GrADS use. #--------------------------------------------------------------------------- -if ($#ARGV != 3 ) { - print "usage: minmon_xtrct_reduct.pl SUFFIX pdy cyc infile\n"; +if ($#ARGV != 4 ) { + print "usage: minmon_xtrct_reduct.pl SUFFIX pdy cyc infile jlogfile\n"; print " suffix is data source identifier\n"; print " pdy is YYYYMMDD of the cycle to be processed\n"; print " cyc is HH of the cycle to be processed\n"; print " infile is the data file containing the reduction stats\n"; + print " jlogfile is the job log file\n"; exit; } my $suffix = $ARGV[0]; my $pdy = $ARGV[1]; my $cyc = $ARGV[2]; my $infile = $ARGV[3]; +my $jlogfile = $ARGV[4]; my $scr = "minmon_xtrct_reduct.pl"; print "$scr has started\n"; @@ -70,7 +72,7 @@ #---------------------------- # copy outfile to $M_TANKverf #---------------------------- - my $tankdir = $ENV{"M_TANKverf"}; + my $tankdir = $ENV{"M_TANKverfM0"}; if(! -d $tankdir) { system( "mkdir -p $tankdir" ); } diff --git a/ush/ozn_xtrct.sh b/ush/ozn_xtrct.sh index 0c623bf03c..3f6b3fed19 100755 --- a/ush/ozn_xtrct.sh +++ b/ush/ozn_xtrct.sh @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "${USHgfs}/preamble.sh" +source "$HOMEgfs/ush/preamble.sh" #------------------------------------------------------------------ # ozn_xtrct.sh @@ -11,9 +11,9 @@ source "${USHgfs}/preamble.sh" # $TANKverf_ozn. # # Calling scripts must define: -# $TANKverf_ozn -# $PDY -# $cyc +# $TANKverf_ozn +# $HOMEoznmon +# $PDATE # # Return values are # 0 = normal @@ -28,7 +28,7 @@ source "${USHgfs}/preamble.sh" # gdas_oznmon_satype.txt to $avail_satype which is # determined by the contents of the oznstat file. # Report any missing diag files in a file named -# bad_diag.$PDY$cyc +# bad_diag.$PDATE # check_diag_files() { pdate=$1 @@ -40,10 +40,10 @@ check_diag_files() { echo ""; echo ""; echo "--> check_diag_files" for type in ${found_satype}; do - len_check=$(echo "${avail_satype}" | grep "${type}" | wc -c) + len_check=$(echo ${avail_satype} | grep ${type} | wc -c) if [[ ${len_check} -le 1 ]]; then - echo "missing diag file -- diag_${type}_ges.${pdate}.gz not found" >> "./${out_file}" + echo "missing diag file -- diag_${type}_ges.${pdate}.gz not found " >> ./${out_file} fi done @@ -58,13 +58,13 @@ nregion=${nregion:-6} DO_DATA_RPT=${DO_DATA_RPT:-0} netcdf_boolean=".false." 
-if [[ ${OZNMON_NETCDF} -eq 1 ]]; then +if [[ $OZNMON_NETCDF -eq 1 ]]; then netcdf_boolean=".true." fi OZNMON_NEW_HDR=${OZNMON_NEW_HDR:-0} new_hdr="F" -if [[ ${OZNMON_NEW_HDR} -eq 1 ]]; then +if [[ $OZNMON_NEW_HDR -eq 1 ]]; then new_hdr="T" fi @@ -72,19 +72,19 @@ fi # if VALIDATE_DATA then locate and untar base file # validate=".FALSE." -if [[ ${VALIDATE_DATA} -eq 1 ]]; then - if [[ ! -e ${ozn_val_file} && ! -h ${ozn_val_file} ]]; then - echo "WARNING: VALIDATE_DATA set to 1, but unable to locate ${ozn_val_file}" +if [[ $VALIDATE_DATA -eq 1 ]]; then + if [[ ! -e $ozn_val_file && ! -h $ozn_val_file ]]; then + echo "WARNING: VALIDATE_DATA set to 1, but unable to locate $ozn_val_file" echo " Setting VALIDATE_DATA to 0/OFF" VALIDATE_DATA=0 else validate=".TRUE." - val_file=$(basename "${ozn_val_file}") - ${NCP} "${ozn_val_file}" "${val_file}" - tar -xvf "${val_file}" + val_file=$(basename ${ozn_val_file}) + ${NCP} $ozn_val_file $val_file + tar -xvf $val_file fi fi -echo "VALIDATE_DATA, validate = ${VALIDATE_DATA}, ${validate} " +echo "VALIDATE_DATA, validate = $VALIDATE_DATA, $validate " @@ -106,8 +106,8 @@ avail_satype=$(ls -1 d*ges* | sed -e 's/_/ /g;s/\./ /' | gawk '{ print $2 "_" $3 if [[ ${DO_DATA_RPT} -eq 1 ]]; then if [[ -e ${SATYPE_FILE} ]]; then - satype=$(cat "${SATYPE_FILE}") - check_diag_files "${PDY}${cyc}" "${satype}" "${avail_satype}" + satype=$(cat ${SATYPE_FILE}) + check_diag_files ${PDATE} "${satype}" "${avail_satype}" else echo "WARNING: missing ${SATYPE_FILE}" fi @@ -119,7 +119,7 @@ if [[ ${len_satype} -le 1 ]]; then satype=${avail_satype} fi -echo "${satype}" +echo ${satype} len_satype=$(echo -n "${satype}" | wc -c) @@ -132,12 +132,12 @@ else #-------------------------------------------------------------------- # Copy extraction programs to working directory # - ${NCP} "${EXECgfs}/oznmon_time.x" ./oznmon_time.x + ${NCP} ${HOMEoznmon}/exec/oznmon_time.x ./oznmon_time.x if [[ ! -e oznmon_time.x ]]; then iret=2 exit ${iret} fi - ${NCP} "${EXECgfs}/oznmon_horiz.x" ./oznmon_horiz.x + ${NCP} ${HOMEoznmon}/exec/oznmon_horiz.x ./oznmon_horiz.x if [[ ! -e oznmon_horiz.x ]]; then iret=3 exit ${iret} @@ -149,15 +149,15 @@ else # for ptype in ${ozn_ptype}; do - iyy="${PDY:0:4}" - imm="${PDY:4:2}" - idd="${PDY:6:2}" - ihh=${cyc} + iyy=$(echo ${PDATE} | cut -c1-4) + imm=$(echo ${PDATE} | cut -c5-6) + idd=$(echo ${PDATE} | cut -c7-8) + ihh=$(echo ${PDATE} | cut -c9-10) for type in ${avail_satype}; do - if [[ -f "diag_${type}_${ptype}.${PDY}${cyc}.gz" ]]; then - mv "diag_${type}_${ptype}.${PDY}${cyc}.gz" "${type}.${ptype}.gz" - gunzip "./${type}.${ptype}.gz" + if [[ -f "diag_${type}_${ptype}.${PDATE}.gz" ]]; then + mv diag_${type}_${ptype}.${PDATE}.gz ${type}.${ptype}.gz + gunzip ./${type}.${ptype}.gz echo "processing ptype, type: ${ptype}, ${type}" rm -f input @@ -188,17 +188,17 @@ EOF echo "oznmon_time.x HAS STARTED ${type}" - ./oznmon_time.x < input > "stdout.time.${type}.${ptype}" + ./oznmon_time.x < input > stdout.time.${type}.${ptype} echo "oznmon_time.x HAS ENDED ${type}" if [[ ! 
-d ${TANKverf_ozn}/time ]]; then - mkdir -p "${TANKverf_ozn}/time" + mkdir -p ${TANKverf_ozn}/time fi - ${NCP} "${type}.${ptype}.ctl" "${TANKverf_ozn}/time/" - ${NCP} "${type}.${ptype}.${PDY}${cyc}.ieee_d" "${TANKverf_ozn}/time/" + $NCP ${type}.${ptype}.ctl ${TANKverf_ozn}/time/ + $NCP ${type}.${ptype}.${PDATE}.ieee_d ${TANKverf_ozn}/time/ - ${NCP} bad* "${TANKverf_ozn}/time/" + $NCP bad* ${TANKverf_ozn}/time/ rm -f input @@ -219,17 +219,17 @@ EOF echo "oznmon_horiz.x HAS STARTED ${type}" - ./oznmon_horiz.x < input > "stdout.horiz.${type}.${ptype}" + ./oznmon_horiz.x < input > stdout.horiz.${type}.${ptype} echo "oznmon_horiz.x HAS ENDED ${type}" if [[ ! -d ${TANKverf_ozn}/horiz ]]; then - mkdir -p "${TANKverf_ozn}/horiz" + mkdir -p ${TANKverf_ozn}/horiz fi - ${NCP} "${type}.${ptype}.ctl" "${TANKverf_ozn}/horiz/" + $NCP ${type}.${ptype}.ctl ${TANKverf_ozn}/horiz/ - ${COMPRESS} "${type}.${ptype}.${PDY}${cyc}.ieee_d" - ${NCP} "${type}.${ptype}.${PDY}${cyc}.ieee_d.${Z}" "${TANKverf_ozn}/horiz/" + $COMPRESS ${type}.${ptype}.${PDATE}.ieee_d + $NCP ${type}.${ptype}.${PDATE}.ieee_d.${Z} ${TANKverf_ozn}/horiz/ echo "finished processing ptype, type: ${ptype}, ${type}" @@ -244,11 +244,18 @@ EOF tar -cvf stdout.horiz.tar stdout.horiz* ${COMPRESS} stdout.horiz.tar - ${NCP} "stdout.horiz.tar.${Z}" "${TANKverf_ozn}/horiz/" + ${NCP} stdout.horiz.tar.${Z} ${TANKverf_ozn}/horiz/ tar -cvf stdout.time.tar stdout.time* ${COMPRESS} stdout.time.tar - ${NCP} "stdout.time.tar.${Z}" "${TANKverf_ozn}/time/" + ${NCP} stdout.time.tar.${Z} ${TANKverf_ozn}/time/ fi +#------------------------------------------------------- +# Conditionally remove data files older than 40 days +# +if [[ ${CLEAN_TANKDIR:-0} -eq 1 ]]; then + ${HOMEoznmon}/ush/clean_tankdir.sh glb 40 +fi + exit ${iret} diff --git a/ush/preamble.sh b/ush/preamble.sh new file mode 100644 index 0000000000..3cdced6512 --- /dev/null +++ b/ush/preamble.sh @@ -0,0 +1,92 @@ +#! /usr/bin/env bash + +####### +# Preamble script to be SOURCED at the beginning of every script. Sets +# useful PS4 and optionally turns on set -x and set -eu. Also sets up +# crude script timing and provides a postamble that runs on exit. +# +# Syntax: +# preamble.sh [id] +# +# Aruguments: +# id: Optional identifier string. Use when running the same script +# multiple times in the same job (e.g. MPMD) +# +# Input environment variables: +# TRACE (YES/NO): Whether to echo every command (set -x) [default: "YES"] +# STRICT (YES/NO): Whether to exit immediately on error or undefined variable +# (set -eu) [default: "YES"] +# +####### +set +x +if (( $# > 0 )); then + id="(${1})" +else + id="" +fi + +# Record the start time so we can calculate the elapsed time later +start_time=$(date +%s) + +# Get the base name of the calling script +_calling_script=$(basename "${BASH_SOURCE[1]}") + +# Announce the script has begun +start_time_human=$(date -d"@${start_time}" -u) +echo "Begin ${_calling_script} at ${start_time_human}" + +export PS4='+ $(basename ${BASH_SOURCE})[${LINENO}]'"${id}: " + +set_strict() { + if [[ ${STRICT:-"YES"} == "YES" ]]; then + # Exit on error and undefined variable + set -eu + fi +} + +set_trace() { + # Print the script name and line number of each command as it is + # executed when using trace. + if [[ ${TRACE:-"YES"} == "YES" ]]; then + set -x + fi +} + +postamble() { + # + # Commands to execute when a script ends. 
+ # + # Syntax: + # postamble script start_time rc + # + # Arguments: + # script: name of the script ending + # start_time: start time of script (in seconds) + # rc: the exit code of the script + # + + set +x + script="${1}" + start_time="${2}" + rc="${3}" + + # Calculate the elapsed time + end_time=$(date +%s) + end_time_human=$(date -d@"${end_time}" -u +%H:%M:%S) + elapsed_sec=$((end_time - start_time)) + elapsed=$(date -d@"${elapsed_sec}" -u +%H:%M:%S) + + # Announce the script has ended, then pass the error code up + echo "End ${script} at ${end_time_human} with error code ${rc:-0} (time elapsed: ${elapsed})" + exit "${rc}" +} + +# Place the postamble in a trap so it is always called no matter how the script exits +# Shellcheck: Turn off warning about substitions at runtime instead of signal time +# shellcheck disable=SC2064 +trap "postamble ${_calling_script} ${start_time} \$?" EXIT +# shellcheck disable= + +# Turn on our settings +set_strict +set_trace diff --git a/ush/radmon_diag_ck.sh b/ush/radmon_diag_ck.sh new file mode 100755 index 0000000000..142e99f8c7 --- /dev/null +++ b/ush/radmon_diag_ck.sh @@ -0,0 +1,175 @@ +#!/bin/bash + +#---------------------------------------------------------------- +# Check the contents of the radstat file and compare to +# the ${run}_radmon_satype.txt file. Report any missing +# or zero sized diag files. +# + + function usage { + echo "Usage: radmon_diag_ck.sh -rad radstat --sat satype --out output " + echo "" + echo " -r,--rad radstat file (required)" + echo " File name or path to radstat file." + echo "" + echo " -s,--sat satype file (required)" + echo " File name or path to satype file." + echo "" + echo " -o,--out output file name (required)" + echo " File name for missing diag file report." + } + + +echo "--> radmon_diag_ck.sh" + + +#-------------------------- +# Process input arguments +# + nargs=$# + if [[ $nargs -ne 6 ]]; then + usage + exit 1 + fi + + while [[ $# -ge 1 ]] + do + key="$1" + echo $key + + case $key in + -r|--rad) + radstat_file="$2" + shift # past argument + ;; + -s|--sat) + satype_file="$2" + shift # past argument + ;; + -o|--out) + output_file="$2" + shift # past argument + ;; + *) + #unspecified key + echo " unsupported key = $key" + ;; + esac + + shift + done + +# set -ax + + echo " radstat_file = ${radstat_file}" + echo " satype_file = ${satype_file}" + echo " output_file = ${output_file}" + + missing_diag="" + zero_len_diag="" + + #--------------------------------------------- + # get list of diag files in the radstat file + # + radstat_contents=`tar -tf ${radstat_file} | grep '_ges' | + gawk -F"diag_" '{print $2}' | + gawk -F"_ges" '{print $1}'` + + + #--------------------------------------------- + # load contents of satype_file into an array + # + satype_contents=`cat ${satype_file}` + + + #------------------------------------------------- + # compare $satype_contents and $radstat_contents + # report anything missing + # + for sat in $satype_contents; do + test=`echo $radstat_contents | grep $sat` + + if [[ ${#test} -le 0 ]]; then + missing_diag="${missing_diag} ${sat}" + fi + + done + + echo "" + echo "missing_diag = ${missing_diag}" + echo "" + + + #--------------------------------------------------------- + # Check for zero sized diag files. The diag files in + # the radstat file (which is a tar file) are gzipped. + # I find that 0 sized, gzipped file has a size of ~52 + # (I assume that's for header and block size). 
+ # + # So for this check we'll assume anything in the radstat + # file with a size of > 1000 bytes is suspect. (That's + # overkill, 100 is probably sufficient, but I'm the + # nervous type.) So we'll extract, uncompress, and check + # the actual file size of those. Anything with an + # uncompressed size of 0 goes on the zero_len_diag list. + # + verbose_contents=`tar -tvf ${radstat_file} | grep '_ges'` + + + #------------------------------------------------------- + # note: need to reset the IFS to line breaks otherwise + # the $vc value in the for loop below will break + # on all white space, not the line break. + SAVEIFS=$IFS + IFS=$(echo -en "\n\b") + + + for vc in ${verbose_contents}; do + + gzip_len=`echo ${vc} | gawk '{print $3}'` + + if [[ ${gzip_len} -le 1000 ]]; then + test_file=`echo ${vc} | gawk '{print $6}'` + tar -xf ${radstat_file} ${test_file} + + gunzip ${test_file} + unzipped_file=`echo ${test_file%.*}` + + uz_file_size=`ls -la ${unzipped_file} | gawk '{print $5}'` + + if [[ ${uz_file_size} -le 0 ]]; then + sat=`echo ${unzipped_file} | gawk -F"diag_" '{print $2}' | + gawk -F"_ges" '{print $1}'` + + zero_len_diag="${zero_len_diag} ${sat}" + fi + + rm -f ${unzipped_file} + fi + done + + IFS=${SAVEIFS} # reset IFS to default (white space) + + echo "" + echo "zero_len_diag = ${zero_len_diag}" + echo "" + + + #----------------------------------------- + # Write results to $output_file + # + if [[ ${#zero_len_diag} -gt 0 ]]; then + for zld in ${zero_len_diag}; do + echo " Zero Length diagnostic file: $zld" >> $output_file + done + fi + + if [[ ${#missing_diag} -gt 0 ]]; then + for md in ${missing_diag}; do + echo " Missing diagnostic file : $md" >> $output_file + done + fi + + +echo "<-- radmon_diag_ck.sh" +exit diff --git a/ush/radmon_verf_angle.sh b/ush/radmon_verf_angle.sh index 3dff2a6f98..b2dab0825a 100755 --- a/ush/radmon_verf_angle.sh +++ b/ush/radmon_verf_angle.sh @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "${USHgfs}/preamble.sh" +source "$HOMEgfs/ush/preamble.sh" ################################################################################ #### UNIX Script Documentation Block @@ -20,15 +20,17 @@ source "${USHgfs}/preamble.sh" # other supporting files into a temporary working directory. # # -# Usage: radmon_verf_angle.sh +# Usage: radmon_verf_angle.sh PDATE # # Input script positional parameters: -# PDYcyc processing date +# PDATE processing date # yyyymmddcc format; required # # Imported Shell Variables: # RADMON_SUFFIX data source suffix # defauls to opr +# EXECradmon executable directory +# defaults to current directory # RAD_AREA global or regional flag # defaults to global # TANKverf_rad data repository @@ -70,18 +72,26 @@ REGIONAL_RR=${REGIONAL_RR:-0} # rapid refresh model flag rgnHH=${rgnHH:-} rgnTM=${rgnTM:-} -echo " REGIONAL_RR, rgnHH, rgnTM = ${REGIONAL_RR}, ${rgnHH}, ${rgnTM}" +export PDATE=${1:-${PDATE:?}} + +echo " REGIONAL_RR, rgnHH, rgnTM = $REGIONAL_RR, $rgnHH, $rgnTM" netcdf_boolean=".false." -if [[ ${RADMON_NETCDF} -eq 1 ]]; then +if [[ $RADMON_NETCDF -eq 1 ]]; then netcdf_boolean=".true." 
fi -echo " RADMON_NETCDF, netcdf_boolean = ${RADMON_NETCDF}, ${netcdf_boolean}" +echo " RADMON_NETCDF, netcdf_boolean = ${RADMON_NETCDF}, $netcdf_boolean" which prep_step which startmsg +# Directories +FIXgdas=${FIXgdas:-$(pwd)} +EXECradmon=${EXECradmon:-$(pwd)} +TANKverf_rad=${TANKverf_rad:-$(pwd)} + # File names -touch "${pgmout}" +export pgmout=${pgmout:-${jlogfile}} +touch $pgmout # Other variables SATYPE=${SATYPE:-} @@ -90,7 +100,7 @@ LITTLE_ENDIAN=${LITTLE_ENDIAN:-0} USE_ANL=${USE_ANL:-0} -if [[ ${USE_ANL} -eq 1 ]]; then +if [[ $USE_ANL -eq 1 ]]; then gesanl="ges anl" else gesanl="ges" @@ -98,14 +108,14 @@ fi err=0 angle_exec=radmon_angle.x -shared_scaninfo="${shared_scaninfo:-${PARMgfs}/monitor/gdas_radmon_scaninfo.txt}" +shared_scaninfo=${shared_scaninfo:-$FIXgdas/gdas_radmon_scaninfo.txt} scaninfo=scaninfo.txt #-------------------------------------------------------------------- # Copy extraction program and supporting files to working directory -${NCP} "${EXECgfs}/${angle_exec}" ./ -${NCP} "${shared_scaninfo}" ./${scaninfo} +$NCP ${EXECradmon}/${angle_exec} ./ +$NCP $shared_scaninfo ./${scaninfo} if [[ ! -s ./${angle_exec} || ! -s ./${scaninfo} ]]; then err=2 @@ -115,10 +125,10 @@ else export pgm=${angle_exec} - iyy="${PDY:0:4}" - imm="${PDY:4:2}" - idd="${PDY:6:2}" - ihh=${cyc} + iyy=$(echo $PDATE | cut -c1-4) + imm=$(echo $PDATE | cut -c5-6) + idd=$(echo $PDATE | cut -c7-8) + ihh=$(echo $PDATE | cut -c9-10) ctr=0 fail=0 @@ -133,24 +143,24 @@ else for dtype in ${gesanl}; do - echo "pgm = ${pgm}" - echo "pgmout = ${pgmout}" + echo "pgm = $pgm" + echo "pgmout = $pgmout" prep_step - ctr=$((ctr + 1)) + ctr=$(expr $ctr + 1) - if [[ ${dtype} == "anl" ]]; then - data_file="${type}_anl.${PDY}${cyc}.ieee_d" + if [[ $dtype == "anl" ]]; then + data_file=${type}_anl.${PDATE}.ieee_d ctl_file=${type}_anl.ctl angl_ctl=angle.${ctl_file} else - data_file="${type}.${PDY}${cyc}.ieee_d" + data_file=${type}.${PDATE}.ieee_d ctl_file=${type}.ctl angl_ctl=angle.${ctl_file} fi angl_file="" - if [[ ${REGIONAL_RR} -eq 1 ]]; then + if [[ $REGIONAL_RR -eq 1 ]]; then angl_file=${rgnHH}.${data_file}.${rgnTM} fi @@ -177,18 +187,18 @@ cat << EOF > input EOF startmsg - ./${angle_exec} < input >> "${pgmout}" 2>>errfile + ./${angle_exec} < input >> ${pgmout} 2>>errfile export err=$?; err_chk - if [[ ${err} -ne 0 ]]; then - fail=$(( fail + 1 )) + if [[ $err -ne 0 ]]; then + fail=$(expr $fail + 1) fi if [[ -s ${angl_file} ]]; then - ${COMPRESS} -f "${angl_file}" + ${COMPRESS} -f ${angl_file} fi if [[ -s ${angl_ctl} ]]; then - ${COMPRESS} -f "${angl_ctl}" + ${COMPRESS} -f ${angl_ctl} fi @@ -197,24 +207,24 @@ EOF done # for type in ${SATYPE} loop - "${USHgfs}/rstprod.sh" + ${USHradmon}/rstprod.sh tar_file=radmon_angle.tar if compgen -G "angle*.ieee_d*" > /dev/null || compgen -G "angle*.ctl*" > /dev/null; then - tar -cf "${tar_file}" angle*.ieee_d* angle*.ctl* + tar -cf $tar_file angle*.ieee_d* angle*.ctl* ${COMPRESS} ${tar_file} - mv "${tar_file}.${Z}" "${TANKverf_rad}/." + mv $tar_file.${Z} ${TANKverf_rad}/. 
- if [[ ${RAD_AREA} = "rgn" ]]; then + if [[ $RAD_AREA = "rgn" ]]; then cwd=$(pwd) - cd "${TANKverf_rad}" - tar -xf "${tar_file}.${Z}" - rm "${tar_file}.${Z}" - cd "${cwd}" + cd ${TANKverf_rad} + tar -xf ${tar_file}.${Z} + rm ${tar_file}.${Z} + cd ${cwd} fi fi - if [[ ${ctr} -gt 0 && ${fail} -eq ${ctr} || ${fail} -gt ${ctr} ]]; then + if [[ $ctr -gt 0 && $fail -eq $ctr || $fail -gt $ctr ]]; then err=3 fi fi diff --git a/ush/radmon_verf_bcoef.sh b/ush/radmon_verf_bcoef.sh index 4274436154..374c8db7b2 100755 --- a/ush/radmon_verf_bcoef.sh +++ b/ush/radmon_verf_bcoef.sh @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "${USHgfs}/preamble.sh" +source "$HOMEgfs/ush/preamble.sh" ################################################################################ #### UNIX Script Documentation Block @@ -20,19 +20,23 @@ source "${USHgfs}/preamble.sh" # other supporting files into a temporary working directory. # # -# Usage: radmon_verf_bcoef.sh +# Usage: radmon_verf_bcoef.sh PDATE # # Input script positional parameters: -# PDYcyc processing date +# PDATE processing date # yyyymmddcc format; required # # Imported Shell Variables: # RADMON_SUFFIX data source suffix # defauls to opr -# EXECgfs executable directory +# EXECradmon executable directory +# defaults to current directory +# FIXradmon fixed data directory +# defaults to current directory # RAD_AREA global or regional flag # defaults to global # TANKverf_rad data repository +# defaults to current directory # SATYPE list of satellite/instrument sources # defaults to none # LITTLE_ENDIAN flag for LE machine @@ -61,15 +65,23 @@ source "${USHgfs}/preamble.sh" # >0 - some problem encountered # #################################################################### +# Command line arguments. +export PDATE=${1:-${PDATE:?}} netcdf_boolean=".false." -if [[ ${RADMON_NETCDF} -eq 1 ]]; then +if [[ $RADMON_NETCDF -eq 1 ]]; then netcdf_boolean=".true." fi -echo " RADMON_NETCDF, netcdf_boolean = ${RADMON_NETCDF}, ${netcdf_boolean}" +echo " RADMON_NETCDF, netcdf_boolean = ${RADMON_NETCDF}, $netcdf_boolean" + +# Directories +FIXgdas=${FIXgdas:-$(pwd)} +EXECradmon=${EXECradmon:-$(pwd)} +TANKverf_rad=${TANKverf_rad:-$(pwd)} # File names -touch "${pgmout}" +pgmout=${pgmout:-${jlogfile}} +touch $pgmout # Other variables RAD_AREA=${RAD_AREA:-glb} @@ -84,7 +96,7 @@ USE_ANL=${USE_ANL:-0} err=0 bcoef_exec=radmon_bcoef.x -if [[ ${USE_ANL} -eq 1 ]]; then +if [[ $USE_ANL -eq 1 ]]; then gesanl="ges anl" else gesanl="ges" @@ -93,8 +105,8 @@ fi #-------------------------------------------------------------------- # Copy extraction program and supporting files to working directory -${NCP} "${EXECgfs}/${bcoef_exec}" ./${bcoef_exec} -${NCP} "${biascr}" ./biascr.txt +$NCP $EXECradmon/${bcoef_exec} ./${bcoef_exec} +$NCP ${biascr} ./biascr.txt if [[ ! -s ./${bcoef_exec} || ! 
-s ./biascr.txt ]]; then err=4 @@ -106,10 +118,10 @@ else export pgm=${bcoef_exec} - iyy="${PDY:0:4}" - imm="${PDY:4:2}" - idd="${PDY:6:2}" - ihh=${cyc} + iyy=$(echo $PDATE | cut -c1-4) + imm=$(echo $PDATE | cut -c5-6) + idd=$(echo $PDATE | cut -c7-8) + ihh=$(echo $PDATE | cut -c9-10) ctr=0 fail=0 @@ -128,19 +140,19 @@ else prep_step - ctr=$(( ctr + 1 )) + ctr=$(expr $ctr + 1) - if [[ ${dtype} == "anl" ]]; then - data_file="${type}_anl.${PDY}${cyc}.ieee_d" + if [[ $dtype == "anl" ]]; then + data_file=${type}_anl.${PDATE}.ieee_d ctl_file=${type}_anl.ctl bcoef_ctl=bcoef.${ctl_file} else - data_file="${type}.${PDY}${cyc}.ieee_d" + data_file=${type}.${PDATE}.ieee_d ctl_file=${type}.ctl bcoef_ctl=bcoef.${ctl_file} fi - if [[ ${REGIONAL_RR} -eq 1 ]]; then + if [[ $REGIONAL_RR -eq 1 ]]; then bcoef_file=${rgnHH}.bcoef.${data_file}.${rgnTM} else bcoef_file=bcoef.${data_file} @@ -168,10 +180,10 @@ cat << EOF > input / EOF startmsg - ./${bcoef_exec} < input >>"${pgmout}" 2>>errfile + ./${bcoef_exec} < input >>${pgmout} 2>>errfile export err=$?; err_chk - if [[ ${err} -ne 0 ]]; then - fail=$(( fail + 1 )) + if [[ $err -ne 0 ]]; then + fail=$(expr $fail + 1) fi @@ -180,11 +192,11 @@ EOF # if [[ -s ${bcoef_file} ]]; then - ${COMPRESS} "${bcoef_file}" + ${COMPRESS} ${bcoef_file} fi if [[ -s ${bcoef_ctl} ]]; then - ${COMPRESS} "${bcoef_ctl}" + ${COMPRESS} ${bcoef_ctl} fi @@ -192,24 +204,24 @@ EOF done # type in $SATYPE loop - "${USHgfs}/rstprod.sh" + ${USHradmon}/rstprod.sh if compgen -G "bcoef*.ieee_d*" > /dev/null || compgen -G "bcoef*.ctl*" > /dev/null; then tar_file=radmon_bcoef.tar - tar -cf ${tar_file} bcoef*.ieee_d* bcoef*.ctl* + tar -cf $tar_file bcoef*.ieee_d* bcoef*.ctl* ${COMPRESS} ${tar_file} - mv "${tar_file}.${Z}" "${TANKverf_rad}" + mv $tar_file.${Z} ${TANKverf_rad} - if [[ ${RAD_AREA} = "rgn" ]]; then + if [[ $RAD_AREA = "rgn" ]]; then cwd=$(pwd) - cd "${TANKverf_rad}" - tar -xf "${tar_file}.${Z}" - rm "${tar_file}.${Z}" - cd "${cwd}" + cd ${TANKverf_rad} + tar -xf ${tar_file}.${Z} + rm ${tar_file}.${Z} + cd ${cwd} fi fi - if [[ ${ctr} -gt 0 && ${fail} -eq ${ctr} || ${fail} -gt ${ctr} ]]; then + if [[ $ctr -gt 0 && $fail -eq $ctr || $fail -gt $ctr ]]; then err=5 fi fi diff --git a/ush/radmon_verf_bcor.sh b/ush/radmon_verf_bcor.sh index ea0a7842e6..3e267f018c 100755 --- a/ush/radmon_verf_bcor.sh +++ b/ush/radmon_verf_bcor.sh @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "${USHgfs}/preamble.sh" +source "$HOMEgfs/ush/preamble.sh" ################################################################################ #### UNIX Script Documentation Block @@ -20,16 +20,16 @@ source "${USHgfs}/preamble.sh" # other supporting files into a temporary working directory. # # -# Usage: radmon_verf_bcor.sh +# Usage: radmon_verf_bcor.sh PDATE # # Input script positional parameters: -# PDYcyc processing date +# PDATE processing date # yyyymmddcc format; required # # Imported Shell Variables: # RADMON_SUFFIX data source suffix # defauls to opr -# EXECgfs executable directory +# EXECradmon executable directory # defaults to current directory # RAD_AREA global or regional flag # defaults to global @@ -64,8 +64,16 @@ source "${USHgfs}/preamble.sh" # #################################################################### +# Command line arguments. 
+export PDATE=${1:-${PDATE:?}} + +# Directories +EXECradmon=${EXECradmon:-$(pwd)} +TANKverf_rad=${TANKverf_rad:-$(pwd)} + # File names -touch "${pgmout}" +pgmout=${pgmout:-${jlogfile}} +touch $pgmout # Other variables RAD_AREA=${RAD_AREA:-glb} @@ -77,11 +85,11 @@ bcor_exec=radmon_bcor.x err=0 netcdf_boolean=".false." -if [[ ${RADMON_NETCDF} -eq 1 ]]; then +if [[ $RADMON_NETCDF -eq 1 ]]; then netcdf_boolean=".true." fi -if [[ ${USE_ANL} -eq 1 ]]; then +if [[ $USE_ANL -eq 1 ]]; then gesanl="ges anl" else gesanl="ges" @@ -91,7 +99,7 @@ fi #-------------------------------------------------------------------- # Copy extraction program to working directory -${NCP} "${EXECgfs}/${bcor_exec}" ./${bcor_exec} +$NCP ${EXECradmon}/${bcor_exec} ./${bcor_exec} if [[ ! -s ./${bcor_exec} ]]; then err=6 @@ -103,10 +111,10 @@ else export pgm=${bcor_exec} - iyy="${PDY:0:4}" - imm="${PDY:4:2}" - idd="${PDY:6:2}" - ihh=${cyc} + iyy=$(echo $PDATE | cut -c1-4) + imm=$(echo $PDATE | cut -c5-6) + idd=$(echo $PDATE | cut -c7-8) + ihh=$(echo $PDATE | cut -c9-10) ctr=0 fail=0 @@ -118,10 +126,10 @@ else prep_step - ctr=$(( ctr + 1 )) + ctr=$(expr $ctr + 1) - if [[ ${dtype} == "anl" ]]; then - data_file="${type}_anl.${PDY}${cyc}.ieee_d" + if [[ $dtype == "anl" ]]; then + data_file=${type}_anl.${PDATE}.ieee_d bcor_file=bcor.${data_file} ctl_file=${type}_anl.ctl bcor_ctl=bcor.${ctl_file} @@ -129,7 +137,7 @@ else bcor_stdout=bcor.${stdout_file} input_file=${type}_anl else - data_file="${type}.${PDY}${cyc}.ieee_d" + data_file=${type}.${PDATE}.ieee_d bcor_file=bcor.${data_file} ctl_file=${type}.ctl bcor_ctl=bcor.${ctl_file} @@ -143,7 +151,7 @@ else # Check for 0 length input file here and avoid running # the executable if $input_file doesn't exist or is 0 bytes # - if [[ -s "${input_file}" ]]; then + if [[ -s $input_file ]]; then nchanl=-999 cat << EOF > input @@ -165,10 +173,10 @@ cat << EOF > input EOF startmsg - ./${bcor_exec} < input >> "${pgmout}" 2>>errfile + ./${bcor_exec} < input >> ${pgmout} 2>>errfile export err=$?; err_chk if [[ $? -ne 0 ]]; then - fail=$(( fail + 1 )) + fail=$(expr $fail + 1) fi @@ -177,11 +185,11 @@ EOF # if [[ -s ${bcor_file} ]]; then - ${COMPRESS} "${bcor_file}" + ${COMPRESS} ${bcor_file} fi if [[ -s ${bcor_ctl} ]]; then - ${COMPRESS} "${bcor_ctl}" + ${COMPRESS} ${bcor_ctl} fi fi @@ -189,24 +197,24 @@ EOF done # type in $SATYPE loop - "${USHgfs}/rstprod.sh" + ${USHradmon}/rstprod.sh tar_file=radmon_bcor.tar if compgen -G "bcor*.ieee_d*" > /dev/null || compgen -G "bcor*.ctl*" > /dev/null; then - tar -cf "${tar_file}" bcor*.ieee_d* bcor*.ctl* + tar -cf $tar_file bcor*.ieee_d* bcor*.ctl* ${COMPRESS} ${tar_file} - mv "${tar_file}.${Z}" "${TANKverf_rad}/." + mv $tar_file.${Z} ${TANKverf_rad}/. - if [[ ${RAD_AREA} = "rgn" ]]; then + if [[ $RAD_AREA = "rgn" ]]; then cwd=$(pwd) - cd "${TANKverf_rad}" - tar -xf "${tar_file}.${Z}" - rm "${tar_file}.${Z}" - cd "${cwd}" + cd ${TANKverf_rad} + tar -xf ${tar_file}.${Z} + rm ${tar_file}.${Z} + cd ${cwd} fi fi - if [[ ${ctr} -gt 0 && ${fail} -eq ${ctr} || ${fail} -gt ${ctr} ]]; then + if [[ $ctr -gt 0 && $fail -eq $ctr || $fail -gt $ctr ]]; then err=7 fi fi diff --git a/ush/radmon_verf_time.sh b/ush/radmon_verf_time.sh index 0e935826dd..51743277c9 100755 --- a/ush/radmon_verf_time.sh +++ b/ush/radmon_verf_time.sh @@ -1,6 +1,6 @@ #! 
/usr/bin/env bash -source "${USHgfs}/preamble.sh" +source "$HOMEgfs/ush/preamble.sh" ################################################################################ #### UNIX Script Documentation Block @@ -22,10 +22,10 @@ source "${USHgfs}/preamble.sh" # other supporting files into a temporary working directory. # # -# Usage: radmon_verf_time.sh ${PDY}${cyc} +# Usage: radmon_verf_time.sh PDATE # # Input script positional parameters: -# PDYcyc processing date +# PDATE processing date # yyyymmddcc format; required # # Imported Shell Variables: @@ -33,9 +33,14 @@ source "${USHgfs}/preamble.sh" # defaults to 1 (on) # RADMON_SUFFIX data source suffix # defauls to opr +# EXECradmon executable directory +# defaults to current directory +# FIXgdas fixed data directory +# defaults to current directory # RAD_AREA global or regional flag # defaults to global # TANKverf_rad data repository +# defaults to current directory # SATYPE list of satellite/instrument sources # defaults to none # VERBOSE Verbose flag (YES or NO) @@ -72,10 +77,20 @@ source "${USHgfs}/preamble.sh" # #################################################################### +# Command line arguments. +export PDATE=${1:-${PDATE:?}} + +# Directories +FIXgdas=${FIXgdas:-$(pwd)} +EXECradmon=${EXECradmon:-$(pwd)} +TANKverf_rad=${TANKverf_rad:-$(pwd)} + # File names +#pgmout=${pgmout:-${jlogfile}} +#touch $pgmout -radmon_err_rpt=${radmon_err_rpt:-${USHgfs}/radmon_err_rpt.sh} -base_file=${base_file:-${PARMgfs}/monitor/gdas_radmon_base.tar} +radmon_err_rpt=${radmon_err_rpt:-${USHradmon}/radmon_err_rpt.sh} +base_file=${base_file:-$FIXgdas/gdas_radmon_base.tar} report=report.txt disclaimer=disclaimer.txt @@ -94,7 +109,7 @@ count_hdr=count_hdr.txt count_err=count_err.txt netcdf_boolean=".false." -if [[ ${RADMON_NETCDF} -eq 1 ]]; then +if [[ $RADMON_NETCDF -eq 1 ]]; then netcdf_boolean=".true." fi @@ -112,7 +127,7 @@ time_exec=radmon_time.x USE_ANL=${USE_ANL:-0} err=0 -if [[ ${USE_ANL} -eq 1 ]]; then +if [[ $USE_ANL -eq 1 ]]; then gesanl="ges anl" else gesanl="ges" @@ -122,24 +137,26 @@ fi #-------------------------------------------------------------------- # Copy extraction program and base files to working directory #------------------------------------------------------------------- -${NCP} "${EXECgfs}/${time_exec}" ./ +$NCP ${EXECradmon}/${time_exec} ./ if [[ ! -s ./${time_exec} ]]; then err=8 fi -iyy="${PDY:0:4}" -imm="${PDY:4:2}" -idd="${PDY:6:2}" -ihh=${cyc} +iyy=$(echo $PDATE | cut -c1-4) +imm=$(echo $PDATE | cut -c5-6) +idd=$(echo $PDATE | cut -c7-8) +ihh=$(echo $PDATE | cut -c9-10) +cyc=$ihh +CYCLE=$cyc local_base="local_base" -if [[ ${DO_DATA_RPT} -eq 1 ]]; then +if [[ $DO_DATA_RPT -eq 1 ]]; then if [[ -e ${base_file}.${Z} ]]; then - ${NCP} "${base_file}.${Z}" "./${local_base}.${Z}" - ${UNCOMPRESS} "${local_base}.${Z}" + $NCP ${base_file}.${Z} ./${local_base}.{Z} + ${UNCOMPRESS} ${local_base}.${Z} else - ${NCP} "${base_file}" ./${local_base} + $NCP ${base_file} ./${local_base} fi if [[ ! 
-s ./${local_base} ]]; then @@ -151,7 +168,7 @@ if [[ ${DO_DATA_RPT} -eq 1 ]]; then fi fi -if [[ ${err} -eq 0 ]]; then +if [[ $err -eq 0 ]]; then ctr=0 fail=0 @@ -166,23 +183,23 @@ if [[ ${err} -eq 0 ]]; then continue fi - ctr=$(( ctr + 1 )) + ctr=$(expr $ctr + 1) for dtype in ${gesanl}; do if [[ -f input ]]; then rm input; fi - if [[ ${dtype} == "anl" ]]; then - data_file="${type}_anl.${PDY}${cyc}.ieee_d" + if [[ $dtype == "anl" ]]; then + data_file=${type}_anl.${PDATE}.ieee_d ctl_file=${type}_anl.ctl time_ctl=time.${ctl_file} else - data_file="${type}.${PDY}${cyc}.ieee_d" + data_file=${type}.${PDATE}.ieee_d ctl_file=${type}.ctl time_ctl=time.${ctl_file} fi - if [[ ${REGIONAL_RR} -eq 1 ]]; then + if [[ $REGIONAL_RR -eq 1 ]]; then time_file=${rgnHH}.time.${data_file}.${rgnTM} else time_file=time.${data_file} @@ -210,48 +227,48 @@ cat << EOF > input / EOF - ./${time_exec} < input >> stdout."${type}" 2>>errfile + ./${time_exec} < input >> stdout.${type} 2>>errfile - if [[ ${err} -ne 0 ]]; then - fail=$(( fail + 1 )) + if [[ $err -ne 0 ]]; then + fail=$(expr $fail + 1) fi #------------------------------------------------------------------- # move data, control, and stdout files to $TANKverf_rad and compress #------------------------------------------------------------------- - cat "stdout.${type}" >> stdout.time + cat stdout.${type} >> stdout.time if [[ -s ${time_file} ]]; then - ${COMPRESS} "${time_file}" + ${COMPRESS} ${time_file} fi if [[ -s ${time_ctl} ]]; then - ${COMPRESS} "${time_ctl}" + ${COMPRESS} ${time_ctl} fi done done - "${USHgfs}/rstprod.sh" + ${USHradmon}/rstprod.sh if compgen -G "time*.ieee_d*" > /dev/null || compgen -G "time*.ctl*" > /dev/null; then tar_file=radmon_time.tar - tar -cf "${tar_file}" time*.ieee_d* time*.ctl* + tar -cf $tar_file time*.ieee_d* time*.ctl* ${COMPRESS} ${tar_file} - mv "${tar_file}.${Z}" "${TANKverf_rad}/." + mv $tar_file.${Z} ${TANKverf_rad}/. 
- if [[ ${RAD_AREA} = "rgn" ]]; then + if [[ $RAD_AREA = "rgn" ]]; then cwd=$(pwd) - cd "${TANKverf_rad}" - tar -xf "${tar_file}.${Z}" - rm "${tar_file}.${Z}" - cd "${cwd}" + cd ${TANKverf_rad} + tar -xf ${tar_file}.${Z} + rm ${tar_file}.${Z} + cd ${cwd} fi fi - if [[ ${ctr} -gt 0 && ${fail} -eq ${ctr} || ${fail} -gt ${ctr} ]]; then - echo "fail, ctr = ${fail}, ${ctr}" + if [[ $ctr -gt 0 && $fail -eq $ctr || $fail -gt $ctr ]]; then + echo "fail, ctr = $fail, $ctr" err=10 fi @@ -265,7 +282,7 @@ fi #------------------------------------------------------------------- #################################################################### -if [[ ${DO_DATA_RPT} -eq 1 ]]; then +if [[ $DO_DATA_RPT -eq 1 ]]; then #--------------------------- # build report disclaimer @@ -284,8 +301,8 @@ EOF # Check for missing diag files # tmp_satype="./tmp_satype.txt" - echo "${SATYPE}" > ${tmp_satype} - "${USHgfs}/radmon_diag_ck.sh" --rad "${radstat}" --sat "${tmp_satype}" --out "${diag}" + echo ${SATYPE} > ${tmp_satype} + ${USHradmon}/radmon_diag_ck.sh --rad ${radstat} --sat ${tmp_satype} --out ${diag} if [[ -s ${diag} ]]; then cat << EOF > ${diag_hdr} @@ -311,11 +328,11 @@ EOF # if [[ -s ${diag} ]]; then lines=$(wc -l <${diag}) - echo "lines in diag = ${lines}" + echo "lines in diag = $lines" - if [[ ${lines} -gt 0 ]]; then + if [[ $lines -gt 0 ]]; then cat ${diag_report} - cp ${diag} "${TANKverf_rad}/bad_diag.${PDY}${cyc}" + cp ${diag} ${TANKverf_rad}/bad_diag.${PDATE} else rm ${diag_report} fi @@ -327,12 +344,12 @@ EOF # Identify bad_pen and bad_chan files for this cycle and # previous cycle - bad_pen=bad_pen.${PDY}${cyc} - bad_chan=bad_chan.${PDY}${cyc} - low_count=low_count.${PDY}${cyc} + bad_pen=bad_pen.${PDATE} + bad_chan=bad_chan.${PDATE} + low_count=low_count.${PDATE} - qdate=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") - pday="${qdate:0:8}" + qdate=$($NDATE -${CYCLE_INTERVAL} $PDATE) + pday=$(echo $qdate | cut -c1-8) prev_bad_pen=bad_pen.${qdate} prev_bad_chan=bad_chan.${qdate} @@ -342,35 +359,35 @@ EOF prev_bad_chan=${TANKverf_radM1}/${prev_bad_chan} prev_low_count=${TANKverf_radM1}/${prev_low_count} - if [[ -s ${bad_pen} ]]; then - echo "pad_pen = ${bad_pen}" + if [[ -s $bad_pen ]]; then + echo "pad_pen = $bad_pen" fi - if [[ -s ${prev_bad_pen} ]]; then - echo "prev_pad_pen = ${prev_bad_pen}" + if [[ -s $prev_bad_pen ]]; then + echo "prev_pad_pen = $prev_bad_pen" fi - if [[ -s ${bad_chan} ]]; then - echo "bad_chan = ${bad_chan}" + if [[ -s $bad_chan ]]; then + echo "bad_chan = $bad_chan" fi - if [[ -s ${prev_bad_chan} ]]; then - echo "prev_bad_chan = ${prev_bad_chan}" + if [[ -s $prev_bad_chan ]]; then + echo "prev_bad_chan = $prev_bad_chan" fi - if [[ -s ${low_count} ]]; then - echo "low_count = ${low_count}" + if [[ -s $low_count ]]; then + echo "low_count = $low_count" fi - if [[ -s ${prev_low_count} ]]; then - echo "prev_low_count = ${prev_low_count}" + if [[ -s $prev_low_count ]]; then + echo "prev_low_count = $prev_low_count" fi do_pen=0 do_chan=0 do_cnt=0 - if [[ -s ${bad_pen} && -s ${prev_bad_pen} ]]; then + if [[ -s $bad_pen && -s $prev_bad_pen ]]; then do_pen=1 fi - if [[ -s ${low_count} && -s ${prev_low_count} ]]; then + if [[ -s $low_count && -s $prev_low_count ]]; then do_cnt=1 fi @@ -378,7 +395,7 @@ EOF # avoid doing the bad_chan report for REGIONAL_RR sources -- because # they run hourly they often have 0 count channels for off-hour runs. 
# - if [[ -s ${bad_chan} && -s ${prev_bad_chan} && REGIONAL_RR -eq 0 ]]; then + if [[ -s $bad_chan && -s $prev_bad_chan && REGIONAL_RR -eq 0 ]]; then do_chan=1 fi @@ -386,37 +403,37 @@ EOF # Remove extra spaces in new bad_pen & low_count files # if [[ -s ${bad_pen} ]]; then - gawk '{$1=$1}1' "${bad_pen}" > tmp.bad_pen - mv -f tmp.bad_pen "${bad_pen}" + gawk '{$1=$1}1' $bad_pen > tmp.bad_pen + mv -f tmp.bad_pen $bad_pen fi if [[ -s ${low_count} ]]; then - gawk '{$1=$1}1' "${low_count}" > tmp.low_count - mv -f tmp.low_count "${low_count}" + gawk '{$1=$1}1' $low_count > tmp.low_count + mv -f tmp.low_count $low_count fi - echo " do_pen, do_chan, do_cnt = ${do_pen}, ${do_chan}, ${do_cnt}" - echo " diag_report = ${diag_report} " - if [[ ${do_pen} -eq 1 || ${do_chan} -eq 1 || ${do_cnt} -eq 1 || -s ${diag_report} ]]; then + echo " do_pen, do_chan, do_cnt = $do_pen, $do_chan, $do_cnt" + echo " diag_report = $diag_report " + if [[ $do_pen -eq 1 || $do_chan -eq 1 || $do_cnt -eq 1 || -s ${diag_report} ]]; then - if [[ ${do_pen} -eq 1 ]]; then + if [[ $do_pen -eq 1 ]]; then echo "calling radmon_err_rpt for pen" - ${radmon_err_rpt} "${prev_bad_pen}" "${bad_pen}" pen "${qdate}" \ - "${PDY}${cyc}" ${diag_report} ${pen_err} + ${radmon_err_rpt} ${prev_bad_pen} ${bad_pen} pen ${qdate} \ + ${PDATE} ${diag_report} ${pen_err} fi - if [[ ${do_chan} -eq 1 ]]; then + if [[ $do_chan -eq 1 ]]; then echo "calling radmon_err_rpt for chan" - ${radmon_err_rpt} "${prev_bad_chan}" "${bad_chan}" chan "${qdate}" \ - "${PDY}${cyc}" ${diag_report} ${chan_err} + ${radmon_err_rpt} ${prev_bad_chan} ${bad_chan} chan ${qdate} \ + ${PDATE} ${diag_report} ${chan_err} fi - if [[ ${do_cnt} -eq 1 ]]; then + if [[ $do_cnt -eq 1 ]]; then echo "calling radmon_err_rpt for cnt" - ${radmon_err_rpt} "${prev_low_count}" "${low_count}" cnt "${qdate}" \ - "${PDY}${cyc}" ${diag_report} ${count_err} + ${radmon_err_rpt} ${prev_low_count} ${low_count} cnt ${qdate} \ + ${PDATE} ${diag_report} ${count_err} fi #------------------------------------------------------------------- @@ -428,18 +445,18 @@ EOF echo DOING ERROR REPORTING - cat << EOF > ${report} + cat << EOF > $report Radiance Monitor warning report Net: ${RADMON_SUFFIX} Run: ${RUN} - Cycle: ${PDY}${cyc} + Cycle: $PDATE EOF if [[ -s ${diag_report} ]]; then echo OUTPUTING DIAG_REPORT - cat ${diag_report} >> ${report} + cat ${diag_report} >> $report fi if [[ -s ${chan_err} ]]; then @@ -455,8 +472,8 @@ EOF EOF - cat ${chan_hdr} >> ${report} - cat ${chan_err} >> ${report} + cat ${chan_hdr} >> $report + cat ${chan_err} >> $report fi @@ -473,8 +490,8 @@ Satellite/Instrument Obs Count Avg Count EOF - cat ${count_hdr} >> ${report} - cat ${count_err} >> ${report} + cat ${count_hdr} >> $report + cat ${count_err} >> $report fi @@ -490,15 +507,15 @@ EOF ============ ======= ====== Cycle Penalty Bound ----- ------- ----- EOF - cat ${pen_hdr} >> ${report} - cat ${pen_err} >> ${report} + cat ${pen_hdr} >> $report + cat ${pen_err} >> $report rm -f ${pen_hdr} rm -f ${pen_err} fi - echo >> ${report} - cat ${disclaimer} >> ${report} - echo >> ${report} + echo >> $report + cat ${disclaimer} >> $report + echo >> $report fi #------------------------------------------------------------------- @@ -506,10 +523,10 @@ EOF # if [[ -s ${report} ]]; then lines=$(wc -l <${report}) - if [[ ${lines} -gt 2 ]]; then + if [[ $lines -gt 2 ]]; then cat ${report} - ${NCP} ${report} "${TANKverf_rad}/warning.${PDY}${cyc}" + $NCP ${report} ${TANKverf_rad}/warning.${PDATE} fi fi @@ -520,22 +537,22 @@ EOF # copy new bad_pen, bad_chan, 
and low_count files to $TANKverf_rad # if [[ -s ${bad_chan} ]]; then - mv "${bad_chan}" "${TANKverf_rad}/." + mv ${bad_chan} ${TANKverf_rad}/. fi if [[ -s ${bad_pen} ]]; then - mv "${bad_pen}" "${TANKverf_rad}/." + mv ${bad_pen} ${TANKverf_rad}/. fi if [[ -s ${low_count} ]]; then - mv "${low_count}" "${TANKverf_rad}/." + mv ${low_count} ${TANKverf_rad}/. fi fi for type in ${SATYPE}; do - rm -f "stdout.${type}" + rm -f stdout.${type} done ################################################################################ diff --git a/ush/rstprod.sh b/ush/rstprod.sh new file mode 100755 index 0000000000..acac0340bb --- /dev/null +++ b/ush/rstprod.sh @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" + +#--------------------------------------------------------- +# rstprod.sh +# +# Restrict data from select sensors and satellites +#--------------------------------------------------------- + +# Restrict select sensors and satellites + +export CHGRP_CMD=${CHGRP_CMD:-"chgrp ${group_name:-rstprod}"} +rlist="saphir abi_g16" +for rtype in $rlist; do + if compgen -G "*${rtype}*" > /dev/null; then + ${CHGRP_CMD} *${rtype}* + fi +done From 03d6877dc6c6f3f59b45e8308e355328d2ed9fdc Mon Sep 17 00:00:00 2001 From: Andrew Collard Date: Sat, 2 Nov 2024 23:00:10 +0000 Subject: [PATCH 21/22] Fix info messages --- sorc/build_all.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sorc/build_all.sh b/sorc/build_all.sh index bd35a3be72..82d6ffef40 100755 --- a/sorc/build_all.sh +++ b/sorc/build_all.sh @@ -87,7 +87,7 @@ fi # build gsi_utils #------------------------------------ $Build_gsi && { -echo " .... Building gsi .... " +echo " .... Building gsi_utils .... " ./build_gsi_utils.sh > $logs_dir/build_gsi_utils.log 2>&1 rc=$? if [[ $rc -ne 0 ]] ; then @@ -101,7 +101,7 @@ fi # build gsi_monitor #------------------------------------ $Build_gsi && { -echo " .... Building gsi .... " +echo " .... Building gsi_monitor .... " ./build_gsi_monitor.sh > $logs_dir/build_gsi_monitor.log 2>&1 rc=$? if [[ $rc -ne 0 ]] ; then From 11c5163b88e513ed088ea65609f02aec1c25cc10 Mon Sep 17 00:00:00 2001 From: Andrew Collard Date: Mon, 4 Nov 2024 01:17:30 +0000 Subject: [PATCH 22/22] Slightly earlier tag for GSI-Monitor --- sorc/checkout.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sorc/checkout.sh b/sorc/checkout.sh index 658014b9ce..ba5b719cd2 100755 --- a/sorc/checkout.sh +++ b/sorc/checkout.sh @@ -60,10 +60,10 @@ fi echo gsi_monitor checkout ... if [[ ! -d gsi_monitor.fd ]] ; then rm -f ${topdir}/checkout-gsi_monitor.log -# Check out a version before the changes for Thompson microphysics were introduced. +# Check out a version before changes for the new directory structure were introduced. git clone https://github.com/NOAA-EMC/GSI-Monitor.git gsi_monitor.fd >> ${topdir}/checkout-gsi_monitor.log 2>&1 cd gsi_monitor.fd - git checkout 94588d63ca636269474bf865603e0ccfeb4dc049 + git checkout e1f9f21af16ce912fdc2cd75c5b27094a550a0c5 cd ${topdir} else echo 'Skip. Directory gsi_monitor.fd already exists.'
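
A brief aside, not part of the patch series above: since checkout.sh now pins gsi_monitor.fd to a specific commit, a quick way to confirm the pin took effect is a plain git query against the checked-out directory. The snippet below is only an illustrative sketch, not code from the patch; it assumes it is run from the sorc directory after checkout.sh has completed, and the expected hash is the one quoted in the patch above.

# Sketch: confirm gsi_monitor.fd is at the pinned commit (assumes sorc is the current directory)
expected="e1f9f21af16ce912fdc2cd75c5b27094a550a0c5"
actual=$(git -C gsi_monitor.fd rev-parse HEAD)
if [[ "${actual}" == "${expected}" ]]; then
  echo "gsi_monitor.fd is at the expected commit"
else
  echo "gsi_monitor.fd is at ${actual}, expected ${expected}"
fi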