diff --git a/docs/source/jobs.rst b/docs/source/jobs.rst index 0e3700bf20..30411283a6 100644 --- a/docs/source/jobs.rst +++ b/docs/source/jobs.rst @@ -33,7 +33,7 @@ Jobs in the GFS Configuration | anal | Runs the analysis. 1) Runs the atmospheric analysis (global_gsi) to produce analysis increments; 2) Update surface | | | guess file via global_cycle to create surface analysis on tiles. | +-------------------+-----------------------------------------------------------------------------------------------------------------------+ -| analcalc | Adds the analysis increments to previous cycle’s forecasts to produce atmospheric analysis files. Produces surface | +| analcalc_gsi | Adds the analysis increments to previous cycle’s forecasts to produce atmospheric analysis files. Produces surface | | | analysis file on Gaussian grid. | +-------------------+-----------------------------------------------------------------------------------------------------------------------+ | analdiag | Creates netCDF diagnostic files containing observation values, innovation (O-F), error, quality control, as well as | diff --git a/env/GAEA.env b/env/GAEA.env index 7736e0f1ea..d80958e54d 100755 --- a/env/GAEA.env +++ b/env/GAEA.env @@ -41,7 +41,7 @@ if [[ "${step}" = "prep" ]]; then export sys_tp="GAEA" export launcher_PREP="srun" -elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then +elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc_gsi" ]]; then export MKL_NUM_THREADS=4 export MKL_CBWR=AUTO diff --git a/env/HERA.env b/env/HERA.env index 0d77547b5b..94df9766af 100755 --- a/env/HERA.env +++ b/env/HERA.env @@ -160,7 +160,12 @@ elif [[ "${step}" = "marineanalletkf" ]]; then export NTHREADS_MARINEANALLETKF=${NTHREADSmax} export APRUN_MARINEANALLETKF="${APRUN_default} --cpus-per-task=${NTHREADS_MARINEANALLETKF}" -elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then +elif [[ "${step}" = "analcalc_fv3jedi" ]]; then + + export 
NTHREADS_ANALCALC_FV3JEDI=${NTHREADSmax} + export APRUN_ANALCALC_FV3JEDI="${APRUN_default} --cpus-per-task=${NTHREADS_ANALCALC_FV3JEDI}" + +elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc_gsi" ]]; then export MKL_NUM_THREADS=4 export MKL_CBWR=AUTO diff --git a/env/HERCULES.env b/env/HERCULES.env index 0138e33645..0aa521b029 100755 --- a/env/HERCULES.env +++ b/env/HERCULES.env @@ -154,8 +154,13 @@ case ${step} in export NTHREADS_OCNANAL=${NTHREADSmax} export APRUN_OCNANAL="${APRUN_default} --cpus-per-task=${NTHREADS_OCNANAL}" - ;; - "anal" | "analcalc") +;; + "analcalc_fv3jedi") + + export NTHREADS_ANALCALC_FV3JEDI=${NTHREADSmax} + export APRUN_ANALCALC_FV3JEDI="${APRUN_default} --cpus-per-task=${NTHREADS_ANALCALC_FV3JEDI}" +;; + "anal" | "analcalc_gsi") export MKL_NUM_THREADS=4 export MKL_CBWR=AUTO diff --git a/env/JET.env b/env/JET.env index f2b018d2d7..4d3b4b36bd 100755 --- a/env/JET.env +++ b/env/JET.env @@ -126,7 +126,12 @@ elif [[ "${step}" = "ocnanalrun" ]]; then export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" export APRUN_OCNANAL="${APRUN_default}" -elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then +elif [[ "${step}" = "analcalc_fv3jedi" ]]; then + + export NTHREADS_ANALCALC_FV3JEDI=${NTHREADSmax} + export APRUN_ANALCALC_FV3JEDI="${APRUN_default}" + +elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc_gsi" ]]; then export MKL_NUM_THREADS=4 export MKL_CBWR=AUTO diff --git a/env/ORION.env b/env/ORION.env index e8c1bcbf58..d65ecc7b92 100755 --- a/env/ORION.env +++ b/env/ORION.env @@ -153,7 +153,12 @@ elif [[ "${step}" = "marineanalletkf" ]]; then export NTHREADS_MARINEANALLETKF=${NTHREADSmax} export APRUN_MARINEANALLETKF="${APRUN_default} --cpus-per-task=${NTHREADS_MARINEANALLETKF}" -elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then +elif [[ "${step}" = "analcalc_fv3jedi" ]]; then + + export NTHREADS_ANALCALC_FV3JEDI=${NTHREADSmax} + export APRUN_ANALCALC_FV3JEDI="${APRUN_default} 
--cpus-per-task=${NTHREADS_ANALCALC_FV3JEDI}" + +elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc_gsi" ]]; then export MKL_NUM_THREADS=4 export MKL_CBWR=AUTO diff --git a/env/S4.env b/env/S4.env index 5d5ffd23b1..e4e1b896de 100755 --- a/env/S4.env +++ b/env/S4.env @@ -122,7 +122,12 @@ elif [[ "${step}" = "marinebmat" ]]; then elif [[ "${step}" = "marinerun" ]]; then echo "WARNING: ${step} is not enabled on S4!" -elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then +elif [[ "${step}" = "analcalc_fv3jedi" ]]; then + + export NTHREADS_ANALCALC_FV3JEDI=${NTHREADSmax} + export APRUN_ANALCALC_FV3JEDI="${APRUN_default}" + +elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc_gsi" ]]; then export MKL_NUM_THREADS=4 export MKL_CBWR=AUTO diff --git a/env/WCOSS2.env b/env/WCOSS2.env index cea24fb26b..29fd57d35f 100755 --- a/env/WCOSS2.env +++ b/env/WCOSS2.env @@ -136,13 +136,18 @@ elif [[ "${step}" = "atmanlfv3inc" ]]; then export NTHREADS_ATMANLFV3INC=${NTHREADSmax} export APRUN_ATMANLFV3INC="${APRUN_default}" -elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then +elif [[ "${step}" = "analcalc_fv3jedi" ]]; then + + export NTHREADS_ANALCALC_FV3JEDI=${NTHREADSmax} + export APRUN_ANALCALC_FV3JEDI="${APRUN_default}" + +elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc_gsi" ]]; then export OMP_PLACES=cores export OMP_STACKSIZE=1G export FI_OFI_RXM_SAR_LIMIT=3145728 - if [[ "${step}" = "analcalc" ]]; then + if [[ "${step}" = "analcalc_gsi" ]]; then export MPICH_MPIIO_HINTS="*:romio_cb_write=disable" fi diff --git a/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC_FV3JEDI b/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC_FV3JEDI new file mode 100755 index 0000000000..af4e5537cf --- /dev/null +++ b/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC_FV3JEDI @@ -0,0 +1,55 @@ +#! 
/usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "analcalc_fv3jedi" -c "base analcalc_fv3jedi" + +############################################## +# Set variables used in the script +############################################## + +GDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +gPDY=${GDATE:0:8} +gcyc=${GDATE:8:2} +GDUMP="gdas" + +############################################## +# Begin JOB SPECIFIC work +############################################## + +# Generate COM variables from templates +YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ + COM_ATMOS_ANALYSIS +RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \ + COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL + +############################################## +# Run relevant script +############################################## + +EXSCRIPT=${GDASATMRUNSH:-${SCRgfs}/exglobal_atmos_analysis_calc_fv3jedi.py} +${EXSCRIPT} +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## + +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +############################################## +# Remove the Temporary working directory +############################################## + +cd "${DATAROOT}" || ( echo "FATAL ERROR: ${DATAROOT} does not exist, ABORT!"; exit 1 ) +if [[ ${KEEPDATA} = "NO" ]]; then + rm -rf "${DATA}" +fi + +exit 0 diff --git a/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC b/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC_GSI similarity index 93% rename from jobs/JGLOBAL_ATMOS_ANALYSIS_CALC rename to jobs/JGLOBAL_ATMOS_ANALYSIS_CALC_GSI index 5b6073254a..5671864fb2 100755 --- a/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC +++ b/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC_GSI @@ -1,7 +1,7 @@ #! 
/usr/bin/env bash source "${HOMEgfs}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "analcalc" -c "base anal analcalc" +source "${HOMEgfs}/ush/jjob_header.sh" -e "analcalc_gsi" -c "base anal analcalc_gsi" ############################################## @@ -56,7 +56,7 @@ export DOGAUSFCANL=${DOGAUSFCANL:-"YES"} ############################################################### # Run relevant script -${ANALCALCSH:-${SCRgfs}/exglobal_atmos_analysis_calc.sh} +${ANALCALCSH:-${SCRgfs}/exglobal_atmos_analysis_calc_gsi.sh} status=$? [[ ${status} -ne 0 ]] && exit ${status} diff --git a/jobs/rocoto/analcalc_fv3jedi.sh b/jobs/rocoto/analcalc_fv3jedi.sh new file mode 100755 index 0000000000..22e8abd4ed --- /dev/null +++ b/jobs/rocoto/analcalc_fv3jedi.sh @@ -0,0 +1,18 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="analcalc_fv3jedi" +export jobid="${job}.$$" + +############################################################### +# Execute the JJOB +"${HOMEgfs}"/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC_FV3JEDI +status=$? +exit "${status}" diff --git a/jobs/rocoto/analcalc.sh b/jobs/rocoto/analcalc_gsi.sh similarity index 83% rename from jobs/rocoto/analcalc.sh rename to jobs/rocoto/analcalc_gsi.sh index 2e669b0163..66d264a8bd 100755 --- a/jobs/rocoto/analcalc.sh +++ b/jobs/rocoto/analcalc_gsi.sh @@ -8,12 +8,12 @@ source "${HOMEgfs}/ush/preamble.sh" status=$? [[ ${status} -ne 0 ]] && exit ${status} -export job="analcalc" +export job="analcalc_gsi" export jobid="${job}.$$" ############################################################### # Execute the JJOB -${HOMEgfs}/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC +"${HOMEgfs}"/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC_GSI status=$? 
diff --git a/parm/config/gfs/config.analcalc b/parm/config/gfs/config.analcalc deleted file mode 100644 index d9501503f0..0000000000 --- a/parm/config/gfs/config.analcalc +++ /dev/null @@ -1,11 +0,0 @@ -#! /usr/bin/env bash - -########## config.analcalc ########## -# GFS post-anal specific (non-diag) - -echo "BEGIN: config.analcalc" - -# Get task specific resources -. ${EXPDIR}/config.resources analcalc - -echo "END: config.analcalc" diff --git a/parm/config/gfs/config.analcalc_fv3jedi b/parm/config/gfs/config.analcalc_fv3jedi new file mode 100644 index 0000000000..e1fdb867e2 --- /dev/null +++ b/parm/config/gfs/config.analcalc_fv3jedi @@ -0,0 +1,26 @@ +#! /usr/bin/env bash + +########## config.analcalc_fv3jedi ########## +# Diagnostic atmospheric analysis calculation specific + +echo "BEGIN: config.analcalc_fv3jedi" + +export layout_x_analcalc_fv3jedi=2 +export layout_y_analcalc_fv3jedi=2 + +# Get task specific resources +. "${EXPDIR}/config.resources" analcalc_fv3jedi + +export JCB_BASE_YAML=${PARMgfs}/gdas/atm/jcb-base.yaml.j2 +export JCB_ALGO=fv3jedi_analcalc +export JEDIEXE=${EXECgfs}/fv3jedi_analcalc.x + +if [[ ${DOHYBVAR} = "YES" ]]; then + export CASE_ANL=${CASE_ENS} +else + export CASE_ANL=${CASE} +fi + +export JEDI_FIX_YAML="${PARMgfs}/gdas/atm_jedi_fix.yaml.j2" + +echo "END: config.analcalc_fv3jedi" diff --git a/parm/config/gfs/config.analcalc_gsi b/parm/config/gfs/config.analcalc_gsi new file mode 100644 index 0000000000..adc7b75570 --- /dev/null +++ b/parm/config/gfs/config.analcalc_gsi @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analcalc_gsi ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc_gsi" + +# Get task specific resources +. 
"${EXPDIR}"/config.resources analcalc_gsi + +echo "END: config.analcalc_gsi" diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources index afc5939fcd..148a439606 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -17,7 +17,7 @@ if (( $# != 1 )); then echo "atmensanlinit atmensanlobs atmensanlsol atmensanlletkf atmensanlfv3inc atmensanlfinal" echo "snowanl esnowrecen" echo "prepobsaero aeroanlinit aeroanlvar aeroanlfinal aeroanlgenb" - echo "anal sfcanl analcalc analdiag fcst echgres" + echo "anal sfcanl analcalc_fv3jedi analcalc_gsi analdiag fcst echgres" echo "upp atmos_products" echo "tracker genesis genesis_fsu" echo "verfozn verfrad vminmon fit2obs metp arch cleanup" @@ -700,7 +700,18 @@ case ${step} in export is_exclusive=True ;; - "analcalc") + "analcalc_fv3jedi") + export layout_x=${layout_x_analcalc_fv3jedi} + export layout_y=${layout_y_analcalc_fv3jedi} + + walltime="00:15:00" + ntasks=$(( layout_x * layout_y * 6 )) + threads_per_task=1 + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) + export is_exclusive=True + ;; + + "analcalc_gsi") walltime="00:15:00" ntasks=127 export ntasks_calcanl="${ntasks}" diff --git a/scripts/exglobal_atmos_analysis_calc_fv3jedi.py b/scripts/exglobal_atmos_analysis_calc_fv3jedi.py new file mode 100755 index 0000000000..3ac97c4a29 --- /dev/null +++ b/scripts/exglobal_atmos_analysis_calc_fv3jedi.py @@ -0,0 +1,31 @@ +#!/usr/bin/env python3 +# exglobal_atm_calc_analysis.py +# This script creates an AnalysisCalc object +# and runs the execute method which executes +# the diagnostic global analysis calculation +import os + +from wxflow import Logger, cast_strdict_as_dtypedict +from pygfs.task.analcalc import AnalysisCalc + +# Initialize root logger +logger = Logger(level='DEBUG', colored_log=True) + + +if __name__ == '__main__': + + # Take configuration from environment and cast it as python dictionary + config = cast_strdict_as_dtypedict(os.environ) + + # 
Instantiate the AnalysisCalc task + AnalCalc = AnalysisCalc(config, 'fv3jedi_analcalc') + + # Initialize + AnalCalc.initialize_jedi() + AnalCalc.initialize() + + # Execute JEDI application + AnalCalc.execute(config.APRUN_ANALCALC_FV3JEDI) + + # Finalize + AnalCalc.finalize() diff --git a/scripts/exglobal_atmos_analysis_calc.sh b/scripts/exglobal_atmos_analysis_calc_gsi.sh similarity index 100% rename from scripts/exglobal_atmos_analysis_calc.sh rename to scripts/exglobal_atmos_analysis_calc_gsi.sh diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 7c1c181359..d8b47436e8 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 7c1c181359c2c1952bab3dc1c481bbdb361aa472 +Subproject commit d8b47436e8bccb712929315dc2f1d6bf4be7c74d diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index 270a8bb1c9..462292a613 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -368,6 +368,7 @@ if [[ -d "${HOMEgfs}/sorc/gdas.cd/build" ]]; then "fv3jedi_plot_field.x" \ "gdasapp_chem_diagb.x" \ "fv3jedi_fv3inc.x" \ + "fv3jedi_analcalc.x" \ "gdas_ens_handler.x" \ "gdas_incr_handler.x" \ "gdas_obsprovider2ioda.x" \ diff --git a/sorc/ufs_model.fd b/sorc/ufs_model.fd index fcc9f8461d..38a29a6246 160000 --- a/sorc/ufs_model.fd +++ b/sorc/ufs_model.fd @@ -1 +1 @@ -Subproject commit fcc9f8461db5eafbfd1f080da61ea79156ca0145 +Subproject commit 38a29a62461cb1f9bf530420d5bc2f73a4650724 diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh index 58755d41d9..33b8b81c45 100755 --- a/ush/forecast_postdet.sh +++ b/ush/forecast_postdet.sh @@ -95,6 +95,7 @@ FV3_postdet() { read_increment=".true." res_latlon_dynamics="atminc.nc" fi + increment_file_on_native_grid=".false." local increment_file for inc_file in "${inc_files[@]}"; do increment_file="${COMIN_ATMOS_INPUT}/${RUN}.t${cyc}z.${inc_file}" @@ -158,9 +159,16 @@ EOF delimiter="," done else # "${DOIAU}" == "NO" - inc_files=("atminc.nc") read_increment=".true." 
- res_latlon_dynamics="atminc.nc" + if [[ "${DO_JEDIATMVAR:-NO}" == "YES" ]]; then + inc_files=("atminc.tile1.nc" "atminc.tile2.nc" "atminc.tile3.nc" "atminc.tile4.nc" "atminc.tile5.nc" "atminc.tile6.nc") + res_latlon_dynamics="atminc" + increment_file_on_native_grid=".true." + else + inc_files=("atminc.nc") + res_latlon_dynamics="atminc.nc" + increment_file_on_native_grid=".false." + fi if [[ "${REPLAY_ICS:-NO}" == "YES" ]]; then IAU_FHROT=${half_window} # Replay ICs start at the end of the assimilation window # Control member has no perturbation @@ -169,6 +177,7 @@ EOF read_increment=".false." res_latlon_dynamics='""' fi + increment_file_on_native_grid=".false." fi fi diff --git a/ush/forecast_predet.sh b/ush/forecast_predet.sh index d7c04ea699..642a76db74 100755 --- a/ush/forecast_predet.sh +++ b/ush/forecast_predet.sh @@ -362,6 +362,7 @@ FV3_predet(){ warm_start=".false." read_increment=".false." res_latlon_dynamics='""' + increment_file_on_native_grid=".true." # Stochastic Physics Options do_skeb=".false." diff --git a/ush/parsing_namelists_FV3.sh b/ush/parsing_namelists_FV3.sh index 617ecff719..84b467a91f 100755 --- a/ush/parsing_namelists_FV3.sh +++ b/ush/parsing_namelists_FV3.sh @@ -180,6 +180,7 @@ cat > input.nml < None: + """Initialize JEDI application + + This method will initialize a JEDI application used in the diagnostic + atmospheric analysis computation task. 
+ This includes: + - generating and saving JEDI YAML config + - staging the JEDI fix files + - linking the JEDI executable + + Parameters + ---------- + None + + Returns + ---------- + None + """ + + # get JEDI-to-FV3 increment converter config and save to YAML file + logger.info(f"Generating JEDI YAML config: {self.jedi.yaml}") + self.jedi.set_config(self.task_config) + logger.debug(f"JEDI config:\n{pformat(self.jedi.config)}") + + # save JEDI config to YAML file + logger.debug(f"Writing JEDI YAML config to: {self.jedi.yaml}") + save_as_yaml(self.jedi.config, self.jedi.yaml) + + # stage fix files + if not os.path.isdir(self.task_config.DATA + 'fv3jedi'): + logger.info(f"Staging JEDI fix files from {self.task_config.JEDI_FIX_YAML}") + jedi_fix_dict = parse_j2yaml(self.task_config.JEDI_FIX_YAML, self.task_config) + FileHandler(jedi_fix_dict).sync() + logger.debug(f"JEDI fix files:\n{pformat(jedi_fix_dict)}") + + # link JEDI executable + logger.info(f"Linking JEDI executable {self.task_config.JEDIEXE} to {self.jedi.exe}") + self.jedi.link_exe(self.task_config) + + @logit(logger) + def initialize(self) -> None: + """Initialize the diagnostic atmospheric analysis computation task + + This method will initialize the diagnostic atmospheric analysis computation task. 
+ This includes: + - creating working directories for each forecast hour + - staging backgrounds and increments + + Parameters + ---------- + None + + Returns + ---------- + None + """ + + # Initialize dictionary used to construct Filehandler + fh_dict = {'mkdir': [], + 'copy': []} + + # Initialize FileHandler to make directories and copy files + hist_prefix = f"{self.task_config.COM_ATMOS_HISTORY_PREV}/{self.task_config.GPREFIX}" + anl_prefix = f"{self.task_config.COM_ATMOS_ANALYSIS}/{self.task_config.APREFIX}" + for fh in self.task_config.IAUFHRS: + fh_dict['mkdir'].append(self.task_config.AnalCalcDir(fh)) + + fh_dict['copy'].append([f"{hist_prefix}cubed_sphere_grid_atmf{format(fh, '03')}.nc", + f"{self.task_config.AnalCalcDir(fh)}/ges.atm.{format(fh, '02')}.nc"]) + fh_dict['copy'].append([f"{hist_prefix}cubed_sphere_grid_sfcf{format(fh, '03')}.nc", + f"{self.task_config.AnalCalcDir(fh)}/ges.sfc.{format(fh, '02')}.nc"]) + + if fh == 6: + for itile in range(6): + fh_dict['copy'].append([f"{anl_prefix}atminc.tile{itile+1}.nc", + f"{self.task_config.AnalCalcDir(fh)}/siginc.06.tile{itile+1}.nc"]) + else: + for itile in range(6): + fh_dict['copy'].append([f"{anl_prefix}/atmi{format(fh, '02')}.tile{itile+1}.nc", + f"{self.task_config.AnalCalcDir(fh)}/siginc.{format(fh, '02')}.tile{itile+1}.nc"]) + + # Stage files + FileHandler(fh_dict).sync() + + @logit(logger) + def execute(self, aprun_cmd: str) -> None: + """Run JEDI executable + + This method will run the JEDI executable for the diagnostic atmospheric analysis computation + + Parameters + ---------- + aprun_cmd : str + Run command for JEDI application on HPC system + + Returns + ---------- + None + """ + + self.jedi.execute(self.task_config, aprun_cmd) + + @logit(logger) + def finalize(self) -> None: + """Finalize the diagnostic atmospheric analysis computation task + + This method will finalize the diagnostic atmospheric analysis computation task. 
+ This includes: + - Move analysis files to the comrot directory + + Parameters + ---------- + None + + Returns + ---------- + None + """ + + cdate = to_fv3time(self.task_config.current_cycle).replace('.', '_') + anl_prefix = f"{self.task_config.COM_ATMOS_ANALYSIS}/{self.task_config.APREFIX}" + + # Initialize dictionary used to construct Filehandler + fh_dict = {'mkdir': [], + 'copy': []} + + for fh in self.task_config.IAUFHRS: + if fh == 6: + fh_dict['copy'].append([f"{self.task_config.AnalCalcDir(fh)}/anl.{format(fh, '02')}.{cdate}z.nc4", + f"{anl_prefix}atmanl.nc"]) + fh_dict['copy'].append([f"{self.task_config.AnalCalcDir(fh)}/anl.ensres.{format(fh, '02')}.{cdate}z.nc4", + f"{anl_prefix}atmanl.ensres.nc"]) + else: + fh_dict['copy'].append([f"{self.task_config.AnalCalcDir(fh)}/anl.{format(fh, '02')}.{cdate}z.nc4", + f"{anl_prefix}atma{format(fh, '03')}.nc"]) + fh_dict['copy'].append([f"{self.task_config.AnalCalcDir(fh)}/anl.ensres.{format(fh, '02')}.{cdate}z.nc4", + f"{anl_prefix}atma{format(fh, '03')}.ensres.nc"]) + + FileHandler(fh_dict).sync() diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index 8d340a5b73..d443bb0862 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -301,12 +301,15 @@ def finalize(self) -> None: logger.info("Copy UFS model readable atm increment file") cdate = to_fv3time(self.task_config.current_cycle) cdate_inc = cdate.replace('.', '_') - src = os.path.join(self.task_config.DATA, 'anl', f"atminc.{cdate_inc}z.nc4") - dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f'{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atminc.nc') - logger.debug(f"Copying {src} to {dest}") - inc_copy = { - 'copy': [[src, dest]] - } + inc_copy = {'copy': []} + for itile in range(6): + src = os.path.join(self.task_config.DATA, 'anl', f"atminc.{cdate_inc}z.tile{itile+1}.nc4") + dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, 
f'{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atminc.tile{itile+1}.nc') + inc_copy['copy'].append([src, dest]) + + # copy increments + src_list, dest_list = zip(*inc_copy['copy']) + logger.debug(f"Copying {src_list}\nto {dest_list}") FileHandler(inc_copy).sync() def clean(self): diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index 55e72702b1..a8987f0347 100644 --- a/ush/python/pygfs/task/atmens_analysis.py +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -268,14 +268,15 @@ def finalize(self) -> None: # create output path for member analysis increment tmpl_inc_dict['MEMDIR'] = memchar incdir = Template.substitute_structure(template_inc, TemplateConstants.DOLLAR_CURLY_BRACE, tmpl_inc_dict.get) - src = os.path.join(self.task_config.DATA, 'anl', memchar, f"atminc.{cdate_inc}z.nc4") - dest = os.path.join(incdir, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atminc.nc") - - # copy increment - logger.debug(f"Copying {src} to {dest}") - inc_copy = { - 'copy': [[src, dest]] - } + inc_copy = {'copy': []} + for itile in range(6): + src = os.path.join(self.task_config.DATA, 'anl', memchar, f"atminc.{cdate_inc}z.tile{itile+1}.nc4") + dest = os.path.join(incdir, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atminc.tile{itile+1}.nc") + inc_copy['copy'].append([src, dest]) + + # copy increments + src_list, dest_list = zip(*inc_copy['copy']) + logger.debug(f"Copying {src_list}\nto {dest_list}") FileHandler(inc_copy).sync() def clean(self): diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py index 4bb473f454..fb9b175207 100644 --- a/workflow/applications/gfs_cycled.py +++ b/workflow/applications/gfs_cycled.py @@ -39,9 +39,9 @@ def _get_app_configs(self): configs = ['prep'] if self.do_jediatmvar: - configs += ['prepatmiodaobs', 'atmanlinit', 'atmanlvar', 'atmanlfv3inc', 'atmanlfinal'] + configs += ['prepatmiodaobs', 'atmanlinit', 'atmanlvar', 'atmanlfv3inc', 'atmanlfinal', 
'analcalc_fv3jedi'] else: - configs += ['anal', 'analdiag'] + configs += ['anal', 'analdiag', 'analcalc_gsi'] if self.do_jediocnvar: configs += ['prepoceanobs', 'ocnanalprep', 'marinebmat', 'ocnanalrun'] @@ -54,7 +54,7 @@ def _get_app_configs(self): if self.do_ocean or self.do_ice: configs += ['oceanice_products'] - configs += ['stage_ic', 'sfcanl', 'analcalc', 'fcst', 'upp', 'atmos_products', 'arch', 'cleanup'] + configs += ['stage_ic', 'sfcanl', 'fcst', 'upp', 'atmos_products', 'arch', 'cleanup'] if self.do_hybvar: if self.do_jediatmens: @@ -141,9 +141,9 @@ def get_task_names(self): gdas_gfs_common_cleanup_tasks = ['arch', 'cleanup'] if self.do_jediatmvar: - gdas_gfs_common_tasks_before_fcst += ['prepatmiodaobs', 'atmanlinit', 'atmanlvar', 'atmanlfv3inc', 'atmanlfinal'] + gdas_gfs_common_tasks_before_fcst += ['prepatmiodaobs', 'atmanlinit', 'atmanlvar', 'atmanlfv3inc', 'atmanlfinal', 'analcalc_fv3jedi'] else: - gdas_gfs_common_tasks_before_fcst += ['anal'] + gdas_gfs_common_tasks_before_fcst += ['anal', 'analcalc_gsi'] if self.do_jediocnvar: gdas_gfs_common_tasks_before_fcst += ['prepoceanobs', 'ocnanalprep', 'marinebmat', 'ocnanalrun'] @@ -153,7 +153,7 @@ def get_task_names(self): if self.do_vrfy_oceanda: gdas_gfs_common_tasks_before_fcst += ['ocnanalvrfy'] - gdas_gfs_common_tasks_before_fcst += ['sfcanl', 'analcalc'] + gdas_gfs_common_tasks_before_fcst += ['sfcanl'] if self.do_jedisnowda: gdas_gfs_common_tasks_before_fcst += ['prepsnowobs', 'snowanl'] diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index 89da933d00..9add4bb0fb 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -244,13 +244,39 @@ def sfcanl(self): return task - def analcalc(self): + def analcalc_fv3jedi(self): deps = [] - if self.app_config.do_jediatmvar: - dep_dict = {'type': 'task', 'name': f'{self.run}atmanlfinal'} - else: - dep_dict = {'type': 'task', 'name': f'{self.run}anal'} + dep_dict = {'type': 'task', 'name': 
f'{self.run}atmanlfinal'} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'task', 'name': f'{self.run}sfcanl'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_hybvar and self.run in ['gdas']: + dep_dict = {'type': 'task', 'name': 'enkfgdasechgres', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('analcalc_fv3jedi') + task_name = f'{self.run}analcalc_fv3jedi' + task_dict = {'task_name': task_name, + 'resources': resources, + 'dependency': dependencies, + 'envars': self.envars, + 'cycledef': self.run.replace('enkf', ''), + 'command': f'{self.HOMEgfs}/jobs/rocoto/analcalc_fv3jedi.sh', + 'job_name': f'{self.pslot}_{task_name}_@H', + 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', + 'maxtries': '&MAXTRIES;' + } + + task = rocoto.create_task(task_dict) + + return task + + def analcalc_gsi(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.run}anal'} deps.append(rocoto.add_dependency(dep_dict)) dep_dict = {'type': 'task', 'name': f'{self.run}sfcanl'} deps.append(rocoto.add_dependency(dep_dict)) @@ -259,14 +285,14 @@ def analcalc(self): deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - resources = self.get_resource('analcalc') - task_name = f'{self.run}analcalc' + resources = self.get_resource('analcalc_gsi') + task_name = f'{self.run}analcalc_gsi' task_dict = {'task_name': task_name, 'resources': resources, 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/jobs/rocoto/analcalc.sh', + 'command': f'{self.HOMEgfs}/jobs/rocoto/analcalc_gsi.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -2643,7 +2669,7 @@ def 
_get_ecengroups(): return grp, dep, lst deps = [] - dep_dict = {'type': 'task', 'name': f'{self.run.replace("enkf","")}analcalc'} + dep_dict = {'type': 'task', 'name': f'{self.run.replace("enkf","")}analcalc_gsi'} deps.append(rocoto.add_dependency(dep_dict)) if self.app_config.do_jediatmens: dep_dict = {'type': 'task', 'name': f'{self.run}atmensanlfinal'} @@ -2689,7 +2715,7 @@ def esfc(self): # eupd_run = 'gdas' if 'gdas' in self.app_config.eupd_runs else 'gfs' deps = [] - dep_dict = {'type': 'task', 'name': f'{self.run.replace("enkf","")}analcalc'} + dep_dict = {'type': 'task', 'name': f'{self.run.replace("enkf","")}analcalc_gsi'} deps.append(rocoto.add_dependency(dep_dict)) if self.app_config.do_jediatmens: dep_dict = {'type': 'task', 'name': f'{self.run}atmensanlfinal'} diff --git a/workflow/rocoto/tasks.py b/workflow/rocoto/tasks.py index df2b0467db..e38ad75c39 100644 --- a/workflow/rocoto/tasks.py +++ b/workflow/rocoto/tasks.py @@ -12,7 +12,7 @@ class Tasks: SERVICE_TASKS = ['arch', 'earc'] VALID_TASKS = ['aerosol_init', 'stage_ic', - 'prep', 'anal', 'sfcanl', 'analcalc', 'analdiag', 'arch', "cleanup", + 'prep', 'anal', 'sfcanl', 'analcalc_fv3jedi', 'analcalc_gsi', 'analdiag', 'arch', "cleanup", 'prepatmiodaobs', 'atmanlinit', 'atmanlvar', 'atmanlfv3inc', 'atmanlfinal', 'prepoceanobs', 'ocnanalprep', 'marinebmat', 'ocnanalrun', 'ocnanalecen', 'ocnanalchkpt', 'ocnanalpost', 'ocnanalvrfy',