From 36d9e2a23d55bf6b30d0e906b877fd5f1aa54980 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Sun, 8 Sep 2024 17:04:35 +0000 Subject: [PATCH 01/71] Initial commit --- env/HERA.env | 5 + env/HERCULES.env | 7 +- env/JET.env | 5 + env/ORION.env | 5 + env/S4.env | 5 + env/WCOSS2.env | 5 + jobs/JGLOBAL_ATM_CALC_ANALYSIS | 46 +++++++ jobs/rocoto/calcanl.sh | 18 +++ parm/config/gfs/config.calcanl | 14 ++ parm/config/gfs/config.resources | 17 ++- scripts/exglobal_atm_calc_analysis.py | 28 ++++ ush/python/pygfs/__init__.py | 1 + ush/python/pygfs/task/calcanl.py | 178 ++++++++++++++++++++++++++ workflow/applications/gfs_cycled.py | 12 +- workflow/rocoto/gfs_tasks.py | 34 ++++- workflow/rocoto/tasks.py | 2 +- 16 files changed, 369 insertions(+), 13 deletions(-) create mode 100755 jobs/JGLOBAL_ATM_CALC_ANALYSIS create mode 100755 jobs/rocoto/calcanl.sh create mode 100644 parm/config/gfs/config.calcanl create mode 100755 scripts/exglobal_atm_calc_analysis.py create mode 100644 ush/python/pygfs/task/calcanl.py diff --git a/env/HERA.env b/env/HERA.env index 272c6773f9..416b4466b8 100755 --- a/env/HERA.env +++ b/env/HERA.env @@ -160,6 +160,11 @@ elif [[ "${step}" = "marineanalletkf" ]]; then export NTHREADS_MARINEANALLETKF=${NTHREADSmax} export APRUN_MARINEANALLETKF="${APRUN} --cpus-per-task=${NTHREADS_MARINEANALLETKF}" +elif [[ "${step}" = "calcanl" ]]; then + + export NTHREADS_CALCANAL=${NTHREADSmax} + export APRUN_CALCANL="${APRUN} --cpus-per-task=${NTHREADS_CALCANL}" + elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then export MKL_NUM_THREADS=4 diff --git a/env/HERCULES.env b/env/HERCULES.env index 62b579dda3..1f3365a0aa 100755 --- a/env/HERCULES.env +++ b/env/HERCULES.env @@ -154,7 +154,12 @@ case ${step} in export NTHREADS_OCNANAL=${NTHREADSmax} export APRUN_OCNANAL="${APRUN} --cpus-per-task=${NTHREADS_OCNANAL}" - ;; +;; + "calcanl") + + export NTHREADS_CALCANL=${NTHREADSmax} + export APRUN_CALCANL="${APRUN} --cpus-per-task=${NTHREADS_CALCANL}" +;; "anal" | "analcalc") export MKL_NUM_THREADS=4 diff --git a/env/JET.env b/env/JET.env index 52730fc74c..57dc243c36 100755 --- a/env/JET.env +++ b/env/JET.env @@ -126,6 +126,11 @@ elif [[ "${step}" = "ocnanalrun" ]]; then export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" export APRUN_OCNANAL="${APRUN}" +elif [[ "${step}" = "calcanl" ]]; then + + export NTHREADS_CALCANL=${NTHREADSmax} + export APRUN_CALCANL="${APRUN}" + elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then export MKL_NUM_THREADS=4 diff --git a/env/ORION.env b/env/ORION.env index 638764908f..c188df9ec7 100755 --- a/env/ORION.env +++ b/env/ORION.env @@ -153,6 +153,11 @@ elif [[ "${step}" = "marineanalletkf" ]]; then export NTHREADS_MARINEANALLETKF=${NTHREADSmax} export APRUN_MARINEANALLETKF="${APRUN} --cpus-per-task=${NTHREADS_MARINEANALLETKF}" +elif [[ "${step}" = "calcanl" ]]; then + + export NTHREADS_CALCANL=${NTHREADSmax} + export APRUN_CALCANL="${APRUN} --cpus-per-task=${NTHREADS_CALCANL}" + elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then export MKL_NUM_THREADS=4 diff --git a/env/S4.env b/env/S4.env index dd852afa0f..d0f29ab221 100755 --- a/env/S4.env +++ b/env/S4.env @@ -122,6 +122,11 @@ elif [[ "${step}" = "marinebmat" ]]; then elif [[ "${step}" = "marinerun" ]]; then echo "WARNING: ${step} is not enabled on S4!" 
+elif [[ "${step}" = "calcanl" ]]; then + + export NTHREADS_CALCANL=${NTHREADSmax} + export APRUN_CALCANL="${APRUN}" + elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then export MKL_NUM_THREADS=4 diff --git a/env/WCOSS2.env b/env/WCOSS2.env index 2640f85de2..554daa00d0 100755 --- a/env/WCOSS2.env +++ b/env/WCOSS2.env @@ -136,6 +136,11 @@ elif [[ "${step}" = "atmanlfv3inc" ]]; then export NTHREADS_ATMANLFV3INC=${NTHREADSmax} export APRUN_ATMANLFV3INC="${APRUN}" +elif [[ "${step}" = "calcanl" ]]; then + + export NTHREADS_CALCANL=${NTHREADSmax} + export APRUN_CALCANL="${APRUN}" + elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then export OMP_PLACES=cores diff --git a/jobs/JGLOBAL_ATM_CALC_ANALYSIS b/jobs/JGLOBAL_ATM_CALC_ANALYSIS new file mode 100755 index 0000000000..b70684dc9c --- /dev/null +++ b/jobs/JGLOBAL_ATM_CALC_ANALYSIS @@ -0,0 +1,46 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "calcanl" -c "base calcanl" + +############################################## +# Set variables used in the script +############################################## + + +############################################## +# Begin JOB SPECIFIC work +############################################## + + +############################################## +# Run relevant script +############################################## + +EXSCRIPT=${GDASATMRUNSH:-${SCRgfs}/exglobal_atm_calc_analysis.py} +${EXSCRIPT} +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## + +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +############################################## +# Remove the Temporary working directory +############################################## + +cd ${DATAROOT} +if [[ ${KEEPDATA} = "NO" ]]; then + rm -rf "${DATA}" +fi + +exit 0 diff --git a/jobs/rocoto/calcanl.sh b/jobs/rocoto/calcanl.sh new file mode 100755 index 0000000000..d345d433c8 --- /dev/null +++ b/jobs/rocoto/calcanl.sh @@ -0,0 +1,18 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="calcanl" +export jobid="${job}.$$" + +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_ATM_CALC_ANALYSIS" +status=$? +exit "${status}" diff --git a/parm/config/gfs/config.calcanl b/parm/config/gfs/config.calcanl new file mode 100644 index 0000000000..20fd0b3706 --- /dev/null +++ b/parm/config/gfs/config.calcanl @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.calcanl ########## +# Diagnostic amospheric analysis calculation specific + +echo "BEGIN: config.calcanl" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" calcanl + +export JCB_ALGO=fv3jedi_calcanl +export JEDIEXE=${EXECgfs}/fv3jedi_calcanl.x + +echo "END: config.calcanl" diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources index 851acb2e0d..cf3f1fb64c 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -17,7 +17,7 @@ if (( $# != 1 )); then echo "atmensanlinit atmensanlobs atmensanlsol atmensanlletkf atmensanlfv3inc atmensanlfinal" echo "snowanl esnowrecen" echo "prepobsaero aeroanlinit aeroanlvar aeroanlfinal aeroanlgenb" - echo "anal sfcanl analcalc analdiag fcst echgres" + echo "anal sfcanl calcanl analcalc analdiag fcst echgres" echo "upp atmos_products" echo "tracker genesis genesis_fsu" echo "verfozn verfrad vminmon fit2obs metp arch cleanup" @@ -684,6 +684,21 @@ case ${step} in export is_exclusive=True ;; + "calcanl") + walltime="00:15:00" + ntasks=127 + export ntasks_calcanl="${ntasks}" + threads_per_task=1 + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) + export threads_per_task_echgres_gdas=4 + export threads_per_task_echgres_gfs=12 + export is_exclusive=True + memory="48GB" + if [[ "${CASE}" == "C384" || "${CASE}" == "C768" ]]; then + memory="${mem_node_max}" + fi + ;; + "analcalc") walltime="00:15:00" ntasks=127 diff --git a/scripts/exglobal_atm_calc_analysis.py b/scripts/exglobal_atm_calc_analysis.py new file mode 100755 index 0000000000..671a7dadd6 --- /dev/null +++ b/scripts/exglobal_atm_calc_analysis.py @@ -0,0 +1,28 @@ +#!/usr/bin/env python3 +# exglobal_atm_calc_analysis.py +# This script creates an CalcAnalysis object +# and runs the execute method which executes +# the diagnostic global analysis calculation +import os + +from wxflow import Logger, cast_strdict_as_dtypedict +from pygfs.task.calcanl import CalcAnalysis + +# Initialize root logger +logger = Logger(level='DEBUG', colored_log=True) + + +if __name__ == '__main__': + + # Take configuration from environment and cast it as python dictionary + config = cast_strdict_as_dtypedict(os.environ) + + # Instantiate the CalcAnalysis task + CalcAnl = CalcAnalysis(config, 'calcanl') + + # Initialize + CalcAnl.initialize_jedi() + CalcAnl.initialize() + + # Execute JEDI application + CalcAnl.execute(config.APRUN_CALCANL) diff --git a/ush/python/pygfs/__init__.py b/ush/python/pygfs/__init__.py index 9f290fafd3..8f66811b80 100644 --- a/ush/python/pygfs/__init__.py +++ b/ush/python/pygfs/__init__.py @@ -8,6 +8,7 @@ from .task.aero_bmatrix import AerosolBMatrix from .task.atm_analysis import AtmAnalysis from .task.atmens_analysis import AtmEnsAnalysis +from .task.calcanl import CalcAnalysis from .task.marine_bmat import MarineBMat from .task.snow_analysis import SnowAnalysis from .task.snowens_analysis import SnowEnsAnalysis diff --git a/ush/python/pygfs/task/calcanl.py b/ush/python/pygfs/task/calcanl.py new file mode 100644 index 0000000000..d429f80102 --- /dev/null +++ b/ush/python/pygfs/task/calcanl.py @@ -0,0 +1,178 @@ +#!/usr/bin/env python3 + +import os +import datetime +from logging import getLogger +from wxflow import Task, cast_as_dtype, logit + +from pygfs.jedi import Jedi + +logger = getLogger(__name__.split('.')[-1]) + + +class CalcAnalysis(Task): + """ + Class for JEDI-based analysis calculation + """ + @logit(logger, name="CalcAnalysis") + def __init__(self, config, yaml_name=None): + super().__init__(config) + + _res = int(self.task_config.CASE[1:]) + _res_anl = int(self.task_config.CASE_ANL[1:]) + + # Create a local dictionary that is repeatedly used across 
this class + local_dict = AttrDict( + { + 'npx_ges': _res + 1, + 'npy_ges': _res + 1, + 'npz_ges': self.task_config.LEVS - 1, + 'npz': self.task_config.LEVS - 1, + 'npx_anl': _res_anl + 1, + 'npy_anl': _res_anl + 1, + 'npz_anl': self.task_config.LEVS - 1, + 'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", + 'GPREFIX': f"gdas.t{self.task_config.previous_cycle.hour:02d}z.", + 'IAUHH': cast_as_dtype(task_config.IAUFHRS) + } + ) + + # Extend task_config with local_dict + self.task_config = AttrDict(**self.task_config, **local_dict) + + # Construct JEDI object + self.jedi = JEDI(self.task_config, yaml_name) + + @logit(logger) + def initialize_jedi(self) -> None: + # get JEDI-to-FV3 increment converter config and save to YAML file + logger.info(f"Generating JEDI YAML config: {self.jedi.yaml}") + self.jedi.set_config(self.task_config) + logger.debug(f"JEDI config:\n{pformat(self.jedi.config)}") + + # save JEDI config to YAML file + logger.debug(f"Writing JEDI YAML config to: {self.jedi.yaml}") + save_as_yaml(self.jedi.config, self.jedi.yaml) + + # link JEDI executable + logger.info(f"Linking JEDI executable {self.task_config.JEDIEXE} to {self.jedi.exe}") + self.jedi.link_exe(self.task_config) + + @logit(logger) + def initialize(self) -> None: + logger.info('calcanl_gfs beginning at: ', datetime.datetime.utcnow()) + + # Initialize FileHandler to make directories and copy files + if task_config.DOIAU and task_config.l4densvar and task_config.lwrite4danl: + fh_dict = {'mkdir': [], + 'copy': []} + + for fh in task_config.IAUHH: + if fh == 6: + # For full res analysis + CalcAnlDir = task_config.DATA + '/calcanl_' + format(fh, '02') + + if not os.path.exists(CalcAnlDir): + fh_dict['mkdir'].append(CalcAnlDir) + fh_dict['copy'].append([task_config.CALCANLEXEC, + CalcAnlDir + '/calc_anl.x']) + fh_dict['copy'].append([task_config.DATA + '/siginc.nc', + CalcAnlDir + '/siginc.nc.06']) + fh_dict['copy'].append([task_config.DATA + '/sigf06', + CalcAnlDir + '/ges.06']) + fh_dict['copy'].append([task_config.DATA + '/siganl', + CalcAnlDir + '/anl.06']) + fh_dict['copy'].append([task_config.CHGRESINCEXEC, + CalcAnlDir + '/chgres_inc.x']) + + # For ensemble res analysis + if Run in ["gdas", "gfs"]: + CalcAnlDir = task_config.DATA + '/calcanl_ensres_' + format(fh, '02') + + if not os.path.exists(CalcAnlDir): + fh_dict['mkdir'].append(CalcAnlDir) + fh_dict['copy'].append([task_config.CALCANLEXEC, + CalcAnlDir + '/calc_anl.x']) + fh_dict['copy'].append([task_config.DATA + '/siginc.nc', + CalcAnlDir + '/siginc.nc.06']) + fh_dict['copy'].append([task_config.COM_ATMOS_ANALYSIS + '/' + task_config.APREFIX + 'atmanl.ensres.nc', + CalcAnlDir + '/anl.ensres.06']) + fh_dict['copy'].append([task_config.COM_ATMOS_HISTORY_PREV + '/' + task_config.GPREFIX + 'atmf006.ensres.nc', + CalcAnlDir + '/ges.ensres.06']) + fh_dict['copy'].append([task_config.DATA + '/sigf06', + CalcAnlDir + '/ges.06']) + else: + if os.path.isfile('sigi' + format(fh, '02') + '.nc'): + # For full res analysis + CalcAnlDir = task_config.DATA + '/calcanl_' + format(fh, '02') + CalcAnlDir6 = task_config.DATA + '/calcanl_' + format(6, '02') + + if not os.path.exists(CalcAnlDir): + fh_dict['mkdir'].append(CalcAnlDir) + if not os.path.exists(CalcAnlDir6): + fh_dict['mkdir'].append(CalcAnlDir6) + fh_dict['copy'].append([task_config.COM_ATMOS_ANALYSIS + '/' + task_config.APREFIX + 'atma' + format(fh, '03') + '.nc', + CalcAnlDir6 + '/anl.' 
+ format(fh, '02')]) + fh_dict['copy'].append([task_config.DATA + '/siga' + format(fh, '02'), + CalcAnlDir6 + '/anl.' + format(fh, '02')]) + fh_dict['copy'].append([task_config.DATA + '/sigi' + format(fh, '02') + '.nc', + CalcAnlDir + '/siginc.nc.' + format(fh, '02')]) + fh_dict['copy'].append([CalcAnlDir6 + '/inc.fullres.' + format(fh, '02'), + CalcAnlDir + '/inc.fullres.' + format(fh, '02')]) + fh_dict['copy'].append([task_config.DATA + '/sigf' + format(fh, '02'), + CalcAnlDir6 + '/ges.' + format(fh, '02')]) + fh_dict['copy'].append([task_config.DATA + '/sigf' + format(fh, '02'), + CalcAnlDir + '/ges.' + format(fh, '02')]) + fh_dict['copy'].append([task_config.CHGRESINCEXEC, + CalcAnlDir + '/chgres_inc.x']) + + # For ensemble res analysis + CalcAnlDir = task_config.DATA + '/calcanl_ensres_' + format(fh, '02') + CalcAnlDir6 = task_config.DATA + '/calcanl_ensres_' + format(6, '02') + if not os.path.exists(CalcAnlDir): + fh_dict['mkdir'].append(CalcAnlDir) + if not os.path.exists(CalcAnlDir6): + fh_dict['mkdir'].append(CalcAnlDir6) + fh_dict['copy'].append([task_config.COM_ATMOS_ANALYSIS + '/' + task_config.APREFIX + 'atma' + format(fh, '03') + '.ensres.nc', + CalcAnlDir6 + '/anl.ensres.' + format(fh, '02')]) + fh_dict['copy'].append([task_config.DATA + '/sigi' + format(fh, '02') + '.nc', + CalcAnlDir6 + '/siginc.nc.' + format(fh, '02')]) + fh_dict['copy'].append([task_config.COM_ATMOS_HISTORY_PREV + '/' + task_config.GPREFIX + 'atmf' + format(fh, '03') + '.ensres.nc', + CalcAnlDir6 + '/ges.ensres.' + format(fh, '02')]) + else: + # For full res analysis + CalcAnlDir = task_config.DATA + '/calcanl_' + format(6, '02') + + if not os.path.exists(CalcAnlDir): + fh_dict['mkdir'].append(CalcAnlDir) + fh_dict['copy'].append([task_config.CALCANLEXEC, + CalcAnlDir + '/calc_anl.x']) + fh_dict['copy'].append([task_config.DATA + '/siginc.nc', + CalcAnlDir + '/siginc.nc.06']) + fh_dict['copy'].append([task_config.DATA + '/sigf06', + CalcAnlDir + '/ges.06']) + fh_dict['copy'].append([task_config.DATA + '/siganl', + CalcAnlDir + '/anl.06']) + fh_dict['copy'].append([task_config.CHGRESINCEXEC, + CalcAnlDir + '/chgres_inc.x']) + + # For ensemble res analysis + CalcAnlDir = task_config.DATA + '/calcanl_ensres_' + format(6, '02') + + if not os.path.exists(CalcAnlDir): + fh_dict['mkdir'].append(CalcAnlDir) + fh_dict['copy'].append([task_config.CALCANLEXEC, + CalcAnlDir + '/calc_anl.x']) + fh_dict['copy'].append([task_config.DATA + '/siginc.nc', + CalcAnlDir + '/siginc.nc.06']) + fh_dict['copy'].append([task_config.COM_ATMOS_ANALYSIS + '/' + APrefix + 'atmanl.ensres.nc', + CalcAnlDir + '/anl.ensres.06']) + fh_dict['copy'].append([task_config.COM_ATMOS_HISTORY_PREV + '/' + GPrefix + 'atmf006.ensres.nc', + CalcAnlDir + '/ges.ensres.06']) + + # Stage files + FileHandler(fh_dict).sync() + + @logit(logger) + def execute(self, aprun_cmd: str) -> None: + self.jedi.execute(self.task_config, aprun_cmd) diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py index b8aa2dba3a..e21828ceba 100644 --- a/workflow/applications/gfs_cycled.py +++ b/workflow/applications/gfs_cycled.py @@ -38,9 +38,9 @@ def _get_app_configs(self): configs = ['prep'] if self.do_jediatmvar: - configs += ['prepatmiodaobs', 'atmanlinit', 'atmanlvar', 'atmanlfv3inc', 'atmanlfinal'] + configs += ['prepatmiodaobs', 'atmanlinit', 'atmanlvar', 'atmanlfv3inc', 'atmanlfinal', 'calcanl'] else: - configs += ['anal', 'analdiag'] + configs += ['anal', 'analdiag', 'analcalc'] if self.do_jediocnvar: configs += ['prepoceanobs', 
'ocnanalprep', 'marinebmat', 'ocnanalrun'] @@ -53,7 +53,7 @@ def _get_app_configs(self): if self.do_ocean or self.do_ice: configs += ['oceanice_products'] - configs += ['stage_ic', 'sfcanl', 'analcalc', 'fcst', 'upp', 'atmos_products', 'arch', 'cleanup'] + configs += ['stage_ic', 'sfcanl', 'fcst', 'upp', 'atmos_products', 'arch', 'cleanup'] if self.do_hybvar: if self.do_jediatmens: @@ -140,9 +140,9 @@ def get_task_names(self): gdas_gfs_common_cleanup_tasks = ['arch', 'cleanup'] if self.do_jediatmvar: - gdas_gfs_common_tasks_before_fcst += ['prepatmiodaobs', 'atmanlinit', 'atmanlvar', 'atmanlfv3inc', 'atmanlfinal'] + gdas_gfs_common_tasks_before_fcst += ['prepatmiodaobs', 'atmanlinit', 'atmanlvar', 'atmanlfv3inc', 'atmanlfinal', 'calcanl'] else: - gdas_gfs_common_tasks_before_fcst += ['anal'] + gdas_gfs_common_tasks_before_fcst += ['anal', 'analcalc'] if self.do_jediocnvar: gdas_gfs_common_tasks_before_fcst += ['prepoceanobs', 'ocnanalprep', 'marinebmat', 'ocnanalrun'] @@ -152,7 +152,7 @@ def get_task_names(self): if self.do_vrfy_oceanda: gdas_gfs_common_tasks_before_fcst += ['ocnanalvrfy'] - gdas_gfs_common_tasks_before_fcst += ['sfcanl', 'analcalc'] + gdas_gfs_common_tasks_before_fcst += ['sfcanl'] if self.do_jedisnowda: gdas_gfs_common_tasks_before_fcst += ['prepsnowobs', 'snowanl'] diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index 89da933d00..996bced8f8 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -244,13 +244,39 @@ def sfcanl(self): return task + def calcanl(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.run}atmanlfinal'} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'task', 'name': f'{self.run}sfcanl'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_hybvar and self.run in ['gdas']: + dep_dict = {'type': 'task', 'name': 'enkfgdasechgres', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('calcanl') + task_name = f'{self.run}calcanl' + task_dict = {'task_name': task_name, + 'resources': resources, + 'dependency': dependencies, + 'envars': self.envars, + 'cycledef': self.run.replace('enkf', ''), + 'command': f'{self.HOMEgfs}/jobs/rocoto/calcanl.sh', + 'job_name': f'{self.pslot}_{task_name}_@H', + 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', + 'maxtries': '&MAXTRIES;' + } + + task = rocoto.create_task(task_dict) + + return task + def analcalc(self): deps = [] - if self.app_config.do_jediatmvar: - dep_dict = {'type': 'task', 'name': f'{self.run}atmanlfinal'} - else: - dep_dict = {'type': 'task', 'name': f'{self.run}anal'} + dep_dict = {'type': 'task', 'name': f'{self.run}anal'} deps.append(rocoto.add_dependency(dep_dict)) dep_dict = {'type': 'task', 'name': f'{self.run}sfcanl'} deps.append(rocoto.add_dependency(dep_dict)) diff --git a/workflow/rocoto/tasks.py b/workflow/rocoto/tasks.py index d8d5edb5e6..cacf530c35 100644 --- a/workflow/rocoto/tasks.py +++ b/workflow/rocoto/tasks.py @@ -13,7 +13,7 @@ class Tasks: SERVICE_TASKS = ['arch', 'earc'] VALID_TASKS = ['aerosol_init', 'stage_ic', - 'prep', 'anal', 'sfcanl', 'analcalc', 'analdiag', 'arch', "cleanup", + 'prep', 'anal', 'sfcanl', 'calcanl', 'analcalc', 'analdiag', 'arch', "cleanup", 'prepatmiodaobs', 'atmanlinit', 'atmanlvar', 'atmanlfv3inc', 'atmanlfinal', 'prepoceanobs', 'ocnanalprep', 'marinebmat', 'ocnanalrun', 'ocnanalecen', 
'ocnanalchkpt', 'ocnanalpost', 'ocnanalvrfy', From 3ac6a0e9e1ec3951056335539331cbea7d814f70 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Sun, 8 Sep 2024 17:17:10 +0000 Subject: [PATCH 02/71] Update gdas hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index faa95efb18..d167649678 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit faa95efb18f0f52acab2cf09b17f78406f9b48b1 +Subproject commit d167649678165bb17bbdc5522cf3295e8657dafe From e56126d693b6e0ffb7ef133d012ca26cffb10923 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Sun, 8 Sep 2024 17:17:49 +0000 Subject: [PATCH 03/71] Update GDAS hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index d167649678..d3d2bcf6e6 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit d167649678165bb17bbdc5522cf3295e8657dafe +Subproject commit d3d2bcf6e62a8387d4d8306a0863392e14eae24f From 5436a6b91409e635b6cbda279385f064096d9af1 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Mon, 9 Sep 2024 15:29:30 +0000 Subject: [PATCH 04/71] Fix typo --- env/HERA.env | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/env/HERA.env b/env/HERA.env index 416b4466b8..6e641b3665 100755 --- a/env/HERA.env +++ b/env/HERA.env @@ -162,7 +162,7 @@ elif [[ "${step}" = "marineanalletkf" ]]; then elif [[ "${step}" = "calcanl" ]]; then - export NTHREADS_CALCANAL=${NTHREADSmax} + export NTHREADS_CALCANL=${NTHREADSmax} export APRUN_CALCANL="${APRUN} --cpus-per-task=${NTHREADS_CALCANL}" elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then From b5e231a9c29774bdf7828b814e58d04d88ac62d4 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Mon, 9 Sep 2024 21:33:57 +0000 Subject: [PATCH 05/71] Saving progress --- jobs/JGLOBAL_ATM_CALC_ANALYSIS | 9 +++ parm/config/gfs/config.calcanl | 7 ++ sorc/link_workflow.sh | 1 + ush/python/pygfs/task/calcanl.py | 111 ++++++++++++++++--------------- 4 files changed, 76 insertions(+), 52 deletions(-) diff --git a/jobs/JGLOBAL_ATM_CALC_ANALYSIS b/jobs/JGLOBAL_ATM_CALC_ANALYSIS index b70684dc9c..8f468555f8 100755 --- a/jobs/JGLOBAL_ATM_CALC_ANALYSIS +++ b/jobs/JGLOBAL_ATM_CALC_ANALYSIS @@ -7,11 +7,20 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "calcanl" -c "base calcanl" # Set variables used in the script ############################################## +GDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +gPDY=${GDATE:0:8} +gcyc=${GDATE:8:2} +GDUMP="gdas" ############################################## # Begin JOB SPECIFIC work ############################################## +# Generate COM variables from templates +YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ + COM_ATMOS_ANALYSIS +RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \ + COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL ############################################## # Run relevant script diff --git a/parm/config/gfs/config.calcanl b/parm/config/gfs/config.calcanl index 20fd0b3706..c719b1bc4c 100644 --- a/parm/config/gfs/config.calcanl +++ b/parm/config/gfs/config.calcanl @@ -8,7 +8,14 @@ echo "BEGIN: config.calcanl" # Get task specific resources . 
"${EXPDIR}/config.resources" calcanl +export JCB_BASE_YAML=${PARMgfs}/gdas/atm/jcb-base.yaml.j2 export JCB_ALGO=fv3jedi_calcanl export JEDIEXE=${EXECgfs}/fv3jedi_calcanl.x +if [[ ${DOHYBVAR} = "YES" ]]; then + export CASE_ANL=${CASE_ENS} +else + export CASE_ANL=${CASE} +fi + echo "END: config.calcanl" diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index 92404afc01..3d341c0b21 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -373,6 +373,7 @@ if [[ -d "${HOMEgfs}/sorc/gdas.cd/build" ]]; then "fv3jedi_plot_field.x" \ "gdasapp_chem_diagb.x" \ "fv3jedi_fv3inc.x" \ + "fv3jedi_calcanl.x" \ "gdas_ens_handler.x" \ "gdas_incr_handler.x" \ "gdas_obsprovider2ioda.x" \ diff --git a/ush/python/pygfs/task/calcanl.py b/ush/python/pygfs/task/calcanl.py index d429f80102..8264e275e2 100644 --- a/ush/python/pygfs/task/calcanl.py +++ b/ush/python/pygfs/task/calcanl.py @@ -1,11 +1,11 @@ #!/usr/bin/env python3 -import os import datetime from logging import getLogger -from wxflow import Task, cast_as_dtype, logit - +from pprint import pformat +import os from pygfs.jedi import Jedi +from wxflow import add_to_datetime, AttrDict, FileHandler, logit, Task, save_as_yaml, to_timedelta logger = getLogger(__name__.split('.')[-1]) @@ -20,6 +20,7 @@ def __init__(self, config, yaml_name=None): _res = int(self.task_config.CASE[1:]) _res_anl = int(self.task_config.CASE_ANL[1:]) + _window_begin = add_to_datetime(self.task_config.current_cycle, -to_timedelta(f"{self.task_config.assim_freq}H") / 2) # Create a local dictionary that is repeatedly used across this class local_dict = AttrDict( @@ -31,9 +32,10 @@ def __init__(self, config, yaml_name=None): 'npx_anl': _res_anl + 1, 'npy_anl': _res_anl + 1, 'npz_anl': self.task_config.LEVS - 1, + 'ATM_WINDOW_BEGIN': _window_begin, + 'ATM_WINDOW_LENGTH': f"PT{self.task_config.assim_freq}H", 'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", 'GPREFIX': f"gdas.t{self.task_config.previous_cycle.hour:02d}z.", - 'IAUHH': cast_as_dtype(task_config.IAUFHRS) } ) @@ -41,7 +43,7 @@ def __init__(self, config, yaml_name=None): self.task_config = AttrDict(**self.task_config, **local_dict) # Construct JEDI object - self.jedi = JEDI(self.task_config, yaml_name) + self.jedi = Jedi(self.task_config, yaml_name) @logit(logger) def initialize_jedi(self) -> None: @@ -62,112 +64,117 @@ def initialize_jedi(self) -> None: def initialize(self) -> None: logger.info('calcanl_gfs beginning at: ', datetime.datetime.utcnow()) + # Initialize dictionary used to construct Filehandler + fh_dict = {'mkdir': [], + 'copy': []} + + logger.info(f"Linking JEDI executable {self.task_config.JEDIEXE} to {self.jedi.exe}") + self.jedi.link_exe(self.task_config) + # Initialize FileHandler to make directories and copy files - if task_config.DOIAU and task_config.l4densvar and task_config.lwrite4danl: - fh_dict = {'mkdir': [], - 'copy': []} + if self.task_config.DOIAU and self.task_config.l4densvar and self.task_config.lwrite4danl: - for fh in task_config.IAUHH: + for fh in self.task_config.IAUFHRS: if fh == 6: # For full res analysis - CalcAnlDir = task_config.DATA + '/calcanl_' + format(fh, '02') + CalcAnlDir = self.task_config.DATA + '/calcanl_' + format(fh, '02') if not os.path.exists(CalcAnlDir): fh_dict['mkdir'].append(CalcAnlDir) - fh_dict['copy'].append([task_config.CALCANLEXEC, - CalcAnlDir + '/calc_anl.x']) - fh_dict['copy'].append([task_config.DATA + '/siginc.nc', +# fh_dict['copy'].append([self.task_config.CALCANLEXEC, +# CalcAnlDir + '/calc_anl.x']) + 
fh_dict['copy'].append([self.task_config.DATA + '/siginc.nc', CalcAnlDir + '/siginc.nc.06']) - fh_dict['copy'].append([task_config.DATA + '/sigf06', + fh_dict['copy'].append([self.task_config.DATA + '/sigf06', CalcAnlDir + '/ges.06']) - fh_dict['copy'].append([task_config.DATA + '/siganl', + fh_dict['copy'].append([self.task_config.DATA + '/siganl', CalcAnlDir + '/anl.06']) - fh_dict['copy'].append([task_config.CHGRESINCEXEC, - CalcAnlDir + '/chgres_inc.x']) +# fh_dict['copy'].append([self.task_config.CHGRESINCEXEC, +# CalcAnlDir + '/chgres_inc.x']) # For ensemble res analysis if Run in ["gdas", "gfs"]: - CalcAnlDir = task_config.DATA + '/calcanl_ensres_' + format(fh, '02') + CalcAnlDir = self.task_config.DATA + '/calcanl_ensres_' + format(fh, '02') if not os.path.exists(CalcAnlDir): fh_dict['mkdir'].append(CalcAnlDir) - fh_dict['copy'].append([task_config.CALCANLEXEC, - CalcAnlDir + '/calc_anl.x']) - fh_dict['copy'].append([task_config.DATA + '/siginc.nc', +# fh_dict['copy'].append([self.task_config.CALCANLEXEC, +# CalcAnlDir + '/calc_anl.x']) + fh_dict['copy'].append([self.task_config.DATA + '/siginc.nc', CalcAnlDir + '/siginc.nc.06']) - fh_dict['copy'].append([task_config.COM_ATMOS_ANALYSIS + '/' + task_config.APREFIX + 'atmanl.ensres.nc', + fh_dict['copy'].append([self.task_config.COM_ATMOS_ANALYSIS + '/' + self.task_config.APREFIX + 'atmanl.ensres.nc', CalcAnlDir + '/anl.ensres.06']) - fh_dict['copy'].append([task_config.COM_ATMOS_HISTORY_PREV + '/' + task_config.GPREFIX + 'atmf006.ensres.nc', + fh_dict['copy'].append([self.task_config.COM_ATMOS_HISTORY_PREV + '/' + self.task_config.GPREFIX + 'atmf006.ensres.nc', CalcAnlDir + '/ges.ensres.06']) - fh_dict['copy'].append([task_config.DATA + '/sigf06', + fh_dict['copy'].append([self.task_config.DATA + '/sigf06', CalcAnlDir + '/ges.06']) else: if os.path.isfile('sigi' + format(fh, '02') + '.nc'): # For full res analysis - CalcAnlDir = task_config.DATA + '/calcanl_' + format(fh, '02') - CalcAnlDir6 = task_config.DATA + '/calcanl_' + format(6, '02') + CalcAnlDir = self.task_config.DATA + '/calcanl_' + format(fh, '02') + CalcAnlDir6 = self.task_config.DATA + '/calcanl_' + format(6, '02') if not os.path.exists(CalcAnlDir): fh_dict['mkdir'].append(CalcAnlDir) if not os.path.exists(CalcAnlDir6): fh_dict['mkdir'].append(CalcAnlDir6) - fh_dict['copy'].append([task_config.COM_ATMOS_ANALYSIS + '/' + task_config.APREFIX + 'atma' + format(fh, '03') + '.nc', + fh_dict['copy'].append([self.task_config.COM_ATMOS_ANALYSIS + '/' + self.task_config.APREFIX + 'atma' + format(fh, '03') + '.nc', CalcAnlDir6 + '/anl.' + format(fh, '02')]) - fh_dict['copy'].append([task_config.DATA + '/siga' + format(fh, '02'), + fh_dict['copy'].append([self.task_config.DATA + '/siga' + format(fh, '02'), CalcAnlDir6 + '/anl.' + format(fh, '02')]) - fh_dict['copy'].append([task_config.DATA + '/sigi' + format(fh, '02') + '.nc', + fh_dict['copy'].append([self.task_config.DATA + '/sigi' + format(fh, '02') + '.nc', CalcAnlDir + '/siginc.nc.' + format(fh, '02')]) fh_dict['copy'].append([CalcAnlDir6 + '/inc.fullres.' + format(fh, '02'), CalcAnlDir + '/inc.fullres.' + format(fh, '02')]) - fh_dict['copy'].append([task_config.DATA + '/sigf' + format(fh, '02'), + fh_dict['copy'].append([self.task_config.DATA + '/sigf' + format(fh, '02'), CalcAnlDir6 + '/ges.' + format(fh, '02')]) - fh_dict['copy'].append([task_config.DATA + '/sigf' + format(fh, '02'), + fh_dict['copy'].append([self.task_config.DATA + '/sigf' + format(fh, '02'), CalcAnlDir + '/ges.' 
+ format(fh, '02')]) - fh_dict['copy'].append([task_config.CHGRESINCEXEC, - CalcAnlDir + '/chgres_inc.x']) +# fh_dict['copy'].append([self.task_config.CHGRESINCEXEC, +# CalcAnlDir + '/chgres_inc.x']) # For ensemble res analysis - CalcAnlDir = task_config.DATA + '/calcanl_ensres_' + format(fh, '02') - CalcAnlDir6 = task_config.DATA + '/calcanl_ensres_' + format(6, '02') + CalcAnlDir = self.task_config.DATA + '/calcanl_ensres_' + format(fh, '02') + CalcAnlDir6 = self.task_config.DATA + '/calcanl_ensres_' + format(6, '02') if not os.path.exists(CalcAnlDir): fh_dict['mkdir'].append(CalcAnlDir) if not os.path.exists(CalcAnlDir6): fh_dict['mkdir'].append(CalcAnlDir6) - fh_dict['copy'].append([task_config.COM_ATMOS_ANALYSIS + '/' + task_config.APREFIX + 'atma' + format(fh, '03') + '.ensres.nc', + fh_dict['copy'].append([self.task_config.COM_ATMOS_ANALYSIS + '/' + self.task_config.APREFIX + 'atma' + format(fh, '03') + '.ensres.nc', CalcAnlDir6 + '/anl.ensres.' + format(fh, '02')]) - fh_dict['copy'].append([task_config.DATA + '/sigi' + format(fh, '02') + '.nc', + fh_dict['copy'].append([self.task_config.DATA + '/sigi' + format(fh, '02') + '.nc', CalcAnlDir6 + '/siginc.nc.' + format(fh, '02')]) - fh_dict['copy'].append([task_config.COM_ATMOS_HISTORY_PREV + '/' + task_config.GPREFIX + 'atmf' + format(fh, '03') + '.ensres.nc', + fh_dict['copy'].append([self.task_config.COM_ATMOS_HISTORY_PREV + '/' + self.task_config.GPREFIX + 'atmf' + format(fh, '03') + '.ensres.nc', CalcAnlDir6 + '/ges.ensres.' + format(fh, '02')]) else: # For full res analysis - CalcAnlDir = task_config.DATA + '/calcanl_' + format(6, '02') + CalcAnlDir = self.task_config.DATA + '/calcanl_' + format(6, '02') if not os.path.exists(CalcAnlDir): fh_dict['mkdir'].append(CalcAnlDir) - fh_dict['copy'].append([task_config.CALCANLEXEC, - CalcAnlDir + '/calc_anl.x']) - fh_dict['copy'].append([task_config.DATA + '/siginc.nc', +# fh_dict['copy'].append([self.task_config.CALCANLEXEC, +# CalcAnlDir + '/calc_anl.x']) + fh_dict['copy'].append([self.task_config.DATA + '/siginc.nc', CalcAnlDir + '/siginc.nc.06']) - fh_dict['copy'].append([task_config.DATA + '/sigf06', + fh_dict['copy'].append([self.task_config.DATA + '/sigf06', CalcAnlDir + '/ges.06']) - fh_dict['copy'].append([task_config.DATA + '/siganl', + fh_dict['copy'].append([self.task_config.DATA + '/siganl', CalcAnlDir + '/anl.06']) - fh_dict['copy'].append([task_config.CHGRESINCEXEC, - CalcAnlDir + '/chgres_inc.x']) +# fh_dict['copy'].append([self.task_config.CHGRESINCEXEC, +# CalcAnlDir + '/chgres_inc.x']) # For ensemble res analysis - CalcAnlDir = task_config.DATA + '/calcanl_ensres_' + format(6, '02') + CalcAnlDir = self.task_config.DATA + '/calcanl_ensres_' + format(6, '02') if not os.path.exists(CalcAnlDir): fh_dict['mkdir'].append(CalcAnlDir) - fh_dict['copy'].append([task_config.CALCANLEXEC, - CalcAnlDir + '/calc_anl.x']) - fh_dict['copy'].append([task_config.DATA + '/siginc.nc', +# fh_dict['copy'].append([self.task_config.CALCANLEXEC, +# CalcAnlDir + '/calc_anl.x']) + fh_dict['copy'].append([self.task_config.DATA + '/siginc.nc', CalcAnlDir + '/siginc.nc.06']) - fh_dict['copy'].append([task_config.COM_ATMOS_ANALYSIS + '/' + APrefix + 'atmanl.ensres.nc', + fh_dict['copy'].append([self.task_config.COM_ATMOS_ANALYSIS + '/' + self.task_config.APREFIX + 'atmanl.ensres.nc', CalcAnlDir + '/anl.ensres.06']) - fh_dict['copy'].append([task_config.COM_ATMOS_HISTORY_PREV + '/' + GPrefix + 'atmf006.ensres.nc', + fh_dict['copy'].append([self.task_config.COM_ATMOS_HISTORY_PREV + '/' + 
self.task_config.GPREFIX + 'atmf006.ensres.nc', CalcAnlDir + '/ges.ensres.06']) # Stage files From 777641aaa1c9a86d90225e51866e4a314041bf12 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Wed, 11 Sep 2024 14:05:28 +0000 Subject: [PATCH 06/71] Update --- sorc/gdas.cd | 2 +- ush/python/pygfs/task/calcanl.py | 64 +------------------------------- 2 files changed, 3 insertions(+), 63 deletions(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index d3d2bcf6e6..40523d9982 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit d3d2bcf6e62a8387d4d8306a0863392e14eae24f +Subproject commit 40523d998284c145b9fc3873417bc774f70b77f6 diff --git a/ush/python/pygfs/task/calcanl.py b/ush/python/pygfs/task/calcanl.py index 8264e275e2..3eeb3ccaaf 100644 --- a/ush/python/pygfs/task/calcanl.py +++ b/ush/python/pygfs/task/calcanl.py @@ -76,41 +76,18 @@ def initialize(self) -> None: for fh in self.task_config.IAUFHRS: if fh == 6: - # For full res analysis CalcAnlDir = self.task_config.DATA + '/calcanl_' + format(fh, '02') if not os.path.exists(CalcAnlDir): fh_dict['mkdir'].append(CalcAnlDir) -# fh_dict['copy'].append([self.task_config.CALCANLEXEC, -# CalcAnlDir + '/calc_anl.x']) fh_dict['copy'].append([self.task_config.DATA + '/siginc.nc', CalcAnlDir + '/siginc.nc.06']) fh_dict['copy'].append([self.task_config.DATA + '/sigf06', CalcAnlDir + '/ges.06']) fh_dict['copy'].append([self.task_config.DATA + '/siganl', CalcAnlDir + '/anl.06']) -# fh_dict['copy'].append([self.task_config.CHGRESINCEXEC, -# CalcAnlDir + '/chgres_inc.x']) - - # For ensemble res analysis - if Run in ["gdas", "gfs"]: - CalcAnlDir = self.task_config.DATA + '/calcanl_ensres_' + format(fh, '02') - - if not os.path.exists(CalcAnlDir): - fh_dict['mkdir'].append(CalcAnlDir) -# fh_dict['copy'].append([self.task_config.CALCANLEXEC, -# CalcAnlDir + '/calc_anl.x']) - fh_dict['copy'].append([self.task_config.DATA + '/siginc.nc', - CalcAnlDir + '/siginc.nc.06']) - fh_dict['copy'].append([self.task_config.COM_ATMOS_ANALYSIS + '/' + self.task_config.APREFIX + 'atmanl.ensres.nc', - CalcAnlDir + '/anl.ensres.06']) - fh_dict['copy'].append([self.task_config.COM_ATMOS_HISTORY_PREV + '/' + self.task_config.GPREFIX + 'atmf006.ensres.nc', - CalcAnlDir + '/ges.ensres.06']) - fh_dict['copy'].append([self.task_config.DATA + '/sigf06', - CalcAnlDir + '/ges.06']) else: if os.path.isfile('sigi' + format(fh, '02') + '.nc'): - # For full res analysis CalcAnlDir = self.task_config.DATA + '/calcanl_' + format(fh, '02') CalcAnlDir6 = self.task_config.DATA + '/calcanl_' + format(6, '02') @@ -130,52 +107,15 @@ def initialize(self) -> None: CalcAnlDir6 + '/ges.' + format(fh, '02')]) fh_dict['copy'].append([self.task_config.DATA + '/sigf' + format(fh, '02'), CalcAnlDir + '/ges.' + format(fh, '02')]) -# fh_dict['copy'].append([self.task_config.CHGRESINCEXEC, -# CalcAnlDir + '/chgres_inc.x']) - - # For ensemble res analysis - CalcAnlDir = self.task_config.DATA + '/calcanl_ensres_' + format(fh, '02') - CalcAnlDir6 = self.task_config.DATA + '/calcanl_ensres_' + format(6, '02') - if not os.path.exists(CalcAnlDir): - fh_dict['mkdir'].append(CalcAnlDir) - if not os.path.exists(CalcAnlDir6): - fh_dict['mkdir'].append(CalcAnlDir6) - fh_dict['copy'].append([self.task_config.COM_ATMOS_ANALYSIS + '/' + self.task_config.APREFIX + 'atma' + format(fh, '03') + '.ensres.nc', - CalcAnlDir6 + '/anl.ensres.' + format(fh, '02')]) - fh_dict['copy'].append([self.task_config.DATA + '/sigi' + format(fh, '02') + '.nc', - CalcAnlDir6 + '/siginc.nc.' 
+ format(fh, '02')]) - fh_dict['copy'].append([self.task_config.COM_ATMOS_HISTORY_PREV + '/' + self.task_config.GPREFIX + 'atmf' + format(fh, '03') + '.ensres.nc', - CalcAnlDir6 + '/ges.ensres.' + format(fh, '02')]) else: - # For full res analysis CalcAnlDir = self.task_config.DATA + '/calcanl_' + format(6, '02') if not os.path.exists(CalcAnlDir): fh_dict['mkdir'].append(CalcAnlDir) -# fh_dict['copy'].append([self.task_config.CALCANLEXEC, -# CalcAnlDir + '/calc_anl.x']) - fh_dict['copy'].append([self.task_config.DATA + '/siginc.nc', + fh_dict['copy'].append([self.task_config.COM_ATMOS_ANALYSIS + '/' + self.task_config.APREFIX + 'atminc006.nc', CalcAnlDir + '/siginc.nc.06']) - fh_dict['copy'].append([self.task_config.DATA + '/sigf06', + fh_dict['copy'].append([self.task_config.COM_ATMOS_HISTORY_PREV + '/' + self.task_config.GPREFIX + 'cubed_sphere_grid_atmf006.nc' CalcAnlDir + '/ges.06']) - fh_dict['copy'].append([self.task_config.DATA + '/siganl', - CalcAnlDir + '/anl.06']) -# fh_dict['copy'].append([self.task_config.CHGRESINCEXEC, -# CalcAnlDir + '/chgres_inc.x']) - - # For ensemble res analysis - CalcAnlDir = self.task_config.DATA + '/calcanl_ensres_' + format(6, '02') - - if not os.path.exists(CalcAnlDir): - fh_dict['mkdir'].append(CalcAnlDir) -# fh_dict['copy'].append([self.task_config.CALCANLEXEC, -# CalcAnlDir + '/calc_anl.x']) - fh_dict['copy'].append([self.task_config.DATA + '/siginc.nc', - CalcAnlDir + '/siginc.nc.06']) - fh_dict['copy'].append([self.task_config.COM_ATMOS_ANALYSIS + '/' + self.task_config.APREFIX + 'atmanl.ensres.nc', - CalcAnlDir + '/anl.ensres.06']) - fh_dict['copy'].append([self.task_config.COM_ATMOS_HISTORY_PREV + '/' + self.task_config.GPREFIX + 'atmf006.ensres.nc', - CalcAnlDir + '/ges.ensres.06']) # Stage files FileHandler(fh_dict).sync() From 86b6ad4f794febbc0e92d116c1bde7c24287fd18 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Wed, 11 Sep 2024 14:16:40 +0000 Subject: [PATCH 07/71] Merge --- docs/source/errors_faq.rst | 9 +- docs/source/index.rst | 5 +- docs/source/run.rst | 1 - env/AWSPW.env | 16 +-- env/AZUREPW.env | 4 +- env/GAEA.env | 10 +- env/GOOGLEPW.env | 14 +-- env/HERA.env | 66 ++++++------ env/HERCULES.env | 60 +++++------ env/JET.env | 48 ++++----- env/ORION.env | 60 +++++------ env/S4.env | 50 ++++----- env/WCOSS2.env | 60 +++++------ parm/config/gfs/config.resources | 2 +- workflow/applications/applications.py | 115 ++++++++++----------- workflow/applications/gefs.py | 7 +- workflow/applications/gfs_cycled.py | 23 +++-- workflow/applications/gfs_forecast_only.py | 18 ++-- workflow/rocoto/tasks.py | 15 ++- workflow/rocoto/workflow_xml.py | 3 +- 20 files changed, 300 insertions(+), 286 deletions(-) diff --git a/docs/source/errors_faq.rst b/docs/source/errors_faq.rst index 519e29bace..d2cdc7b306 100644 --- a/docs/source/errors_faq.rst +++ b/docs/source/errors_faq.rst @@ -2,6 +2,14 @@ Common Errors Known Issues ========================== +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Error: Reserved Variables Causing Workflow Issues +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Several variables are reserved in the workflow and should not be used as environment variables in your shell. Some of the common ones include (but are not limited to): +``HOMEgfs``, ``machine``, ``ROTDIR``, ``COMROT``, ``COMROOT``, ``COMOUT``, ``COMIN``, ``STMP``, ``PTMP``, ``DATAROOT``, ``DATA``, ``ACCOUNT``, ``PDY``, ``cyc``, ``RUN``, etc. 
+If you are using any of these variables in your shell, you may encounter unexpected behavior in the workflow. + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Error: "ImportError" message when running setup script ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -42,4 +50,3 @@ Issue: Directory name change for EnKF folder in ROTDIR **Issue:** The EnKF ROTDIR folders were renamed during the GFS v15 development process to remove the period between "enkf" and "gdas": enkf.gdas.$PDY → enkfgdas.$PDY **Fix:** Older tarballs on HPSS will have the older directory name with the period between 'enkf' and 'gdas'. Make sure to rename folder to 'enkfgdas.$PDY' after obtaining. Only an issue for the initial cycle. - diff --git a/docs/source/index.rst b/docs/source/index.rst index a5161789b3..637a4ef70a 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -1,6 +1,6 @@ ############### -Global Workflow +Global Workflow ############### **Global-workflow** is the end-to-end workflow designed to run global configurations of medium range weather forecasting for the UFS weather model. It supports both development and operational implementations. In its current format it supports the Global Forecast System (GFS) and the Global Ensemble Forecast System (GEFS) configurations @@ -31,7 +31,7 @@ GitHub updates: Users should adjust their "Watch" settings for this repo so they Table of Contents ================= -.. toctree:: +.. toctree:: :numbered: :maxdepth: 3 @@ -42,3 +42,4 @@ Table of Contents output.rst run.rst noaa_csp.rst + errors_faq.rst diff --git a/docs/source/run.rst b/docs/source/run.rst index 817ed3ccfa..f160f791c9 100644 --- a/docs/source/run.rst +++ b/docs/source/run.rst @@ -13,4 +13,3 @@ Here we will show how you can run an experiment using the Global Workflow. 
The G start.rst monitor_rocoto.rst view.rst - errors_faq.rst diff --git a/env/AWSPW.env b/env/AWSPW.env index 7fe17d2492..e366128a1d 100755 --- a/env/AWSPW.env +++ b/env/AWSPW.env @@ -27,7 +27,7 @@ if [[ -n "${ntasks:-}" && -n "${max_tasks_per_node:-}" && -n "${tasks_per_node:- NTHREADS1=${threads_per_task:-1} [[ ${NTHREADSmax} -gt ${max_threads_per_task} ]] && NTHREADSmax=${max_threads_per_task} [[ ${NTHREADS1} -gt ${max_threads_per_task} ]] && NTHREADS1=${max_threads_per_task} - export APRUN="${launcher} -n ${ntasks}" + APRUN_default="${launcher} -n ${ntasks}" else echo "ERROR config.resources must be sourced before sourcing AWSPW.env" exit 2 @@ -53,7 +53,7 @@ elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step} elif [[ "${step}" = "post" ]]; then export NTHREADS_NP=${NTHREADS1} - export APRUN_NP="${APRUN}" + export APRUN_NP="${APRUN_default}" export NTHREADS_DWN=${threads_per_task_dwn:-1} [[ ${NTHREADS_DWN} -gt ${max_threads_per_task} ]] && export NTHREADS_DWN=${max_threads_per_task} @@ -71,7 +71,7 @@ elif [[ "${step}" = "oceanice_products" ]]; then elif [[ "${step}" = "ecen" ]]; then export NTHREADS_ECEN=${NTHREADSmax} - export APRUN_ECEN="${APRUN}" + export APRUN_ECEN="${APRUN_default}" export NTHREADS_CHGRES=${threads_per_task_chgres:-12} [[ ${NTHREADS_CHGRES} -gt ${max_tasks_per_node} ]] && export NTHREADS_CHGRES=${max_tasks_per_node} @@ -79,25 +79,25 @@ elif [[ "${step}" = "ecen" ]]; then export NTHREADS_CALCINC=${threads_per_task_calcinc:-1} [[ ${NTHREADS_CALCINC} -gt ${max_threads_per_task} ]] && export NTHREADS_CALCINC=${max_threads_per_task} - export APRUN_CALCINC="${APRUN}" + export APRUN_CALCINC="${APRUN_default}" elif [[ "${step}" = "esfc" ]]; then export NTHREADS_ESFC=${NTHREADSmax} - export APRUN_ESFC="${APRUN}" + export APRUN_ESFC="${APRUN_default}" export NTHREADS_CYCLE=${threads_per_task_cycle:-14} [[ ${NTHREADS_CYCLE} -gt ${max_tasks_per_node} ]] && export NTHREADS_CYCLE=${max_tasks_per_node} - export APRUN_CYCLE="${APRUN}" + export APRUN_CYCLE="${APRUN_default}" elif [[ "${step}" = "epos" ]]; then export NTHREADS_EPOS=${NTHREADSmax} - export APRUN_EPOS="${APRUN}" + export APRUN_EPOS="${APRUN_default}" elif [[ "${step}" = "fit2obs" ]]; then export NTHREADS_FIT2OBS=${NTHREADS1} - export MPIRUN="${APRUN}" + export MPIRUN="${APRUN_default}" fi diff --git a/env/AZUREPW.env b/env/AZUREPW.env index 706c659e95..9e246a9cb4 100755 --- a/env/AZUREPW.env +++ b/env/AZUREPW.env @@ -27,7 +27,7 @@ if [[ -n "${ntasks:-}" && -n "${max_tasks_per_node:-}" && -n "${tasks_per_node:- NTHREADS1=${threads_per_task:-1} [[ ${NTHREADSmax} -gt ${max_threads_per_task} ]] && NTHREADSmax=${max_threads_per_task} [[ ${NTHREADS1} -gt ${max_threads_per_task} ]] && NTHREADS1=${max_threads_per_task} - APRUN="${launcher} -n ${ntasks}" + APRUN_default="${launcher} -n ${ntasks}" else echo "ERROR config.resources must be sourced before sourcing AZUREPW.env" exit 2 @@ -46,7 +46,7 @@ if [[ "${step}" = "fcst" ]] || [[ "${step}" = "efcs" ]]; then elif [[ "${step}" = "post" ]]; then export NTHREADS_NP=${NTHREADS1} - export APRUN_NP="${APRUN}" + export APRUN_NP="${APRUN_default}" export NTHREADS_DWN=${threads_per_task_dwn:-1} [[ ${NTHREADS_DWN} -gt ${max_threads_per_task} ]] && export NTHREADS_DWN=${max_threads_per_task} diff --git a/env/GAEA.env b/env/GAEA.env index be5e9f0ca7..7736e0f1ea 100755 --- a/env/GAEA.env +++ b/env/GAEA.env @@ -28,7 +28,7 @@ if [[ -n "${ntasks:-}" && -n "${max_tasks_per_node:-}" && -n "${tasks_per_node:- [[ ${NTHREADS1} -gt ${max_threads_per_task} ]] && 
NTHREADS1=${max_threads_per_task} # This may be useful when Gaea is fully ported, so ignore SC warning # shellcheck disable=SC2034 - APRUN="${launcher} -n ${ntasks}" + APRUN_default="${launcher} -n ${ntasks}" else echo "ERROR config.resources must be sourced before sourcing GAEA.env" exit 2 @@ -51,7 +51,7 @@ elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" export NTHREADS_GSI=${NTHREADSmax} - export APRUN_GSI="${APRUN} --cpus-per-task=${NTHREADS_GSI}" + export APRUN_GSI="${APRUN_default} --cpus-per-task=${NTHREADS_GSI}" export NTHREADS_CALCINC=${threads_per_task_calcinc:-1} [[ ${NTHREADS_CALCINC} -gt ${max_threads_per_task} ]] && export NTHREADS_CALCINC=${max_threads_per_task} @@ -69,7 +69,7 @@ elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then elif [[ "${step}" = "sfcanl" ]]; then export NTHREADS_CYCLE=${threads_per_task:-14} - export APRUN_CYCLE="${APRUN} --cpus-per-task=${NTHREADS_CYCLE}" + export APRUN_CYCLE="${APRUN_default} --cpus-per-task=${NTHREADS_CYCLE}" elif [[ "${step}" = "fcst" ]]; then @@ -82,7 +82,7 @@ elif [[ "${step}" = "fcst" ]]; then elif [[ "${step}" = "upp" ]]; then export NTHREADS_UPP=${NTHREADS1} - export APRUN_UPP="${APRUN} --cpus-per-task=${NTHREADS_UPP}" + export APRUN_UPP="${APRUN_default} --cpus-per-task=${NTHREADS_UPP}" elif [[ "${step}" = "atmos_products" ]]; then @@ -96,6 +96,6 @@ elif [[ "${step}" = "oceanice_products" ]]; then elif [[ "${step}" = "fit2obs" ]]; then export NTHREADS_FIT2OBS=${NTHREADS1} - export MPIRUN="${APRUN} --cpus-per-task=${NTHREADS_FIT2OBS}" + export MPIRUN="${APRUN_default} --cpus-per-task=${NTHREADS_FIT2OBS}" fi diff --git a/env/GOOGLEPW.env b/env/GOOGLEPW.env index 7d912eaf8b..c3b5ec806a 100755 --- a/env/GOOGLEPW.env +++ b/env/GOOGLEPW.env @@ -27,7 +27,7 @@ if [[ -n "${ntasks:-}" && -n "${max_tasks_per_node:-}" && -n "${tasks_per_node:- NTHREADS1=${threads_per_task:-1} [[ ${NTHREADSmax} -gt ${max_threads_per_task} ]] && NTHREADSmax=${max_threads_per_task} [[ ${NTHREADS1} -gt ${max_threads_per_task} ]] && NTHREADS1=${max_threads_per_task} - APRUN="${launcher} -n ${ntasks}" + APRUN_default="${launcher} -n ${ntasks}" else echo "ERROR config.resources must be sourced before sourcing GOOGLEPW.env" exit 2 @@ -57,7 +57,7 @@ elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step} elif [[ "${step}" = "post" ]]; then export NTHREADS_NP=${NTHREADS1} - export APRUN_NP="${APRUN}" + export APRUN_NP="${APRUN_default}" export NTHREADS_DWN=${threads_per_task_dwn:-1} [[ ${NTHREADS_DWN} -gt ${max_threads_per_task} ]] && export NTHREADS_DWN=${max_threads_per_task} @@ -75,7 +75,7 @@ elif [[ "${step}" = "oceanice_products" ]]; then elif [[ "${step}" = "ecen" ]]; then export NTHREADS_ECEN=${NTHREADSmax} - export APRUN_ECEN="${APRUN}" + export APRUN_ECEN="${APRUN_default}" export NTHREADS_CHGRES=${threads_per_task_chgres:-12} [[ ${NTHREADS_CHGRES} -gt ${max_tasks_per_node} ]] && export NTHREADS_CHGRES=${max_tasks_per_node} @@ -83,21 +83,21 @@ elif [[ "${step}" = "ecen" ]]; then export NTHREADS_CALCINC=${threads_per_task_calcinc:-1} [[ ${NTHREADS_CALCINC} -gt ${max_threads_per_task} ]] && export NTHREADS_CALCINC=${max_threads_per_task} - export APRUN_CALCINC="${APRUN}" + export APRUN_CALCINC="${APRUN_default}" elif [[ "${step}" = "esfc" ]]; then export NTHREADS_ESFC=${NTHREADSmax} - export APRUN_ESFC="${APRUN}" + export APRUN_ESFC="${APRUN_default}" export NTHREADS_CYCLE=${threads_per_task_cycle:-14} [[ ${NTHREADS_CYCLE} -gt ${max_tasks_per_node} 
]] && export NTHREADS_CYCLE=${max_tasks_per_node} - export APRUN_CYCLE="${APRUN}" + export APRUN_CYCLE="${APRUN_default}" elif [[ "${step}" = "epos" ]]; then export NTHREADS_EPOS=${NTHREADSmax} - export APRUN_EPOS="${APRUN}" + export APRUN_EPOS="${APRUN_default}" elif [[ "${step}" = "fit2obs" ]]; then diff --git a/env/HERA.env b/env/HERA.env index 6e641b3665..ec7db1d787 100755 --- a/env/HERA.env +++ b/env/HERA.env @@ -39,7 +39,7 @@ if [[ -n "${ntasks:-}" && -n "${max_tasks_per_node:-}" && -n "${tasks_per_node:- NTHREADS1=${threads_per_task:-1} [[ ${NTHREADSmax} -gt ${max_threads_per_task} ]] && NTHREADSmax=${max_threads_per_task} [[ ${NTHREADS1} -gt ${max_threads_per_task} ]] && NTHREADS1=${max_threads_per_task} - APRUN="${launcher} -n ${ntasks}" + APRUN_default="${launcher} -n ${ntasks}" else echo "ERROR config.resources must be sourced before sourcing HERA.env" exit 2 @@ -54,11 +54,11 @@ if [[ "${step}" = "prep" ]] || [[ "${step}" = "prepbufr" ]]; then elif [[ "${step}" = "prepsnowobs" ]]; then - export APRUN_CALCFIMS="${APRUN}" + export APRUN_CALCFIMS="${APRUN_default}" elif [[ "${step}" = "prep_emissions" ]]; then - export APRUN="${APRUN}" + export APRUN="${APRUN_default}" elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step}" = "wavepostsbs" ]] || [[ "${step}" = "wavepostbndpnt" ]] || [[ "${step}" = "wavepostbndpntbll" ]] || [[ "${step}" = "wavepostpnt" ]]; then @@ -70,100 +70,100 @@ elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step} elif [[ "${step}" = "atmanlvar" ]]; then export NTHREADS_ATMANLVAR=${NTHREADSmax} - export APRUN_ATMANLVAR="${APRUN} --cpus-per-task=${NTHREADS_ATMANLVAR}" + export APRUN_ATMANLVAR="${APRUN_default} --cpus-per-task=${NTHREADS_ATMANLVAR}" elif [[ "${step}" = "atmensanlobs" ]]; then export NTHREADS_ATMENSANLOBS=${NTHREADSmax} - export APRUN_ATMENSANLOBS="${APRUN} --cpus-per-task=${NTHREADS_ATMENSANLOBS}" + export APRUN_ATMENSANLOBS="${APRUN_default} --cpus-per-task=${NTHREADS_ATMENSANLOBS}" elif [[ "${step}" = "atmensanlsol" ]]; then export NTHREADS_ATMENSANLSOL=${NTHREADSmax} - export APRUN_ATMENSANLSOL="${APRUN} --cpus-per-task=${NTHREADS_ATMENSANLSOL}" + export APRUN_ATMENSANLSOL="${APRUN_default} --cpus-per-task=${NTHREADS_ATMENSANLSOL}" elif [[ "${step}" = "atmensanlletkf" ]]; then export NTHREADS_ATMENSANLLETKF=${NTHREADSmax} - export APRUN_ATMENSANLLETKF="${APRUN} --cpus-per-task=${NTHREADS_ATMENSANLLETKF}" + export APRUN_ATMENSANLLETKF="${APRUN_default} --cpus-per-task=${NTHREADS_ATMENSANLLETKF}" elif [[ "${step}" = "atmensanlfv3inc" ]]; then export NTHREADS_ATMENSANLFV3INC=${NTHREADSmax} - export APRUN_ATMENSANLFV3INC="${APRUN} --cpus-per-task=${NTHREADS_ATMENSANLFV3INC}" + export APRUN_ATMENSANLFV3INC="${APRUN_default} --cpus-per-task=${NTHREADS_ATMENSANLFV3INC}" elif [[ "${step}" = "aeroanlvar" ]]; then export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" export NTHREADS_AEROANL=${NTHREADSmax} - export APRUN_AEROANL="${APRUN} --cpus-per-task=${NTHREADS_AEROANL}" + export APRUN_AEROANL="${APRUN_default} --cpus-per-task=${NTHREADS_AEROANL}" elif [[ "${step}" = "aeroanlgenb" ]]; then export NTHREADS_AEROANLGENB=${NTHREADSmax} - export APRUN_AEROANLGENB="${APRUN} --cpus-per-task=${NTHREADS_AEROANLGENB}" + export APRUN_AEROANLGENB="${APRUN_default} --cpus-per-task=${NTHREADS_AEROANLGENB}" elif [[ "${step}" = "atmanlfv3inc" ]]; then export NTHREADS_ATMANLFV3INC=${NTHREADSmax} - export APRUN_ATMANLFV3INC="${APRUN} --cpus-per-task=${NTHREADS_ATMANLFV3INC}" + export 
APRUN_ATMANLFV3INC="${APRUN_default} --cpus-per-task=${NTHREADS_ATMANLFV3INC}" elif [[ "${step}" = "prepobsaero" ]]; then export NTHREADS_PREPOBSAERO=${NTHREADS1} - export APRUN_PREPOBSAERO="${APRUN} --cpus-per-task=${NTHREADS_PREPOBSAERO}" + export APRUN_PREPOBSAERO="${APRUN_default} --cpus-per-task=${NTHREADS_PREPOBSAERO}" elif [[ "${step}" = "snowanl" ]]; then export NTHREADS_SNOWANL=${NTHREADSmax} - export APRUN_SNOWANL="${APRUN} --cpus-per-task=${NTHREADS_SNOWANL}" + export APRUN_SNOWANL="${APRUN_default} --cpus-per-task=${NTHREADS_SNOWANL}" export APRUN_APPLY_INCR="${launcher} -n 6" elif [[ "${step}" = "esnowrecen" ]]; then export NTHREADS_ESNOWRECEN=${NTHREADSmax} - export APRUN_ESNOWRECEN="${APRUN} --cpus-per-task=${NTHREADS_ESNOWRECEN}" + export APRUN_ESNOWRECEN="${APRUN_default} --cpus-per-task=${NTHREADS_ESNOWRECEN}" export APRUN_APPLY_INCR="${launcher} -n 6" elif [[ "${step}" = "marinebmat" ]]; then export APRUNCFP="${launcher} -n \$ncmd --multi-prog" - export APRUN_MARINEBMAT="${APRUN}" + export APRUN_MARINEBMAT="${APRUN_default}" elif [[ "${step}" = "marinebmat" ]]; then export APRUNCFP="${launcher} -n \$ncmd --multi-prog" - export APRUN_MARINEBMAT="${APRUN}" + export APRUN_MARINEBMAT="${APRUN_default}" elif [[ "${step}" = "ocnanalrun" ]]; then export APRUNCFP="${launcher} -n \$ncmd --multi-prog" - export APRUN_OCNANAL="${APRUN}" + export APRUN_OCNANAL="${APRUN_default}" elif [[ "${step}" = "ocnanalchkpt" ]]; then export APRUNCFP="${launcher} -n \$ncmd --multi-prog" - export APRUN_OCNANAL="${APRUN}" + export APRUN_OCNANAL="${APRUN_default}" elif [[ "${step}" = "ocnanalecen" ]]; then export NTHREADS_OCNANALECEN=${NTHREADSmax} - export APRUN_OCNANALECEN="${APRUN} --cpus-per-task=${NTHREADS_OCNANALECEN}" + export APRUN_OCNANALECEN="${APRUN_default} --cpus-per-task=${NTHREADS_OCNANALECEN}" elif [[ "${step}" = "marineanalletkf" ]]; then export NTHREADS_MARINEANALLETKF=${NTHREADSmax} - export APRUN_MARINEANALLETKF="${APRUN} --cpus-per-task=${NTHREADS_MARINEANALLETKF}" + export APRUN_MARINEANALLETKF="${APRUN_default} --cpus-per-task=${NTHREADS_MARINEANALLETKF}" elif [[ "${step}" = "calcanl" ]]; then export NTHREADS_CALCANL=${NTHREADSmax} - export APRUN_CALCANL="${APRUN} --cpus-per-task=${NTHREADS_CALCANL}" + export APRUN_CALCANL="${APRUN_default} --cpus-per-task=${NTHREADS_CALCANL}" elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then @@ -175,7 +175,7 @@ elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" export NTHREADS_GSI=${NTHREADSmax} - export APRUN_GSI="${APRUN} --cpus-per-task=${NTHREADS_GSI}" + export APRUN_GSI="${APRUN_default} --cpus-per-task=${NTHREADS_GSI}" export NTHREADS_CALCINC=${threads_per_task_calcinc:-1} [[ ${NTHREADS_CALCINC} -gt ${max_threads_per_task} ]] && export NTHREADS_CALCINC=${max_threads_per_task} @@ -193,7 +193,7 @@ elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then elif [[ "${step}" = "sfcanl" ]]; then export NTHREADS_CYCLE=${threads_per_task:-14} - export APRUN_CYCLE="${APRUN} --cpus-per-task=${NTHREADS_CYCLE}" + export APRUN_CYCLE="${APRUN_default} --cpus-per-task=${NTHREADS_CYCLE}" elif [[ "${step}" = "eobs" ]]; then @@ -201,7 +201,7 @@ elif [[ "${step}" = "eobs" ]]; then export MKL_CBWR=AUTO export NTHREADS_GSI=${NTHREADSmax} - export APRUN_GSI="${APRUN} --cpus-per-task=${NTHREADS_GSI}" + export APRUN_GSI="${APRUN_default} --cpus-per-task=${NTHREADS_GSI}" export CFP_MP=${CFP_MP:-"YES"} export USE_CFP=${USE_CFP:-"YES"} @@ -227,7 +227,7 @@ elif [[ 
"${step}" = "fcst" ]] || [[ "${step}" = "efcs" ]]; then elif [[ "${step}" = "upp" ]]; then export NTHREADS_UPP=${NTHREADS1} - export APRUN_UPP="${APRUN} --cpus-per-task=${NTHREADS_UPP}" + export APRUN_UPP="${APRUN_default} --cpus-per-task=${NTHREADS_UPP}" elif [[ "${step}" = "atmos_products" ]]; then @@ -241,7 +241,7 @@ elif [[ "${step}" = "oceanice_products" ]]; then elif [[ "${step}" = "ecen" ]]; then export NTHREADS_ECEN=${NTHREADSmax} - export APRUN_ECEN="${APRUN} --cpus-per-task=${NTHREADS_ECEN}" + export APRUN_ECEN="${APRUN_default} --cpus-per-task=${NTHREADS_ECEN}" export NTHREADS_CHGRES=${threads_per_task_chgres:-12} [[ ${NTHREADS_CHGRES} -gt ${max_tasks_per_node} ]] && export NTHREADS_CHGRES=${max_tasks_per_node} @@ -249,28 +249,28 @@ elif [[ "${step}" = "ecen" ]]; then export NTHREADS_CALCINC=${threads_per_task_calcinc:-1} [[ ${NTHREADS_CALCINC} -gt ${max_threads_per_task} ]] && export NTHREADS_CALCINC=${max_threads_per_task} - export APRUN_CALCINC="${APRUN} --cpus-per-task=${NTHREADS_CALCINC}" + export APRUN_CALCINC="${APRUN_default} --cpus-per-task=${NTHREADS_CALCINC}" elif [[ "${step}" = "esfc" ]]; then export NTHREADS_ESFC=${threads_per_task_esfc:-${max_threads_per_task}} - export APRUN_ESFC="${APRUN} --cpus-per-task=${NTHREADS_ESFC}" + export APRUN_ESFC="${APRUN_default} --cpus-per-task=${NTHREADS_ESFC}" export NTHREADS_CYCLE=${threads_per_task_cycle:-14} [[ ${NTHREADS_CYCLE} -gt ${max_tasks_per_node} ]] && export NTHREADS_CYCLE=${max_tasks_per_node} - export APRUN_CYCLE="${APRUN} --cpus-per-task=${NTHREADS_CYCLE}" + export APRUN_CYCLE="${APRUN_default} --cpus-per-task=${NTHREADS_CYCLE}" elif [[ "${step}" = "epos" ]]; then export NTHREADS_EPOS=${NTHREADSmax} - export APRUN_EPOS="${APRUN} --cpus-per-task=${NTHREADS_EPOS}" + export APRUN_EPOS="${APRUN_default} --cpus-per-task=${NTHREADS_EPOS}" elif [[ "${step}" = "postsnd" ]]; then export CFP_MP="YES" export NTHREADS_POSTSND=${NTHREADS1} - export APRUN_POSTSND="${APRUN} --cpus-per-task=${NTHREADS_POSTSND}" + export APRUN_POSTSND="${APRUN_default} --cpus-per-task=${NTHREADS_POSTSND}" export NTHREADS_POSTSNDCFP=${threads_per_task_postsndcfp:-1} [[ ${NTHREADS_POSTSNDCFP} -gt ${max_threads_per_task} ]] && export NTHREADS_POSTSNDCFP=${max_threads_per_task} @@ -279,7 +279,7 @@ elif [[ "${step}" = "postsnd" ]]; then elif [[ "${step}" = "awips" ]]; then export NTHREADS_AWIPS=${NTHREADS1} - export APRUN_AWIPSCFP="${APRUN} ${mpmd_opt}" + export APRUN_AWIPSCFP="${APRUN_default} ${mpmd_opt}" elif [[ "${step}" = "gempak" ]]; then @@ -291,6 +291,6 @@ elif [[ "${step}" = "gempak" ]]; then elif [[ "${step}" = "fit2obs" ]]; then export NTHREADS_FIT2OBS=${NTHREADS1} - export MPIRUN="${APRUN} --cpus-per-task=${NTHREADS_FIT2OBS}" + export MPIRUN="${APRUN_default} --cpus-per-task=${NTHREADS_FIT2OBS}" fi diff --git a/env/HERCULES.env b/env/HERCULES.env index 1f3365a0aa..faaf1da229 100755 --- a/env/HERCULES.env +++ b/env/HERCULES.env @@ -36,7 +36,7 @@ if [[ -n "${ntasks:-}" && -n "${max_tasks_per_node:-}" && -n "${tasks_per_node:- NTHREADS1=${threads_per_task:-1} [[ ${NTHREADSmax} -gt ${max_threads_per_task} ]] && NTHREADSmax=${max_threads_per_task} [[ ${NTHREADS1} -gt ${max_threads_per_task} ]] && NTHREADS1=${max_threads_per_task} - APRUN="${launcher} -n ${ntasks}" + APRUN_default="${launcher} -n ${ntasks}" else echo "ERROR config.resources must be sourced before sourcing HERCULES.env" exit 2 @@ -52,11 +52,11 @@ case ${step} in ;; "prepsnowobs") - export APRUN_CALCFIMS="${APRUN}" + export APRUN_CALCFIMS="${APRUN_default}" ;; "prep_emissions") - 
export APRUN="${APRUN}" + export APRUN="${APRUN_default}" ;; "waveinit" | "waveprep" | "wavepostsbs" | "wavepostbndpnt" | "wavepostpnt" | "wavepostbndpntbll") @@ -69,61 +69,61 @@ case ${step} in "atmanlvar") export NTHREADS_ATMANLVAR=${NTHREADSmax} - export APRUN_ATMANLVAR="${APRUN} --cpus-per-task=${NTHREADS_ATMANLVAR}" + export APRUN_ATMANLVAR="${APRUN_default} --cpus-per-task=${NTHREADS_ATMANLVAR}" ;; "atmanlfv3inc") export NTHREADS_ATMANLFV3INC=${NTHREADSmax} - export APRUN_ATMANLFV3INC="${APRUN} --cpus-per-task=${NTHREADS_ATMANLFV3INC}" + export APRUN_ATMANLFV3INC="${APRUN_default} --cpus-per-task=${NTHREADS_ATMANLFV3INC}" ;; "atmensanlobs") export NTHREADS_ATMENSANLOBS=${NTHREADSmax} - export APRUN_ATMENSANLOBS="${APRUN} --cpus-per-task=${NTHREADS_ATMENSANLOBS}" + export APRUN_ATMENSANLOBS="${APRUN_default} --cpus-per-task=${NTHREADS_ATMENSANLOBS}" ;; "atmensanlsol") export NTHREADS_ATMENSANLSOL=${NTHREADSmax} - export APRUN_ATMENSANLSOL="${APRUN} --cpus-per-task=${NTHREADS_ATMENSANLSOL}" + export APRUN_ATMENSANLSOL="${APRUN_default} --cpus-per-task=${NTHREADS_ATMENSANLSOL}" ;; "atmensanlletkf") export NTHREADS_ATMENSANLLETKF=${NTHREADSmax} - export APRUN_ATMENSANLLETKF="${APRUN} --cpus-per-task=${NTHREADS_ATMENSANLLETKF}" + export APRUN_ATMENSANLLETKF="${APRUN_default} --cpus-per-task=${NTHREADS_ATMENSANLLETKF}" ;; "atmensanlfv3inc") export NTHREADS_ATMENSANLFV3INC=${NTHREADSmax} - export APRUN_ATMENSANLFV3INC="${APRUN} --cpus-per-task=${NTHREADS_ATMENSANLFV3INC}" + export APRUN_ATMENSANLFV3INC="${APRUN_default} --cpus-per-task=${NTHREADS_ATMENSANLFV3INC}" ;; "aeroanlvar") export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" export NTHREADS_AEROANL=${NTHREADSmax} - export APRUN_AEROANL="${APRUN} --cpus-per-task=${NTHREADS_AEROANL}" + export APRUN_AEROANL="${APRUN_default} --cpus-per-task=${NTHREADS_AEROANL}" ;; "aeroanlgenb") export NTHREADS_AEROANLGENB=${NTHREADSmax} - export APRUN_AEROANLGENB="${APRUN} --cpus-per-task=${NTHREADS_AEROANLGENB}" + export APRUN_AEROANLGENB="${APRUN_default} --cpus-per-task=${NTHREADS_AEROANLGENB}" ;; "prepobsaero") export NTHREADS_PREPOBSAERO=${NTHREADS1} - export APRUN_PREPOBSAERO="${APRUN} --cpus-per-task=${NTHREADS_PREPOBSAERO}" + export APRUN_PREPOBSAERO="${APRUN_default} --cpus-per-task=${NTHREADS_PREPOBSAERO}" ;; "snowanl") export NTHREADS_SNOWANL=${NTHREADSmax} - export APRUN_SNOWANL="${APRUN} --cpus-per-task=${NTHREADS_SNOWANL}" + export APRUN_SNOWANL="${APRUN_default} --cpus-per-task=${NTHREADS_SNOWANL}" export APRUN_APPLY_INCR="${launcher} -n 6" ;; "esnowrecen") export NTHREADS_ESNOWRECEN=${NTHREADSmax} - export APRUN_ESNOWRECEN="${APRUN} --cpus-per-task=${NTHREADS_ESNOWRECEN}" + export APRUN_ESNOWRECEN="${APRUN_default} --cpus-per-task=${NTHREADS_ESNOWRECEN}" export APRUN_APPLY_INCR="${launcher} -n 6" ;; @@ -131,12 +131,12 @@ case ${step} in "marinebmat") export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" - export APRUN_MARINEBMAT="${APRUN}" + export APRUN_MARINEBMAT="${APRUN_default}" ;; "ocnanalrun") export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" - export APRUN_OCNANAL="${APRUN}" + export APRUN_OCNANAL="${APRUN_default}" ;; "ocnanalecen") @@ -153,12 +153,12 @@ case ${step} in export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" export NTHREADS_OCNANAL=${NTHREADSmax} - export APRUN_OCNANAL="${APRUN} --cpus-per-task=${NTHREADS_OCNANAL}" + export APRUN_OCNANAL="${APRUN_default} --cpus-per-task=${NTHREADS_OCNANAL}" ;; "calcanl") export NTHREADS_CALCANL=${NTHREADSmax} - export APRUN_CALCANL="${APRUN} --cpus-per-task=${NTHREADS_CALCANL}" + 
export APRUN_CALCANL="${APRUN_default} --cpus-per-task=${NTHREADS_CALCANL}" ;; "anal" | "analcalc") @@ -171,7 +171,7 @@ case ${step} in export NTHREADS_GSI=${threads_per_task_anal:-${max_threads_per_task}} - export APRUN_GSI="${APRUN} --cpus-per-task=${NTHREADS_GSI}" + export APRUN_GSI="${APRUN_default} --cpus-per-task=${NTHREADS_GSI}" export NTHREADS_CALCINC=${threads_per_task_calcinc:-1} [[ ${NTHREADS_CALCINC} -gt ${max_threads_per_task} ]] && export NTHREADS_CALCINC=${max_threads_per_task} @@ -190,7 +190,7 @@ case ${step} in export NTHREADS_CYCLE=${threads_per_task:-14} [[ ${NTHREADS_CYCLE} -gt ${max_tasks_per_node} ]] && export NTHREADS_CYCLE=${max_tasks_per_node} - export APRUN_CYCLE="${APRUN} --cpus-per-task=${NTHREADS_CYCLE}" + export APRUN_CYCLE="${APRUN_default} --cpus-per-task=${NTHREADS_CYCLE}" ;; "eobs") @@ -204,7 +204,7 @@ case ${step} in export NTHREADS_GSI=${NTHREADSmax} [[ ${NTHREADS_GSI} -gt ${max_threads_per_task} ]] && export NTHREADS_GSI=${max_threads_per_task} - export APRUN_GSI="${APRUN} --cpus-per-task=${NTHREADS_GSI}" + export APRUN_GSI="${APRUN_default} --cpus-per-task=${NTHREADS_GSI}" ;; "eupd") @@ -230,7 +230,7 @@ case ${step} in "upp") export NTHREADS_UPP=${NTHREADS1} - export APRUN_UPP="${APRUN} --cpus-per-task=${NTHREADS_UPP}" + export APRUN_UPP="${APRUN_default} --cpus-per-task=${NTHREADS_UPP}" ;; "atmos_products") @@ -247,7 +247,7 @@ case ${step} in "ecen") export NTHREADS_ECEN=${NTHREADSmax} - export APRUN_ECEN="${APRUN} --cpus-per-task=${NTHREADS_ECEN}" + export APRUN_ECEN="${APRUN_default} --cpus-per-task=${NTHREADS_ECEN}" export NTHREADS_CHGRES=${threads_per_task_chgres:-12} [[ ${NTHREADS_CHGRES} -gt ${max_tasks_per_node} ]] && export NTHREADS_CHGRES=${max_tasks_per_node} @@ -255,23 +255,23 @@ case ${step} in export NTHREADS_CALCINC=${threads_per_task_calcinc:-1} [[ ${NTHREADS_CALCINC} -gt ${max_threads_per_task} ]] && export NTHREADS_CALCINC=${max_threads_per_task} - export APRUN_CALCINC="${APRUN} --cpus-per-task=${NTHREADS_CALCINC}" + export APRUN_CALCINC="${APRUN_default} --cpus-per-task=${NTHREADS_CALCINC}" ;; "esfc") export NTHREADS_ESFC=${NTHREADSmax} - export APRUN_ESFC="${APRUN} --cpus-per-task=${NTHREADS_ESFC}" + export APRUN_ESFC="${APRUN_default} --cpus-per-task=${NTHREADS_ESFC}" export NTHREADS_CYCLE=${threads_per_task_cycle:-14} [[ ${NTHREADS_CYCLE} -gt ${max_tasks_per_node} ]] && export NTHREADS_CYCLE=${max_tasks_per_node} - export APRUN_CYCLE="${APRUN} --cpus-per-task=${NTHREADS_CYCLE}" + export APRUN_CYCLE="${APRUN_default} --cpus-per-task=${NTHREADS_CYCLE}" ;; "epos") export NTHREADS_EPOS=${NTHREADSmax} - export APRUN_EPOS="${APRUN} --cpus-per-task=${NTHREADS_EPOS}" + export APRUN_EPOS="${APRUN_default} --cpus-per-task=${NTHREADS_EPOS}" ;; "postsnd") @@ -279,7 +279,7 @@ case ${step} in export CFP_MP="YES" export NTHREADS_POSTSND=${NTHREADS1} - export APRUN_POSTSND="${APRUN} --cpus-per-task=${NTHREADS_POSTSND}" + export APRUN_POSTSND="${APRUN_default} --cpus-per-task=${NTHREADS_POSTSND}" export NTHREADS_POSTSNDCFP=${threads_per_task_postsndcfp:-1} [[ ${NTHREADS_POSTSNDCFP} -gt ${max_threads_per_task} ]] && export NTHREADS_POSTSNDCFP=${max_threads_per_task} @@ -289,7 +289,7 @@ case ${step} in "awips") export NTHREADS_AWIPS=${NTHREADS1} - export APRUN_AWIPSCFP="${APRUN} ${mpmd_opt}" + export APRUN_AWIPSCFP="${APRUN_default} ${mpmd_opt}" ;; "gempak") @@ -300,7 +300,7 @@ case ${step} in "fit2obs") export NTHREADS_FIT2OBS=${NTHREADS1} - export MPIRUN="${APRUN} --cpus-per-task=${NTHREADS_FIT2OBS}" + export MPIRUN="${APRUN_default} 
--cpus-per-task=${NTHREADS_FIT2OBS}" ;; *) diff --git a/env/JET.env b/env/JET.env index 57dc243c36..c05fe35789 100755 --- a/env/JET.env +++ b/env/JET.env @@ -27,7 +27,7 @@ if [[ -n "${ntasks:-}" && -n "${max_tasks_per_node:-}" && -n "${tasks_per_node:- NTHREADS1=${threads_per_task:-1} [[ ${NTHREADSmax} -gt ${max_threads_per_task} ]] && NTHREADSmax=${max_threads_per_task} [[ ${NTHREADS1} -gt ${max_threads_per_task} ]] && NTHREADS1=${max_threads_per_task} - APRUN="${launcher} -n ${ntasks}" + APRUN_default="${launcher} -n ${ntasks}" else echo "ERROR config.resources must be sourced before sourcing JET.env" exit 2 @@ -58,17 +58,17 @@ elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step} elif [[ "${step}" = "atmanlvar" ]]; then export NTHREADS_ATMANLVAR=${NTHREADSmax} - export APRUN_ATMANLVAR="${APRUN}" + export APRUN_ATMANLVAR="${APRUN_default}" elif [[ "${step}" = "atmensanlobs" ]]; then export NTHREADS_ATMENSANLOBS=${NTHREADSmax} - export APRUN_ATMENSANLOBS="${APRUN}" + export APRUN_ATMENSANLOBS="${APRUN_default}" elif [[ "${step}" = "atmensanlsol" ]]; then export NTHREADS_ATMENSANLSOL=${NTHREADSmax} - export APRUN_ATMENSANLSOL="${APRUN}" + export APRUN_ATMENSANLSOL="${APRUN_default}" elif [[ "${step}" = "atmensanlletkf" ]]; then @@ -83,53 +83,53 @@ elif [[ "${step}" = "atmensanlfv3inc" ]]; then elif [[ "${step}" = "aeroanlvar" ]]; then export NTHREADS_AEROANL=${NTHREADSmax} - export APRUN_AEROANL="${APRUN}" + export APRUN_AEROANL="${APRUN_default}" elif [[ "${step}" = "aeroanlgenb" ]]; then export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" export NTHREADS_AEROANLGENB=${NTHREADSmax} - export APRUN_AEROANLGENB="${APRUN} --cpus-per-task=${NTHREADS_AEROANLGENB}" + export APRUN_AEROANLGENB="${APRUN_default} --cpus-per-task=${NTHREADS_AEROANLGENB}" elif [[ "${step}" = "prepobsaero" ]]; then export NTHREADS_PREPOBSAERO=${NTHREADS1} - export APRUN_PREPOBSAERO="${APRUN} --cpus-per-task=${NTHREADS_PREPOBSAERO}" + export APRUN_PREPOBSAERO="${APRUN_default} --cpus-per-task=${NTHREADS_PREPOBSAERO}" elif [[ "${step}" = "snowanl" ]]; then export NTHREADS_SNOWANL=${NTHREADSmax} - export APRUN_SNOWANL="${APRUN}" + export APRUN_SNOWANL="${APRUN_default}" export APRUN_APPLY_INCR="${launcher} -n 6" elif [[ "${step}" = "esnowrecen" ]]; then export NTHREADS_ESNOWRECEN=${NTHREADSmax} - export APRUN_ESNOWRECEN="${APRUN} --cpus-per-task=${NTHREADS_ESNOWRECEN}" + export APRUN_ESNOWRECEN="${APRUN_default} --cpus-per-task=${NTHREADS_ESNOWRECEN}" export APRUN_APPLY_INCR="${launcher} -n 6" elif [[ "${step}" = "atmanlfv3inc" ]]; then export NTHREADS_ATMANLFV3INC=${NTHREADSmax} - export APRUN_ATMANLFV3INC="${APRUN}" + export APRUN_ATMANLFV3INC="${APRUN_default}" elif [[ "${step}" = "marinebmat" ]]; then export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" - export APRUN_MARINEBMAT="${APRUN}" + export APRUN_MARINEBMAT="${APRUN_default}" elif [[ "${step}" = "ocnanalrun" ]]; then export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" - export APRUN_OCNANAL="${APRUN}" + export APRUN_OCNANAL="${APRUN_default}" elif [[ "${step}" = "calcanl" ]]; then export NTHREADS_CALCANL=${NTHREADSmax} - export APRUN_CALCANL="${APRUN}" + export APRUN_CALCANL="${APRUN_default}" elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then @@ -141,7 +141,7 @@ elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" export NTHREADS_GSI=${threads_per_task_anal:-${max_threads_per_task}} - export APRUN_GSI="${APRUN}" + export APRUN_GSI="${APRUN_default}" 
export NTHREADS_CALCINC=${threads_per_task_calcinc:-1} [[ ${NTHREADS_CALCINC} -gt ${max_threads_per_task} ]] && export NTHREADS_CALCINC=${max_threads_per_task} @@ -159,7 +159,7 @@ elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then elif [[ "${step}" = "sfcanl" ]]; then export NTHREADS_CYCLE=${threads_per_task:-14} [[ ${NTHREADS_CYCLE} -gt ${max_tasks_per_node} ]] && export NTHREADS_CYCLE=${max_tasks_per_node} - export APRUN_CYCLE="${APRUN}" + export APRUN_CYCLE="${APRUN_default}" elif [[ "${step}" = "eobs" ]]; then @@ -167,7 +167,7 @@ elif [[ "${step}" = "eobs" ]]; then export MKL_CBWR=AUTO export NTHREADS_GSI=${NTHREADSmax} - export APRUN_GSI="${APRUN}" + export APRUN_GSI="${APRUN_default}" export CFP_MP=${CFP_MP:-"YES"} export USE_CFP=${USE_CFP:-"YES"} @@ -193,7 +193,7 @@ elif [[ "${step}" = "fcst" ]] || [[ "${step}" = "efcs" ]]; then elif [[ "${step}" = "upp" ]]; then export NTHREADS_UPP=${NTHREADS1} - export APRUN_UPP="${APRUN}" + export APRUN_UPP="${APRUN_default}" elif [[ "${step}" = "atmos_products" ]]; then @@ -207,7 +207,7 @@ elif [[ "${step}" = "oceanice_products" ]]; then elif [[ "${step}" = "ecen" ]]; then export NTHREADS_ECEN=${NTHREADSmax} - export APRUN_ECEN="${APRUN}" + export APRUN_ECEN="${APRUN_default}" export NTHREADS_CHGRES=${threads_per_task_chgres:-12} [[ ${NTHREADS_CHGRES} -gt ${max_tasks_per_node} ]] && export NTHREADS_CHGRES=${max_tasks_per_node} @@ -215,28 +215,28 @@ elif [[ "${step}" = "ecen" ]]; then export NTHREADS_CALCINC=${threads_per_task_calcinc:-1} [[ ${NTHREADS_CALCINC} -gt ${max_threads_per_task} ]] && export NTHREADS_CALCINC=${max_threads_per_task} - export APRUN_CALCINC="${APRUN}" + export APRUN_CALCINC="${APRUN_default}" elif [[ "${step}" = "esfc" ]]; then export NTHREADS_ESFC=${NTHREADSmax} - export APRUN_ESFC="${APRUN}" + export APRUN_ESFC="${APRUN_default}" export NTHREADS_CYCLE=${threads_per_task_cycle:-14} [[ ${NTHREADS_CYCLE} -gt ${max_tasks_per_node} ]] && export NTHREADS_CYCLE=${max_tasks_per_node} - export APRUN_CYCLE="${APRUN}" + export APRUN_CYCLE="${APRUN_default}" elif [[ "${step}" = "epos" ]]; then export NTHREADS_EPOS=${NTHREADSmax} - export APRUN_EPOS="${APRUN}" + export APRUN_EPOS="${APRUN_default}" elif [[ "${step}" = "postsnd" ]]; then export CFP_MP="YES" export NTHREADS_POSTSND=${NTHREADS1} - export APRUN_POSTSND="${APRUN}" + export APRUN_POSTSND="${APRUN_default}" export NTHREADS_POSTSNDCFP=${threads_per_task_postsndcfp:-1} [[ ${NTHREADS_POSTSNDCFP} -gt ${max_threads_per_task} ]] && export NTHREADS_POSTSNDCFP=${max_threads_per_task} @@ -253,6 +253,6 @@ elif [[ "${step}" = "gempak" ]]; then elif [[ "${step}" = "fit2obs" ]]; then export NTHREADS_FIT2OBS=${NTHREADS1} - export MPIRUN="${APRUN}" + export MPIRUN="${APRUN_default}" fi diff --git a/env/ORION.env b/env/ORION.env index c188df9ec7..ea44e510f8 100755 --- a/env/ORION.env +++ b/env/ORION.env @@ -34,7 +34,7 @@ if [[ -n "${ntasks:-}" && -n "${max_tasks_per_node:-}" && -n "${tasks_per_node:- NTHREADS1=${threads_per_task:-1} [[ ${NTHREADSmax} -gt ${max_threads_per_task} ]] && NTHREADSmax=${max_threads_per_task} [[ ${NTHREADS1} -gt ${max_threads_per_task} ]] && NTHREADS1=${max_threads_per_task} - APRUN="${launcher} -n ${ntasks}" + APRUN_default="${launcher} -n ${ntasks}" else echo "ERROR config.resources must be sourced before sourcing ORION.env" exit 2 @@ -66,97 +66,97 @@ elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step} elif [[ "${step}" = "atmanlvar" ]]; then export NTHREADS_ATMANLVAR=${NTHREADSmax} - export 
APRUN_ATMANLVAR="${APRUN} --cpus-per-task=${NTHREADS_ATMANLVAR}" + export APRUN_ATMANLVAR="${APRUN_default} --cpus-per-task=${NTHREADS_ATMANLVAR}" elif [[ "${step}" = "atmensanlobs" ]]; then export NTHREADS_ATMENSANLOBS=${NTHREADSmax} - export APRUN_ATMENSANLOBS="${APRUN} --cpus-per-task=${NTHREADS_ATMENSANLOBS}" + export APRUN_ATMENSANLOBS="${APRUN_default} --cpus-per-task=${NTHREADS_ATMENSANLOBS}" elif [[ "${step}" = "atmensanlsol" ]]; then export NTHREADS_ATMENSANLSOL=${NTHREADSmax} - export APRUN_ATMENSANLSOL="${APRUN} --cpus-per-task=${NTHREADS_ATMENSANLSOL}" + export APRUN_ATMENSANLSOL="${APRUN_default} --cpus-per-task=${NTHREADS_ATMENSANLSOL}" elif [[ "${step}" = "atmensanlletkf" ]]; then export NTHREADS_ATMENSANLLETKF=${NTHREADSmax} - export APRUN_ATMENSANLLETKF="${APRUN} --cpus-per-task=${NTHREADS_ATMENSANLLETKF}" + export APRUN_ATMENSANLLETKF="${APRUN_default} --cpus-per-task=${NTHREADS_ATMENSANLLETKF}" elif [[ "${step}" = "atmensanlfv3inc" ]]; then export NTHREADS_ATMENSANLFV3INC=${NTHREADSmax} - export APRUN_ATMENSANLFV3INC="${APRUN} --cpus-per-task=${NTHREADS_ATMENSANLFV3INC}" + export APRUN_ATMENSANLFV3INC="${APRUN_default} --cpus-per-task=${NTHREADS_ATMENSANLFV3INC}" elif [[ "${step}" = "aeroanlvar" ]]; then export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" export NTHREADS_AEROANL=${NTHREADSmax} - export APRUN_AEROANL="${APRUN} --cpus-per-task=${NTHREADS_AEROANL}" + export APRUN_AEROANL="${APRUN_default} --cpus-per-task=${NTHREADS_AEROANL}" elif [[ "${step}" = "aeroanlgenb" ]]; then export NTHREADS_AEROANLGENB=${NTHREADSmax} - export APRUN_AEROANLGENB="${APRUN} --cpus-per-task=${NTHREADS_AEROANLGENB}" + export APRUN_AEROANLGENB="${APRUN_default} --cpus-per-task=${NTHREADS_AEROANLGENB}" elif [[ "${step}" = "prepobsaero" ]]; then export NTHREADS_PREPOBSAERO=${NTHREADS1} - export APRUN_PREPOBSAERO="${APRUN} --cpus-per-task=${NTHREADS_PREPOBSAERO}" + export APRUN_PREPOBSAERO="${APRUN_default} --cpus-per-task=${NTHREADS_PREPOBSAERO}" elif [[ "${step}" = "snowanl" ]]; then export NTHREADS_SNOWANL=${NTHREADSmax} - export APRUN_SNOWANL="${APRUN} --cpus-per-task=${NTHREADS_SNOWANL}" + export APRUN_SNOWANL="${APRUN_default} --cpus-per-task=${NTHREADS_SNOWANL}" export APRUN_APPLY_INCR="${launcher} -n 6" elif [[ "${step}" = "esnowrecen" ]]; then export NTHREADS_ESNOWRECEN=${NTHREADSmax} - export APRUN_ESNOWRECEN="${APRUN} --cpus-per-task=${NTHREADS_ESNOWRECEN}" + export APRUN_ESNOWRECEN="${APRUN_default} --cpus-per-task=${NTHREADS_ESNOWRECEN}" export APRUN_APPLY_INCR="${launcher} -n 6" elif [[ "${step}" = "atmanlfv3inc" ]]; then export NTHREADS_ATMANLFV3INC=${NTHREADSmax} - export APRUN_ATMANLFV3INC="${APRUN} --cpus-per-task=${NTHREADS_ATMANLFV3INC}" + export APRUN_ATMANLFV3INC="${APRUN_default} --cpus-per-task=${NTHREADS_ATMANLFV3INC}" elif [[ "${step}" = "marinebmat" ]]; then export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" export NTHREADS_MARINEBMAT=${NTHREADSmax} - export APRUN_MARINEBMAT="${APRUN}" + export APRUN_MARINEBMAT="${APRUN_default}" elif [[ "${step}" = "ocnanalrun" ]]; then export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" - export APRUN_OCNANAL="${APRUN}" + export APRUN_OCNANAL="${APRUN_default}" elif [[ "${step}" = "ocnanalchkpt" ]]; then export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" export NTHREADS_OCNANAL=${NTHREADSmax} - export APRUN_OCNANAL="${APRUN} --cpus-per-task=${NTHREADS_OCNANAL}" + export APRUN_OCNANAL="${APRUN_default} --cpus-per-task=${NTHREADS_OCNANAL}" elif [[ "${step}" = "ocnanalecen" ]]; then export NTHREADS_OCNANALECEN=${NTHREADSmax} - 
export APRUN_OCNANALECEN="${APRUN} --cpus-per-task=${NTHREADS_OCNANALECEN}" + export APRUN_OCNANALECEN="${APRUN_default} --cpus-per-task=${NTHREADS_OCNANALECEN}" elif [[ "${step}" = "marineanalletkf" ]]; then export NTHREADS_MARINEANALLETKF=${NTHREADSmax} - export APRUN_MARINEANALLETKF="${APRUN} --cpus-per-task=${NTHREADS_MARINEANALLETKF}" + export APRUN_MARINEANALLETKF="${APRUN_default} --cpus-per-task=${NTHREADS_MARINEANALLETKF}" elif [[ "${step}" = "calcanl" ]]; then export NTHREADS_CALCANL=${NTHREADSmax} - export APRUN_CALCANL="${APRUN} --cpus-per-task=${NTHREADS_CALCANL}" + export APRUN_CALCANL="${APRUN_default} --cpus-per-task=${NTHREADS_CALCANL}" elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then @@ -168,7 +168,7 @@ elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" export NTHREADS_GSI=${NTHREADSmax} - export APRUN_GSI="${APRUN} --cpus-per-task=${NTHREADS_GSI}" + export APRUN_GSI="${APRUN_default} --cpus-per-task=${NTHREADS_GSI}" export NTHREADS_CALCINC=${threads_per_task_calcinc:-1} [[ ${NTHREADS_CALCINC} -gt ${max_threads_per_task} ]] && export NTHREADS_CALCINC=${max_threads_per_task} @@ -186,7 +186,7 @@ elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then elif [[ "${step}" = "sfcanl" ]]; then export NTHREADS_CYCLE=${threads_per_task:-14} [[ ${NTHREADS_CYCLE} -gt ${max_tasks_per_node} ]] && export NTHREADS_CYCLE=${max_tasks_per_node} - export APRUN_CYCLE="${APRUN} --cpus-per-task=${NTHREADS_CYCLE}" + export APRUN_CYCLE="${APRUN_default} --cpus-per-task=${NTHREADS_CYCLE}" elif [[ "${step}" = "eobs" ]]; then @@ -199,7 +199,7 @@ elif [[ "${step}" = "eobs" ]]; then export NTHREADS_GSI=${NTHREADSmax} [[ ${NTHREADS_GSI} -gt ${max_threads_per_task} ]] && export NTHREADS_GSI=${max_threads_per_task} - export APRUN_GSI="${APRUN} --cpus-per-task=${NTHREADS_GSI}" + export APRUN_GSI="${APRUN_default} --cpus-per-task=${NTHREADS_GSI}" elif [[ "${step}" = "eupd" ]]; then @@ -221,7 +221,7 @@ elif [[ "${step}" = "fcst" ]] || [[ "${step}" = "efcs" ]]; then elif [[ "${step}" = "upp" ]]; then export NTHREADS_UPP=${NTHREADS1} - export APRUN_UPP="${APRUN} --cpus-per-task=${NTHREADS_UPP}" + export APRUN_UPP="${APRUN_default} --cpus-per-task=${NTHREADS_UPP}" elif [[ "${step}" = "atmos_products" ]]; then @@ -235,7 +235,7 @@ elif [[ "${step}" = "oceanice_products" ]]; then elif [[ "${step}" = "ecen" ]]; then export NTHREADS_ECEN=${NTHREADSmax} - export APRUN_ECEN="${APRUN} --cpus-per-task=${NTHREADS_ECEN}" + export APRUN_ECEN="${APRUN_default} --cpus-per-task=${NTHREADS_ECEN}" export NTHREADS_CHGRES=${threads_per_task:-12} [[ ${NTHREADS_CHGRES} -gt ${max_tasks_per_node} ]] && export NTHREADS_CHGRES=${max_tasks_per_node} @@ -243,28 +243,28 @@ elif [[ "${step}" = "ecen" ]]; then export NTHREADS_CALCINC=${threads_per_task_calcinc:-1} [[ ${NTHREADS_CALCINC} -gt ${max_threads_per_task} ]] && export NTHREADS_CALCINC=${max_threads_per_task} - export APRUN_CALCINC="${APRUN} --cpus-per-task=${NTHREADS_CALCINC}" + export APRUN_CALCINC="${APRUN_default} --cpus-per-task=${NTHREADS_CALCINC}" elif [[ "${step}" = "esfc" ]]; then export NTHREADS_ESFC=${NTHREADSmax} - export APRUN_ESFC="${APRUN} --cpus-per-task=${NTHREADS_ESFC}" + export APRUN_ESFC="${APRUN_default} --cpus-per-task=${NTHREADS_ESFC}" export NTHREADS_CYCLE=${threads_per_task_cycle:-14} [[ ${NTHREADS_CYCLE} -gt ${max_tasks_per_node} ]] && export NTHREADS_CYCLE=${max_tasks_per_node} - export APRUN_CYCLE="${APRUN} --cpus-per-task=${NTHREADS_CYCLE}" + export 
APRUN_CYCLE="${APRUN_default} --cpus-per-task=${NTHREADS_CYCLE}" elif [[ "${step}" = "epos" ]]; then export NTHREADS_EPOS=${NTHREADSmax} - export APRUN_EPOS="${APRUN} --cpus-per-task=${NTHREADS_EPOS}" + export APRUN_EPOS="${APRUN_default} --cpus-per-task=${NTHREADS_EPOS}" elif [[ "${step}" = "postsnd" ]]; then export CFP_MP="YES" export NTHREADS_POSTSND=${NTHREADS1} - export APRUN_POSTSND="${APRUN} --cpus-per-task=${NTHREADS_POSTSND}" + export APRUN_POSTSND="${APRUN_default} --cpus-per-task=${NTHREADS_POSTSND}" export NTHREADS_POSTSNDCFP=${threads_per_task_postsndcfp:-1} [[ ${NTHREADS_POSTSNDCFP} -gt ${max_threads_per_task} ]] && export NTHREADS_POSTSNDCFP=${max_threads_per_task} @@ -273,7 +273,7 @@ elif [[ "${step}" = "postsnd" ]]; then elif [[ "${step}" = "awips" ]]; then export NTHREADS_AWIPS=${NTHREADS1} - export APRUN_AWIPSCFP="${APRUN} ${mpmd_opt}" + export APRUN_AWIPSCFP="${APRUN_default} ${mpmd_opt}" elif [[ "${step}" = "gempak" ]]; then @@ -282,6 +282,6 @@ elif [[ "${step}" = "gempak" ]]; then elif [[ "${step}" = "fit2obs" ]]; then export NTHREADS_FIT2OBS=${NTHREADS1} - export MPIRUN="${APRUN} --cpus-per-task=${NTHREADS_FIT2OBS}" + export MPIRUN="${APRUN_default} --cpus-per-task=${NTHREADS_FIT2OBS}" fi diff --git a/env/S4.env b/env/S4.env index d0f29ab221..679af59d1f 100755 --- a/env/S4.env +++ b/env/S4.env @@ -27,7 +27,7 @@ if [[ -n "${ntasks:-}" && -n "${max_tasks_per_node:-}" && -n "${tasks_per_node:- NTHREADS1=${threads_per_task:-1} [[ ${NTHREADSmax} -gt ${max_threads_per_task} ]] && NTHREADSmax=${max_threads_per_task} [[ ${NTHREADS1} -gt ${max_threads_per_task} ]] && NTHREADS1=${max_threads_per_task} - APRUN="${launcher} -n ${ntasks}" + APRUN_default="${launcher} -n ${ntasks}" else echo "ERROR config.resources must be sourced before sourcing S4.env" exit 2 @@ -42,11 +42,11 @@ if [[ "${step}" = "prep" ]] || [[ "${step}" = "prepbufr" ]]; then elif [[ "${step}" = "prepsnowobs" ]]; then - export APRUN_CALCFIMS="${APRUN}" + export APRUN_CALCFIMS="${APRUN_default}" elif [[ "${step}" = "prep_emissions" ]]; then - export APRUN="${APRUN}" + export APRUN="${APRUN_default}" elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step}" = "wavepostsbs" ]] || [[ "${step}" = "wavepostbndpnt" ]] || [[ "${step}" = "wavepostbndpntbll" ]] || [[ "${step}" = "wavepostpnt" ]]; then @@ -58,63 +58,63 @@ elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step} elif [[ "${step}" = "atmanlvar" ]]; then export NTHREADS_ATMANLVAR=${NTHREADSmax} - export APRUN_ATMANLVAR="${APRUN}" + export APRUN_ATMANLVAR="${APRUN_default}" elif [[ "${step}" = "atmensanlobs" ]]; then export NTHREADS_ATMENSANLOBS=${NTHREADSmax} - export APRUN_ATMENSANLOBS="${APRUN}" + export APRUN_ATMENSANLOBS="${APRUN_default}" elif [[ "${step}" = "atmensanlsol" ]]; then export NTHREADS_ATMENSANLSOL=${NTHREADSmax} - export APRUN_ATMENSANLSOL="${APRUN}" + export APRUN_ATMENSANLSOL="${APRUN_default}" elif [[ "${step}" = "atmensanlletkf" ]]; then export NTHREADS_ATMENSANLLETKF=${NTHREADSmax} - export APRUN_ATMENSANLLETKF="${APRUN}" + export APRUN_ATMENSANLLETKF="${APRUN_default}" elif [[ "${step}" = "atmensanlfv3inc" ]]; then export NTHREADS_ATMENSANLFV3INC=${NTHREADSmax} - export APRUN_ATMENSANLFV3INC="${APRUN}" + export APRUN_ATMENSANLFV3INC="${APRUN_default}" elif [[ "${step}" = "aeroanlvar" ]]; then export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" export NTHREADS_AEROANL=${NTHREADSmax} - export APRUN_AEROANL="${APRUN}" + export APRUN_AEROANL="${APRUN_default}" elif [[ "${step}" = 
"aeroanlgenb" ]]; then export NTHREADS_AEROANLGENB=${NTHREADSmax} - export APRUN_AEROANLGENB="${APRUN} --cpus-per-task=${NTHREADS_AEROANLGENB}" + export APRUN_AEROANLGENB="${APRUN_default} --cpus-per-task=${NTHREADS_AEROANLGENB}" elif [[ "${step}" = "prepobsaero" ]]; then export NTHREADS_PREPOBSAERO=${NTHREADS1} - export APRUN_PREPOBSAERO="${APRUN} --cpus-per-task=${NTHREADS_PREPOBSAERO}" + export APRUN_PREPOBSAERO="${APRUN_default} --cpus-per-task=${NTHREADS_PREPOBSAERO}" elif [[ "${step}" = "snowanl" ]]; then export NTHREADS_SNOWANL=${NTHREADSmax} - export APRUN_SNOWANL="${APRUN}" + export APRUN_SNOWANL="${APRUN_default}" export APRUN_APPLY_INCR="${launcher} -n 6" elif [[ "${step}" = "esnowrecen" ]]; then export NTHREADS_ESNOWRECEN=${NTHREADSmax} - export APRUN_ESNOWRECEN="${APRUN} --cpus-per-task=${NTHREADS_ESNOWRECEN}" + export APRUN_ESNOWRECEN="${APRUN_default} --cpus-per-task=${NTHREADS_ESNOWRECEN}" export APRUN_APPLY_INCR="${launcher} -n 6" elif [[ "${step}" = "atmanlfv3inc" ]]; then export NTHREADS_ATMANLFV3INC=${NTHREADSmax} - export APRUN_ATMANLFV3INC="${APRUN}" + export APRUN_ATMANLFV3INC="${APRUN_default}" elif [[ "${step}" = "marinebmat" ]]; then echo "WARNING: ${step} is not enabled on S4!" @@ -125,7 +125,7 @@ elif [[ "${step}" = "marinerun" ]]; then elif [[ "${step}" = "calcanl" ]]; then export NTHREADS_CALCANL=${NTHREADSmax} - export APRUN_CALCANL="${APRUN}" + export APRUN_CALCANL="${APRUN_default}" elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then @@ -137,7 +137,7 @@ elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" export NTHREADS_GSI=${NTHREADSmax} - export APRUN_GSI="${APRUN}" + export APRUN_GSI="${APRUN_default}" export NTHREADS_CALCINC=${threads_per_task_calcinc:-1} [[ ${NTHREADS_CALCINC} -gt ${max_threads_per_task} ]] && export NTHREADS_CALCINC=${max_threads_per_task} @@ -156,7 +156,7 @@ elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then elif [[ "${step}" = "sfcanl" ]]; then export NTHREADS_CYCLE=${threads_per_task:-14} [[ ${NTHREADS_CYCLE} -gt ${max_tasks_per_node} ]] && export NTHREADS_CYCLE=${max_tasks_per_node} - export APRUN_CYCLE="${APRUN}" + export APRUN_CYCLE="${APRUN_default}" elif [[ "${step}" = "eobs" ]]; then @@ -164,7 +164,7 @@ elif [[ "${step}" = "eobs" ]]; then export MKL_CBWR=AUTO export NTHREADS_GSI=${NTHREADSmax} - export APRUN_GSI="${APRUN}" + export APRUN_GSI="${APRUN_default}" export CFP_MP=${CFP_MP:-"YES"} export USE_CFP=${USE_CFP:-"YES"} @@ -191,7 +191,7 @@ elif [[ "${step}" = "upp" ]]; then export NTHREADS_UPP=${NTHREADS1} export OMP_NUM_THREADS="${NTHREADS_UPP}" - export APRUN_UPP="${APRUN}" + export APRUN_UPP="${APRUN_default}" elif [[ "${step}" = "atmos_products" ]]; then @@ -205,7 +205,7 @@ elif [[ "${step}" = "oceanice_products" ]]; then elif [[ "${step}" = "ecen" ]]; then export NTHREADS_ECEN=${NTHREADSmax} - export APRUN_ECEN="${APRUN}" + export APRUN_ECEN="${APRUN_default}" export NTHREADS_CHGRES=${threads_per_task_chgres:-12} [[ ${NTHREADS_CHGRES} -gt ${max_tasks_per_node} ]] && export NTHREADS_CHGRES=${max_tasks_per_node} @@ -213,25 +213,25 @@ elif [[ "${step}" = "ecen" ]]; then export NTHREADS_CALCINC=${threads_per_task_calcinc:-1} [[ ${NTHREADS_CALCINC} -gt ${max_threads_per_task} ]] && export NTHREADS_CALCINC=${max_threads_per_task} - export APRUN_CALCINC="${APRUN}" + export APRUN_CALCINC="${APRUN_default}" elif [[ "${step}" = "esfc" ]]; then export NTHREADS_ESFC=${NTHREADSmax} - export APRUN_ESFC="${APRUN}" + export 
APRUN_ESFC="${APRUN_default}" export NTHREADS_CYCLE=${threads_per_task_cycle:-14} [[ ${NTHREADS_CYCLE} -gt ${max_tasks_per_node} ]] && export NTHREADS_CYCLE=${max_tasks_per_node} - export APRUN_CYCLE="${APRUN}" + export APRUN_CYCLE="${APRUN_default}" elif [[ "${step}" = "epos" ]]; then export NTHREADS_EPOS=${NTHREADSmax} - export APRUN_EPOS="${APRUN}" + export APRUN_EPOS="${APRUN_default}" elif [[ "${step}" = "fit2obs" ]]; then export NTHREADS_FIT2OBS=${NTHREADS1} - export MPIRUN="${APRUN}" + export MPIRUN="${APRUN_default}" fi diff --git a/env/WCOSS2.env b/env/WCOSS2.env index 554daa00d0..71717b0bb1 100755 --- a/env/WCOSS2.env +++ b/env/WCOSS2.env @@ -21,7 +21,7 @@ if [[ -n "${ntasks:-}" && -n "${max_tasks_per_node:-}" && -n "${tasks_per_node:- NTHREADS1=${threads_per_task:-1} [[ ${NTHREADSmax} -gt ${max_threads_per_task} ]] && NTHREADSmax=${max_threads_per_task} [[ ${NTHREADS1} -gt ${max_threads_per_task} ]] && NTHREADS1=${max_threads_per_task} - APRUN="${launcher} -n ${ntasks}" + APRUN_default="${launcher} -n ${ntasks}" else echo "ERROR config.resources must be sourced before sourcing WCOSS2.env" exit 2 @@ -36,11 +36,11 @@ if [[ "${step}" = "prep" ]] || [[ "${step}" = "prepbufr" ]]; then elif [[ "${step}" = "prepsnowobs" ]]; then - export APRUN_CALCFIMS="${APRUN}" + export APRUN_CALCFIMS="${APRUN_default}" elif [[ "${step}" = "prep_emissions" ]]; then - export APRUN="${APRUN}" + export APRUN="${APRUN_default}" elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step}" = "wavepostsbs" ]] || [[ "${step}" = "wavepostbndpnt" ]] || [[ "${step}" = "wavepostbndpntbll" ]] || [[ "${step}" = "wavepostpnt" ]]; then @@ -51,95 +51,95 @@ elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step} elif [[ "${step}" = "atmanlvar" ]]; then export NTHREADS_ATMANLVAR=${NTHREADSmax} - export APRUN_ATMANLVAR="${APRUN}" + export APRUN_ATMANLVAR="${APRUN_default}" elif [[ "${step}" = "atmensanlobs" ]]; then export NTHREADS_ATMENSANLOBS=${NTHREADSmax} - export APRUN_ATMENSANLOBS="${APRUN}" + export APRUN_ATMENSANLOBS="${APRUN_default}" elif [[ "${step}" = "atmensanlsol" ]]; then export NTHREADS_ATMENSANLSOL=${NTHREADSmax} - export APRUN_ATMENSANLSOL="${APRUN}" + export APRUN_ATMENSANLSOL="${APRUN_default}" elif [[ "${step}" = "atmensanlletkf" ]]; then export NTHREADS_ATMENSANLLETKF=${NTHREADSmax} - export APRUN_ATMENSANLLETKF="${APRUN}" + export APRUN_ATMENSANLLETKF="${APRUN_default}" elif [[ "${step}" = "atmensanlfv3inc" ]]; then export NTHREADS_ATMENSANLFV3INC=${NTHREADSmax} - export APRUN_ATMENSANLFV3INC="${APRUN}" + export APRUN_ATMENSANLFV3INC="${APRUN_default}" elif [[ "${step}" = "aeroanlvar" ]]; then export APRUNCFP="${launcher} -np \$ncmd ${mpmd_opt}" export NTHREADS_AEROANL=${NTHREADSmax} - export APRUN_AEROANL="${APRUN}" + export APRUN_AEROANL="${APRUN_default}" elif [[ "${step}" = "aeroanlgenb" ]]; then export NTHREADS_AEROANLGENB=${NTHREADSmax} - export APRUN_AEROANLGENB="${APRUN}" + export APRUN_AEROANLGENB="${APRUN_default}" elif [[ "${step}" = "prepobsaero" ]]; then export NTHREADS_PREPOBSAERO=${NTHREADS1} - export APRUN_PREPOBSAERO="${APRUN} --ppn ${tasks_per_node}--cpu-bind depth --depth=${NTHREADS_PREPOBSAERO}" + export APRUN_PREPOBSAERO="${APRUN_default} --ppn ${tasks_per_node}--cpu-bind depth --depth=${NTHREADS_PREPOBSAERO}" elif [[ "${step}" = "snowanl" ]]; then export NTHREADS_SNOWANL=${NTHREADSmax} - export APRUN_SNOWANL="${APRUN}" + export APRUN_SNOWANL="${APRUN_default}" export APRUN_APPLY_INCR="${launcher} -n 6" elif [[ "${step}" = 
"esnowrecen" ]]; then export NTHREADS_ESNOWRECEN=${NTHREADSmax} - export APRUN_ESNOWRECEN="${APRUN}" + export APRUN_ESNOWRECEN="${APRUN_default}" export APRUN_APPLY_INCR="${launcher} -n 6" elif [[ "${step}" = "marinebmat" ]]; then export APRUNCFP="${launcher} -n \$ncmd --multi-prog" - export APRUN_MARINEBMAT="${APRUN}" + export APRUN_MARINEBMAT="${APRUN_default}" elif [[ "${step}" = "ocnanalrun" ]]; then export APRUNCFP="${launcher} -n \$ncmd --multi-prog" - export APRUN_OCNANAL="${APRUN}" + export APRUN_OCNANAL="${APRUN_default}" elif [[ "${step}" = "ocnanalchkpt" ]]; then export APRUNCFP="${launcher} -n \$ncmd --multi-prog" - export APRUN_OCNANAL="${APRUN}" + export APRUN_OCNANAL="${APRUN_default}" elif [[ "${step}" = "ocnanalecen" ]]; then export NTHREADS_OCNANALECEN=${NTHREADSmax} - export APRUN_OCNANALECEN="${APRUN} --cpus-per-task=${NTHREADS_OCNANALECEN}" + export APRUN_OCNANALECEN="${APRUN_default} --cpus-per-task=${NTHREADS_OCNANALECEN}" elif [[ "${step}" = "marineanalletkf" ]]; then export NTHREADS_MARINEANALLETKF=${NTHREADSmax} - export APRUN_MARINEANALLETKF="${APRUN} --cpus-per-task=${NTHREADS_MARINEANALLETKF}" + export APRUN_MARINEANALLETKF="${APRUN_default} --cpus-per-task=${NTHREADS_MARINEANALLETKF}" elif [[ "${step}" = "atmanlfv3inc" ]]; then export NTHREADS_ATMANLFV3INC=${NTHREADSmax} - export APRUN_ATMANLFV3INC="${APRUN}" + export APRUN_ATMANLFV3INC="${APRUN_default}" elif [[ "${step}" = "calcanl" ]]; then export NTHREADS_CALCANL=${NTHREADSmax} - export APRUN_CALCANL="${APRUN}" + export APRUN_CALCANL="${APRUN_default}" elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then @@ -152,7 +152,7 @@ elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then fi export NTHREADS_GSI=${NTHREADSmax} - export APRUN_GSI="${APRUN} -ppn ${tasks_per_node} --cpu-bind depth --depth ${NTHREADS_GSI}" + export APRUN_GSI="${APRUN_default} -ppn ${tasks_per_node} --cpu-bind depth --depth ${NTHREADS_GSI}" export NTHREADS_CALCINC=${threads_per_task_calcinc:-1} [[ ${NTHREADS_CALCINC} -gt ${max_threads_per_task} ]] && export NTHREADS_CALCINC=${max_threads_per_task} @@ -179,7 +179,7 @@ elif [[ "${step}" = "sfcanl" ]]; then export NTHREADS_CYCLE=${threads_per_task:-14} [[ ${NTHREADS_CYCLE} -gt ${max_tasks_per_node} ]] && export NTHREADS_CYCLE=${max_tasks_per_node} - export APRUN_CYCLE="${APRUN}" + export APRUN_CYCLE="${APRUN_default}" elif [[ "${step}" = "eobs" ]]; then @@ -188,7 +188,7 @@ elif [[ "${step}" = "eobs" ]]; then export FI_OFI_RXM_SAR_LIMIT=3145728 export NTHREADS_GSI=${NTHREADSmax} - export APRUN_GSI="${APRUN} -ppn ${tasks_per_node} --cpu-bind depth --depth ${NTHREADS_GSI}" + export APRUN_GSI="${APRUN_default} -ppn ${tasks_per_node} --cpu-bind depth --depth ${NTHREADS_GSI}" export CFP_MP=${CFP_MP:-"NO"} export USE_CFP=${USE_CFP:-"YES"} @@ -229,7 +229,7 @@ elif [[ "${step}" = "fcst" ]] || [[ "${step}" = "efcs" ]]; then elif [[ "${step}" = "upp" ]]; then export NTHREADS_UPP=${NTHREADS1} - export APRUN_UPP="${APRUN} -ppn ${tasks_per_node} --cpu-bind depth --depth ${NTHREADS_UPP}" + export APRUN_UPP="${APRUN_default} -ppn ${tasks_per_node} --cpu-bind depth --depth ${NTHREADS_UPP}" elif [[ "${step}" = "atmos_products" ]]; then @@ -243,7 +243,7 @@ elif [[ "${step}" = "oceanice_products" ]]; then elif [[ "${step}" = "ecen" ]]; then export NTHREADS_ECEN=${NTHREADSmax} - export APRUN_ECEN="${APRUN} -ppn ${tasks_per_node} --cpu-bind depth --depth ${NTHREADS_ECEN}" + export APRUN_ECEN="${APRUN_default} -ppn ${tasks_per_node} --cpu-bind depth --depth ${NTHREADS_ECEN}" export 
NTHREADS_CHGRES=${threads_per_task_chgres:-14} [[ ${NTHREADS_CHGRES} -gt ${max_tasks_per_node} ]] && export NTHREADS_CHGRES=${max_tasks_per_node} @@ -251,25 +251,25 @@ elif [[ "${step}" = "ecen" ]]; then export NTHREADS_CALCINC=${threads_per_task_calcinc:-1} [[ ${NTHREADS_CALCINC} -gt ${max_threads_per_task} ]] && export NTHREADS_CALCINC=${max_threads_per_task} - export APRUN_CALCINC="${APRUN}" + export APRUN_CALCINC="${APRUN_default}" export NTHREADS_CYCLE=${threads_per_task_cycle:-14} [[ ${NTHREADS_CYCLE} -gt ${max_tasks_per_node} ]] && export NTHREADS_CYCLE=${max_tasks_per_node} - export APRUN_CYCLE="${APRUN} -ppn ${tasks_per_node_cycle} --cpu-bind depth --depth ${NTHREADS_CYCLE}" + export APRUN_CYCLE="${APRUN_default} -ppn ${tasks_per_node_cycle} --cpu-bind depth --depth ${NTHREADS_CYCLE}" elif [[ "${step}" = "esfc" ]]; then export NTHREADS_ESFC=${NTHREADSmax} - export APRUN_ESFC="${APRUN} -ppn ${tasks_per_node} --cpu-bind depth --depth ${NTHREADS_ESFC}" + export APRUN_ESFC="${APRUN_default} -ppn ${tasks_per_node} --cpu-bind depth --depth ${NTHREADS_ESFC}" export NTHREADS_CYCLE=${threads_per_task_cycle:-14} [[ ${NTHREADS_CYCLE} -gt ${max_tasks_per_node} ]] && export NTHREADS_CYCLE=${max_tasks_per_node} - export APRUN_CYCLE="${APRUN} -ppn ${tasks_per_node_cycle} --cpu-bind depth --depth ${NTHREADS_CYCLE}" + export APRUN_CYCLE="${APRUN_default} -ppn ${tasks_per_node_cycle} --cpu-bind depth --depth ${NTHREADS_CYCLE}" elif [[ "${step}" = "epos" ]]; then export NTHREADS_EPOS=${NTHREADSmax} - export APRUN_EPOS="${APRUN} -ppn ${tasks_per_node} --cpu-bind depth --depth ${NTHREADS_EPOS}" + export APRUN_EPOS="${APRUN_default} -ppn ${tasks_per_node} --cpu-bind depth --depth ${NTHREADS_EPOS}" elif [[ "${step}" = "postsnd" ]]; then diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources index cf3f1fb64c..c7267dabad 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -926,7 +926,7 @@ case ${step} in ;; "verfozn") - walltime="00:05:00" + walltime="00:10:00" ntasks=1 threads_per_task=1 tasks_per_node=1 diff --git a/workflow/applications/applications.py b/workflow/applications/applications.py index d6d7453c3c..a694129e38 100644 --- a/workflow/applications/applications.py +++ b/workflow/applications/applications.py @@ -3,7 +3,6 @@ from typing import Dict, List, Any from datetime import timedelta from hosts import Host -from pathlib import Path from wxflow import Configuration, to_timedelta from abc import ABC, ABCMeta, abstractmethod @@ -32,57 +31,50 @@ def __init__(self, conf: Configuration) -> None: self.scheduler = Host().scheduler - # Save the configuration so we can source the config files when - # determining task resources - self.conf = conf + base = conf.parse_config('config.base') - _base = self.conf.parse_config('config.base') - # Define here so the child __init__ functions can use it; will - # be overwritten later during _init_finalize(). 
- self._base = _base - - self.mode = _base['MODE'] + self.mode = base['MODE'] if self.mode not in self.VALID_MODES: - raise NotImplementedError(f'{self.mode} is not a valid application mode.\n' + - 'Valid application modes are:\n' + - f'{", ".join(self.VALID_MODES)}') - - self.net = _base['NET'] - self.model_app = _base.get('APP', 'ATM') - self.do_atm = _base.get('DO_ATM', True) - self.do_wave = _base.get('DO_WAVE', False) - self.do_wave_bnd = _base.get('DOBNDPNT_WAVE', False) - self.do_ocean = _base.get('DO_OCN', False) - self.do_ice = _base.get('DO_ICE', False) - self.do_aero = _base.get('DO_AERO', False) - self.do_prep_obs_aero = _base.get('DO_PREP_OBS_AERO', False) - self.do_bufrsnd = _base.get('DO_BUFRSND', False) - self.do_gempak = _base.get('DO_GEMPAK', False) - self.do_awips = _base.get('DO_AWIPS', False) - self.do_verfozn = _base.get('DO_VERFOZN', True) - self.do_verfrad = _base.get('DO_VERFRAD', True) - self.do_vminmon = _base.get('DO_VMINMON', True) - self.do_tracker = _base.get('DO_TRACKER', True) - self.do_genesis = _base.get('DO_GENESIS', True) - self.do_genesis_fsu = _base.get('DO_GENESIS_FSU', False) - self.do_metp = _base.get('DO_METP', False) - self.do_upp = not _base.get('WRITE_DOPOST', True) - self.do_goes = _base.get('DO_GOES', False) - self.do_mos = _base.get('DO_MOS', False) - self.do_extractvars = _base.get('DO_EXTRACTVARS', False) - - self.do_hpssarch = _base.get('HPSSARCH', False) - - self.nens = _base.get('NMEM_ENS', 0) - self.fcst_segments = _base.get('FCST_SEGMENTS', None) + raise NotImplementedError(f'{self.mode} is not a valid application mode.\n' + f'Valid application modes are:\n' + f'{", ".join(self.VALID_MODES)}\n') + + self.net = base['NET'] + self.model_app = base.get('APP', 'ATM') + self.do_atm = base.get('DO_ATM', True) + self.do_wave = base.get('DO_WAVE', False) + self.do_wave_bnd = base.get('DOBNDPNT_WAVE', False) + self.do_ocean = base.get('DO_OCN', False) + self.do_ice = base.get('DO_ICE', False) + self.do_aero = base.get('DO_AERO', False) + self.do_prep_obs_aero = base.get('DO_PREP_OBS_AERO', False) + self.do_bufrsnd = base.get('DO_BUFRSND', False) + self.do_gempak = base.get('DO_GEMPAK', False) + self.do_awips = base.get('DO_AWIPS', False) + self.do_verfozn = base.get('DO_VERFOZN', True) + self.do_verfrad = base.get('DO_VERFRAD', True) + self.do_vminmon = base.get('DO_VMINMON', True) + self.do_tracker = base.get('DO_TRACKER', True) + self.do_genesis = base.get('DO_GENESIS', True) + self.do_genesis_fsu = base.get('DO_GENESIS_FSU', False) + self.do_metp = base.get('DO_METP', False) + self.do_upp = not base.get('WRITE_DOPOST', True) + self.do_goes = base.get('DO_GOES', False) + self.do_mos = base.get('DO_MOS', False) + self.do_extractvars = base.get('DO_EXTRACTVARS', False) + + self.do_hpssarch = base.get('HPSSARCH', False) + + self.nens = base.get('NMEM_ENS', 0) + self.fcst_segments = base.get('FCST_SEGMENTS', None) if not AppConfig.is_monotonic(self.fcst_segments): raise ValueError(f'Forecast segments do not increase monotonically: {",".join(self.fcst_segments)}') self.wave_runs = None if self.do_wave: - wave_run = _base.get('WAVE_RUN', 'BOTH').lower() + wave_run = base.get('WAVE_RUN', 'BOTH').lower() if wave_run in ['both']: self.wave_runs = ['gfs', 'gdas'] elif wave_run in ['gfs', 'gdas']: @@ -91,45 +83,52 @@ def __init__(self, conf: Configuration) -> None: self.aero_anl_runs = None self.aero_fcst_runs = None if self.do_aero: - aero_anl_run = _base.get('AERO_ANL_RUN', 'BOTH').lower() + aero_anl_run = base.get('AERO_ANL_RUN', 'BOTH').lower() if 
aero_anl_run in ['both']: self.aero_anl_runs = ['gfs', 'gdas'] elif aero_anl_run in ['gfs', 'gdas']: self.aero_anl_runs = [aero_anl_run] - aero_fcst_run = _base.get('AERO_FCST_RUN', None).lower() + aero_fcst_run = base.get('AERO_FCST_RUN', None).lower() if aero_fcst_run in ['both']: self.aero_fcst_runs = ['gfs', 'gdas'] elif aero_fcst_run in ['gfs', 'gdas']: self.aero_fcst_runs = [aero_fcst_run] - def _init_finalize(self, *args): + def _init_finalize(self, conf: Configuration): print("Finalizing initialize") # Get a list of all possible config_files that would be part of the application self.configs_names = self._get_app_configs() - # Source the config_files for the jobs in the application - self.configs = self.source_configs() + # Source the config files for the jobs in the application without specifying a RUN + self.configs = {'_no_run': self._source_configs(conf)} - # Update the base config dictionary base on application - self.configs['base'] = self.update_base(self.configs['base']) + # Update the base config dictionary based on application + self.configs['_no_run']['base'] = self._update_base(self.configs['_no_run']['base']) # Save base in the internal state since it is often needed - self._base = self.configs['base'] + base = self.configs['_no_run']['base'] # Get more configuration options into the class attributes - self.gfs_cyc = self._base.get('gfs_cyc') + self.gfs_cyc = base.get('gfs_cyc') - # Finally get task names for the application + # Get task names for the application self.task_names = self.get_task_names() + # Finally, source the configuration files for each valid `RUN` + for run in self.task_names.keys(): + self.configs[run] = self._source_configs(conf, run=run, log=False) + + # Update the base config dictionary based on application and RUN + self.configs[run]['base'] = self._update_base(self.configs[run]['base']) + @abstractmethod def _get_app_configs(self): pass @staticmethod @abstractmethod - def update_base(base_in: Dict[str, Any]) -> Dict[str, Any]: + def _update_base(base_in: Dict[str, Any]) -> Dict[str, Any]: ''' Make final updates to base and return an updated copy @@ -146,7 +145,7 @@ def update_base(base_in: Dict[str, Any]) -> Dict[str, Any]: ''' pass - def source_configs(self, run: str = "gfs", log: bool = True) -> Dict[str, Any]: + def _source_configs(self, conf: Configuration, run: str = "gfs", log: bool = True) -> Dict[str, Any]: """ Given the configuration object used to initialize this application, source the configurations for each config and return a dictionary @@ -156,7 +155,7 @@ def source_configs(self, run: str = "gfs", log: bool = True) -> Dict[str, Any]: configs = dict() # Return config.base as well - configs['base'] = self.conf.parse_config('config.base') + configs['base'] = conf.parse_config('config.base', RUN=run) # Source the list of all config_files involved in the application for config in self.configs_names: @@ -180,12 +179,12 @@ def source_configs(self, run: str = "gfs", log: bool = True) -> Dict[str, Any]: files += [f'config.{config}'] print(f'sourcing config.{config}') if log else 0 - configs[config] = self.conf.parse_config(files, RUN=run) + configs[config] = conf.parse_config(files, RUN=run) return configs @abstractmethod - def get_task_names(self) -> Dict[str, List[str]]: + def get_task_names(self, run="_no_run") -> Dict[str, List[str]]: ''' Create a list of task names for each RUN valid for the configuation. 
diff --git a/workflow/applications/gefs.py b/workflow/applications/gefs.py index c1e001c171..1db3c51287 100644 --- a/workflow/applications/gefs.py +++ b/workflow/applications/gefs.py @@ -10,6 +10,9 @@ class GEFSAppConfig(AppConfig): def __init__(self, conf: Configuration): super().__init__(conf) + base = conf.parse_config('config.base') + self.run = base.get('RUN', 'gefs') + def _get_app_configs(self): """ Returns the config_files that are involved in gefs @@ -36,7 +39,7 @@ def _get_app_configs(self): return configs @staticmethod - def update_base(base_in): + def _update_base(base_in): base_out = base_in.copy() base_out['INTERVAL_GFS'] = AppConfig.get_gfs_interval(base_in['gfs_cyc']) @@ -81,4 +84,4 @@ def get_task_names(self): tasks += ['arch'] - return {f"{self._base['RUN']}": tasks} + return {f"{self.run}": tasks} diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py index e21828ceba..2af2f53b7a 100644 --- a/workflow/applications/gfs_cycled.py +++ b/workflow/applications/gfs_cycled.py @@ -11,20 +11,21 @@ class GFSCycledAppConfig(AppConfig): def __init__(self, conf: Configuration): super().__init__(conf) - self.do_hybvar = self._base.get('DOHYBVAR', False) - self.do_fit2obs = self._base.get('DO_FIT2OBS', True) - self.do_jediatmvar = self._base.get('DO_JEDIATMVAR', False) - self.do_jediatmens = self._base.get('DO_JEDIATMENS', False) - self.do_jediocnvar = self._base.get('DO_JEDIOCNVAR', False) - self.do_jedisnowda = self._base.get('DO_JEDISNOWDA', False) - self.do_mergensst = self._base.get('DO_MERGENSST', False) - self.do_vrfy_oceanda = self._base.get('DO_VRFY_OCEANDA', False) + base = conf.parse_config('config.base') + self.do_hybvar = base.get('DOHYBVAR', False) + self.do_fit2obs = base.get('DO_FIT2OBS', True) + self.do_jediatmvar = base.get('DO_JEDIATMVAR', False) + self.do_jediatmens = base.get('DO_JEDIATMENS', False) + self.do_jediocnvar = base.get('DO_JEDIOCNVAR', False) + self.do_jedisnowda = base.get('DO_JEDISNOWDA', False) + self.do_mergensst = base.get('DO_MERGENSST', False) + self.do_vrfy_oceanda = base.get('DO_VRFY_OCEANDA', False) self.lobsdiag_forenkf = False self.eupd_runs = None if self.do_hybvar: - self.lobsdiag_forenkf = self._base.get('lobsdiag_forenkf', False) - eupd_run = self._base.get('EUPD_CYC', 'gdas').lower() + self.lobsdiag_forenkf = base.get('lobsdiag_forenkf', False) + eupd_run = base.get('EUPD_CYC', 'gdas').lower() if eupd_run in ['both']: self.eupd_runs = ['gfs', 'gdas'] elif eupd_run in ['gfs', 'gdas']: @@ -125,7 +126,7 @@ def _get_app_configs(self): return configs @staticmethod - def update_base(base_in): + def _update_base(base_in): return GFSCycledAppConfig.get_gfs_cyc_dates(base_in) diff --git a/workflow/applications/gfs_forecast_only.py b/workflow/applications/gfs_forecast_only.py index 680588e4ca..93551ac0cc 100644 --- a/workflow/applications/gfs_forecast_only.py +++ b/workflow/applications/gfs_forecast_only.py @@ -10,6 +10,11 @@ class GFSForecastOnlyAppConfig(AppConfig): def __init__(self, conf: Configuration): super().__init__(conf) + base = conf.parse_config('config.base') + self.aero_fcst_run = base.get('AERO_FCST_RUN', 'BOTH').lower() + self.run = base.get('RUN', 'gfs') + self.exp_warm_start = base.get('EXP_WARM_START', False) + def _get_app_configs(self): """ Returns the config_files that are involved in the forecast-only app @@ -25,7 +30,7 @@ def _get_app_configs(self): configs += ['atmos_products'] if self.do_aero: - if not self._base['EXP_WARM_START']: + if not self.exp_warm_start: configs += ['aerosol_init'] 
if self.do_tracker: @@ -70,11 +75,10 @@ def _get_app_configs(self): return configs @staticmethod - def update_base(base_in): + def _update_base(base_in): base_out = base_in.copy() base_out['INTERVAL_GFS'] = AppConfig.get_gfs_interval(base_in['gfs_cyc']) - base_out['RUN'] = 'gfs' return base_out @@ -88,9 +92,9 @@ def get_task_names(self): tasks = ['stage_ic'] if self.do_aero: - aero_fcst_run = self._base.get('AERO_FCST_RUN', 'BOTH').lower() - if self._base['RUN'] in aero_fcst_run or aero_fcst_run == "both": - if not self._base['EXP_WARM_START']: + aero_fcst_run = self.aero_fcst_run + if self.run in aero_fcst_run or aero_fcst_run == "both": + if not self.exp_warm_start: tasks += ['aerosol_init'] if self.do_wave: @@ -153,4 +157,4 @@ def get_task_names(self): tasks += ['arch', 'cleanup'] # arch and cleanup **must** be the last tasks - return {f"{self._base['RUN']}": tasks} + return {f"{self.run}": tasks} diff --git a/workflow/rocoto/tasks.py b/workflow/rocoto/tasks.py index cacf530c35..f61abac7d8 100644 --- a/workflow/rocoto/tasks.py +++ b/workflow/rocoto/tasks.py @@ -1,6 +1,5 @@ #!/usr/bin/env python3 -import copy import numpy as np from applications.applications import AppConfig import rocoto.rocoto as rocoto @@ -39,15 +38,16 @@ class Tasks: def __init__(self, app_config: AppConfig, run: str) -> None: - self.app_config = copy.deepcopy(app_config) + self.app_config = app_config self.run = run - # Re-source the configs with RUN specified - print(f"Source configs with RUN={run}") - self._configs = self.app_config.source_configs(run=run, log=False) + + # Get the configs for the specified RUN + self._configs = self.app_config.configs[run] # Update the base config for the application - self._configs['base'] = self.app_config.update_base(self._configs['base']) - # Save dict_configs and base in the internal state (never know where it may be needed) + self._configs['base'] = self.app_config._update_base(self._configs['base']) + + # Save base in the internal state (never know where it may be needed) self._base = self._configs['base'] self.HOMEgfs = self._base['HOMEgfs'] @@ -134,7 +134,6 @@ def _template_to_rocoto_cycstring(self, template: str, subs_dict: dict = {}) -> def _get_forecast_hours(run, config, component='atmos') -> List[str]: # Make a local copy of the config to avoid modifying the original local_config = config.copy() - # Ocean/Ice components do not have a HF output option like the atmosphere if component in ['ocean', 'ice']: local_config['FHMAX_HF_GFS'] = 0 diff --git a/workflow/rocoto/workflow_xml.py b/workflow/rocoto/workflow_xml.py index d9ca4fb961..3ad7c4bd91 100644 --- a/workflow/rocoto/workflow_xml.py +++ b/workflow/rocoto/workflow_xml.py @@ -18,7 +18,8 @@ def __init__(self, app_config: AppConfig, rocoto_config: Dict) -> None: self._app_config = app_config self.rocoto_config = rocoto_config - self._base = self._app_config.configs['base'] + # Use the generic config.base (without RUN specified) + self._base = self._app_config.configs['_no_run']['base'] self.preamble = self._get_preamble() self.definitions = self._get_definitions() From 42122d3218d6ce8398bcdcc1796f0d9438c0b1f3 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Wed, 11 Sep 2024 15:35:17 +0000 Subject: [PATCH 08/71] Initial commit --- sorc/ufs_model.fd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/ufs_model.fd b/sorc/ufs_model.fd index fcc9f8461d..6a4e09e947 160000 --- a/sorc/ufs_model.fd +++ b/sorc/ufs_model.fd @@ -1 +1 @@ -Subproject commit fcc9f8461db5eafbfd1f080da61ea79156ca0145 +Subproject 
commit 6a4e09e94773ffa39ce7ab6a54a885efada91f21

From 653453f3cb815de06b0a8ff597c1d604797cc30c Mon Sep 17 00:00:00 2001
From: DavidNew-NOAA
Date: Wed, 11 Sep 2024 16:01:34 +0000
Subject: [PATCH 09/71] Update gdas hash

---
 sorc/gdas.cd | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sorc/gdas.cd b/sorc/gdas.cd
index faa95efb18..41fbc19da1 160000
--- a/sorc/gdas.cd
+++ b/sorc/gdas.cd
@@ -1 +1 @@
-Subproject commit faa95efb18f0f52acab2cf09b17f78406f9b48b1
+Subproject commit 41fbc19da18cc8663ad995e4ac6001d8e71effb1

From 8333e32e0c01a3c59ddb7172ad56d44fb482004d Mon Sep 17 00:00:00 2001
From: DavidNew-NOAA
Date: Wed, 11 Sep 2024 18:20:24 +0000
Subject: [PATCH 10/71] Update GDAS hash and namelist to read increment on native grid

---
 sorc/gdas.cd                 | 2 +-
 ush/forecast_postdet.sh      | 3 +++
 ush/forecast_predet.sh       | 1 +
 ush/parsing_namelists_FV3.sh | 1 +
 4 files changed, 6 insertions(+), 1 deletion(-)

diff --git a/sorc/gdas.cd b/sorc/gdas.cd
index 41fbc19da1..12014a43a8 160000
--- a/sorc/gdas.cd
+++ b/sorc/gdas.cd
@@ -1 +1 @@
-Subproject commit 41fbc19da18cc8663ad995e4ac6001d8e71effb1
+Subproject commit 12014a43a8f1ca4309e46aacda74a5d7b4d9dbeb
diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh
index d13cb0df0c..3c178933f0 100755
--- a/ush/forecast_postdet.sh
+++ b/ush/forecast_postdet.sh
@@ -94,6 +94,7 @@ FV3_postdet() {
       inc_files=("atminc.nc")
       read_increment=".true."
       res_latlon_dynamics="atminc.nc"
+      increment_file_on_native_grid=".true."
     fi
     local increment_file
     for inc_file in "${inc_files[@]}"; do
@@ -161,6 +162,7 @@ EOF
       inc_files=("atminc.nc")
       read_increment=".true."
       res_latlon_dynamics="atminc.nc"
+      increment_file_on_native_grid=".true."
       if [[ "${REPLAY_ICS:-NO}" == "YES" ]]; then
         IAU_FHROT=${half_window}  # Replay ICs start at the end of the assimilation window
         # Control member has no perturbation
@@ -168,6 +170,7 @@ EOF
         inc_files=()
         read_increment=".false."
         res_latlon_dynamics='""'
+        increment_file_on_native_grid=".true."
       fi
     fi
   fi
diff --git a/ush/forecast_predet.sh b/ush/forecast_predet.sh
index 9e08a12dd8..b83b5b37b2 100755
--- a/ush/forecast_predet.sh
+++ b/ush/forecast_predet.sh
@@ -362,6 +362,7 @@ FV3_predet(){
   warm_start=".false."
   read_increment=".false."
   res_latlon_dynamics='""'
+  increment_file_on_native_grid=".true."

   # Stochastic Physics Options
   do_skeb=".false."
diff --git a/ush/parsing_namelists_FV3.sh b/ush/parsing_namelists_FV3.sh index 617ecff719..84b467a91f 100755 --- a/ush/parsing_namelists_FV3.sh +++ b/ush/parsing_namelists_FV3.sh @@ -180,6 +180,7 @@ cat > input.nml < Date: Wed, 11 Sep 2024 18:26:53 +0000 Subject: [PATCH 11/71] Saving progress --- parm/config/gfs/config.calcanl | 2 + ush/python/pygfs/task/calcanl.py | 72 ++++++++++---------------------- 2 files changed, 24 insertions(+), 50 deletions(-) diff --git a/parm/config/gfs/config.calcanl b/parm/config/gfs/config.calcanl index c719b1bc4c..644069f2e6 100644 --- a/parm/config/gfs/config.calcanl +++ b/parm/config/gfs/config.calcanl @@ -18,4 +18,6 @@ else export CASE_ANL=${CASE} fi +export JEDI_FIX_YAML="${PARMgfs}/gdas/atm_jedi_fix.yaml.j2" + echo "END: config.calcanl" diff --git a/ush/python/pygfs/task/calcanl.py b/ush/python/pygfs/task/calcanl.py index 3eeb3ccaaf..b05ae78601 100644 --- a/ush/python/pygfs/task/calcanl.py +++ b/ush/python/pygfs/task/calcanl.py @@ -5,7 +5,7 @@ from pprint import pformat import os from pygfs.jedi import Jedi -from wxflow import add_to_datetime, AttrDict, FileHandler, logit, Task, save_as_yaml, to_timedelta +from wxflow import add_to_datetime, AttrDict, FileHandler, logit, parse_j2yaml, Task, save_as_yaml, to_timedelta logger = getLogger(__name__.split('.')[-1]) @@ -56,66 +56,38 @@ def initialize_jedi(self) -> None: logger.debug(f"Writing JEDI YAML config to: {self.jedi.yaml}") save_as_yaml(self.jedi.config, self.jedi.yaml) + # stage fix files + if not os.path.isdir(self.task_config.DATA + 'fv3jedi'): + logger.info(f"Staging JEDI fix files from {self.task_config.JEDI_FIX_YAML}") + jedi_fix_dict = parse_j2yaml(self.task_config.JEDI_FIX_YAML, self.task_config) + FileHandler(jedi_fix_dict).sync() + logger.debug(f"JEDI fix files:\n{pformat(jedi_fix_dict)}") + # link JEDI executable logger.info(f"Linking JEDI executable {self.task_config.JEDIEXE} to {self.jedi.exe}") self.jedi.link_exe(self.task_config) @logit(logger) def initialize(self) -> None: - logger.info('calcanl_gfs beginning at: ', datetime.datetime.utcnow()) - # Initialize dictionary used to construct Filehandler fh_dict = {'mkdir': [], 'copy': []} - logger.info(f"Linking JEDI executable {self.task_config.JEDIEXE} to {self.jedi.exe}") - self.jedi.link_exe(self.task_config) - # Initialize FileHandler to make directories and copy files - if self.task_config.DOIAU and self.task_config.l4densvar and self.task_config.lwrite4danl: - - for fh in self.task_config.IAUFHRS: - if fh == 6: - CalcAnlDir = self.task_config.DATA + '/calcanl_' + format(fh, '02') - - if not os.path.exists(CalcAnlDir): - fh_dict['mkdir'].append(CalcAnlDir) - fh_dict['copy'].append([self.task_config.DATA + '/siginc.nc', - CalcAnlDir + '/siginc.nc.06']) - fh_dict['copy'].append([self.task_config.DATA + '/sigf06', - CalcAnlDir + '/ges.06']) - fh_dict['copy'].append([self.task_config.DATA + '/siganl', - CalcAnlDir + '/anl.06']) - else: - if os.path.isfile('sigi' + format(fh, '02') + '.nc'): - CalcAnlDir = self.task_config.DATA + '/calcanl_' + format(fh, '02') - CalcAnlDir6 = self.task_config.DATA + '/calcanl_' + format(6, '02') - - if not os.path.exists(CalcAnlDir): - fh_dict['mkdir'].append(CalcAnlDir) - if not os.path.exists(CalcAnlDir6): - fh_dict['mkdir'].append(CalcAnlDir6) - fh_dict['copy'].append([self.task_config.COM_ATMOS_ANALYSIS + '/' + self.task_config.APREFIX + 'atma' + format(fh, '03') + '.nc', - CalcAnlDir6 + '/anl.' 
+ format(fh, '02')]) - fh_dict['copy'].append([self.task_config.DATA + '/siga' + format(fh, '02'), - CalcAnlDir6 + '/anl.' + format(fh, '02')]) - fh_dict['copy'].append([self.task_config.DATA + '/sigi' + format(fh, '02') + '.nc', - CalcAnlDir + '/siginc.nc.' + format(fh, '02')]) - fh_dict['copy'].append([CalcAnlDir6 + '/inc.fullres.' + format(fh, '02'), - CalcAnlDir + '/inc.fullres.' + format(fh, '02')]) - fh_dict['copy'].append([self.task_config.DATA + '/sigf' + format(fh, '02'), - CalcAnlDir6 + '/ges.' + format(fh, '02')]) - fh_dict['copy'].append([self.task_config.DATA + '/sigf' + format(fh, '02'), - CalcAnlDir + '/ges.' + format(fh, '02')]) - else: - CalcAnlDir = self.task_config.DATA + '/calcanl_' + format(6, '02') - - if not os.path.exists(CalcAnlDir): - fh_dict['mkdir'].append(CalcAnlDir) - fh_dict['copy'].append([self.task_config.COM_ATMOS_ANALYSIS + '/' + self.task_config.APREFIX + 'atminc006.nc', - CalcAnlDir + '/siginc.nc.06']) - fh_dict['copy'].append([self.task_config.COM_ATMOS_HISTORY_PREV + '/' + self.task_config.GPREFIX + 'cubed_sphere_grid_atmf006.nc' - CalcAnlDir + '/ges.06']) + for fh in self.task_config.IAUFHRS: + CalcAnlDir = self.task_config.DATA + '/calcanl_' + format(fh, '02') + fh_dict['mkdir'].append(CalcAnlDir) + + if fh == 6: + fh_dict['copy'].append([self.task_config.COM_ATMOS_ANALYSIS + '/' + self.task_config.APREFIX + 'atminc.nc', + CalcAnlDir + '/siginc.nc.06']) + fh_dict['copy'].append([self.task_config.COM_ATMOS_HISTORY_PREV + '/' + self.task_config.GPREFIX + 'cubed_sphere_grid_atmf006.nc', + CalcAnlDir + '/ges.06']) + else: + fh_dict['copy'].append([self.task_config.COM_ATMOS_ANALYSIS + '/' + self.task_config.APREFIX + '/atmi' + format(fh, '02') + '.nc', + CalcAnlDir + '/siginc.nc.' + format(fh, '02')]) + fh_dict['copy'].append([self.task_config.COM_ATMOS_HISTORY_PREV + '/' + self.task_config.GPREFIX + 'cubed_sphere_grid_atmf' + format(fh, '02'), + CalcAnlDir + '/ges.' 
+ format(fh, '02')]) # Stage files FileHandler(fh_dict).sync() From bfc06c2380b4145036d2daa263999f63b3624832 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Wed, 11 Sep 2024 19:06:36 +0000 Subject: [PATCH 12/71] Update GDAS hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 12014a43a8..452bcf7614 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 12014a43a8f1ca4309e46aacda74a5d7b4d9dbeb +Subproject commit 452bcf7614944c1e7395894ff981d69ebc5822cb From dc59de494056d5381bca02821faf24f2e8d51a5a Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Wed, 11 Sep 2024 19:11:01 +0000 Subject: [PATCH 13/71] Update GDAS hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 452bcf7614..45b20300dd 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 452bcf7614944c1e7395894ff981d69ebc5822cb +Subproject commit 45b20300dd7eb06163ddab34018741b7e74a8b32 From 13e35d0c529b26917dc4bef9836d8af28c41dedd Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Wed, 11 Sep 2024 20:23:03 +0000 Subject: [PATCH 14/71] Update GDAS hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 45b20300dd..f1869534b1 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 45b20300dd7eb06163ddab34018741b7e74a8b32 +Subproject commit f1869534b1a8ec5a7c04ed3d3bf93114acf51b10 From ac8d31d9afb84626cb2941ddc3cc4142438ec82a Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Wed, 11 Sep 2024 21:08:29 +0000 Subject: [PATCH 15/71] Update GDAS hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index f1869534b1..87c5c3c3ab 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit f1869534b1a8ec5a7c04ed3d3bf93114acf51b10 +Subproject commit 87c5c3c3ab76c3d65c87248cf99241c7b71724e7 From 84723c898b5cb22cc75d020009d2ba1769817a22 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Thu, 12 Sep 2024 14:30:17 +0000 Subject: [PATCH 16/71] Update GDAS hash and update finalize jobs in atmanl and atmensanl --- sorc/gdas.cd | 2 +- ush/python/pygfs/task/atm_analysis.py | 15 +++++++++------ ush/python/pygfs/task/atmens_analysis.py | 17 +++++++++-------- 3 files changed, 19 insertions(+), 15 deletions(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 87c5c3c3ab..1b1390723c 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 87c5c3c3ab76c3d65c87248cf99241c7b71724e7 +Subproject commit 1b1390723cc57bca466c777e9641f703e59db5d4 diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index 8d340a5b73..8fad5366f7 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -301,12 +301,15 @@ def finalize(self) -> None: logger.info("Copy UFS model readable atm increment file") cdate = to_fv3time(self.task_config.current_cycle) cdate_inc = cdate.replace('.', '_') - src = os.path.join(self.task_config.DATA, 'anl', f"atminc.{cdate_inc}z.nc4") - dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f'{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atminc.nc') - logger.debug(f"Copying {src} to {dest}") - inc_copy = { - 'copy': [[src, dest]] - } + inc_copy = {'copy': []} + for itile in range(6): + src = os.path.join(self.task_config.DATA, 'anl', f"atminc.{cdate_inc}z.tile{itile+1}.nc4") + dest = 
os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f'{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atminc.tile{itile+1}.nc') + inc_copy['copy'].append([src, dest]) + + # copy increments + src_list,dest_list = zip(*inc_copy['copy']) + logger.debug(f"Copying {src_list}\nto {dest_list}") FileHandler(inc_copy).sync() def clean(self): diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index 55e72702b1..5a563e57de 100644 --- a/ush/python/pygfs/task/atmens_analysis.py +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -268,14 +268,15 @@ def finalize(self) -> None: # create output path for member analysis increment tmpl_inc_dict['MEMDIR'] = memchar incdir = Template.substitute_structure(template_inc, TemplateConstants.DOLLAR_CURLY_BRACE, tmpl_inc_dict.get) - src = os.path.join(self.task_config.DATA, 'anl', memchar, f"atminc.{cdate_inc}z.nc4") - dest = os.path.join(incdir, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atminc.nc") - - # copy increment - logger.debug(f"Copying {src} to {dest}") - inc_copy = { - 'copy': [[src, dest]] - } + inc_copy = {'copy': []} + for itile in range(6): + src = os.path.join(self.task_config.DATA, 'anl', memchar, f"atminc.{cdate_inc}z.{itile+1}.nc4") + dest = os.path.join(incdir, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atminc.tile{itile+1}.nc") + inc_copy['copy'].append([src, dest]) + src_list,dest_list = zip(*inc_copy['copy']) + + # copy increments + logger.debug(f"Copying {src_list}\nto {dest_list}") FileHandler(inc_copy).sync() def clean(self): From 61d201aaf233f3c003bfab1c4aabccdefb022bd1 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Thu, 12 Sep 2024 14:35:53 +0000 Subject: [PATCH 17/71] pynorms --- ush/python/pygfs/task/atm_analysis.py | 2 +- ush/python/pygfs/task/atmens_analysis.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index 8fad5366f7..d443bb0862 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -308,7 +308,7 @@ def finalize(self) -> None: inc_copy['copy'].append([src, dest]) # copy increments - src_list,dest_list = zip(*inc_copy['copy']) + src_list, dest_list = zip(*inc_copy['copy']) logger.debug(f"Copying {src_list}\nto {dest_list}") FileHandler(inc_copy).sync() diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index 5a563e57de..c209359d62 100644 --- a/ush/python/pygfs/task/atmens_analysis.py +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -273,8 +273,8 @@ def finalize(self) -> None: src = os.path.join(self.task_config.DATA, 'anl', memchar, f"atminc.{cdate_inc}z.{itile+1}.nc4") dest = os.path.join(incdir, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atminc.tile{itile+1}.nc") inc_copy['copy'].append([src, dest]) - src_list,dest_list = zip(*inc_copy['copy']) - + src_list, dest_list = zip(*inc_copy['copy']) + # copy increments logger.debug(f"Copying {src_list}\nto {dest_list}") FileHandler(inc_copy).sync() From 1de0c626ba6db003797c5a8d5b385119b8a98e04 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Thu, 12 Sep 2024 14:42:02 +0000 Subject: [PATCH 18/71] Update GDAS hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index faa95efb18..07e65283f4 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit faa95efb18f0f52acab2cf09b17f78406f9b48b1 +Subproject commit 
07e65283f45e910115e240ea6466bc190232fcbf From 19e6f1c06c1458a608777a6ebfc17b4d95d11310 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Thu, 12 Sep 2024 14:42:44 +0000 Subject: [PATCH 19/71] Update GDAS hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 40523d9982..07e65283f4 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 40523d998284c145b9fc3873417bc774f70b77f6 +Subproject commit 07e65283f45e910115e240ea6466bc190232fcbf From 89e8dd48273f38d49a35ab231170f2ac2b9bdd21 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Thu, 12 Sep 2024 14:45:40 +0000 Subject: [PATCH 20/71] pynorms --- ush/python/pygfs/task/calcanl.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ush/python/pygfs/task/calcanl.py b/ush/python/pygfs/task/calcanl.py index b05ae78601..8d728c5674 100644 --- a/ush/python/pygfs/task/calcanl.py +++ b/ush/python/pygfs/task/calcanl.py @@ -62,7 +62,7 @@ def initialize_jedi(self) -> None: jedi_fix_dict = parse_j2yaml(self.task_config.JEDI_FIX_YAML, self.task_config) FileHandler(jedi_fix_dict).sync() logger.debug(f"JEDI fix files:\n{pformat(jedi_fix_dict)}") - + # link JEDI executable logger.info(f"Linking JEDI executable {self.task_config.JEDIEXE} to {self.jedi.exe}") self.jedi.link_exe(self.task_config) @@ -77,12 +77,12 @@ def initialize(self) -> None: for fh in self.task_config.IAUFHRS: CalcAnlDir = self.task_config.DATA + '/calcanl_' + format(fh, '02') fh_dict['mkdir'].append(CalcAnlDir) - + if fh == 6: fh_dict['copy'].append([self.task_config.COM_ATMOS_ANALYSIS + '/' + self.task_config.APREFIX + 'atminc.nc', CalcAnlDir + '/siginc.nc.06']) fh_dict['copy'].append([self.task_config.COM_ATMOS_HISTORY_PREV + '/' + self.task_config.GPREFIX + 'cubed_sphere_grid_atmf006.nc', - CalcAnlDir + '/ges.06']) + CalcAnlDir + '/ges.06']) else: fh_dict['copy'].append([self.task_config.COM_ATMOS_ANALYSIS + '/' + self.task_config.APREFIX + '/atmi' + format(fh, '02') + '.nc', CalcAnlDir + '/siginc.nc.' + format(fh, '02')]) From f123d9a96d939710006437ffacb8a218d9ef6375 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Fri, 13 Sep 2024 01:48:36 +0000 Subject: [PATCH 21/71] Stage multifile (FMS) increments and update GDAS hash --- sorc/gdas.cd | 2 +- ush/forecast_postdet.sh | 16 +++++++++++----- 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 1b1390723c..1e3c90c826 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 1b1390723cc57bca466c777e9641f703e59db5d4 +Subproject commit 1e3c90c826dcd5e3992a473f25b0a0710bbd86dd diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh index 3c178933f0..873e06584e 100755 --- a/ush/forecast_postdet.sh +++ b/ush/forecast_postdet.sh @@ -94,8 +94,8 @@ FV3_postdet() { inc_files=("atminc.nc") read_increment=".true." res_latlon_dynamics="atminc.nc" - increment_file_on_native_grid=".true." fi + increment_file_on_native_grid=".false." local increment_file for inc_file in "${inc_files[@]}"; do increment_file="${COMIN_ATMOS_INPUT}/${RUN}.t${cyc}z.${inc_file}" @@ -159,10 +159,16 @@ EOF delimiter="," done else # "${DOIAU}" == "NO" - inc_files=("atminc.nc") read_increment=".true." - res_latlon_dynamics="atminc.nc" - increment_file_on_native_grid=".true." + if [[ "${DO_JEDIATMVAR:-NO}" ]]; then + inc_files=("atminc.tile1.nc" "atminc.tile2.nc" "atminc.tile3.nc" "atminc.tile4.nc" "atminc.tile5.nc" "atminc.tile6.nc") + res_latlon_dynamics="atminc" + increment_file_on_native_grid=".true." 
+ else + inc_files=("atminc.nc") + res_latlon_dynamics="atminc.nc" + increment_file_on_native_grid=".false." + fi if [[ "${REPLAY_ICS:-NO}" == "YES" ]]; then IAU_FHROT=${half_window} # Replay ICs start at the end of the assimilation window # Control member has no perturbation @@ -170,8 +176,8 @@ EOF inc_files=() read_increment=".false." res_latlon_dynamics='""' - increment_file_on_native_grid=".true." fi + increment_file_on_native_grid=".false." fi fi From 0430bef578663818ee4b77cf847245e00199fb3b Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Fri, 13 Sep 2024 01:53:41 +0000 Subject: [PATCH 22/71] Update GDAS hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 1e3c90c826..fd7410a2b6 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 1e3c90c826dcd5e3992a473f25b0a0710bbd86dd +Subproject commit fd7410a2b6cb70e58227efdfefbd972580c71323 From ad27e15b2c34f6880f98ff01dbc2b144d7c41902 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Wed, 11 Sep 2024 19:06:36 +0000 Subject: [PATCH 23/71] Update GDAS hash Update GDAS hash Update GDAS hash Update GDAS hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 12014a43a8..87c5c3c3ab 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 12014a43a8f1ca4309e46aacda74a5d7b4d9dbeb +Subproject commit 87c5c3c3ab76c3d65c87248cf99241c7b71724e7 From 07d1ffff6835d1a94ac28207552f41b08546accf Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Thu, 12 Sep 2024 14:30:17 +0000 Subject: [PATCH 24/71] Update GDAS hash and update finalize jobs in atmanl and atmensanl --- sorc/gdas.cd | 2 +- ush/python/pygfs/task/atm_analysis.py | 15 +++++++++------ ush/python/pygfs/task/atmens_analysis.py | 17 +++++++++-------- 3 files changed, 19 insertions(+), 15 deletions(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 87c5c3c3ab..1b1390723c 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 87c5c3c3ab76c3d65c87248cf99241c7b71724e7 +Subproject commit 1b1390723cc57bca466c777e9641f703e59db5d4 diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index 8d340a5b73..8fad5366f7 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -301,12 +301,15 @@ def finalize(self) -> None: logger.info("Copy UFS model readable atm increment file") cdate = to_fv3time(self.task_config.current_cycle) cdate_inc = cdate.replace('.', '_') - src = os.path.join(self.task_config.DATA, 'anl', f"atminc.{cdate_inc}z.nc4") - dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f'{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atminc.nc') - logger.debug(f"Copying {src} to {dest}") - inc_copy = { - 'copy': [[src, dest]] - } + inc_copy = {'copy': []} + for itile in range(6): + src = os.path.join(self.task_config.DATA, 'anl', f"atminc.{cdate_inc}z.tile{itile+1}.nc4") + dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f'{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atminc.tile{itile+1}.nc') + inc_copy['copy'].append([src, dest]) + + # copy increments + src_list,dest_list = zip(*inc_copy['copy']) + logger.debug(f"Copying {src_list}\nto {dest_list}") FileHandler(inc_copy).sync() def clean(self): diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index 55e72702b1..5a563e57de 100644 --- a/ush/python/pygfs/task/atmens_analysis.py +++ 
b/ush/python/pygfs/task/atmens_analysis.py
@@ -268,14 +268,15 @@ def finalize(self) -> None:
 # create output path for member analysis increment
 tmpl_inc_dict['MEMDIR'] = memchar
 incdir = Template.substitute_structure(template_inc, TemplateConstants.DOLLAR_CURLY_BRACE, tmpl_inc_dict.get)
- src = os.path.join(self.task_config.DATA, 'anl', memchar, f"atminc.{cdate_inc}z.nc4")
- dest = os.path.join(incdir, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atminc.nc")
-
- # copy increment
- logger.debug(f"Copying {src} to {dest}")
- inc_copy = {
- 'copy': [[src, dest]]
- }
+ inc_copy = {'copy': []}
+ for itile in range(6):
+ src = os.path.join(self.task_config.DATA, 'anl', memchar, f"atminc.{cdate_inc}z.{itile+1}.nc4")
+ dest = os.path.join(incdir, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atminc.tile{itile+1}.nc")
+ inc_copy['copy'].append([src, dest])
+ src_list,dest_list = zip(*inc_copy['copy'])
+
+ # copy increments
+ logger.debug(f"Copying {src_list}\nto {dest_list}")
 FileHandler(inc_copy).sync()

 def clean(self):

From 01166d72fdbb7f999d01c6fd761818e13b7e0854 Mon Sep 17 00:00:00 2001
From: David Huber <69919478+DavidHuber-NOAA@users.noreply.github.com>
Date: Thu, 12 Sep 2024 05:01:54 +0000
Subject: [PATCH 25/71] Add new UPP links to .gitignore (#2904)

This adds 3 missing links from the UPP into parm/ufs to .gitignore.

Resolves #2901
---
 .gitignore | 1 +
 sorc/link_workflow.sh | 7 +------
 2 files changed, 2 insertions(+), 6 deletions(-)

diff --git a/.gitignore b/.gitignore
index 83706de085..8fc6d0b20b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -79,6 +79,7 @@ parm/ufs/MOM_input_*.IN
 parm/ufs/MOM6_data_table.IN
 parm/ufs/ice_in.IN
 parm/ufs/ufs.configure.*.IN
+parm/ufs/post_itag_gfs
 parm/wafs

 # Ignore sorc and logs folders from externals
diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh
index 92404afc01..270a8bb1c9 100755
--- a/sorc/link_workflow.sh
+++ b/sorc/link_workflow.sh
@@ -213,12 +213,7 @@ declare -a ufs_templates=("model_configure.IN" "input_global_nest.nml.IN"\
 "ufs.configure.s2swa_esmf.IN" \
 "ufs.configure.leapfrog_atm_wav.IN" \
 "ufs.configure.leapfrog_atm_wav_esmf.IN" \
- "post_itag_gfs" \
- "postxconfig-NT-gfs.txt" \
- "postxconfig-NT-gfs_FH00.txt")
- # TODO: The above postxconfig files in the UFSWM are not the same as the ones in UPP
- # TODO: GEFS postxconfig files also need to be received from UFSWM
- # See forecast_predet.sh where the UPP versions are used. They will need to be replaced with these.
+ "post_itag_gfs")
 for file in "${ufs_templates[@]}"; do
 [[ -s "${file}" ]] && rm -f "${file}"
 ${LINK_OR_COPY} "${HOMEgfs}/sorc/ufs_model.fd/tests/parm/${file}" .
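
Note on the increment-handling hunks above: the finalize() changes in atm_analysis.py and atmens_analysis.py all build the same wxflow FileHandler request, one [src, dest] pair per cubed-sphere tile collected under a 'copy' key, which is then synced to COM. A minimal standalone sketch of that pattern follows; it is illustrative only. The helper name build_tile_increment_copies, the example paths, and the cycle values are hypothetical, and the wxflow FileHandler call itself is left as a comment since it requires the wxflow package.

import os

def build_tile_increment_copies(data_dir, com_dir, run, cyc, cdate_inc, ntiles=6):
    # Hypothetical helper mirroring the per-tile copy loop in finalize():
    # collect one [src, dest] pair per cubed-sphere tile in a
    # FileHandler-style dictionary under the 'copy' key.
    inc_copy = {'copy': []}
    for itile in range(ntiles):
        src = os.path.join(data_dir, 'anl', f"atminc.{cdate_inc}z.tile{itile + 1}.nc4")
        dest = os.path.join(com_dir, f"{run}.t{cyc:02d}z.atminc.tile{itile + 1}.nc")
        inc_copy['copy'].append([src, dest])
    return inc_copy

if __name__ == "__main__":
    # Hypothetical values, for illustration only.
    copies = build_tile_increment_copies("/path/to/DATA", "/path/to/COM/atmos_analysis",
                                         "gdas", 0, "20240912_000000")
    src_list, dest_list = zip(*copies['copy'])
    for s, d in zip(src_list, dest_list):
        print(f"{s} -> {d}")
    # In the workflow this dictionary is handed to wxflow:
    #   FileHandler(copies).sync()
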
From 3805262c74690bd8cbd3d0e8365167056c742fe2 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Thu, 12 Sep 2024 14:35:53 +0000 Subject: [PATCH 26/71] pynorms --- ush/python/pygfs/task/atm_analysis.py | 2 +- ush/python/pygfs/task/atmens_analysis.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index 8fad5366f7..d443bb0862 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -308,7 +308,7 @@ def finalize(self) -> None: inc_copy['copy'].append([src, dest]) # copy increments - src_list,dest_list = zip(*inc_copy['copy']) + src_list, dest_list = zip(*inc_copy['copy']) logger.debug(f"Copying {src_list}\nto {dest_list}") FileHandler(inc_copy).sync() diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index 5a563e57de..c209359d62 100644 --- a/ush/python/pygfs/task/atmens_analysis.py +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -273,8 +273,8 @@ def finalize(self) -> None: src = os.path.join(self.task_config.DATA, 'anl', memchar, f"atminc.{cdate_inc}z.{itile+1}.nc4") dest = os.path.join(incdir, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atminc.tile{itile+1}.nc") inc_copy['copy'].append([src, dest]) - src_list,dest_list = zip(*inc_copy['copy']) - + src_list, dest_list = zip(*inc_copy['copy']) + # copy increments logger.debug(f"Copying {src_list}\nto {dest_list}") FileHandler(inc_copy).sync() From c9f40d39ab2b1770ef44177f3406f159c7bf4dba Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Fri, 13 Sep 2024 01:48:36 +0000 Subject: [PATCH 27/71] Stage multifile (FMS) increments and update GDAS hash --- sorc/gdas.cd | 2 +- ush/forecast_postdet.sh | 16 +++++++++++----- 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 1b1390723c..1e3c90c826 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 1b1390723cc57bca466c777e9641f703e59db5d4 +Subproject commit 1e3c90c826dcd5e3992a473f25b0a0710bbd86dd diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh index 3c178933f0..873e06584e 100755 --- a/ush/forecast_postdet.sh +++ b/ush/forecast_postdet.sh @@ -94,8 +94,8 @@ FV3_postdet() { inc_files=("atminc.nc") read_increment=".true." res_latlon_dynamics="atminc.nc" - increment_file_on_native_grid=".true." fi + increment_file_on_native_grid=".false." local increment_file for inc_file in "${inc_files[@]}"; do increment_file="${COMIN_ATMOS_INPUT}/${RUN}.t${cyc}z.${inc_file}" @@ -159,10 +159,16 @@ EOF delimiter="," done else # "${DOIAU}" == "NO" - inc_files=("atminc.nc") read_increment=".true." - res_latlon_dynamics="atminc.nc" - increment_file_on_native_grid=".true." + if [[ "${DO_JEDIATMVAR:-NO}" ]]; then + inc_files=("atminc.tile1.nc" "atminc.tile2.nc" "atminc.tile3.nc" "atminc.tile4.nc" "atminc.tile5.nc" "atminc.tile6.nc") + res_latlon_dynamics="atminc" + increment_file_on_native_grid=".true." + else + inc_files=("atminc.nc") + res_latlon_dynamics="atminc.nc" + increment_file_on_native_grid=".false." + fi if [[ "${REPLAY_ICS:-NO}" == "YES" ]]; then IAU_FHROT=${half_window} # Replay ICs start at the end of the assimilation window # Control member has no perturbation @@ -170,8 +176,8 @@ EOF inc_files=() read_increment=".false." res_latlon_dynamics='""' - increment_file_on_native_grid=".true." fi + increment_file_on_native_grid=".false." 
fi fi From dc4cf6e3a2be6468fb3f642cd816fdddb6bbb25c Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Fri, 13 Sep 2024 01:53:41 +0000 Subject: [PATCH 28/71] Update GDAS hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 1e3c90c826..fd7410a2b6 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 1e3c90c826dcd5e3992a473f25b0a0710bbd86dd +Subproject commit fd7410a2b6cb70e58227efdfefbd972580c71323 From 7f2327b26d6f426aea15b66059a6df3cd55db0cc Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Fri, 13 Sep 2024 12:52:10 +0000 Subject: [PATCH 29/71] Fix bug and update GDAS hash --- sorc/gdas.cd | 2 +- ush/python/pygfs/task/atmens_analysis.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index fd7410a2b6..42601f12ff 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit fd7410a2b6cb70e58227efdfefbd972580c71323 +Subproject commit 42601f12ff5fdc5390db77effcae1b792ff5075d diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index c209359d62..a8987f0347 100644 --- a/ush/python/pygfs/task/atmens_analysis.py +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -270,12 +270,12 @@ def finalize(self) -> None: incdir = Template.substitute_structure(template_inc, TemplateConstants.DOLLAR_CURLY_BRACE, tmpl_inc_dict.get) inc_copy = {'copy': []} for itile in range(6): - src = os.path.join(self.task_config.DATA, 'anl', memchar, f"atminc.{cdate_inc}z.{itile+1}.nc4") + src = os.path.join(self.task_config.DATA, 'anl', memchar, f"atminc.{cdate_inc}z.tile{itile+1}.nc4") dest = os.path.join(incdir, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atminc.tile{itile+1}.nc") inc_copy['copy'].append([src, dest]) - src_list, dest_list = zip(*inc_copy['copy']) # copy increments + src_list, dest_list = zip(*inc_copy['copy']) logger.debug(f"Copying {src_list}\nto {dest_list}") FileHandler(inc_copy).sync() From 2d9b9b60896b8481db03b89ab1af8f5acf062223 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Fri, 13 Sep 2024 13:18:36 +0000 Subject: [PATCH 30/71] Update gdas hash and update calcanl.py --- sorc/gdas.cd | 2 +- ush/python/pygfs/task/calcanl.py | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 07e65283f4..5eb57a732c 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 07e65283f45e910115e240ea6466bc190232fcbf +Subproject commit 5eb57a732ce3646f514cfd1cd70abc2e93f10f5f diff --git a/ush/python/pygfs/task/calcanl.py b/ush/python/pygfs/task/calcanl.py index 8d728c5674..7bd959b6c6 100644 --- a/ush/python/pygfs/task/calcanl.py +++ b/ush/python/pygfs/task/calcanl.py @@ -80,14 +80,14 @@ def initialize(self) -> None: if fh == 6: fh_dict['copy'].append([self.task_config.COM_ATMOS_ANALYSIS + '/' + self.task_config.APREFIX + 'atminc.nc', - CalcAnlDir + '/siginc.nc.06']) + CalcAnlDir + '/siginc.06.nc']) fh_dict['copy'].append([self.task_config.COM_ATMOS_HISTORY_PREV + '/' + self.task_config.GPREFIX + 'cubed_sphere_grid_atmf006.nc', - CalcAnlDir + '/ges.06']) + CalcAnlDir + '/ges.06.nc']) else: fh_dict['copy'].append([self.task_config.COM_ATMOS_ANALYSIS + '/' + self.task_config.APREFIX + '/atmi' + format(fh, '02') + '.nc', - CalcAnlDir + '/siginc.nc.' + format(fh, '02')]) + CalcAnlDir + '/siginc.' 
+ format(fh, '02') + '.nc']) fh_dict['copy'].append([self.task_config.COM_ATMOS_HISTORY_PREV + '/' + self.task_config.GPREFIX + 'cubed_sphere_grid_atmf' + format(fh, '02'), - CalcAnlDir + '/ges.' + format(fh, '02')]) + CalcAnlDir + '/ges.' + format(fh, '02') + '.nc']) # Stage files FileHandler(fh_dict).sync() From 93076f590eae2a3be77e3f3fae82e80ddf3cc38e Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Mon, 16 Sep 2024 13:26:43 +0000 Subject: [PATCH 31/71] Update UFS hash --- sorc/ufs_model.fd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/ufs_model.fd b/sorc/ufs_model.fd index fcc9f8461d..6a4e09e947 160000 --- a/sorc/ufs_model.fd +++ b/sorc/ufs_model.fd @@ -1 +1 @@ -Subproject commit fcc9f8461db5eafbfd1f080da61ea79156ca0145 +Subproject commit 6a4e09e94773ffa39ce7ab6a54a885efada91f21 From eaaf1fd2523448e902a7e940b758db662b8de85d Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Tue, 17 Sep 2024 15:01:33 +0000 Subject: [PATCH 32/71] Update --- parm/config/gfs/config.calcanl | 3 +++ parm/config/gfs/config.resources | 12 ++++-------- sorc/gdas.cd | 2 +- ush/python/pygfs/task/calcanl.py | 18 ++++++++++-------- 4 files changed, 18 insertions(+), 17 deletions(-) diff --git a/parm/config/gfs/config.calcanl b/parm/config/gfs/config.calcanl index 644069f2e6..b2db1f427a 100644 --- a/parm/config/gfs/config.calcanl +++ b/parm/config/gfs/config.calcanl @@ -5,6 +5,9 @@ echo "BEGIN: config.calcanl" +export layout_x_calcanl=8 +export layout_y_calcanl=8 + # Get task specific resources . "${EXPDIR}/config.resources" calcanl diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources index f29db21f3c..e8da64b5ed 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -701,18 +701,14 @@ case ${step} in ;; "calcanl") + export layout_x=${layout_x_calcanl} + export layout_y=${layout_y_calcanl} + walltime="00:15:00" - ntasks=127 - export ntasks_calcanl="${ntasks}" + ntasks=$(( layout_x * layout_y * 6 )) threads_per_task=1 tasks_per_node=$(( max_tasks_per_node / threads_per_task )) - export threads_per_task_echgres_gdas=4 - export threads_per_task_echgres_gfs=12 export is_exclusive=True - memory="48GB" - if [[ "${CASE}" == "C384" || "${CASE}" == "C768" ]]; then - memory="${mem_node_max}" - fi ;; "analcalc") diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 49ddc0d526..9d863f0386 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 49ddc0d5264a8b28a95f74930891e19701b52a7f +Subproject commit 9d863f03860ad29620a1be2d7790c1b7fcb624ab diff --git a/ush/python/pygfs/task/calcanl.py b/ush/python/pygfs/task/calcanl.py index 7bd959b6c6..14f1b61d15 100644 --- a/ush/python/pygfs/task/calcanl.py +++ b/ush/python/pygfs/task/calcanl.py @@ -79,15 +79,17 @@ def initialize(self) -> None: fh_dict['mkdir'].append(CalcAnlDir) if fh == 6: - fh_dict['copy'].append([self.task_config.COM_ATMOS_ANALYSIS + '/' + self.task_config.APREFIX + 'atminc.nc', - CalcAnlDir + '/siginc.06.nc']) - fh_dict['copy'].append([self.task_config.COM_ATMOS_HISTORY_PREV + '/' + self.task_config.GPREFIX + 'cubed_sphere_grid_atmf006.nc', - CalcAnlDir + '/ges.06.nc']) + fh_dict['copy'].append([f"{self.task_config.COM_ATMOS_HISTORY_PREV}/{self.task_config.GPREFIX}cubed_sphere_grid_atmf006.nc", + f"{CalcAnlDir}/ges.06.nc"]) + for itile in range(6): + fh_dict['copy'].append([f"{self.task_config.COM_ATMOS_ANALYSIS}/{self.task_config.APREFIX}atminc.tile{itile+1}.nc", + f"{CalcAnlDir}/siginc.06.tile{itile+1}.nc"]) else: - 
fh_dict['copy'].append([self.task_config.COM_ATMOS_ANALYSIS + '/' + self.task_config.APREFIX + '/atmi' + format(fh, '02') + '.nc', - CalcAnlDir + '/siginc.' + format(fh, '02') + '.nc']) - fh_dict['copy'].append([self.task_config.COM_ATMOS_HISTORY_PREV + '/' + self.task_config.GPREFIX + 'cubed_sphere_grid_atmf' + format(fh, '02'), - CalcAnlDir + '/ges.' + format(fh, '02') + '.nc']) + fh_dict['copy'].append([f"{self.task_config.COM_ATMOS_HISTORY_PREV}/{self.task_config.GPREFIX}cubed_sphere_grid_atmf{format(fh, '02')}.nc", + f"{CalcAnlDir}/ges.{format(fh, '02')}.nc"]) + for itile in range(6): + fh_dict['copy'].append([f"{self.task_config.COM_ATMOS_ANALYSIS}/{self.task_config.APREFIX}/atmi{format(fh, '02')}.tile{itile+1}.nc", + f"{CalcAnlDir}/siginc.{format(fh, '02')}.tile{itile+1}.nc"]) # Stage files FileHandler(fh_dict).sync() From 0f419a01c5a2a53c110b63428931ac338323770a Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Tue, 17 Sep 2024 20:23:42 +0000 Subject: [PATCH 33/71] Update GDAS hash and save progress on calcanl.py --- sorc/gdas.cd | 2 +- ush/python/pygfs/task/calcanl.py | 24 ++++++++++++++++++++++-- 2 files changed, 23 insertions(+), 3 deletions(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 9d863f0386..5659d1d0cf 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 9d863f03860ad29620a1be2d7790c1b7fcb624ab +Subproject commit 5659d1d0cf0a01486ab35233c5f8ad0f3ef0a201 diff --git a/ush/python/pygfs/task/calcanl.py b/ush/python/pygfs/task/calcanl.py index 14f1b61d15..59c3c794ca 100644 --- a/ush/python/pygfs/task/calcanl.py +++ b/ush/python/pygfs/task/calcanl.py @@ -80,13 +80,18 @@ def initialize(self) -> None: if fh == 6: fh_dict['copy'].append([f"{self.task_config.COM_ATMOS_HISTORY_PREV}/{self.task_config.GPREFIX}cubed_sphere_grid_atmf006.nc", - f"{CalcAnlDir}/ges.06.nc"]) + f"{CalcAnlDir}/ges.atm.06.nc"]) + fh_dict['copy'].append([f"{self.task_config.COM_ATMOS_HISTORY_PREV}/{self.task_config.GPREFIX}cubed_sphere_grid_sfcf006.nc", + f"{CalcAnlDir}/ges.sfc.06.nc"]) for itile in range(6): fh_dict['copy'].append([f"{self.task_config.COM_ATMOS_ANALYSIS}/{self.task_config.APREFIX}atminc.tile{itile+1}.nc", f"{CalcAnlDir}/siginc.06.tile{itile+1}.nc"]) else: fh_dict['copy'].append([f"{self.task_config.COM_ATMOS_HISTORY_PREV}/{self.task_config.GPREFIX}cubed_sphere_grid_atmf{format(fh, '02')}.nc", - f"{CalcAnlDir}/ges.{format(fh, '02')}.nc"]) + f"{CalcAnlDir}/ges.atm.{format(fh, '02')}.nc"]) + fh_dict['copy'].append([f"{self.task_config.COM_ATMOS_HISTORY_PREV}/{self.task_config.GPREFIX}cubed_sphere_grid_sfcf{format(fh, '02')}.nc", + f"{CalcAnlDir}/ges.sfc.{format(fh, '02')}.nc"]) + for itile in range(6): fh_dict['copy'].append([f"{self.task_config.COM_ATMOS_ANALYSIS}/{self.task_config.APREFIX}/atmi{format(fh, '02')}.tile{itile+1}.nc", f"{CalcAnlDir}/siginc.{format(fh, '02')}.tile{itile+1}.nc"]) @@ -94,6 +99,21 @@ def initialize(self) -> None: # Stage files FileHandler(fh_dict).sync() + @logit(logger) + def finalize(self) -> None: + CalcAnlDir = self.task_config.DATA + '/calcanl_' + format(fh, '02') + + cdate = to_fv3time(self.task_config.current_cycle) + cdate_ = cdate.replace('.', '_') + + # Initialize dictionary used to construct Filehandler + fh_dict = {'mkdir': [], + 'copy': []} + + for fh in self.task_config.IAUFHRS: + fh_dict['copy'].append([f"{CalcAnlDir}/anl.{format(fh, '02')}.{cdate_}", + f"{self.task_config.COM_ATMOS_ANALYSIS}"] + @logit(logger) def execute(self, aprun_cmd: str) -> None: self.jedi.execute(self.task_config, aprun_cmd) From 
31cea0632bba342fdf11e9300b6d2712514d5182 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Wed, 18 Sep 2024 01:15:44 +0000 Subject: [PATCH 34/71] Update --- scripts/exglobal_atm_calc_analysis.py | 3 ++ sorc/gdas.cd | 2 +- ush/python/pygfs/task/calcanl.py | 48 ++++++++++++++------------- 3 files changed, 29 insertions(+), 24 deletions(-) diff --git a/scripts/exglobal_atm_calc_analysis.py b/scripts/exglobal_atm_calc_analysis.py index 671a7dadd6..96abdb776f 100755 --- a/scripts/exglobal_atm_calc_analysis.py +++ b/scripts/exglobal_atm_calc_analysis.py @@ -26,3 +26,6 @@ # Execute JEDI application CalcAnl.execute(config.APRUN_CALCANL) + + # Finalize + CalcAnl.finalize() diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 5659d1d0cf..1e9031a536 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 5659d1d0cf0a01486ab35233c5f8ad0f3ef0a201 +Subproject commit 1e9031a536145ac81d5ac12a18e8c6b750088a1d diff --git a/ush/python/pygfs/task/calcanl.py b/ush/python/pygfs/task/calcanl.py index 59c3c794ca..a37fa0f3ab 100644 --- a/ush/python/pygfs/task/calcanl.py +++ b/ush/python/pygfs/task/calcanl.py @@ -5,7 +5,7 @@ from pprint import pformat import os from pygfs.jedi import Jedi -from wxflow import add_to_datetime, AttrDict, FileHandler, logit, parse_j2yaml, Task, save_as_yaml, to_timedelta +from wxflow import add_to_datetime, AttrDict, FileHandler, logit, parse_j2yaml, Task, save_as_yaml, to_fv3time, to_timedelta logger = getLogger(__name__.split('.')[-1]) @@ -36,6 +36,7 @@ def __init__(self, config, yaml_name=None): 'ATM_WINDOW_LENGTH': f"PT{self.task_config.assim_freq}H", 'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", 'GPREFIX': f"gdas.t{self.task_config.previous_cycle.hour:02d}z.", + 'CalcAnlDir': lambda fh : f"{self.task_config.DATA}/calcanl_{format(fh, '02')}" } ) @@ -74,46 +75,47 @@ def initialize(self) -> None: 'copy': []} # Initialize FileHandler to make directories and copy files + hist_prefix = f"{self.task_config.COM_ATMOS_HISTORY_PREV}/{self.task_config.GPREFIX}" + anl_prefix = f"{self.task_config.COM_ATMOS_ANALYSIS}/{self.task_config.APREFIX}" for fh in self.task_config.IAUFHRS: - CalcAnlDir = self.task_config.DATA + '/calcanl_' + format(fh, '02') - fh_dict['mkdir'].append(CalcAnlDir) + fh_dict['mkdir'].append(self.task_config.CalcAnlDir(fh)) + fh_dict['copy'].append([f"{hist_prefix}cubed_sphere_grid_atmf{format(fh, '03')}.nc", + f"{self.task_config.CalcAnlDir(fh)}/ges.atm.{format(fh, '02')}.nc"]) + fh_dict['copy'].append([f"{hist_prefix}cubed_sphere_grid_sfcf{format(fh, '03')}.nc", + f"{self.task_config.CalcAnlDir(fh)}/ges.sfc.{format(fh, '02')}.nc"]) + if fh == 6: - fh_dict['copy'].append([f"{self.task_config.COM_ATMOS_HISTORY_PREV}/{self.task_config.GPREFIX}cubed_sphere_grid_atmf006.nc", - f"{CalcAnlDir}/ges.atm.06.nc"]) - fh_dict['copy'].append([f"{self.task_config.COM_ATMOS_HISTORY_PREV}/{self.task_config.GPREFIX}cubed_sphere_grid_sfcf006.nc", - f"{CalcAnlDir}/ges.sfc.06.nc"]) for itile in range(6): - fh_dict['copy'].append([f"{self.task_config.COM_ATMOS_ANALYSIS}/{self.task_config.APREFIX}atminc.tile{itile+1}.nc", - f"{CalcAnlDir}/siginc.06.tile{itile+1}.nc"]) + fh_dict['copy'].append([f"{anl_prefix}atminc.tile{itile+1}.nc", + f"{self.task_config.CalcAnlDir(fh)}/siginc.06.tile{itile+1}.nc"]) else: - fh_dict['copy'].append([f"{self.task_config.COM_ATMOS_HISTORY_PREV}/{self.task_config.GPREFIX}cubed_sphere_grid_atmf{format(fh, '02')}.nc", - f"{CalcAnlDir}/ges.atm.{format(fh, '02')}.nc"]) - 
fh_dict['copy'].append([f"{self.task_config.COM_ATMOS_HISTORY_PREV}/{self.task_config.GPREFIX}cubed_sphere_grid_sfcf{format(fh, '02')}.nc", - f"{CalcAnlDir}/ges.sfc.{format(fh, '02')}.nc"]) - for itile in range(6): - fh_dict['copy'].append([f"{self.task_config.COM_ATMOS_ANALYSIS}/{self.task_config.APREFIX}/atmi{format(fh, '02')}.tile{itile+1}.nc", - f"{CalcAnlDir}/siginc.{format(fh, '02')}.tile{itile+1}.nc"]) + fh_dict['copy'].append([f"{anl_prefix}/atmi{format(fh, '02')}.tile{itile+1}.nc", + f"{self.task_config.CalcAnlDir(fh)}/siginc.{format(fh, '02')}.tile{itile+1}.nc"]) # Stage files FileHandler(fh_dict).sync() @logit(logger) def finalize(self) -> None: - CalcAnlDir = self.task_config.DATA + '/calcanl_' + format(fh, '02') - - cdate = to_fv3time(self.task_config.current_cycle) - cdate_ = cdate.replace('.', '_') + cdate = to_fv3time(self.task_config.current_cycle).replace('.', '_') + anl_prefix = f"{self.task_config.COM_ATMOS_ANALYSIS}/{self.task_config.APREFIX}" # Initialize dictionary used to construct Filehandler fh_dict = {'mkdir': [], 'copy': []} for fh in self.task_config.IAUFHRS: - fh_dict['copy'].append([f"{CalcAnlDir}/anl.{format(fh, '02')}.{cdate_}", - f"{self.task_config.COM_ATMOS_ANALYSIS}"] - + if fh == 6: + fh_dict['copy'].append([f"{self.task_config.CalcAnlDir(fh)}/anl.{format(fh, '02')}.{cdate}z.nc4", + f"{anl_prefix}atmanl.nc"]) + else: + fh_dict['copy'].append([f"{self.task_config.CalcAnlDir(fh)}/anl.{format(fh, '02')}.{cdate}z.nc4", + f"{anl_prefix}atma{format(fh, '03')}.nc"]) + + FileHandler(fh_dict).sync() + @logit(logger) def execute(self, aprun_cmd: str) -> None: self.jedi.execute(self.task_config, aprun_cmd) From 8af11f2d1e926a41a96a11c51afa21c58d7b8603 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Thu, 19 Sep 2024 16:22:59 +0000 Subject: [PATCH 35/71] Update GDASApp hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 1e9031a536..fd2976345e 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 1e9031a536145ac81d5ac12a18e8c6b750088a1d +Subproject commit fd2976345ee6618269bd8b6aeea5f623db71322d From f3d3449f80896d91bdbb53d9d871cb72230f7a15 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Mon, 23 Sep 2024 15:32:38 +0000 Subject: [PATCH 36/71] Update --- sorc/gdas.cd | 2 +- ush/python/pygfs/task/calcanl.py | 4 ++++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index fd2976345e..76daa87d5e 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit fd2976345ee6618269bd8b6aeea5f623db71322d +Subproject commit 76daa87d5e47e89f1f6f94105877f66e56fa0262 diff --git a/ush/python/pygfs/task/calcanl.py b/ush/python/pygfs/task/calcanl.py index a37fa0f3ab..99348c5074 100644 --- a/ush/python/pygfs/task/calcanl.py +++ b/ush/python/pygfs/task/calcanl.py @@ -110,9 +110,13 @@ def finalize(self) -> None: if fh == 6: fh_dict['copy'].append([f"{self.task_config.CalcAnlDir(fh)}/anl.{format(fh, '02')}.{cdate}z.nc4", f"{anl_prefix}atmanl.nc"]) + fh_dict['copy'].append([f"{self.task_config.CalcAnlDir(fh)}/anl.ensres.{format(fh, '02')}.{cdate}z.nc4", + f"{anl_prefix}atmanl.ensres.nc"]) else: fh_dict['copy'].append([f"{self.task_config.CalcAnlDir(fh)}/anl.{format(fh, '02')}.{cdate}z.nc4", f"{anl_prefix}atma{format(fh, '03')}.nc"]) + fh_dict['copy'].append([f"{self.task_config.CalcAnlDir(fh)}/anl.ensres.{format(fh, '02')}.{cdate}z.nc4", + f"{anl_prefix}atma{format(fh, '03')}.ensres.nc"]) FileHandler(fh_dict).sync() From 
21b10da1ae7531daad980978b42f5c08be4242e7 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Mon, 23 Sep 2024 15:36:18 +0000 Subject: [PATCH 37/71] pynorms --- ush/python/pygfs/task/calcanl.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/ush/python/pygfs/task/calcanl.py b/ush/python/pygfs/task/calcanl.py index 99348c5074..621e07c78f 100644 --- a/ush/python/pygfs/task/calcanl.py +++ b/ush/python/pygfs/task/calcanl.py @@ -36,7 +36,7 @@ def __init__(self, config, yaml_name=None): 'ATM_WINDOW_LENGTH': f"PT{self.task_config.assim_freq}H", 'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", 'GPREFIX': f"gdas.t{self.task_config.previous_cycle.hour:02d}z.", - 'CalcAnlDir': lambda fh : f"{self.task_config.DATA}/calcanl_{format(fh, '02')}" + 'CalcAnlDir': lambda fh: f"{self.task_config.DATA}/calcanl_{format(fh, '02')}" } ) @@ -84,7 +84,7 @@ def initialize(self) -> None: f"{self.task_config.CalcAnlDir(fh)}/ges.atm.{format(fh, '02')}.nc"]) fh_dict['copy'].append([f"{hist_prefix}cubed_sphere_grid_sfcf{format(fh, '03')}.nc", f"{self.task_config.CalcAnlDir(fh)}/ges.sfc.{format(fh, '02')}.nc"]) - + if fh == 6: for itile in range(6): fh_dict['copy'].append([f"{anl_prefix}atminc.tile{itile+1}.nc", @@ -101,11 +101,11 @@ def initialize(self) -> None: def finalize(self) -> None: cdate = to_fv3time(self.task_config.current_cycle).replace('.', '_') anl_prefix = f"{self.task_config.COM_ATMOS_ANALYSIS}/{self.task_config.APREFIX}" - + # Initialize dictionary used to construct Filehandler fh_dict = {'mkdir': [], 'copy': []} - + for fh in self.task_config.IAUFHRS: if fh == 6: fh_dict['copy'].append([f"{self.task_config.CalcAnlDir(fh)}/anl.{format(fh, '02')}.{cdate}z.nc4", @@ -119,7 +119,7 @@ def finalize(self) -> None: f"{anl_prefix}atma{format(fh, '03')}.ensres.nc"]) FileHandler(fh_dict).sync() - + @logit(logger) def execute(self, aprun_cmd: str) -> None: self.jedi.execute(self.task_config, aprun_cmd) From 02b27b93e124fddc51950a7c052c7a23586e1703 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Mon, 23 Sep 2024 16:33:03 +0000 Subject: [PATCH 38/71] Add comment blocks to calcanl.py methods --- ush/python/pygfs/task/calcanl.py | 94 ++++++++++++++++++++++++++++++-- 1 file changed, 89 insertions(+), 5 deletions(-) diff --git a/ush/python/pygfs/task/calcanl.py b/ush/python/pygfs/task/calcanl.py index 621e07c78f..aef6956c6a 100644 --- a/ush/python/pygfs/task/calcanl.py +++ b/ush/python/pygfs/task/calcanl.py @@ -5,7 +5,10 @@ from pprint import pformat import os from pygfs.jedi import Jedi -from wxflow import add_to_datetime, AttrDict, FileHandler, logit, parse_j2yaml, Task, save_as_yaml, to_fv3time, to_timedelta +from wxflow import (AttrDict, FileHandler, Task, + add_to_datetime, to_fv3time, to_timedelta, + parse_j2yaml, save_as_yaml, + logit) logger = getLogger(__name__.split('.')[-1]) @@ -16,6 +19,24 @@ class CalcAnalysis(Task): """ @logit(logger, name="CalcAnalysis") def __init__(self, config, yaml_name=None): + """Constructor diagnostic atmospheric analysis calculation task + + This method will construct a diagnostic atmospheric analysis calculation task. 
+ This includes:
+ - extending the task_config attribute AttrDict to include parameters required for this task
+ - instantiate the Jedi attribute object
+
+ Parameters
+ ----------
+ config: Dict
+ dictionary object containing task configuration
+ yaml_name: str, optional
+ name of YAML file for JEDI configuration
+
+ Returns
+ ----------
+ None
+ """
 super().__init__(config)
 _res = int(self.task_config.CASE[1:])
@@ -48,6 +69,24 @@ def __init__(self, config, yaml_name=None):
 @logit(logger)
 def initialize_jedi(self) -> None:
+ """Initialize JEDI application
+
+ This method will initialize a JEDI application used in the diagnostic
+ atmospheric analysis computation task.
+ This includes:
+ - generating and saving JEDI YAML config
+ - staging the JEDI fix files
+ - linking the JEDI executable
+
+ Parameters
+ ----------
+ None
+
+ Returns
+ ----------
+ None
+ """
+
 # get JEDI-to-FV3 increment converter config and save to YAML file
 logger.info(f"Generating JEDI YAML config: {self.jedi.yaml}")
 self.jedi.set_config(self.task_config)
@@ -70,6 +109,22 @@ def initialize_jedi(self) -> None:
 @logit(logger)
 def initialize(self) -> None:
+ """Initialize the diagnostic atmospheric analysis computation task
+
+ This method will initialize the diagnostic atmospheric analysis computation task.
+ This includes:
+ - creating working directories for each forecast hour
+ - staging backgrounds and increments
+
+ Parameters
+ ----------
+ None
+
+ Returns
+ ----------
+ None
+ """
+
 # Initialize dictionary used to construct Filehandler
 fh_dict = {'mkdir': [],
 'copy': []}
@@ -97,8 +152,41 @@ def initialize(self) -> None:
 # Stage files
 FileHandler(fh_dict).sync()
+ @logit(logger)
+ def execute(self, aprun_cmd: str) -> None:
+ """Run JEDI executable
+
+ This method will run the JEDI executable for the diagnostic atmospheric analysis computation
+
+ Parameters
+ ----------
+ aprun_cmd : str
+ Run command for JEDI application on HPC system
+
+ Returns
+ ----------
+ None
+ """
+
+ self.jedi.execute(self.task_config, aprun_cmd)
+
 @logit(logger)
 def finalize(self) -> None:
+ """Finalize the diagnostic atmospheric analysis computation task
+
+ This method will finalize the diagnostic atmospheric analysis computation task.
+ This includes: + - Move analysis files to the comrot directory + + Parameters + ---------- + None + + Returns + ---------- + None + """ + cdate = to_fv3time(self.task_config.current_cycle).replace('.', '_') anl_prefix = f"{self.task_config.COM_ATMOS_ANALYSIS}/{self.task_config.APREFIX}" @@ -119,7 +207,3 @@ def finalize(self) -> None: f"{anl_prefix}atma{format(fh, '03')}.ensres.nc"]) FileHandler(fh_dict).sync() - - @logit(logger) - def execute(self, aprun_cmd: str) -> None: - self.jedi.execute(self.task_config, aprun_cmd) From dd6c1828cdf1dbebfa0ce325ad71f6c5ffc4d822 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Mon, 23 Sep 2024 16:37:27 +0000 Subject: [PATCH 39/71] pynorms --- ush/python/pygfs/task/calcanl.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/ush/python/pygfs/task/calcanl.py b/ush/python/pygfs/task/calcanl.py index aef6956c6a..9e73ead780 100644 --- a/ush/python/pygfs/task/calcanl.py +++ b/ush/python/pygfs/task/calcanl.py @@ -86,7 +86,7 @@ def initialize_jedi(self) -> None: ---------- None """ - + # get JEDI-to-FV3 increment converter config and save to YAML file logger.info(f"Generating JEDI YAML config: {self.jedi.yaml}") self.jedi.set_config(self.task_config) @@ -124,7 +124,7 @@ def initialize(self) -> None: ---------- None """ - + # Initialize dictionary used to construct Filehandler fh_dict = {'mkdir': [], 'copy': []} @@ -156,7 +156,7 @@ def initialize(self) -> None: def execute(self, aprun_cmd: str) -> None: """Run JEDI executable - This method will run the JEDI executable for the diagnostic atmospheric analysis computation + This method will run the JEDI executable for the diagnostic atmospheric analysis computation Parameters ---------- @@ -167,9 +167,9 @@ def execute(self, aprun_cmd: str) -> None: ---------- None """ - + self.jedi.execute(self.task_config, aprun_cmd) - + @logit(logger) def finalize(self) -> None: """Finalize the diagnostic atmospheric analysis computation task @@ -186,7 +186,7 @@ def finalize(self) -> None: ---------- None """ - + cdate = to_fv3time(self.task_config.current_cycle).replace('.', '_') anl_prefix = f"{self.task_config.COM_ATMOS_ANALYSIS}/{self.task_config.APREFIX}" From 1b12887b3f29f524e4d66fd346e8ca17a29951bc Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Mon, 23 Sep 2024 16:41:07 +0000 Subject: [PATCH 40/71] Fix indentation error and allocate more appropriate resources for job --- parm/config/gfs/config.calcanl | 4 ++-- sorc/link_workflow.sh | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/parm/config/gfs/config.calcanl b/parm/config/gfs/config.calcanl index b2db1f427a..f8f4d96264 100644 --- a/parm/config/gfs/config.calcanl +++ b/parm/config/gfs/config.calcanl @@ -5,8 +5,8 @@ echo "BEGIN: config.calcanl" -export layout_x_calcanl=8 -export layout_y_calcanl=8 +export layout_x_calcanl=2 +export layout_y_calcanl=2 # Get task specific resources . 
"${EXPDIR}/config.resources" calcanl diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index c36d34131c..5efa77688e 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -368,7 +368,7 @@ if [[ -d "${HOMEgfs}/sorc/gdas.cd/build" ]]; then "fv3jedi_plot_field.x" \ "gdasapp_chem_diagb.x" \ "fv3jedi_fv3inc.x" \ - "fv3jedi_calcanl.x" \ + "fv3jedi_calcanl.x" \ "gdas_ens_handler.x" \ "gdas_incr_handler.x" \ "gdas_obsprovider2ioda.x" \ From 2932cc2541bef0be8d2834754b637ecdfeab1c91 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Mon, 23 Sep 2024 17:08:02 +0000 Subject: [PATCH 41/71] Shell norms --- jobs/JGLOBAL_ATM_CALC_ANALYSIS | 2 +- ush/forecast_postdet.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/jobs/JGLOBAL_ATM_CALC_ANALYSIS b/jobs/JGLOBAL_ATM_CALC_ANALYSIS index 8f468555f8..d1aa172537 100755 --- a/jobs/JGLOBAL_ATM_CALC_ANALYSIS +++ b/jobs/JGLOBAL_ATM_CALC_ANALYSIS @@ -47,7 +47,7 @@ fi # Remove the Temporary working directory ############################################## -cd ${DATAROOT} +cd "${DATAROOT}" || ( echo "FATAL ERROR: ${DATAROOT} does not exist, ABORT!"; exit 1 ) if [[ ${KEEPDATA} = "NO" ]]; then rm -rf "${DATA}" fi diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh index 137d242092..33b8b81c45 100755 --- a/ush/forecast_postdet.sh +++ b/ush/forecast_postdet.sh @@ -160,7 +160,7 @@ EOF done else # "${DOIAU}" == "NO" read_increment=".true." - if [[ "${DO_JEDIATMVAR:-NO}" ]]; then + if [[ "${DO_JEDIATMVAR:-NO}" == "YES" ]]; then inc_files=("atminc.tile1.nc" "atminc.tile2.nc" "atminc.tile3.nc" "atminc.tile4.nc" "atminc.tile5.nc" "atminc.tile6.nc") res_latlon_dynamics="atminc" increment_file_on_native_grid=".true." From 0eaa63d6e45237f43844100d1e194ba96d5c6b57 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Mon, 23 Sep 2024 20:30:10 +0000 Subject: [PATCH 42/71] Rename some stuff --- ...OBAL_ATM_CALC_ANALYSIS => JGLOBAL_ATMOS_ANALYSIS_CALC_FV3JEDI} | 0 ...GLOBAL_ATMOS_ANALYSIS_CALC => JGLOBAL_ATMOS_ANALYSIS_CALC_GSI} | 0 jobs/rocoto/{calcanl.sh => analcalc_fv3jedi.sh} | 0 jobs/rocoto/{analcalc.sh => analcalc_gsi.sh} | 0 parm/config/gfs/{config.calcanl => config.analcalc_fv3jedi} | 0 parm/config/gfs/{config.analcalc => config.analcalc_gsi} | 0 ...m_calc_analysis.py => exglobal_atmos_analysis_calc_fv3jedi.py} | 0 ...atmos_analysis_calc.sh => exglobal_atmos_analysis_calc_gsi.sh} | 0 ush/python/pygfs/task/{calcanl.py => analcalc.py} | 0 9 files changed, 0 insertions(+), 0 deletions(-) rename jobs/{JGLOBAL_ATM_CALC_ANALYSIS => JGLOBAL_ATMOS_ANALYSIS_CALC_FV3JEDI} (100%) rename jobs/{JGLOBAL_ATMOS_ANALYSIS_CALC => JGLOBAL_ATMOS_ANALYSIS_CALC_GSI} (100%) rename jobs/rocoto/{calcanl.sh => analcalc_fv3jedi.sh} (100%) rename jobs/rocoto/{analcalc.sh => analcalc_gsi.sh} (100%) rename parm/config/gfs/{config.calcanl => config.analcalc_fv3jedi} (100%) rename parm/config/gfs/{config.analcalc => config.analcalc_gsi} (100%) rename scripts/{exglobal_atm_calc_analysis.py => exglobal_atmos_analysis_calc_fv3jedi.py} (100%) rename scripts/{exglobal_atmos_analysis_calc.sh => exglobal_atmos_analysis_calc_gsi.sh} (100%) rename ush/python/pygfs/task/{calcanl.py => analcalc.py} (100%) diff --git a/jobs/JGLOBAL_ATM_CALC_ANALYSIS b/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC_FV3JEDI similarity index 100% rename from jobs/JGLOBAL_ATM_CALC_ANALYSIS rename to jobs/JGLOBAL_ATMOS_ANALYSIS_CALC_FV3JEDI diff --git a/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC b/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC_GSI similarity index 100% rename from jobs/JGLOBAL_ATMOS_ANALYSIS_CALC rename to 
jobs/JGLOBAL_ATMOS_ANALYSIS_CALC_GSI diff --git a/jobs/rocoto/calcanl.sh b/jobs/rocoto/analcalc_fv3jedi.sh similarity index 100% rename from jobs/rocoto/calcanl.sh rename to jobs/rocoto/analcalc_fv3jedi.sh diff --git a/jobs/rocoto/analcalc.sh b/jobs/rocoto/analcalc_gsi.sh similarity index 100% rename from jobs/rocoto/analcalc.sh rename to jobs/rocoto/analcalc_gsi.sh diff --git a/parm/config/gfs/config.calcanl b/parm/config/gfs/config.analcalc_fv3jedi similarity index 100% rename from parm/config/gfs/config.calcanl rename to parm/config/gfs/config.analcalc_fv3jedi diff --git a/parm/config/gfs/config.analcalc b/parm/config/gfs/config.analcalc_gsi similarity index 100% rename from parm/config/gfs/config.analcalc rename to parm/config/gfs/config.analcalc_gsi diff --git a/scripts/exglobal_atm_calc_analysis.py b/scripts/exglobal_atmos_analysis_calc_fv3jedi.py similarity index 100% rename from scripts/exglobal_atm_calc_analysis.py rename to scripts/exglobal_atmos_analysis_calc_fv3jedi.py diff --git a/scripts/exglobal_atmos_analysis_calc.sh b/scripts/exglobal_atmos_analysis_calc_gsi.sh similarity index 100% rename from scripts/exglobal_atmos_analysis_calc.sh rename to scripts/exglobal_atmos_analysis_calc_gsi.sh diff --git a/ush/python/pygfs/task/calcanl.py b/ush/python/pygfs/task/analcalc.py similarity index 100% rename from ush/python/pygfs/task/calcanl.py rename to ush/python/pygfs/task/analcalc.py From 693e6d1f3a34e9ed1952ab2ede75aace58543e09 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Mon, 23 Sep 2024 20:31:39 +0000 Subject: [PATCH 43/71] Rename some stuff per comment from Rahul --- env/HERA.env | 8 ++++---- env/HERCULES.env | 8 ++++---- env/JET.env | 8 ++++---- env/ORION.env | 8 ++++---- env/S4.env | 8 ++++---- env/WCOSS2.env | 10 +++++----- jobs/JGLOBAL_ATMOS_ANALYSIS_CALC_FV3JEDI | 4 ++-- jobs/JGLOBAL_ATMOS_ANALYSIS_CALC_GSI | 4 ++-- jobs/rocoto/analcalc_fv3jedi.sh | 4 ++-- jobs/rocoto/analcalc_gsi.sh | 4 ++-- parm/config/gfs/config.analcalc_fv3jedi | 16 ++++++++-------- parm/config/gfs/config.analcalc_gsi | 8 ++++---- parm/config/gfs/config.resources | 10 +++++----- scripts/exglobal_atmos_analysis_calc_fv3jedi.py | 16 ++++++++-------- sorc/link_workflow.sh | 2 +- ush/python/pygfs/task/analcalc.py | 4 ++-- 16 files changed, 61 insertions(+), 61 deletions(-) diff --git a/env/HERA.env b/env/HERA.env index ec7db1d787..94df9766af 100755 --- a/env/HERA.env +++ b/env/HERA.env @@ -160,12 +160,12 @@ elif [[ "${step}" = "marineanalletkf" ]]; then export NTHREADS_MARINEANALLETKF=${NTHREADSmax} export APRUN_MARINEANALLETKF="${APRUN_default} --cpus-per-task=${NTHREADS_MARINEANALLETKF}" -elif [[ "${step}" = "calcanl" ]]; then +elif [[ "${step}" = "analcalc_fv3jedi" ]]; then - export NTHREADS_CALCANL=${NTHREADSmax} - export APRUN_CALCANL="${APRUN_default} --cpus-per-task=${NTHREADS_CALCANL}" + export NTHREADS_ANALCALC_FV3JEDI=${NTHREADSmax} + export APRUN_ANALCALC_FV3JEDI="${APRUN_default} --cpus-per-task=${NTHREADS_ANALCALC_FV3JEDI}" -elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then +elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc_gsi" ]]; then export MKL_NUM_THREADS=4 export MKL_CBWR=AUTO diff --git a/env/HERCULES.env b/env/HERCULES.env index faaf1da229..0aa521b029 100755 --- a/env/HERCULES.env +++ b/env/HERCULES.env @@ -155,12 +155,12 @@ case ${step} in export NTHREADS_OCNANAL=${NTHREADSmax} export APRUN_OCNANAL="${APRUN_default} --cpus-per-task=${NTHREADS_OCNANAL}" ;; - "calcanl") + "analcalc_fv3jedi") - export NTHREADS_CALCANL=${NTHREADSmax} - export 
APRUN_CALCANL="${APRUN_default} --cpus-per-task=${NTHREADS_CALCANL}" + export NTHREADS_ANALCALC_FV3JEDI=${NTHREADSmax} + export APRUN_ANALCALC_FV3JEDI="${APRUN_default} --cpus-per-task=${NTHREADS_ANALCALC_FV3JEDI}" ;; - "anal" | "analcalc") + "anal" | "analcalc_gsi") export MKL_NUM_THREADS=4 export MKL_CBWR=AUTO diff --git a/env/JET.env b/env/JET.env index c05fe35789..4d3b4b36bd 100755 --- a/env/JET.env +++ b/env/JET.env @@ -126,12 +126,12 @@ elif [[ "${step}" = "ocnanalrun" ]]; then export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" export APRUN_OCNANAL="${APRUN_default}" -elif [[ "${step}" = "calcanl" ]]; then +elif [[ "${step}" = "analcalc_fv3jedi" ]]; then - export NTHREADS_CALCANL=${NTHREADSmax} - export APRUN_CALCANL="${APRUN_default}" + export NTHREADS_ANALCALC_FV3JEDI=${NTHREADSmax} + export APRUN_ANALCALC_FV3JEDI="${APRUN_default}" -elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then +elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc_gsi" ]]; then export MKL_NUM_THREADS=4 export MKL_CBWR=AUTO diff --git a/env/ORION.env b/env/ORION.env index ea44e510f8..d65ecc7b92 100755 --- a/env/ORION.env +++ b/env/ORION.env @@ -153,12 +153,12 @@ elif [[ "${step}" = "marineanalletkf" ]]; then export NTHREADS_MARINEANALLETKF=${NTHREADSmax} export APRUN_MARINEANALLETKF="${APRUN_default} --cpus-per-task=${NTHREADS_MARINEANALLETKF}" -elif [[ "${step}" = "calcanl" ]]; then +elif [[ "${step}" = "analcalc_fv3jedi" ]]; then - export NTHREADS_CALCANL=${NTHREADSmax} - export APRUN_CALCANL="${APRUN_default} --cpus-per-task=${NTHREADS_CALCANL}" + export NTHREADS_ANALCALC_FV3JEDI=${NTHREADSmax} + export APRUN_ANALCALC_FV3JEDI="${APRUN_default} --cpus-per-task=${NTHREADS_ANALCALC_FV3JEDI}" -elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then +elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc_gsi" ]]; then export MKL_NUM_THREADS=4 export MKL_CBWR=AUTO diff --git a/env/S4.env b/env/S4.env index 679af59d1f..e4e1b896de 100755 --- a/env/S4.env +++ b/env/S4.env @@ -122,12 +122,12 @@ elif [[ "${step}" = "marinebmat" ]]; then elif [[ "${step}" = "marinerun" ]]; then echo "WARNING: ${step} is not enabled on S4!" 
-elif [[ "${step}" = "calcanl" ]]; then +elif [[ "${step}" = "analcalc_fv3jedi" ]]; then - export NTHREADS_CALCANL=${NTHREADSmax} - export APRUN_CALCANL="${APRUN_default}" + export NTHREADS_ANALCALC_FV3JEDI=${NTHREADSmax} + export APRUN_ANALCALC_FV3JEDI="${APRUN_default}" -elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then +elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc_gsi" ]]; then export MKL_NUM_THREADS=4 export MKL_CBWR=AUTO diff --git a/env/WCOSS2.env b/env/WCOSS2.env index 6daaf6ab21..29fd57d35f 100755 --- a/env/WCOSS2.env +++ b/env/WCOSS2.env @@ -136,18 +136,18 @@ elif [[ "${step}" = "atmanlfv3inc" ]]; then export NTHREADS_ATMANLFV3INC=${NTHREADSmax} export APRUN_ATMANLFV3INC="${APRUN_default}" -elif [[ "${step}" = "calcanl" ]]; then +elif [[ "${step}" = "analcalc_fv3jedi" ]]; then - export NTHREADS_CALCANL=${NTHREADSmax} - export APRUN_CALCANL="${APRUN_default}" + export NTHREADS_ANALCALC_FV3JEDI=${NTHREADSmax} + export APRUN_ANALCALC_FV3JEDI="${APRUN_default}" -elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then +elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc_gsi" ]]; then export OMP_PLACES=cores export OMP_STACKSIZE=1G export FI_OFI_RXM_SAR_LIMIT=3145728 - if [[ "${step}" = "analcalc" ]]; then + if [[ "${step}" = "analcalc_gsi" ]]; then export MPICH_MPIIO_HINTS="*:romio_cb_write=disable" fi diff --git a/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC_FV3JEDI b/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC_FV3JEDI index d1aa172537..af4e5537cf 100755 --- a/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC_FV3JEDI +++ b/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC_FV3JEDI @@ -1,7 +1,7 @@ #! /usr/bin/env bash source "${HOMEgfs}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "calcanl" -c "base calcanl" +source "${HOMEgfs}/ush/jjob_header.sh" -e "analcalc_fv3jedi" -c "base analcalc_fv3jedi" ############################################## # Set variables used in the script @@ -26,7 +26,7 @@ RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \ # Run relevant script ############################################## -EXSCRIPT=${GDASATMRUNSH:-${SCRgfs}/exglobal_atm_calc_analysis.py} +EXSCRIPT=${GDASATMRUNSH:-${SCRgfs}/exglobal_atmos_analysis_calc_fv3jedi.py} ${EXSCRIPT} status=$? [[ ${status} -ne 0 ]] && exit "${status}" diff --git a/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC_GSI b/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC_GSI index 5b6073254a..5671864fb2 100755 --- a/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC_GSI +++ b/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC_GSI @@ -1,7 +1,7 @@ #! /usr/bin/env bash source "${HOMEgfs}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "analcalc" -c "base anal analcalc" +source "${HOMEgfs}/ush/jjob_header.sh" -e "analcalc_gsi" -c "base anal analcalc_gsi" ############################################## @@ -56,7 +56,7 @@ export DOGAUSFCANL=${DOGAUSFCANL:-"YES"} ############################################################### # Run relevant script -${ANALCALCSH:-${SCRgfs}/exglobal_atmos_analysis_calc.sh} +${ANALCALCSH:-${SCRgfs}/exglobal_atmos_analysis_calc_gsi.sh} status=$? [[ ${status} -ne 0 ]] && exit ${status} diff --git a/jobs/rocoto/analcalc_fv3jedi.sh b/jobs/rocoto/analcalc_fv3jedi.sh index d345d433c8..ceb4077160 100755 --- a/jobs/rocoto/analcalc_fv3jedi.sh +++ b/jobs/rocoto/analcalc_fv3jedi.sh @@ -8,11 +8,11 @@ source "${HOMEgfs}/ush/preamble.sh" status=$? 
[[ ${status} -ne 0 ]] && exit "${status}" -export job="calcanl" +export job="analcalc_fv3jedi" export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/jobs/JGLOBAL_ATM_CALC_ANALYSIS" +"${HOMEgfs}/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC_FV3JEDI status=$? exit "${status}" diff --git a/jobs/rocoto/analcalc_gsi.sh b/jobs/rocoto/analcalc_gsi.sh index 2e669b0163..68779c43bf 100755 --- a/jobs/rocoto/analcalc_gsi.sh +++ b/jobs/rocoto/analcalc_gsi.sh @@ -8,12 +8,12 @@ source "${HOMEgfs}/ush/preamble.sh" status=$? [[ ${status} -ne 0 ]] && exit ${status} -export job="analcalc" +export job="analcalc_gsi" export jobid="${job}.$$" ############################################################### # Execute the JJOB -${HOMEgfs}/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC +${HOMEgfs}/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC_GSI status=$? diff --git a/parm/config/gfs/config.analcalc_fv3jedi b/parm/config/gfs/config.analcalc_fv3jedi index f8f4d96264..e1fdb867e2 100644 --- a/parm/config/gfs/config.analcalc_fv3jedi +++ b/parm/config/gfs/config.analcalc_fv3jedi @@ -1,19 +1,19 @@ #! /usr/bin/env bash -########## config.calcanl ########## +########## config.analcalc_fv3jedi ########## # Diagnostic amospheric analysis calculation specific -echo "BEGIN: config.calcanl" +echo "BEGIN: config.analcalc_fv3jedi" -export layout_x_calcanl=2 -export layout_y_calcanl=2 +export layout_x_analcalc_fv3jedi=2 +export layout_y_analcalc_fv3jedi=2 # Get task specific resources -. "${EXPDIR}/config.resources" calcanl +. "${EXPDIR}/config.resources" analcalc_fv3jedi export JCB_BASE_YAML=${PARMgfs}/gdas/atm/jcb-base.yaml.j2 -export JCB_ALGO=fv3jedi_calcanl -export JEDIEXE=${EXECgfs}/fv3jedi_calcanl.x +export JCB_ALGO=fv3jedi_analcalc +export JEDIEXE=${EXECgfs}/fv3jedi_analcalc.x if [[ ${DOHYBVAR} = "YES" ]]; then export CASE_ANL=${CASE_ENS} @@ -23,4 +23,4 @@ fi export JEDI_FIX_YAML="${PARMgfs}/gdas/atm_jedi_fix.yaml.j2" -echo "END: config.calcanl" +echo "END: config.analcalc_fv3jedi" diff --git a/parm/config/gfs/config.analcalc_gsi b/parm/config/gfs/config.analcalc_gsi index d9501503f0..102c3e03c3 100644 --- a/parm/config/gfs/config.analcalc_gsi +++ b/parm/config/gfs/config.analcalc_gsi @@ -1,11 +1,11 @@ #! /usr/bin/env bash -########## config.analcalc ########## +########## config.analcalc_gsi ########## # GFS post-anal specific (non-diag) -echo "BEGIN: config.analcalc" +echo "BEGIN: config.analcalc_gsi" # Get task specific resources -. ${EXPDIR}/config.resources analcalc +. 
${EXPDIR}/config.resources analcalc_gsi -echo "END: config.analcalc" +echo "END: config.analcalc_gsi" diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources index e8da64b5ed..148a439606 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -17,7 +17,7 @@ if (( $# != 1 )); then echo "atmensanlinit atmensanlobs atmensanlsol atmensanlletkf atmensanlfv3inc atmensanlfinal" echo "snowanl esnowrecen" echo "prepobsaero aeroanlinit aeroanlvar aeroanlfinal aeroanlgenb" - echo "anal sfcanl calcanl analcalc analdiag fcst echgres" + echo "anal sfcanl analcalc_fv3jedi analcalc_gsi analdiag fcst echgres" echo "upp atmos_products" echo "tracker genesis genesis_fsu" echo "verfozn verfrad vminmon fit2obs metp arch cleanup" @@ -700,9 +700,9 @@ case ${step} in export is_exclusive=True ;; - "calcanl") - export layout_x=${layout_x_calcanl} - export layout_y=${layout_y_calcanl} + "analcalc_fv3jedi") + export layout_x=${layout_x_analcalc_fv3jedi} + export layout_y=${layout_y_analcalc_fv3jedi} walltime="00:15:00" ntasks=$(( layout_x * layout_y * 6 )) @@ -711,7 +711,7 @@ case ${step} in export is_exclusive=True ;; - "analcalc") + "analcalc_gsi") walltime="00:15:00" ntasks=127 export ntasks_calcanl="${ntasks}" diff --git a/scripts/exglobal_atmos_analysis_calc_fv3jedi.py b/scripts/exglobal_atmos_analysis_calc_fv3jedi.py index 96abdb776f..a591979584 100755 --- a/scripts/exglobal_atmos_analysis_calc_fv3jedi.py +++ b/scripts/exglobal_atmos_analysis_calc_fv3jedi.py @@ -1,12 +1,12 @@ #!/usr/bin/env python3 # exglobal_atm_calc_analysis.py -# This script creates an CalcAnalysis object +# This script creates an AnalysisCalc object # and runs the execute method which executes # the diagnostic global analysis calculation import os from wxflow import Logger, cast_strdict_as_dtypedict -from pygfs.task.calcanl import CalcAnalysis +from pygfs.task.analcalc_fv3jedi import AnalysisCalc # Initialize root logger logger = Logger(level='DEBUG', colored_log=True) @@ -17,15 +17,15 @@ # Take configuration from environment and cast it as python dictionary config = cast_strdict_as_dtypedict(os.environ) - # Instantiate the CalcAnalysis task - CalcAnl = CalcAnalysis(config, 'calcanl') + # Instantiate the AnalysisCalc task + AnalCalc = AnalysisCalc(config, 'fv3jedi_analcalc') # Initialize - CalcAnl.initialize_jedi() - CalcAnl.initialize() + AnalCalc.initialize_jedi() + AnalCalc.initialize() # Execute JEDI application - CalcAnl.execute(config.APRUN_CALCANL) + AnalCalc.execute(config.APRUN_ANALCALC_FV3JEDI) # Finalize - CalcAnl.finalize() + AnalCalc.finalize() diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index 5efa77688e..462292a613 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -368,7 +368,7 @@ if [[ -d "${HOMEgfs}/sorc/gdas.cd/build" ]]; then "fv3jedi_plot_field.x" \ "gdasapp_chem_diagb.x" \ "fv3jedi_fv3inc.x" \ - "fv3jedi_calcanl.x" \ + "fv3jedi_analcalc.x" \ "gdas_ens_handler.x" \ "gdas_incr_handler.x" \ "gdas_obsprovider2ioda.x" \ diff --git a/ush/python/pygfs/task/analcalc.py b/ush/python/pygfs/task/analcalc.py index 9e73ead780..8123aaec46 100644 --- a/ush/python/pygfs/task/analcalc.py +++ b/ush/python/pygfs/task/analcalc.py @@ -13,11 +13,11 @@ logger = getLogger(__name__.split('.')[-1]) -class CalcAnalysis(Task): +class AnalysisCalc(Task): """ Class for JEDI-based analysis calculation """ - @logit(logger, name="CalcAnalysis") + @logit(logger, name="AnalysisCalc") def __init__(self, config, yaml_name=None): """Constructor diagnostic atmospheric 
analysis calculation task From dff1e59cd548f2caad0e9cad41a5114ba7a534de Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Mon, 23 Sep 2024 20:36:54 +0000 Subject: [PATCH 44/71] Missed renaming of some things --- .../exglobal_atmos_analysis_calc_fv3jedi.py | 2 +- ush/python/pygfs/__init__.py | 2 +- ush/python/pygfs/task/analcalc.py | 20 +++++++++---------- workflow/applications/gfs_cycled.py | 8 ++++---- workflow/rocoto/gfs_tasks.py | 20 +++++++++---------- workflow/rocoto/tasks.py | 2 +- 6 files changed, 27 insertions(+), 27 deletions(-) diff --git a/scripts/exglobal_atmos_analysis_calc_fv3jedi.py b/scripts/exglobal_atmos_analysis_calc_fv3jedi.py index a591979584..3ac97c4a29 100755 --- a/scripts/exglobal_atmos_analysis_calc_fv3jedi.py +++ b/scripts/exglobal_atmos_analysis_calc_fv3jedi.py @@ -6,7 +6,7 @@ import os from wxflow import Logger, cast_strdict_as_dtypedict -from pygfs.task.analcalc_fv3jedi import AnalysisCalc +from pygfs.task.analcalc import AnalysisCalc # Initialize root logger logger = Logger(level='DEBUG', colored_log=True) diff --git a/ush/python/pygfs/__init__.py b/ush/python/pygfs/__init__.py index 8f66811b80..fdc37b9462 100644 --- a/ush/python/pygfs/__init__.py +++ b/ush/python/pygfs/__init__.py @@ -8,7 +8,7 @@ from .task.aero_bmatrix import AerosolBMatrix from .task.atm_analysis import AtmAnalysis from .task.atmens_analysis import AtmEnsAnalysis -from .task.calcanl import CalcAnalysis +from .task.analcalc import AnalysisCalc from .task.marine_bmat import MarineBMat from .task.snow_analysis import SnowAnalysis from .task.snowens_analysis import SnowEnsAnalysis diff --git a/ush/python/pygfs/task/analcalc.py b/ush/python/pygfs/task/analcalc.py index 8123aaec46..9a40640f63 100644 --- a/ush/python/pygfs/task/analcalc.py +++ b/ush/python/pygfs/task/analcalc.py @@ -57,7 +57,7 @@ def __init__(self, config, yaml_name=None): 'ATM_WINDOW_LENGTH': f"PT{self.task_config.assim_freq}H", 'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", 'GPREFIX': f"gdas.t{self.task_config.previous_cycle.hour:02d}z.", - 'CalcAnlDir': lambda fh: f"{self.task_config.DATA}/calcanl_{format(fh, '02')}" + 'AnalCalcDir': lambda fh: f"{self.task_config.DATA}/analcalc_{format(fh, '02')}" } ) @@ -133,21 +133,21 @@ def initialize(self) -> None: hist_prefix = f"{self.task_config.COM_ATMOS_HISTORY_PREV}/{self.task_config.GPREFIX}" anl_prefix = f"{self.task_config.COM_ATMOS_ANALYSIS}/{self.task_config.APREFIX}" for fh in self.task_config.IAUFHRS: - fh_dict['mkdir'].append(self.task_config.CalcAnlDir(fh)) + fh_dict['mkdir'].append(self.task_config.AnalCalcDir(fh)) fh_dict['copy'].append([f"{hist_prefix}cubed_sphere_grid_atmf{format(fh, '03')}.nc", - f"{self.task_config.CalcAnlDir(fh)}/ges.atm.{format(fh, '02')}.nc"]) + f"{self.task_config.AnalCalcDir(fh)}/ges.atm.{format(fh, '02')}.nc"]) fh_dict['copy'].append([f"{hist_prefix}cubed_sphere_grid_sfcf{format(fh, '03')}.nc", - f"{self.task_config.CalcAnlDir(fh)}/ges.sfc.{format(fh, '02')}.nc"]) + f"{self.task_config.AnalCalcDir(fh)}/ges.sfc.{format(fh, '02')}.nc"]) if fh == 6: for itile in range(6): fh_dict['copy'].append([f"{anl_prefix}atminc.tile{itile+1}.nc", - f"{self.task_config.CalcAnlDir(fh)}/siginc.06.tile{itile+1}.nc"]) + f"{self.task_config.AnalCalcDir(fh)}/siginc.06.tile{itile+1}.nc"]) else: for itile in range(6): fh_dict['copy'].append([f"{anl_prefix}/atmi{format(fh, '02')}.tile{itile+1}.nc", - f"{self.task_config.CalcAnlDir(fh)}/siginc.{format(fh, '02')}.tile{itile+1}.nc"]) + f"{self.task_config.AnalCalcDir(fh)}/siginc.{format(fh, 
'02')}.tile{itile+1}.nc"]) # Stage files FileHandler(fh_dict).sync() @@ -196,14 +196,14 @@ def finalize(self) -> None: for fh in self.task_config.IAUFHRS: if fh == 6: - fh_dict['copy'].append([f"{self.task_config.CalcAnlDir(fh)}/anl.{format(fh, '02')}.{cdate}z.nc4", + fh_dict['copy'].append([f"{self.task_config.AnalCalcDir(fh)}/anl.{format(fh, '02')}.{cdate}z.nc4", f"{anl_prefix}atmanl.nc"]) - fh_dict['copy'].append([f"{self.task_config.CalcAnlDir(fh)}/anl.ensres.{format(fh, '02')}.{cdate}z.nc4", + fh_dict['copy'].append([f"{self.task_config.AnalCalcDir(fh)}/anl.ensres.{format(fh, '02')}.{cdate}z.nc4", f"{anl_prefix}atmanl.ensres.nc"]) else: - fh_dict['copy'].append([f"{self.task_config.CalcAnlDir(fh)}/anl.{format(fh, '02')}.{cdate}z.nc4", + fh_dict['copy'].append([f"{self.task_config.AnalCalcDir(fh)}/anl.{format(fh, '02')}.{cdate}z.nc4", f"{anl_prefix}atma{format(fh, '03')}.nc"]) - fh_dict['copy'].append([f"{self.task_config.CalcAnlDir(fh)}/anl.ensres.{format(fh, '02')}.{cdate}z.nc4", + fh_dict['copy'].append([f"{self.task_config.AnalCalcDir(fh)}/anl.ensres.{format(fh, '02')}.{cdate}z.nc4", f"{anl_prefix}atma{format(fh, '03')}.ensres.nc"]) FileHandler(fh_dict).sync() diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py index 2af2f53b7a..fb9b175207 100644 --- a/workflow/applications/gfs_cycled.py +++ b/workflow/applications/gfs_cycled.py @@ -39,9 +39,9 @@ def _get_app_configs(self): configs = ['prep'] if self.do_jediatmvar: - configs += ['prepatmiodaobs', 'atmanlinit', 'atmanlvar', 'atmanlfv3inc', 'atmanlfinal', 'calcanl'] + configs += ['prepatmiodaobs', 'atmanlinit', 'atmanlvar', 'atmanlfv3inc', 'atmanlfinal', 'analcalc_fv3jedi'] else: - configs += ['anal', 'analdiag', 'analcalc'] + configs += ['anal', 'analdiag', 'analcalc_gsi'] if self.do_jediocnvar: configs += ['prepoceanobs', 'ocnanalprep', 'marinebmat', 'ocnanalrun'] @@ -141,9 +141,9 @@ def get_task_names(self): gdas_gfs_common_cleanup_tasks = ['arch', 'cleanup'] if self.do_jediatmvar: - gdas_gfs_common_tasks_before_fcst += ['prepatmiodaobs', 'atmanlinit', 'atmanlvar', 'atmanlfv3inc', 'atmanlfinal', 'calcanl'] + gdas_gfs_common_tasks_before_fcst += ['prepatmiodaobs', 'atmanlinit', 'atmanlvar', 'atmanlfv3inc', 'atmanlfinal', 'analcalc_fv3jedi'] else: - gdas_gfs_common_tasks_before_fcst += ['anal', 'analcalc'] + gdas_gfs_common_tasks_before_fcst += ['anal', 'analcalc_gsi'] if self.do_jediocnvar: gdas_gfs_common_tasks_before_fcst += ['prepoceanobs', 'ocnanalprep', 'marinebmat', 'ocnanalrun'] diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index 996bced8f8..9add4bb0fb 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -244,7 +244,7 @@ def sfcanl(self): return task - def calcanl(self): + def analcalc_fv3jedi(self): deps = [] dep_dict = {'type': 'task', 'name': f'{self.run}atmanlfinal'} @@ -256,14 +256,14 @@ def calcanl(self): deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - resources = self.get_resource('calcanl') - task_name = f'{self.run}calcanl' + resources = self.get_resource('analcalc_fv3jedi') + task_name = f'{self.run}analcalc_fv3jedi' task_dict = {'task_name': task_name, 'resources': resources, 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/jobs/rocoto/calcanl.sh', + 'command': f'{self.HOMEgfs}/jobs/rocoto/analcalc_fv3jedi.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': 
f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -273,7 +273,7 @@ def calcanl(self): return task - def analcalc(self): + def analcalc_gsi(self): deps = [] dep_dict = {'type': 'task', 'name': f'{self.run}anal'} @@ -285,14 +285,14 @@ def analcalc(self): deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - resources = self.get_resource('analcalc') - task_name = f'{self.run}analcalc' + resources = self.get_resource('analcalc_gsi') + task_name = f'{self.run}analcalc_gsi' task_dict = {'task_name': task_name, 'resources': resources, 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.run.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/jobs/rocoto/analcalc.sh', + 'command': f'{self.HOMEgfs}/jobs/rocoto/analcalc_gsi.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -2669,7 +2669,7 @@ def _get_ecengroups(): return grp, dep, lst deps = [] - dep_dict = {'type': 'task', 'name': f'{self.run.replace("enkf","")}analcalc'} + dep_dict = {'type': 'task', 'name': f'{self.run.replace("enkf","")}analcalc_gsi'} deps.append(rocoto.add_dependency(dep_dict)) if self.app_config.do_jediatmens: dep_dict = {'type': 'task', 'name': f'{self.run}atmensanlfinal'} @@ -2715,7 +2715,7 @@ def esfc(self): # eupd_run = 'gdas' if 'gdas' in self.app_config.eupd_runs else 'gfs' deps = [] - dep_dict = {'type': 'task', 'name': f'{self.run.replace("enkf","")}analcalc'} + dep_dict = {'type': 'task', 'name': f'{self.run.replace("enkf","")}analcalc_gsi'} deps.append(rocoto.add_dependency(dep_dict)) if self.app_config.do_jediatmens: dep_dict = {'type': 'task', 'name': f'{self.run}atmensanlfinal'} diff --git a/workflow/rocoto/tasks.py b/workflow/rocoto/tasks.py index f61abac7d8..e38ad75c39 100644 --- a/workflow/rocoto/tasks.py +++ b/workflow/rocoto/tasks.py @@ -12,7 +12,7 @@ class Tasks: SERVICE_TASKS = ['arch', 'earc'] VALID_TASKS = ['aerosol_init', 'stage_ic', - 'prep', 'anal', 'sfcanl', 'calcanl', 'analcalc', 'analdiag', 'arch', "cleanup", + 'prep', 'anal', 'sfcanl', 'analcalc_fv3jedi', 'analcalc_gsi', 'analdiag', 'arch', "cleanup", 'prepatmiodaobs', 'atmanlinit', 'atmanlvar', 'atmanlfv3inc', 'atmanlfinal', 'prepoceanobs', 'ocnanalprep', 'marinebmat', 'ocnanalrun', 'ocnanalecen', 'ocnanalchkpt', 'ocnanalpost', 'ocnanalvrfy', From 24f5029d01666973dc65b0c4805f6fad749c3146 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Mon, 23 Sep 2024 20:41:37 +0000 Subject: [PATCH 45/71] Update GDASApp hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 76daa87d5e..d8b47436e8 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 76daa87d5e47e89f1f6f94105877f66e56fa0262 +Subproject commit d8b47436e8bccb712929315dc2f1d6bf4be7c74d From 6aa3b96c437b34ea857abf9aad2ddfaf2567e91c Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Mon, 23 Sep 2024 20:45:05 +0000 Subject: [PATCH 46/71] Update analcalc name in two more corners of the GW --- docs/source/jobs.rst | 2 +- env/GAEA.env | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/source/jobs.rst b/docs/source/jobs.rst index 0e3700bf20..30411283a6 100644 --- a/docs/source/jobs.rst +++ b/docs/source/jobs.rst @@ -33,7 +33,7 @@ Jobs in the GFS Configuration | anal | Runs the analysis. 
1) Runs the atmospheric analysis (global_gsi) to produce analysis increments; 2) Update surface | | | guess file via global_cycle to create surface analysis on tiles. | +-------------------+-----------------------------------------------------------------------------------------------------------------------+ -| analcalc | Adds the analysis increments to previous cycle’s forecasts to produce atmospheric analysis files. Produces surface | +| analcalc_gsi | Adds the analysis increments to previous cycle’s forecasts to produce atmospheric analysis files. Produces surface | | | analysis file on Gaussian grid. | +-------------------+-----------------------------------------------------------------------------------------------------------------------+ | analdiag | Creates netCDF diagnostic files containing observation values, innovation (O-F), error, quality control, as well as | diff --git a/env/GAEA.env b/env/GAEA.env index 7736e0f1ea..d80958e54d 100755 --- a/env/GAEA.env +++ b/env/GAEA.env @@ -41,7 +41,7 @@ if [[ "${step}" = "prep" ]]; then export sys_tp="GAEA" export launcher_PREP="srun" -elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then +elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc_gsi" ]]; then export MKL_NUM_THREADS=4 export MKL_CBWR=AUTO From 65f66f46c1d419b8da40ca92a0ac009e781249a7 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Mon, 23 Sep 2024 21:06:36 +0000 Subject: [PATCH 47/71] Shell norms and update UFS hash --- jobs/rocoto/analcalc_fv3jedi.sh | 2 +- jobs/rocoto/analcalc_gsi.sh | 2 +- parm/config/gfs/config.analcalc_gsi | 2 +- sorc/ufs_model.fd | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/jobs/rocoto/analcalc_fv3jedi.sh b/jobs/rocoto/analcalc_fv3jedi.sh index ceb4077160..22e8abd4ed 100755 --- a/jobs/rocoto/analcalc_fv3jedi.sh +++ b/jobs/rocoto/analcalc_fv3jedi.sh @@ -13,6 +13,6 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC_FV3JEDI +"${HOMEgfs}"/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC_FV3JEDI status=$? exit "${status}" diff --git a/jobs/rocoto/analcalc_gsi.sh b/jobs/rocoto/analcalc_gsi.sh index 68779c43bf..66d264a8bd 100755 --- a/jobs/rocoto/analcalc_gsi.sh +++ b/jobs/rocoto/analcalc_gsi.sh @@ -13,7 +13,7 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -${HOMEgfs}/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC_GSI +"${HOMEgfs}"/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC_GSI status=$? diff --git a/parm/config/gfs/config.analcalc_gsi b/parm/config/gfs/config.analcalc_gsi index 102c3e03c3..adc7b75570 100644 --- a/parm/config/gfs/config.analcalc_gsi +++ b/parm/config/gfs/config.analcalc_gsi @@ -6,6 +6,6 @@ echo "BEGIN: config.analcalc_gsi" # Get task specific resources -. ${EXPDIR}/config.resources analcalc_gsi +. 
"${EXPDIR}"/config.resources analcalc_gsi echo "END: config.analcalc_gsi" diff --git a/sorc/ufs_model.fd b/sorc/ufs_model.fd index 6a4e09e947..38a29a6246 160000 --- a/sorc/ufs_model.fd +++ b/sorc/ufs_model.fd @@ -1 +1 @@ -Subproject commit 6a4e09e94773ffa39ce7ab6a54a885efada91f21 +Subproject commit 38a29a62461cb1f9bf530420d5bc2f73a4650724 From dd84949be69c6a41a18346a16cba6417410b4336 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Tue, 24 Sep 2024 14:17:59 +0000 Subject: [PATCH 48/71] Fix job dependencies bug --- workflow/rocoto/gfs_tasks.py | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index 9add4bb0fb..6e83d52c5a 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -2669,13 +2669,16 @@ def _get_ecengroups(): return grp, dep, lst deps = [] - dep_dict = {'type': 'task', 'name': f'{self.run.replace("enkf","")}analcalc_gsi'} - deps.append(rocoto.add_dependency(dep_dict)) if self.app_config.do_jediatmens: + dep_dict = {'type': 'task', 'name': f'{self.run.replace("enkf","")}analcalc_fv3jedi'} + deps.append(rocoto.add_dependency(dep_dict)) dep_dict = {'type': 'task', 'name': f'{self.run}atmensanlfinal'} + deps.append(rocoto.add_dependency(dep_dict)) else: + dep_dict = {'type': 'task', 'name': f'{self.run.replace("enkf","")}analcalc_gsi'} + deps.append(rocoto.add_dependency(dep_dict)) dep_dict = {'type': 'task', 'name': f'{self.run}eupd'} - deps.append(rocoto.add_dependency(dep_dict)) + deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) ecenenvars = self.envars.copy() @@ -2715,13 +2718,17 @@ def esfc(self): # eupd_run = 'gdas' if 'gdas' in self.app_config.eupd_runs else 'gfs' deps = [] - dep_dict = {'type': 'task', 'name': f'{self.run.replace("enkf","")}analcalc_gsi'} - deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_jediatmens: + dep_dict = {'type': 'task', 'name': f'{self.run.replace("enkf","")}analcalc_fv3jedi'} + deps.append(rocoto.add_dependency(dep_dict)) dep_dict = {'type': 'task', 'name': f'{self.run}atmensanlfinal'} + deps.append(rocoto.add_dependency(dep_dict)) else: + dep_dict = {'type': 'task', 'name': f'{self.run.replace("enkf","")}analcalc_gsi'} + deps.append(rocoto.add_dependency(dep_dict)) dep_dict = {'type': 'task', 'name': f'{self.run}eupd'} - deps.append(rocoto.add_dependency(dep_dict)) + deps.append(rocoto.add_dependency(dep_dict)) if self.app_config.do_jedisnowda: dep_dict = {'type': 'task', 'name': f'{self.run}esnowrecen'} deps.append(rocoto.add_dependency(dep_dict)) From 76558a1688e1a65d0e18450ed4c4ad57a14ec4cb Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Wed, 25 Sep 2024 15:53:58 +0000 Subject: [PATCH 49/71] Update GDAS hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index d8b47436e8..d9f10d940c 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit d8b47436e8bccb712929315dc2f1d6bf4be7c74d +Subproject commit d9f10d940cd6a2cacbcb3b11eaad4b1bc883b4dd From ca827950c9b7c3d8b0e088a03bc4e66af1844303 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Wed, 25 Sep 2024 21:09:35 +0000 Subject: [PATCH 50/71] Have ensemble write native grid AND Gaussian grid increments temporarily. Add "cubed_sphere_grid_" to "atminc" prefixes. Update GDAS hash. 
--- sorc/gdas.cd | 2 +- ush/forecast_postdet.sh | 6 +++++- ush/python/pygfs/task/atm_analysis.py | 2 +- ush/python/pygfs/task/atmens_analysis.py | 8 ++++++-- 4 files changed, 13 insertions(+), 5 deletions(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index d9f10d940c..233ab2a57c 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit d9f10d940cd6a2cacbcb3b11eaad4b1bc883b4dd +Subproject commit 233ab2a57c6da0bfb0b356df1619b217d8783c5b diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh index 33b8b81c45..4f6d7749fd 100755 --- a/ush/forecast_postdet.sh +++ b/ush/forecast_postdet.sh @@ -183,7 +183,11 @@ EOF local increment_file for inc_file in "${inc_files[@]}"; do - increment_file="${COMIN_ATMOS_ANALYSIS}/${RUN}.t${cyc}z.${PREFIX_ATMINC}${inc_file}" + if [[ "${DO_JEDIATMVAR:-NO}" == "YES" ]]; then + increment_file="${COMIN_ATMOS_ANALYSIS}/${RUN}.t${cyc}z.cubed_sphere_grid_${PREFIX_ATMINC}${inc_file}" + else + increment_file="${COMIN_ATMOS_ANALYSIS}/${RUN}.t${cyc}z.${PREFIX_ATMINC}${inc_file}" + fi if [[ -f "${increment_file}" ]]; then ${NCP} "${increment_file}" "${DATA}/INPUT/${inc_file}" else diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index d443bb0862..081951e3ae 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -303,7 +303,7 @@ def finalize(self) -> None: cdate_inc = cdate.replace('.', '_') inc_copy = {'copy': []} for itile in range(6): - src = os.path.join(self.task_config.DATA, 'anl', f"atminc.{cdate_inc}z.tile{itile+1}.nc4") + src = os.path.join(self.task_config.DATA, 'anl', f"cubed_sphere_grid_atminc.{cdate_inc}z.tile{itile+1}.nc4") dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f'{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atminc.tile{itile+1}.nc') inc_copy['copy'].append([src, dest]) diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index a8987f0347..0022f7fb45 100644 --- a/ush/python/pygfs/task/atmens_analysis.py +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -128,7 +128,6 @@ def initialize_analysis(self) -> None: ---------- None """ - super().initialize() # stage observations logger.info(f"Staging list of observation files generated from JEDI config") @@ -270,10 +269,15 @@ def finalize(self) -> None: incdir = Template.substitute_structure(template_inc, TemplateConstants.DOLLAR_CURLY_BRACE, tmpl_inc_dict.get) inc_copy = {'copy': []} for itile in range(6): - src = os.path.join(self.task_config.DATA, 'anl', memchar, f"atminc.{cdate_inc}z.tile{itile+1}.nc4") + src = os.path.join(self.task_config.DATA, 'anl', memchar, f"cubed_sphere_grid_atminc.{cdate_inc}z.tile{itile+1}.nc4") dest = os.path.join(incdir, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atminc.tile{itile+1}.nc") inc_copy['copy'].append([src, dest]) + # Copy the temporary Gaussian increment. + #This will be removed when we have JEDI-based recentering. 
+ src = os.path.join(self.task_config.DATA, 'anl', memchar, f"atminc.{cdate_inc}z.tile{itile+1}.nc4") + inc_copy['copy'].append([src, dest]) + # copy increments src_list, dest_list = zip(*inc_copy['copy']) logger.debug(f"Copying {src_list}\nto {dest_list}") From a0fc4b9338b6c2142dcaff49ba0ee024f4777b93 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Wed, 25 Sep 2024 22:43:34 +0000 Subject: [PATCH 51/71] Fix bug from last commit --- ush/python/pygfs/task/atm_analysis.py | 2 +- ush/python/pygfs/task/atmens_analysis.py | 11 ++++++----- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index 081951e3ae..1c2a3da22a 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -304,7 +304,7 @@ def finalize(self) -> None: inc_copy = {'copy': []} for itile in range(6): src = os.path.join(self.task_config.DATA, 'anl', f"cubed_sphere_grid_atminc.{cdate_inc}z.tile{itile+1}.nc4") - dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f'{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atminc.tile{itile+1}.nc') + dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f'{self.task_config.RUN}.t{self.task_config.cyc:02d}z.cubed_sphere_grid_atminc.tile{itile+1}.nc') inc_copy['copy'].append([src, dest]) # copy increments diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index 0022f7fb45..b90b07c205 100644 --- a/ush/python/pygfs/task/atmens_analysis.py +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -270,13 +270,14 @@ def finalize(self) -> None: inc_copy = {'copy': []} for itile in range(6): src = os.path.join(self.task_config.DATA, 'anl', memchar, f"cubed_sphere_grid_atminc.{cdate_inc}z.tile{itile+1}.nc4") - dest = os.path.join(incdir, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atminc.tile{itile+1}.nc") + dest = os.path.join(incdir, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.cubed_sphere_grid_atminc.tile{itile+1}.nc") inc_copy['copy'].append([src, dest]) - # Copy the temporary Gaussian increment. - #This will be removed when we have JEDI-based recentering. - src = os.path.join(self.task_config.DATA, 'anl', memchar, f"atminc.{cdate_inc}z.tile{itile+1}.nc4") - inc_copy['copy'].append([src, dest]) + # Copy the temporary Gaussian increment. + # This will be removed when we have JEDI-based recentering. + src = os.path.join(self.task_config.DATA, 'anl', memchar, f"atminc.{cdate_inc}z.nc4") + dest = os.path.join(incdir, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atminc.nc") + inc_copy['copy'].append([src, dest]) # copy increments src_list, dest_list = zip(*inc_copy['copy']) From 947f735112771c0bde45db7225b3eb501bc93ee0 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Thu, 26 Sep 2024 14:26:57 +0000 Subject: [PATCH 52/71] Hopefully final bug fix --- ush/forecast_postdet.sh | 4 ++-- ush/python/pygfs/task/analcalc.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh index 4f6d7749fd..4a5bcaecfa 100755 --- a/ush/forecast_postdet.sh +++ b/ush/forecast_postdet.sh @@ -160,7 +160,7 @@ EOF done else # "${DOIAU}" == "NO" read_increment=".true." 
- if [[ "${DO_JEDIATMVAR:-NO}" == "YES" ]]; then + if [[ "${DO_JEDIATMVAR:-NO}" == "YES" ]] && [[ "${PREFIX_ATMINC}" != "r" ]]; then inc_files=("atminc.tile1.nc" "atminc.tile2.nc" "atminc.tile3.nc" "atminc.tile4.nc" "atminc.tile5.nc" "atminc.tile6.nc") res_latlon_dynamics="atminc" increment_file_on_native_grid=".true." @@ -183,7 +183,7 @@ EOF local increment_file for inc_file in "${inc_files[@]}"; do - if [[ "${DO_JEDIATMVAR:-NO}" == "YES" ]]; then + if [[ "${DO_JEDIATMVAR:-NO}" == "YES" ]] && [[ "${PREFIX_ATMINC}" != "r" ]]; then increment_file="${COMIN_ATMOS_ANALYSIS}/${RUN}.t${cyc}z.cubed_sphere_grid_${PREFIX_ATMINC}${inc_file}" else increment_file="${COMIN_ATMOS_ANALYSIS}/${RUN}.t${cyc}z.${PREFIX_ATMINC}${inc_file}" diff --git a/ush/python/pygfs/task/analcalc.py b/ush/python/pygfs/task/analcalc.py index 9a40640f63..ea692c93ef 100644 --- a/ush/python/pygfs/task/analcalc.py +++ b/ush/python/pygfs/task/analcalc.py @@ -142,7 +142,7 @@ def initialize(self) -> None: if fh == 6: for itile in range(6): - fh_dict['copy'].append([f"{anl_prefix}atminc.tile{itile+1}.nc", + fh_dict['copy'].append([f"{anl_prefix}cubed_sphere_grid_atminc.tile{itile+1}.nc", f"{self.task_config.AnalCalcDir(fh)}/siginc.06.tile{itile+1}.nc"]) else: for itile in range(6): From b28ad28074c4a5c9675e739428e26f653932bb7b Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Thu, 26 Sep 2024 15:16:58 +0000 Subject: [PATCH 53/71] Revert some stuff --- sorc/gdas.cd | 2 +- ush/python/pygfs/task/atmens_analysis.py | 17 +++++------------ 2 files changed, 6 insertions(+), 13 deletions(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 233ab2a57c..b1d90f24c4 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 233ab2a57c6da0bfb0b356df1619b217d8783c5b +Subproject commit b1d90f24c411a38d5419ab5dddecf010b13cb58a diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index b90b07c205..1667e919a5 100644 --- a/ush/python/pygfs/task/atmens_analysis.py +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -267,21 +267,14 @@ def finalize(self) -> None: # create output path for member analysis increment tmpl_inc_dict['MEMDIR'] = memchar incdir = Template.substitute_structure(template_inc, TemplateConstants.DOLLAR_CURLY_BRACE, tmpl_inc_dict.get) - inc_copy = {'copy': []} - for itile in range(6): - src = os.path.join(self.task_config.DATA, 'anl', memchar, f"cubed_sphere_grid_atminc.{cdate_inc}z.tile{itile+1}.nc4") - dest = os.path.join(incdir, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.cubed_sphere_grid_atminc.tile{itile+1}.nc") - inc_copy['copy'].append([src, dest]) - - # Copy the temporary Gaussian increment. - # This will be removed when we have JEDI-based recentering. 
src = os.path.join(self.task_config.DATA, 'anl', memchar, f"atminc.{cdate_inc}z.nc4") dest = os.path.join(incdir, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atminc.nc") - inc_copy['copy'].append([src, dest]) - # copy increments - src_list, dest_list = zip(*inc_copy['copy']) - logger.debug(f"Copying {src_list}\nto {dest_list}") + # copy increment + logger.debug(f"Copying {src} to {dest}") + inc_copy = { + 'copy': [[src, dest]] + } FileHandler(inc_copy).sync() def clean(self): From 0086604cacb795f86bbcf3ff729399c2fdc335c5 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Thu, 26 Sep 2024 15:18:54 +0000 Subject: [PATCH 54/71] Minor cleanup --- ush/python/pygfs/task/atm_analysis.py | 1 - 1 file changed, 1 deletion(-) diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index 1c2a3da22a..973c59a329 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -129,7 +129,6 @@ def initialize_analysis(self) -> None: ---------- None """ - super().initialize() # stage observations logger.info(f"Staging list of observation files generated from JEDI config") From f1e5ed50b8e0496f3cfabeb468324c2c48c408b5 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Fri, 27 Sep 2024 17:45:53 +0000 Subject: [PATCH 55/71] Update GDAS hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index b1d90f24c4..7bae01e9e7 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit b1d90f24c411a38d5419ab5dddecf010b13cb58a +Subproject commit 7bae01e9e73a61889e205cc3ae471aca9bd3a9c3 From f517dfa484c5571856d2f82e4f5edd01a1e5c43b Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Wed, 2 Oct 2024 14:45:26 +0000 Subject: [PATCH 56/71] Update hashes --- sorc/gdas.cd | 2 +- sorc/ufs_model.fd | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 7bae01e9e7..65bcffce9e 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 7bae01e9e73a61889e205cc3ae471aca9bd3a9c3 +Subproject commit 65bcffce9e84422d3a2c8b176a5e7bacc34acd28 diff --git a/sorc/ufs_model.fd b/sorc/ufs_model.fd index 38a29a6246..6a4e09e947 160000 --- a/sorc/ufs_model.fd +++ b/sorc/ufs_model.fd @@ -1 +1 @@ -Subproject commit 38a29a62461cb1f9bf530420d5bc2f73a4650724 +Subproject commit 6a4e09e94773ffa39ce7ab6a54a885efada91f21 From b9d83381722427c2c977bc857e34b4303b9c95c0 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Wed, 2 Oct 2024 14:50:47 +0000 Subject: [PATCH 57/71] Update GDAS hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 65bcffce9e..7fc8a9b9eb 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 65bcffce9e84422d3a2c8b176a5e7bacc34acd28 +Subproject commit 7fc8a9b9eb36ff3d373b555835d89d17064e1cc2 From 5a65800fdb822d34b70b3837d3445bae73fe8aa9 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Fri, 4 Oct 2024 17:27:16 +0000 Subject: [PATCH 58/71] Update GDAS hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 7fc8a9b9eb..4691caa195 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 7fc8a9b9eb36ff3d373b555835d89d17064e1cc2 +Subproject commit 4691caa1952b9ccbdfb5a692aef9ab12af858f4f From 0b8484323dd1f3026c0ce1b03269aae66698845b Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Mon, 7 Oct 2024 14:03:53 +0000 Subject: [PATCH 59/71] Remove ges.sfc 
background file --- ush/python/pygfs/task/analcalc.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/ush/python/pygfs/task/analcalc.py b/ush/python/pygfs/task/analcalc.py index ea692c93ef..338cbbd3a2 100644 --- a/ush/python/pygfs/task/analcalc.py +++ b/ush/python/pygfs/task/analcalc.py @@ -134,12 +134,8 @@ def initialize(self) -> None: anl_prefix = f"{self.task_config.COM_ATMOS_ANALYSIS}/{self.task_config.APREFIX}" for fh in self.task_config.IAUFHRS: fh_dict['mkdir'].append(self.task_config.AnalCalcDir(fh)) - fh_dict['copy'].append([f"{hist_prefix}cubed_sphere_grid_atmf{format(fh, '03')}.nc", - f"{self.task_config.AnalCalcDir(fh)}/ges.atm.{format(fh, '02')}.nc"]) - fh_dict['copy'].append([f"{hist_prefix}cubed_sphere_grid_sfcf{format(fh, '03')}.nc", - f"{self.task_config.AnalCalcDir(fh)}/ges.sfc.{format(fh, '02')}.nc"]) - + f"{self.task_config.AnalCalcDir(fh)}/ges.{format(fh, '02')}.nc"]) if fh == 6: for itile in range(6): fh_dict['copy'].append([f"{anl_prefix}cubed_sphere_grid_atminc.tile{itile+1}.nc", From a7abdfbb850c0a1e587206d79b649715dc9ce47b Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Mon, 7 Oct 2024 14:08:22 +0000 Subject: [PATCH 60/71] pynorms --- ush/python/pygfs/task/atm_analysis.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index 5624deb231..8abfc4f71d 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -307,7 +307,8 @@ def finalize(self) -> None: inc_copy = {'copy': []} for itile in range(6): src = os.path.join(self.task_config.DATA, 'anl', f"cubed_sphere_grid_atminc.{cdate_inc}z.tile{itile+1}.nc4") - dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f'{self.task_config.RUN}.t{self.task_config.cyc:02d}z.cubed_sphere_grid_atminc.tile{itile+1}.nc') + dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, + f'{self.task_config.RUN}.t{self.task_config.cyc:02d}z.cubed_sphere_grid_atminc.tile{itile+1}.nc') inc_copy['copy'].append([src, dest]) # copy increments From c5231100764d48ecabb86248ded71c28d9879310 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Mon, 7 Oct 2024 14:11:24 +0000 Subject: [PATCH 61/71] Update GDAS hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 4691caa195..903975ee86 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 4691caa1952b9ccbdfb5a692aef9ab12af858f4f +Subproject commit 903975ee86869790a9bf858e1ee8b969ba1a363e From ae0ae95cc42150490b60d579b2b25af98333f5f3 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Tue, 8 Oct 2024 00:16:35 +0000 Subject: [PATCH 62/71] Account for warm start in forecast_postdet.sh --- ush/forecast_postdet.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh index 4a5bcaecfa..f0d86c72cb 100755 --- a/ush/forecast_postdet.sh +++ b/ush/forecast_postdet.sh @@ -159,8 +159,8 @@ EOF delimiter="," done else # "${DOIAU}" == "NO" - read_increment=".true." - if [[ "${DO_JEDIATMVAR:-NO}" == "YES" ]] && [[ "${PREFIX_ATMINC}" != "r" ]]; then + read_increment=".true." + if [[ "${DO_JEDIATMVAR:-NO}" == "YES" ]] && [[ "${PREFIX_ATMINC}" != "r" ]] && [[ "${PDY}${cyc}" != "${SDATE}" ]]; then inc_files=("atminc.tile1.nc" "atminc.tile2.nc" "atminc.tile3.nc" "atminc.tile4.nc" "atminc.tile5.nc" "atminc.tile6.nc") res_latlon_dynamics="atminc" increment_file_on_native_grid=".true." 
@@ -183,7 +183,7 @@ EOF local increment_file for inc_file in "${inc_files[@]}"; do - if [[ "${DO_JEDIATMVAR:-NO}" == "YES" ]] && [[ "${PREFIX_ATMINC}" != "r" ]]; then + if [[ "${DO_JEDIATMVAR:-NO}" == "YES" ]] && [[ "${PREFIX_ATMINC}" != "r" ]] && [[ "${PDY}${cyc}" != "${SDATE}" ]]; then increment_file="${COMIN_ATMOS_ANALYSIS}/${RUN}.t${cyc}z.cubed_sphere_grid_${PREFIX_ATMINC}${inc_file}" else increment_file="${COMIN_ATMOS_ANALYSIS}/${RUN}.t${cyc}z.${PREFIX_ATMINC}${inc_file}" From ac31f23f4f547cf15895038103abe08498dfdaa5 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Tue, 8 Oct 2024 00:19:25 +0000 Subject: [PATCH 63/71] Update GDAS hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 903975ee86..59b6d97a42 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 903975ee86869790a9bf858e1ee8b969ba1a363e +Subproject commit 59b6d97a42c6cd72ea23e0f9f40f7f22dbedade7 From e7fcafd3ad0504ba12c3cc2ad647217d99dfea83 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Sun, 13 Oct 2024 14:07:32 +0000 Subject: [PATCH 64/71] Update GDAS hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 59b6d97a42..58b34ab8c1 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 59b6d97a42c6cd72ea23e0f9f40f7f22dbedade7 +Subproject commit 58b34ab8c1429df11b8f04b94d102fe3bd535a75 From f584fc297c361c6c543e75249f81696635b1b733 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Tue, 22 Oct 2024 13:09:41 +0000 Subject: [PATCH 65/71] Update GDAS hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 58b34ab8c1..04fa9b617a 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 58b34ab8c1429df11b8f04b94d102fe3bd535a75 +Subproject commit 04fa9b617a1b5672e25b1a0afdb639db2185e63d From 2f96c528621a49367c220cb01e2a153157e9c9ac Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Thu, 24 Oct 2024 00:56:57 +0000 Subject: [PATCH 66/71] Update GDAS hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 04fa9b617a..9393b72656 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 04fa9b617a1b5672e25b1a0afdb639db2185e63d +Subproject commit 9393b726561155480d45bddc8b503b09406aff08 From f7957f858a41d643fee2833846d7f4f9d5ff1ce6 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Mon, 28 Oct 2024 14:14:19 +0000 Subject: [PATCH 67/71] Update GDAS hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 9393b72656..2947d61001 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 9393b726561155480d45bddc8b503b09406aff08 +Subproject commit 2947d610010e5ca2cdd4e2cb4fc492c281bb6867 From 8bd51669d295598d39759bde69850a0115301c52 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Fri, 1 Nov 2024 15:18:39 +0000 Subject: [PATCH 68/71] Update GDAS hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 2947d61001..e7e952ccb0 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 2947d610010e5ca2cdd4e2cb4fc492c281bb6867 +Subproject commit e7e952ccb00d2eee3be3145572c49b7c51a34cf0 From ef1673fb9b77923b20b5498efc3f810b9ea3b5dd Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Fri, 1 Nov 2024 15:25:02 +0000 Subject: [PATCH 69/71] Update 
GDAS hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index e7e952ccb0..83e3dc3649 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit e7e952ccb00d2eee3be3145572c49b7c51a34cf0 +Subproject commit 83e3dc3649e5b4fb4a73ac0bb3947b8595069bd7 From d1874dcd6bda5cd5d7076fb9ff7f1ef97d8c7c3b Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Sun, 3 Nov 2024 14:52:41 +0000 Subject: [PATCH 70/71] Update GDAS hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 83e3dc3649..b388be8f5a 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 83e3dc3649e5b4fb4a73ac0bb3947b8595069bd7 +Subproject commit b388be8f5a5063beeffd7028cab28f035dfcabb6 From 18a80244b3fd4a163aadbf4b87fa3ef8c2cd6751 Mon Sep 17 00:00:00 2001 From: DavidNew-NOAA Date: Thu, 7 Nov 2024 17:16:50 +0000 Subject: [PATCH 71/71] Update gdas hash --- sorc/gdas.cd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/gdas.cd b/sorc/gdas.cd index b388be8f5a..e8ed7532f4 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit b388be8f5a5063beeffd7028cab28f035dfcabb6 +Subproject commit e8ed7532f4df58c7d2ab918c715194434b89c241