Add handling to select CRTM cloud optical table based on cloud scheme and update calcanl_gfs.py (#2645)

This PR proposes updates to the following two scripts:

1. In **scripts/exglobal_atmos_analysis.sh**: add handling to select the CRTM
cloud optical table based on the cloud microphysics scheme indicated by
`imp_physics`. The default scheme in the GFS forecast model is the Thompson
scheme (`imp_physics = 8`); a short sketch of the selection rule follows this
list.

2. In **ush/calcanl_gfs.py**: increase the number of MPI tasks declared in
the script, needed because of the increased number of variables used to
interpolate increments and calculate the analysis in the `netcdf_io` routines
in GSI-utils (a sketch of the task-count mechanics follows that file's diff
below). Here is the related [PR #46 for
GSI-utils](NOAA-EMC/GSI-utils#46).
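
As a quick illustration of item 1, here is a minimal Python sketch. It is not part of the commit, and the dictionary and function names are invented for the example; the authoritative shell implementation is in the diff below. The rule amounts to a lookup from `imp_physics` to a table file, with any unsupported value treated as fatal:

```python
# Illustrative sketch only; the real change is the shell diff below.
# Maps imp_physics to the CRTM cloud optical table linked as CloudCoeff.bin.
CLOUD_COEFF_TABLES = {
    8: "CloudCoeff.Thompson08.-109z-1.bin",   # Thompson scheme (GFS default)
    11: "CloudCoeff.GFDLFV3.-109z-1.bin",     # GFDL scheme
}


def select_cloud_coeff(imp_physics: int) -> str:
    """Return the cloud optical table file for a supported scheme."""
    if imp_physics not in CLOUD_COEFF_TABLES:
        raise SystemExit(f"FATAL ERROR: No valid CRTM cloud optical table "
                         f"found for imp_physics = {imp_physics}")
    return CLOUD_COEFF_TABLES[imp_physics]


print(select_cloud_coeff(8))  # CloudCoeff.Thompson08.-109z-1.bin
```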

---------

Co-authored-by: Rahul Mahajan <[email protected]>
Co-authored-by: Walter Kolczynski - NOAA <[email protected]>
3 people authored on Jun 10, 2024
1 parent 9caa51d · commit e7909af
Showing 2 changed files with 20 additions and 9 deletions.
scripts/exglobal_atmos_analysis.sh: 12 additions, 1 deletion

```diff
@@ -430,7 +430,18 @@ ${NLN} ${CRTM_FIX}/NPOESS.VISsnow.EmisCoeff.bin ./crtm_coeffs/NPOESS.VISsnow.Em
 ${NLN} ${CRTM_FIX}/NPOESS.VISwater.EmisCoeff.bin ./crtm_coeffs/NPOESS.VISwater.EmisCoeff.bin
 ${NLN} ${CRTM_FIX}/FASTEM6.MWwater.EmisCoeff.bin ./crtm_coeffs/FASTEM6.MWwater.EmisCoeff.bin
 ${NLN} ${CRTM_FIX}/AerosolCoeff.bin ./crtm_coeffs/AerosolCoeff.bin
-${NLN} ${CRTM_FIX}/CloudCoeff.GFDLFV3.-109z-1.bin ./crtm_coeffs/CloudCoeff.bin
+if (( imp_physics == 8 )); then
+  echo "using CRTM Thompson cloud optical table"
+  ${NLN} "${CRTM_FIX}/CloudCoeff.Thompson08.-109z-1.bin" ./crtm_coeffs/CloudCoeff.bin
+elif (( imp_physics == 11 )); then
+  echo "using CRTM GFDL cloud optical table"
+  ${NLN} "${CRTM_FIX}/CloudCoeff.GFDLFV3.-109z-1.bin" ./crtm_coeffs/CloudCoeff.bin
+else
+  echo "INVALID imp_physics = ${imp_physics}"
+  echo "FATAL ERROR: No valid CRTM cloud optical table found for imp_physics = ${imp_physics}"
+  exit 1
+fi
+
 
 ##############################################################
 # Observational data
```
ush/calcanl_gfs.py: 8 additions, 8 deletions
```diff
@@ -135,7 +135,7 @@ def calcanl_gfs(DoIAU, l4DEnsVar, Write4Danl, ComOut, APrefix,
     ExecCMDMPI1 = ExecCMDMPI.replace("$ncmd", str(1))
     ExecCMDMPI = ExecCMDMPI.replace("$ncmd", str(nFH))
     ExecCMDLevs = ExecCMDMPI.replace("$ncmd", str(levs))
-    ExecCMDMPI10 = ExecCMDMPI.replace("$ncmd", str(10))
+    ExecCMDMPI13 = ExecCMDMPI.replace("$ncmd", str(13))
 
     # are we using mpirun with lsf, srun, or aprun with Cray?
     launcher = ExecCMDMPI.split(' ')[0]
@@ -156,7 +156,7 @@ def calcanl_gfs(DoIAU, l4DEnsVar, Write4Danl, ComOut, APrefix,
         ExecCMDMPILevs_host = 'mpirun -np ' + str(levs) + ' --hostfile hosts'
         ExecCMDMPILevs_nohost = 'mpirun -np ' + str(levs)
         ExecCMDMPI1_host = 'mpirun -np 1 --hostfile hosts'
-        ExecCMDMPI10_host = 'mpirun -np 10 --hostfile hosts'
+        ExecCMDMPI13_host = 'mpirun -np 13 --hostfile hosts'
     elif launcher == 'mpiexec':
         hostfile = os.getenv('PBS_NODEFILE', '')
         with open(hostfile) as f:
@@ -175,7 +175,7 @@ def calcanl_gfs(DoIAU, l4DEnsVar, Write4Danl, ComOut, APrefix,
         ExecCMDMPILevs_host = 'mpiexec -l -n ' + str(levs)
         ExecCMDMPILevs_nohost = 'mpiexec -l -n ' + str(levs)
         ExecCMDMPI1_host = 'mpiexec -l -n 1 --cpu-bind depth --depth ' + str(NThreads)
-        ExecCMDMPI10_host = 'mpiexec -l -n 10 --cpu-bind depth --depth ' + str(NThreads)
+        ExecCMDMPI13_host = 'mpiexec -l -n 13 --cpu-bind depth --depth ' + str(NThreads)
     elif launcher == 'srun':
         nodes = os.getenv('SLURM_JOB_NODELIST', '')
         hosts_tmp = subprocess.check_output('scontrol show hostnames ' + nodes, shell=True)
@@ -200,7 +200,7 @@ def calcanl_gfs(DoIAU, l4DEnsVar, Write4Danl, ComOut, APrefix,
         ExecCMDMPILevs_host = 'srun -n ' + str(levs) + ' --verbose --export=ALL -c 1 --distribution=arbitrary --cpu-bind=cores'
         ExecCMDMPILevs_nohost = 'srun -n ' + str(levs) + ' --verbose --export=ALL'
         ExecCMDMPI1_host = 'srun -n 1 --verbose --export=ALL -c 1 --distribution=arbitrary --cpu-bind=cores'
-        ExecCMDMPI10_host = 'srun -n 10 --verbose --export=ALL -c 1 --distribution=arbitrary --cpu-bind=cores'
+        ExecCMDMPI13_host = 'srun -n 13 --verbose --export=ALL -c 1 --distribution=arbitrary --cpu-bind=cores'
     elif launcher == 'aprun':
         hostfile = os.getenv('LSB_DJOB_HOSTFILE', '')
         with open(hostfile) as f:
@@ -213,7 +213,7 @@ def calcanl_gfs(DoIAU, l4DEnsVar, Write4Danl, ComOut, APrefix,
         ExecCMDMPILevs_host = 'aprun -l hosts -d ' + str(NThreads) + ' -n ' + str(levs)
         ExecCMDMPILevs_nohost = 'aprun -d ' + str(NThreads) + ' -n ' + str(levs)
         ExecCMDMPI1_host = 'aprun -l hosts -d ' + str(NThreads) + ' -n 1'
-        ExecCMDMPI10_host = 'aprun -l hosts -d ' + str(NThreads) + ' -n 10'
+        ExecCMDMPI13_host = 'aprun -l hosts -d ' + str(NThreads) + ' -n 13'
     else:
         print('unknown MPI launcher. Failure.')
         sys.exit(1)
@@ -248,13 +248,13 @@ def calcanl_gfs(DoIAU, l4DEnsVar, Write4Danl, ComOut, APrefix,
                     ihost += 1
                 for a in range(0, 5):
                     hostfile.write(hosts[ihost] + '\n')
-                for a in range(0, 9):  # need 9 more of the same host for the 10 tasks for chgres_inc
+                for a in range(0, 12):  # need 12 more of the same host for the 13 tasks for chgres_inc
                     hostfile.write(hosts[ihost] + '\n')
             if launcher == 'srun':
                 os.environ['SLURM_HOSTFILE'] = CalcAnlDir + '/hosts'
             print('interp_inc', fh, namelist)
-            job = subprocess.Popen(ExecCMDMPI10_host + ' ' + CalcAnlDir + '/chgres_inc.x', shell=True, cwd=CalcAnlDir)
-            print(ExecCMDMPI10_host + ' ' + CalcAnlDir + '/chgres_inc.x submitted on ' + hosts[ihost])
+            job = subprocess.Popen(ExecCMDMPI13_host + ' ' + CalcAnlDir + '/chgres_inc.x', shell=True, cwd=CalcAnlDir)
+            print(ExecCMDMPI13_host + ' ' + CalcAnlDir + '/chgres_inc.x submitted on ' + hosts[ihost])
             sys.stdout.flush()
             ec = job.wait()
             if ec != 0:
```
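
To illustrate item 2, here is a minimal, self-contained sketch of the two mechanisms this diff touches. The launcher template string and host name are assumptions for the example, not the exact values from calcanl_gfs.py: the script sizes each MPI launch command by substituting a `$ncmd` placeholder, and it pins all `chgres_inc.x` tasks to one node by repeating that host in the hostfile.

```python
# Sketch only: hypothetical launcher template and host name.
ExecCMDMPI = "srun -n $ncmd --verbose --export=ALL"

# The script specializes the template per executable; chgres_inc.x now
# needs 13 tasks instead of 10.
ExecCMDMPI13 = ExecCMDMPI.replace("$ncmd", str(13))
print(ExecCMDMPI13)  # srun -n 13 --verbose --export=ALL

# With --distribution=arbitrary, srun reads each rank's host from
# SLURM_HOSTFILE, so one initial entry plus 12 repeats maps all 13
# chgres_inc.x tasks to a single node.
host = "node001"
hostfile_lines = [host] + [host] * 12
assert len(hostfile_lines) == 13
```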
