Enable nesting (#40)
jonasjucker authored Nov 21, 2024
1 parent c259e65 commit 23f56e8
Showing 6 changed files with 179 additions and 138 deletions.
43 changes: 6 additions & 37 deletions jenkins/RemoteExtraction
@@ -7,7 +7,7 @@ pipeline {
}
}
options {
timeout(time: 3, unit: 'HOURS')
timeout(time: 6, unit: 'HOURS')
}
stages {
stage('Create archive') {
@@ -20,7 +20,7 @@
stage('Create Hash from Build ID') {
steps {
sh """
python3 src/hash.py --build-id ${BUILD_ID} --hash-file ${WORKSPACE}/hash.txt
python3 src/hash.py --hash-file ${WORKSPACE}/hash.txt
"""
}
}
@@ -54,60 +54,29 @@ pipeline {
}
stage('Generate external parameters') {
steps {
script {
env.GRID_FILE = readFile('icontools/grid.txt').trim()
}
sh """
cd ${WORKSPACE}/extpar
podman run \
-v /c2sm-data/extpar-input-data:/data \
-v ${WORKSPACE}/icontools:/grid \
-v ${WORKSPACE}/extpar:/work \
extpar-image \
python3 -m extpar.WrapExtpar \
--run-dir /work \
--raw-data-path /data/linked_data \
--account none \
--no-batch-job \
--host docker \
--input-grid /grid/${GRID_FILE} \
--extpar-config /work/config.json
./src/run_extpar.sh
"""
}
}
}
post {
success {
sh "cp extpar/*.log ${WORKSPACE}/output/logs"
sh "cp extpar/external_parameter.nc ${WORKSPACE}/output/."
sh "cp icontools/*.nc ${WORKSPACE}/output/."
sh "cp icontools/*.html ${WORKSPACE}/output/."
sh "zip -r output.zip output"
sh "python3 src/copy_zip.py --zip-file output.zip --destination ${https_public_root} --hash-file ${WORKSPACE}/hash.txt"
sh "python3 src/archive_artifacts.py --workspace ${WORKSPACE} --destination ${https_public_root} --hash-file ${WORKSPACE}/hash.txt"
withCredentials([string(credentialsId: 'd976fe24-cabf-479e-854f-587c152644bc', variable: 'GITHUB_AUTH_TOKEN')]) {
sh "python3 src/report.py --auth_token ${GITHUB_AUTH_TOKEN} --issue_id_file ${WORKSPACE}/issue.txt --hash-file ${WORKSPACE}/hash.txt"
}
deleteDir()
}
failure {
sh "cp extpar/*.log ${WORKSPACE}/output/logs"
sh "cp extpar/external_parameter.nc ${WORKSPACE}/output/. || true"
sh "cp icontools/*.nc ${WORKSPACE}/output/. || true"
sh "cp icontools/*.html ${WORKSPACE}/output/. || true"
sh "zip -r output.zip output"
sh "python3 src/copy_zip.py --zip-file output.zip --destination ${https_public_root} --hash-file ${WORKSPACE}/hash.txt"
sh "python3 src/archive_artifacts.py --workspace ${WORKSPACE} --destination ${https_public_root} --hash-file ${WORKSPACE}/hash.txt"
withCredentials([string(credentialsId: 'd976fe24-cabf-479e-854f-587c152644bc', variable: 'GITHUB_AUTH_TOKEN')]) {
sh "python3 src/report.py --auth_token ${GITHUB_AUTH_TOKEN} --issue_id_file ${WORKSPACE}/issue.txt --hash-file ${WORKSPACE}/hash.txt --failure"
}
deleteDir()
}
aborted {
sh "cp extpar/*.log ${WORKSPACE}/output/logs"
sh "cp extpar/external_parameter.nc ${WORKSPACE}/output/. || true"
sh "cp icontools/*.nc ${WORKSPACE}/output/. || true"
sh "cp icontools/*.html ${WORKSPACE}/output/. || true"
sh "zip -r output.zip output"
sh "python3 src/copy_zip.py --zip-file output.zip --destination ${https_public_root} --hash-file ${WORKSPACE}/hash.txt"
sh "python3 src/archive_artifacts.py --workspace ${WORKSPACE} --destination ${https_public_root} --hash-file ${WORKSPACE}/hash.txt"
withCredentials([string(credentialsId: 'd976fe24-cabf-479e-854f-587c152644bc', variable: 'GITHUB_AUTH_TOKEN')]) {
sh "python3 src/report.py --auth_token ${GITHUB_AUTH_TOKEN} --issue_id_file ${WORKSPACE}/issue.txt --hash-file ${WORKSPACE}/hash.txt --abort"
}
73 changes: 73 additions & 0 deletions src/archive_artifacts.py
@@ -0,0 +1,73 @@
import os
import argparse
import shutil
import glob
import zipfile

def copy_files(src_pattern, dest_dir, prefix=""):
    for file in glob.glob(src_pattern):
        dest_file = os.path.join(dest_dir, prefix + os.path.basename(file))
        print(f"Copying {file} to {dest_file}")
        shutil.copy(file, dest_file)

def copy_extpar(workspace, dest):
    i = 1
    for domain in sorted(glob.glob(os.path.join(workspace, 'extpar_*'))):
        # Copy logfiles
        copy_files(os.path.join(domain, "*.log"), os.path.join(dest, 'logs'), f"DOM_{i}_")
        # Copy external parameter file
        copy_files(os.path.join(domain, "external_parameter*.nc"), dest, f"DOM_{i}_")
        i += 1


def copy_icontools(workspace, dest):
    # Copy .nc files
    copy_files(os.path.join(workspace, 'icontools', '*.nc'), os.path.join(dest))
    # Copy .html files
    copy_files(os.path.join(workspace, 'icontools', '*.html'), dest)

def copy_zip(destination, zip_file, hash):
    folder = os.path.join(destination, hash)
    # Create the directory
    os.makedirs(folder, exist_ok=True)
    print(f"Created directory {folder}")

    # Copy the zip file to the directory
    shutil.copy(zip_file, folder)
    print(f"Copied {zip_file} to {folder}")

def create_zip(zip_file_path, source_dir):
    with zipfile.ZipFile(zip_file_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
        for root, dirs, files in os.walk(source_dir):
            for file in files:
                file_path = os.path.join(root, file)
                arcname = os.path.relpath(file_path, source_dir)
                zipf.write(file_path, arcname)

def main():
    # Create the parser
    parser = argparse.ArgumentParser(description="Archive artifacts to a unique folder.")

    # Add the arguments
    parser.add_argument('--destination', type=str, required=True, help='The destination folder to store the zip file')
    parser.add_argument('--hash-file', type=str, required=True, help='Hash file')
    parser.add_argument('--workspace', type=str, required=True, help='The workspace folder')

    # Parse the arguments
    args = parser.parse_args()

    with open(args.hash_file, 'r') as f:
        hash = f.read()

    # Copy icontools and extpar files to the output directory
    output_dir = os.path.join(args.workspace, 'output')
    copy_icontools(args.workspace, output_dir)
    copy_extpar(args.workspace, output_dir)

    # Create a zip file
    zip_file = os.path.join(args.workspace, 'output.zip')
    create_zip(zip_file, output_dir)
    copy_zip(args.destination, zip_file, hash)

if __name__ == '__main__':
    main()
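
As a rough illustration of the per-domain naming introduced here, the sketch below builds a throwaway workspace with two extpar_* run directories and shows the DOM_<n>_ prefixes that end up in output/. The temporary paths and artifact file names are assumptions made for the example; only copy_extpar itself comes from this commit, and importing it assumes src/ is on the Python path.

# Sketch only: demonstrate the DOM_<n>_ prefixing done by copy_extpar.
import os
import tempfile

from archive_artifacts import copy_extpar  # assumes src/ is on PYTHONPATH

workspace = tempfile.mkdtemp()
for i in (1, 2):
    domain_dir = os.path.join(workspace, f"extpar_{i}")
    os.makedirs(domain_dir)
    # Fake per-domain artifacts as an extpar run would leave behind
    open(os.path.join(domain_dir, "extpar.log"), "w").close()
    open(os.path.join(domain_dir, "external_parameter.nc"), "w").close()

output_dir = os.path.join(workspace, "output")
os.makedirs(os.path.join(output_dir, "logs"))  # copy_files expects the logs dir to exist

copy_extpar(workspace, output_dir)
print(sorted(os.listdir(output_dir)))
# -> ['DOM_1_external_parameter.nc', 'DOM_2_external_parameter.nc', 'logs']
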
26 changes: 0 additions & 26 deletions src/copy_zip.py

This file was deleted.

136 changes: 68 additions & 68 deletions src/gridgen_namelist.py
@@ -8,89 +8,89 @@ def load_config(config_file):
config = json.load(f)
return config

def write_local_namelist(config,wrk_dir):
# Set default values
parent_id = 0
lwrite_parent = True
initial_refinement = True
basegrid_grid_root = config.get('grid_root')
basegrid_grid_level = config.get('grid_level')
dom_outfile = config.get('outfile')
dom_region_type = config.get('region_type')

# Create the namelist content
namelist_content = f"""&gridgen_nml
parent_id = {parent_id} ! This list defines parent-nest relations
dom(1)%lwrite_parent = .{str(lwrite_parent).upper()}.
basegrid%grid_root = {basegrid_grid_root}
basegrid%grid_level = {basegrid_grid_level}
initial_refinement = .{str(initial_refinement).upper()}.
dom(1)%outfile = "{dom_outfile}"
dom(1)%region_type = {dom_region_type}
dom(1)%center_lon = {config.get('center_lon')}
dom(1)%center_lat = {config.get('center_lat')}
dom(1)%hwidth_lon = {config.get('hwidth_lon')}
dom(1)%hwidth_lat = {config.get('hwidth_lat')}
/
"""

# Write the namelist content to a file
with open(os.path.join(wrk_dir,'nml_gridgen'), 'w') as f:
f.write(namelist_content)

# write filename to grid.txt for extpar
with open(os.path.join(wrk_dir,'grid.txt'), 'w') as f:
f.write(f'{dom_outfile}_DOM01.nc')

def write_global_namelist(config,wrk_dir):
def write_gridgen_namelist(config,wrk_dir):
# Set default values
parent_id = 0
lwrite_parent = True
parent_id = ",".join(map(str, range(len(config["domains"]))))
initial_refinement = True
basegrid_grid_root = config.get('grid_root')
basegrid_grid_level = config.get('grid_level')
dom_outfile = config.get('outfile')
dom_region_type = config.get('region_type')

# Create the namelist content
namelist_content = f"""&gridgen_nml
parent_id = {parent_id} ! This list defines parent-nest relations
dom(1)%lwrite_parent = .{str(lwrite_parent).upper()}.
basegrid%grid_root = {basegrid_grid_root}
basegrid%grid_level = {basegrid_grid_level}
initial_refinement = .{str(initial_refinement).upper()}.
dom(1)%outfile = "{dom_outfile}"
dom(1)%region_type = {dom_region_type}
/
"""
namelist = []
namelist.append("&gridgen_nml")
namelist.append(f" parent_id = {parent_id} ! This list defines parent-nest relations")
namelist.append(f" initial_refinement = .{str(initial_refinement).upper()}.")
namelist.append("")

# base grid
namelist.append(f" basegrid%grid_root = {config.get('grid_root')}")
namelist.append(f" basegrid%grid_level = {config.get('grid_level')}")
namelist.append(f" basegrid%icopole_lon = {config.get('icopole_lon', 0.0)}")
namelist.append(f" basegrid%icopole_lat = {config.get('icopole_lat', 90)}")
namelist.append(f" basegrid%icorotation = {config.get('icorotation', 0.0)}")

# tuning parameters
namelist.append(f" lspring_dynamics = .{str(config.get('lspring_dynamics',False)).upper()}.")
namelist.append(f" maxit = {config.get('maxit', 500)}")
namelist.append(f" beta_spring = {config.get('beta_spring', 0.9)}")
namelist.append("")

# centre and subcentre
namelist.append(f" centre = {config.get('centre',78)}")
namelist.append(f" subcentre = {config.get('subcentre',255)}")
namelist.append("")

for i, domain in enumerate(config["domains"]):
lwrite_parent = i == 0
namelist.append(f" dom({i+1})%outfile = \"{config.get('outfile')}\" ")
namelist.append(f" dom({i+1})%lwrite_parent = .{str(lwrite_parent).upper()}.")
namelist.append(f" dom({i+1})%region_type = {domain['region_type']}")
namelist.append(f" dom({i+1})%number_of_grid_used = {domain.get('number_of_grid_used',0)}")
namelist.append("")

# local domain
if domain["region_type"] == 3:
namelist.append(f" dom({i+1})%center_lon = {domain.get('center_lon',0.0)}")
namelist.append(f" dom({i+1})%center_lat = {domain.get('center_lat',0.0)}")
namelist.append(f" dom({i+1})%hwidth_lon = {domain.get('hwidth_lon',0.0)}")
namelist.append(f" dom({i+1})%hwidth_lat = {domain.get('hwidth_lat',0.0)}")
namelist.append("")

namelist.append(f" dom({i+1})%lrotate = .{str(domain.get('lrotate', True)).upper()}.")
namelist.append(f" dom({i+1})%pole_lon = {domain.get('pole_lon',-180.0)}")
namelist.append(f" dom({i+1})%pole_lat = {domain.get('pole_lat', 90.0)}")
namelist.append("")

# write filename to grid_i.txt for extpar
with open(os.path.join(wrk_dir,f'grid_{i+1}.txt'), 'w') as f:
f.write(f"{config.get('outfile')}_DOM{(i+1):02d}.nc")

namelist.append("/")
namelist.append("")

# Write the namelist content to a file
with open(os.path.join(wrk_dir,'nml_gridgen'), 'w') as f:
f.write(namelist_content)
f.write("\n".join(namelist))

# write filename to grid.txt for extpar
with open(os.path.join(wrk_dir,'grid.txt'), 'w') as f:
f.write(f'{dom_outfile}_DOM01.nc')

def main(workspace, config_path):
# Create directories
extpar_dir = os.path.join(workspace, 'extpar')
icontools_dir = os.path.join(workspace, 'icontools')
os.makedirs(extpar_dir, exist_ok=True)
os.makedirs(icontools_dir, exist_ok=True)

# Copy config.json to extpar directory
shutil.copy(config_path, os.path.join(extpar_dir, 'config.json'))


# Load config and write namelist
config = load_config(config_path)
config = config['icontools']

if config["region_type"] == 1:
write_global_namelist(config, icontools_dir)
else:
write_local_namelist(config, icontools_dir)
nr_domains = len(config['domains'])

# Create directories
for i in range(nr_domains):
extpar_dir = os.path.join(workspace, f"extpar_{i+1}")
os.makedirs(extpar_dir, exist_ok=True)
# Copy config.json to extpar directory
shutil.copy(config_path, os.path.join(extpar_dir, 'config.json'))

icontools_dir = os.path.join(workspace, 'icontools')
os.makedirs(icontools_dir, exist_ok=True)

write_gridgen_namelist(config, icontools_dir)

if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Setup workspace and generate namelist")
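
For readability, here is a minimal sketch of the nested two-domain configuration that write_gridgen_namelist above would consume. The key names mirror what the function reads from the "icontools" section of config.json; the concrete values, the working directory, and the import path are assumptions made for this example, not part of the commit.

# Sketch only: a two-domain (parent + nest) config for write_gridgen_namelist.
import os

from gridgen_namelist import write_gridgen_namelist  # assumes src/ is on PYTHONPATH

config = {
    "grid_root": 2,
    "grid_level": 4,
    "outfile": "icon_grid",
    "domains": [
        # DOM01: parent domain (local region, region_type = 3)
        {"region_type": 3, "center_lon": 8.0, "center_lat": 47.0,
         "hwidth_lon": 4.0, "hwidth_lat": 3.0},
        # DOM02: nest inside DOM01
        {"region_type": 3, "center_lon": 8.5, "center_lat": 47.0,
         "hwidth_lon": 2.0, "hwidth_lat": 1.5},
    ],
}

wrk_dir = "icontools_test"
os.makedirs(wrk_dir, exist_ok=True)

# With two domains, parent_id is rendered as "0,1"; nml_gridgen plus
# grid_1.txt and grid_2.txt are written into wrk_dir.
write_gridgen_namelist(config, wrk_dir)
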
13 changes: 6 additions & 7 deletions src/hash.py
@@ -1,20 +1,19 @@
import os
import hashlib
import argparse
import shutil
from datetime import datetime

# Create the parser
parser = argparse.ArgumentParser(description="Hash Build ID and create a file with the hash value.")

# Add the arguments
parser.add_argument('--build-id', type=str, required=True, help='The build ID')
parser.add_argument('--hash-file', type=str, required=True, help='Hash file')

# Parse the arguments
args = parser.parse_args()

# Compute the SHA256 hash of BUILD_ID
hash = hashlib.sha256(args.build_id.encode()).hexdigest()
# Get the current time as a string
current_time = datetime.now().isoformat()

# Create a hash from the current time
hash = hashlib.sha256(current_time.encode()).hexdigest()

with open(args.hash_file, 'w') as f:
f.write(hash)
26 changes: 26 additions & 0 deletions src/run_extpar.sh
@@ -0,0 +1,26 @@
#!/bin/bash

set -e

# Loop over all folders with the pattern extpar_*
i=1
for dir in ${WORKSPACE}/extpar_*; do
    echo "Processing directory: $dir"
    cd "$dir"
    grid_file=$(cat ../icontools/grid_$i.txt) # grid_$i.txt holds the grid file name for this domain
    podman run \
        -v /c2sm-data/extpar-input-data:/data \
        -v ${WORKSPACE}/icontools:/grid \
        -v "$dir":/work \
        extpar-image \
        python3 -m extpar.WrapExtpar \
        --run-dir /work \
        --raw-data-path /data/linked_data \
        --account none \
        --no-batch-job \
        --host docker \
        --input-grid /grid/${grid_file} \
        --extpar-config /work/config.json
    cd ..
    ((i++))
done
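
Taken together with the changes above: gridgen_namelist.py now creates one extpar_<i> working directory per domain and writes the matching grid file name to icontools/grid_<i>.txt, and this loop runs EXTPAR once per nested domain inside the container, picking up the corresponding grid file each time.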
