Merge pull request #141 from CIROH-UA/forcing_s3_url
Forcing s3 url
JordanLaserGit authored Nov 1, 2024
2 parents ddfb563 + 184bea8 commit a5085c6
Showing 4 changed files with 21 additions and 5 deletions.
20 changes: 18 additions & 2 deletions forcingprocessor/src/forcingprocessor/processor.py
@@ -571,7 +571,7 @@ def write_netcdf(data, vpu, t_ax, catchments):
"""
if storage_type == 's3':
s3_client = boto3.session.Session().client("s3")
nc_filename = forcing_path + f'/{vpu}_forcings.nc'
nc_filename = forcing_path + f'/ngen.{FCST_CYCLE}z.{URLBASE}.forcing.{LEAD_START}_{LEAD_END}.{vpu}.nc'
else:
nc_filename = Path(forcing_path,f'{vpu}_forcings.nc')
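For reference, a minimal sketch of how the new S3 object name resolves for a short_range run (the bucket path, VPU and lead range below are illustrative; FCST_CYCLE, URLBASE, LEAD_START and LEAD_END come from the URL parsing added later in this commit):

forcing_path = "s3://my-bucket/ngen-run"          # hypothetical S3 output path
FCST_CYCLE, URLBASE = "t00", "short_range"        # parsed from the first NWM forcing URL
LEAD_START, LEAD_END = "f001", "f018"             # parsed from the first/last NWM forcing URLs
vpu = "09"
nc_filename = forcing_path + f'/ngen.{FCST_CYCLE}z.{URLBASE}.forcing.{LEAD_START}_{LEAD_END}.{vpu}.nc'
# -> s3://my-bucket/ngen-run/ngen.t00z.short_range.forcing.f001_f018.09.nc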

@@ -761,7 +761,7 @@ def prep_ngen_data(conf):

elif storage_type == "s3":
bucket_path = output_path
forcing_path = bucket_path + '/forcings'
forcing_path = bucket_path
meta_path = bucket_path + '/metadata'
metaf_path = bucket_path + '/metadata/forcings_metadata'
bucket, key = convert_url2key(metaf_path,storage_type)
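Net effect on the bucket layout, sketched with an illustrative output_path (the forcings NetCDF now lands directly under the bucket path instead of a /forcings prefix, while metadata keeps its own prefix):

bucket_path  = "s3://my-bucket/ngen-run"          # hypothetical output_path
forcing_path = bucket_path                        # was bucket_path + '/forcings'
meta_path    = bucket_path + '/metadata'
metaf_path   = bucket_path + '/metadata/forcings_metadata'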
@@ -796,6 +796,19 @@ def prep_ngen_data(conf):
nwm_forcing_files.append(jline.strip())
nfiles = len(nwm_forcing_files)

# s3://noaa-nwm-pds/nwm.20241029/forcing_short_range/nwm.t00z.short_range.forcing.f001.conus.nc
pattern = r"nwm\.(\d{8})/forcing_(\w+)/nwm\.(\w+)(\d{2})z\.\w+\.forcing\.(\w+)(\d{2})\.conus\.nc"

global URLBASE, FCST_CYCLE, LEAD_START, LEAD_END
match = re.search(pattern, nwm_forcing_files[0])
if match:
URLBASE = match.group(2)
FCST_CYCLE = match.group(3) + match.group(4)
LEAD_START = match.group(5) + match.group(6)
match = re.search(pattern, nwm_forcing_files[-1])
if match:
LEAD_END = match.group(5) + match.group(6)
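A quick standalone check of the new pattern against the sample URL from the comment above (run separately; the printed values are what become URLBASE, FCST_CYCLE and LEAD_START here, with LEAD_END taken from the last file in the list):

import re

pattern = r"nwm\.(\d{8})/forcing_(\w+)/nwm\.(\w+)(\d{2})z\.\w+\.forcing\.(\w+)(\d{2})\.conus\.nc"
url = "s3://noaa-nwm-pds/nwm.20241029/forcing_short_range/nwm.t00z.short_range.forcing.f001.conus.nc"

m = re.search(pattern, url)
print(m.group(2))               # short_range -> URLBASE
print(m.group(3) + m.group(4))  # t00         -> FCST_CYCLE
print(m.group(5) + m.group(6))  # f001        -> LEAD_START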

global fs_type
if 's3://' in nwm_forcing_files[0]:
fs = s3fs.S3FileSystem(
@@ -838,6 +851,9 @@ def prep_ngen_data(conf):
# Hack to ensure data is always written out with time moving forward.
t_ax=list(reversed(t_ax))
data_array = np.flip(data_array,axis=0)
# Swap the lead bounds so the filename range follows the reordered time axis.
LEAD_START, LEAD_END = LEAD_END, LEAD_START

t_extract = time.perf_counter() - t0
complexity = (nfiles_tot * ncatchments) / 10000
@@ -8,7 +8,7 @@
},
"instance_parameters" :
{
"ImageId" : "ami-062bdcbb454b8d833",
"ImageId" : "ami-03b72f226b125860d",
"InstanceType" : "t4g.large",
"KeyName" : "jlaser_west2",
"SecurityGroupIds" : ["sg-04365a4248fe126bc"],
@@ -14,7 +14,7 @@
},
"instance_parameters" :
{
"ImageId" : "ami-062bdcbb454b8d833",
"ImageId" : "ami-03b72f226b125860d",
"InstanceType" : "t4g.large",
"KeyName" : "actions_key_arm",
"SecurityGroupIds" : ["sg-0fcbe0c6d6faa0117"],
2 changes: 1 addition & 1 deletion research_datastream/terraform/test/execution_gp_test.json
@@ -8,7 +8,7 @@
},
"instance_parameters" :
{
"ImageId" : "ami-062bdcbb454b8d833",
"ImageId" : "ami-03b72f226b125860d",
"InstanceType" : "t4g.nano",
"KeyName" : "actions_key",
"SecurityGroupIds" : ["sg-06f57f883e902d7bc"],
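The three JSON config changes above only pin a new arm64 AMI (ami-03b72f226b125860d) for the t4g instances. As a rough illustration of how such instance_parameters typically feed an EC2 launch, a hedged boto3 sketch is shown below; this is an assumption about how the datastream consumes these files, not code from this repository:

import json
import boto3

# Hypothetical usage: read one execution config and launch an instance from its parameters.
with open("research_datastream/terraform/test/execution_gp_test.json") as f:
    params = json.load(f)["instance_parameters"]

ec2 = boto3.client("ec2", region_name="us-west-2")  # region is an assumption
resp = ec2.run_instances(
    ImageId=params["ImageId"],                  # ami-03b72f226b125860d after this commit
    InstanceType=params["InstanceType"],
    KeyName=params["KeyName"],
    SecurityGroupIds=params["SecurityGroupIds"],
    MinCount=1,
    MaxCount=1,
)
print(resp["Instances"][0]["InstanceId"])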
