"""
Created on 2/19/2016
This script is designed to create netCDF files using the netCDF4 module from Python as
part of the Coastal Model Test Bed (CMTB)
@author: Spicer Bak
@contact: [email protected]
"""
import numpy as np
import netCDF4 as nc
import csv, yaml
import datetime as DT
import time as ttime
def readflags(flagfname, header=1):
"""This function reads the flag file from the data in to the STWAVE CMTB runs
Args:
flagfname: the relative/absolute location of the flags file
header: (Default value = 1)
Returns:
flags of data dtype=dictionary
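    Example:
        A minimal usage sketch (the file path is hypothetical):

            flags = readflags('cmtbFlags.csv')
            if flags['allflags'] is not None:
                print(flags['time'][0], flags['allflags'][0])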
"""
    times, waveflag, windflag, WLflag, curflag, allflags = [], [], [], [], [], []
    try:
        with open(flagfname, 'r') as f:  # text mode so csv.reader yields strings under Python 3
            reader = csv.reader(f)  # opening file
            for row in reader:  # iterating over the open file
if len(row) > 1 and row[0] != 'Date':
waveflag.append(int(row[2])) # appending wave data flag
windflag.append(int(row[3])) # appending Wind data flag
WLflag.append(int(row[4])) # appending Water Level Flag data
curflag.append(int(row[5])) # appending ocean Currents flag data
times.append(DT.datetime.strptime(row[0]+row[1], '%Y-%m-%d%H%M'))
allflags.append([int(row[2]), int(row[3]), int(row[4]), int(row[5])])
# creating array of flags
allflags = np.array(allflags)
except IOError:
allflags = None
# putting data into a dictionary
flags = {'time': times,
'windflag': windflag,
'waveflag': waveflag,
'WLflag': WLflag,
'curflag': curflag,
'allflags': allflags
}
return flags
def import_template_file(yaml_location):
"""This function loads a yaml file and returns the attributes in dictionary
written by: ASA
Step 1 in netCDF file creation, open global and variable yamls
Args:
yaml_location: yaml file location
Returns:
dictionary with variables from the yaml file
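    Example:
        A minimal usage sketch (the yaml path is hypothetical):

            globalatts = import_template_file('yamls/waveField_global.yml')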
"""
    # load the template
    # use safe_load instead of load so arbitrary yaml tags are not executed
    with open(yaml_location) as f:
        vars_dict = yaml.safe_load(f)
    return vars_dict
def init_nc_file(nc_filename, attributes):
"""Create the netCDF file and write the Global Attributes
written by ASA
    will initialize netCDF file and set global attributes, write date created and issued to global meta data
Args:
nc_filename: output netCDF file name
attributes: attributes from global yaml load
Returns:
open netCDF file ready for writing data
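    Example:
        A minimal usage sketch (file names are hypothetical):

            globalatts = import_template_file('yamls/global.yml')
            ncfile = init_nc_file('output.nc', globalatts)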
"""
ncfile = nc.Dataset(nc_filename, 'w', clobber=True)
# Write some Global Attributes
    for key, value in attributes.items():  # iteritems() is not available under Python 3
if value is not None:
setattr(ncfile, key, value)
dt_today = ttime.strftime("%Y-%m-%d")
ncfile.date_created = dt_today
ncfile.date_issued = dt_today
return ncfile
def write_data_to_nc(ncfile, template_vars, data_dict, write_vars='_variables'):
"""This function actually writes the variables and the variable attributes to
the netCDF file
in the yaml, the "[variable]:" needs to be in the data dictionary,
the output netcdf variable will take the name "name:"
Edited by Spicer Bak
Args:
        ncfile: this is an already opened netCDF file with already defined dimensions
template_vars (dict): variable and meta data associated with data_dict
data_dict (dict): this is a dictionary with keys associated to those hopefully in template_vars, this holds the data
        write_vars: key in template_vars that lists the variables to write (Default value = '_variables')
Returns:
netCDF file (still open)
also returns error strings and count that were created during the data writing process
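    Example:
        A minimal sketch of the expected structures (the variable name, values, and
        dimension are hypothetical, and ncfile is assumed open with a matching
        'time' dimension):

            template_vars = {'_variables': ['waveHs'],
                             'waveHs': {'name': 'waveHs', 'data_type': 'f8',
                                        'dim': ('time',), 'units': 'm'}}
            data_dict = {'waveHs': np.array([1.2, 1.4, 1.1])}
            num_errors, error_str = write_data_to_nc(ncfile, template_vars, data_dict)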
"""
# Keep track of any errors found
num_errors = 0
error_str = ''
# write some more global attributes if present
if '_attributes' in template_vars:
for var in template_vars['_attributes']:
if var in data_dict:
setattr(ncfile, var, data_dict[var])
# List all possible variable attributes in the template
    possible_var_attr = ['standard_name', 'long_name', 'coordinates', 'flag_values', 'flag_meanings', 'description',
                         'notes', 'positive', 'valid_min', 'valid_max', 'calendar', 'cf_role',
                         'missing_value']
# Write variables to file
    accept_vars = template_vars[write_vars]  # honor the write_vars key (default '_variables')
    for var in accept_vars:  # only write variables that were loaded from .yaml file
if var in data_dict:
try:
if "fill_value" in template_vars[var] and "least_significant_digit" in template_vars[var]:
new_var = ncfile.createVariable(template_vars[var]["name"],
template_vars[var]["data_type"],
template_vars[var]["dim"],
fill_value=template_vars[var]["fill_value"],
least_significant_digit=template_vars[var]['least_significant_digit'] )
elif "fill_value" in template_vars[var]:
new_var = ncfile.createVariable(template_vars[var]["name"], template_vars[var]["data_type"],
template_vars[var]["dim"], fill_value=template_vars[var]["fill_value"])
else:
new_var = ncfile.createVariable(template_vars[var]["name"],
template_vars[var]["data_type"],
template_vars[var]["dim"])
new_var.units = template_vars[var]["units"]
# Write the attributes
for attr in possible_var_attr: # only write attributes listed in this list above
if attr in template_vars[var]:
if template_vars[var][attr] == 'NaN':
setattr(new_var, attr, np.nan)
else:
setattr(new_var, attr, template_vars[var][attr])
# Write the short_name attribute as the variable name
if 'short_name' in template_vars[var]:
new_var.short_name = template_vars[var]["short_name"]
else:
new_var.short_name = template_vars[var]["name"]
# _____________________________________________________________________________________
# Write the data (1D, 2D, or 3D)
#______________________________________________________________________________________
if var == "station_name":
station_id = data_dict[var]
data = np.empty((1,), 'S'+repr(len(station_id)))
data[0] = station_id
new_var[:] = nc.stringtochar(data)
elif len(template_vars[var]["dim"]) == 0:
try:
new_var[:] = data_dict[var]
                    except Exception:
                        new_var = data_dict[var]  # note: rebinds the local name; nothing is written for this variable
elif len(template_vars[var]["dim"]) == 1:
# catch some possible errors for frequency and direction arrays
                    if template_vars[var]["data_type"] == 'str':
                        # iterate over the data values (the original looped over the
                        # literal string 'str', which looks like a bug)
                        for i, c in enumerate(data_dict[var]):
                            new_var[i] = data_dict[var][i]
else:
try:
new_var[:] = data_dict[var]
except IndexError:
try:
new_var[:] = data_dict[var][0][0]
                            except Exception as e:
                                raise e
elif len(template_vars[var]["dim"]) == 2:
# create an empty 2d data set of the correct sizes
try:
# handles row vs col data, rather than transposing the array just figure out which it is
length = data_dict[var][0].shape[1]
if data_dict[var][0].shape[0] > length:
length = data_dict[var][0].shape[0]
x = np.empty([data_dict[var].shape[0], length], dtype=np.float64)
for i in range(data_dict[var].shape[0]):
# squeeze the 3d array in to 2d as dimension is not needed
x[i] = np.squeeze(data_dict[var][i])
new_var[:, :] = x
                    except Exception:
                        # if the tuple fails must be right...right?
                        new_var[:] = data_dict[var]
elif len(template_vars[var]["dim"]) == 3:
# create an empty 3d data set of the correct sizes
# this portion was modified by Spicer Bak
assert data_dict[var].shape == new_var.shape, 'The data must have the Same Dimensions (missing time?)'
x = np.empty([data_dict[var].shape[0], data_dict[var].shape[1], data_dict[var].shape[2]], np.float64)
for i in range(data_dict[var].shape[0]):
x[i] = data_dict[var][i]
new_var[:, :, :] = x[:, :, :]
            except Exception as e:
                num_errors += 1
                msg = 'ERROR WRITING VARIABLE: {} - {}\n'.format(var, str(e))
                error_str += msg  # accumulate so the returned error_str is meaningful
                print(msg)
return num_errors, error_str
def makenc_field(data_lib, globalyaml_fname, flagfname, ofname, var_yaml_fname):
"""This is a function that takes wave nest dictionary and Tp_nest dictionnary and creates the high resolution
near shore field data from the Coastal Model Test Bed
Args:
data_lib: data lib is a library of data with keys the same name as associated variables to be written in the
netCDF file to be created, This function will look for:
'time', 'DX', 'DY', 'NI', 'NJ', 'bathymetry', 'bathymetryDate', 'waveHs', 'station_name'
globalyaml_fname: global meta data yaml file name
ofname: the file name to be created
flagfname: flag input file to flag data
var_yaml_fname: variable meta data yaml file name
Returns:
written netCDF file
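    Example:
        A minimal usage sketch (paths are hypothetical):

            makenc_field(data_lib, 'yamls/global.yml', 'flags.csv',
                         'waveField.nc', 'yamls/waveField.yml')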
"""
# import global atts
globalatts = import_template_file(globalyaml_fname)
# import variable data and meta
var_atts = import_template_file(var_yaml_fname)
# import flag data
flags = readflags(flagfname)['allflags']
data_lib['flags'] = flags
# figure out my grid spacing and write it to the file
if np.mean(data_lib['DX']) != np.median(data_lib['DX']): # variable grid spacing
globalatts['grid_dx'] = 'variable'
globalatts['grid_dy'] = 'variable'
else:
globalatts['grid_dx'] = data_lib['DX']
globalatts['grid_dy'] = data_lib['DY']
globalatts['n_cell_y'] = data_lib['NJ']
globalatts['n_cell_x'] = data_lib['NI']
    # making bathymetry the length of time so it can be concatenated
    if data_lib['waveHs'].shape[1] != data_lib['bathymetry'].shape[1]:
        data_lib['waveHs'] = data_lib['waveHs'][:, :data_lib['bathymetry'].shape[1], :]
data_lib['bathymetry'] = np.full_like(data_lib['waveHs'], data_lib['bathymetry'], dtype=np.float32 )
if 'bathymetryDate' in data_lib:
data_lib['bathymetryDate'] = np.full_like(data_lib['time'], data_lib['bathymetryDate'], dtype=np.float32 )
#data_lib['bathymetry'] =
fid = init_nc_file(ofname, globalatts) # initialize and write inital globals
#### create dimensions
tdim = fid.createDimension('time', np.shape(data_lib['waveHs'])[0])
xdim = fid.createDimension('X_shore', data_lib['NI'])
ydim = fid.createDimension('Y_shore', data_lib['NJ'])
inputtypes = fid.createDimension('in_type', np.shape(flags)[1]) # there are 4 input data types for flags
statnamelen = fid.createDimension('station_name_length', len(data_lib['station_name']))
#if 'bathymetryDate' in data_lib:
# bathyDate_length = fid.createDimension('bathyDate_length', np.shape(data_lib['bathymetry'])[0])
# bathydate = fid.createDimension('bathyDate_length', np.size(data_lib['bathymetryDate']))
# write data to the nc file
write_data_to_nc(fid, var_atts, data_lib)
# close file
fid.close()
def makenc_FRFTransect(bathyDict, ofname, globalYaml, varYaml):
"""This function makes netCDF files from csv Transect data library created with testbedUtils.load_FRF_transect
Args:
bathyDict: data input matching var yaml, must have 'time' in it for dimension
ofname: the file name to be created
globalYaml: global meta data yaml file name
varYaml: variable meta data yaml file name
Returns:
closed netCDF file
"""
globalAtts = import_template_file(globalYaml) # loading global meta data attributes from yaml
varAtts = import_template_file(varYaml) # loading variables to write and associated meta data
# initializing output cshore_ncfile
    fid = init_nc_file(ofname, globalAtts)
# creating dimensions of data
tdim = fid.createDimension('time', np.shape(bathyDict['time'])[0])
# write data to the cshore_ncfile
write_data_to_nc(fid, varAtts, bathyDict)
# close file
fid.close()
def makenc_FRFGrid(gridDict, ofname, globalYaml, varYaml):
"""This is a function that makes netCDF files from the FRF Natural neighbor tool created by
Spicer Bak using the pyngl library. the transect dictionary is created using the natural
neighbor tool in FRF_natneighbor.py
Args:
gridDict: data dictionary matching varYaml requires
'zgrid', 'ygrid', 'xgrid', 'StateplaneE', 'StateplaneN', 'Lat', 'Lon', 'FRF_X', 'FRF_Y'
globalYaml: global meta data yaml file name
ofname: the file name to be created
varYaml: variable meta data yaml file name
Returns:
netCDF file with gridded data in it
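    Example:
        A minimal usage sketch (paths are hypothetical):

            makenc_FRFGrid(gridDict, 'FRF_grid.nc', 'yamls/global.yml', 'yamls/grid.yml')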
"""
from testbedutils import geoprocess as gp # this might be creating a circular import
globalAtts = import_template_file(globalYaml)
varAtts = import_template_file(varYaml)
# create netcdf file
fid = init_nc_file(ofname, globalAtts)
# creating dimensions of data
xShore = fid.createDimension('xShore', np.shape(gridDict['zgrid'])[0])
yShore = fid.createDimension('yShore', np.shape(gridDict['zgrid'])[1])
time = fid.createDimension('time', np.size(gridDict['time']))
# creating lat/lon and state plane coords
#xgrid, ygrid = np.meshgrid(gridDict['xgrid'], gridDict['ygrid'])
xx, yy = np.meshgrid(gridDict['xgrid'], gridDict['ygrid'])
latGrid = np.zeros(np.shape(yy))
lonGrid = np.zeros(np.shape(xx))
statePlN = np.zeros(np.shape(yy))
statePlE = np.zeros(np.shape(xx))
for iy in range(0, np.size(gridDict['zgrid'], axis=1)):
for ix in range(0, np.size(gridDict['zgrid'], axis=0)):
coords = gp.FRFcoord(xx[iy, ix], yy[iy, ix])#, grid[iy, ix]))
statePlE[iy, ix] = coords['StateplaneE']
statePlN[iy, ix] = coords['StateplaneN']
latGrid[iy, ix] = coords['Lat']
lonGrid[iy, ix] = coords['Lon']
assert xx[iy, ix] == coords['FRF_X']
assert yy[iy, ix] == coords['FRF_Y']
# put these data into the dictionary that matches the yaml
gridDict['Latitude'] = latGrid[:, 0]
gridDict['Longitude'] = lonGrid[0, :]
gridDict['Easting'] = statePlE[:, 0]
gridDict['Northing'] = statePlN[0, :]
gridDict['FRF_Xshore'] = gridDict.pop('xgrid')
gridDict['FRF_Yshore'] = gridDict.pop('ygrid')
    # adding 3rd dimension for time
    a = gridDict.pop('zgrid').T
gridDict['Elevation'] = np.full([1, a.shape[0], a.shape[1]], fill_value=[a], dtype=np.float32)
# write data to file
write_data_to_nc(fid, varAtts, gridDict)
# close file
fid.close()
def makenc_Station(stat_data, globalyaml_fname, flagfname, ofname, stat_yaml_fname):
"""This function will make netCDF files from the station output data from the
Coastal Model Test Bed of STWAVE for the STATion files
Args:
stat_data: data lib is a library of data with keys the same name as associated variables to be written in the
netCDF file to be created, This function will look for:
'time', 'DX', 'DY', 'NI', 'NJ', 'station_name', 'Northing', 'Easting', 'Longitude', 'Latitude', 'waveDirectionBins', 'waveFrequency'
flagfname: name/path of flag file
globalyaml_fname: global yaml name
        stat_yaml_fname: variable yaml name
ofname: output file name
Returns:
a nc file with station data in it
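    Example:
        A minimal usage sketch (paths are hypothetical):

            makenc_Station(stat_data, 'yamls/global.yml', 'flags.csv',
                           'station.nc', 'yamls/station.yml')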
"""
# import global yaml data
globalatts = import_template_file(globalyaml_fname)
# import variable data and meta
stat_var_atts = import_template_file(stat_yaml_fname)
# import flag data
flags = readflags(flagfname)['allflags']
stat_data['flags'] = flags # this is a library of flags
globalatts['grid_dx'] = stat_data['DX']
globalatts['grid_dy'] = stat_data['DY']
globalatts['n_cell_y'] = stat_data['NJ']
globalatts['n_cell_x'] = stat_data['NI']
fid = init_nc_file(ofname, globalatts) # initialize and write inital globals
#### create dimensions
tdim = fid.createDimension('time', np.shape(stat_data['time'])[0]) # None = size of the dimension, what does this gain me if i know it
    inputtypes = fid.createDimension('input_types_length', np.shape(flags)[1])  # there are 4 input data types for flags
statnamelen = fid.createDimension('station_name_length', len(stat_data['station_name']))
    northing = fid.createDimension('Northing', 1)  # long literals (1L) are not valid Python 3
    easting = fid.createDimension('Easting', 1)
Lon = fid.createDimension('Longitude', np.size(stat_data['Longitude']))
Lat = fid.createDimension('Latitude', np.size(stat_data['Latitude']))
dirbin = fid.createDimension('waveDirectionBins', np.size(stat_data['waveDirectionBins']))
frqbin = fid.createDimension('waveFrequency', np.size(stat_data['waveFrequency']))
#
# convert to Lat/lon here
# write data to the nc file
write_data_to_nc(fid, stat_var_atts, stat_data)
# close file
fid.close()
def convert_FRFgrid(gridFname, ofname, globalYaml, varYaml, plotFlag=False):
"""This function will convert the FRF gridded text product into a NetCDF file
Args:
gridFname: input FRF gridded product
ofname: output netcdf filename
globalYaml: a yaml file containing global meta data
varYaml: a yaml file containing variable meta data
plotFlag: true or false for creation of QA plots (Default value = False)
Returns:
None
"""
# Defining rigid parameters
    raise NotImplementedError('is this deprecated?')
# defining the bounds of the FRF gridded product
gridYmax = 1100 # maximum FRF Y distance for netCDF file
gridYmin = -100 # minimum FRF Y distance for netCDF file
gridXmax = 950 # maximum FRF X distance for netCDF file
gridXmin = 50 # minimum FRF xdistance for netCDF file
    fill_value = -999.0  # numeric fill so the frame dtype stays float (was the string '-999.0')
# main body
    # load Grid from file (note: PrepData is not imported in this module; this legacy
    # path is unreachable while the NotImplementedError above remains)
    tempClass = PrepData.inputOutput.genericIO()
xyz = tempClass.importXYZ(gridFname)
# make dictionary in right form
dx = np.median(np.diff(xyz['x']))
dy = np.max(np.diff(xyz['y']))
xgrid = np.unique(xyz['x'])
ygrid = np.unique(xyz['y'])
# putting the loaded grid into a 2D array
zgrid = np.zeros((len(xgrid), len(ygrid)))
rc = 0
for i in range(np.size(ygrid, axis=0 )):
for j in range(np.size(xgrid, axis=0)):
zgrid[j, i] = xyz['z'][rc]
rc += 1
    if plotFlag:
from matplotlib import pyplot as plt
plt.pcolor(xgrid, ygrid, zgrid.T)
plt.colorbar()
plt.title('FRF GRID %s' % ofname[:-3].split('/')[-1])
plt.savefig(ofname[:-4] + '_RawGridTxt.png')
plt.close()
    # making labels in FRF coordinates for the netCDF grid (num must be an int under Python 3)
    ncXcoord = np.linspace(gridXmin, gridXmax, num=int((gridXmax - gridXmin) / dx) + 1, endpoint=True)
    ncYcoord = np.linspace(gridYmin, gridYmax, num=int((gridYmax - gridYmin) / dy) + 1, endpoint=True)
frame = np.full((np.shape(ncXcoord)[0], np.shape(ncYcoord)[0]), fill_value=fill_value)
# find the overlap locations between grids
xOverlap = np.intersect1d(xgrid, ncXcoord)
yOverlap = np.intersect1d(ygrid, ncYcoord)
assert len(yOverlap) >= 3, 'The overlap between grid nodes and netCDF grid nodes is short'
lastX = np.argwhere(ncXcoord == xOverlap[-1])[0][0]
firstX = np.argwhere(ncXcoord == xOverlap[0])[0][0]
lastY = np.argwhere(ncYcoord == yOverlap[-1])[0][0]
firstY = np.argwhere(ncYcoord == yOverlap[0])[0][0]
# fill the frame grid with the loaded data
frame[firstX:lastX+1, firstY:lastY+1] = zgrid
# run data check
assert set(xOverlap).issubset(ncXcoord), 'The FRF X values in your function do not fit into the netCDF format, please rectify'
assert set(yOverlap).issubset(ncYcoord), 'The FRF Y values in your function do not fit into the netCDF format, please rectify'
    # putting the data into a dictionary to make a netCDF file
fields = gridFname.split('_')
for fld in fields:
if len(fld) == 8:
dte = fld # finding the date in the file name
break
gridDict = {'zgrid': frame,
'xgrid': ncXcoord,
'ygrid': ncYcoord,
'time': nc.date2num(DT.datetime(int(dte[:4]), int(dte[4:6]),
int(dte[6:])), 'seconds since 1970-01-01')}
# making the netCDF file from the gridded data
makenc_FRFGrid(gridDict, ofname, globalYaml, varYaml)
def makeDirectionalWavesWHOI(ofname, dataDict, globalYaml, varYaml):
"""
Args:
ofname: output file name
dataDict: input data dictionary matching variable names in yaml
globalYaml: global yaml for meta data ahead of file
        varYaml: variable meta data yaml structured to match the dataDict
Returns:
None - writes out the netCDF file
"""
globalAtts = import_template_file(globalYaml)
varAtts = import_template_file(varYaml)
# create netcdf file
fid = init_nc_file(ofname, globalAtts)
#### create dimensions
tdim = fid.createDimension('time', np.shape(dataDict['time'])[0]) # None = size of the dimension, what does this gain me if i know it
statnamelen = fid.createDimension('station_name_length', len(dataDict['station_name']))
dirbin = fid.createDimension('waveDirectionBins', np.size(dataDict['directionBands']))
frqbin = fid.createDimension('waveFrequency', np.size(dataDict['freqBands']))
# write data to the nc file
write_data_to_nc(fid, varAtts, dataDict)
# close file
fid.close()
def makenc_todaysBathyCMTB(gridDict, ofname, globalYaml, varYaml):
"""Generate bathymetry file for CMTB
Args:
gridDict: data dictionary matching varYaml
ofname: file output name
globalYaml: yaml containing CF compliant meta data
varYaml: yaml containing matching data structure to gridDict and CF compliant meta data
    Returns:
        None - writes out the netCDF file
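    Example:
        A minimal usage sketch (paths are hypothetical):

            makenc_todaysBathyCMTB(gridDict, 'todaysBathy.nc',
                                   'yamls/global.yml', 'yamls/bathy.yml')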
"""
globalAtts = import_template_file(globalYaml)
varAtts = import_template_file(varYaml)
# create netcdf file
fid = init_nc_file(ofname, globalAtts)
# creating dimensions of data
xFRF = fid.createDimension('xFRF', gridDict['xFRF'].shape[0])
yFRF = fid.createDimension('yFRF', gridDict['yFRF'].shape[0])
time = fid.createDimension('time', np.size(gridDict['time']))
# write data to file
write_data_to_nc(fid, varAtts, gridDict)
# close file
fid.close()
def makenc_CSHORErun(ofname, dataDict, globalYaml, varYaml):
"""This is a function that makes netCDF files from CSHORE model runs created by
David Young using all the stuff Spicer Bak used. You have to build dataDict from the different dictionaries
output by cshore_io.load_CSHORE_results(). YOU DONT HAVE TO HAND IT LAT LON THOUGH!!!
Args:
dataDict: keys:
time: - time steps of the simulation nc file
xFRF: - xFRF positions of the simulation
aveE: - depth averaged eastward current!
stdE: - standard deviation of eastward current
aveN: - same as above but northward current
stdN: - same as above but northward
waveHs: - significant wave heights
waveMeanDirection: mean direction of the waves at each cross-shore position
waterLevel: mean water level at each cross-shore position
stdWaterLevel: standard deviation of the water surface elevation at each cross-shore position
setup: wave setup at each cross-shore position
runup2perc: 2 percent exceedance runup elevation for each model time-step
runupMean: mean runup elevation for each model time-step
qbx: cross-shore bed load sediment transport rate
qsx: cross-shore suspended sediment transport rate
qby: alongshore bed load sediment transport rate
qsy: alongshore suspended sediment transport rate
probabilitySuspension: probability that sediment will be suspended at particular node
probabilityMovement: probability that sediment will move
suspendedSedVolume: suspended sediment volume at each cross-shore position
bottomElevation: the bottom elevation at each xFRF position in the simulation
surveyNumber: this is the surveyNumber that the integrated bathymetry for this simulation was built on
        profileNumber: this is either the profileNumber of the survey or the alongshore position of the integrated bathymetry transect that is used as the bed elevation boundary condition
bathymetryDate: this is the day that the aforementioned survey was taken
yFRF: this is the yFRF position of the transect itself. if it is the integrated bathymetry, then this will be identical to the profileNumber
ofname (str): this is the FULL PATH INCLUDING FILENAME AND EXTENSION to the position where the ncFile will be saved when output
globalYaml (str): full path to the globalYaml used to build this ncFile
varYaml (str): full path to the variableYaml used to build this ncFile
Returns:
netCDF file with CSHORE model results in it
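    Example:
        A minimal usage sketch (paths are hypothetical; the output name must contain
        'FIXED' or 'MOBILE' so the version checks below pass):

            makenc_CSHORErun('CSHORE_MOBILE_20160219.nc', dataDict,
                             'yamls/cshore_global.yml', 'yamls/cshore_var.yml')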
"""
from testbedutils import geoprocess as gp # this might create a circular import
globalAtts = import_template_file(globalYaml)
varAtts = import_template_file(varYaml)
# create netcdf file
fid = init_nc_file(ofname, globalAtts)
# note: you have to hand this the yFRF coordinates of the BC gage if you want to get lat/lon..
lx = np.size(dataDict['xFRF'], axis=0)
lat = np.zeros(lx)
lon = np.zeros(lx)
for ii in range(0, lx):
coords = gp.FRFcoord(dataDict['xFRF'][ii], dataDict['yFRF'])
lat[ii] = coords['Lat']
lon[ii] = coords['Lon']
dataDict['latitude'] = lat
dataDict['longitude'] = lon
# ok, we are HARD CODING the dimensions to ALWAYS be at the 8m ARRAY (xFRF = 914.44 rounded DOWN to 914)
# we will just fill in the missing values with nans as required
array8m_loc = 914
# creating dimensions of data
new_s = np.shape(range(-50, array8m_loc+1))[0]
new_t = np.shape(dataDict['waveHs'])[0]
xFRF = fid.createDimension('xFRF', new_s)
time = fid.createDimension('time', new_t)
    # check to see if the grid I am importing is smaller than my netCDF grid
    # (compare sizes; the original compared an int against a shape tuple)
    if np.shape(range(-50, array8m_loc+1))[0] == np.shape(dataDict['xFRF'])[0]:
        # the model grid is the same as the netCDF grid, so do nothing
        dataDict_n = dataDict
    else:
dataDict_n = {'xFRF': np.flipud(np.array(range(-50, array8m_loc+1)) + 0.0),
'time': dataDict['time'],
'aveE': np.full((new_t, new_s), fill_value=np.nan),
'stdE': np.full((new_t, new_s), fill_value=np.nan),
'aveN': np.full((new_t, new_s), fill_value=np.nan),
'stdN': np.full((new_t, new_s), fill_value=np.nan),
'waveHs': np.full((new_t, new_s), fill_value=np.nan),
'waveMeanDirection': np.full((new_t, new_s), fill_value=np.nan),
'waterLevel': np.full((new_t, new_s), fill_value=np.nan),
'stdWaterLevel': np.full((new_t, new_s), fill_value=np.nan),
'setup': np.full((new_t, new_s), fill_value=np.nan),
'runup2perc': dataDict['runup2perc'],
'runupMean': dataDict['runupMean'],
'qbx': np.full((new_t, new_s), fill_value=np.nan),
'qsx': np.full((new_t, new_s), fill_value=np.nan),
'qby': np.full((new_t, new_s), fill_value=np.nan),
'qsy': np.full((new_t, new_s), fill_value=np.nan),
'probabilitySuspension': np.full((new_t, new_s), fill_value=np.nan),
'probabilityMovement': np.full((new_t, new_s), fill_value=np.nan),
'suspendedSedVolume': np.full((new_t, new_s), fill_value=np.nan),
'bottomElevation': np.full((new_t, new_s), fill_value=np.nan),
'latitude': np.full((new_s), fill_value=np.nan),
'longitude': np.full((new_s), fill_value=np.nan),
'surveyNumber': dataDict['surveyNumber'],
'profileNumber': dataDict['profileNumber'],
'bathymetryDate': dataDict['bathymetryDate'],
'yFRF': dataDict['yFRF'],}
if 'FIXED' in ofname:
dataDict_n['bottomElevation'] = np.full((new_s), fill_value=np.nan)
elif 'MOBILE' in ofname:
dataDict_n['bottomElevation'] = np.full((new_t, new_s), fill_value=np.nan)
        else:
            print('You need to modify makenc_CSHORErun in makenc.py to accept your new version name!')
        # find index of first point on dataDict grid
        min_x = min(dataDict['xFRF'])
        ind_minx = int(np.argwhere(dataDict_n['xFRF'] == min_x))
        max_x = max(dataDict['xFRF'])
        ind_maxx = int(np.argwhere(dataDict_n['xFRF'] == max_x))
        # copy each time-varying cross-shore variable onto the larger netCDF grid
        vars2D = ['aveE', 'stdE', 'aveN', 'stdN', 'waveHs', 'waveMeanDirection',
                  'waterLevel', 'stdWaterLevel', 'setup', 'qbx', 'qsx', 'qby', 'qsy',
                  'probabilitySuspension', 'probabilityMovement', 'suspendedSedVolume']
        for ii in range(0, int(new_t)):
            for key in vars2D:
                dataDict_n[key][ii][ind_maxx:ind_minx+1] = dataDict[key][ii]
        # latitude/longitude vary only in space, so copy the whole 1D arrays once
        # (the original assigned a single time-indexed scalar inside the loop,
        # which looks like a bug)
        dataDict_n['latitude'][ind_maxx:ind_minx+1] = dataDict['latitude']
        dataDict_n['longitude'][ind_maxx:ind_minx+1] = dataDict['longitude']
if 'FIXED' in ofname:
dataDict_n['bottomElevation'][ind_maxx:ind_minx + 1] = dataDict['bottomElevation']
elif 'MOBILE' in ofname:
for ii in range(0, int(new_t)):
dataDict_n['bottomElevation'][ii][ind_maxx:ind_minx + 1] = dataDict['bottomElevation'][ii]
        else:
            print('You need to modify makenc_CSHORErun in makenc.py to accept your new version name!')
    # get rid of all masks: mask the NaN padding so it is written as fill values
    for key in ['aveE', 'stdE', 'aveN', 'stdN', 'waveHs', 'waveMeanDirection',
                'waterLevel', 'stdWaterLevel', 'setup', 'qbx', 'qsx', 'qby', 'qsy',
                'probabilitySuspension', 'probabilityMovement', 'suspendedSedVolume',
                'latitude', 'longitude', 'bottomElevation']:
        dataDict_n[key] = np.ma.masked_array(dataDict_n[key], np.isnan(dataDict_n[key]))
# check to see if I screwed up!
assert set(dataDict.keys()) == set(dataDict_n.keys()), 'You are missing dictionary keys in the new dictionary!'
# replace the dictionary with the new dictionary
del dataDict
dataDict = dataDict_n
del dataDict_n
    # now we flip everything that has a spatial dimension around so it will be all pretty like spicer wants?
    for key in ['aveN', 'waveHs', 'aveE', 'waveMeanDirection', 'stdWaterLevel',
                'probabilitySuspension', 'stdN', 'stdE', 'bottomElevation', 'qsy',
                'qsx', 'waterLevel', 'qbx', 'qby', 'setup', 'suspendedSedVolume',
                'probabilityMovement']:
        dataDict[key] = np.flip(dataDict[key], 1)
    for key in ['xFRF', 'longitude', 'latitude']:
        dataDict[key] = np.flip(dataDict[key], 0)
# write data to file
write_data_to_nc(fid, varAtts, dataDict)
# close file
fid.close()
def makenc_intBATHY(ofname, dataDict, globalYaml, varYaml):
"""
TODO: can this be combined with makenc_t0BATHY
Args:
ofname: this is the name of the cshore_ncfile you are building
dataDict: keys must include... and match the varYaml file
utmNorthing - this is utm in meters (not feet)
utmEasting - this is utm in meters (not feet)
globalYaml: yaml containing global meta data
varYaml: yaml containing variable meta data
Returns:
writes out the ncfile
"""
globalAtts = import_template_file(globalYaml)
varAtts = import_template_file(varYaml)
# create netcdf file
fid = init_nc_file(ofname, globalAtts)
# creating dimensions of data
ni = fid.createDimension('ni', dataDict['utmEasting'].shape[1])
nj = fid.createDimension('nj', dataDict['utmEasting'].shape[0])
# write data to file
write_data_to_nc(fid, varAtts, dataDict)
# close file
fid.close()
def makenc_t0BATHY(ofname, dataDict, globalYaml, varYaml):
"""# this is the script that builds the t0 netCDF file from the initial Bathy DEM (intBathy)
Args:
ofname (str): this is the name of the cshore_ncfile you are building
dataDict (dict): keys must include... and matching keys to var Yaml
xFRF - in m
yFRF - in m
globalYaml (str): CF compliant meta data in global file
varYaml (str): CF compliant variable meta data matching dataDict
Returns:
writes out the cshore_ncfile
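    Example:
        A minimal usage sketch (paths and the 'elevation' key are hypothetical; the
        dictionary keys must match the varYaml):

            dataDict = {'xFRF': xFRF, 'yFRF': yFRF, 'elevation': dem}
            makenc_t0BATHY('t0_bathy.nc', dataDict, 'yamls/global.yml', 'yamls/bathyDEM.yml')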
"""
globalAtts = import_template_file(globalYaml)
varAtts = import_template_file(varYaml)
# create netcdf file
fid = init_nc_file(ofname, globalAtts)
# creating dimensions of data
xFRF = fid.createDimension('xFRF', dataDict['xFRF'].shape[0])
yFRF = fid.createDimension('yFRF', dataDict['yFRF'].shape[0])
# write data to file
write_data_to_nc(fid, varAtts, dataDict)
# close file
fid.close()
def makenc_tiBATHY(ofname, dataDict, globalYaml, varYaml):
"""# this is the script that builds the monthly ti netCDF file by incorporating the new survey data into the most recent Bathy DEM
Args:
ofname (str): this is the name of the cshore_ncfile you are building
dataDict (str): keys must include... and matching keys to varYaml
time:
xFRF: - in m
yFRF: - in m
globalYaml (str): path to global yaml with CF compliant meta data
varYaml (str): path to variable CF compliant meta data and matching the keys in data Dict
Returns:
writes out the netCDF file
"""
globalAtts = import_template_file(globalYaml)
varAtts = import_template_file(varYaml)
# create netcdf file
fid = init_nc_file(ofname, globalAtts)
# creating dimensions of data
time = fid.createDimension('time', dataDict['time'].shape[0])
xFRF = fid.createDimension('xFRF', dataDict['xFRF'].shape[0])
yFRF = fid.createDimension('yFRF', dataDict['yFRF'].shape[0])
# write data to file
write_data_to_nc(fid, varAtts, dataDict)
# close file
fid.close()