-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathGrid.hpp
248 lines (214 loc) · 9.57 KB
/
Grid.hpp
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
/*!
* @file Grid.hpp
* @author Athena Elafrou <[email protected]>
* @date 05 Nov 2024
*/
#pragma once

#include <mpi.h>

#include <stdexcept>
#include <string>
#include <vector>

#include "domain_decomp_export.hpp"
/*!
* @class Grid
* @brief A class that encapsulates a distributed 2D grid describing the ocean.
*
* A class that encapsulates a distributed 2D grid. The dimensions of the grid
* as well as the ordering of dimensions is extracted from the netCDF grid file,
* with the assumption that all variables defined in the netCDF file follow the
* same convention in terms of dimension ordering. The grid is partitioned
* evenly among processes using a 2D decomposition, ignoring any land mask. The
* grid can be subsequently re-partitioned differently using a Partitioner.
*
* Partitions are described by bounding boxes, which are defined by the
* global coordinates of the upper left corner and the local extents of the box
* in each dimension.
*/
class LIB_EXPORT Grid {
    // Default grid metadata naming conventions
    const std::string data_id = "data";

public:
    // Disallow compiler-generated copy operations: a Grid owns an MPI
    // communicator handle and large index arrays, so accidental copies would
    // be both expensive and semantically ambiguous.
    Grid(const Grid&) = delete;
    Grid& operator=(const Grid&) = delete;

    /*!
     * @brief Destructor.
     */
    ~Grid() = default;

    /*!
     * @brief Constructs a distributed 2D grid from a netCDF file describing the
     * global domain.
     *
     * Constructs a distributed grid of dimensions (dim0, dim1) from a netCDF
     * file. The dimension sizes as well as the ordering of dimensions is
     * extracted from the netCDF grid file and mapped appropriately to the (dim0,
     * dim1) dimensions of the Grid class, where dim1 is defined as the fastest
     * increasing dimension. For example, if the dimensions of interest in the
     * netCDF file are named x and y and the variables are dimensioned as (y, x),
     * then the y dimension will be mapped to dim1 and x to dim0 of the Grid
     * class, since the netCDF C/C++ convention is that the last dimension in the
     * CDL notation is the fastest increasing dimension. The default dimension
     * names for the netCDF file are "x" for dim0 and "y" for dim1, and the
     * default name for the land mask variable is "mask". We assume that all
     * variables defined in the netCDF file follow the same convention in terms of
     * dimension ordering.
     *
     * The code was originally written with the assumption that nextsim-dg would
     * store arrays in (x,y) order. However, the indices have since been swapped
     * and now nextsim-dg uses (y,x) order. To account for the switch of indices
     * the grid input netcdf file is now transposed when it is read-in and the x
     * and y dims are swapped in Grid.cpp.
     *
     * @param comm MPI communicator.
     * @param filename Grid file in netCDF format.
     * @param xdim_name Name of 1st grid dimension in netCDF file (optional)
     * @param ydim_name Name of 2nd grid dimension in netCDF file (optional)
     * @param dim_order Permutation for ordering of dimensions.
     * @param mask_name Name of land mask variable in netCDF file (optional)
     * @param ignore_mask Should the land mask be ignored
     * @param px Is the domain periodic in the x-direction
     * @param py Is the domain periodic in the y-direction
     * @return A distributed 2D grid object partitioned evenly in terms of grid
     * points. The caller owns the returned object and must delete it before
     * MPI_Finalize() is called.
     */
    // We are using the named constructor idiom so that objects can only be
    // created in the heap to ensure its dtor is executed before MPI_Finalize()
    static Grid* create(MPI_Comm comm, const std::string& filename, bool ignore_mask = false,
        bool px = false, bool py = false);
    static Grid* create(MPI_Comm comm, const std::string& filename, const std::string xdim_name,
        const std::string ydim_name, const std::vector<int> dim_order, const std::string mask_name,
        bool ignore_mask = false, bool px = false, bool py = false);

    /*!
     * @brief Returns the total number of objects in the local domain.
     *
     * @return Total number of objects in the local domain.
     */
    int get_num_objects() const;

    /*!
     * @brief Returns the number of non-land objects in the local domain.
     *
     * @return Number of non-land objects in the local domain.
     */
    int get_num_nonzero_objects() const;

    /*!
     * @brief Returns the number of processes.
     *
     * @return vector containing number of processes in each dimension of the grid
     */
    std::vector<int> get_num_procs() const;

    /*!
     * @brief Returns the local extent
     *
     * @return vector containing local extent in each dimension of the grid
     */
    std::vector<int> get_local_ext() const;

    /*!
     * @brief Returns the global position of the domain in the grid (bottom left corner of domain)
     *
     * @return vector containing the global position
     */
    std::vector<int> get_global() const;

    /*!
     * @brief Returns the global extent
     *
     * @return vector containing global extent in each dimension of the grid
     */
    std::vector<int> get_global_ext() const;

    /*!
     * @brief Returns the global land mask dimensioned (dim0, dim1), where dim0 is
     * the 1st dimension and dim1 is the 2nd, with dim1 varying the fastest in
     * terms of storage.
     *
     * @return Global land mask.
     */
    const int* get_land_mask() const;

    /*!
     * @brief Returns `true` if the grid is periodic in the x-direction, otherwise `false`.
     * @return Periodicity in x-direction
     */
    bool get_px() const;

    /*!
     * @brief Returns `true` if the grid is periodic in the y-direction, otherwise `false`.
     * @return Periodicity in y-direction
     */
    bool get_py() const;

    /*!
     * @brief Returns the index mapping of sparse to dense representation, where
     * dim0 is the 1st dimension and dim1 is the 2nd, with dim1 varying the
     * fastest in terms of storage.
     *
     * @return Index mapping of sparse to dense representation.
     */
    const int* get_sparse_to_dense() const;

    /*!
     * @brief Returns the IDs of the non-land objects in the local domain, where
     * dim0 is the 1st dimension and dim1 is the 2nd, with dim1 varying the
     * fastest in terms of storage.
     *
     * @return IDs of the non-land objects in the local domain.
     */
    const int* get_nonzero_object_ids() const;

    /*!
     * @brief Returns the bounding box for this process.
     *
     * @param global_0 Global coordinate in the 1st dimension of the upper left corner.
     * @param global_1 Global coordinate in the 2nd dimension of the upper left corner.
     * @param local_ext_0 Local extent in the 1st dimension of the grid.
     * @param local_ext_1 Local extent in the 2nd dimension of the grid.
     */
    void get_bounding_box(int& global_0, int& global_1, int& local_ext_0, int& local_ext_1) const;

private:
    // Construct a distributed grid from a NetCDF file describing the global domain
    Grid(MPI_Comm comm, const std::string& filename, const std::string& dim0_id = "x",
        const std::string& dim1_id = "y",
        const std::vector<int>& dim_order = std::vector<int>({ 1, 0 }),
        const std::string& mask_id = "mask", bool ignore_mask = false, bool px = false,
        bool py = false);

    /*!
     * @brief Read dims from netcdf grid file.
     *
     * @param filename filename of the input netcdf grid file.
     */
    void ReadGridExtents(const std::string& filename);

    /*!
     * @brief Read data from netcdf grid file.
     *
     * @param filename filename of the input netcdf grid file.
     * @param mask_name name of the land mask in the grid file.
     */
    void ReadGridMask(const std::string& filename, const std::string& mask_name);

public:
    static const int NDIMS = 2;

private:
    MPI_Comm _comm; // MPI communicator
    int _rank = -1; // Process rank
    int _total_num_procs = -1; // Total number of processes in communicator
    // Total number of processes in each dimension
    std::vector<int> _num_procs = std::vector<int>(NDIMS, -1);
    // Global extents in each dimension
    std::vector<int> _global_ext = std::vector<int>(NDIMS, 0);
    // Local extents in each dimension
    std::vector<int> _local_ext = std::vector<int>(NDIMS, 0);
    // Global coordinates of upper left corner
    std::vector<int> _global = std::vector<int>(NDIMS, -1);
    // Local extents in each dimension (after partitioning)
    std::vector<int> _local_ext_new = std::vector<int>(NDIMS, 0);
    // Global coordinates of upper left corner (after partitioning)
    std::vector<int> _global_new = std::vector<int>(NDIMS, -1);
    // dimension names
    const std::vector<std::string> _dim_names;
    // order of dimensions
    const std::vector<int> _dim_order;
    int _num_objects = 0; // Number of grid points ignoring land mask
    int _num_nonzero_objects = 0; // Number of non-land grid points
    bool _px = false; // Periodicity in the x-direction
    bool _py = false; // Periodicity in the y-direction
    std::vector<int> _land_mask = {}; // Land mask values
    std::vector<int> _local_id = {}; // Map from sparse to dense index
    std::vector<int> _global_id = {}; // Unique non-land grid point IDs
};
/*!
 * @brief Evaluates a netCDF C API call and throws std::runtime_error on failure.
 *
 * The call's return code is compared against NC_NOERR and, on error, the
 * corresponding nc_strerror() message is wrapped in the exception text.
 * The do { } while (false) wrapper makes the macro behave as a single
 * statement, so it is safe inside unbraced if/else bodies (the original
 * bare-brace form would not compile in `if (c) NC_CHECK(f); else ...`).
 * The local variable carries a macro-prefixed name to avoid shadowing or
 * capturing an `e` from the caller's scope.
 */
#define NC_CHECK(func)                                                                             \
    do {                                                                                           \
        int nc_check_err_ = (func);                                                                \
        if (nc_check_err_ != NC_NOERR)                                                             \
            throw std::runtime_error("ERROR: " + std::string(nc_strerror(nc_check_err_)));         \
    } while (false)