api: enforce gridpoints as subfunc for precomputed
mloubout committed May 19, 2023
1 parent bae42e4 commit 1f61f3a
Showing 3 changed files with 78 additions and 64 deletions.
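The gist of the change: PrecomputedSparseFunction no longer keeps a coordinates SubFunction; coordinates passed at construction are floored onto grid indices up front, so both construction paths end up with the same gridpoints/interpolation_coeffs pair. A minimal sketch of the resulting user-facing behaviour (values borrowed from the new test in tests/test_mpi.py, here assumed to run serially so all four points live on one rank):

import numpy as np
from devito import Grid, PrecomputedSparseFunction

r = 2
grid = Grid(shape=(4, 4), extent=(3.0, 3.0))  # spacing (1.0, 1.0), origin (0.0, 0.0)

coords = np.array([(1.0, 1.0), (2.0, 2.0), (1.0, 2.0), (2.0, 1.0)])
points = np.array([(1, 1), (2, 2), (1, 2), (2, 1)])
coeffs = np.ones((4, 2, r))

# Physical coordinates are converted to integer grid indices at construction...
sf1 = PrecomputedSparseFunction(name="sf1", grid=grid, coordinates=coords,
                                npoint=4, interpolation_coeffs=coeffs, r=r)
# ...so this is equivalent to passing the grid indices directly.
sf2 = PrecomputedSparseFunction(name="sf2", grid=grid, gridpoints=points,
                                npoint=4, interpolation_coeffs=coeffs, r=r)

# Both objects carry the same integer `gridpoints` subfunction.
assert np.all(sf1.gridpoints_data == sf2.gridpoints_data)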
devito/types/grid.py: 2 changes (1 addition, 1 deletion)
@@ -226,7 +226,7 @@ def origin_ioffset(self):
"""Offset index of the local (per-process) origin from the domain origin."""
grid_origin = [min(i) for i in self.distributor.glb_numb]
assert len(grid_origin) == len(self.spacing)
return grid_origin
return tuple(grid_origin)

@property
def origin_offset(self):
devito/types/sparse.py: 103 changes (43 additions, 60 deletions)
@@ -127,6 +127,7 @@ def __subfunc_setup__(self, key, suffix):

dimensions = dimensions[:n]
shape = shape[:n]

sf = SubFunction(
name=name, parent=self, dtype=dtype, dimensions=dimensions,
shape=shape, space_order=0, initializer=key, alias=self.alias,
@@ -167,6 +168,11 @@ def _sparse_dim(self):
def _mpitype(self):
return MPI._typedict[np.dtype(self.dtype).char]

@property
def _smpitype(self):
sfuncs = [getattr(self, s) for s in self._sub_functions]
return {s: MPI._typedict[np.dtype(s.dtype).char] for s in sfuncs}

@property
def comm(self):
return self.grid.distributor.comm
@@ -272,7 +278,7 @@ def _support(self):
"""
max_shape = np.array(self.grid.shape).reshape(1, self.grid.dim)
minmax = lambda arr: np.minimum(max_shape, np.maximum(0, arr))
return np.stack([minmax(self.gridpoints + s) for s in self._point_support],
return np.stack([minmax(self.gridpoints_data + s) for s in self._point_support],
axis=2)

@property
@@ -435,14 +441,13 @@ def _dist_subfunc_scatter(self, subfunc):
_, scount, sdisp, rshape, rcount, rdisp = \
self._dist_subfunc_alltoall(subfunc, dmap=dmap)
scattered = np.empty(shape=rshape, dtype=subfunc.dtype)
self.comm.Alltoallv([sfuncd, scount, sdisp, self._mpitype],
[scattered, rcount, rdisp, self._mpitype])
self.comm.Alltoallv([sfuncd, scount, sdisp, self._smpitype[subfunc]],
[scattered, rcount, rdisp, self._smpitype[subfunc]])
sfuncd = scattered

# Translate global coordinates into local coordinates
if self.dist_origin[subfunc] is not None:
sfuncd = sfuncd - np.array(self.dist_origin[subfunc], dtype=self.dtype)

sfuncd = sfuncd - np.array(self.dist_origin[subfunc], dtype=subfunc.dtype)
return {subfunc: sfuncd}

def _dist_data_gather(self, data):
@@ -480,13 +485,13 @@ def _dist_subfunc_gather(self, sfuncd, sfunc):
mask = self._dist_scatter_mask(dmap=dmap)
# Pack (reordered) coordinates so that they can be sent out via an Alltoallv
if self.dist_origin[sfunc] is not None:
sfuncd = sfuncd + np.array(self.dist_origin[sfunc], dtype=self.dtype)
sfuncd = sfuncd + np.array(self.dist_origin[sfunc], dtype=sfunc.dtype)
# Send out the sparse point coordinates
sshape, scount, sdisp, _, rcount, rdisp = \
self._dist_subfunc_alltoall(sfunc, dmap=dmap)
gathered = np.empty(shape=sshape, dtype=sfunc.dtype)
self.comm.Alltoallv([sfuncd, rcount, rdisp, self._mpitype],
[gathered, scount, sdisp, self._mpitype])
self.comm.Alltoallv([sfuncd, rcount, rdisp, self._smpitype[sfunc]],
[gathered, scount, sdisp, self._smpitype[sfunc]])
sfunc.data._local[mask[self._sparse_position]] = gathered[:]

# Note: this method "mirrors" `_dist_scatter`: a sparse point that is sent
@@ -503,7 +508,8 @@ def _dist_scatter(self, data=None):
def _dist_gather(self, data, *subfunc):
self._dist_data_gather(data)
for (sg, s) in zip(subfunc, self._sub_functions):
self._dist_subfunc_gather(sg, getattr(self, s))
if getattr(self, s) is not None:
self._dist_subfunc_gather(sg, getattr(self, s))

def _arg_defaults(self, alias=None):
key = alias or self
@@ -773,7 +779,9 @@ def gridpoints(self):
raise ValueError("No coordinates attached to this SparseFunction")
return (
np.floor(self.coordinates.data._local - self.grid.origin) / self.grid.spacing
).astype(int)
).astype(np.int32)

gridpoints_data = gridpoints

def guard(self, expr=None, offset=0):
"""
@@ -1028,11 +1036,10 @@ class PrecomputedSparseFunction(AbstractSparseFunction):
uses `*args` to (re-)create the dimension arguments of the symbolic object.
"""

_sub_functions = ('coordinates', 'gridpoints', 'interpolation_coeffs')
_sub_functions = ('gridpoints', 'interpolation_coeffs')

__rkwargs__ = (AbstractSparseFunction.__rkwargs__ +
('r', 'coordinates_data', 'gridpoints_data',
'interpolation_coeffs_data'))
('r', 'gridpoints_data', 'interpolation_coeffs_data'))

def __init_finalize__(self, *args, **kwargs):
super().__init_finalize__(*args, **kwargs)
@@ -1054,19 +1061,19 @@ def __init_finalize__(self, *args, **kwargs):

# Specifying only `npoints` is acceptable; this will require the user
# to setup the coordinates data later on
npoint = kwargs.get('npoint', None)
if self.npoint and coordinates is None and gridpoints is None:
coordinates = np.zeros((npoint, self.grid.dim))
gridpoints = np.zeros((self.npoint, self.grid.dim))

if coordinates is not None:
self._coordinates = self.__subfunc_setup__(coordinates, 'coords')
self._gridpoints = None
self._dist_origin = {self._coordinates: self.grid.origin_offset}
# Convert to gridpoints
if isinstance(coordinates, SubFunction):
raise ValueError("`coordinates` only accepted as array")
loc = np.floor((coordinates - self.grid.origin) / self.grid.spacing)
self._gridpoints = self.__subfunc_setup__(loc.astype(int), 'gridpoints')
else:
assert gridpoints is not None
self._coordinates = None
self._gridpoints = self.__subfunc_setup__(gridpoints, 'gridpoints')
self._dist_origin = {self._coordinates: self.grid.origin_ioffset}
self._dist_origin = {self._gridpoints: self.grid.origin_ioffset}

# Setup the interpolation coefficients. These are compulsory
interpolation_coeffs = kwargs.get('interpolation_coeffs',
@@ -1091,14 +1098,19 @@ def _point_increments(self):
"""Index increments in each dimension for each point symbol."""
return tuple(product(range(-self.r//2+1, self.r//2+1), repeat=self.grid.dim))

@property
def coordinates(self):
return self._coordinates
@cached_property
def _point_support(self):
return np.array(tuple(product(range(-self.r // 2 + 1, self.r // 2 + 1),
repeat=self.grid.dim)))

@property
def gridpoints(self):
return self._gridpoints

@property
def gridpoints_data(self):
return self.gridpoints.data._local

@property
def dist_origin(self):
return self._dist_origin
@@ -1108,41 +1120,16 @@ def interpolation_coeffs(self):
""" The Precomputed interpolation coefficients."""
return self._interpolation_coeffs

@property
def coordinates_data(self):
try:
return self.coordinates.data.view(np.ndarray)
except AttributeError:
return None

@property
def gridpoints_data(self):
try:
return self._gridpoints.data.view(np.ndarray)
except AttributeError:
return None

@cached_property
def coords_or_points(self):
if self.gridpoints is None:
return self.coordinates
else:
return self.gridpoints

@property
def interpolation_coeffs_data(self):
return self.interpolation_coeffs.data.view(np.ndarray)
return self.interpolation_coeffs.data._local

@cached_property
def _coordinate_symbols(self):
"""Symbol representing the coordinate values in each dimension."""
p_dim = self.indices[self._sparse_position]
if self._gridpoints is None:
return tuple([self.coordinates.indexify((p_dim, i))
for i in range(self.grid.dim)])
else:
return tuple([self.gridpoints.indexify((p_dim, i)) * d
for (i, d) in enumerate(self.grid.spacing_symbols)])
return tuple([self.coordinates.indexify((p_dim, i))
for i in range(self.grid.dim)])

@memoized_meth
def _index_matrix(self, offset):
@@ -1152,21 +1139,17 @@ def _index_matrix(self, offset):
# ConditionalDimensions for a given set of indirection indices

# List of indirection indices for all adjacent grid points
if self._gridpoints is None:
index_matrix = [tuple(idx + ii + offset
for ii, idx in zip(inc, self._coordinate_indices))
for inc in self._point_increments]
else:
ddim = self._gridpoints.dimensions[1]
index_matrix = [tuple(self._gridpoints._subs(ddim, d) + ii + offset
for (ii, d) in zip(inc, range(self.grid.dim)))
for inc in self._point_increments]
ddim = self._gridpoints.dimensions[1]
index_matrix = [tuple(self._gridpoints._subs(ddim, d) + ii + offset
for (ii, d) in zip(inc, range(self.grid.dim)))
for inc in self._point_increments]
shifts = [tuple(ii + offset for ii in inc)
for inc in self._point_increments]
# A unique symbol for each indirection index
indices = filter_ordered(flatten(index_matrix))
points = OrderedDict([(p, Symbol(name='ii_%s_%d' % (self.name, i)))
for i, p in enumerate(indices)])

return index_matrix, points, shifts


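For reference, the coordinate-to-gridpoint conversion that __init_finalize__ now performs is the same floor-onto-grid-indices mapping used for SparseFunction.gridpoints. A stand-alone illustration with assumed values matching the 4x4 grid in the new test:

import numpy as np

# Assumed values: Grid(shape=(4, 4), extent=(3.0, 3.0)) has spacing (1.0, 1.0)
# and origin (0.0, 0.0).
origin = np.array([0.0, 0.0])
spacing = np.array([1.0, 1.0])
coordinates = np.array([(1.0, 1.0), (2.0, 2.0), (1.0, 2.0), (2.0, 1.0)])

# Same expression as in __init_finalize__:
#   loc = np.floor((coordinates - self.grid.origin) / self.grid.spacing)
gridpoints = np.floor((coordinates - origin) / spacing).astype(int)
# gridpoints == [[1, 1], [2, 2], [1, 2], [2, 1]], i.e. the `points` array
# passed explicitly to the second PrecomputedSparseFunction in the test.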
tests/test_mpi.py: 37 changes (34 additions, 3 deletions)
@@ -6,7 +6,7 @@
from devito import (Grid, Constant, Function, TimeFunction, SparseFunction,
SparseTimeFunction, Dimension, ConditionalDimension, SubDimension,
SubDomain, Eq, Ne, Inc, NODE, Operator, norm, inner, configuration,
switchconfig, generic_derivative)
switchconfig, generic_derivative, PrecomputedSparseFunction)
from devito.data import LEFT, RIGHT
from devito.ir.iet import (Call, Conditional, Iteration, FindNodes, FindSymbols,
retrieve_iteration_tree)
@@ -507,6 +507,36 @@ def test_sparse_coords_issue1823(self):

assert np.allclose(rec.coordinates.data[:], ref.coordinates.data)

@pytest.mark.parallel(mode=4)
@pytest.mark.parametrize('r', [2])
def test_precomputed_sparse(self, r):
grid = Grid(shape=(4, 4), extent=(3.0, 3.0))

coords = np.array([(1.0, 1.0), (2.0, 2.0), (1.0, 2.0), (2.0, 1.0)])
points = np.array([(1, 1), (2, 2), (1, 2), (2, 1)])
coeffs = np.ones((4, 2, r))

sf1 = PrecomputedSparseFunction(name="sf1", grid=grid, coordinates=coords,
npoint=4, interpolation_coeffs=coeffs, r=r)
sf2 = PrecomputedSparseFunction(name="sf2", grid=grid, gridpoints=points,
npoint=4, interpolation_coeffs=coeffs, r=r)

assert sf1.npoint == 1
assert sf2.npoint == 1
assert np.all(sf1.gridpoints.data.shape == (1, 2))
assert np.all(sf2.gridpoints.data.shape == (1, 2))
assert np.all(sf1.gridpoints_data == sf2.gridpoints_data)
assert np.all(sf1.interpolation_coeffs.shape == (1, 2, r))
assert np.all(sf2.interpolation_coeffs.shape == (1, 2, r))

u = Function(name="u", grid=grid, space_order=r)
u._data_with_outhalo[:] = 1
Operator(sf2.interpolate(u))()
print(sf2.data)
assert np.all(sf2.data == 4)
Operator(sf1.interpolate(u))()
assert np.all(sf1.data == 4)


class TestOperatorSimple(object):

@@ -2411,6 +2441,7 @@ def test_adjoint_F_no_omp(self):
# TestDecomposition().test_reshape_left_right()
# TestOperatorSimple().test_trivial_eq_2d()
# TestFunction().test_halo_exchange_bilateral()
# TestSparseFunction().test_scatter_gather()
# TestSparseFunction().test_sparse_coords()
TestSparseFunction().test_precomputed_sparse(2)
# TestOperatorAdvanced().test_fission_due_to_antidep()
TestIsotropicAcoustic().test_adjoint_F(1)
# TestIsotropicAcoustic().test_adjoint_F(1)
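As a sanity check on the numbers asserted in test_precomputed_sparse (a sketch of the arithmetic only, not of the generated operator): with 4 ranks over the 4x4 grid each rank ends up owning exactly one of the four points, hence the local shapes (1, 2) and (1, 2, r); and with u == 1 everywhere and unit coefficients, every interpolated value is just the number of stencil points:

r, ndim = 2, 2
u_value = 1.0   # u._data_with_outhalo[:] = 1
coeff = 1.0     # interpolation_coeffs are all ones
# Each sparse point gathers r**ndim grid values, each weighted by the
# product of one coefficient per dimension:
expected = sum(coeff ** ndim * u_value for _ in range(r ** ndim))
assert expected == 4.0  # matches `assert np.all(sf2.data == 4)` above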
