Skip to content

Commit

Permalink
api: cleanup based on first review
Browse files Browse the repository at this point in the history
  • Loading branch information
mloubout committed Jul 26, 2023
1 parent 1c4ee94 commit b8259f6
Show file tree
Hide file tree
Showing 14 changed files with 88 additions and 66 deletions.
1 change: 0 additions & 1 deletion devito/ir/clusters/cluster.py
Original file line number Diff line number Diff line change
Expand Up @@ -333,7 +333,6 @@ def dspace(self):

# Construct the `intervals` of the DataSpace, that is a global,
# Dimension-centric view of the data space

intervals = IntervalGroup.generate('union', *parts.values())
# E.g., `db0 -> time`, but `xi NOT-> x`
intervals = intervals.promote(lambda d: not d.is_Sub)
Expand Down
4 changes: 1 addition & 3 deletions devito/ir/equations/algorithms.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,9 +56,7 @@ def handle_indexed(indexed):
# such as A[3])
indexeds = retrieve_indexed(expr, deep=True)
for i in indexeds:
expl_dims = {d for (d, e) in zip(i.function.dimensions, i.indices)
if e.is_integer}
extra.update(expl_dims)
extra.update({d for d in i.function.dimensions if i.indices[d].is_integer})

# Enforce determinism
extra = filter_sorted(extra)
Expand Down
4 changes: 2 additions & 2 deletions devito/ir/support/basic.py
Original file line number Diff line number Diff line change
Expand Up @@ -660,8 +660,8 @@ def is_const(self, dim):
"""
True if this is a constant dependence, i.e., one involving no Dimensions; False otherwise.
"""
return (self.source.aindices.get(dim, None) is None and
self.sink.aindices.get(dim, None) is None and
return (self.source.aindices.get(dim) is None and
self.sink.aindices.get(dim) is None and
self.distance_mapper.get(dim, 0) == 0)

@memoized_meth
Expand Down
12 changes: 6 additions & 6 deletions devito/operations/interpolators.py
Original file line number Diff line number Diff line change
Expand Up @@ -141,7 +141,7 @@ def _weights(self):
raise NotImplementedError

@property
def _gdim(self):
def _gdims(self):
return self.grid.dimensions

@property
Expand All @@ -153,9 +153,9 @@ def _rdim(self):
parent = self.sfunction.dimensions[-1]
dims = [CustomDimension("r%s%s" % (self.sfunction.name, d.name),
-self.r+1, self.r, 2*self.r, parent)
for d in self._gdim]
for d in self._gdims]

return DimensionTuple(*dims, getters=self._gdim)
return DimensionTuple(*dims, getters=self._gdims)

def _augment_implicit_dims(self, implicit_dims):
return as_tuple(implicit_dims) + self.sfunction.dimensions
Expand All @@ -178,7 +178,7 @@ def _interp_idx(self, variables, implicit_dims=None):

# Coefficient symbol expression
temps.extend(self._coeff_temps(implicit_dims))
for ((di, d), rd, p) in zip(enumerate(self._gdim), self._rdim, pos):
for ((di, d), rd, p) in zip(enumerate(self._gdims), self._rdim, pos):
# Add conditional to avoid OOB
lb = sympy.And(rd + p >= d.symbolic_min - self.r, evaluate=False)
ub = sympy.And(rd + p <= d.symbolic_max + self.r, evaluate=False)
Expand Down Expand Up @@ -330,7 +330,7 @@ class LinearInterpolator(WeightedInterpolator):
@property
def _weights(self):
c = [(1 - p) * (1 - r) + p * r
for (p, d, r) in zip(self._point_symbols, self._gdim, self._rdim)]
for (p, d, r) in zip(self._point_symbols, self._gdims, self._rdim)]
return Mul(*c)

@cached_property
Expand All @@ -345,7 +345,7 @@ def _coeff_temps(self, implicit_dims):
pmap = self.sfunction._position_map
poseq = [Eq(self._point_symbols[d], pos - floor(pos),
implicit_dims=implicit_dims)
for (d, pos) in zip(self._gdim, pmap.keys())]
for (d, pos) in zip(self._gdims, pmap.keys())]
return poseq


Expand Down
2 changes: 1 addition & 1 deletion devito/passes/clusters/aliases.py
Original file line number Diff line number Diff line change
Expand Up @@ -837,7 +837,7 @@ def lower_schedule(schedule, meta, sregistry, ftemps):
# This prevents cases such as `floor(a*b)` with `a` and `b` floats
# that would create a temporary `int r = b`, leading to erroneous numerical results.
# Such cases happen, for example, with the positions of sparse functions.
dtype = sympy_dtype(pivot, meta.dtype) or meta.dtype
dtype = sympy_dtype(pivot, meta.dtype)

if writeto:
# The Dimensions defining the shape of Array
Expand Down
4 changes: 3 additions & 1 deletion devito/passes/iet/mpi.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,9 +47,11 @@ def _drop_halospots(iet):

# If a HaloSpot is outside any iteration it is not needed
for iters, halo_spots in MapNodes(Iteration, HaloSpot, 'groupby').visit(iet).items():
if iters:
continue
for hs in halo_spots:
for f, v in hs.fmapper.items():
if not iters and v.loc_indices:
if v.loc_indices:
mapper[hs].add(f)

# Transform the IET introducing the "reduced" HaloSpots
Expand Down
7 changes: 6 additions & 1 deletion devito/passes/iet/parpragma.py
Original file line number Diff line number Diff line change
Expand Up @@ -426,7 +426,12 @@ def _make_guard(self, parregion):

def _make_nested_partree(self, partree):
# Apply heuristic
if self.nhyperthreads <= self.nested or partree.root.is_ParallelAtomic:
if self.nhyperthreads <= self.nested:
return partree

# Loop nests with atomic reductions are likely to have lower latency;
# keep the outer loop parallel
if partree.root.is_ParallelAtomic:
return partree

# Note: there might be multiple sub-trees amenable to nested parallelism,
Expand Down
3 changes: 2 additions & 1 deletion devito/symbolics/inspection.py
Original file line number Diff line number Diff line change
Expand Up @@ -281,4 +281,5 @@ def sympy_dtype(expr, default):
return default
else:
# Infer expression dtype from its arguments
return infer_dtype([sympy_dtype(a, default) for a in expr.args])
dtype = infer_dtype([sympy_dtype(a, default) for a in expr.args])
return dtype or default
3 changes: 2 additions & 1 deletion devito/tools/data_structures.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ def __getnewargs_ex__(self):
# objects with varying number of attributes
return (tuple(self), dict(self.__dict__))

def get(self, key, val):
def get(self, key, val=None):
    """Return the value registered for *key*, or *val* if *key* is absent."""
    try:
        return self._getters[key]
    except KeyError:
        return val


Expand Down Expand Up @@ -605,6 +605,7 @@ class UnboundTuple(object):
"""
A simple data structure that keeps returning its last element once the end is reached
"""

def __init__(self, items):
self.items = as_tuple(items)
self.last = len(self.items)
Expand Down
3 changes: 3 additions & 0 deletions devito/tools/dtypes_lowering.py
Original file line number Diff line number Diff line change
Expand Up @@ -130,6 +130,9 @@ def dtype_to_mpitype(dtype):


def dtype_to_mpidtype(dtype):
    """
    Map a numpy dtype to the corresponding MPI datatype used for communication.
    """
    from devito.mpi import MPI
    # `np.dtype(...).char` yields the one-character type code MPI's typedict keys on
    type_char = np.dtype(dtype).char
    return MPI._typedict[type_char]

Expand Down
4 changes: 1 addition & 3 deletions devito/types/dense.py
Original file line number Diff line number Diff line change
Expand Up @@ -1460,9 +1460,7 @@ def parent(self):

@property
def origin(self):
"""
SubFunction have zero origin
"""
# SubFunctions have zero origin
return DimensionTuple(*(0 for _ in range(self.ndim)), getters=self.dimensions)


Expand Down
19 changes: 8 additions & 11 deletions devito/types/dimension.py
Original file line number Diff line number Diff line change
Expand Up @@ -107,8 +107,6 @@ class Dimension(ArgProvider):
is_Incr = False
is_Block = False

indirect = False

# Prioritize self's __add__ and __sub__ to construct AffineIndexAccessFunction
_op_priority = sympy.Expr._op_priority + 1.

Expand Down Expand Up @@ -183,6 +181,14 @@ def min_name(self):
def max_name(self):
return "%s_M" % self.name

@property
def indirect(self):
    # Default: a plain Dimension is not indirect (subclasses may override)
    return False

@property
def index(self):
    """The Dimension to use for indexing: self when indirect, the parent otherwise."""
    if self.indirect is True:
        return self
    return self.parent

@property
def is_const(self):
return False
Expand Down Expand Up @@ -456,7 +462,6 @@ class DerivedDimension(BasicDimension):
"""

is_Derived = True
indirect = False

__rargs__ = Dimension.__rargs__ + ('parent',)
__rkwargs__ = ()
Expand Down Expand Up @@ -815,14 +820,6 @@ def factor(self):
def condition(self):
return self._condition

@property
def indirect(self):
return self._indirect

@property
def index(self):
return self if self.indirect is True else self.parent

@cached_property
def free_symbols(self):
retval = set(super().free_symbols)
Expand Down
70 changes: 35 additions & 35 deletions devito/types/sparse.py
Original file line number Diff line number Diff line change
Expand Up @@ -136,19 +136,6 @@ def __subfunc_setup__(self, key, suffix, dtype=None):

return sf

@property
def npoint(self):
return self.shape[self._sparse_position]

@property
def space_order(self):
"""The space order."""
return self._space_order

@property
def r(self):
return self._radius

@property
def _sparse_dim(self):
return self.dimensions[self._sparse_position]
Expand Down Expand Up @@ -179,6 +166,37 @@ def _coords_indices(self):
np.floor((self.coordinates_data - self.grid.origin) / self.grid.spacing)
).astype(int)

@property
def _support(self):
"""
The grid points surrounding each sparse point within the radius of self's
injection/interpolation operators.
"""
max_shape = np.array(self.grid.shape).reshape(1, self.grid.dim)
minmax = lambda arr: np.minimum(max_shape, np.maximum(0, arr))
return np.stack([minmax(self._coords_indices + s) for s in self._point_support],
axis=2)

@property
def _dist_datamap(self):
"""
Mapper ``M : MPI rank -> required sparse data``.
"""
return self.grid.distributor.glb_to_rank(self._support) or {}

@property
def npoint(self):
    """The number of sparse points."""
    sparse_pos = self._sparse_position
    return self.shape[sparse_pos]

@property
def space_order(self):
    """The space order (set at construction)."""
    return self._space_order

@property
def r(self):
    # Radius of the injection/interpolation operators' support
    return self._radius

@property
def gridpoints(self):
try:
Expand Down Expand Up @@ -207,28 +225,6 @@ def coordinates_data(self):
except AttributeError:
return None

@property
def _support(self):
"""
The grid points surrounding each sparse point within the radius of self's
injection/interpolation operators.
"""
max_shape = np.array(self.grid.shape).reshape(1, self.grid.dim)
minmax = lambda arr: np.minimum(max_shape, np.maximum(0, arr))
return np.stack([minmax(self._coords_indices + s) for s in self._point_support],
axis=2)

@property
def _dist_datamap(self):
"""
Mapper ``M : MPI rank -> required sparse data``.
"""
return self.grid.distributor.glb_to_rank(self._support) or {}

@cached_property
def dist_origin(self):
return self._dist_origin

@cached_property
def _pos_symbols(self):
return [Symbol(name='pos%s' % d, dtype=np.int32)
Expand Down Expand Up @@ -265,6 +261,10 @@ def _dist_reorder_mask(self):
if d is not self._sparse_dim)
return ret

@cached_property
def dist_origin(self):
return self._dist_origin

def interpolate(self, *args, **kwargs):
"""
Implement an interpolation operation from the grid onto the given sparse points
Expand Down
18 changes: 18 additions & 0 deletions tests/test_pickle.py
Original file line number Diff line number Diff line change
Expand Up @@ -135,6 +135,24 @@ def test_precomputed_sparse_function(self, mode, pickle):
assert sf.dtype == new_sf.dtype
assert sf.npoint == new_sf.npoint == 3

def test_alias_sparse_function(self, pickle):
    """
    Check that an aliasing (data-less) rebuild of a SparseFunction
    round-trips through pickle: metadata is preserved, data is not.
    """
    grid = Grid(shape=(3,))
    sf = SparseFunction(name='sf', grid=grid, npoint=3, space_order=2,
                        coordinates=[(0.,), (1.,), (2.,)])
    sf.data[0] = 1.

    # Create alias
    f0 = sf._rebuild(name='f0', alias=True)
    pkl_f0 = pickle.dumps(f0)
    new_f0 = pickle.loads(pkl_f0)

    # Aliases carry no data payload, before or after pickling
    assert f0.data is None and new_f0.data is None
    assert f0.coordinates.data is None and new_f0.coordinates.data is None

    # Metadata survives both the rebuild and the pickle round-trip
    assert sf.space_order == f0.space_order == new_f0.space_order
    assert sf.dtype == f0.dtype == new_f0.dtype
    assert sf.npoint == f0.npoint == new_f0.npoint

def test_internal_symbols(self, pickle):
s = dSymbol(name='s', dtype=np.float32)
pkl_s = pickle.dumps(s)
Expand Down

0 comments on commit b8259f6

Please sign in to comment.