Commit

Merge branch 'main' into stokes_biharmonic2
isuruf authored Aug 22, 2022
2 parents f388502 + 879ad35 commit fec9eab
Showing 5 changed files with 221 additions and 48 deletions.
125 changes: 125 additions & 0 deletions examples/laplace-dirichlet-simple.py
@@ -0,0 +1,125 @@
import numpy as np

from meshmode.array_context import PyOpenCLArrayContext
from meshmode.discretization import Discretization
from meshmode.discretization.poly_element import \
InterpolatoryQuadratureSimplexGroupFactory

from pytential import bind, sym
from pytential.target import PointsTarget

# {{{ set some constants for use below

nelements = 20
bdry_quad_order = 4
mesh_order = bdry_quad_order
qbx_order = bdry_quad_order
bdry_ovsmp_quad_order = 4*bdry_quad_order
fmm_order = False
k = 3

# }}}


def main(mesh_name="starfish", visualize=False):
import logging
logging.basicConfig(level=logging.INFO)

import pyopencl as cl
cl_ctx = cl.create_some_context()
queue = cl.CommandQueue(cl_ctx)
actx = PyOpenCLArrayContext(queue, force_device_scalars=True)

from meshmode.mesh.generation import ellipse, make_curve_mesh, starfish
from functools import partial

if mesh_name == "ellipse":
mesh = make_curve_mesh(
partial(ellipse, 1),
np.linspace(0, 1, nelements+1),
mesh_order)
elif mesh_name == "starfish":
mesh = make_curve_mesh(
starfish,
np.linspace(0, 1, nelements+1),
mesh_order)
else:
raise ValueError(f"unknown mesh name: {mesh_name}")

pre_density_discr = Discretization(
actx, mesh,
InterpolatoryQuadratureSimplexGroupFactory(bdry_quad_order))

from pytential.qbx import QBXLayerPotentialSource
qbx = QBXLayerPotentialSource(
pre_density_discr, fine_order=bdry_ovsmp_quad_order, qbx_order=qbx_order,
fmm_order=fmm_order
)

from sumpy.visualization import FieldPlotter
fplot = FieldPlotter(np.zeros(2), extent=5, npoints=500)
targets = actx.from_numpy(fplot.points)

from pytential import GeometryCollection
places = GeometryCollection({
"qbx": qbx,
"qbx_high_target_assoc_tol": qbx.copy(target_association_tolerance=0.05),
"targets": PointsTarget(targets)
}, auto_where="qbx")
density_discr = places.get_discretization("qbx")

# {{{ describe bvp

from sumpy.kernel import LaplaceKernel
kernel = LaplaceKernel(2)

sigma_sym = sym.var("sigma")

# -1 for interior Dirichlet
# +1 for exterior Dirichlet
loc_sign = +1

bdry_op_sym = (-loc_sign*0.5*sigma_sym
- sym.D(kernel, sigma_sym, qbx_forced_limit="avg"))

# }}}

bound_op = bind(places, bdry_op_sym)

# {{{ fix rhs and solve

nodes = actx.thaw(density_discr.nodes())
bvp_rhs = actx.np.sin(nodes[0])

from pytential.linalg.gmres import gmres
gmres_result = gmres(
bound_op.scipy_op(actx, sigma_sym.name, dtype=np.float64),
bvp_rhs, tol=1e-8, progress=True,
stall_iterations=0,
hard_failure=True)

# }}}

# {{{ postprocess/visualize

repr_kwargs = dict(
source="qbx_high_target_assoc_tol",
target="targets",
qbx_forced_limit=None)
representation_sym = (
- sym.D(kernel, sigma_sym, **repr_kwargs))

fld_in_vol = actx.to_numpy(
bind(places, representation_sym)(
actx, sigma=gmres_result.solution, k=k))

if visualize:
fplot.write_vtk_file("laplace-dirichlet-potential.vts", [
("potential", fld_in_vol),
])

# }}}


if __name__ == "__main__":
main(visualize=True)
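
For orientation, the boundary-value problem set up in the new example (loc_sign = +1, i.e. the exterior Dirichlet problem for the 2D Laplace equation) uses a pure double-layer representation. The summary below is not part of the committed file; it only restates the standard jump relations the code relies on:

    u = -D\sigma \quad \text{away from } \Gamma,
    \qquad
    \lim_{x \to \Gamma^{\pm}} (D\sigma)(x) = \pm\tfrac{1}{2}\,\sigma(x) + (D\sigma)_{\mathrm{avg}}(x),

so imposing u = g on \Gamma (here g(x) = \sin(x_0)) gives the second-kind integral equation solved by GMRES,

    -\tfrac{1}{2}\,\mathrm{loc\_sign}\,\sigma - (D\sigma)_{\mathrm{avg}} = g,

which is exactly bdry_op_sym above. Running the script as python examples/laplace-dirichlet-simple.py then evaluates the solved density on the FieldPlotter grid and, with visualize=True, writes laplace-dirichlet-potential.vts.
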
4 changes: 3 additions & 1 deletion pytential/qbx/__init__.py
@@ -643,7 +643,9 @@ def exec_compute_potential_insn_fmm(self, actx: PyOpenCLArrayContext,
self.qbx_order,
self.fmm_level_to_order,
source_extra_kwargs=source_extra_kwargs,
kernel_extra_kwargs=kernel_extra_kwargs)
kernel_extra_kwargs=kernel_extra_kwargs,
_use_target_specific_qbx=self._use_target_specific_qbx,
)

from pytential.qbx.geometry import target_state
if actx.to_numpy(actx.np.any(
26 changes: 24 additions & 2 deletions pytential/qbx/fmm.py
@@ -112,13 +112,24 @@ class QBXExpansionWrangler(SumpyExpansionWrangler):
def __init__(self, tree_indep, geo_data, dtype,
qbx_order, fmm_level_to_order,
source_extra_kwargs, kernel_extra_kwargs,
translation_classes_data=None,
_use_target_specific_qbx=None):
if _use_target_specific_qbx:
raise ValueError("TSQBX is not implemented in sumpy")

base_kernel = tree_indep.get_base_kernel()
if translation_classes_data is None and base_kernel.is_translation_invariant:
from pytential.qbx.fmm import translation_classes_builder
traversal = geo_data.traversal()
actx = geo_data._setup_actx

translation_classes_data, _ = translation_classes_builder(actx)(
actx.queue, traversal, traversal.tree, is_translation_per_level=True)

super().__init__(
tree_indep, geo_data.traversal(),
dtype, fmm_level_to_order, source_extra_kwargs, kernel_extra_kwargs)
tree_indep, traversal,
dtype, fmm_level_to_order, source_extra_kwargs, kernel_extra_kwargs,
translation_classes_data=translation_classes_data)

self.qbx_order = qbx_order
self.geo_data = geo_data
@@ -375,6 +386,17 @@ def eval_target_specific_qbx_locals(self, src_weight_vecs):

# }}}


def translation_classes_builder(actx):
from pytools import memoize_in

@memoize_in(actx, (QBXExpansionWrangler, translation_classes_builder))
def make_container():
from boxtree.translation_classes import TranslationClassesBuilder
return TranslationClassesBuilder(actx.context)

return make_container()

# }}}


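The new translation_classes_builder helper caches one TranslationClassesBuilder per array context via pytools.memoize_in, and the wrangler only builds translation classes data when the base kernel is translation-invariant. Below is a minimal, self-contained sketch of that caching pattern; FakeBuilder, Container, and get_builder are made-up stand-ins for illustration, not pytential API:

from pytools import memoize_in


class FakeBuilder:
    """Stand-in for an expensive-to-construct object such as
    TranslationClassesBuilder."""
    instances = 0

    def __init__(self):
        FakeBuilder.instances += 1


def get_builder(container):
    # memoize_in stores the result on *container*, keyed by the tuple below,
    # so every later call with the same container returns the cached object.
    @memoize_in(container, (get_builder, "builder"))
    def make_builder():
        return FakeBuilder()

    return make_builder()


class Container:
    """Stands in for the PyOpenCLArrayContext used as the cache container."""


ctx = Container()
assert get_builder(ctx) is get_builder(ctx)   # same cached instance
assert FakeBuilder.instances == 1             # constructed exactly once
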
56 changes: 24 additions & 32 deletions pytential/symbolic/execution.py
@@ -124,29 +124,31 @@ def map_node_min(self, expr):
def _map_elementwise_reduction(self, reduction_name, expr):
import loopy as lp
from arraycontext import make_loopy_program
from meshmode.transform_metadata import (
ConcurrentElementInameTag, ConcurrentDOFInameTag)
from meshmode.transform_metadata import ConcurrentElementInameTag
actx = self.array_context

@memoize_in(self.places, "elementwise_node_"+reduction_name)
@memoize_in(actx, (
EvaluationMapperBase._map_elementwise_reduction,
f"elementwise_node_{reduction_name}"))
def node_knl():
t_unit = make_loopy_program(
"""{[iel, idof, jdof]:
0<=iel<nelements and
0<=idof, jdof<ndofs}""",
"""
result[iel, idof] = %s(jdof, operand[iel, jdof])
<> el_result = %s(jdof, operand[iel, jdof])
result[iel, idof] = el_result
""" % reduction_name,
name="nodewise_reduce")
name=f"elementwise_node_{reduction_name}")

return lp.tag_inames(t_unit, {
"iel": ConcurrentElementInameTag(),
"idof": ConcurrentDOFInameTag(),
})

@memoize_in(self.places, "elementwise_"+reduction_name)
@memoize_in(actx, (
EvaluationMapperBase._map_elementwise_reduction,
f"elementwise_element_{reduction_name}"))
def element_knl():
# FIXME: This computes the reduction value redundantly for each
# output DOF.
t_unit = make_loopy_program(
"""{[iel, jdof]:
0<=iel<nelements and
@@ -155,37 +157,27 @@ def element_knl():
"""
result[iel, 0] = %s(jdof, operand[iel, jdof])
""" % reduction_name,
name="elementwise_reduce")
name=f"elementwise_element_{reduction_name}")

return lp.tag_inames(t_unit, {
"iel": ConcurrentElementInameTag(),
})

discr = self.places.get_discretization(
expr.dofdesc.geometry, expr.dofdesc.discr_stage)
dofdesc = expr.dofdesc
operand = self.rec(expr.operand)
assert operand.shape == (len(discr.groups),)

def _reduce(knl, result):
for g_operand, g_result in zip(operand, result):
self.array_context.call_loopy(
knl, operand=g_operand, result=g_result)

return result

dtype = operand.entry_dtype
granularity = expr.dofdesc.granularity
if granularity is sym.GRANULARITY_NODE:
return _reduce(node_knl(),
discr.empty(self.array_context, dtype=dtype))
elif granularity is sym.GRANULARITY_ELEMENT:
result = DOFArray(self.array_context, tuple([
self.array_context.empty((grp.nelements, 1), dtype=dtype)
for grp in discr.groups

if dofdesc.granularity is sym.GRANULARITY_NODE:
return type(operand)(actx, tuple([
actx.call_loopy(node_knl(), operand=operand_i)["result"]
for operand_i in operand
]))
elif dofdesc.granularity is sym.GRANULARITY_ELEMENT:
return type(operand)(actx, tuple([
actx.call_loopy(element_knl(), operand=operand_i)["result"]
for operand_i in operand
]))
return _reduce(element_knl(), result)
else:
raise ValueError(f"unsupported granularity: {granularity}")
raise ValueError(f"unsupported granularity: {dofdesc.granularity}")

def map_elementwise_sum(self, expr):
return self._map_elementwise_reduction("sum", expr)
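The rewritten _map_elementwise_reduction dispatches on the DOF descriptor's granularity: node_knl writes the per-element reduction back onto every DOF of that element, while element_knl produces a single value per element. A plain-numpy sketch of the two outputs, illustrative only and assuming one element group stored as a (nelements, ndofs) array:

import numpy as np

operand = np.arange(12.0).reshape(3, 4)   # one element group: (nelements, ndofs)

# GRANULARITY_NODE: per-element sum, repeated for every DOF of the element
node_result = np.tile(operand.sum(axis=1, keepdims=True), operand.shape[1])
assert node_result.shape == (3, 4)

# GRANULARITY_ELEMENT: one reduced value per element
element_result = operand.sum(axis=1, keepdims=True)
assert element_result.shape == (3, 1)
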
58 changes: 45 additions & 13 deletions test/test_symbolic.py
@@ -306,26 +306,58 @@ def test_node_reduction(actx_factory):

# {{{ test

# create a shuffled [1, nelements + 1] array
ary = []
el_nr_base = 0
for grp in discr.groups:
x = 1 + np.arange(el_nr_base, grp.nelements)
np.random.shuffle(x)
# create a shuffled [1, ndofs + 1] array
rng = np.random.default_rng(seed=42)

ary.append(actx.freeze(actx.from_numpy(x.reshape(-1, 1))))
el_nr_base += grp.nelements
def randrange_like(xi, offset):
x = offset + np.arange(1, xi.size + 1)
rng.shuffle(x)

return actx.from_numpy(x.reshape(xi.shape))

from meshmode.dof_array import DOFArray
ary = DOFArray(actx, tuple(ary))
base_node_nrs = np.cumsum([0] + [grp.ndofs for grp in discr.groups])
ary = DOFArray(actx, tuple([
randrange_like(xi, offset)
for xi, offset in zip(discr.nodes()[0], base_node_nrs)
]))

n = discr.ndofs
for func, expected in [
(sym.NodeSum, nelements * (nelements + 1) // 2),
(sym.NodeMax, nelements),
(sym.NodeSum, n * (n + 1) // 2),
(sym.NodeMax, n),
(sym.NodeMin, 1),
]:
r = bind(discr, func(sym.var("x")))(actx, x=ary)
assert abs(actx.to_numpy(r) - expected) < 1.0e-15, r
r = actx.to_numpy(
bind(discr, func(sym.var("x")))(actx, x=ary)
)
assert r == expected, (r, expected)

arys = tuple([rng.random(size=xi.shape) for xi in ary])
x = DOFArray(actx, tuple([actx.from_numpy(xi) for xi in arys]))

from meshmode.dof_array import flat_norm
for func, np_func in [
(sym.ElementwiseSum, np.sum),
(sym.ElementwiseMax, np.max)
]:
expected = DOFArray(actx, tuple([
actx.from_numpy(np.tile(np_func(xi, axis=1, keepdims=True), xi.shape[1]))
for xi in arys
]))
r = bind(
discr, func(sym.var("x"), dofdesc=sym.GRANULARITY_NODE)
)(actx, x=x)
assert actx.to_numpy(flat_norm(r - expected)) < 1.0e-15

expected = DOFArray(actx, tuple([
actx.from_numpy(np_func(xi, axis=1, keepdims=True))
for xi in arys
]))
r = bind(
discr, func(sym.var("x"), dofdesc=sym.GRANULARITY_ELEMENT)
)(actx, x=x)
assert actx.to_numpy(flat_norm(r - expected)) < 1.0e-15

# }}}

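The reworked test fills the discretization with a shuffled permutation of 1, ..., ndofs, so the node reductions have the closed-form values asserted above. A standalone check of that arithmetic; n = 20 is an arbitrary stand-in for discr.ndofs:

import numpy as np

rng = np.random.default_rng(seed=42)
n = 20                       # stands in for discr.ndofs
x = 1 + np.arange(n)         # permutation of 1..n after shuffling
rng.shuffle(x)

assert x.sum() == n * (n + 1) // 2   # sym.NodeSum
assert x.max() == n                  # sym.NodeMax
assert x.min() == 1                  # sym.NodeMin
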
