diff --git a/doc/symbolic.rst b/doc/symbolic.rst index e6de9534803468d96993d77f1c93c42275596217..86d580f6eef341b7d256c59b21186abda1eb1141 100644 --- a/doc/symbolic.rst +++ b/doc/symbolic.rst @@ -13,6 +13,8 @@ Binding an operator to a discretization .. currentmodule:: pytential +.. autoclass:: GeometryCollection + .. autofunction:: bind PDE operators diff --git a/pytential/__init__.py b/pytential/__init__.py index 1d07499d48607e5eeb10899dfde365cdb601292b..d28e8bdbcc8be2377ee575edeeb8e2b6ce0fb6e7 100644 --- a/pytential/__init__.py +++ b/pytential/__init__.py @@ -25,6 +25,7 @@ THE SOFTWARE. import numpy as np import pytential.symbolic.primitives as sym +from pytential.symbolic.execution import GeometryCollection # noqa from pytential.symbolic.execution import bind from pytools import memoize_on_first_arg diff --git a/pytential/qbx/__init__.py b/pytential/qbx/__init__.py index 18b356be5c435dd0f5cc9ec14d12e18280b7b174..188660f72b642ca67d0882a9e92b25e211e717fa 100644 --- a/pytential/qbx/__init__.py +++ b/pytential/qbx/__init__.py @@ -564,7 +564,7 @@ class QBXLayerPotentialSource(LayerPotentialSourceBase): maxstretch = bind( self, sym._simplex_mapping_max_stretch_factor( self.ambient_dim, - where=sym._QBXSourceStage2(sym.DEFAULT_SOURCE)) + where=sym.QBXSourceStage2(sym.DEFAULT_SOURCE)) )(queue) maxstretch = utils.to_last_dim_length( self.stage2_density_discr, maxstretch, last_dim_length) diff --git a/pytential/source.py b/pytential/source.py index a8d0cba52feee0c4fb5fb3ccf7080d2cb45a7e4c..9334dff7e1785cf7d1b36c991effcd6382d2a52e 100644 --- a/pytential/source.py +++ b/pytential/source.py @@ -38,7 +38,7 @@ __doc__ = """ class PotentialSource(object): """ - .. method:: preprocess_optemplate(name, expr) + .. automethod:: preprocess_optemplate .. method:: op_group_features(expr) @@ -49,29 +49,43 @@ class PotentialSource(object): :class:`pytential.symbolic.primitives.IntG`. """ + def preprocess_optemplate(self, name, discretizations, expr): + return expr + # {{{ point potential source class PointPotentialSource(PotentialSource): """ - .. attribute:: points + .. attribute:: nodes - An :class:`pyopencl.array.Array` of shape ``[ambient_dim, npoints]``. + An :class:`pyopencl.array.Array` of shape ``[ambient_dim, nnodes]``. .. 
attribute:: nnodes """ - def __init__(self, cl_context, points): + def __init__(self, cl_context, nodes): self.cl_context = cl_context - self.points = points + self._nodes = nodes + + @property + def points(self): + from warnings import warn + warn("'points' has been renamed to nodes().", + DeprecationWarning, stacklevel=2) + + return self._nodes + + def nodes(self): + return self._nodes @property def real_dtype(self): - return self.points.dtype + return self._nodes.dtype @property def nnodes(self): - return self.points.shape[-1] + return self._nodes.shape[-1] @property def complex_dtype(self): @@ -82,7 +96,7 @@ class PointPotentialSource(PotentialSource): @property def ambient_dim(self): - return self.points.shape[0] + return self._nodes.shape[0] def op_group_features(self, expr): from sumpy.kernel import AxisTargetDerivativeRemover @@ -129,7 +143,7 @@ class PointPotentialSource(PotentialSource): p2p = self.get_p2p(insn.kernels) evt, output_for_each_kernel = p2p(queue, - target_discr.nodes(), self.points, + target_discr.nodes(), self._nodes, [strengths], **kernel_args) result.append((o.name, output_for_each_kernel[o.kernel_index])) @@ -139,7 +153,7 @@ class PointPotentialSource(PotentialSource): @memoize_method def weights_and_area_elements(self): with cl.CommandQueue(self.cl_context) as queue: - result = cl.array.empty(queue, self.points.shape[-1], + result = cl.array.empty(queue, self._nodes.shape[-1], dtype=self.real_dtype) result.fill(1) diff --git a/pytential/symbolic/execution.py b/pytential/symbolic/execution.py index 45840de2701898e6de8336d7d25792e9360d8d22..976e352f48c6a03c61429830758eb24517a47407 100644 --- a/pytential/symbolic/execution.py +++ b/pytential/symbolic/execution.py @@ -1,6 +1,9 @@ from __future__ import division, absolute_import -__copyright__ = "Copyright (C) 2013 Andreas Kloeckner" +__copyright__ = """ +Copyright (C) 2013 Andreas Kloeckner +Copyright (C) 2018 Alexandru Fikl +""" __license__ = """ Permission is hereby granted, free of charge, to any person obtaining a copy @@ -36,6 +39,9 @@ import pyopencl.clmath # noqa from loopy.version import MOST_RECENT_LANGUAGE_VERSION from pytools import memoize_in +from pytential.symbolic.primitives import DEFAULT_SOURCE, DEFAULT_TARGET +from pytential.symbolic.primitives import ( + QBXSourceStage1, QBXSourceStage2, QBXSourceQuadStage2) # FIXME caches: fix up queues @@ -278,54 +284,194 @@ class MatVecOp: # }}} -# {{{ default for 'domains' parameter +# {{{ expression prep + +def _prepare_domains(nresults, places, domains, default_domain): + """ + :arg nresults: number of results. + :arg places: a :class:`pytential.symbolic.execution.GeometryCollection`. + :arg domains: recommended domains. + :arg default_domain: default value for domains which are not provided. + + :return: a list of domains for each result. If domains is `None`, each + element in the list is *default_domain*. If *domains* is a scalar + (i.e., not a *list* or *tuple*), each element in the list is + *domains*. Otherwise, *domains* is returned as is. 
+ """ -def _domains_default(nresults, places, domains, default_val): if domains is None: - if default_val not in places: + if default_domain not in places: raise RuntimeError("'domains is None' requires " - "default domain to be defined") - dom_name = default_val - return nresults*[dom_name] - + "default domain to be defined in places") + dom_name = default_domain + return nresults * [dom_name] elif not isinstance(domains, (list, tuple)): dom_name = domains - return nresults*[dom_name] + return nresults * [dom_name] + + assert len(domains) == nresults + return domains + + +def _prepare_expr(places, expr): + """ + :arg places: :class:`pytential.symbolic.execution.GeometryCollection`. + :arg expr: a symbolic expression. + :return: processed symbolic expressions, tagged with the appropriate + `where` identifier from places, etc. + """ + + from pytential.source import LayerPotentialSourceBase + from pytential.symbolic.mappers import ( + ToTargetTagger, DerivativeBinder) - else: - return domains + expr = ToTargetTagger(*places._default_place_ids)(expr) + expr = DerivativeBinder()(expr) + + for name, place in six.iteritems(places.places): + if isinstance(place, LayerPotentialSourceBase): + expr = place.preprocess_optemplate(name, places, expr) + + return expr # }}} # {{{ bound expression -class BoundExpression: - def __init__(self, optemplate, places): - self.optemplate = optemplate - self.places = places +class GeometryCollection(object): + """A mapping from symbolic identifiers ("place IDs", typically strings) + to 'geometries', where a geometry can be a + :class:`pytential.source.PotentialSource` + or a :class:`pytential.target.TargetBase`. + This class is meant to hold a specific combination of sources and targets + serve to host caches of information derived from them, e.g. FMM trees + of subsets of them, as well as related common subexpressions such as + metric terms. + + .. method:: __getitem__ + .. method:: get_discretization + .. method:: get_cache + """ + + def __init__(self, places, auto_where=None): + """ + :arg places: a scalar, tuple of or mapping of symbolic names to + geometry objects. Supported objects are + :class:`~pytential.source.PotentialSource`, + :class:`~potential.target.TargetBase` and + :class:`~meshmode.discretization.Discretization`. + :arg auto_where: location identifier for each geometry object, used + to denote specific discretizations, e.g. in the case where + *places* is a :class:`~pytential.source.LayerPotentialSourceBase`. + By default, we assume + :class:`~pytential.symbolic.primitives.DEFAULT_SOURCE` and + :class:`~pytential.symbolic.primitives.DEFAULT_TARGET` for + sources and targets, respectively. 
+ """ + + from pytential.target import TargetBase + from meshmode.discretization import Discretization + from pytential.source import LayerPotentialSourceBase, PotentialSource + + if auto_where is None: + source_where, target_where = DEFAULT_SOURCE, DEFAULT_TARGET + else: + # NOTE: keeping this here to make sure auto_where unpacks into + # just the two elements + source_where, target_where = auto_where + + self._default_source_place = source_where + self._default_target_place = target_where + self._default_place_ids = (source_where, target_where) + + self.places = {} + if isinstance(places, LayerPotentialSourceBase): + self.places[source_where] = places + self.places[target_where] = \ + self._get_lpot_discretization(target_where, places) + elif isinstance(places, (Discretization, TargetBase)): + self.places[target_where] = places + elif isinstance(places, tuple): + source_discr, target_discr = places + self.places[source_where] = source_discr + self.places[target_where] = target_discr + else: + self.places = places.copy() + + for p in six.itervalues(self.places): + if not isinstance(p, (PotentialSource, TargetBase, Discretization)): + raise TypeError("Must pass discretization, targets or " + "layer potential sources as 'places'.") self.caches = {} - from pytential.symbolic.compiler import OperatorCompiler - self.code = OperatorCompiler(self.places)(optemplate) + def _get_lpot_discretization(self, where, lpot): + from pytential.source import LayerPotentialSourceBase + if not isinstance(lpot, LayerPotentialSourceBase): + return lpot + + from pytential.symbolic.primitives import _QBXSource + if not isinstance(where, _QBXSource): + where = QBXSourceStage1(where) + + if isinstance(where, QBXSourceStage1): + return lpot.density_discr + if isinstance(where, QBXSourceStage2): + return lpot.stage2_density_discr + if isinstance(where, QBXSourceQuadStage2): + return lpot.quad_stage2_density_discr + + raise ValueError('unknown `where` identifier: {}'.format(type(where))) + + def get_discretization(self, where): + """ + :arg where: location identifier. + + :return: a geometry object in the collection corresponding to the + key *where*. If it is a + :class:`~pytential.source.LayerPotentialSourceBase`, we look for + the corresponding :class:`~meshmode.discretization.Discretization` + in its attributes instead. 
+ """ + + if where in self.places: + lpot = self.places[where] + else: + lpot = self.places.get(getattr(where, 'where', None), None) + + if lpot is None: + raise KeyError('`where` not in the collection: {}'.format(where)) + + return self._get_lpot_discretization(where, lpot) + + def __getitem__(self, where): + return self.places[where] + + def __contains__(self, where): + return where in self.places + + def copy(self): + return GeometryCollection(self.places, auto_where=self.where) def get_cache(self, name): return self.caches.setdefault(name, {}) - def get_discretization(self, where): - from pytential.symbolic.primitives import _QBXSourceStage2 - if isinstance(where, _QBXSourceStage2): - lpot_source = self.places[where.where] - return lpot_source.stage2_density_discr - discr = self.places[where] +class BoundExpression(object): + def __init__(self, places, sym_op_expr): + self.places = places + self.sym_op_expr = sym_op_expr + self.caches = {} - from pytential.source import LayerPotentialSourceBase - if isinstance(discr, LayerPotentialSourceBase): - discr = discr.density_discr + from pytential.symbolic.compiler import OperatorCompiler + self.code = OperatorCompiler(self.places)(sym_op_expr) - return discr + def get_cache(self, name): + return self.places.get_cache(name) + + def get_discretization(self, where): + return self.places.get_discretization(where) def scipy_op(self, queue, arg_name, dtype, domains=None, **extra_args): """ @@ -343,8 +489,7 @@ class BoundExpression: else: nresults = 1 - from pytential.symbolic.primitives import DEFAULT_TARGET - domains = _domains_default(nresults, self.places, domains, + domains = _prepare_domains(nresults, self.places, domains, DEFAULT_TARGET) total_dofs = 0 @@ -370,117 +515,74 @@ class BoundExpression: exec_mapper = EvaluationMapper(self, queue, args) return self.code.execute(exec_mapper) -# }}} - - -# {{{ expression prep - -def prepare_places(places): - from pytential.symbolic.primitives import DEFAULT_SOURCE, DEFAULT_TARGET - from meshmode.discretization import Discretization - from pytential.source import LayerPotentialSourceBase - from pytential.target import TargetBase - - if isinstance(places, LayerPotentialSourceBase): - places = { - DEFAULT_SOURCE: places, - DEFAULT_TARGET: places.density_discr, - } - elif isinstance(places, (Discretization, TargetBase)): - places = { - DEFAULT_TARGET: places, - } - - elif isinstance(places, tuple): - source_discr, target_discr = places - places = { - DEFAULT_SOURCE: source_discr, - DEFAULT_TARGET: target_discr, - } - del source_discr - del target_discr - - def cast_to_place(discr): - from pytential.target import TargetBase - from pytential.source import PotentialSource - if not isinstance(discr, (Discretization, TargetBase, PotentialSource)): - raise TypeError("must pass discretizations, " - "layer potential sources or targets as 'places'") - return discr - return dict( - (key, cast_to_place(value)) - for key, value in six.iteritems(places)) - - -def prepare_expr(places, expr, auto_where=None): +def bind(places, expr, auto_where=None): """ - :arg places: result of :func:`prepare_places` - :arg auto_where: For simple source-to-self or source-to-target + :arg places: a :class:`pytential.symbolic.execution.GeometryCollection`. + Alternatively, any list or mapping that is a valid argument for its + constructor can also be used. + :arg auto_where: for simple source-to-self or source-to-target evaluations, find 'where' attributes automatically. 
""" - from pytential.symbolic.primitives import DEFAULT_SOURCE, DEFAULT_TARGET - from pytential.source import LayerPotentialSourceBase - - from pytential.symbolic.mappers import ( - ToTargetTagger, - DerivativeBinder, - ) + if not isinstance(places, GeometryCollection): + places = GeometryCollection(places, auto_where=auto_where) - if auto_where is None: - if DEFAULT_TARGET in places: - auto_where = DEFAULT_SOURCE, DEFAULT_TARGET - else: - auto_where = DEFAULT_SOURCE, DEFAULT_SOURCE + expr = _prepare_expr(places, expr) - if auto_where: - expr = ToTargetTagger(*auto_where)(expr) + return BoundExpression(places, expr) - expr = DerivativeBinder()(expr) +# }}} - for name, place in six.iteritems(places): - if isinstance(place, LayerPotentialSourceBase): - expr = place.preprocess_optemplate(name, places, expr) - return expr +# {{{ matrix building -# }}} +def _bmat(blocks, dtypes): + from pytools import single_valued + from pytential.symbolic.matrix import is_zero + nrows = blocks.shape[0] + ncolumns = blocks.shape[1] -def bind(places, expr, auto_where=None): - """ - :arg places: a mapping of symbolic names to - :class:`pytential.discretization.Discretization` objects or a subclass - of :class:`pytential.discretization.target.TargetBase`. - :arg auto_where: For simple source-to-self or source-to-target - evaluations, find 'where' attributes automatically. - """ - - places = prepare_places(places) - expr = prepare_expr(places, expr, auto_where=auto_where) - return BoundExpression(expr, places) + # "block row starts"/"block column starts" + brs = np.cumsum([0] + + [single_valued(blocks[ibrow, ibcol].shape[0] + for ibcol in range(ncolumns) + if not is_zero(blocks[ibrow, ibcol])) + for ibrow in range(nrows)]) + + bcs = np.cumsum([0] + + [single_valued(blocks[ibrow, ibcol].shape[1] + for ibrow in range(nrows) + if not is_zero(blocks[ibrow, ibcol])) + for ibcol in range(ncolumns)]) + + result = np.zeros((brs[-1], bcs[-1]), + dtype=np.find_common_type(dtypes, [])) + for ibcol in range(ncolumns): + for ibrow in range(nrows): + result[brs[ibrow]:brs[ibrow + 1], bcs[ibcol]:bcs[ibcol + 1]] = \ + blocks[ibrow, ibcol] + return result -# {{{ matrix building -def build_matrix(queue, places, expr, input_exprs, domains=None, +def build_matrix(queue, places, exprs, input_exprs, domains=None, auto_where=None, context=None): """ - :arg queue: a :class:`pyopencl.CommandQueue` used to synchronize - the calculation. - :arg places: a mapping of symbolic names to - :class:`pytential.discretization.Discretization` objects or a subclass - of :class:`pytential.discretization.target.TargetBase`. - :arg input_exprs: An object array of expressions corresponding to the - input block columns of the matrix. - - May also be a single expression. + :arg queue: a :class:`pyopencl.CommandQueue`. + :arg places: a :class:`pytential.symbolic.execution.GeometryCollection`. + Alternatively, any list or mapping that is a valid argument for its + constructor can also be used. + :arg exprs: an array of expressions corresponding to the output block + rows of the matrix. May also be a single expression. + :arg input_exprs: an array of expressions corresponding to the + input block columns of the matrix. May also be a single expression. :arg domains: a list of discretization identifiers (see 'places') or *None* values indicating the domains on which each component of the solution vector lives. *None* values indicate that the component - is a scalar. If *None*, - :class:`pytential.symbolic.primitives.DEFAULT_TARGET`, is required + is a scalar. 
If *None*, *auto_where* or, if it is not provided, + :class:`~pytential.symbolic.primitives.DEFAULT_SOURCE` is required to be a key in :attr:`places`. :arg auto_where: For simple source-to-self or source-to-target evaluations, find 'where' attributes automatically. @@ -489,84 +591,48 @@ def build_matrix(queue, places, expr, input_exprs, domains=None, if context is None: context = {} - places = prepare_places(places) - expr = prepare_expr(places, expr, auto_where=auto_where) - from pytools.obj_array import is_obj_array, make_obj_array - if not is_obj_array(expr): - expr = make_obj_array([expr]) + if not isinstance(places, GeometryCollection): + places = GeometryCollection(places, auto_where=auto_where) + exprs = _prepare_expr(places, exprs) + + if not is_obj_array(exprs): + exprs = make_obj_array([exprs]) try: input_exprs = list(input_exprs) except TypeError: # not iterable, wrap in a list input_exprs = [input_exprs] - from pytential.symbolic.primitives import DEFAULT_SOURCE - domains = _domains_default(len(input_exprs), places, domains, - DEFAULT_SOURCE) + domains = _prepare_domains(len(input_exprs), places, domains, + places._default_source_place) - nblock_rows = len(expr) + from pytential.symbolic.matrix import MatrixBuilder, is_zero + nblock_rows = len(exprs) nblock_columns = len(input_exprs) - blocks = np.zeros((nblock_rows, nblock_columns), dtype=np.object) - from pytential.symbolic.matrix import MatrixBuilder, is_zero - dtypes = [] - for ibcol in range(nblock_columns): mbuilder = MatrixBuilder( queue, dep_expr=input_exprs[ibcol], - other_dep_exprs=( - input_exprs[:ibcol] - + - input_exprs[ibcol+1:]), + other_dep_exprs=(input_exprs[:ibcol] + + input_exprs[ibcol + 1:]), dep_source=places[domains[ibcol]], + dep_discr=places.get_discretization(domains[ibcol]), places=places, context=context) for ibrow in range(nblock_rows): - block = mbuilder(expr[ibrow]) - - assert ( - is_zero(block) - or isinstance(block, np.ndarray)) - if isinstance(block, np.ndarray): - dtypes.append(block.dtype) + block = mbuilder(exprs[ibrow]) + assert is_zero(block) or isinstance(block, np.ndarray) blocks[ibrow, ibcol] = block - - if isinstance(block, cl.array.Array): + if isinstance(block, np.ndarray): dtypes.append(block.dtype) - from pytools import single_valued - - block_row_counts = [ - single_valued( - blocks[ibrow, ibcol].shape[0] - for ibcol in range(nblock_columns) - if not is_zero(blocks[ibrow, ibcol])) - for ibrow in range(nblock_rows)] - - block_col_counts = [ - places[domains[ibcol]].density_discr.nnodes - for ibcol in range(nblock_columns)] - - # "block row starts"/"block column starts" - brs = np.cumsum([0] + block_row_counts) - bcs = np.cumsum([0] + block_col_counts) - - result = np.zeros( - (sum(block_row_counts), sum(block_col_counts)), - dtype=np.find_common_type(dtypes, [])) - - for ibcol in range(nblock_columns): - for ibrow in range(nblock_rows): - result[brs[ibrow]:brs[ibrow+1], bcs[ibcol]:bcs[ibcol+1]] = \ - blocks[ibrow, ibcol] - - return cl.array.to_device(queue, result) + return cl.array.to_device(queue, _bmat(blocks, dtypes)) # }}} diff --git a/pytential/symbolic/mappers.py b/pytential/symbolic/mappers.py index 9fdbaae41875b5c38e258c1158e55073ca909504..00c43689e45ffbe435a87f9b8e6fd3c497d87303 100644 --- a/pytential/symbolic/mappers.py +++ b/pytential/symbolic/mappers.py @@ -292,8 +292,10 @@ class ToTargetTagger(LocationTagger): """ def __init__(self, default_source, default_target): - LocationTagger.__init__(self, default_target) - self.operand_rec = LocationTagger(default_source) + 
LocationTagger.__init__(self, default_target, + default_source=default_source) + self.operand_rec = LocationTagger(default_source, + default_source=default_source) # }}} @@ -397,19 +399,15 @@ class QBXPreprocessor(IdentityMapper): self.places = places def map_int_g(self, expr): - source = self.places[self.source_name] - target_discr = self.places[expr.target] - - from pytential.source import LayerPotentialSourceBase - if isinstance(target_discr, LayerPotentialSourceBase): - target_discr = target_discr.density_discr + source_discr = self.places.get_discretization(self.source_name) + target_discr = self.places.get_discretization(expr.target) if expr.qbx_forced_limit == 0: raise ValueError("qbx_forced_limit == 0 was a bad idea and " "is no longer supported. Use qbx_forced_limit == 'avg' " "to request two-sided averaging explicitly if needed.") - is_self = source.density_discr is target_discr + is_self = source_discr is target_discr expr = expr.copy( kernel=expr.kernel, @@ -453,8 +451,12 @@ class QBXPreprocessor(IdentityMapper): # {{{ stringifier def stringify_where(where): - if isinstance(where, prim._QBXSourceStage2): + if isinstance(where, prim.QBXSourceStage1): + return "stage1(%s)" % stringify_where(where.where) + if isinstance(where, prim.QBXSourceStage2): return "stage2(%s)" % stringify_where(where.where) + if isinstance(where, prim.QBXSourceQuadStage2): + return "quad_stage2(%s)" % stringify_where(where.where) if where is None: return "?" diff --git a/pytential/symbolic/matrix.py b/pytential/symbolic/matrix.py index f0b1d82f7ae9c963341450e07bb59ba761df9967..9d0c2b2f66a7080f91cda5c668377c9985bfca3f 100644 --- a/pytential/symbolic/matrix.py +++ b/pytential/symbolic/matrix.py @@ -1,6 +1,9 @@ from __future__ import division, absolute_import -__copyright__ = "Copyright (C) 2015 Andreas Kloeckner" +__copyright__ = """ +Copyright (C) 2015 Andreas Kloeckner +Copyright (C) 2018 Alexandru Fikl +""" __license__ = """ Permission is hereby granted, free of charge, to any person obtaining a copy @@ -34,39 +37,214 @@ import pytential.symbolic.primitives as sym from pytential.symbolic.execution import bind +# {{{ helpers + def is_zero(x): return isinstance(x, (int, float, complex, np.number)) and x == 0 -# FIXME: PyOpenCL doesn't do all the required matrix math yet. -# We'll cheat and build the matrix on the host. +def _resample_arg(queue, source, x): + """ + :arg queue: a :class:`pyopencl.CommandQueue`. + :arg source: a :class:`pytential.source.LayerPotentialSourceBase` subclass. + If it is not a layer potential source, no resampling is done. + :arg x: a :class:`numpy.ndarray`. + + :return: a resampled :class:`numpy.ndarray` (see + :method:`pytential.source.LayerPotentialSourceBase.resampler`). + """ + + from pytential.source import LayerPotentialSourceBase + if not isinstance(source, LayerPotentialSourceBase): + return x + + if not isinstance(x, np.ndarray): + return x + + if len(x.shape) >= 2: + raise RuntimeError("matrix variables in kernel arguments") + + def resample(y): + return source.resampler(queue, cl.array.to_device(queue, y)).get(queue) + + from pytools.obj_array import with_object_array_or_scalar + return with_object_array_or_scalar(resample, x) + + +def _get_layer_potential_args(mapper, expr, source): + """ + :arg mapper: a :class:`pytential.symbolic.matrix.MatrixBuilderBase`. + :arg expr: symbolic layer potential expression. + :arg source: a :class:`pytential.source.LayerPotentialSourceBase`. + + :return: a mapping of kernel arguments evaluated by the *mapper*. 
+ """ + + # skip resampling if source and target are the same + from pytential.symbolic.primitives import DEFAULT_SOURCE, DEFAULT_TARGET + if ((expr.source is not DEFAULT_SOURCE) + and (expr.target is not DEFAULT_TARGET) + and (isinstance(expr.source, type(expr.target)))): + source = None + + kernel_args = {} + for arg_name, arg_expr in six.iteritems(expr.kernel_arguments): + rec_arg = mapper.rec(arg_expr) + kernel_args[arg_name] = _resample_arg(mapper.queue, source, rec_arg) + + return kernel_args + + +def _get_kernel_args(mapper, kernel, expr, source): + """ + :arg mapper: a :class:`pytential.symbolic.matrix.MatrixBuilderBase`. + :arg kernel: a :class:`sumpy.kernel.Kernel`. + :arg expr: symbolic layer potential expression. + :arg source: a :class:`pytential.source.LayerPotentialSourceBase`. + + :return: a mapping of kernel arguments evaluated by the *mapper*. + """ + + # NOTE: copied from pytential.symbolic.primitives.IntG + inner_kernel_args = kernel.get_args() + kernel.get_source_args() + inner_kernel_args = set(arg.loopy_arg.name for arg in inner_kernel_args) + + kernel_args = {} + for arg_name, arg_expr in six.iteritems(expr.kernel_arguments): + if arg_name not in inner_kernel_args: + continue + + rec_arg = mapper.rec(arg_expr) + kernel_args[arg_name] = _resample_arg(mapper.queue, source, rec_arg) + + return kernel_args + + +def _get_weights_and_area_elements(queue, source, source_discr): + """ + :arg queue: a :class:`pyopencl.CommandQueue`. + :arg source: a :class:`pytential.source.LayerPotentialSourceBase`. + :arg source_discr: a :class:`meshmode.discretization.Discretization`. + + :return: quadrature weights for each node in *source_discr*. + """ + + if source.quad_stage2_density_discr is source_discr: + waa = source.weights_and_area_elements().with_queue(queue) + else: + # NOTE: copied from `weights_and_area_elements`, but using the + # discretization given by `where` and no interpolation + area = bind(source_discr, + sym.area_element(source.ambient_dim, source.dim))(queue) + qweight = bind(source_discr, sym.QWeight())(queue) + waa = area * qweight + + return waa + + +def _get_centers_and_expansion_radii(queue, source, target_discr, qbx_forced_limit): + """ + :arg queue: a :class:`pyopencl.CommandQueue`. + :arg source: a :class:`pytential.source.LayerPotentialSourceBase`. + :arg target_discr: a :class:`meshmode.discretization.Discretization`. + :arg qbx_forced_limit: an integer (*+1* or *-1*). + + :return: a tuple of `(centers, radii)` for each node in *target_discr*. + """ + + if source.density_discr is target_discr: + # NOTE: skip expensive target association + from pytential.qbx.utils import get_centers_on_side + centers = get_centers_on_side(source, qbx_forced_limit) + radii = source._expansion_radii('nsources') + else: + from pytential.qbx.utils import get_interleaved_centers + centers = get_interleaved_centers(queue, source) + radii = source._expansion_radii('ncenters') + + # NOTE: using a very small tolerance to make sure all the stage2 + # targets are associated to a center. We can't use the user provided + # source.target_association_tolerance here because it will likely be + # way too small. 
+ target_association_tolerance = 1.0e-1 + + from pytential.qbx.target_assoc import associate_targets_to_qbx_centers + code_container = source.target_association_code_container + assoc = associate_targets_to_qbx_centers( + source, + code_container.get_wrangler(queue), + [(target_discr, qbx_forced_limit)], + target_association_tolerance=target_association_tolerance) + + centers = [cl.array.take(c, assoc.target_to_center, queue=queue) + for c in centers] + radii = cl.array.take(radii, assoc.target_to_center, queue=queue) + + return centers, radii + +# }}} + + +# {{{ base class for matrix builders + +class MatrixBuilderBase(EvaluationMapperBase): + def __init__(self, queue, dep_expr, other_dep_exprs, + dep_source, dep_discr, places, context): + """ + :arg queue: a :class:`pyopencl.CommandQueue`. + :arg dep_expr: symbolic expression for the input block column + that the builder is evaluating. + :arg other_dep_exprs: symbolic expressions for the remaining input + block columns. + :arg dep_source: a :class:`pytential.source.LayerPotentialSourceBase` + for the given *dep_expr*. + :arg dep_discr: a concerete :class:`meshmode.discretization.Discretization` + for the given *dep_expr*. + :arg places: a :class:`pytential.symbolic.execution.GeometryCollection` + for all the sources and targets the builder is expected to + encounter. + """ + super(MatrixBuilderBase, self).__init__(context=context) -class MatrixBuilder(EvaluationMapperBase): - def __init__(self, queue, dep_expr, other_dep_exprs, dep_source, places, - context): self.queue = queue self.dep_expr = dep_expr self.other_dep_exprs = other_dep_exprs self.dep_source = dep_source - self.dep_discr = dep_source.density_discr + self.dep_discr = dep_discr self.places = places - self.context = context + + self.dep_nnodes = dep_discr.nnodes + + # {{{ + + def get_dep_variable(self): + return np.eye(self.dep_nnodes, dtype=np.float64) + + def is_kind_vector(self, x): + return len(x.shape) == 1 + + def is_kind_matrix(self, x): + return len(x.shape) == 2 + + # }}} + + # {{{ map_xxx implementation def map_variable(self, expr): if expr == self.dep_expr: - return np.eye(self.dep_discr.nnodes, dtype=np.float64) + return self.get_dep_variable() elif expr in self.other_dep_exprs: return 0 else: - return super(MatrixBuilder, self).map_variable(expr) + return super(MatrixBuilderBase, self).map_variable(expr) def map_subscript(self, expr): if expr == self.dep_expr: - return np.eye(self.dep_discr.nnodes, dtype=np.float64) + return self.get_dep_variable() elif expr in self.other_dep_exprs: return 0 else: - return super(MatrixBuilder, self).map_subscript(expr) + return super(MatrixBuilderBase, self).map_subscript(expr) def map_sum(self, expr): sum_kind = None @@ -83,13 +261,12 @@ class MatrixBuilder(EvaluationMapperBase): continue if isinstance(rec_child, np.ndarray): - if len(rec_child.shape) == 2: + if self.is_kind_matrix(rec_child): term_kind = term_kind_matrix - elif len(rec_child.shape) == 1: + elif self.is_kind_vector(rec_child): term_kind = term_kind_vector else: raise RuntimeError("unexpected array rank") - else: term_kind = term_kind_scalar @@ -108,71 +285,141 @@ class MatrixBuilder(EvaluationMapperBase): mat_result = None vecs_and_scalars = 1 - for term in expr.children: - rec_term = self.rec(term) + for child in expr.children: + rec_child = self.rec(child) - if is_zero(rec_term): + if is_zero(rec_child): return 0 - if isinstance(rec_term, (np.number, int, float, complex)): - vecs_and_scalars = vecs_and_scalars * rec_term - elif isinstance(rec_term, 
np.ndarray): - if len(rec_term.shape) == 2: + if isinstance(rec_child, (np.number, int, float, complex)): + vecs_and_scalars = vecs_and_scalars * rec_child + elif isinstance(rec_child, np.ndarray): + if self.is_kind_matrix(rec_child): if mat_result is not None: raise RuntimeError("expression is nonlinear in %s" % self.dep_expr) else: - mat_result = rec_term + mat_result = rec_child else: - vecs_and_scalars = vecs_and_scalars * rec_term + vecs_and_scalars = vecs_and_scalars * rec_child if mat_result is not None: - if ( - isinstance(vecs_and_scalars, np.ndarray) - and len(vecs_and_scalars.shape) == 1): + if (isinstance(vecs_and_scalars, np.ndarray) + and self.is_kind_vector(vecs_and_scalars)): vecs_and_scalars = vecs_and_scalars[:, np.newaxis] return mat_result * vecs_and_scalars else: return vecs_and_scalars + def map_num_reference_derivative(self, expr): + rec_operand = self.rec(expr.operand) + + assert isinstance(rec_operand, np.ndarray) + if self.is_kind_matrix(rec_operand): + raise NotImplementedError("derivatives") + + where_discr = self.places[expr.where] + op = sym.NumReferenceDerivative(expr.ref_axes, sym.var("u")) + return bind(where_discr, op)( + self.queue, u=cl.array.to_device(self.queue, rec_operand)).get() + + def map_node_coordinate_component(self, expr): + where_discr = self.places[expr.where] + op = sym.NodeCoordinateComponent(expr.ambient_axis) + return bind(where_discr, op)(self.queue).get() + + def map_call(self, expr): + arg, = expr.parameters + rec_arg = self.rec(arg) + + if isinstance(rec_arg, np.ndarray) and self.is_kind_matrix(rec_arg): + raise RuntimeError("expression is nonlinear in variable") + + if isinstance(rec_arg, np.ndarray): + rec_arg = cl.array.to_device(self.queue, rec_arg) + + op = expr.function(sym.var("u")) + result = bind(self.dep_source, op)(self.queue, u=rec_arg) + + if isinstance(result, cl.array.Array): + result = result.get() + + return result + + # }}} + + +class MatrixBlockBuilderBase(MatrixBuilderBase): + """Evaluate individual blocks of a matrix operator. + + Unlike, e.g. :class:`MatrixBuilder`, matrix block builders are + significantly reduced in scope. They are basically just meant + to evaluate linear combinations of layer potential operators. + For example, they do not support composition of operators because we + assume that each operator acts directly on the density. + """ + + def __init__(self, queue, dep_expr, other_dep_exprs, + dep_source, dep_discr, places, index_set, context): + """ + :arg index_set: a :class:`sumpy.tools.MatrixBlockIndexRanges` class + describing which blocks are going to be evaluated. + """ + + super(MatrixBlockBuilderBase, self).__init__(queue, + dep_expr, other_dep_exprs, dep_source, dep_discr, + places, context) + + self.index_set = index_set + self.dep_nnodes = index_set.col.indices.size + + def get_dep_variable(self): + return 1.0 + + def is_kind_vector(self, x): + # NOTE: since matrices are flattened, the only way to differentiate + # them from a vector is by size + return x.size == self.index_set.row.indices.size + + def is_kind_matrix(self, x): + # NOTE: since matrices are flattened, we recognize them by checking + # if they have the right size + return x.size == self.index_set.linear_row_indices.size + +# }}} + + +# {{{ QBX layer potential matrix builder + +# FIXME: PyOpenCL doesn't do all the required matrix math yet. +# We'll cheat and build the matrix on the host. 
+ +class MatrixBuilder(MatrixBuilderBase): + def __init__(self, queue, dep_expr, other_dep_exprs, + dep_source, dep_discr, places, context): + super(MatrixBuilder, self).__init__(queue, dep_expr, other_dep_exprs, + dep_source, dep_discr, places, context) + def map_int_g(self, expr): - source = self.places[expr.source] - target_discr = self.places[expr.target] + where_source = expr.source + if where_source is sym.DEFAULT_SOURCE: + where_source = sym.QBXSourceQuadStage2(expr.source) - if source.density_discr is not target_discr: - raise NotImplementedError() + source = self.places[expr.source] + source_discr = self.places.get_discretization(where_source) + target_discr = self.places.get_discretization(expr.target) rec_density = self.rec(expr.density) if is_zero(rec_density): return 0 assert isinstance(rec_density, np.ndarray) - if len(rec_density.shape) != 2: + if not self.is_kind_matrix(rec_density): raise NotImplementedError("layer potentials on non-variables") kernel = expr.kernel - - kernel_args = {} - for arg_name, arg_expr in six.iteritems(expr.kernel_arguments): - rec_arg = self.rec(arg_expr) - - if isinstance(rec_arg, np.ndarray): - if len(rec_arg.shape) == 2: - raise RuntimeError("matrix variables in kernel arguments") - if len(rec_arg.shape) == 1: - from pytools.obj_array import with_object_array_or_scalar - - def resample(x): - return ( - source.resampler( - self.queue, - cl.array.to_device(self.queue, x)) - .get(queue=self.queue)) - - rec_arg = with_object_array_or_scalar(resample, rec_arg) - - kernel_args[arg_name] = rec_arg + kernel_args = _get_layer_potential_args(self, expr, source) from sumpy.expansion.local import LineTaylorLocalExpansion local_expn = LineTaylorLocalExpansion(kernel, source.qbx_order) @@ -181,66 +428,210 @@ class MatrixBuilder(EvaluationMapperBase): mat_gen = LayerPotentialMatrixGenerator( self.queue.context, (local_expn,)) - assert target_discr is source.density_discr + assert abs(expr.qbx_forced_limit) > 0 + centers, radii = _get_centers_and_expansion_radii(self.queue, + source, target_discr, expr.qbx_forced_limit) - from pytential.qbx.utils import get_centers_on_side + _, (mat,) = mat_gen(self.queue, + targets=target_discr.nodes(), + sources=source_discr.nodes(), + centers=centers, + expansion_radii=radii, + **kernel_args) + mat = mat.get() + + waa = _get_weights_and_area_elements(self.queue, source, source_discr) + mat[:, :] *= waa.get(self.queue) + + if target_discr.nnodes != source_discr.nnodes: + # NOTE: we only resample sources + assert target_discr.nnodes < source_discr.nnodes + + resampler = source.direct_resampler + resample_mat = resampler.full_resample_matrix(self.queue).get(self.queue) + mat = mat.dot(resample_mat) + + mat = mat.dot(rec_density) + + return mat + +# }}} + + +# {{{ p2p matrix builder + +class P2PMatrixBuilder(MatrixBuilderBase): + def __init__(self, queue, dep_expr, other_dep_exprs, + dep_source, dep_discr, places, context, exclude_self=True): + super(P2PMatrixBuilder, self).__init__(queue, + dep_expr, other_dep_exprs, dep_source, dep_discr, + places, context) + + self.exclude_self = exclude_self + + def map_int_g(self, expr): + source = self.places[expr.source] + source_discr = self.places.get_discretization(expr.source) + target_discr = self.places.get_discretization(expr.target) + + rec_density = self.rec(expr.density) + if is_zero(rec_density): + return 0 + + assert isinstance(rec_density, np.ndarray) + if not self.is_kind_matrix(rec_density): + raise NotImplementedError("layer potentials on non-variables") + + kernel = 
expr.kernel.get_base_kernel() + kernel_args = _get_kernel_args(self, kernel, expr, source) + if self.exclude_self: + kernel_args["target_to_source"] = \ + cl.array.arange(self.queue, 0, target_discr.nnodes, dtype=np.int) + + from sumpy.p2p import P2PMatrixGenerator + mat_gen = P2PMatrixGenerator( + self.queue.context, (kernel,), exclude_self=self.exclude_self) - assert abs(expr.qbx_forced_limit) > 0 _, (mat,) = mat_gen(self.queue, - target_discr.nodes(), - source.quad_stage2_density_discr.nodes(), - get_centers_on_side(source, expr.qbx_forced_limit), - expansion_radii=self.dep_source._expansion_radii("nsources"), + targets=target_discr.nodes(), + sources=source_discr.nodes(), **kernel_args) mat = mat.get() + mat = mat.dot(rec_density) + + return mat +# }}} - waa = source.weights_and_area_elements().get(queue=self.queue) - mat[:, :] *= waa - resampler = source.direct_resampler - resample_mat = resampler.full_resample_matrix(self.queue).get(self.queue) +# {{{ block matrix builders - mat = mat.dot(resample_mat) - mat = mat.dot(rec_density) +class NearFieldBlockBuilder(MatrixBlockBuilderBase): + def __init__(self, queue, dep_expr, other_dep_exprs, dep_source, dep_discr, + places, index_set, context): + super(NearFieldBlockBuilder, self).__init__(queue, + dep_expr, other_dep_exprs, dep_source, dep_discr, + places, index_set, context) + + # NOTE: we need additional mappers to redirect some operations: + # * mat_mapper is used to compute any kernel arguments that need to + # be computed on the full discretization, ignoring our index_set, + # e.g the normal in a double layer potential + # * blk_mapper is used to recursively compute the density to + # a layer potential operator to ensure there is no composition + self.mat_mapper = MatrixBuilderBase(queue, + dep_expr, other_dep_exprs, dep_source, dep_discr, + places, context) + self.blk_mapper = MatrixBlockBuilderBase(queue, + dep_expr, other_dep_exprs, dep_source, dep_discr, + places, index_set, context) + + def get_dep_variable(self): + tgtindices = self.index_set.linear_row_indices.get(self.queue) + srcindices = self.index_set.linear_col_indices.get(self.queue) + + return np.equal(tgtindices, srcindices).astype(np.float64) + + def map_int_g(self, expr): + source = self.places[expr.source] + source_discr = self.places.get_discretization(expr.source) + target_discr = self.places.get_discretization(expr.target) + + if source_discr is not target_discr: + raise NotImplementedError() + + rec_density = self.blk_mapper.rec(expr.density) + if is_zero(rec_density): + return 0 + + if not np.isscalar(rec_density): + raise NotImplementedError() + + kernel = expr.kernel + kernel_args = _get_layer_potential_args(self.mat_mapper, expr, source) + + from sumpy.expansion.local import LineTaylorLocalExpansion + local_expn = LineTaylorLocalExpansion(kernel, source.qbx_order) + + from sumpy.qbx import LayerPotentialMatrixBlockGenerator + mat_gen = LayerPotentialMatrixBlockGenerator( + self.queue.context, (local_expn,)) + + assert abs(expr.qbx_forced_limit) > 0 + centers, radii = _get_centers_and_expansion_radii(self.queue, + source, target_discr, expr.qbx_forced_limit) + + _, (mat,) = mat_gen(self.queue, + targets=target_discr.nodes(), + sources=source_discr.nodes(), + centers=centers, + expansion_radii=radii, + index_set=self.index_set, + **kernel_args) + + waa = _get_weights_and_area_elements(self.queue, source, source_discr) + mat *= waa[self.index_set.linear_col_indices] + mat = rec_density * mat.get(self.queue) return mat - # IntGdSource should have been removed 
by a preprocessor - def map_num_reference_derivative(self, expr): - rec_operand = self.rec(expr.operand) +class FarFieldBlockBuilder(MatrixBlockBuilderBase): + def __init__(self, queue, dep_expr, other_dep_exprs, dep_source, dep_discr, + places, index_set, context, exclude_self=False): + super(FarFieldBlockBuilder, self).__init__(queue, + dep_expr, other_dep_exprs, dep_source, dep_discr, + places, index_set, context) - assert isinstance(rec_operand, np.ndarray) - if len(rec_operand.shape) == 2: - raise NotImplementedError("derivatives") + # NOTE: same mapper issues as in the NearFieldBlockBuilder + self.exclude_self = exclude_self + self.mat_mapper = MatrixBuilderBase(queue, + dep_expr, other_dep_exprs, dep_source, dep_discr, + places, context) + self.blk_mapper = MatrixBlockBuilderBase(queue, + dep_expr, other_dep_exprs, dep_source, dep_discr, + places, index_set, context) - where_discr = self.places[expr.where] - op = sym.NumReferenceDerivative(expr.ref_axes, sym.var("u")) - return bind(where_discr, op)( - self.queue, u=cl.array.to_device(self.queue, rec_operand)).get() + def get_dep_variable(self): + tgtindices = self.index_set.linear_row_indices.get(self.queue) + srcindices = self.index_set.linear_col_indices.get(self.queue) - def map_node_coordinate_component(self, expr): - where_discr = self.places[expr.where] - op = sym.NodeCoordinateComponent(expr.ambient_axis) - return bind(where_discr, op)(self.queue).get() + return np.equal(tgtindices, srcindices).astype(np.float64) - def map_call(self, expr): - arg, = expr.parameters - rec_arg = self.rec(arg) + def map_int_g(self, expr): + source = self.places[expr.source] + source_discr = self.places.get_discretization(expr.source) + target_discr = self.places.get_discretization(expr.target) - if ( - isinstance(rec_arg, np.ndarray) - and len(rec_arg.shape) == 2): - raise RuntimeError("expression is nonlinear in variable") + if source_discr is not target_discr: + raise NotImplementedError() - if isinstance(rec_arg, np.ndarray): - rec_arg = cl.array.to_device(self.queue, rec_arg) + rec_density = self.blk_mapper.rec(expr.density) + if is_zero(rec_density): + return 0 - op = expr.function(sym.var("u")) - result = bind(self.dep_source, op)(self.queue, u=rec_arg) + if not np.isscalar(rec_density): + raise NotImplementedError() - if isinstance(result, cl.array.Array): - result = result.get() + kernel = expr.kernel.get_base_kernel() + kernel_args = _get_kernel_args(self.mat_mapper, kernel, expr, source) + if self.exclude_self: + kernel_args["target_to_source"] = \ + cl.array.arange(self.queue, 0, target_discr.nnodes, dtype=np.int) - return result + from sumpy.p2p import P2PMatrixBlockGenerator + mat_gen = P2PMatrixBlockGenerator( + self.queue.context, (kernel,), exclude_self=self.exclude_self) + + _, (mat,) = mat_gen(self.queue, + targets=target_discr.nodes(), + sources=source_discr.nodes(), + index_set=self.index_set, + **kernel_args) + mat = rec_density * mat.get(self.queue) + + return mat + +# }}} + +# vim: foldmethod=marker diff --git a/pytential/symbolic/primitives.py b/pytential/symbolic/primitives.py index 3a297fd1f320e28d3f2308ed46e79c878e97bb90..2288ab85eaf8069a3ee3356354ae0cb5085ab345 100644 --- a/pytential/symbolic/primitives.py +++ b/pytential/symbolic/primitives.py @@ -201,10 +201,10 @@ class DEFAULT_TARGET: # noqa pass -class _QBXSourceStage2(object): - """A symbolic 'where' specifier for the - :attr:`pytential.qbx.QBXLayerPotentialSource.stage2_density_discr` - of the layer potential source identified by :attr:`where`. 
+class _QBXSource(object): + """A symbolic 'where' specifier for the a density of a + :attr:`pytential.qbx.QBXLayerPotentialSource` + layer potential source identified by :attr:`where`. .. attribute:: where @@ -229,6 +229,27 @@ class _QBXSourceStage2(object): def __ne__(self, other): return not self.__eq__(other) + +class QBXSourceStage1(_QBXSource): + """An explicit symbolic 'where' specifier for the + :attr:`pytential.qbx.QBXLayerPotentialSource.density_discr` + of the layer potential source identified by :attr:`where`. + """ + + +class QBXSourceStage2(_QBXSource): + """A symbolic 'where' specifier for the + :attr:`pytential.qbx.QBXLayerPotentialSource.stage2_density_discr` + of the layer potential source identified by :attr:`where`. + """ + + +class QBXSourceQuadStage2(_QBXSource): + """A symbolic 'where' specifier for the + :attr:`pytential.qbx.QBXLayerPotentialSource.quad_stage2_density_discr` + of the layer potential source identified by :attr:`where`. + """ + # }}} diff --git a/test/test_matrix.py b/test/test_matrix.py index 9f0ff0b44055dcb0ff91eda7e410c0a0f9149e65..d912b2e75f1e932c15e697b130ebfcada5808197 100644 --- a/test/test_matrix.py +++ b/test/test_matrix.py @@ -1,6 +1,9 @@ from __future__ import division, absolute_import, print_function -__copyright__ = "Copyright (C) 2015 Andreas Kloeckner" +__copyright__ = """ +Copyright (C) 2015 Andreas Kloeckner +Copyright (C) 2018 Alexandru Fikl +""" __license__ = """ Permission is hereby granted, free of charge, to any person obtaining a copy @@ -22,66 +25,174 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ +from functools import partial + import numpy as np import numpy.linalg as la + import pyopencl as cl -import pytest -from meshmode.mesh.generation import \ - ellipse, NArmedStarfish, make_curve_mesh -from pytential import bind, sym -from functools import partial +import pyopencl.array # noqa + +from pytools.obj_array import make_obj_array, is_obj_array + from sumpy.symbolic import USE_SYMENGINE +from meshmode.mesh.generation import ( # noqa + ellipse, NArmedStarfish, make_curve_mesh, generate_torus) + +from pytential import bind, sym +from pytential.symbolic.primitives import DEFAULT_SOURCE, DEFAULT_TARGET +from pytential.symbolic.primitives import QBXSourceStage1, QBXSourceQuadStage2 +import pytest from pyopencl.tools import ( # noqa pytest_generate_tests_for_pyopencl as pytest_generate_tests) -@pytest.mark.skipif(USE_SYMENGINE, - reason="https://gitlab.tiker.net/inducer/sumpy/issues/25") -@pytest.mark.parametrize("k", [0, 42]) -@pytest.mark.parametrize("curve_f", [ - partial(ellipse, 3), - NArmedStarfish(5, 0.25)]) -@pytest.mark.parametrize("layer_pot_id", [1, 2]) -def test_matrix_build(ctx_factory, k, curve_f, layer_pot_id, visualize=False): - cl_ctx = ctx_factory() - queue = cl.CommandQueue(cl_ctx) +def _build_qbx_discr(queue, + ndim=2, + nelements=30, + target_order=7, + qbx_order=4, + curve_f=None): - # prevent cache 'splosion - from sympy.core.cache import clear_cache - clear_cache() + if curve_f is None: + curve_f = NArmedStarfish(5, 0.25) + + if ndim == 2: + mesh = make_curve_mesh(curve_f, + np.linspace(0, 1, nelements + 1), + target_order) + elif ndim == 3: + mesh = generate_torus(10.0, 2.0, order=target_order) + else: + raise ValueError("unsupported ambient dimension") + + from meshmode.discretization import Discretization + from meshmode.discretization.poly_element import \ + InterpolatoryQuadratureSimplexGroupFactory + from pytential.qbx import QBXLayerPotentialSource + 
density_discr = Discretization( + queue.context, mesh, + InterpolatoryQuadratureSimplexGroupFactory(target_order)) + + qbx, _ = QBXLayerPotentialSource(density_discr, + fine_order=4 * target_order, + qbx_order=qbx_order, + fmm_order=False).with_refinement() + + return qbx + + +def _build_block_index(discr, nblks=10, factor=1.0): + nnodes = discr.nnodes + max_particles_in_box = nnodes // nblks + + from pytential.linalg.proxy import partition_by_nodes + indices = partition_by_nodes(discr, use_tree=True, + max_nodes_in_box=max_particles_in_box) + + # randomly pick a subset of points + from sumpy.tools import MatrixBlockIndexRanges, BlockIndexRanges + if abs(factor - 1.0) > 1.0e-14: + with cl.CommandQueue(discr.cl_context) as queue: + indices = indices.get(queue) + + indices_ = np.empty(indices.nblocks, dtype=np.object) + for i in range(indices.nblocks): + iidx = indices.block_indices(i) + isize = int(factor * len(iidx)) + isize = max(1, min(isize, len(iidx))) + + indices_[i] = np.sort( + np.random.choice(iidx, size=isize, replace=False)) + + ranges_ = cl.array.to_device(queue, + np.cumsum([0] + [r.shape[0] for r in indices_])) + indices_ = cl.array.to_device(queue, np.hstack(indices_)) + + indices = BlockIndexRanges(discr.cl_context, + indices_.with_queue(None), + ranges_.with_queue(None)) + + indices = MatrixBlockIndexRanges(indices.cl_context, + indices, indices) + + return indices - target_order = 7 - qbx_order = 4 - nelements = 30 + +def _build_op(lpot_id, + k=0, + ndim=2, + qbx_forced_limit="avg"): from sumpy.kernel import LaplaceKernel, HelmholtzKernel if k: - knl = HelmholtzKernel(2) + knl = HelmholtzKernel(ndim) knl_kwargs = {"k": k} else: - knl = LaplaceKernel(2) + knl = LaplaceKernel(ndim) knl_kwargs = {} - from pytools.obj_array import make_obj_array, is_obj_array - if layer_pot_id == 1: + lpot_kwargs = {"qbx_forced_limit": qbx_forced_limit} + lpot_kwargs.update(knl_kwargs) + if lpot_id == 1: + # scalar single-layer potential + u_sym = sym.var("u") + op = sym.S(knl, u_sym, **lpot_kwargs) + elif lpot_id == 2: + # scalar combination of layer potentials u_sym = sym.var("u") - op = sym.Sp(knl, u_sym, **knl_kwargs) - elif layer_pot_id == 2: + op = sym.S(knl, 0.3 * u_sym, **lpot_kwargs) \ + + sym.D(knl, 0.5 * u_sym, **lpot_kwargs) + elif lpot_id == 3: + # vector potential u_sym = sym.make_sym_vector("u", 2) u0_sym, u1_sym = u_sym op = make_obj_array([ - sym.Sp(knl, u0_sym, **knl_kwargs) + - sym.D(knl, u1_sym, **knl_kwargs), - - sym.S(knl, 0.4 * u0_sym, **knl_kwargs) + - 0.3 * sym.D(knl, u0_sym, **knl_kwargs) + sym.Sp(knl, u0_sym, **lpot_kwargs) + + sym.D(knl, u1_sym, **lpot_kwargs), + sym.S(knl, 0.4 * u0_sym, **lpot_kwargs) + + 0.3 * sym.D(knl, u0_sym, **lpot_kwargs) ]) else: - raise ValueError("Unknown layer_pot_id: {}".format(layer_pot_id)) + raise ValueError("Unknown lpot_id: {}".format(lpot_id)) + + op = 0.5 * u_sym + op + + return op, u_sym, knl_kwargs + + +def _max_block_error(mat, blk, index_set): + error = -np.inf + for i in range(index_set.nblocks): + mat_i = index_set.take(mat, i) + blk_i = index_set.block_take(blk, i) + + error = max(error, la.norm(mat_i - blk_i) / la.norm(mat_i)) + return error + + +@pytest.mark.skipif(USE_SYMENGINE, + reason="https://gitlab.tiker.net/inducer/sumpy/issues/25") +@pytest.mark.parametrize("k", [0, 42]) +@pytest.mark.parametrize("curve_f", [ + partial(ellipse, 3), + NArmedStarfish(5, 0.25)]) +@pytest.mark.parametrize("lpot_id", [2, 3]) +def test_matrix_build(ctx_factory, k, curve_f, lpot_id, visualize=False): + cl_ctx = ctx_factory() + queue = 
cl.CommandQueue(cl_ctx) + + # prevent cache 'splosion + from sympy.core.cache import clear_cache + clear_cache() + + target_order = 7 + qbx_order = 4 + nelements = 30 mesh = make_curve_mesh(curve_f, np.linspace(0, 1, nelements + 1), target_order) @@ -89,16 +200,18 @@ def test_matrix_build(ctx_factory, k, curve_f, layer_pot_id, visualize=False): from meshmode.discretization import Discretization from meshmode.discretization.poly_element import \ InterpolatoryQuadratureSimplexGroupFactory - from pytential.qbx import QBXLayerPotentialSource pre_density_discr = Discretization( cl_ctx, mesh, InterpolatoryQuadratureSimplexGroupFactory(target_order)) + from pytential.qbx import QBXLayerPotentialSource qbx, _ = QBXLayerPotentialSource(pre_density_discr, 4 * target_order, qbx_order, # Don't use FMM for now fmm_order=False).with_refinement() density_discr = qbx.density_discr + + op, u_sym, knl_kwargs = _build_op(lpot_id, k=k) bound_op = bind(qbx, op) from pytential.symbolic.execution import build_matrix @@ -151,10 +264,230 @@ def test_matrix_build(ctx_factory, k, curve_f, layer_pot_id, visualize=False): abs_err = la.norm(res_mat - res_matvec, np.inf) rel_err = abs_err / la.norm(res_matvec, np.inf) - print(abs_err, rel_err) + print("AbsErr {:.5e} RelErr {:.5e}".format(abs_err, rel_err)) assert rel_err < 1e-13 +@pytest.mark.parametrize("ndim", [2, 3]) +@pytest.mark.parametrize("factor", [1.0, 0.6]) +@pytest.mark.parametrize("lpot_id", [1, 2]) +def test_p2p_block_builder(ctx_factory, factor, ndim, lpot_id, + visualize=False): + ctx = ctx_factory() + queue = cl.CommandQueue(ctx) + + # prevent cache explosion + from sympy.core.cache import clear_cache + clear_cache() + + target_order = 2 if ndim == 3 else 7 + qbx = _build_qbx_discr(queue, target_order=target_order, ndim=ndim) + op, u_sym, _ = _build_op(lpot_id, ndim=ndim) + index_set = _build_block_index(qbx.density_discr, factor=factor) + + from pytential.symbolic.execution import GeometryCollection + from pytential.symbolic.execution import _prepare_expr, _prepare_domains + places = GeometryCollection(qbx) + expr = _prepare_expr(places, op) + domains = _prepare_domains(1, places, None, DEFAULT_SOURCE) + + from pytential.symbolic.matrix import P2PMatrixBuilder + mbuilder = P2PMatrixBuilder(queue, + dep_expr=u_sym, + other_dep_exprs=[], + dep_source=places[domains[0]], + dep_discr=places.get_discretization(domains[0]), + places=places, + context={}, + exclude_self=True) + mat = mbuilder(expr) + + from pytential.symbolic.matrix import FarFieldBlockBuilder + mbuilder = FarFieldBlockBuilder(queue, + dep_expr=u_sym, + other_dep_exprs=[], + dep_source=places[domains[0]], + dep_discr=places.get_discretization(domains[0]), + places=places, + index_set=index_set, + context={}, + exclude_self=True) + blk = mbuilder(expr) + + index_set = index_set.get(queue) + if visualize and ndim == 2: + blk_full = np.zeros_like(mat) + mat_full = np.zeros_like(mat) + + for i in range(index_set.nblocks): + itgt, isrc = index_set.block_indices(i) + + blk_full[np.ix_(itgt, isrc)] = index_set.block_take(blk, i) + mat_full[np.ix_(itgt, isrc)] = index_set.take(mat, i) + + import matplotlib.pyplot as mp + _, (ax1, ax2) = mp.subplots(1, 2, + figsize=(10, 8), dpi=300, constrained_layout=True) + ax1.imshow(blk_full) + ax1.set_title('FarFieldBlockBuilder') + ax2.imshow(mat_full) + ax2.set_title('P2PMatrixBuilder') + mp.savefig("test_p2p_block_{}d_{:.1f}.png".format(ndim, factor)) + + assert _max_block_error(mat, blk, index_set) < 1.0e-14 + + +@pytest.mark.parametrize("factor", [1.0, 0.6]) 
+@pytest.mark.parametrize("ndim", [2, 3]) +@pytest.mark.parametrize("lpot_id", [1, 2]) +def test_qbx_block_builder(ctx_factory, factor, ndim, lpot_id, + visualize=False): + ctx = ctx_factory() + queue = cl.CommandQueue(ctx) + + # prevent cache explosion + from sympy.core.cache import clear_cache + clear_cache() + + target_order = 2 if ndim == 3 else 7 + qbx = _build_qbx_discr(queue, target_order=target_order, ndim=ndim) + op, u_sym, _ = _build_op(lpot_id, ndim=ndim) + + # NOTE: NearFieldBlockBuilder only does stage1/stage1 or stage2/stage2, + # so we need to hardcode the discr for MatrixBuilder too, since the + # defaults are different + where = (QBXSourceStage1(DEFAULT_SOURCE), QBXSourceStage1(DEFAULT_TARGET)) + + from pytential.symbolic.execution import GeometryCollection, _prepare_expr + places = GeometryCollection(qbx, auto_where=where) + expr = _prepare_expr(places, op) + density_discr = places.get_discretization(where[0]) + index_set = _build_block_index(density_discr, factor=factor) + + from pytential.symbolic.matrix import NearFieldBlockBuilder + mbuilder = NearFieldBlockBuilder(queue, + dep_expr=u_sym, + other_dep_exprs=[], + dep_source=places[where[0]], + dep_discr=places.get_discretization(where[0]), + places=places, + index_set=index_set, + context={}) + blk = mbuilder(expr) + + from pytential.symbolic.matrix import MatrixBuilder + mbuilder = MatrixBuilder(queue, + dep_expr=u_sym, + other_dep_exprs=[], + dep_source=places[where[0]], + dep_discr=places.get_discretization(where[0]), + places=places, + context={}) + mat = mbuilder(expr) + + index_set = index_set.get(queue) + if visualize: + blk_full = np.zeros_like(mat) + mat_full = np.zeros_like(mat) + + for i in range(index_set.nblocks): + itgt, isrc = index_set.block_indices(i) + + blk_full[np.ix_(itgt, isrc)] = index_set.block_take(blk, i) + mat_full[np.ix_(itgt, isrc)] = index_set.take(mat, i) + + import matplotlib.pyplot as mp + _, (ax1, ax2) = mp.subplots(1, 2, + figsize=(10, 8), constrained_layout=True) + ax1.imshow(mat_full) + ax1.set_title('MatrixBuilder') + ax2.imshow(blk_full) + ax2.set_title('NearFieldBlockBuilder') + mp.savefig("test_qbx_block_builder.png", dpi=300) + + assert _max_block_error(mat, blk, index_set) < 1.0e-14 + + +@pytest.mark.parametrize('place_id', + [(DEFAULT_SOURCE, DEFAULT_TARGET), + (QBXSourceStage1(DEFAULT_SOURCE), + QBXSourceStage1(DEFAULT_TARGET)), + (QBXSourceQuadStage2(DEFAULT_SOURCE), + QBXSourceQuadStage2(DEFAULT_TARGET))]) +def test_build_matrix_places(ctx_factory, place_id, visualize=False): + ctx = ctx_factory() + queue = cl.CommandQueue(ctx) + + # prevent cache explosion + from sympy.core.cache import clear_cache + clear_cache() + + qbx = _build_qbx_discr(queue, nelements=8, target_order=2, ndim=2, + curve_f=partial(ellipse, 1.0)) + + qbx_forced_limit = -1 + op, u_sym, _ = _build_op(lpot_id=1, ndim=2, + qbx_forced_limit=qbx_forced_limit) + + from pytential.symbolic.execution import GeometryCollection + places = GeometryCollection(qbx, auto_where=place_id) + source_discr = places.get_discretization(place_id[0]) + target_discr = places.get_discretization(place_id[1]) + + index_set = _build_block_index(source_discr, factor=0.6) + + # build full QBX matrix + from pytential.symbolic.execution import build_matrix + qbx_mat = build_matrix(queue, qbx, op, u_sym, + auto_where=place_id, domains=place_id[0]) + qbx_mat = qbx_mat.get(queue) + + assert qbx_mat.shape == (target_discr.nnodes, source_discr.nnodes) + + # build full p2p matrix + from pytential.symbolic.execution import _prepare_expr + op 
= _prepare_expr(places, op) + + from pytential.symbolic.matrix import P2PMatrixBuilder + mbuilder = P2PMatrixBuilder(queue, + dep_expr=u_sym, + other_dep_exprs=[], + dep_source=places[place_id[0]], + dep_discr=places.get_discretization(place_id[0]), + places=places, + context={}) + p2p_mat = mbuilder(op) + + assert p2p_mat.shape == (target_discr.nnodes, source_discr.nnodes) + + # build block qbx and p2p matrices + from pytential.symbolic.matrix import NearFieldBlockBuilder + mbuilder = NearFieldBlockBuilder(queue, + dep_expr=u_sym, + other_dep_exprs=[], + dep_source=places[place_id[0]], + dep_discr=places.get_discretization(place_id[0]), + places=places, + index_set=index_set, + context={}) + mat = mbuilder(op) + if place_id[0] is not DEFAULT_SOURCE: + assert _max_block_error(qbx_mat, mat, index_set.get(queue)) < 1.0e-14 + + from pytential.symbolic.matrix import FarFieldBlockBuilder + mbuilder = FarFieldBlockBuilder(queue, + dep_expr=u_sym, + other_dep_exprs=[], + dep_source=places[place_id[0]], + dep_discr=places.get_discretization(place_id[0]), + places=places, + index_set=index_set, + context={}, + exclude_self=True) + mat = mbuilder(op) + assert _max_block_error(p2p_mat, mat, index_set.get(queue)) < 1.0e-14 + + if __name__ == "__main__": import sys if len(sys.argv) > 1:
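
For reference, the following is a minimal usage sketch of the ``GeometryCollection``/``bind`` API introduced in this patch. The geometry, element count, orders and the Laplace single-layer operator are illustrative choices only (adapted from the tests above), not anything this change prescribes::

    from functools import partial

    import numpy as np
    import pyopencl as cl
    import pyopencl.array  # noqa

    from meshmode.mesh.generation import ellipse, make_curve_mesh
    from meshmode.discretization import Discretization
    from meshmode.discretization.poly_element import \
            InterpolatoryQuadratureSimplexGroupFactory

    from sumpy.kernel import LaplaceKernel
    from pytential import bind, sym
    from pytential.qbx import QBXLayerPotentialSource
    from pytential.symbolic.execution import GeometryCollection

    ctx = cl.create_some_context()
    queue = cl.CommandQueue(ctx)

    # build a small 2D curve discretization (orders picked arbitrarily)
    target_order = 7
    mesh = make_curve_mesh(partial(ellipse, 3),
            np.linspace(0, 1, 31), target_order)
    pre_density_discr = Discretization(ctx, mesh,
            InterpolatoryQuadratureSimplexGroupFactory(target_order))
    qbx, _ = QBXLayerPotentialSource(pre_density_discr,
            fine_order=4 * target_order, qbx_order=4,
            fmm_order=False).with_refinement()

    # by default the layer potential source is registered under
    # DEFAULT_SOURCE and its stage-1 density discretization under
    # DEFAULT_TARGET; explicit stage identifiers (QBXSourceStage1,
    # QBXSourceQuadStage2, ...) can be passed via auto_where instead
    places = GeometryCollection(qbx)
    density_discr = places.get_discretization(sym.DEFAULT_SOURCE)

    # bind() accepts the collection directly, or anything its
    # constructor accepts
    u_sym = sym.var("u")
    op = sym.S(LaplaceKernel(2), u_sym, qbx_forced_limit=-1)
    bound_op = bind(places, op)

    u = cl.array.to_device(queue, np.ones(density_discr.nnodes))
    result = bound_op(queue, u=u)

The same collection can be reused across several ``bind``/``build_matrix`` calls, so that caches (FMM trees, metric terms and other common subexpressions) are shared between them.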