From 7c6315632bcf2ccb3833136de6caca88102bc45d Mon Sep 17 00:00:00 2001 From: Matt Wala Date: Sun, 11 Jun 2017 17:24:42 -0500 Subject: [PATCH 1/6] First cut: Nystrom layer potential source. Currently supports direct eval. Other than that, it does not do much. In particular: - no refinement - no special handling of singularities for self eval See also: #33 --- doc/discretization.rst | 5 ++ pytential/nystrom.py | 136 +++++++++++++++++++++++++++++++ pytential/qbx/__init__.py | 16 ---- pytential/source.py | 16 ++++ pytential/symbolic/compiler.py | 34 +++++++- pytential/symbolic/mappers.py | 43 +++++++++- pytential/symbolic/primitives.py | 14 +++- test/test_layer_pot.py | 42 ++++++++++ 8 files changed, 283 insertions(+), 23 deletions(-) create mode 100644 pytential/nystrom.py diff --git a/doc/discretization.rst b/doc/discretization.rst index 3229e267..4440f09f 100644 --- a/doc/discretization.rst +++ b/doc/discretization.rst @@ -15,6 +15,11 @@ and you can start computing. .. automodule:: pytential.qbx +Nyström discretization +------- + +.. automodule:: pytential.nystrom + Sources ------- diff --git a/pytential/nystrom.py b/pytential/nystrom.py new file mode 100644 index 00000000..cc06d283 --- /dev/null +++ b/pytential/nystrom.py @@ -0,0 +1,136 @@ +# -*- coding: utf-8 -*- +from __future__ import division, absolute_import + +__copyright__ = """ +Copyright (C) 2017 Matt Wala +""" + +__license__ = """ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. +""" + +import six + +from pytential.source import LayerPotentialSourceBase + +import logging +logger = logging.getLogger(__name__) + + +__doc__ = """ +.. autoclass:: NystromLayerPotentialSource +""" + + +# {{{ (panel-based) Nystrom layer potential source + +class NystromLayerPotentialSource(LayerPotentialSourceBase): + """A source discretization for a Nyström layer potential with panel-based + quadrature. 
+ """ + + def __init__(self, density_discr, + # begin undocumented arguments + # FIXME default debug=False once everything works + debug=True): + """ + """ + self.density_discr = density_discr + self.debug = debug + + @property + def fine_density_discr(self): + return self.density_discr + + def resampler(self, queue, f): + return f + + def with_refinement(self): + raise NotImplementedError + + def copy( + self, + density_discr=None, + debug=None + ): + return type(self)( + density_discr=density_discr or self.density_discr, + debug=debug if debug is not None else self.debug) + + def exec_compute_potential_insn(self, queue, insn, bound_expr, evaluate): + from pytools.obj_array import with_object_array_or_scalar + + def evaluate_wrapper(expr): + value = evaluate(expr) + return with_object_array_or_scalar(lambda x: x, value) + + func = self.exec_compute_potential_insn_direct + return func(queue, insn, bound_expr, evaluate_wrapper) + + def op_group_features(self, expr): + from sumpy.kernel import AxisTargetDerivativeRemover + result = ( + expr.source, expr.density, + AxisTargetDerivativeRemover()(expr.kernel), + ) + + return result + + def preprocess_optemplate(self, name, discretizations, expr): + """ + :arg name: The symbolic name for *self*, which the preprocessor + should use to find which expressions it is allowed to modify. + """ + from pytential.symbolic.mappers import NystromPreprocessor + return NystromPreprocessor(name, discretizations)(expr) + + def exec_compute_potential_insn_direct(self, queue, insn, bound_expr, evaluate): + kernel_args = {} + + for arg_name, arg_expr in six.iteritems(insn.kernel_arguments): + kernel_args[arg_name] = evaluate(arg_expr) + + strengths = (evaluate(insn.density).with_queue(queue) + * self.weights_and_area_elements()) + + result = [] + p2p = None + + for o in insn.outputs: + target_discr = bound_expr.get_discretization(o.target_name) + + if p2p is None: + p2p = self.get_p2p(insn.kernels) + + evt, output_for_each_kernel = p2p(queue, + target_discr.nodes(), self.fine_density_discr.nodes(), + [strengths], **kernel_args) + + result.append((o.name, output_for_each_kernel[o.kernel_index])) + + return result, [] + +# }}} + + +__all__ = ( + NystromLayerPotentialSource, + ) + +# vim: fdm=marker diff --git a/pytential/qbx/__init__.py b/pytential/qbx/__init__.py index 2212b005..0c4bfdf3 100644 --- a/pytential/qbx/__init__.py +++ b/pytential/qbx/__init__.py @@ -589,22 +589,6 @@ class QBXLayerPotentialSource(LayerPotentialSourceBase): for knl in kernels], value_dtypes=value_dtype) - @memoize_method - def get_p2p(self, kernels): - # needs to be separate method for caching - - from pytools import any - if any(knl.is_complex_valued for knl in kernels): - value_dtype = self.density_discr.complex_dtype - else: - value_dtype = self.density_discr.real_dtype - - from sumpy.p2p import P2P - p2p = P2P(self.cl_context, - kernels, exclude_self=False, value_dtypes=value_dtype) - - return p2p - @memoize_method def get_qbx_target_numberer(self, dtype): assert dtype == np.int32 diff --git a/pytential/source.py b/pytential/source.py index f4dccda5..6d78da57 100644 --- a/pytential/source.py +++ b/pytential/source.py @@ -190,6 +190,22 @@ class LayerPotentialSourceBase(PotentialSource): def complex_dtype(self): return self.density_discr.complex_dtype + @memoize_method + def get_p2p(self, kernels): + # needs to be separate method for caching + + from pytools import any + if any(knl.is_complex_valued for knl in kernels): + value_dtype = self.density_discr.complex_dtype + else: + 
value_dtype = self.density_discr.real_dtype + + from sumpy.p2p import P2P + p2p = P2P(self.cl_context, + kernels, exclude_self=False, value_dtypes=value_dtype) + + return p2p + # {{{ weights and area elements @memoize_method diff --git a/pytential/symbolic/compiler.py b/pytential/symbolic/compiler.py index fc97155f..9233714e 100644 --- a/pytential/symbolic/compiler.py +++ b/pytential/symbolic/compiler.py @@ -130,6 +130,10 @@ class PotentialOutput(Record): .. attribute:: kernel_index + .. attribute:: diagonal_kernel_index + + Index for the diagonal of the kernel. May be *None* if not required. + .. attribute:: target_name .. attribute:: qbx_forced_limit @@ -152,6 +156,17 @@ class ComputePotentialInstruction(Instruction): a list of :class:`sumpy.kernel.Kernel` instances, indexed by :attr:`LayerPotentialOutput.kernel_index`. + .. attribute:: exclude_self + + If *True*, avoid evaluating a kernel diagonal when computing a self + interaction. Instead, evalute the diagonal using the + value from :attr:`diagonal_kernels`. + + .. attribute:: diagonal_kernels + + A :class:`tuple` of expressions for the kernel diagonals, indexed by + :attr:`LayerPotentialOutput.diagonal_kernel_index`. + .. attribute:: kernel_arguments a dictionary mapping arg names to kernel arguments @@ -165,6 +180,7 @@ class ComputePotentialInstruction(Instruction): .. attribute:: source .. attribute:: priority + """ def get_assignees(self): @@ -444,6 +460,9 @@ class OperatorCompiler(IdentityMapper): from pytential.symbolic.primitives import hashable_kernel_args return ( self.places[expr.source].op_group_features(expr) + # The presence of a diagonal kernel determines + # whether or not exclude_self should be True. + + (expr.diagonal_kernel is not None,) + hashable_kernel_args(expr.kernel_arguments)) @memoize_method @@ -582,10 +601,16 @@ class OperatorCompiler(IdentityMapper): kernels = sorted( set(op.kernel for op in group), key=lambda kernel: repr(kernel)) - kernel_to_index = dict( (kernel, i) for i, kernel in enumerate(kernels)) + diagonal_kernels = sorted( + set(op.diagonal_kernel for op in group) - set([None]), + key=lambda diagonal_kernel: repr(diagonal_kernel)) + diagonal_kernel_to_index = dict( + (diagonal_kernel, i) for i, diagonal_kernel + in enumerate(diagonal_kernels)) + from pytools import single_valued from sumpy.kernel import AxisTargetDerivativeRemover atdr = AxisTargetDerivativeRemover() @@ -603,6 +628,9 @@ class OperatorCompiler(IdentityMapper): PotentialOutput( name=name, kernel_index=kernel_to_index[op.kernel], + diagonal_kernel_index=( + None if op.diagonal_kernel is None + else diagonal_kernel_to_index[op.diagonal_kernel]), target_name=op.target, qbx_forced_limit=op.qbx_forced_limit, ) @@ -613,10 +641,14 @@ class OperatorCompiler(IdentityMapper): ComputePotentialInstruction( outputs=outputs, kernels=tuple(kernels), + diagonal_kernels=tuple(diagonal_kernels), kernel_arguments=kernel_arguments, base_kernel=base_kernel, density=density_var, source=expr.source, + exclude_self=any( + op.diagonal_kernel is not None + for op in group), priority=max(getattr(op, "priority", 0) for op in group), dep_mapper_factory=self.dep_mapper_factory)) diff --git a/pytential/symbolic/mappers.py b/pytential/symbolic/mappers.py index 3eef6c4e..8b4aae1f 100644 --- a/pytential/symbolic/mappers.py +++ b/pytential/symbolic/mappers.py @@ -169,6 +169,7 @@ class EvaluationMapper(EvaluationMapperBase): expr.kernel, self.rec(subexpr), expr.qbx_forced_limit, expr.source, expr.target, + expr.diagonal_kernel, kernel_arguments=dict( (name, 
self.rec(arg_expr)) for name, arg_expr in expr.kernel_arguments.items() @@ -237,6 +238,7 @@ class LocationTagger(CSECachingMapperMixin, IdentityMapper): expr.kernel, self.operand_rec(expr.density), expr.qbx_forced_limit, source, target, + diagonal_kernel=expr.diagonal_kernel, kernel_arguments=dict( (name, self.operand_rec(arg_expr)) for name, arg_expr in expr.kernel_arguments.items() @@ -340,6 +342,32 @@ class DerivativeBinder(DerivativeBinderBase, IdentityMapper): # }}} +# {{{ Nystrom preprocessor + +class NystromPreprocessor(IdentityMapper): + + def __init__(self, source_name, places): + self.source_name = source_name + self.places = places + + def map_int_g(self, expr): + if expr.qbx_forced_limit in (-1, 1): + raise ValueError("Nystrom evaluation does not support one-sided limits") + + expr = expr.copy( + qbx_forced_limit=None, + kernel=expr.kernel, + density=self.rec(expr.density), + kernel_arguments=dict( + (name, self.rec(arg_expr)) + for name, arg_expr in expr.kernel_arguments.items() + )) + + return expr + +# }}} + + # {{{ QBX preprocessor class QBXPreprocessor(IdentityMapper): @@ -365,6 +393,7 @@ class QBXPreprocessor(IdentityMapper): expr = expr.copy( kernel=expr.kernel, density=self.rec(expr.density), + # QBX does not use the diagonal kernel. Ignore. kernel_arguments=dict( (name, self.rec(arg_expr)) for name, arg_expr in expr.kernel_arguments.items() @@ -467,14 +496,17 @@ class StringifyMapper(BaseStringifyMapper): for name, arg_expr in kernel_arguments.items()) def map_int_g(self, expr, enclosing_prec): - return u"Int[%s->%s]@(%s)%s (%s * %s)" % ( + return u"Int[%s->%s]@(%s)%s (%s * %s%s)" % ( stringify_where(expr.source), stringify_where(expr.target), expr.qbx_forced_limit, self._stringify_kernel_args( expr.kernel_arguments), expr.kernel, - self.rec(expr.density, PREC_PRODUCT)) + self.rec(expr.density, PREC_PRODUCT), + ( + "" if expr.diagonal_kernel is None + else (", diag=" + self.rec(expr.diagonal_kernel, PREC_NONE)))) # }}} @@ -518,11 +550,16 @@ class GraphvizMapper(GraphvizMapperBase): map_q_weight = map_pytential_leaf def map_int_g(self, expr): - descr = u"Int[%s->%s]@(%d) (%s)" % ( + diag_descr = ( + "" if expr.diagonal_kernel is None + else "diag=%s" % self.rec(expr.diagonal_kernel)) + + descr = u"Int[%s->%s]@(%d) (%s%s)" % ( stringify_where(expr.source), stringify_where(expr.target), expr.qbx_forced_limit, expr.kernel, + diag_descr, ) self.lines.append( "%s [label=\"%s\",shape=box];" % ( diff --git a/pytential/symbolic/primitives.py b/pytential/symbolic/primitives.py index b6cf78a2..445bfe91 100644 --- a/pytential/symbolic/primitives.py +++ b/pytential/symbolic/primitives.py @@ -550,6 +550,7 @@ class IntG(Expression): def __init__(self, kernel, density, qbx_forced_limit, source=None, target=None, + diagonal_kernel=None, kernel_arguments=None, **kwargs): """*target_derivatives* and later arguments should be considered @@ -558,6 +559,7 @@ class IntG(Expression): :arg kernel: a kernel as accepted by :func:`sumpy.kernel.to_kernel_and_args`, likely a :class:`sumpy.kernel.Kernel`. + :arg qbx_forced_limit: +1 if the output is required to originate from a QBX center on the "+" side of the boundary. -1 for the other side. Evaluation at a target with a value of +/- 1 in *qbx_forced_limit* @@ -574,6 +576,8 @@ class IntG(Expression): ``'avg'`` may be used as a shorthand to evaluate this potential as an average of the ``+1`` and the ``-1`` value. 
+ :arg diagonal_kernel: The diagonal kernel, if necessary + :arg kernel_arguments: A dictionary mapping named :class:`sumpy.kernel.Kernel` arguments (see :meth:`sumpy.kernel.Kernel.get_args` @@ -632,6 +636,8 @@ class IntG(Expression): kernel_arguments[name] = val + print("KERNEL_ARGUMENTS PROVIDED", set(kernel_arguments.keys())) + provided_arg_names = set(kernel_arguments.keys()) missing_args = kernel_arg_names - provided_arg_names if missing_args: @@ -649,6 +655,7 @@ class IntG(Expression): self.source = source self.target = target self.kernel_arguments = kernel_arguments + self.diagonal_kernel = diagonal_kernel def copy(self, kernel=None, density=None, qbx_forced_limit=_NoArgSentinel, source=None, target=None, kernel_arguments=None): @@ -659,8 +666,9 @@ class IntG(Expression): source = source or self.source target = target or self.target kernel_arguments = kernel_arguments or self.kernel_arguments + diagonal_kernel = self.diagonal_kernel return type(self)(kernel, density, qbx_forced_limit, source, target, - kernel_arguments) + diagonal_kernel, kernel_arguments) def __getinitargs__(self): return (self.kernel, self.density, self.qbx_forced_limit, @@ -776,8 +784,8 @@ class _unspecified: # noqa def S(kernel, density, # noqa - qbx_forced_limit=_unspecified, source=None, target=None, - kernel_arguments=None, **kwargs): + qbx_forced_limit=_unspecified, source=None, target=None, + kernel_arguments=None, **kwargs): if qbx_forced_limit is _unspecified: warn("not specifying qbx_forced_limit on call to 'S' is deprecated, " diff --git a/test/test_layer_pot.py b/test/test_layer_pot.py index 0f3d35bb..8c74c9d4 100644 --- a/test/test_layer_pot.py +++ b/test/test_layer_pot.py @@ -1273,6 +1273,48 @@ def test_off_surface_eval_vs_direct(ctx_getter, do_plot=False): # }}} +# {{{ nystrom tests + + +def test_nystrom_with_ones_kernel(ctx_getter): + cl_ctx = ctx_getter() + queue = cl.CommandQueue(cl_ctx) + + nelements = 20 + order = 8 + + mesh = make_curve_mesh(partial(ellipse, 1), + np.linspace(0, 1, nelements+1), + order) + + from meshmode.discretization import Discretization + from meshmode.discretization.poly_element import \ + InterpolatoryQuadratureSimplexGroupFactory + + discr = Discretization(cl_ctx, mesh, + InterpolatoryQuadratureSimplexGroupFactory(order)) + + from pytential.nystrom import NystromLayerPotentialSource + lpot_src = NystromLayerPotentialSource(discr) + + from sumpy.kernel import one_kernel_2d + import pytential.symbolic.primitives as prim + + op = bind(lpot_src, + sym.IntG(one_kernel_2d, sym.var("sigma"), qbx_forced_limit=None)) + + with cl.CommandQueue(cl_ctx) as queue: + sigma = cl.array.zeros(queue, discr.nnodes, dtype=float) + sigma.fill(1) + sigma.finish() + + result = op(queue, sigma=sigma) + + assert np.allclose(result.get(), 2 * np.pi) + +# }}} + + # You can test individual routines by typing # $ python test_layer_pot.py 'test_routine()' -- GitLab From f84b6d5904fca660bd7b2926aa608797a65e95a0 Mon Sep 17 00:00:00 2001 From: Matt Wala Date: Mon, 12 Jun 2017 22:37:27 -0500 Subject: [PATCH 2/6] test_nystrom_with_ones_kernel(): Add a non-self target. 
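A "non-self" target here is a PointsTarget whose points do not coincide with the source discretization, so the potential is evaluated by plain point-to-point quadrature at off-surface points. For the ones kernel on the unit circle, both the self and the non-self evaluation integrate the unit density to the circumference, 2*pi, which is what the updated assertions below check.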
--- test/test_layer_pot.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/test/test_layer_pot.py b/test/test_layer_pot.py index 8c74c9d4..5eb935d4 100644 --- a/test/test_layer_pot.py +++ b/test/test_layer_pot.py @@ -1280,7 +1280,7 @@ def test_nystrom_with_ones_kernel(ctx_getter): cl_ctx = ctx_getter() queue = cl.CommandQueue(cl_ctx) - nelements = 20 + nelements = 10 order = 8 mesh = make_curve_mesh(partial(ellipse, 1), @@ -1298,19 +1298,23 @@ def test_nystrom_with_ones_kernel(ctx_getter): lpot_src = NystromLayerPotentialSource(discr) from sumpy.kernel import one_kernel_2d - import pytential.symbolic.primitives as prim - op = bind(lpot_src, - sym.IntG(one_kernel_2d, sym.var("sigma"), qbx_forced_limit=None)) + expr = sym.IntG(one_kernel_2d, sym.var("sigma"), qbx_forced_limit=None) + + from pytential.target import PointsTarget + op_self = bind(lpot_src, expr) + op_nonself = bind((lpot_src, PointsTarget(np.zeros((2, 1), dtype=float))), expr) with cl.CommandQueue(cl_ctx) as queue: sigma = cl.array.zeros(queue, discr.nnodes, dtype=float) sigma.fill(1) sigma.finish() - result = op(queue, sigma=sigma) + result_self = op_self(queue, sigma=sigma) + result_nonself = op_nonself(queue, sigma=sigma) - assert np.allclose(result.get(), 2 * np.pi) + assert np.allclose(result_self.get(), 2 * np.pi) + assert np.allclose(result_nonself.get(), 2 * np.pi) # }}} -- GitLab From bf737cda78cf285951442e6aac02aff71e499a0b Mon Sep 17 00:00:00 2001 From: Matt Wala Date: Mon, 12 Jun 2017 22:48:43 -0500 Subject: [PATCH 3/6] Remove some accidentally included work from another branch. --- pytential/symbolic/compiler.py | 34 +------------------------------- pytential/symbolic/mappers.py | 17 +++------------- pytential/symbolic/primitives.py | 14 +++---------- 3 files changed, 7 insertions(+), 58 deletions(-) diff --git a/pytential/symbolic/compiler.py b/pytential/symbolic/compiler.py index 9233714e..fc97155f 100644 --- a/pytential/symbolic/compiler.py +++ b/pytential/symbolic/compiler.py @@ -130,10 +130,6 @@ class PotentialOutput(Record): .. attribute:: kernel_index - .. attribute:: diagonal_kernel_index - - Index for the diagonal of the kernel. May be *None* if not required. - .. attribute:: target_name .. attribute:: qbx_forced_limit @@ -156,17 +152,6 @@ class ComputePotentialInstruction(Instruction): a list of :class:`sumpy.kernel.Kernel` instances, indexed by :attr:`LayerPotentialOutput.kernel_index`. - .. attribute:: exclude_self - - If *True*, avoid evaluating a kernel diagonal when computing a self - interaction. Instead, evalute the diagonal using the - value from :attr:`diagonal_kernels`. - - .. attribute:: diagonal_kernels - - A :class:`tuple` of expressions for the kernel diagonals, indexed by - :attr:`LayerPotentialOutput.diagonal_kernel_index`. - .. attribute:: kernel_arguments a dictionary mapping arg names to kernel arguments @@ -180,7 +165,6 @@ class ComputePotentialInstruction(Instruction): .. attribute:: source .. attribute:: priority - """ def get_assignees(self): @@ -460,9 +444,6 @@ class OperatorCompiler(IdentityMapper): from pytential.symbolic.primitives import hashable_kernel_args return ( self.places[expr.source].op_group_features(expr) - # The presence of a diagonal kernel determines - # whether or not exclude_self should be True. 
- + (expr.diagonal_kernel is not None,) + hashable_kernel_args(expr.kernel_arguments)) @memoize_method @@ -601,16 +582,10 @@ class OperatorCompiler(IdentityMapper): kernels = sorted( set(op.kernel for op in group), key=lambda kernel: repr(kernel)) + kernel_to_index = dict( (kernel, i) for i, kernel in enumerate(kernels)) - diagonal_kernels = sorted( - set(op.diagonal_kernel for op in group) - set([None]), - key=lambda diagonal_kernel: repr(diagonal_kernel)) - diagonal_kernel_to_index = dict( - (diagonal_kernel, i) for i, diagonal_kernel - in enumerate(diagonal_kernels)) - from pytools import single_valued from sumpy.kernel import AxisTargetDerivativeRemover atdr = AxisTargetDerivativeRemover() @@ -628,9 +603,6 @@ class OperatorCompiler(IdentityMapper): PotentialOutput( name=name, kernel_index=kernel_to_index[op.kernel], - diagonal_kernel_index=( - None if op.diagonal_kernel is None - else diagonal_kernel_to_index[op.diagonal_kernel]), target_name=op.target, qbx_forced_limit=op.qbx_forced_limit, ) @@ -641,14 +613,10 @@ class OperatorCompiler(IdentityMapper): ComputePotentialInstruction( outputs=outputs, kernels=tuple(kernels), - diagonal_kernels=tuple(diagonal_kernels), kernel_arguments=kernel_arguments, base_kernel=base_kernel, density=density_var, source=expr.source, - exclude_self=any( - op.diagonal_kernel is not None - for op in group), priority=max(getattr(op, "priority", 0) for op in group), dep_mapper_factory=self.dep_mapper_factory)) diff --git a/pytential/symbolic/mappers.py b/pytential/symbolic/mappers.py index 8b4aae1f..9e6d42bf 100644 --- a/pytential/symbolic/mappers.py +++ b/pytential/symbolic/mappers.py @@ -169,7 +169,6 @@ class EvaluationMapper(EvaluationMapperBase): expr.kernel, self.rec(subexpr), expr.qbx_forced_limit, expr.source, expr.target, - expr.diagonal_kernel, kernel_arguments=dict( (name, self.rec(arg_expr)) for name, arg_expr in expr.kernel_arguments.items() @@ -238,7 +237,6 @@ class LocationTagger(CSECachingMapperMixin, IdentityMapper): expr.kernel, self.operand_rec(expr.density), expr.qbx_forced_limit, source, target, - diagonal_kernel=expr.diagonal_kernel, kernel_arguments=dict( (name, self.operand_rec(arg_expr)) for name, arg_expr in expr.kernel_arguments.items() @@ -393,7 +391,6 @@ class QBXPreprocessor(IdentityMapper): expr = expr.copy( kernel=expr.kernel, density=self.rec(expr.density), - # QBX does not use the diagonal kernel. Ignore. 
kernel_arguments=dict( (name, self.rec(arg_expr)) for name, arg_expr in expr.kernel_arguments.items() @@ -496,17 +493,14 @@ class StringifyMapper(BaseStringifyMapper): for name, arg_expr in kernel_arguments.items()) def map_int_g(self, expr, enclosing_prec): - return u"Int[%s->%s]@(%s)%s (%s * %s%s)" % ( + return u"Int[%s->%s]@(%s)%s (%s * %s)" % ( stringify_where(expr.source), stringify_where(expr.target), expr.qbx_forced_limit, self._stringify_kernel_args( expr.kernel_arguments), expr.kernel, - self.rec(expr.density, PREC_PRODUCT), - ( - "" if expr.diagonal_kernel is None - else (", diag=" + self.rec(expr.diagonal_kernel, PREC_NONE)))) + self.rec(expr.density, PREC_PRODUCT)) # }}} @@ -550,16 +544,11 @@ class GraphvizMapper(GraphvizMapperBase): map_q_weight = map_pytential_leaf def map_int_g(self, expr): - diag_descr = ( - "" if expr.diagonal_kernel is None - else "diag=%s" % self.rec(expr.diagonal_kernel)) - - descr = u"Int[%s->%s]@(%d) (%s%s)" % ( + descr = u"Int[%s->%s]@(%d) (%s)" % ( stringify_where(expr.source), stringify_where(expr.target), expr.qbx_forced_limit, expr.kernel, - diag_descr, ) self.lines.append( "%s [label=\"%s\",shape=box];" % ( diff --git a/pytential/symbolic/primitives.py b/pytential/symbolic/primitives.py index 445bfe91..b6cf78a2 100644 --- a/pytential/symbolic/primitives.py +++ b/pytential/symbolic/primitives.py @@ -550,7 +550,6 @@ class IntG(Expression): def __init__(self, kernel, density, qbx_forced_limit, source=None, target=None, - diagonal_kernel=None, kernel_arguments=None, **kwargs): """*target_derivatives* and later arguments should be considered @@ -559,7 +558,6 @@ class IntG(Expression): :arg kernel: a kernel as accepted by :func:`sumpy.kernel.to_kernel_and_args`, likely a :class:`sumpy.kernel.Kernel`. - :arg qbx_forced_limit: +1 if the output is required to originate from a QBX center on the "+" side of the boundary. -1 for the other side. Evaluation at a target with a value of +/- 1 in *qbx_forced_limit* @@ -576,8 +574,6 @@ class IntG(Expression): ``'avg'`` may be used as a shorthand to evaluate this potential as an average of the ``+1`` and the ``-1`` value. 
- :arg diagonal_kernel: The diagonal kernel, if necessary - :arg kernel_arguments: A dictionary mapping named :class:`sumpy.kernel.Kernel` arguments (see :meth:`sumpy.kernel.Kernel.get_args` @@ -636,8 +632,6 @@ class IntG(Expression): kernel_arguments[name] = val - print("KERNEL_ARGUMENTS PROVIDED", set(kernel_arguments.keys())) - provided_arg_names = set(kernel_arguments.keys()) missing_args = kernel_arg_names - provided_arg_names if missing_args: @@ -655,7 +649,6 @@ class IntG(Expression): self.source = source self.target = target self.kernel_arguments = kernel_arguments - self.diagonal_kernel = diagonal_kernel def copy(self, kernel=None, density=None, qbx_forced_limit=_NoArgSentinel, source=None, target=None, kernel_arguments=None): @@ -666,9 +659,8 @@ class IntG(Expression): source = source or self.source target = target or self.target kernel_arguments = kernel_arguments or self.kernel_arguments - diagonal_kernel = self.diagonal_kernel return type(self)(kernel, density, qbx_forced_limit, source, target, - diagonal_kernel, kernel_arguments) + kernel_arguments) def __getinitargs__(self): return (self.kernel, self.density, self.qbx_forced_limit, @@ -784,8 +776,8 @@ class _unspecified: # noqa def S(kernel, density, # noqa - qbx_forced_limit=_unspecified, source=None, target=None, - kernel_arguments=None, **kwargs): + qbx_forced_limit=_unspecified, source=None, target=None, + kernel_arguments=None, **kwargs): if qbx_forced_limit is _unspecified: warn("not specifying qbx_forced_limit on call to 'S' is deprecated, " -- GitLab From 9f5af9edca1d1d2b43de478110ba35efe3ea8b90 Mon Sep 17 00:00:00 2001 From: Matt Wala Date: Tue, 13 Jun 2017 00:05:41 -0500 Subject: [PATCH 4/6] [ci skip] Fix sloppy wording. --- pytential/nystrom.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pytential/nystrom.py b/pytential/nystrom.py index cc06d283..f7bbd6bc 100644 --- a/pytential/nystrom.py +++ b/pytential/nystrom.py @@ -41,8 +41,8 @@ __doc__ = """ # {{{ (panel-based) Nystrom layer potential source class NystromLayerPotentialSource(LayerPotentialSourceBase): - """A source discretization for a Nyström layer potential with panel-based - quadrature. + """A source discretization for a layer potential discretized with a Nyström + method that uses panel-based quadrature. """ def __init__(self, density_discr, -- GitLab From 5bd89b0399967f8c12de26f07adf4f1d8cd6fd9c Mon Sep 17 00:00:00 2001 From: Matt Wala Date: Wed, 14 Jun 2017 22:13:45 -0500 Subject: [PATCH 5/6] Nystrom -> Unregularized --- doc/discretization.rst | 4 ++-- pytential/symbolic/mappers.py | 7 ++++--- pytential/{nystrom.py => unregularized.py} | 14 +++++++------- test/test_layer_pot.py | 8 ++++---- 4 files changed, 17 insertions(+), 16 deletions(-) rename pytential/{nystrom.py => unregularized.py} (90%) diff --git a/doc/discretization.rst b/doc/discretization.rst index 4440f09f..ad2eb738 100644 --- a/doc/discretization.rst +++ b/doc/discretization.rst @@ -15,10 +15,10 @@ and you can start computing. .. automodule:: pytential.qbx -Nyström discretization +Unregularized discretization ------- -.. automodule:: pytential.nystrom +.. 
automodule:: pytential.unregularized Sources ------- diff --git a/pytential/symbolic/mappers.py b/pytential/symbolic/mappers.py index 9e6d42bf..47319841 100644 --- a/pytential/symbolic/mappers.py +++ b/pytential/symbolic/mappers.py @@ -340,9 +340,9 @@ class DerivativeBinder(DerivativeBinderBase, IdentityMapper): # }}} -# {{{ Nystrom preprocessor +# {{{ Unregularized preprocessor -class NystromPreprocessor(IdentityMapper): +class UnregularizedPreprocessor(IdentityMapper): def __init__(self, source_name, places): self.source_name = source_name @@ -350,7 +350,8 @@ class NystromPreprocessor(IdentityMapper): def map_int_g(self, expr): if expr.qbx_forced_limit in (-1, 1): - raise ValueError("Nystrom evaluation does not support one-sided limits") + raise ValueError( + "Unregularized evaluation does not support one-sided limits") expr = expr.copy( qbx_forced_limit=None, diff --git a/pytential/nystrom.py b/pytential/unregularized.py similarity index 90% rename from pytential/nystrom.py rename to pytential/unregularized.py index f7bbd6bc..4ea94bf1 100644 --- a/pytential/nystrom.py +++ b/pytential/unregularized.py @@ -34,15 +34,15 @@ logger = logging.getLogger(__name__) __doc__ = """ -.. autoclass:: NystromLayerPotentialSource +.. autoclass:: UnregularizedLayerPotentialSource """ -# {{{ (panel-based) Nystrom layer potential source +# {{{ (panel-based) unregularized layer potential source -class NystromLayerPotentialSource(LayerPotentialSourceBase): +class UnregularizedLayerPotentialSource(LayerPotentialSourceBase): """A source discretization for a layer potential discretized with a Nyström - method that uses panel-based quadrature. + method that uses panel-based quadrature and does not modify the kernel. """ def __init__(self, density_discr, @@ -97,8 +97,8 @@ class NystromLayerPotentialSource(LayerPotentialSourceBase): :arg name: The symbolic name for *self*, which the preprocessor should use to find which expressions it is allowed to modify. """ - from pytential.symbolic.mappers import NystromPreprocessor - return NystromPreprocessor(name, discretizations)(expr) + from pytential.symbolic.mappers import UnregularizedPreprocessor + return UnregularizedPreprocessor(name, discretizations)(expr) def exec_compute_potential_insn_direct(self, queue, insn, bound_expr, evaluate): kernel_args = {} @@ -130,7 +130,7 @@ class NystromLayerPotentialSource(LayerPotentialSourceBase): __all__ = ( - NystromLayerPotentialSource, + UnregularizedLayerPotentialSource, ) # vim: fdm=marker diff --git a/test/test_layer_pot.py b/test/test_layer_pot.py index 5eb935d4..8486710f 100644 --- a/test/test_layer_pot.py +++ b/test/test_layer_pot.py @@ -1273,10 +1273,10 @@ def test_off_surface_eval_vs_direct(ctx_getter, do_plot=False): # }}} -# {{{ nystrom tests +# {{{ unregularized tests -def test_nystrom_with_ones_kernel(ctx_getter): +def test_unregularized_with_ones_kernel(ctx_getter): cl_ctx = ctx_getter() queue = cl.CommandQueue(cl_ctx) @@ -1294,8 +1294,8 @@ def test_nystrom_with_ones_kernel(ctx_getter): discr = Discretization(cl_ctx, mesh, InterpolatoryQuadratureSimplexGroupFactory(order)) - from pytential.nystrom import NystromLayerPotentialSource - lpot_src = NystromLayerPotentialSource(discr) + from pytential.unregularized import UnregularizedLayerPotentialSource + lpot_src = UnregularizedLayerPotentialSource from sumpy.kernel import one_kernel_2d -- GitLab From f6585afbc10aaff1ca265fe6a907983a4b6882f6 Mon Sep 17 00:00:00 2001 From: Matt Wala Date: Thu, 15 Jun 2017 01:17:45 -0500 Subject: [PATCH 6/6] Fix missing argument. 
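The rename in the previous commit dropped the constructor call, leaving lpot_src bound to the UnregularizedLayerPotentialSource class object rather than to an instance built on discr; the constructor call is restored below.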
--- test/test_layer_pot.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/test_layer_pot.py b/test/test_layer_pot.py index 8486710f..994c9673 100644 --- a/test/test_layer_pot.py +++ b/test/test_layer_pot.py @@ -1295,7 +1295,7 @@ def test_unregularized_with_ones_kernel(ctx_getter): InterpolatoryQuadratureSimplexGroupFactory(order)) from pytential.unregularized import UnregularizedLayerPotentialSource - lpot_src = UnregularizedLayerPotentialSource + lpot_src = UnregularizedLayerPotentialSource(discr) from sumpy.kernel import one_kernel_2d -- GitLab
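For reference, here is a minimal usage sketch of the source class as it stands at the end of this series. It is not part of the patches: the Laplace kernel, the target coordinates, and the context setup are illustrative assumptions layered on the pytential/meshmode/sumpy interfaces exercised by the test above, and only off-surface targets are used because, as noted in patch 1, self evaluation of a singular kernel gets no special treatment yet.

    # Sketch only -- not part of the patch series.  Direct (unregularized)
    # evaluation of a Laplace single-layer potential at off-surface points.
    from functools import partial

    import numpy as np
    import pyopencl as cl

    from meshmode.mesh.generation import make_curve_mesh, ellipse
    from meshmode.discretization import Discretization
    from meshmode.discretization.poly_element import \
            InterpolatoryQuadratureSimplexGroupFactory

    from pytential import bind, sym
    from pytential.target import PointsTarget
    from pytential.unregularized import UnregularizedLayerPotentialSource
    from sumpy.kernel import LaplaceKernel

    cl_ctx = cl.create_some_context()
    queue = cl.CommandQueue(cl_ctx)

    order = 8
    mesh = make_curve_mesh(partial(ellipse, 1), np.linspace(0, 1, 21), order)
    discr = Discretization(cl_ctx, mesh,
            InterpolatoryQuadratureSimplexGroupFactory(order))

    lpot_src = UnregularizedLayerPotentialSource(discr)

    # Two targets away from the unit circle, at (2, 0) and (3, 0); direct
    # P2P evaluation is accurate there without any singularity handling.
    targets = PointsTarget(np.array([[2.0, 3.0], [0.0, 0.0]]))

    sigma = cl.array.zeros(queue, discr.nnodes, dtype=np.float64)
    sigma.fill(1)
    sigma.finish()

    # Single-layer potential, evaluated without QBX (qbx_forced_limit=None).
    op = bind((lpot_src, targets),
            sym.S(LaplaceKernel(2), sym.var("sigma"), qbx_forced_limit=None))
    potential = op(queue, sigma=sigma)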