From 08b65e72295eaea194c5178b8c4058d20bb52983 Mon Sep 17 00:00:00 2001
From: Andreas Kloeckner <inform@tiker.net>
Date: Wed, 5 Jun 2013 21:54:30 -0400
Subject: [PATCH] Adapt to use with pytential.

---
 MEMO                           |   1 +
 setup.py                       |  37 +--
 sumpy/assignment_collection.py |  28 ++-
 sumpy/codegen.py               |  94 +++++++-
 sumpy/expansion/__init__.py    |   6 +-
 sumpy/kernel.py                | 404 +++++++++++++++++++++------------
 sumpy/layerpot.py              |  56 +++--
 sumpy/p2p.py                   |  12 +-
 sumpy/symbolic.py              |  25 +-
 sumpy/tools.py                 |   8 +
 10 files changed, 436 insertions(+), 235 deletions(-)
 create mode 100644 MEMO

diff --git a/MEMO b/MEMO
new file mode 100644
index 00000000..6177f9fe
--- /dev/null
+++ b/MEMO
@@ -0,0 +1 @@
+- AoS/SoA flexibility?
diff --git a/setup.py b/setup.py
index 396dd876..d8d58301 100644
--- a/setup.py
+++ b/setup.py
@@ -19,25 +19,32 @@ setup(name="sumpy",
       Code-generating FMM etc.
       """,
         classifiers=[
-          'Development Status :: 3 - Alpha',
-          'Intended Audience :: Developers',
-          'Intended Audience :: Other Audience',
-          'Intended Audience :: Science/Research',
-          'License :: OSI Approved :: MIT License',
-          'Natural Language :: English',
-          'Programming Language :: Python',
-          'Topic :: Scientific/Engineering',
-          'Topic :: Scientific/Engineering :: Information Analysis',
-          'Topic :: Scientific/Engineering :: Mathematics',
-          'Topic :: Scientific/Engineering :: Visualization',
-          'Topic :: Software Development :: Libraries',
-          'Topic :: Utilities',
-          ],
+            'Development Status :: 3 - Alpha',
+            'Intended Audience :: Developers',
+            'Intended Audience :: Other Audience',
+            'Intended Audience :: Science/Research',
+            'License :: OSI Approved :: MIT License',
+            'Natural Language :: English',
+            'Programming Language :: Python',
+            'Topic :: Scientific/Engineering',
+            'Topic :: Scientific/Engineering :: Information Analysis',
+            'Topic :: Scientific/Engineering :: Mathematics',
+            'Topic :: Scientific/Engineering :: Visualization',
+            'Topic :: Software Development :: Libraries',
+            'Topic :: Utilities',
+            ],
 
         author="Andreas Kloeckner",
         author_email="inform@tiker.net",
-        license = "MIT",
+        license="MIT",
         packages=["sumpy", "sumpy.expansion"],
 
+        install_requires=[
+            "loo.py>=2013.1beta",
+            "pytools>=2013.3",
+            "pytest>=2.3",
+            ],
+
+        # 2to3 invocation
         cmdclass={'build_py': build_py})
diff --git a/sumpy/assignment_collection.py b/sumpy/assignment_collection.py
index 5f415e59..f0902272 100644
--- a/sumpy/assignment_collection.py
+++ b/sumpy/assignment_collection.py
@@ -25,8 +25,8 @@ THE SOFTWARE.
 
 import sympy as sp
 
-
-
+import logging
+logger = logging.getLogger(__name__)
 
 
 def _generate_unique_possibilities(prefix):
@@ -38,8 +38,6 @@ def _generate_unique_possibilities(prefix):
         try_num += 1
 
 
-
-
 class _SymbolGenerator:
     def __init__(self, taken_symbols):
         self.taken_symbols = taken_symbols
@@ -47,14 +45,12 @@ class _SymbolGenerator:
 
     def __call__(self, base="expr"):
         for id_str in _generate_unique_possibilities(base):
-            if id_str not in self.taken_symbols and id_str not in self.generated_names:
+            if id_str not in self.taken_symbols \
+                    and id_str not in self.generated_names:
                 self.generated_names.add(id_str)
                 yield sp.Symbol(id_str)
 
 
-
-
-
 class SymbolicAssignmentCollection(object):
     """Represents a collection of assignments::
@@ -87,6 +83,11 @@ class SymbolicAssignmentCollection(object):
 
         self.user_symbols = set()
 
+    def __str__(self):
+        return "\n".join(
+                "%s <- %s" % (name, expr)
+                for name, expr in self.assignments.iteritems())
+
     def get_all_dependencies(self, var_name):
         """Including recursive dependencies."""
         try:
@@ -135,6 +136,8 @@ class SymbolicAssignmentCollection(object):
         return new_name
 
     def run_global_cse(self, extra_exprs=[]):
+        logger.info("common subexpression elimination: start")
+
         assign_names = list(self.assignments)
         assign_exprs = [self.assignments[name] for name in assign_names]
         new_assignments, new_exprs = sp.cse(assign_exprs + extra_exprs,
@@ -144,15 +147,18 @@ class SymbolicAssignmentCollection(object):
         new_extra_exprs = new_exprs[len(assign_exprs):]
 
         for name, new_expr in zip(assign_names, new_assign_exprs):
-             self.assignments[name] = new_expr
+            self.assignments[name] = new_expr
 
         for name, value in new_assignments:
             assert isinstance(name, sp.Symbol)
             self.add_assignment(name.name, value)
 
+        logger.info("common subexpression elimination: done")
         return new_extra_exprs
 
     def kill_trivial_assignments(self, exprs):
+        logger.debug("kill trivial assignments: start")
+
         approved_assignments = []
         rejected_assignments = []
@@ -173,10 +179,10 @@ class SymbolicAssignmentCollection(object):
         exprs = [expr.subs(unsubst_rej) for expr in exprs]
 
         self.assignments = new_assignments
-        return exprs
-
+        logger.info("kill trivial assignments: done")
+        return exprs
 
 
 # vim: fdm=marker
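[Illustration, editorial addition -- not part of the patch. A minimal sketch of how the newly instrumented SymbolicAssignmentCollection is driven; it assumes add_assignment(name, expr) records an expression under the given name, as the hunks above suggest, and that a logging handler is configured so the new start/done messages become visible.]

    import logging
    import sympy as sp
    from sumpy.assignment_collection import SymbolicAssignmentCollection

    logging.basicConfig(level=logging.INFO)   # show the new info-level messages

    x, y = sp.symbols("x y")
    sac = SymbolicAssignmentCollection()
    sac.add_assignment("knl0", sp.exp(x*y) + y*sp.exp(x*y))
    sac.run_global_cse()    # logs "common subexpression elimination: start/done"
    print(sac)              # new __str__: one "name <- expr" line per assignment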
diff --git a/sumpy/codegen.py b/sumpy/codegen.py
index f2d40b55..a8b73b8e 100644
--- a/sumpy/codegen.py
+++ b/sumpy/codegen.py
@@ -34,6 +34,37 @@ import pymbolic.primitives as prim
 
 from pytools import memoize_method
 
+from pymbolic.sympy_interface import (
+        SympyToPymbolicMapper as SympyToPymbolicMapperBase)
+
+import logging
+logger = logging.getLogger(__name__)
+
+
+# {{{ sympy -> pymbolic mapper
+
+class SympyToPymbolicMapper(SympyToPymbolicMapperBase):
+    def __init__(self, assignments):
+        self.assignments = assignments
+        self.derivative_cse_names = set()
+
+    def map_Derivative(self, expr):
+        # Sympy has picked up the habit of picking arguments out of derivatives
+        # and pronouncing them common subexpressions. We do not want that here:
+        # undo it, so that the Bessel substitutor further down can do its job.
+
+        if expr.expr.is_Symbol:
+            # These will get removed, because loopy won't be able to deal
+            # with them--they contain undefined placeholder symbols.
+            self.derivative_cse_names.add(expr.expr.name)
+
+        return prim.Derivative(self.rec(
+            expr.expr.subs(self.assignments)),
+            tuple(v.name for v in expr.variables))
+
+
+# }}}
+
 
 # {{{ bessel handling
 
@@ -64,6 +95,12 @@ hank1_01_result_dtype = cl.tools.get_or_register_dtype("hank1_01_result",
 
 
 def bessel_mangler(identifier, arg_dtypes):
+    """A function "mangler" to make Bessel functions
+    digestible for :mod:`loopy`.
+
+    See argument *function_manglers* to :func:`loopy.make_kernel`.
+    """
+
     if identifier == "hank1_01":
         return (np.dtype(hank1_01_result_dtype), identifier,
                 (np.dtype(np.complex128),))
@@ -202,6 +239,8 @@ class BesselSubstitutor(IdentityMapper):
 # }}}
 
 
+# {{{ power rewriter
+
 class PowerRewriter(IdentityMapper):
     def map_power(self, expr):
         exp = expr.exponent
@@ -245,8 +284,17 @@ class PowerRewriter(IdentityMapper):
 
         return IdentityMapper.map_power(self, expr)
 
+# }}}
+
+
+# {{{ fraction killer
 
 class FractionKiller(IdentityMapper):
+    """Kills fractions where the numerator is evenly divisible by the
+    denominator.
+
+    (Why does :mod:`sympy` even produce these?)
+    """
+
     def map_quotient(self, expr):
         num = expr.numerator
         denom = expr.denominator
@@ -258,11 +306,17 @@ class FractionKiller(IdentityMapper):
 
         return IdentityMapper.map_quotient(self, expr)
 
+# }}}
+
+
+# {{{ vector component rewriter
 
 INDEXED_VAR_RE = re.compile("^([a-zA-Z_]+)([0-9]+)$")
 
 
 class VectorComponentRewriter(IdentityMapper):
+    """For names in name_whitelist, turn ``a3`` into ``a[3]``."""
+
     def __init__(self, name_whitelist=set()):
         self.name_whitelist = name_whitelist
 
@@ -278,6 +332,10 @@ class VectorComponentRewriter(IdentityMapper):
             else:
                 return IdentityMapper.map_variable(self, expr)
 
+# }}}
+
+
+# {{{ sum sign grouper
 
 class SumSignGrouper(IdentityMapper):
     """Anti-cancellation cargo-cultism."""
@@ -306,6 +364,8 @@ class SumSignGrouper(IdentityMapper):
 
         return prim.Sum(tuple(first_group+second_group))
 
+# }}}
+
 
 class MathConstantRewriter(IdentityMapper):
     def map_variable(self, expr):
@@ -329,10 +389,15 @@ class ComplexConstantSizer(IdentityMapper):
 
 def to_loopy_insns(assignments, vector_names=set(), pymbolic_expr_maps=[],
         complex_dtype=None):
+    logger.info("loopy instruction generation: start")
+
     # convert from sympy
-    from pymbolic.sympy_interface import SympyToPymbolicMapper
-    sympy_conv = SympyToPymbolicMapper()
+    sympy_conv = SympyToPymbolicMapper(assignments)
     assignments = [(name, sympy_conv(expr)) for name, expr in assignments]
+    assignments = [
+            (name, expr) for name, expr in assignments
+            if name not in sympy_conv.derivative_cse_names
+            ]
 
     bdr = BesselDerivativeReplacer()
     assignments = [(name, bdr(expr)) for name, expr in assignments]
@@ -347,29 +412,36 @@ def to_loopy_insns(assignments, vector_names=set(), pymbolic_expr_maps=[],
     pwr = PowerRewriter()
     ssg = SumSignGrouper()
     fck = FractionKiller()
-    if complex_dtype is not None:
-        ccs = ComplexConstantSizer(np.dtype(complex_dtype))
-    else:
-        ccs = None
 
-    def convert_expr(expr):
+    if 0:
+        if complex_dtype is not None:
+            ccs = ComplexConstantSizer(np.dtype(complex_dtype))
+        else:
+            ccs = None
+
+    def convert_expr(name, expr):
+        logger.debug("generate expression for: %s" % name)
         expr = bdr(expr)
         expr = bessel_sub(expr)
         expr = vcr(expr)
         expr = pwr(expr)
         expr = fck(expr)
         expr = ssg(expr)
-        if ccs is not None:
-            expr = ccs(expr)
+        if 0:
+            if ccs is not None:
+                expr = ccs(expr)
         for m in pymbolic_expr_maps:
             expr = m(expr)
         return expr
 
     import loopy as lp
-    return [
+    result = [
             lp.Instruction(id=None,
-                assignee=name, expression=convert_expr(expr),
+                assignee=name,
+                expression=convert_expr(name, expr),
                 temp_var_type=lp.auto)
             for name, expr in assignments]
+
+    logger.info("loopy instruction generation: done")
+    return result
+
 
 # vim: fdm=marker
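[Illustration, editorial addition -- not part of the patch. A sketch of the public entry point changed above: to_loopy_insns() now builds its own SympyToPymbolicMapper from the assignment list and drops the derivative-related CSE names it collects. The expressions below are made up; running this needs loopy and pyopencl installed, as the module itself does.]

    import sympy as sp
    from sumpy.codegen import to_loopy_insns

    b0, b1 = sp.symbols("b0 b1")
    r = sp.sqrt(b0**2 + b1**2)

    # One (name, expression) pair per generated loopy instruction.  Listing "b"
    # in vector_names lets VectorComponentRewriter turn b0, b1 into b[0], b[1].
    insns = to_loopy_insns([("knl", sp.log(r))], vector_names=set(["b"]))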
diff --git a/sumpy/expansion/__init__.py b/sumpy/expansion/__init__.py
index cc736b8a..e52ff61d 100644
--- a/sumpy/expansion/__init__.py
+++ b/sumpy/expansion/__init__.py
@@ -35,8 +35,8 @@ class ExpansionBase(object):
     # {{{ propagate kernel interface
 
     @property
-    def dimensions(self):
-        return self.kernel.dimensions
+    def dim(self):
+        return self.kernel.dim
 
     @property
     def is_complex_valued(self):
@@ -85,7 +85,7 @@ class VolumeTaylorExpansionBase(object):
                 generate_nonnegative_integer_tuples_summing_to_at_most
                 as gnitstam)
 
-        return sorted(gnitstam(self.order, self.kernel.dimensions), key=sum)
+        return sorted(gnitstam(self.order, self.kernel.dim), key=sum)
 
         return range(self.order+1)
 
diff --git a/sumpy/kernel.py b/sumpy/kernel.py
index 3d42c452..02195244 100644
--- a/sumpy/kernel.py
+++ b/sumpy/kernel.py
@@ -26,7 +26,6 @@ THE SOFTWARE.
 import loopy as lp
 import sympy as sp
 import numpy as np
-from pymbolic.primitives import Expression
 from pymbolic.mapper import IdentityMapper
 
 
@@ -36,18 +35,45 @@ class Kernel(object):
     """Basic kernel interface.
 
     .. attribute:: is_complex_valued
-    .. attribute:: dimensions
+    .. attribute:: dim
 
-    *dimensions* is allowed to be *None* if the dimensionality is not yet
-    known. Use the :meth:`fix_dimensions`
+    *dim* is allowed to be *None* if the dimensionality is not yet
+    known. Use :meth:`fix_dim` to set it.
     """
 
-    def __init__(self, dimensions=None):
-        self.dimensions = dimensions
+    def __init__(self, dim=None):
+        self._dim = dim
 
-    def fix_dimensions(self, dimensions):
-        """Return a new :class:`Kernel` with :attr:`dimensions` set to
-        *dimensions*.
+    def __eq__(self, other):
+        if self is other:
+            return True
+        elif hash(self) != hash(other):
+            return False
+        else:
+            return (type(self) is type(other)
+                    and self.__getinitargs__() == other.__getinitargs__())
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+    def __hash__(self):
+        try:
+            return self.hash_value
+        except AttributeError:
+            self.hash_value = hash((type(self),) + self.__getinitargs__())
+            return self.hash_value
+
+    @property
+    def dim(self):
+        if self._dim is None:
+            raise RuntimeError("the number of dimensions for this kernel "
+                    "has not yet been set")
+
+        return self._dim
+
+    def fix_dim(self, dim):
+        """Return a new :class:`Kernel` with :attr:`dim` set to
+        *dim*.
         """
         raise NotImplementedError
 
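[Illustration, editorial addition -- not part of the patch. The __eq__/__ne__/__hash__ trio added above gives kernels value semantics based on __getinitargs__(), with the hash computed once and cached in hash_value, so kernels can serve as dictionary or cache keys. A small sketch:]

    from sumpy.kernel import HelmholtzKernel

    k1 = HelmholtzKernel(2)
    k2 = HelmholtzKernel(2)
    assert k1 == k2              # compares (dim, helmholtz_k_name, allow_evanescent)
    assert hash(k1) == hash(k2)  # computed once, then cached in .hash_value

    cache = {k1: "expansion data"}
    assert cache[k2] == "expansion data"   # value-equal kernels share cache entries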
""" return expr @@ -115,26 +141,23 @@ class Kernel(object): class LaplaceKernel(Kernel): is_complex_valued = False + def __getinitargs__(self): + return (self.dim,) + def __repr__(self): - if self.dimensions is not None: - return "Laplace(%d)" % self.dimensions + if self.dim is not None: + return "Laplace(%d)" % self.dim else: return "Laplace()" - def fix_dimensions(self, dimensions): - """Return a new :class:`Kernel` with :attr:`dimensions` set to - *dimensions*. - """ - return LaplaceKernel(dimensions) - def get_expression(self, dist_vec): - assert self.dimensions == len(dist_vec) + assert self.dim == len(dist_vec) from sumpy.symbolic import sympy_real_norm_2 r = sympy_real_norm_2(dist_vec) - if self.dimensions == 2: + if self.dim == 2: return sp.log(r) - elif self.dimensions == 3: + elif self.dim == 3: return 1/r else: raise RuntimeError("unsupported dimensionality") @@ -142,35 +165,33 @@ class LaplaceKernel(Kernel): def get_scaling(self): """Return a global scaling of the kernel.""" - if self.dimensions == 2: + if self.dim == 2: return 1/(-2*sp.pi) - elif self.dimensions == 3: + elif self.dim == 3: return 1/(4*sp.pi) else: raise RuntimeError("unsupported dimensionality") + mapper_method = "map_laplace_kernel" + class HelmholtzKernel(Kernel): - def __init__(self, dimensions=None, helmholtz_k_name="k", + def __init__(self, dim=None, helmholtz_k_name="k", allow_evanescent=False): - Kernel.__init__(self, dimensions) + Kernel.__init__(self, dim) self.helmholtz_k_name = helmholtz_k_name self.allow_evanescent = allow_evanescent + def __getinitargs__(self): + return (self.dim, self.helmholtz_k_name, self.allow_evanescent) + def __repr__(self): - if self.dimensions is not None: + if self.dim is not None: return "Helmh(dim=%s, %s)" % ( - self.dimensions, self.helmholtz_k_name) + self.dim, self.helmholtz_k_name) else: return "Helmh(%s)" % (self.helmholtz_k_name) - def fix_dimensions(self, dimensions): - """Return a new :class:`Kernel` with :attr:`dimensions` set to - *dimensions*. 
- """ - return HelmholtzKernel(dimensions, self.helmholtz_k_name, - self.allow_evanescent) - is_complex_valued = True def prepare_loopy_kernel(self, loopy_knl): @@ -184,16 +205,16 @@ class HelmholtzKernel(Kernel): return loopy_knl def get_expression(self, dist_vec): - assert self.dimensions == len(dist_vec) + assert self.dim == len(dist_vec) from sumpy.symbolic import sympy_real_norm_2 r = sympy_real_norm_2(dist_vec) k = sp.Symbol(self.helmholtz_k_name) - if self.dimensions == 2: + if self.dim == 2: return sp.Function("hankel_1")(0, k*r) - elif self.dimensions == 3: + elif self.dim == 3: return sp.exp(sp.I*k*r)/r else: raise RuntimeError("unsupported dimensionality") @@ -201,9 +222,9 @@ class HelmholtzKernel(Kernel): def get_scaling(self): """Return a global scaling of the kernel.""" - if self.dimensions == 2: + if self.dim == 2: return sp.I/4 - elif self.dimensions == 3: + elif self.dim == 3: return 1/(4*sp.pi) else: raise RuntimeError("unsupported dimensionality") @@ -220,6 +241,8 @@ class HelmholtzKernel(Kernel): from sumpy.codegen import BESSEL_PREAMBLE return [("sumpy-bessel", BESSEL_PREAMBLE)] + mapper_method = "map_helmholtz_kernel" + class DifferenceKernel(Kernel): def __init__(self, kernel_plus, kernel_minus): @@ -228,135 +251,97 @@ class DifferenceKernel(Kernel): raise ValueError("kernels in difference kernel " "must be basic, unwrapped PDE kernels") - if kernel_plus.dimensions != kernel_minus.dimensions: + if kernel_plus.dim != kernel_minus.dim: raise ValueError( - "kernels in difference kernel have different dimensions") + "kernels in difference kernel have different dim") self.kernel_plus = kernel_plus self.kernel_minus = kernel_minus - Kernel.__init__(self, self.kernel_plus.dimensions) + Kernel.__init__(self, self.kernel_plus.dim) - def fix_dimensions(self, dimensions): - """Return a new :class:`Kernel` with :attr:`dimensions` set to - *dimensions*. + def __getinitargs__(self): + return (self.kernel_plus, self.kernel_minus) + + def fix_dimensions(self, dim): + """Return a new :class:`Kernel` with :attr:`dim` set to + *dim*. 
""" return DifferenceKernel( - self.kernel_plus.fix_dimensions(dimensions), - self.kernel_minus.fix_dimensions(dimensions)) + self.kernel_plus.fix_dimensions(dim), + self.kernel_minus.fix_dimensions(dim)) + + mapper_method = "map_difference_kernel" # FIXME mostly unimplemented # }}} -def normalize_kernel(kernel): - if not isinstance(kernel, Kernel): - if kernel == 0: - kernel = LaplaceKernel() - elif isinstance(kernel, str): - kernel = HelmholtzKernel(None, kernel) - else: - raise ValueError("Only Kernel instances, 0 (for Laplace) and " - "variable names (strings) " - "for the Helmholtz parameter are allowed as kernels.") - - return kernel - - -def count_derivatives(kernel): - if isinstance(kernel, DerivativeBase): - return 1 + count_derivatives(kernel.kernel) - elif isinstance(kernel, KernelWrapper): - return count_derivatives(kernel.kernel) - else: - return 0 - - -def remove_axis_target_derivatives(kernel): - if isinstance(kernel, AxisTargetDerivative): - return remove_axis_target_derivatives(kernel.kernel) - elif isinstance(kernel, KernelWrapper): - kernel.replace_inner_kernel( - remove_axis_target_derivatives(kernel.kernel)) - else: - return kernel - - # {{{ a kernel defined as wrapping another one--e.g., derivatives class KernelWrapper(Kernel): - def __init__(self, kernel): - Kernel.__init__(self, kernel.dimensions) - self.kernel = kernel + def __init__(self, inner_kernel): + Kernel.__init__(self, inner_kernel.dim) + self.inner_kernel = inner_kernel def get_base_kernel(self): - return self.kernel.get_base_kernel() + return self.inner_kernel.get_base_kernel() def prepare_loopy_kernel(self, loopy_knl): - return self.kernel.prepare_loopy_kernel(loopy_knl) + return self.inner_kernel.prepare_loopy_kernel(loopy_knl) @property def is_complex_valued(self): - return self.kernel.is_complex_valued + return self.inner_kernel.is_complex_valued def get_expression(self, dist_vec): - return self.kernel.get_expression(dist_vec) + return self.inner_kernel.get_expression(dist_vec) def postprocess_at_source(self, expr, avec): - return self.kernel.postprocess_at_source(expr, avec) + return self.inner_kernel.postprocess_at_source(expr, avec) def postprocess_at_target(self, expr, avec): - return self.kernel.postprocess_at_target(expr, avec) + return self.inner_kernel.postprocess_at_target(expr, avec) def get_scaling(self): - return self.kernel.get_scaling() + return self.inner_kernel.get_scaling() def transform_to_code(self, expr): - return self.kernel.transform_to_code(expr) + return self.inner_kernel.transform_to_code(expr) def get_args(self): - return self.kernel.get_args() + return self.inner_kernel.get_args() def get_preambles(self): - return self.kernel.get_preambles() + return self.inner_kernel.get_preambles() # }}} # {{{ derivatives -class KernelPartialDerivative(Expression): - """A placeholder for a partial derivative of a kernel, to be used - in the *derivative_expression* arguments of :class:`DirectionalSourceDerivative` - and :class:`DirectionalTargetDerivative`. 
- """ - - def __init__(self, axis): - self.axis = axis - - def __getinitargs__(self): - return (self.axis,) - - mapper_method = "map_kernel_partial_derivative" - - class DerivativeBase(KernelWrapper): pass class AxisTargetDerivative(DerivativeBase): - def __init__(self, axis, kernel): - KernelWrapper.__init__(self, kernel) + def __init__(self, axis, inner_kernel): + KernelWrapper.__init__(self, inner_kernel) self.axis = axis - def replace_inner_kernel(self, new_inner_kernel): - return AxisTargetDerivative(new_inner_kernel, self.derivative_expression) + def __getinitargs__(self): + return (self.axis, self.inner_kernel) + + def __str__(self): + return "d/dx%d %s" % (self.axis, self.inner_kernel) def postprocess_at_target(self, expr, bvec): - expr = self.kernel.postprocess_at_target(expr, bvec) + expr = self.inner_kernel.postprocess_at_target(expr, bvec) return expr.diff(bvec[self.axis]) + mapper_method = "map_axis_target_derivative" + class _VectorIndexPrefixer(IdentityMapper): def __init__(self, vec_name, prefix): @@ -373,76 +358,193 @@ class _VectorIndexPrefixer(IdentityMapper): return IdentityMapper.map_subscript(self, expr) -class DirectionalTargetDerivative(DerivativeBase): - def __init__(self, kernel, derivative_expression): +class DirectionalDerivative(DerivativeBase): + def __init__(self, inner_kernel, dir_vec_name=None, dir_vec_data=None): """ - :arg derivative_expression: A scalar expression for the directional - derivative to be computed. :class:`KernelPartialDerivative` is - used as a placeholder for partial derivatives of the kernel. + :arg dir_vec_data: an object of unspecified type, available for + use by client applications to store information about the + direction vector. :mod:`pytential` for instance uses this + to store the direction vector expression to be evaluated. """ - KernelWrapper.__init__(self, kernel) - self.derivative_expression = derivative_expression - def replace_inner_kernel(self, new_inner_kernel): - return DirectionalTargetDerivative( - new_inner_kernel, self.derivative_expression) + if dir_vec_name is None: + dir_vec_name = self.directional_kind + "_derivative_dir" + + KernelWrapper.__init__(self, inner_kernel) + self.dir_vec_name = dir_vec_name + self.dir_vec_data = dir_vec_data + + def __getinitargs__(self): + dir_vec_data = self.dir_vec_data + + # for hashability + if isinstance(dir_vec_data, np.ndarray): + dir_vec_data = tuple(dir_vec_data) + + return (self.inner_kernel, self.dir_vec_name, dir_vec_data) + + def __str__(self): + return r"%s . 
\/_%s %s" % ( + self.dir_vec_name, self.directional_kind[0], self.inner_kernel) + + def get_args(self): + return self.inner_kernel.get_args() + [ + lp.GlobalArg(self.dir_vec_name, None, shape=lp.auto, order="C")] + + +class DirectionalTargetDerivative(DirectionalDerivative): + directional_kind = "tgt" def transform_to_code(self, expr): from sumpy.codegen import VectorComponentRewriter vcr = VectorComponentRewriter([self.dir_vec_name]) from pymbolic.primitives import Variable return _VectorIndexPrefixer(self.dir_vec_name, (Variable("itgt"),))( - vcr(self.kernel.transform_to_code(expr))) + vcr(self.inner_kernel.transform_to_code(expr))) def postprocess_at_target(self, expr, bvec): - expr = self.kernel.postprocess_at_target(expr, bvec) + expr = self.inner_kernel.postprocess_at_target(expr, bvec) - dimensions = len(bvec) - assert dimensions == self.dimensions + dim = len(bvec) + assert dim == self.dim from sumpy.symbolic import make_sym_vector - dir_vec = make_sym_vector(self.dir_vec_name, dimensions) + dir_vec = make_sym_vector(self.dir_vec_name, dim) # bvec = tgt-center return sum(dir_vec[axis]*expr.diff(bvec[axis]) - for axis in range(dimensions)) - + for axis in range(dim)) -class DirectionalSourceDerivative(DerivativeBase): - def __init__(self, kernel, derivative_expression): - """ - :arg derivative_expression: A scalar expression for the directional - derivative to be computed. :class:`KernelPartialDerivative` is - used as a placeholder for partial derivatives of the kernel. - """ + mapper_method = "map_directional_target_derivative" - KernelWrapper.__init__(self, kernel) - self.derivative_expression = derivative_expression - def replace_inner_kernel(self, new_inner_kernel): - return DirectionalSourceDerivative( - new_inner_kernel, self.derivative_expression) +class DirectionalSourceDerivative(DirectionalDerivative): + directional_kind = "src" def transform_to_code(self, expr): from sumpy.codegen import VectorComponentRewriter vcr = VectorComponentRewriter([self.dir_vec_name]) from pymbolic.primitives import Variable return _VectorIndexPrefixer(self.dir_vec_name, (Variable("isrc"),))( - vcr(self.kernel.transform_to_code(expr))) + vcr(self.inner_kernel.transform_to_code(expr))) - def postprocess_at_source(self, expr, avec): - expr = self.kernel.postprocess_at_source(expr, avec) + def postprocess_at_target(self, expr, bvec): + expr = self.inner_kernel.postprocess_at_target(expr, bvec) - dimensions = len(avec) - assert dimensions == self.dimensions + dim = len(bvec) + assert dim == self.dim from sumpy.symbolic import make_sym_vector - dir_vec = make_sym_vector(self.dir_vec_name, dimensions) + dir_vec = make_sym_vector(self.dir_vec_name, dim) + + # bvec = tgt-center + return sum(dir_vec[axis]*expr.diff(bvec[axis]) + for axis in range(dim)) - # avec = center-src -> minus sign from chain rule - return sum(-dir_vec[axis]*expr.diff(avec[axis]) - for axis in range(dimensions)) + mapper_method = "map_directional_source_derivative" # }}} + +# {{{ kernel mappers + +class KernelMapper(object): + def rec(self, kernel): + try: + method = getattr(self, kernel.mapper_method) + except AttributeError: + raise RuntimeError("%s cannot handle %s" % ( + type(self), type(kernel))) + else: + return method(kernel) + + __call__ = rec + + +class KernelCombineMapper(KernelMapper): + def map_difference_kernel(self, kernel): + return self.combine([ + self.rec(kernel.kernel_plus), + self.rec(kernel.kernel_minus)]) + + def map_axis_target_derivative(self, kernel): + return self.rec(kernel.inner_kernel) + + 
 
+
+# {{{ kernel mappers
+
+class KernelMapper(object):
+    def rec(self, kernel):
+        try:
+            method = getattr(self, kernel.mapper_method)
+        except AttributeError:
+            raise RuntimeError("%s cannot handle %s" % (
+                type(self), type(kernel)))
+        else:
+            return method(kernel)
+
+    __call__ = rec
+
+
+class KernelCombineMapper(KernelMapper):
+    def map_difference_kernel(self, kernel):
+        return self.combine([
+            self.rec(kernel.kernel_plus),
+            self.rec(kernel.kernel_minus)])
+
+    def map_axis_target_derivative(self, kernel):
+        return self.rec(kernel.inner_kernel)
+
+    map_directional_target_derivative = map_axis_target_derivative
+    map_directional_source_derivative = map_axis_target_derivative
+
+
+class KernelIdentityMapper(KernelMapper):
+    def map_laplace_kernel(self, kernel):
+        return kernel
+
+    def map_helmholtz_kernel(self, kernel):
+        return kernel
+
+    def map_difference_kernel(self, kernel):
+        return DifferenceKernel(
+                self.rec(kernel.kernel_plus), self.rec(kernel.kernel_minus))
+
+    def map_axis_target_derivative(self, kernel):
+        return AxisTargetDerivative(kernel.axis, self.rec(kernel.inner_kernel))
+
+    def map_directional_target_derivative(self, kernel):
+        return type(kernel)(
+                self.rec(kernel.inner_kernel),
+                kernel.dir_vec_name, kernel.dir_vec_data)
+
+    map_directional_source_derivative = map_directional_target_derivative
+
+
+class AxisTargetDerivativeRemover(KernelIdentityMapper):
+    def map_axis_target_derivative(self, kernel):
+        return self.rec(kernel.inner_kernel)
+
+
+class DerivativeCounter(KernelCombineMapper):
+    def combine(self, values):
+        return max(values)
+
+    def map_laplace_kernel(self, kernel):
+        return 0
+
+    def map_helmholtz_kernel(self, kernel):
+        return 0
+
+    def map_axis_target_derivative(self, kernel):
+        return 1 + self.rec(kernel.inner_kernel)
+
+    map_directional_target_derivative = map_axis_target_derivative
+    map_directional_source_derivative = map_axis_target_derivative
+
+
+class KernelDimensionSetter(KernelIdentityMapper):
+    def __init__(self, dim):
+        self.dim = dim
+
+    def map_laplace_kernel(self, kernel):
+        return LaplaceKernel(self.dim)
+
+    def map_helmholtz_kernel(self, kernel):
+        return HelmholtzKernel(self.dim, kernel.helmholtz_k_name,
+                kernel.allow_evanescent)
+
+# }}}
+
+
+def normalize_kernel(kernel_like):
+    if not isinstance(kernel_like, Kernel):
+        if kernel_like == 0:
+            kernel_like = LaplaceKernel()
+        elif isinstance(kernel_like, str):
+            kernel_like = HelmholtzKernel(None, kernel_like)
+        else:
+            raise ValueError("Only Kernel instances, 0 (for Laplace) and "
+                    "variable names (strings) "
+                    "for the Helmholtz parameter are allowed as kernels.")
+
+    return kernel_like
+
+
 
 # vim: fdm=marker
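[Illustration, editorial addition -- not part of the patch. The mapper classes added above dispatch on the new mapper_method attributes, and KernelDimensionSetter takes over the role of the removed per-kernel fix_dimensions() methods. A sketch under those assumptions:]

    from sumpy.kernel import (normalize_kernel, KernelDimensionSetter,
            AxisTargetDerivativeRemover, DerivativeCounter, AxisTargetDerivative)

    knl = normalize_kernel("k")           # Helmholtz kernel, dimension not yet set
    knl = KernelDimensionSetter(2)(knl)   # returns a new kernel with dim == 2
    dknl = AxisTargetDerivative(0, knl)

    assert DerivativeCounter()(dknl) == 1
    assert AxisTargetDerivativeRemover()(dknl) == knl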
diff --git a/sumpy/layerpot.py b/sumpy/layerpot.py
index 2b5be189..49c8e839 100644
--- a/sumpy/layerpot.py
+++ b/sumpy/layerpot.py
@@ -31,6 +31,9 @@ from pymbolic import parse, var
 
 from sumpy.tools import KernelComputation
 
+import logging
+logger = logging.getLogger(__name__)
+
 
 def stringify_expn_index(i):
     if isinstance(i, tuple):
@@ -71,7 +74,7 @@ class LayerPotentialBase(KernelComputation):
                 name, options, device)
 
         from pytools import single_valued
-        self.dimensions = single_valued(knl.dimensions for knl in self.expansions)
+        self.dim = single_valued(knl.dim for knl in self.expansions)
 
     @property
     def expansions(self):
@@ -81,14 +84,19 @@ class LayerPotentialBase(KernelComputation):
 
     def get_kernel(self):
         from sumpy.symbolic import make_sym_vector
-        avec = make_sym_vector("a", self.dimensions)
-        bvec = make_sym_vector("b", self.dimensions)
+        avec = make_sym_vector("a", self.dim)
+        bvec = make_sym_vector("b", self.dim)
 
         from sumpy.assignment_collection import SymbolicAssignmentCollection
         sac = SymbolicAssignmentCollection()
 
+        logger.info("expand kernels: start")
+
         result_names = [expand(i, sac, expn, avec, bvec)
                 for i, expn in enumerate(self.expansions)]
+
+        logger.info("expand kernels: done")
+
         sac.run_global_cse()
 
         from sumpy.symbolic import kill_trivial_assignments
@@ -101,8 +109,7 @@ class LayerPotentialBase(KernelComputation):
         loopy_insns = to_loopy_insns(assignments,
                 vector_names=set(["a", "b"]),
                 pymbolic_expr_maps=[
-                    expn.kernel.transform_to_code
-                    for expn in self.expansions],
+                    expn.kernel.transform_to_code for expn in self.expansions],
                 complex_dtype=np.complex128  # FIXME
                 )
 
@@ -116,22 +123,22 @@ class LayerPotentialBase(KernelComputation):
         arguments = (
                 [
                     lp.GlobalArg("src", geo_dtype,
-                        shape=("nsrc", self.dimensions), order="C"),
+                        shape=(self.dim, "nsrc"), order="C"),
                     lp.GlobalArg("tgt", geo_dtype,
-                        shape=("ntgt", self.dimensions), order="C"),
+                        shape=(self.dim, "ntgt"), order="C"),
                     lp.GlobalArg("center", geo_dtype,
-                        shape=("ntgt", self.dimensions), order="C"),
+                        shape=(self.dim, "ntgt"), order="C"),
                     lp.ValueArg("nsrc", np.int32),
                     lp.ValueArg("ntgt", np.int32),
                 ] + self.get_input_and_output_arguments()
                 + self.gather_kernel_arguments())
 
         loopy_knl = lp.make_kernel(self.device,
-                "[nsrc,ntgt] -> {[isrc,itgt,idim]: 0<=itgt<ntgt and 0<=isrc<nsrc "
-                "and 0<=idim<%d}" % self.dimensions,
+                "[nsrc,ntgt] -> {[isrc,itgt,idim]: 0<=itgt<ntgt and 0<=isrc<nsrc "
+                "and 0<=idim<%d}" % self.dim,
                 [
-                "<> a[idim] = center[itgt,idim] - src[isrc,idim] {id=compute_a}",
-                "<> b[idim] = tgt[itgt,idim] - center[itgt,idim] {id=compute_b}",
+                "<> a[idim] = center[idim,itgt] - src[idim,isrc] {id=compute_a}",
+                "<> b[idim] = tgt[idim,itgt] - center[idim,itgt] {id=compute_b}",
                 ]+self.get_kernel_scaling_assignments()+loopy_insns+[
                 lp.Instruction(id=None,
                     assignee="pair_result_%d" % i, expression=expr,
@@ -145,11 +152,14 @@ class LayerPotentialBase(KernelComputation):
                 )
 
         for where in ["compute_a", "compute_b"]:
-            loopy_knl = lp.duplicate_inames(loopy_knl, "idim", where)
+            loopy_knl = lp.duplicate_inames(loopy_knl, "idim", where,
+                    tags={"idim": "unr"})
 
         for expn in self.expansions:
             loopy_knl = expn.prepare_loopy_kernel(loopy_knl)
 
+        loopy_knl = lp.tag_data_axis(loopy_knl, "center", 0, "sep")
+
         return loopy_knl
 
     def get_optimized_kernel(self):
@@ -344,11 +354,11 @@ class _JumpTermSymbolicArgumentProvider(object):
     resulting computational kernel.
     """
 
-    def __init__(self, data_args, dimensions, density_var_name,
+    def __init__(self, data_args, dim, density_var_name,
            density_dtype, geometry_dtype):
         # dictionary of loopy arguments
         self.arguments = data_args
-        self.dimensions = dimensions
+        self.dim = dim
         self.density_var_name = density_var_name
         self.density_dtype = density_dtype
         self.geometry_dtype = geometry_dtype
@@ -382,22 +392,22 @@ class _JumpTermSymbolicArgumentProvider(object):
     def normal(self):
         self.arguments["normal"] = \
                 lp.GlobalArg("normal", self.geometry_dtype,
-                        shape=("ntgt", self.dimensions), order="C")
+                        shape=("ntgt", self.dim), order="C")
        from pytools.obj_array import make_obj_array
        return make_obj_array([
            parse("normal[itgt, %d]" % i)
-           for i in range(self.dimensions)])
+           for i in range(self.dim)])
 
     @property
     @memoize_method
     def tangent(self):
         self.arguments["tangent"] = \
                 lp.GlobalArg("tangent", self.geometry_dtype,
-                        shape=("ntgt", self.dimensions), order="C")
+                        shape=("ntgt", self.dim), order="C")
        from pytools.obj_array import make_obj_array
        return make_obj_array([
            parse("tangent[itgt, %d]" % i)
-           for i in range(self.dimensions)])
+           for i in range(self.dim)])
 
     @property
     @memoize_method
@@ -413,24 +423,24 @@ class _JumpTermSymbolicArgumentProvider(object):
     def src_derivative_dir(self):
         self.arguments["src_derivative_dir"] = \
                 lp.GlobalArg("src_derivative_dir",
-                        self.geometry_dtype, shape=("ntgt", self.dimensions),
+                        self.geometry_dtype, shape=("ntgt", self.dim),
                         order="C")
        from pytools.obj_array import make_obj_array
        return make_obj_array([
            parse("src_derivative_dir[itgt, %d]" % i)
-           for i in range(self.dimensions)])
+           for i in range(self.dim)])
 
     @property
     @memoize_method
     def tgt_derivative_dir(self):
         self.arguments["tgt_derivative_dir"] = \
                 lp.GlobalArg("tgt_derivative_dir",
-                        self.geometry_dtype, shape=("ntgt", self.dimensions),
+                        self.geometry_dtype, shape=("ntgt", self.dim),
                        order="C")
        from pytools.obj_array import make_obj_array
        return make_obj_array([
            parse("tgt_derivative_dir[itgt, %d]" % i)
-           for i in range(self.dimensions)])
+           for i in range(self.dim)])
 
 # }}}
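[Editorial note -- not part of the patch. The argument declarations above switch src/tgt/center to a structure-of-arrays layout: shape (dim, n) instead of (n, dim), with the center axis additionally tagged "sep". Callers therefore pass transposed geometry arrays; hypothetical shapes for illustration:]

    import numpy as np

    dim, nsrc, ntgt = 2, 100, 50
    src = np.empty((dim, nsrc))      # was (nsrc, dim) before this change
    tgt = np.empty((dim, ntgt))
    center = np.empty((dim, ntgt))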
diff --git a/sumpy/p2p.py b/sumpy/p2p.py
index 28bb44f7..c54703a4 100644
--- a/sumpy/p2p.py
+++ b/sumpy/p2p.py
@@ -50,12 +50,12 @@ class P2P(KernelComputation):
         self.exclude_self = exclude_self
 
         from pytools import single_valued
-        self.dimensions = single_valued(knl.dimensions for knl in self.kernels)
+        self.dim = single_valued(knl.dim for knl in self.kernels)
 
     @memoize_method
     def get_kernel(self):
         from sumpy.symbolic import make_sym_vector
-        dvec = make_sym_vector("d", self.dimensions)
+        dvec = make_sym_vector("d", self.dim)
 
         from sumpy.assignment_collection import SymbolicAssignmentCollection
         sac = SymbolicAssignmentCollection()
@@ -86,9 +86,9 @@ class P2P(KernelComputation):
         arguments = (
                 [
                     lp.GlobalArg("src", self.geometry_dtype,
-                        shape=(self.dimensions, "nsrc"), order="C"),
+                        shape=(self.dim, "nsrc"), order="C"),
                     lp.GlobalArg("tgt", self.geometry_dtype,
-                        shape=(self.dimensions, "ntgt"), order="C"),
+                        shape=(self.dim, "ntgt"), order="C"),
                     lp.ValueArg("nsrc", np.int32),
                     lp.ValueArg("ntgt", np.int32),
                 ]+[
@@ -108,8 +108,8 @@ class P2P(KernelComputation):
                 for expr in exprs]
 
         loopy_knl = lp.make_kernel(self.device,
-                "[nsrc,ntgt] -> {[isrc,itgt,idim]: 0<=itgt