From d57d3906987a8ccd5fe71ba5af2eef0bbce81ad9 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Mon, 26 Mar 2018 01:17:28 -0500 Subject: [PATCH 01/41] Introduce a smarter quadrature resolution measure This measure is aware of element aspect-ratios in 3D and and varying 'parametrization speeds' across an element. It's also computed using the regular expression evaluator, not with custom machinery. The evaluator was made smarter as needed--such as by allowing it to operate on the 'stage 2' discretization of a layer potential source. This quadrature resolution measure replaces panel sizes where appropriate. --- pytential/qbx/__init__.py | 68 ++++++++++-- pytential/qbx/refinement.py | 7 +- pytential/qbx/utils.py | 92 +++++++--------- pytential/symbolic/execution.py | 42 ++++++++ pytential/symbolic/mappers.py | 17 +++ pytential/symbolic/primitives.py | 176 +++++++++++++++++++++++++++---- test/test_global_qbx.py | 17 +-- 7 files changed, 321 insertions(+), 98 deletions(-) diff --git a/pytential/qbx/__init__.py b/pytential/qbx/__init__.py index d5d52ca6..848eeed1 100644 --- a/pytential/qbx/__init__.py +++ b/pytential/qbx/__init__.py @@ -399,8 +399,8 @@ class QBXLayerPotentialSource(LayerPotentialSourceBase): @memoize_method def h_max(self): with cl.CommandQueue(self.cl_context) as queue: - panel_sizes = self._panel_sizes("npanels").with_queue(queue) - return np.asscalar(cl.array.max(panel_sizes).get()) + quad_res = self._coarsest_quad_resolution("npanels").with_queue(queue) + return np.asscalar(cl.array.max(quad_res).get()) # {{{ internal API @@ -410,7 +410,7 @@ class QBXLayerPotentialSource(LayerPotentialSourceBase): return utils.element_centers_of_mass(self.density_discr) @memoize_method - def _fine_panel_centers_of_mass(self): + def _stage2_panel_centers_of_mass(self): import pytential.qbx.utils as utils return utils.element_centers_of_mass(self.stage2_density_discr) @@ -422,29 +422,77 @@ class QBXLayerPotentialSource(LayerPotentialSourceBase): "not allowed. Allowed values are 'nsources' and 'ncenters'.") with cl.CommandQueue(self.cl_context) as queue: - return (self._panel_sizes(last_dim_length).with_queue(queue) * 0.5 - ).with_queue(None) + return (self._coarsest_quad_resolution(last_dim_length) + .with_queue(queue) + * 0.5).with_queue(None) # _expansion_radii should not be needed for the fine discretization @memoize_method def _close_target_tunnel_radius(self, last_dim_length): with cl.CommandQueue(self.cl_context) as queue: - return (self._panel_sizes(last_dim_length).with_queue(queue) * 0.5 + return ( + self._expansion_radii(last_dim_length).with_queue(queue) + * 0.5 ).with_queue(None) @memoize_method - def _panel_sizes(self, last_dim_length="npanels"): + def _coarsest_quad_resolution(self, last_dim_length="npanels"): + """This measures the quadrature resolution across the + mesh. In a 1D uniform mesh of uniform 'parametrization speed', it + should be the same as the panel length. + + It is empirically about a factor of 1.2 larger than sym._panel_size for + an ellipse, presumably because it uses the largest 'parametrization + speed'/'stretch factor' across the whole element. 
+ """ import pytential.qbx.utils as utils - return utils.panel_sizes(self.density_discr, last_dim_length) + from pytential import sym, bind + with cl.CommandQueue(self.cl_context) as queue: + maxstretch = bind( + self, sym.mapping_max_stretch_factor(self.ambient_dim))(queue) + + maxstretch = utils.to_last_dim_length( + self.density_discr, maxstretch, last_dim_length) + maxstretch = maxstretch.with_queue(None) + + return maxstretch @memoize_method - def _fine_panel_sizes(self, last_dim_length="npanels"): + def _stage2_coarsest_quad_resolution(self, last_dim_length="npanels"): + """This measures the quadrature resolution across the + mesh. In a 1D uniform mesh of uniform 'parametrization speed', it + should be the same as the panel length. + + It is empirically about a factor of 1.2 larger than sym._panel_size for + an ellipse, presumably because it uses the largest 'parametrization + speed'/'stretch factor' across the whole element. + """ if last_dim_length != "npanels": + # Not technically required below, but no need to loosen for now. raise NotImplementedError() import pytential.qbx.utils as utils - return utils.panel_sizes(self.stage2_density_discr, last_dim_length) + from pytential import sym, bind + with cl.CommandQueue(self.cl_context) as queue: + maxstretch = bind( + self, sym.mapping_max_stretch_factor( + self.ambient_dim, + where=sym._QBXSourceStage2(sym.DEFAULT_SOURCE)) + )(queue) + maxstretch = utils.to_last_dim_length( + self.stage2_density_discr, maxstretch, last_dim_length) + maxstretch = maxstretch.with_queue(None) + + return maxstretch + + @memoize_method + def _source_danger_zone_radii(self, last_dim_length="npanels"): + quad_res = self._stage2_coarsest_quad_resolution(last_dim_length) + with cl.CommandQueue(self.cl_context) as queue: + return (quad_res + .with_queue(queue) + * 0.25).with_queue(None) @memoize_method def qbx_fmm_geometry_data(self, target_discrs_and_qbx_sides): diff --git a/pytential/qbx/refinement.py b/pytential/qbx/refinement.py index 08539c68..2a1d8b7f 100644 --- a/pytential/qbx/refinement.py +++ b/pytential/qbx/refinement.py @@ -365,9 +365,8 @@ class RefinerWrangler(TreeWranglerBase): found_panel_to_refine = cl.array.zeros(self.queue, 1, np.int32) found_panel_to_refine.finish() - source_danger_zone_radii_by_panel = ( - lpot_source._fine_panel_sizes("npanels") - .with_queue(self.queue) / 4) + source_danger_zone_radii_by_panel = \ + lpot_source._source_danger_zone_radii("npanels") unwrap_args = AreaQueryElementwiseTemplate.unwrap_args evt = knl( @@ -410,7 +409,7 @@ class RefinerWrangler(TreeWranglerBase): npanels_to_refine_prev = cl.array.sum(refine_flags).get() evt, out = knl(self.queue, - panel_sizes=lpot_source._panel_sizes("npanels"), + panel_sizes=lpot_source._coarsest_quad_resolution("npanels"), refine_flags=refine_flags, refine_flags_updated=np.array(0), kernel_length_scale=np.array(kernel_length_scale), diff --git a/pytential/qbx/utils.py b/pytential/qbx/utils.py index 6673b450..84626479 100644 --- a/pytential/qbx/utils.py +++ b/pytential/qbx/utils.py @@ -205,68 +205,51 @@ class TreeWranglerBase(object): # }}} -# {{{ panel sizes +# {{{ to_last_dim_length -def panel_sizes(discr, last_dim_length): - if last_dim_length not in ("nsources", "ncenters", "npanels"): - raise ValueError( - "invalid value of last_dim_length: %s" % last_dim_length) +def to_last_dim_length(discr, vec, last_dim_length, queue=None): + """Takes a :class:`pyopencl.array.Array` with a last axis that has the same + length as the number of discretization nodes in the discretization 
*discr* + and converts it so that the last axis has a length as specified by + *last_dim_length*. + """ - # To get the panel size this does the equivalent of (∫ 1 ds)**(1/dim). - # FIXME: Kernel optimizations + queue = queue or vec.queue - if last_dim_length == "nsources" or last_dim_length == "ncenters": - knl = lp.make_kernel( - "{[i,j,k]: 0<=i= h / 4, \ - (dist, h, centers_panel.element_nr, sources_panel.element_nr) + assert dist >= dz_radius, \ + (dist, dz_radius, centers_panel.element_nr, sources_panel.element_nr) - def check_panel_size_to_helmholtz_k_ratio(panel): + def check_quad_res_to_helmholtz_k_ratio(panel): # Check wavenumber to panel size ratio. - assert panel_sizes[panel.element_nr] * helmholtz_k <= 5 + assert quad_res[panel.element_nr] * helmholtz_k <= 5 for i, panel_1 in enumerate(iter_elements(lpot_source.density_discr)): for panel_2 in iter_elements(lpot_source.density_discr): @@ -187,7 +188,7 @@ def run_source_refinement_test(ctx_getter, mesh, order, helmholtz_k=None): for panel_2 in iter_elements(lpot_source.quad_stage2_density_discr): check_sufficient_quadrature_resolution(panel_1, panel_2) if helmholtz_k is not None: - check_panel_size_to_helmholtz_k_ratio(panel_1) + check_quad_res_to_helmholtz_k_ratio(panel_1) # }}} -- GitLab From c0b6a434c19543d6d7cd81896d15f1ec46b67288 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Mon, 26 Mar 2018 15:39:44 -0500 Subject: [PATCH 02/41] Bump oversampling for inteq test --- test/test_scalar_int_eq.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/test_scalar_int_eq.py b/test/test_scalar_int_eq.py index f10f471e..b15905d3 100644 --- a/test/test_scalar_int_eq.py +++ b/test/test_scalar_int_eq.py @@ -303,7 +303,7 @@ def run_int_eq_test(cl_ctx, queue, case, resolution, visualize): cl_ctx, mesh, InterpolatoryQuadratureSimplexGroupFactory(case.target_order)) - source_order = 4*case.target_order + source_order = 5*case.target_order refiner_extra_kwargs = {} -- GitLab From a7efc51dc1d37ee4601d80aab1316ebc13b3afc3 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Mon, 26 Mar 2018 15:41:05 -0500 Subject: [PATCH 03/41] Don't use unary + on pymbolic expressions --- pytential/symbolic/primitives.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pytential/symbolic/primitives.py b/pytential/symbolic/primitives.py index 23b3e85d..d949adfa 100644 --- a/pytential/symbolic/primitives.py +++ b/pytential/symbolic/primitives.py @@ -531,7 +531,7 @@ def _small_mat_eigenvalues(mat): (a, b), (c, d) = mat return make_obj_array([ -(sqrt(d**2-2*a*d+4*b*c+a**2)-d-a)/2, - +(sqrt(d**2-2*a*d+4*b*c+a**2)+d+a)/2 + (sqrt(d**2-2*a*d+4*b*c+a**2)+d+a)/2 ]) else: raise NotImplementedError( -- GitLab From 905a1282f120b9da198c98305ae1229afc5762b5 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Mon, 26 Mar 2018 23:05:05 -0500 Subject: [PATCH 04/41] Adequately exec-map pymbolic's Min/Max nodes --- pytential/symbolic/execution.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/pytential/symbolic/execution.py b/pytential/symbolic/execution.py index 88d85c4a..f07b1036 100644 --- a/pytential/symbolic/execution.py +++ b/pytential/symbolic/execution.py @@ -53,6 +53,26 @@ class EvaluationMapper(EvaluationMapperBase): # {{{ map_XXX + def _map_minmax(self, func, inherited, expr): + ev_children = [self.rec(ch) for ch in expr.children] + from functools import reduce, partial + if any(isinstance(ch, cl.array.Array) for ch in ev_children): + return reduce(partial(func, queue=self.queue), ev_children) + 
else: + return inherited(expr) + + def map_max(self, expr): + return self._map_minmax( + cl.array.maximum, + super(EvaluationMapper, self).map_max, + expr) + + def map_min(self, expr): + return self._map_minmax( + cl.array.minimum, + super(EvaluationMapper, self).map_min, + expr) + def map_node_sum(self, expr): return cl.array.sum(self.rec(expr.operand)).get()[()] -- GitLab From f4c0ce1749c30a566ad541aed43b1c7d75f0b1ca Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Mon, 26 Mar 2018 23:05:40 -0500 Subject: [PATCH 05/41] Elwise reductions: check input size --- pytential/symbolic/execution.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pytential/symbolic/execution.py b/pytential/symbolic/execution.py index f07b1036..110a89b5 100644 --- a/pytential/symbolic/execution.py +++ b/pytential/symbolic/execution.py @@ -95,6 +95,8 @@ class EvaluationMapper(EvaluationMapperBase): operand = self.rec(expr.operand) + assert operand.shape == (discr.nnodes,) + result = cl.array.empty(self.queue, discr.nnodes, operand.dtype) for group in discr.groups: knl()(self.queue, -- GitLab From 58c6983a652d7c9f97fc4099e57886ae66ef39d9 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Mon, 26 Mar 2018 23:06:16 -0500 Subject: [PATCH 06/41] BoundExpression.get_discretization: better naming in stage 2 getting --- pytential/symbolic/execution.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pytential/symbolic/execution.py b/pytential/symbolic/execution.py index 110a89b5..7f4194cd 100644 --- a/pytential/symbolic/execution.py +++ b/pytential/symbolic/execution.py @@ -314,8 +314,8 @@ class BoundExpression: def get_discretization(self, where): from pytential.symbolic.primitives import _QBXSourceStage2 if isinstance(where, _QBXSourceStage2): - discr = self.places[where.where] - return discr.stage2_density_discr + lpot_source = self.places[where.where] + return lpot_source.stage2_density_discr discr = self.places[where] -- GitLab From 1efabd643978b7477ee95e1993ce16b7af89fdb8 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Mon, 26 Mar 2018 23:06:54 -0500 Subject: [PATCH 07/41] Implement stringification of stage-2 where and elwise reductions --- pytential/symbolic/mappers.py | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/pytential/symbolic/mappers.py b/pytential/symbolic/mappers.py index 6510f957..680c8c97 100644 --- a/pytential/symbolic/mappers.py +++ b/pytential/symbolic/mappers.py @@ -453,6 +453,9 @@ class QBXPreprocessor(IdentityMapper): # {{{ stringifier def stringify_where(where): + if isinstance(where, prim._QBXSourceStage2): + return "stage2(%s)" % stringify_where(where.where) + if where is None: return "?" 
elif where is prim.DEFAULT_SOURCE: @@ -482,8 +485,20 @@ class StringifyMapper(BaseStringifyMapper): for name_expr in six.itervalues(expr.extra_vars)), set()) - def map_node_sum(self, expr, enclosing_prec): - return "NodeSum(%s)" % self.rec(expr.operand, PREC_NONE) + def map_elementwise_sum(self, expr, enclosing_prec): + return "ElwiseSum.%s(%s)" % ( + stringify_where(expr.where), + self.rec(expr.operand, PREC_NONE)) + + def map_elementwise_min(self, expr, enclosing_prec): + return "ElwiseMin.%s(%s)" % ( + stringify_where(expr.where), + self.rec(expr.operand, PREC_NONE)) + + def map_elementwise_max(self, expr, enclosing_prec): + return "ElwiseMax.%s(%s)" % ( + stringify_where(expr.where), + self.rec(expr.operand, PREC_NONE)) def map_node_max(self, expr, enclosing_prec): return "NodeMax(%s)" % self.rec(expr.operand, PREC_NONE) -- GitLab From 2e21d1851019006caee0543077e87f2dea76d0b5 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Mon, 26 Mar 2018 23:07:31 -0500 Subject: [PATCH 08/41] parametrization_derivative_matrix: pass where on to reference_jacobian --- pytential/symbolic/primitives.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pytential/symbolic/primitives.py b/pytential/symbolic/primitives.py index d949adfa..54d66003 100644 --- a/pytential/symbolic/primitives.py +++ b/pytential/symbolic/primitives.py @@ -420,7 +420,7 @@ def parametrization_derivative_matrix(ambient_dim, dim, where=None): return cse( reference_jacobian( [NodeCoordinateComponent(i, where) for i in range(ambient_dim)], - ambient_dim, dim), + ambient_dim, dim, where=where), "pd_matrix", cse_scope.DISCRETIZATION) -- GitLab From b6f04da407b0d6437d0bda536ad1f6d15e3bbe5c Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Mon, 26 Mar 2018 23:08:00 -0500 Subject: [PATCH 09/41] mapping_max_stretch_factor: pass where on to ElementwiseMax --- pytential/symbolic/primitives.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/pytential/symbolic/primitives.py b/pytential/symbolic/primitives.py index 54d66003..1c51d270 100644 --- a/pytential/symbolic/primitives.py +++ b/pytential/symbolic/primitives.py @@ -558,7 +558,10 @@ def mapping_max_stretch_factor(ambient_dim, dim=None, where=None): 2 * _parametrization_jtj(ambient_dim, dim, where)))] from pymbolic.primitives import Max - return cse(ElementwiseMax(Max(tuple(stretch_factors))), + return cse( + ElementwiseMax( + Max(tuple(stretch_factors)), + where=where), "mapping_max_stretch", cse_scope.DISCRETIZATION) # }}} -- GitLab From b94a041f04688a4914197e4091a555df8afb5bed Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Mon, 26 Mar 2018 23:21:35 -0500 Subject: [PATCH 10/41] mapping_max_stretch_factor: Add explanatory comment --- pytential/symbolic/primitives.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/pytential/symbolic/primitives.py b/pytential/symbolic/primitives.py index 1c51d270..0a82acf9 100644 --- a/pytential/symbolic/primitives.py +++ b/pytential/symbolic/primitives.py @@ -549,6 +549,15 @@ def mapping_max_stretch_factor(ambient_dim, dim=None, where=None): if dim is None: dim = ambient_dim - 1 + # The 'technique' here is ad-hoc, but I'm fairly confident it's better than + # what we had. The idea is that singular values of the mapping Jacobian + # yield "stretch factors" of the mapping Why? because it maps a right + # singular vector $`v_1`$ (of unit length) to $`\sigma_1 u_1`$, where + # $`u_1`$ is the corresponding left singular vector (also of unit length). 
+ # And so the biggest one tells us about the direction with the 'biggest' + # stretching, where 'stretching' (*2 to remove bi-unit reference element) + # reflects available quadrature resolution in that direction. + stretch_factors = [ cse(sqrt(s), "mapping_singval_%d" % i, cse_scope.DISCRETIZATION) for i, s in enumerate( -- GitLab From 681ff389b4cc549d34cfe24269044ba59668166a Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Mon, 26 Mar 2018 23:52:45 -0500 Subject: [PATCH 11/41] Typo fix --- pytential/symbolic/primitives.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pytential/symbolic/primitives.py b/pytential/symbolic/primitives.py index 0a82acf9..94f4228e 100644 --- a/pytential/symbolic/primitives.py +++ b/pytential/symbolic/primitives.py @@ -209,7 +209,7 @@ class _QBXSourceStage2(object): .. note:: - This is not documented functioanlity and only intended for + This is not documented functionality and only intended for internal use. """ -- GitLab From 332fa7fa95eaf010269edbf956e89398e44de620 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Wed, 28 Mar 2018 02:13:21 -0500 Subject: [PATCH 12/41] Bump FMM order in eigenvalues test --- test/test_layer_pot_eigenvalues.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/test_layer_pot_eigenvalues.py b/test/test_layer_pot_eigenvalues.py index e60b72bf..b8c9c9fd 100644 --- a/test/test_layer_pot_eigenvalues.py +++ b/test/test_layer_pot_eigenvalues.py @@ -100,7 +100,7 @@ def test_ellipse_eigenvalues(ctx_getter, ellipse_aspect, mode_nr, qbx_order, np.linspace(0, 1, nelements+1), target_order) - fmm_order = 10 + fmm_order = 12 if force_direct: fmm_order = False -- GitLab From 8664e79787cc9b60541a5180bea0dde8414977c2 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Wed, 28 Mar 2018 02:13:38 -0500 Subject: [PATCH 13/41] Revert oversampling bump in inteq test --- test/test_scalar_int_eq.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/test_scalar_int_eq.py b/test/test_scalar_int_eq.py index b15905d3..f10f471e 100644 --- a/test/test_scalar_int_eq.py +++ b/test/test_scalar_int_eq.py @@ -303,7 +303,7 @@ def run_int_eq_test(cl_ctx, queue, case, resolution, visualize): cl_ctx, mesh, InterpolatoryQuadratureSimplexGroupFactory(case.target_order)) - source_order = 5*case.target_order + source_order = 4*case.target_order refiner_extra_kwargs = {} -- GitLab From 46eb917584710d8c23ba43725fd0e8e16dc49a21 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Wed, 28 Mar 2018 02:14:00 -0500 Subject: [PATCH 14/41] Fix typo in run_source_refinement_test --- test/test_global_qbx.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/test_global_qbx.py b/test/test_global_qbx.py index c20e3c87..f3a21663 100644 --- a/test/test_global_qbx.py +++ b/test/test_global_qbx.py @@ -124,7 +124,7 @@ def run_source_refinement_test(ctx_getter, mesh, order, helmholtz_k=None): expansion_radii = lpot_source._expansion_radii("nsources").get(queue) quad_res = lpot_source._coarsest_quad_resolution("npanels").get(queue) source_danger_zone_radii = \ - lpot_source._source_danger_zone_radii("npanels").get("queue") + lpot_source._source_danger_zone_radii("npanels").get(queue) # {{{ check if satisfying criteria -- GitLab From e9a2339a5c15fe61e1817fe18b0ad32a915e7cf1 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Wed, 28 Mar 2018 02:17:23 -0500 Subject: [PATCH 15/41] Apply fudge factor to stretch-based quad resolution measure --- pytential/qbx/__init__.py | 13 ++++++++++++- 1 file 
changed, 12 insertions(+), 1 deletion(-) diff --git a/pytential/qbx/__init__.py b/pytential/qbx/__init__.py index 848eeed1..a0ca0469 100644 --- a/pytential/qbx/__init__.py +++ b/pytential/qbx/__init__.py @@ -449,8 +449,19 @@ class QBXLayerPotentialSource(LayerPotentialSourceBase): import pytential.qbx.utils as utils from pytential import sym, bind with cl.CommandQueue(self.cl_context) as queue: + # These fudge factors bring this measure to the same rough magnitude + # as the prior (el area)**(1/dim) measure. + if self.density_discr.dim == 1: + fudge_factor = 1.3 + elif self.density_discr.dim == 2: + fudge_factor = 0.7 + else: + fudge_factor = 1 # ?? + maxstretch = bind( - self, sym.mapping_max_stretch_factor(self.ambient_dim))(queue) + self, + fudge_factor*sym.mapping_max_stretch_factor(self.ambient_dim) + )(queue) maxstretch = utils.to_last_dim_length( self.density_discr, maxstretch, last_dim_length) -- GitLab From e800329c5ef991b62f88cac4b8f889e7b5760bba Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Mon, 2 Apr 2018 00:00:19 -0500 Subject: [PATCH 16/41] Fix 3D quadrature resolution computation to use equilateral reference triangles --- pytential/qbx/__init__.py | 17 +++++------ pytential/symbolic/primitives.py | 52 ++++++++++++++++++-------------- 2 files changed, 37 insertions(+), 32 deletions(-) diff --git a/pytential/qbx/__init__.py b/pytential/qbx/__init__.py index a0ca0469..fca64009 100644 --- a/pytential/qbx/__init__.py +++ b/pytential/qbx/__init__.py @@ -449,18 +449,15 @@ class QBXLayerPotentialSource(LayerPotentialSourceBase): import pytential.qbx.utils as utils from pytential import sym, bind with cl.CommandQueue(self.cl_context) as queue: - # These fudge factors bring this measure to the same rough magnitude - # as the prior (el area)**(1/dim) measure. - if self.density_discr.dim == 1: - fudge_factor = 1.3 - elif self.density_discr.dim == 2: - fudge_factor = 0.7 - else: - fudge_factor = 1 # ?? + # Potential FIXME: A triangle has half the area of a square, + # so the prior (area)**(1/dim) quadrature resolution measure + # may be viewed as having an extraneous factor of 1/sqrt(2) + # for triangles. maxstretch = bind( self, - fudge_factor*sym.mapping_max_stretch_factor(self.ambient_dim) + sym._simplex_mapping_max_stretch_factor( + self.ambient_dim) )(queue) maxstretch = utils.to_last_dim_length( @@ -487,7 +484,7 @@ class QBXLayerPotentialSource(LayerPotentialSourceBase): from pytential import sym, bind with cl.CommandQueue(self.cl_context) as queue: maxstretch = bind( - self, sym.mapping_max_stretch_factor( + self, sym._simplex_mapping_max_stretch_factor( self.ambient_dim, where=sym._QBXSourceStage2(sym.DEFAULT_SOURCE)) )(queue) diff --git a/pytential/symbolic/primitives.py b/pytential/symbolic/primitives.py index 94f4228e..f7a61133 100644 --- a/pytential/symbolic/primitives.py +++ b/pytential/symbolic/primitives.py @@ -508,18 +508,6 @@ def _panel_size(ambient_dim, dim=None, where=None): * QWeight())**(1/dim) -def _parametrization_jtj(ambient_dim, dim=None, where=None): - """For the mapping Jacobian :math:`J` as returned by - :func:`parametrization_derivative`, return :math:`J^TJ`. 
- """ - - pder_mat = parametrization_derivative_matrix(ambient_dim, dim, where) - - return cse( - np.dot(pder_mat.T, pder_mat), - "pd_mat_jtj", cse_scope.DISCRETIZATION) - - def _small_mat_eigenvalues(mat): m, n = mat.shape if m != n: @@ -538,7 +526,8 @@ def _small_mat_eigenvalues(mat): "eigenvalue formula for %dx%d matrices" % (m, n)) -def mapping_max_stretch_factor(ambient_dim, dim=None, where=None): +def _simplex_mapping_max_stretch_factor(ambient_dim, dim=None, where=None, + with_elementwise_max=True): """Return the largest factor by which the reference-to-global mapping stretches the bi-unit (i.e. :math:`[-1,1]`) reference element along any axis. @@ -558,20 +547,39 @@ def mapping_max_stretch_factor(ambient_dim, dim=None, where=None): # stretching, where 'stretching' (*2 to remove bi-unit reference element) # reflects available quadrature resolution in that direction. + pder_mat = parametrization_derivative_matrix(ambient_dim, dim, where) + + # The above procedure works well only when the 'reference' end of the + # mapping is in equilateral coordinates. + from modepy.tools import EQUILATERAL_TO_UNIT_MAP + equi_to_unit = EQUILATERAL_TO_UNIT_MAP[dim].a + + # This is the Jacobian of the (equilateral reference element) -> (global) map. + equi_pder_mat = cse( + np.dot(pder_mat, equi_to_unit), + "equilateral_pder_mat") + + # Compute eigenvalues of J^T to compute SVD. + equi_pder_mat_jtj = cse( + np.dot(equi_pder_mat.T, equi_pder_mat), + "pd_mat_jtj") + stretch_factors = [ - cse(sqrt(s), "mapping_singval_%d" % i, cse_scope.DISCRETIZATION) + cse(sqrt(s), "mapping_singval_%d" % i) for i, s in enumerate( _small_mat_eigenvalues( - # Multiply by 2 to compensate for bi-unit (i.e. [-1,1]) - # reference elements. - 2 * _parametrization_jtj(ambient_dim, dim, where)))] + # Multiply by 4 to compensate for equilateral reference + # elements of side length 2. (J^T J contains two factors of + # two.) + 4 * equi_pder_mat_jtj))] from pymbolic.primitives import Max - return cse( - ElementwiseMax( - Max(tuple(stretch_factors)), - where=where), - "mapping_max_stretch", cse_scope.DISCRETIZATION) + result = Max(tuple(stretch_factors)) + + if with_elementwise_max: + result = ElementwiseMax(result, where=where) + + return cse(result, "mapping_max_stretch", cse_scope.DISCRETIZATION) # }}} -- GitLab From 84b36e871f1f1a65e88f8c6cf2e8171a29a36369 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Mon, 2 Apr 2018 14:56:47 -0500 Subject: [PATCH 17/41] Add fudge factor to expansion radii in 3D to match prior scaling --- pytential/qbx/__init__.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/pytential/qbx/__init__.py b/pytential/qbx/__init__.py index fca64009..1c449a65 100644 --- a/pytential/qbx/__init__.py +++ b/pytential/qbx/__init__.py @@ -421,10 +421,19 @@ class QBXLayerPotentialSource(LayerPotentialSourceBase): "Passing 'npanels' as last_dim_length to _expansion_radii is " "not allowed. Allowed values are 'nsources' and 'ncenters'.") + if self.density_discr.dim == 2: + # A triangle has half the area of a square, + # so the prior (area)**(1/dim) quadrature resolution measure + # may be viewed as having an extraneous factor of 1/sqrt(2) + # for triangles. 
+ fudge_factor = 0.5 + else: + fudge_factor = 1 + with cl.CommandQueue(self.cl_context) as queue: return (self._coarsest_quad_resolution(last_dim_length) .with_queue(queue) - * 0.5).with_queue(None) + * 0.5 * fudge_factor).with_queue(None) # _expansion_radii should not be needed for the fine discretization @@ -449,11 +458,6 @@ class QBXLayerPotentialSource(LayerPotentialSourceBase): import pytential.qbx.utils as utils from pytential import sym, bind with cl.CommandQueue(self.cl_context) as queue: - # Potential FIXME: A triangle has half the area of a square, - # so the prior (area)**(1/dim) quadrature resolution measure - # may be viewed as having an extraneous factor of 1/sqrt(2) - # for triangles. - maxstretch = bind( self, sym._simplex_mapping_max_stretch_factor( -- GitLab From 40fcbef9a10ad9c306832c921ecc5fd67dbfe767 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Mon, 2 Apr 2018 14:57:10 -0500 Subject: [PATCH 18/41] Tweak plotting of internal data structures (for debugging) --- pytential/qbx/__init__.py | 2 ++ pytential/qbx/geometry.py | 10 +++++++--- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/pytential/qbx/__init__.py b/pytential/qbx/__init__.py index 1c449a65..77384bb1 100644 --- a/pytential/qbx/__init__.py +++ b/pytential/qbx/__init__.py @@ -649,6 +649,8 @@ class QBXLayerPotentialSource(LayerPotentialSourceBase): geo_data = self.qbx_fmm_geometry_data(target_discrs_and_qbx_sides) + # geo_data.plot() + # FIXME Exert more positive control over geo_data attribute lifetimes using # geo_data..clear_cache(geo_data). diff --git a/pytential/qbx/geometry.py b/pytential/qbx/geometry.py index f9cc10e0..4c946e80 100644 --- a/pytential/qbx/geometry.py +++ b/pytential/qbx/geometry.py @@ -843,12 +843,14 @@ class QBXFMMGeometryData(object): This only works for two-dimensional geometries. 
""" + import matplotlib.pyplot as pt + pt.clf() + dims = self.tree().targets.shape[0] if dims != 2: raise ValueError("only 2-dimensional geometry info can be plotted") with cl.CommandQueue(self.cl_context) as queue: - import matplotlib.pyplot as pt from meshmode.discretization.visualization import draw_curve draw_curve(self.lpot_source.quad_stage2_density_discr) @@ -933,8 +935,10 @@ class QBXFMMGeometryData(object): # }}} pt.gca().set_aspect("equal") - pt.legend() - pt.show() + #pt.legend() + pt.savefig( + "geodata-stage2-nelem%d.pdf" + % self.lpot_source.stage2_density_discr.mesh.nelements) # }}} -- GitLab From 3052ba06940705c77b6e80804225ed5e7df7224f Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Mon, 2 Apr 2018 14:58:15 -0500 Subject: [PATCH 19/41] Move two tests further down the asymptote to hit EOC targets --- test/test_layer_pot_identity.py | 2 +- test/test_scalar_int_eq.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/test/test_layer_pot_identity.py b/test/test_layer_pot_identity.py index c019f930..942b6ed2 100644 --- a/test/test_layer_pot_identity.py +++ b/test/test_layer_pot_identity.py @@ -82,7 +82,7 @@ class StarfishGeometry(object): dim = 2 - resolutions = [30, 50, 70] + resolutions = [30, 50, 70, 90] def get_mesh(self, nelements, target_order): return make_curve_mesh( diff --git a/test/test_scalar_int_eq.py b/test/test_scalar_int_eq.py index f10f471e..0d9fca28 100644 --- a/test/test_scalar_int_eq.py +++ b/test/test_scalar_int_eq.py @@ -83,7 +83,7 @@ class IntEqTestCase: class CurveIntEqTestCase(IntEqTestCase): - resolutions = [30, 40, 50] + resolutions = [40, 50, 60] def get_mesh(self, resolution, target_order): return make_curve_mesh( -- GitLab From f7ce942d90680555fb0a6c9adb3e2e6209570b94 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Mon, 2 Apr 2018 17:57:09 -0500 Subject: [PATCH 20/41] Back GQBX test off from 18-to-1 to 20-to-1 ellipse --- test/test_global_qbx.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/test_global_qbx.py b/test/test_global_qbx.py index f3a21663..e82f9d9f 100644 --- a/test/test_global_qbx.py +++ b/test/test_global_qbx.py @@ -217,7 +217,7 @@ def test_source_refinement_3d(ctx_getter, surface_name, surface_f, order): @pytest.mark.parametrize(("curve_name", "curve_f", "nelements"), [ - ("20-to-1 ellipse", partial(ellipse, 20), 100), + ("18-to-1 ellipse", partial(ellipse, 18), 100), ("horseshoe", horseshoe, 64), ]) def test_target_association(ctx_getter, curve_name, curve_f, nelements): -- GitLab From f2f1adb3fa8094ef008d2351f0d3a6eca34ede74 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Mon, 2 Apr 2018 18:44:51 -0500 Subject: [PATCH 21/41] Add 2D visualization to test_target_association --- test/test_global_qbx.py | 32 +++++++++++++++++++++++++++++++- 1 file changed, 31 insertions(+), 1 deletion(-) diff --git a/test/test_global_qbx.py b/test/test_global_qbx.py index e82f9d9f..02fc4717 100644 --- a/test/test_global_qbx.py +++ b/test/test_global_qbx.py @@ -220,7 +220,8 @@ def test_source_refinement_3d(ctx_getter, surface_name, surface_f, order): ("18-to-1 ellipse", partial(ellipse, 18), 100), ("horseshoe", horseshoe, 64), ]) -def test_target_association(ctx_getter, curve_name, curve_f, nelements): +def test_target_association(ctx_getter, curve_name, curve_f, nelements, + visualize=False): cl_ctx = ctx_getter() queue = cl.CommandQueue(cl_ctx) @@ -316,6 +317,35 @@ def test_target_association(ctx_getter, curve_name, curve_f, nelements): int_targets = np.array([axis.get(queue) for axis in 
int_targets.nodes()]) ext_targets = np.array([axis.get(queue) for axis in ext_targets.nodes()]) + def visualize(): + import matplotlib.pyplot as plt + from meshmode.mesh.visualization import draw_curve + + draw_curve(lpot_source.density_discr.mesh) + + targets = int_targets + tgt_slice = surf_int_slice + + plt.plot(centers[0], centers[1], "+", color="orange") + ax = plt.gca() + + for tx, ty, tcenter in zip( + targets[0, tgt_slice], + targets[1, tgt_slice], + target_assoc.target_to_center[tgt_slice]): + if tcenter >= 0: + ax.add_artist( + plt.Line2D( + (tx, centers[0, tcenter]), + (ty, centers[1, tcenter]), + )) + + ax.set_aspect("equal") + plt.show() + + if visualize: + visualize() + # Checks that the sources match with their own centers. def check_on_surface_targets(nsources, true_side, target_to_center, target_to_side_result): -- GitLab From 1d3a5f6971f2f2fa6a701483141388f205f96768 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Mon, 2 Apr 2018 19:15:05 -0500 Subject: [PATCH 22/41] Fix identifier clash in test_global_qbx.py --- test/test_global_qbx.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/test_global_qbx.py b/test/test_global_qbx.py index 02fc4717..28f525a6 100644 --- a/test/test_global_qbx.py +++ b/test/test_global_qbx.py @@ -317,7 +317,7 @@ def test_target_association(ctx_getter, curve_name, curve_f, nelements, int_targets = np.array([axis.get(queue) for axis in int_targets.nodes()]) ext_targets = np.array([axis.get(queue) for axis in ext_targets.nodes()]) - def visualize(): + def visualize_curve_and_assoc(): import matplotlib.pyplot as plt from meshmode.mesh.visualization import draw_curve @@ -344,7 +344,7 @@ def test_target_association(ctx_getter, curve_name, curve_f, nelements, plt.show() if visualize: - visualize() + visualize_curve_and_assoc() # Checks that the sources match with their own centers. 
def check_on_surface_targets(nsources, true_side, target_to_center, -- GitLab From 68ad202709491e8999b4032c2020ec4842fb43f8 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Tue, 3 Apr 2018 12:04:30 -0500 Subject: [PATCH 23/41] Allow passing max_expansion_radius to with_refinement --- pytential/qbx/__init__.py | 11 ++-- pytential/qbx/refinement.py | 102 +++++++++++++++++++----------------- 2 files changed, 59 insertions(+), 54 deletions(-) diff --git a/pytential/qbx/__init__.py b/pytential/qbx/__init__.py index 77384bb1..dd168df7 100644 --- a/pytential/qbx/__init__.py +++ b/pytential/qbx/__init__.py @@ -369,7 +369,8 @@ class QBXLayerPotentialSource(LayerPotentialSourceBase): @memoize_method def with_refinement(self, target_order=None, kernel_length_scale=None, - maxiter=None, visualize=False, _expansion_disturbance_tolerance=None): + maxiter=None, visualize=False, _expansion_disturbance_tolerance=None, + _max_expansion_radius=None): """ :returns: a tuple ``(lpot_src, cnx)``, where ``lpot_src`` is a :class:`QBXLayerPotentialSource` and ``cnx`` is a @@ -391,7 +392,8 @@ class QBXLayerPotentialSource(LayerPotentialSourceBase): InterpolatoryQuadratureSimplexGroupFactory(target_order), kernel_length_scale=kernel_length_scale, maxiter=maxiter, visualize=visualize, - expansion_disturbance_tolerance=_expansion_disturbance_tolerance) + expansion_disturbance_tolerance=_expansion_disturbance_tolerance, + max_expansion_radius=_max_expansion_radius) return lpot, connection @@ -416,11 +418,6 @@ class QBXLayerPotentialSource(LayerPotentialSourceBase): @memoize_method def _expansion_radii(self, last_dim_length): - if last_dim_length == "npanels": - raise ValueError( - "Passing 'npanels' as last_dim_length to _expansion_radii is " - "not allowed. Allowed values are 'nsources' and 'ncenters'.") - if self.density_discr.dim == 2: # A triangle has half the area of a square, # so the prior (area)**(1/dim) quadrature resolution measure diff --git a/pytential/qbx/refinement.py b/pytential/qbx/refinement.py index 2a1d8b7f..3985d952 100644 --- a/pytential/qbx/refinement.py +++ b/pytential/qbx/refinement.py @@ -242,23 +242,24 @@ class RefinerCodeContainer(TreeCodeContainerMixin): extra_type_aliases=(("particle_id_t", particle_id_dtype),)) @memoize_method - def kernel_length_scale_to_panel_size_ratio_checker(self): + def element_prop_threshold_checker(self): knl = lp.make_kernel( - "{[panel]: 0<=panel oversize = panel_sizes[panel] > kernel_length_scale - if oversize - refine_flags[panel] = 1 + for ielement + <> over_threshold = element_property[ielement] > threshold + if over_threshold + refine_flags[ielement] = 1 refine_flags_updated = 1 {id=write_refine_flags_updated} end end """, options="return_dict", silenced_warnings="write_race(write_refine_flags_updated)", - name="refine_kernel_length_scale_to_panel_size_ratio", + name="refine_kernel_length_scale_to_quad_resolution_ratio", lang_version=MOST_RECENT_LANGUAGE_VERSION) - knl = lp.split_iname(knl, "panel", 128, inner_tag="l.0", outer_tag="g.0") + + knl = lp.split_iname(knl, "ielement", 128, inner_tag="l.0", outer_tag="g.0") return knl def get_wrangler(self, queue): @@ -399,9 +400,9 @@ class RefinerWrangler(TreeWranglerBase): return found_panel_to_refine.get()[0] == 1 - def check_kernel_length_scale_to_panel_size_ratio(self, lpot_source, - kernel_length_scale, refine_flags, debug, wait_for=None): - knl = self.code_container.kernel_length_scale_to_panel_size_ratio_checker() + def check_element_prop_threshold(self, element_property, threshold, refine_flags, + 
debug, wait_for=None): + knl = self.code_container.element_prop_threshold_checker() logger.info("refiner: checking kernel length scale to panel size ratio") @@ -409,10 +410,11 @@ class RefinerWrangler(TreeWranglerBase): npanels_to_refine_prev = cl.array.sum(refine_flags).get() evt, out = knl(self.queue, - panel_sizes=lpot_source._coarsest_quad_resolution("npanels"), + element_property=element_property, + # lpot_source._coarsest_quad_resolution("npanels")), refine_flags=refine_flags, refine_flags_updated=np.array(0), - kernel_length_scale=np.array(kernel_length_scale), + threshold=np.array(threshold), wait_for=wait_for) cl.wait_for_events([evt]) @@ -475,7 +477,8 @@ def make_empty_refine_flags(queue, lpot_source, use_stage2_discr=False): def refine_for_global_qbx(lpot_source, wrangler, group_factory, kernel_length_scale=None, - refine_flags=None, debug=None, maxiter=None, + max_expansion_radius=None, + debug=None, maxiter=None, visualize=None, expansion_disturbance_tolerance=None): """ Entry point for calling the refiner. @@ -491,11 +494,6 @@ def refine_for_global_qbx(lpot_source, wrangler, :arg kernel_length_scale: The kernel length scale, or *None* if not applicable. All panels are refined to below this size. - :arg refine_flags: A :class:`pyopencl.array.Array` indicating which - panels should get refined initially, or `None` if no initial - refinement should be done. Should have size equal to the number of - panels. See also :func:`make_empty_refine_flags()`. - :arg maxiter: The maximum number of refiner iterations. :returns: A tuple ``(lpot_source, *conn*)`` where ``lpot_source`` is the @@ -524,14 +522,6 @@ def refine_for_global_qbx(lpot_source, wrangler, refiner = Refiner(lpot_source.density_discr.mesh) connections = [] - # Do initial refinement. - if refine_flags is not None: - conn = wrangler.refine( - lpot_source.density_discr, refiner, refine_flags, group_factory, - debug) - connections.append(conn) - lpot_source = lpot_source.copy(density_discr=conn.to_discr) - # {{{ first stage refinement def visualize_refinement(niter, stage, flags): @@ -597,33 +587,53 @@ def refine_for_global_qbx(lpot_source, wrangler, warn_max_iterations() break - # Build tree and auxiliary data. - # FIXME: The tree should not have to be rebuilt at each iteration. - tree = wrangler.build_tree(lpot_source) - peer_lists = wrangler.find_peer_lists(tree) refine_flags = make_empty_refine_flags(wrangler.queue, lpot_source) - # Check condition 1. - has_disturbed_expansions = \ - wrangler.check_expansion_disks_undisturbed_by_sources( - lpot_source, tree, peer_lists, - expansion_disturbance_tolerance, - refine_flags, debug) - if has_disturbed_expansions: - iter_violated_criteria.append("disturbed expansions") - visualize_refinement(niter, "disturbed-expansions", refine_flags) - - # Check condition 3. 
if kernel_length_scale is not None: - violates_kernel_length_scale = \ - wrangler.check_kernel_length_scale_to_panel_size_ratio( - lpot_source, kernel_length_scale, refine_flags, debug) + wrangler.check_element_prop_threshold( + element_property=lpot_source._coarsest_quad_resolution( + "npanels"), + threshold=kernel_length_scale, + refine_flags=refine_flags, debug=debug) if violates_kernel_length_scale: iter_violated_criteria.append("kernel length scale") visualize_refinement(niter, "kernel-length-scale", refine_flags) + if max_expansion_radius is not None: + violates_expansion_radii = \ + wrangler.check_element_prop_threshold( + element_property=lpot_source._expansion_radii( + "npanels"), + threshold=max_expansion_radius, + refine_flags=refine_flags, debug=debug) + + if violates_expansion_radii: + iter_violated_criteria.append("expansion radii") + visualize_refinement(niter, "expansion-radii", refine_flags) + + if not iter_violated_criteria: + # Only start building trees once the simple length-based criteria + # are happy. + + # Build tree and auxiliary data. + # FIXME: The tree should not have to be rebuilt at each iteration. + tree = wrangler.build_tree(lpot_source) + peer_lists = wrangler.find_peer_lists(tree) + + has_disturbed_expansions = \ + wrangler.check_expansion_disks_undisturbed_by_sources( + lpot_source, tree, peer_lists, + expansion_disturbance_tolerance, + refine_flags, debug) + if has_disturbed_expansions: + iter_violated_criteria.append("disturbed expansions") + visualize_refinement(niter, "disturbed-expansions", refine_flags) + + del tree + del peer_lists + if iter_violated_criteria: violated_criteria.append(" and ".join(iter_violated_criteria)) @@ -633,9 +643,7 @@ def refine_for_global_qbx(lpot_source, wrangler, connections.append(conn) lpot_source = lpot_source.copy(density_discr=conn.to_discr) - del tree del refine_flags - del peer_lists # }}} -- GitLab From 0751ab23cde4825fa26723e661084b1172ac480d Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Sat, 7 Apr 2018 20:32:14 -0500 Subject: [PATCH 24/41] QBXLPSource.with_refinement: Allow passing in existing refiner --- pytential/qbx/__init__.py | 13 ++++++++++--- pytential/qbx/refinement.py | 9 +++++++-- 2 files changed, 17 insertions(+), 5 deletions(-) diff --git a/pytential/qbx/__init__.py b/pytential/qbx/__init__.py index dd168df7..34caa9e8 100644 --- a/pytential/qbx/__init__.py +++ b/pytential/qbx/__init__.py @@ -369,9 +369,15 @@ class QBXLayerPotentialSource(LayerPotentialSourceBase): @memoize_method def with_refinement(self, target_order=None, kernel_length_scale=None, - maxiter=None, visualize=False, _expansion_disturbance_tolerance=None, - _max_expansion_radius=None): + maxiter=None, visualize=False, refiner=None, + _expansion_disturbance_tolerance=None, _max_expansion_radius=None): """ + :arg refiner: If the mesh underlying :attr:`density_discr` + is itself the result of refinement, then its + :class:`meshmode.refinement.Refiner` instance may need to + be reused for continued refinement. This argument + provides the opportunity to pass in an existing refiner + that should be used for continued refinement. 
:returns: a tuple ``(lpot_src, cnx)``, where ``lpot_src`` is a :class:`QBXLayerPotentialSource` and ``cnx`` is a :class:`meshmode.discretization.connection.DiscretizationConnection` @@ -393,7 +399,8 @@ class QBXLayerPotentialSource(LayerPotentialSourceBase): kernel_length_scale=kernel_length_scale, maxiter=maxiter, visualize=visualize, expansion_disturbance_tolerance=_expansion_disturbance_tolerance, - max_expansion_radius=_max_expansion_radius) + max_expansion_radius=_max_expansion_radius, + refiner=refiner) return lpot, connection diff --git a/pytential/qbx/refinement.py b/pytential/qbx/refinement.py index 3985d952..df2c9771 100644 --- a/pytential/qbx/refinement.py +++ b/pytential/qbx/refinement.py @@ -479,7 +479,8 @@ def refine_for_global_qbx(lpot_source, wrangler, group_factory, kernel_length_scale=None, max_expansion_radius=None, debug=None, maxiter=None, - visualize=None, expansion_disturbance_tolerance=None): + visualize=None, expansion_disturbance_tolerance=None, + refiner=None): """ Entry point for calling the refiner. @@ -519,7 +520,11 @@ def refine_for_global_qbx(lpot_source, wrangler, from meshmode.discretization.connection import ( ChainedDiscretizationConnection, make_same_mesh_connection) - refiner = Refiner(lpot_source.density_discr.mesh) + if refiner is None: + assert refiner.get_current_mesh() == lpot_source.density_discr.mesh + else: + refiner = Refiner(lpot_source.density_discr.mesh) + connections = [] # {{{ first stage refinement -- GitLab From b0c4172e79c8a202e0ec1ebaed59d674aaf947a6 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Sat, 7 Apr 2018 20:32:57 -0500 Subject: [PATCH 25/41] Use same dim-dependent fudge factor in source danger zone and expansion radius --- pytential/qbx/__init__.py | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/pytential/qbx/__init__.py b/pytential/qbx/__init__.py index 34caa9e8..fadcbc4e 100644 --- a/pytential/qbx/__init__.py +++ b/pytential/qbx/__init__.py @@ -423,21 +423,18 @@ class QBXLayerPotentialSource(LayerPotentialSourceBase): import pytential.qbx.utils as utils return utils.element_centers_of_mass(self.stage2_density_discr) - @memoize_method - def _expansion_radii(self, last_dim_length): + def _dim_fudge_factor(self): if self.density_discr.dim == 2: - # A triangle has half the area of a square, - # so the prior (area)**(1/dim) quadrature resolution measure - # may be viewed as having an extraneous factor of 1/sqrt(2) - # for triangles. 
- fudge_factor = 0.5 + return 0.75 else: - fudge_factor = 1 + return 1 + @memoize_method + def _expansion_radii(self, last_dim_length): with cl.CommandQueue(self.cl_context) as queue: return (self._coarsest_quad_resolution(last_dim_length) .with_queue(queue) - * 0.5 * fudge_factor).with_queue(None) + * 0.5 * self._dim_fudge_factor()).with_queue(None) # _expansion_radii should not be needed for the fine discretization @@ -505,10 +502,11 @@ class QBXLayerPotentialSource(LayerPotentialSourceBase): @memoize_method def _source_danger_zone_radii(self, last_dim_length="npanels"): quad_res = self._stage2_coarsest_quad_resolution(last_dim_length) + with cl.CommandQueue(self.cl_context) as queue: return (quad_res .with_queue(queue) - * 0.25).with_queue(None) + * 0.25 * self._dim_fudge_factor()).with_queue(None) @memoize_method def qbx_fmm_geometry_data(self, target_discrs_and_qbx_sides): -- GitLab From cc3fdc0c2109b581619793963ea35e87693b4dff Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Tue, 10 Apr 2018 13:28:10 -0500 Subject: [PATCH 26/41] Tweak source danger zone radii for proper resolution control at a refinement boundary --- pytential/qbx/__init__.py | 38 +++++++++++++++++++++----------------- 1 file changed, 21 insertions(+), 17 deletions(-) diff --git a/pytential/qbx/__init__.py b/pytential/qbx/__init__.py index fadcbc4e..dd565278 100644 --- a/pytential/qbx/__init__.py +++ b/pytential/qbx/__init__.py @@ -438,6 +438,27 @@ class QBXLayerPotentialSource(LayerPotentialSourceBase): # _expansion_radii should not be needed for the fine discretization + @memoize_method + def _source_danger_zone_radii(self, last_dim_length="npanels"): + # This should be the expression of the expansion radii, but + # + # - in reference to the stage 2 discretization + # - mutliplied by 0.75 because + # + # - Setting this equal to the expansion radii ensures that *every* + # stage 2 element will be refined, which is wasteful. + # (so this needs to be smaller than that) + # + + # - Setting this equal to half the expansion radius will not provide + # a refinement 'buffer layer' at a 2x coarsening fringe. + + with cl.CommandQueue(self.cl_context) as queue: + return ( + (self._stage2_coarsest_quad_resolution(last_dim_length) + .with_queue(queue)) + * 0.5 * 0.75 * self._dim_fudge_factor()).with_queue(None) + @memoize_method def _close_target_tunnel_radius(self, last_dim_length): with cl.CommandQueue(self.cl_context) as queue: @@ -451,10 +472,6 @@ class QBXLayerPotentialSource(LayerPotentialSourceBase): """This measures the quadrature resolution across the mesh. In a 1D uniform mesh of uniform 'parametrization speed', it should be the same as the panel length. - - It is empirically about a factor of 1.2 larger than sym._panel_size for - an ellipse, presumably because it uses the largest 'parametrization - speed'/'stretch factor' across the whole element. """ import pytential.qbx.utils as utils from pytential import sym, bind @@ -476,10 +493,6 @@ class QBXLayerPotentialSource(LayerPotentialSourceBase): """This measures the quadrature resolution across the mesh. In a 1D uniform mesh of uniform 'parametrization speed', it should be the same as the panel length. - - It is empirically about a factor of 1.2 larger than sym._panel_size for - an ellipse, presumably because it uses the largest 'parametrization - speed'/'stretch factor' across the whole element. """ if last_dim_length != "npanels": # Not technically required below, but no need to loosen for now. 
@@ -499,15 +512,6 @@ class QBXLayerPotentialSource(LayerPotentialSourceBase): return maxstretch - @memoize_method - def _source_danger_zone_radii(self, last_dim_length="npanels"): - quad_res = self._stage2_coarsest_quad_resolution(last_dim_length) - - with cl.CommandQueue(self.cl_context) as queue: - return (quad_res - .with_queue(queue) - * 0.25 * self._dim_fudge_factor()).with_queue(None) - @memoize_method def qbx_fmm_geometry_data(self, target_discrs_and_qbx_sides): """ -- GitLab From 507db52c2d1e0b2f50576a6642992c83bd9b5ef4 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Tue, 10 Apr 2018 16:42:27 -0500 Subject: [PATCH 27/41] Fix inverted logic in refinement-with-existing-refiner --- pytential/qbx/refinement.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pytential/qbx/refinement.py b/pytential/qbx/refinement.py index df2c9771..31cf9dda 100644 --- a/pytential/qbx/refinement.py +++ b/pytential/qbx/refinement.py @@ -520,7 +520,7 @@ def refine_for_global_qbx(lpot_source, wrangler, from meshmode.discretization.connection import ( ChainedDiscretizationConnection, make_same_mesh_connection) - if refiner is None: + if refiner is not None: assert refiner.get_current_mesh() == lpot_source.density_discr.mesh else: refiner = Refiner(lpot_source.density_discr.mesh) -- GitLab From b5a71fecb4999ba2b4712f259679b337fe7b1469 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Wed, 11 Apr 2018 01:27:17 -0500 Subject: [PATCH 28/41] Revert 3D dimension fudge factor to 0.5, for increased accuracy --- pytential/qbx/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pytential/qbx/__init__.py b/pytential/qbx/__init__.py index dd565278..0228b0db 100644 --- a/pytential/qbx/__init__.py +++ b/pytential/qbx/__init__.py @@ -425,7 +425,7 @@ class QBXLayerPotentialSource(LayerPotentialSourceBase): def _dim_fudge_factor(self): if self.density_discr.dim == 2: - return 0.75 + return 0.5 else: return 1 -- GitLab From 386b2593afca6e8f35af3e63bfc1ad0da0d2946d Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Wed, 11 Apr 2018 10:58:02 -0500 Subject: [PATCH 29/41] Bump 3D jump rel tests further down the asymptote --- test/test_layer_pot.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/test_layer_pot.py b/test/test_layer_pot.py index b0766375..f81a5e55 100644 --- a/test/test_layer_pot.py +++ b/test/test_layer_pot.py @@ -404,7 +404,7 @@ def test_3d_jump_relations(ctx_factory, relation, visualize=False): from pytools.convergence import EOCRecorder eoc_rec = EOCRecorder() - for nel_factor in [6, 8, 12]: + for nel_factor in [6, 10, 14]: from meshmode.mesh.generation import generate_torus mesh = generate_torus( 5, 2, order=target_order, -- GitLab From 6fb8fbf6be632492f014804a1f365399a89ffb71 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Thu, 19 Apr 2018 16:56:35 -0500 Subject: [PATCH 30/41] Switch to RefinerWithoutAdjacency for QBX refinement --- pytential/qbx/refinement.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/pytential/qbx/refinement.py b/pytential/qbx/refinement.py index 31cf9dda..0d63c228 100644 --- a/pytential/qbx/refinement.py +++ b/pytential/qbx/refinement.py @@ -516,14 +516,16 @@ def refine_for_global_qbx(lpot_source, wrangler, # TODO: Stop doing redundant checks by avoiding panels which no longer need # refinement. 
- from meshmode.mesh.refinement import Refiner + from meshmode.mesh.refinement import RefinerWithoutAdjacency from meshmode.discretization.connection import ( ChainedDiscretizationConnection, make_same_mesh_connection) if refiner is not None: assert refiner.get_current_mesh() == lpot_source.density_discr.mesh else: - refiner = Refiner(lpot_source.density_discr.mesh) + # We may be handed a mesh that's already non-conforming, we don't rely + # on adjacency, and the no-adjacency refiner is faster. + refiner = RefinerWithoutAdjacency(lpot_source.density_discr.mesh) connections = [] -- GitLab From 0457546b86b70404b8bbcac24a2235c47135a4dc Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Thu, 19 Apr 2018 16:57:21 -0500 Subject: [PATCH 31/41] Introduce force_stage2_uniform_refinement_rounds refinement parameter --- pytential/qbx/__init__.py | 6 +++++- pytential/qbx/refinement.py | 16 ++++++++++++++++ 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/pytential/qbx/__init__.py b/pytential/qbx/__init__.py index 0228b0db..b02fd1ca 100644 --- a/pytential/qbx/__init__.py +++ b/pytential/qbx/__init__.py @@ -370,7 +370,9 @@ class QBXLayerPotentialSource(LayerPotentialSourceBase): @memoize_method def with_refinement(self, target_order=None, kernel_length_scale=None, maxiter=None, visualize=False, refiner=None, - _expansion_disturbance_tolerance=None, _max_expansion_radius=None): + _expansion_disturbance_tolerance=None, + _max_expansion_radius=None, + _force_stage2_uniform_refinement_rounds=None): """ :arg refiner: If the mesh underlying :attr:`density_discr` is itself the result of refinement, then its @@ -400,6 +402,8 @@ class QBXLayerPotentialSource(LayerPotentialSourceBase): maxiter=maxiter, visualize=visualize, expansion_disturbance_tolerance=_expansion_disturbance_tolerance, max_expansion_radius=_max_expansion_radius, + force_stage2_uniform_refinement_rounds=( + _force_stage2_uniform_refinement_rounds), refiner=refiner) return lpot, connection diff --git a/pytential/qbx/refinement.py b/pytential/qbx/refinement.py index 0d63c228..915056e6 100644 --- a/pytential/qbx/refinement.py +++ b/pytential/qbx/refinement.py @@ -478,6 +478,7 @@ def make_empty_refine_flags(queue, lpot_source, use_stage2_discr=False): def refine_for_global_qbx(lpot_source, wrangler, group_factory, kernel_length_scale=None, max_expansion_radius=None, + force_stage2_uniform_refinement_rounds=None, debug=None, maxiter=None, visualize=None, expansion_disturbance_tolerance=None, refiner=None): @@ -513,6 +514,9 @@ def refine_for_global_qbx(lpot_source, wrangler, if expansion_disturbance_tolerance is None: expansion_disturbance_tolerance = 0.025 + if force_stage2_uniform_refinement_rounds is None: + force_stage2_uniform_refinement_rounds = 0 + # TODO: Stop doing redundant checks by avoiding panels which no longer need # refinement. 
@@ -700,6 +704,18 @@ def refine_for_global_qbx(lpot_source, wrangler, del refine_flags del peer_lists + for round in range(force_stage2_uniform_refinement_rounds): + conn = wrangler.refine( + stage2_density_discr, + refiner, + np.ones(stage2_density_discr.mesh.nelements, dtype=np.bool), + group_factory, debug) + stage2_density_discr = conn.to_discr + fine_connections.append(conn) + lpot_source = lpot_source.copy( + to_refined_connection=ChainedDiscretizationConnection( + fine_connections)) + # }}} lpot_source = lpot_source.copy(debug=debug, _refined_for_global_qbx=True) -- GitLab From 7c8b888ed0e066705aa525ab1f2796e4e4ed5376 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Thu, 19 Apr 2018 16:57:55 -0500 Subject: [PATCH 32/41] QBX refinement vis: Don't go overboard on order --- pytential/qbx/refinement.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pytential/qbx/refinement.py b/pytential/qbx/refinement.py index 915056e6..82cb8eb8 100644 --- a/pytential/qbx/refinement.py +++ b/pytential/qbx/refinement.py @@ -541,7 +541,7 @@ def refine_for_global_qbx(lpot_source, wrangler, discr = lpot_source.density_discr from meshmode.discretization.visualization import make_visualizer - vis = make_visualizer(wrangler.queue, discr, 10) + vis = make_visualizer(wrangler.queue, discr, 3) flags = flags.get().astype(np.bool) nodes_flags = np.zeros(discr.nnodes) -- GitLab From 47cb31ee95996c0398f51e95a6215de393cc2954 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Thu, 19 Apr 2018 17:02:00 -0500 Subject: [PATCH 33/41] NumReferenceDerivative: Fix incorrect choice of data type for ref_axes parameter --- pytential/symbolic/execution.py | 4 ++- pytential/symbolic/mappers.py | 10 ++++++-- pytential/symbolic/primitives.py | 43 +++++++++++++++++++++++++------- 3 files changed, 45 insertions(+), 12 deletions(-) diff --git a/pytential/symbolic/execution.py b/pytential/symbolic/execution.py index 7f4194cd..c0fe0153 100644 --- a/pytential/symbolic/execution.py +++ b/pytential/symbolic/execution.py @@ -132,9 +132,11 @@ class EvaluationMapper(EvaluationMapperBase): def map_num_reference_derivative(self, expr): discr = self.bound_expr.get_discretization(expr.where) + from pytools import flatten + ref_axes = flatten([axis] * mult for axis, mult in expr.ref_axes) return discr.num_reference_derivative( self.queue, - expr.ref_axes, self.rec(expr.operand)) \ + ref_axes, self.rec(expr.operand)) \ .with_queue(self.queue) def map_q_weight(self, expr): diff --git a/pytential/symbolic/mappers.py b/pytential/symbolic/mappers.py index 680c8c97..0dfaa4f9 100644 --- a/pytential/symbolic/mappers.py +++ b/pytential/symbolic/mappers.py @@ -508,8 +508,14 @@ class StringifyMapper(BaseStringifyMapper): stringify_where(expr.where)) def map_num_reference_derivative(self, expr, enclosing_prec): - result = "d/dr%s.%s %s" % ( - ",".join(str(ax) for ax in expr.ref_axes), + diff_op = " ".join( + "d/dr%d" % axis + if mult == 1 else + "d/dr%d^%d" % (axis, mult) + for axis, mult in expr.ref_axes) + + result = "%s.%s %s" % ( + diff_op, stringify_where(expr.where), self.rec(expr.operand, PREC_PRODUCT), ) diff --git a/pytential/symbolic/primitives.py b/pytential/symbolic/primitives.py index f7a61133..27a36268 100644 --- a/pytential/symbolic/primitives.py +++ b/pytential/symbolic/primitives.py @@ -374,18 +374,46 @@ class NumReferenceDerivative(DiscretizationProperty): reference coordinates. 
""" + def __new__(cls, ref_axes, operand, where=None): + # If the constructor is handed a multivector object, return an + # object array of the operator applied to each of the + # coefficients in the multivector. + + if isinstance(operand, (np.ndarray)): + def make_op(operand_i): + return cls(ref_axes, operand_i, where=where) + + return componentwise(make_op, operand) + else: + return DiscretizationProperty.__new__(cls) + def __init__(self, ref_axes, operand, where=None): """ - :arg ref_axes: a :class:`frozenset` of indices of - reference coordinates along which derivatives - will be taken. + :arg ref_axes: a :class:`tuple` of tuples indicating indices of + coordinate axes of the reference element to the number of derivatives + which will be taken. For example, the value ``((0, 2), (1, 1))`` + indicates that Each axis must occur at most once. The tuple must be + sorted by the axis index. + + May also be a singile integer *i*, which is viewed as equivalent + to ``((i, 1),)``. :arg where: |where-blurb| """ - if not isinstance(ref_axes, frozenset): - raise ValueError("ref_axes must be a frozenset") + if isinstance(ref_axes, int): + ref_axes = ((ref_axes, 1),) + + if not isinstance(ref_axes, tuple): + raise ValueError("ref_axes must be a tuple") + + if tuple(sorted(ref_axes)) != ref_axes: + raise ValueError("ref_axes must be sorted") + + if len(dict(ref_axes)) != len(ref_axes): + raise ValueError("ref_axes must not contain an axis more than once") self.ref_axes = ref_axes + self.operand = operand DiscretizationProperty.__init__(self, where) @@ -404,10 +432,7 @@ def reference_jacobian(func, output_dim, dim, where=None): for i in range(output_dim): func_component = func[i] for j in range(dim): - jac[i, j] = NumReferenceDerivative( - frozenset([j]), - func_component, - where) + jac[i, j] = NumReferenceDerivative(j, func_component, where) return jac -- GitLab From b6afe5a1cbfa4f7ef43d0eaaf26acc719cd26b55 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Fri, 20 Apr 2018 10:35:05 -0500 Subject: [PATCH 34/41] Implement computation of first/second fundamental form and shape operator --- pytential/symbolic/primitives.py | 79 ++++++++++++++++++++++++++++++++ 1 file changed, 79 insertions(+) diff --git a/pytential/symbolic/primitives.py b/pytential/symbolic/primitives.py index 27a36268..68848ff8 100644 --- a/pytential/symbolic/primitives.py +++ b/pytential/symbolic/primitives.py @@ -121,6 +121,10 @@ Discretization properties .. autofunction:: area_element .. autofunction:: sqrt_jac_q_weight .. autofunction:: normal +.. autofunction:: mean_curvature +.. autofunction:: first_fundamental_form +.. autofunction:: second_fundamental_form +.. autofunction:: shape_operator Elementary numerics ^^^^^^^^^^^^^^^^^^^ @@ -522,6 +526,81 @@ def mean_curvature(ambient_dim, dim=None, where=None): return (xp[0]*ypp[0] - yp[0]*xpp[0]) / (xp[0]**2 + yp[0]**2)**(3/2) +def first_fundamental_form(ambient_dim, dim=None, where=None): + if dim is None: + dim = ambient_dim - 1 + + if ambient_dim != 3 and dim != 2: + raise NotImplementedError("only available for surfaces in 3D") + + pd_mat = parametrization_derivative_matrix(ambient_dim, dim, where) + + return cse( + np.dot(pd_mat.T, pd_mat), + "fundform1") + + +def second_fundamental_form(ambient_dim, dim=None, where=None): + """Compute the second fundamental form of a surface. This is in reference + to the reference-to-global mapping in use for each element. + + .. 
note:: + + Some references assume that the second fundamental form is computed + with respect to an orthonormal basis, which this is not. + """ + if dim is None: + dim = ambient_dim - 1 + + if ambient_dim != 3 and dim != 2: + raise NotImplementedError("only available for surfaces in 3D") + + r = nodes(ambient_dim, where=where).as_vector() + + # https://en.wikipedia.org/w/index.php?title=Second_fundamental_form&oldid=821047433#Classical_notation + + from functools import partial + d = partial(NumReferenceDerivative, where=where) + ruu = d(((0, 2),), r) + ruv = d(((0, 1), (1, 1)), r) + rvv = d(((1, 2),), r) + + nrml = normal(ambient_dim, dim, where).as_vector() + + ff2_l = cse(np.dot(ruu, nrml), "fundform2_L") + ff2_m = cse(np.dot(ruv, nrml), "fundform2_M") + ff2_n = cse(np.dot(rvv, nrml), "fundform2_N") + + result = np.zeros((2, 2), dtype=object) + result[0, 0] = ff2_l + result[0, 1] = result[1, 0] = ff2_m + result[1, 1] = ff2_n + + return result + + +def shape_operator(ambient_dim, dim=None, where=None): + if dim is None: + dim = ambient_dim - 1 + + if ambient_dim != 3 and dim != 2: + raise NotImplementedError("only available for surfaces in 3D") + + # https://en.wikipedia.org/w/index.php?title=Differential_geometry_of_surfaces&oldid=833587563 + (E, F), (F, G) = first_fundamental_form(ambient_dim, dim, where) + (e, f), (f, g) = second_fundamental_form(ambient_dim, dim, where) + + result = np.zeros((2, 2), dtype=object) + result[0, 0] = e*G-f*F + result[0, 1] = f*G-g*F + result[1, 0] = f*E-e*F + result[1, 1] = g*E-f*F + + return cse( + 1/(E*G-F*F)*result, + "shape_operator") + + def _panel_size(ambient_dim, dim=None, where=None): # A broken quasi-1D approximation of 1D element size. Do not use. -- GitLab From 4497770dc7f8abfb32e77fd90987f59459960358 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Fri, 20 Apr 2018 10:35:48 -0500 Subject: [PATCH 35/41] Add a docstring for tangential_onb --- pytential/symbolic/primitives.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pytential/symbolic/primitives.py b/pytential/symbolic/primitives.py index 68848ff8..23dba044 100644 --- a/pytential/symbolic/primitives.py +++ b/pytential/symbolic/primitives.py @@ -1278,6 +1278,10 @@ def Dp(kernel, *args, **kwargs): # noqa # {{{ conventional vector calculus def tangential_onb(ambient_dim, dim=None, where=None): + """Return a matrix of shape ``(ambient_dim, dim)`` with orthogonal columns + spanning the tangential space of the surface of *where*. 
+ """ + if dim is None: dim = ambient_dim - 1 -- GitLab From 0fde3b783a5fb281c75cd51999914c4aa8b7b4b4 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Fri, 20 Apr 2018 10:36:24 -0500 Subject: [PATCH 36/41] Add _small_mat_inverse to primitives --- pytential/symbolic/primitives.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/pytential/symbolic/primitives.py b/pytential/symbolic/primitives.py index 23dba044..21b31e54 100644 --- a/pytential/symbolic/primitives.py +++ b/pytential/symbolic/primitives.py @@ -612,6 +612,24 @@ def _panel_size(ambient_dim, dim=None, where=None): * QWeight())**(1/dim) +def _small_mat_inverse(mat): + m, n = mat.shape + if m != n: + raise ValueError("inverses only make sense for square matrices") + + if m == 1: + return make_obj_array([1/mat[0, 0]]) + elif m == 2: + (a, b), (c, d) = mat + return 1/(a*d-b*c) * make_obj_array([ + [d, -b], + [-c, a], + ]) + else: + raise NotImplementedError( + "inverse formula for %dx%d matrices" % (m, n)) + + def _small_mat_eigenvalues(mat): m, n = mat.shape if m != n: -- GitLab From 362e5f130137feb81ce9d97fa033bb5d6bc8d9a3 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Fri, 20 Apr 2018 10:52:57 -0500 Subject: [PATCH 37/41] Primitives: Add curvature-per-resolution measure --- pytential/symbolic/primitives.py | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/pytential/symbolic/primitives.py b/pytential/symbolic/primitives.py index 21b31e54..ecbdc899 100644 --- a/pytential/symbolic/primitives.py +++ b/pytential/symbolic/primitives.py @@ -703,6 +703,38 @@ def _simplex_mapping_max_stretch_factor(ambient_dim, dim=None, where=None, return cse(result, "mapping_max_stretch", cse_scope.DISCRETIZATION) + +def _max_curvature(ambient_dim, dim=None, where=None): + # An attempt at a 'max curvature' criterion. + + if dim is None: + dim = ambient_dim - 1 + + if ambient_dim == 2: + return abs(mean_curvature(ambient_dim, dim, where=where)) + elif ambient_dim == 3: + shape_op = shape_operator(ambient_dim, dim, where=where) + + abs_principal_curvatures = [ + abs(x) for x in _small_mat_eigenvalues(shape_op)] + from pymbolic.primitives import Max + return cse(Max(tuple(abs_principal_curvatures))) + else: + raise NotImplementedError("curvature criterion not implemented in %d " + "dimensions" % ambient_dim) + + +def _scaled_max_curvature(ambient_dim, dim=None, where=None): + """An attempt at a unit-less, scale-invariant quantity that characterizes + 'how much curviness there is on an element'. Values seem to hover around 1 + on typical meshes. Empirical evidence suggests that elements exceeding + a threshold of about 0.8-1 will have high QBX truncation error. 
+ """ + + return _max_curvature(ambient_dim, dim, where=where) * \ + _simplex_mapping_max_stretch_factor(ambient_dim, dim, where=where, + with_elementwise_max=False) + # }}} -- GitLab From eb0f719f9fef67280fcfb1dc8800b4fb6602946a Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Fri, 20 Apr 2018 11:00:07 -0500 Subject: [PATCH 38/41] Allow refinement by scaled max curvature, remove max radius refinement criterion --- pytential/qbx/__init__.py | 7 ++++--- pytential/qbx/refinement.py | 27 ++++++++++++++++++--------- 2 files changed, 22 insertions(+), 12 deletions(-) diff --git a/pytential/qbx/__init__.py b/pytential/qbx/__init__.py index b02fd1ca..73a65097 100644 --- a/pytential/qbx/__init__.py +++ b/pytential/qbx/__init__.py @@ -371,8 +371,8 @@ class QBXLayerPotentialSource(LayerPotentialSourceBase): def with_refinement(self, target_order=None, kernel_length_scale=None, maxiter=None, visualize=False, refiner=None, _expansion_disturbance_tolerance=None, - _max_expansion_radius=None, - _force_stage2_uniform_refinement_rounds=None): + _force_stage2_uniform_refinement_rounds=None, + _scaled_max_curvature_threshold=None): """ :arg refiner: If the mesh underlying :attr:`density_discr` is itself the result of refinement, then its @@ -401,9 +401,10 @@ class QBXLayerPotentialSource(LayerPotentialSourceBase): kernel_length_scale=kernel_length_scale, maxiter=maxiter, visualize=visualize, expansion_disturbance_tolerance=_expansion_disturbance_tolerance, - max_expansion_radius=_max_expansion_radius, force_stage2_uniform_refinement_rounds=( _force_stage2_uniform_refinement_rounds), + scaled_max_curvature_threshold=( + _scaled_max_curvature_threshold), refiner=refiner) return lpot, connection diff --git a/pytential/qbx/refinement.py b/pytential/qbx/refinement.py index 82cb8eb8..d203670b 100644 --- a/pytential/qbx/refinement.py +++ b/pytential/qbx/refinement.py @@ -477,8 +477,8 @@ def make_empty_refine_flags(queue, lpot_source, use_stage2_discr=False): def refine_for_global_qbx(lpot_source, wrangler, group_factory, kernel_length_scale=None, - max_expansion_radius=None, force_stage2_uniform_refinement_rounds=None, + scaled_max_curvature_threshold=None, debug=None, maxiter=None, visualize=None, expansion_disturbance_tolerance=None, refiner=None): @@ -612,17 +612,26 @@ def refine_for_global_qbx(lpot_source, wrangler, iter_violated_criteria.append("kernel length scale") visualize_refinement(niter, "kernel-length-scale", refine_flags) - if max_expansion_radius is not None: - violates_expansion_radii = \ + if scaled_max_curvature_threshold is not None: + from pytential.qbx.utils import to_last_dim_length + from pytential import sym, bind + scaled_max_curv = to_last_dim_length( + lpot_source.density_discr, + bind(lpot_source, + sym.ElementwiseMax( + sym._scaled_max_curvature( + lpot_source.density_discr.ambient_dim))) + (wrangler.queue), "npanels") + + violates_scaled_max_curv = \ wrangler.check_element_prop_threshold( - element_property=lpot_source._expansion_radii( - "npanels"), - threshold=max_expansion_radius, + element_property=scaled_max_curv, + threshold=scaled_max_curvature_threshold, refine_flags=refine_flags, debug=debug) - if violates_expansion_radii: - iter_violated_criteria.append("expansion radii") - visualize_refinement(niter, "expansion-radii", refine_flags) + if violates_scaled_max_curv: + iter_violated_criteria.append("curvature") + visualize_refinement(niter, "curvature", refine_flags) if not iter_violated_criteria: # Only start building trees once the simple length-based criteria -- GitLab 
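
The refinement controls introduced in the preceding patches can be exercised
roughly as in the sketch below. This is illustrative only and is not code from
these commits; the starfish test geometry, the orders, and the threshold value
are assumptions, the threshold guided by the 0.8-1 range quoted in the
``_scaled_max_curvature`` docstring::

    import numpy as np
    import pyopencl as cl

    from meshmode.mesh.generation import make_curve_mesh, starfish
    from meshmode.discretization import Discretization
    from meshmode.discretization.poly_element import (
            InterpolatoryQuadratureSimplexGroupFactory)

    from pytential.qbx import QBXLayerPotentialSource

    cl_ctx = cl.create_some_context()

    target_order = 5

    # Start from a deliberately coarse discretization; refinement decides
    # how much finer it needs to be.
    mesh = make_curve_mesh(starfish, np.linspace(0, 1, 30), target_order)
    pre_density_discr = Discretization(
            cl_ctx, mesh,
            InterpolatoryQuadratureSimplexGroupFactory(target_order))

    lpot_source = QBXLayerPotentialSource(
            pre_density_discr,
            fine_order=4*target_order,
            qbx_order=target_order,
            fmm_order=False)

    # The underscored keyword arguments are the experimental knobs added in
    # this series: a fixed number of extra uniform stage-2 refinement rounds
    # and the scaled-max-curvature criterion (threshold value assumed).
    lpot_source, _ = lpot_source.with_refinement(
            _force_stage2_uniform_refinement_rounds=1,
            _scaled_max_curvature_threshold=0.8)
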
From 491a0313ec4a63a31a368f8d186f671126fa6d59 Mon Sep 17 00:00:00 2001
From: Andreas Kloeckner
Date: Fri, 20 Apr 2018 11:28:19 -0500
Subject: [PATCH 39/41] Add TODO about too-strict test for
 test_target_association

[ci skip]
---
 test/test_global_qbx.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/test/test_global_qbx.py b/test/test_global_qbx.py
index 28f525a6..e7a4eaa6 100644
--- a/test/test_global_qbx.py
+++ b/test/test_global_qbx.py
@@ -217,6 +217,12 @@ def test_source_refinement_3d(ctx_getter, surface_name, surface_f, order):


 @pytest.mark.parametrize(("curve_name", "curve_f", "nelements"), [
+    # TODO: This used to pass for a 20-to-1 ellipse (with different center
+    # placement). It still produces valid output right now, but the current
+    # test is not smart enough to recognize it. It might be useful to fix that.
+    #
+    # See discussion at
+    # https://gitlab.tiker.net/inducer/pytential/merge_requests/95#note_24003
     ("18-to-1 ellipse", partial(ellipse, 18), 100),
     ("horseshoe", horseshoe, 64),
 ])
-- 
GitLab


From be7475636c872eacfbe05313634513fde2284dac Mon Sep 17 00:00:00 2001
From: Andreas Kloeckner
Date: Fri, 20 Apr 2018 15:57:14 -0500
Subject: [PATCH 40/41] Switch to new pytools logging infrastructure

---
 pytential/qbx/fmm.py          | 28 ++++------
 pytential/qbx/fmmlib.py       |  6 +++
 pytential/qbx/geometry.py     | 16 ++-----
 pytential/qbx/refinement.py   | 90 ++++++++++++++++-------------------
 pytential/qbx/target_assoc.py | 15 ++----
 pytential/qbx/utils.py        |  7 ++-
 6 files changed, 67 insertions(+), 95 deletions(-)

diff --git a/pytential/qbx/fmm.py b/pytential/qbx/fmm.py
index 037f8188..ede7ede5 100644
--- a/pytential/qbx/fmm.py
+++ b/pytential/qbx/fmm.py
@@ -33,6 +33,8 @@ from sumpy.fmm import (SumpyExpansionWranglerCodeContainer,
 from pytools import memoize_method
 from pytential.qbx.interactions import P2QBXLFromCSR, M2QBXL, L2QBXL, QBXL2P

+from pytools import log_process, ProcessLogger
+
 import logging
 logger = logging.getLogger(__name__)

@@ -192,6 +194,7 @@ QBXFMMGeometryData.non_qbx_box_target_lists`),

     # {{{ qbx-related

+    @log_process(logger)
     def form_global_qbx_locals(self, src_weights):
         local_exps = self.qbx_local_expansion_zeros()

@@ -228,6 +231,7 @@ QBXFMMGeometryData.non_qbx_box_target_lists`),

         return result

+    @log_process(logger)
     def translate_box_multipoles_to_qbx_local(self, multipole_exps):
         qbx_expansions = self.qbx_local_expansion_zeros()

@@ -276,6 +280,7 @@ QBXFMMGeometryData.non_qbx_box_target_lists`),

         return qbx_expansions

+    @log_process(logger)
     def translate_box_local_to_qbx_local(self, local_exps):
         qbx_expansions = self.qbx_local_expansion_zeros()

@@ -320,6 +325,7 @@ QBXFMMGeometryData.non_qbx_box_target_lists`),

         return qbx_expansions

+    @log_process(logger)
     def eval_qbx_expansions(self, qbx_expansions):
         pot = self.full_output_zeros()

@@ -381,16 +387,12 @@ def drive_fmm(expansion_wrangler, src_weights):
     # Interface guidelines: Attributes of the tree are assumed to be known
     # to the expansion wrangler and should not be passed. 
- from time import time - start_time = time() - logger.info("start qbx fmm") + fmm_proc = ProcessLogger(logger, "qbx fmm") - logger.info("reorder source weights") src_weights = wrangler.reorder_sources(src_weights) # {{{ construct local multipoles - logger.info("construct local multipoles") mpole_exps = wrangler.form_multipoles( traversal.level_start_source_box_nrs, traversal.source_boxes, @@ -400,7 +402,6 @@ def drive_fmm(expansion_wrangler, src_weights): # {{{ propagate multipoles upward - logger.info("propagate multipoles upward") wrangler.coarsen_multipoles( traversal.level_start_source_parent_box_nrs, traversal.source_parent_boxes, @@ -410,7 +411,6 @@ def drive_fmm(expansion_wrangler, src_weights): # {{{ direct evaluation from neighbor source boxes ("list 1") - logger.info("direct evaluation from neighbor source boxes ('list 1')") non_qbx_potentials = wrangler.eval_direct( traversal.target_boxes, traversal.neighbor_source_boxes_starts, @@ -421,7 +421,6 @@ def drive_fmm(expansion_wrangler, src_weights): # {{{ translate separated siblings' ("list 2") mpoles to local - logger.info("translate separated siblings' ('list 2') mpoles to local") local_exps = wrangler.multipole_to_local( traversal.level_start_target_or_target_parent_box_nrs, traversal.target_or_target_parent_boxes, @@ -433,8 +432,6 @@ def drive_fmm(expansion_wrangler, src_weights): # {{{ evaluate sep. smaller mpoles ("list 3") at particles - logger.info("evaluate sep. smaller mpoles at particles ('list 3 far')") - # (the point of aiming this stage at particles is specifically to keep its # contribution *out* of the downward-propagating local expansions) @@ -450,8 +447,6 @@ def drive_fmm(expansion_wrangler, src_weights): # {{{ form locals for separated bigger source boxes ("list 4") - logger.info("form locals for separated bigger source boxes ('list 4 far')") - local_exps = local_exps + wrangler.form_locals( traversal.level_start_target_or_target_parent_box_nrs, traversal.target_or_target_parent_boxes, @@ -466,7 +461,6 @@ def drive_fmm(expansion_wrangler, src_weights): # {{{ propagate local_exps downward - logger.info("propagate local_exps downward") wrangler.refine_locals( traversal.level_start_target_or_target_parent_box_nrs, traversal.target_or_target_parent_boxes, @@ -476,7 +470,6 @@ def drive_fmm(expansion_wrangler, src_weights): # {{{ evaluate locals - logger.info("evaluate locals") non_qbx_potentials = non_qbx_potentials + wrangler.eval_locals( traversal.level_start_target_box_nrs, traversal.target_boxes, @@ -486,19 +479,14 @@ def drive_fmm(expansion_wrangler, src_weights): # {{{ wrangle qbx expansions - logger.info("form global qbx expansions from list 1") qbx_expansions = wrangler.form_global_qbx_locals(src_weights) - logger.info("translate from list 3 multipoles to qbx local expansions") qbx_expansions = qbx_expansions + \ wrangler.translate_box_multipoles_to_qbx_local(mpole_exps) - logger.info("translate from box local expansions to contained " - "qbx local expansions") qbx_expansions = qbx_expansions + \ wrangler.translate_box_local_to_qbx_local(local_exps) - logger.info("evaluate qbx local expansions") qbx_potentials = wrangler.eval_qbx_expansions( qbx_expansions) @@ -528,7 +516,7 @@ def drive_fmm(expansion_wrangler, src_weights): # }}} - logger.info("qbx fmm complete in %.2f s" % (time() - start_time)) + fmm_proc.done() return result diff --git a/pytential/qbx/fmmlib.py b/pytential/qbx/fmmlib.py index 578dadce..2b6cbf88 100644 --- a/pytential/qbx/fmmlib.py +++ b/pytential/qbx/fmmlib.py @@ -30,6 +30,8 @@ from 
boxtree.pyfmmlib_integration import FMMLibExpansionWrangler from sumpy.kernel import LaplaceKernel, HelmholtzKernel +from pytools import log_process + import logging logger = logging.getLogger(__name__) @@ -298,6 +300,7 @@ class QBXFMMLibExpansionWrangler(FMMLibExpansionWrangler): # {{{ p2qbxl + @log_process(logger) def form_global_qbx_locals(self, src_weights): geo_data = self.geo_data trav = geo_data.traversal() @@ -348,6 +351,7 @@ class QBXFMMLibExpansionWrangler(FMMLibExpansionWrangler): # {{{ m2qbxl + @log_process(logger) def translate_box_multipoles_to_qbx_local(self, multipole_exps): qbx_exps = self.qbx_local_expansion_zeros() @@ -458,6 +462,7 @@ class QBXFMMLibExpansionWrangler(FMMLibExpansionWrangler): # }}} + @log_process(logger) def translate_box_local_to_qbx_local(self, local_exps): qbx_expansions = self.qbx_local_expansion_zeros() @@ -518,6 +523,7 @@ class QBXFMMLibExpansionWrangler(FMMLibExpansionWrangler): return qbx_expansions + @log_process(logger) def eval_qbx_expansions(self, qbx_expansions): output = self.full_output_zeros() diff --git a/pytential/qbx/geometry.py b/pytential/qbx/geometry.py index 4c946e80..4c450881 100644 --- a/pytential/qbx/geometry.py +++ b/pytential/qbx/geometry.py @@ -36,6 +36,7 @@ from cgen import Enum from pytential.qbx.utils import TreeCodeContainerMixin +from pytools import log_process import logging logger = logging.getLogger(__name__) @@ -665,6 +666,7 @@ class QBXFMMGeometryData(object): return result.with_queue(None) @memoize_method + @log_process(logger) def global_qbx_centers(self): """Build a list of indices of QBX centers that use global QBX. This indexes into the global list of targets, (see :meth:`target_info`) of @@ -677,8 +679,6 @@ class QBXFMMGeometryData(object): user_target_to_center = self.user_target_to_center() with cl.CommandQueue(self.cl_context) as queue: - logger.info("find global qbx centers: start") - tgt_assoc_result = ( user_target_to_center.with_queue(queue)[self.ncenters:]) @@ -703,8 +703,6 @@ class QBXFMMGeometryData(object): ], queue=queue) - logger.info("find global qbx centers: done") - if self.debug: logger.debug( "find global qbx centers: using %d/%d centers" @@ -754,6 +752,7 @@ class QBXFMMGeometryData(object): return result.with_queue(None) @memoize_method + @log_process(logger) def center_to_tree_targets(self): """Return a :class:`CenterToTargetList`. See :meth:`target_to_center` for the reverse look-up table with targets in user order. @@ -764,8 +763,6 @@ class QBXFMMGeometryData(object): user_ttc = self.user_target_to_center() with cl.CommandQueue(self.cl_context) as queue: - logger.info("build center -> targets lookup table: start") - tree_ttc = cl.array.empty_like(user_ttc).with_queue(queue) tree_ttc[self.tree().sorted_target_ids] = user_ttc @@ -788,8 +785,6 @@ class QBXFMMGeometryData(object): filtered_tree_ttc, filtered_target_ids, self.ncenters, tree_ttc.dtype) - logger.info("build center -> targets lookup table: done") - result = CenterToTargetList( starts=center_target_starts, lists=targets_sorted_by_center).with_queue(None) @@ -797,6 +792,7 @@ class QBXFMMGeometryData(object): return result @memoize_method + @log_process(logger) def non_qbx_box_target_lists(self): """Build a list of targets per box that don't need to bother with QBX. Returns a :class:`boxtree.tree.FilteredTargetListsInTreeOrder`. 
@@ -807,8 +803,6 @@ class QBXFMMGeometryData(object): """ with cl.CommandQueue(self.cl_context) as queue: - logger.info("find non-qbx box target lists: start") - flags = (self.user_target_to_center().with_queue(queue) == target_state.NO_QBX_NEEDED) @@ -824,8 +818,6 @@ class QBXFMMGeometryData(object): plfilt = self.code_getter.particle_list_filter() result = plfilt.filter_target_lists_in_tree_order(queue, tree, flags) - logger.info("find non-qbx box target lists: done") - return result.with_queue(None) # {{{ plotting (for debugging) diff --git a/pytential/qbx/refinement.py b/pytential/qbx/refinement.py index d203670b..2ab458a2 100644 --- a/pytential/qbx/refinement.py +++ b/pytential/qbx/refinement.py @@ -39,6 +39,8 @@ from pytential.qbx.utils import ( QBX_TREE_C_PREAMBLE, QBX_TREE_MAKO_DEFS, TreeWranglerBase, TreeCodeContainerMixin) +from pytools import ProcessLogger, log_process + import logging logger = logging.getLogger(__name__) @@ -281,6 +283,7 @@ class RefinerWrangler(TreeWranglerBase): # {{{ check subroutines for conditions 1-3 + @log_process(logger) def check_expansion_disks_undisturbed_by_sources(self, lpot_source, tree, peer_lists, expansion_disturbance_tolerance, @@ -299,9 +302,6 @@ class RefinerWrangler(TreeWranglerBase): tree.particle_id_dtype, max_levels) - logger.info("refiner: checking that expansion disk is " - "undisturbed by sources") - if debug: npanels_to_refine_prev = cl.array.sum(refine_flags).get() @@ -339,10 +339,9 @@ class RefinerWrangler(TreeWranglerBase): logger.debug("refiner: found {} panel(s) to refine".format( npanels_to_refine - npanels_to_refine_prev)) - logger.info("refiner: done checking center is closest to orig panel") - return found_panel_to_refine.get()[0] == 1 + @log_process(logger) def check_sufficient_source_quadrature_resolution( self, lpot_source, tree, peer_lists, refine_flags, debug, wait_for=None): @@ -361,8 +360,6 @@ class RefinerWrangler(TreeWranglerBase): if debug: npanels_to_refine_prev = cl.array.sum(refine_flags).get() - logger.info("refiner: checking sufficient quadrature resolution") - found_panel_to_refine = cl.array.zeros(self.queue, 1, np.int32) found_panel_to_refine.finish() @@ -396,16 +393,12 @@ class RefinerWrangler(TreeWranglerBase): logger.debug("refiner: found {} panel(s) to refine".format( npanels_to_refine - npanels_to_refine_prev)) - logger.info("refiner: done checking sufficient quadrature resolution") - return found_panel_to_refine.get()[0] == 1 def check_element_prop_threshold(self, element_property, threshold, refine_flags, debug, wait_for=None): knl = self.code_container.element_prop_threshold_checker() - logger.info("refiner: checking kernel length scale to panel size ratio") - if debug: npanels_to_refine_prev = cl.array.sum(refine_flags).get() @@ -425,8 +418,6 @@ class RefinerWrangler(TreeWranglerBase): logger.debug("refiner: found {} panel(s) to refine".format( npanels_to_refine - npanels_to_refine_prev)) - logger.info("refiner: done checking kernel length scale to panel size ratio") - return (out["refine_flags_updated"].get() == 1).all() # }}} @@ -439,13 +430,10 @@ class RefinerWrangler(TreeWranglerBase): refine_flags = refine_flags.get(self.queue) refine_flags = refine_flags.astype(np.bool) - logger.info("refiner: calling meshmode") - - refiner.refine(refine_flags) - from meshmode.discretization.connection import make_refinement_connection - conn = make_refinement_connection(refiner, density_discr, factory) - - logger.info("refiner: done calling meshmode") + with ProcessLogger(logger, "refine mesh"): + 
refiner.refine(refine_flags) + from meshmode.discretization.connection import make_refinement_connection + conn = make_refinement_connection(refiner, density_discr, factory) return conn @@ -601,37 +589,43 @@ def refine_for_global_qbx(lpot_source, wrangler, refine_flags = make_empty_refine_flags(wrangler.queue, lpot_source) if kernel_length_scale is not None: - violates_kernel_length_scale = \ - wrangler.check_element_prop_threshold( - element_property=lpot_source._coarsest_quad_resolution( - "npanels"), - threshold=kernel_length_scale, - refine_flags=refine_flags, debug=debug) + with ProcessLogger(logger, + "checking kernel length scale to panel size ratio"): - if violates_kernel_length_scale: - iter_violated_criteria.append("kernel length scale") - visualize_refinement(niter, "kernel-length-scale", refine_flags) + violates_kernel_length_scale = \ + wrangler.check_element_prop_threshold( + element_property=( + lpot_source._coarsest_quad_resolution( + "npanels")), + threshold=kernel_length_scale, + refine_flags=refine_flags, debug=debug) + + if violates_kernel_length_scale: + iter_violated_criteria.append("kernel length scale") + visualize_refinement(niter, "kernel-length-scale", refine_flags) if scaled_max_curvature_threshold is not None: - from pytential.qbx.utils import to_last_dim_length - from pytential import sym, bind - scaled_max_curv = to_last_dim_length( - lpot_source.density_discr, - bind(lpot_source, - sym.ElementwiseMax( - sym._scaled_max_curvature( - lpot_source.density_discr.ambient_dim))) - (wrangler.queue), "npanels") - - violates_scaled_max_curv = \ - wrangler.check_element_prop_threshold( - element_property=scaled_max_curv, - threshold=scaled_max_curvature_threshold, - refine_flags=refine_flags, debug=debug) - - if violates_scaled_max_curv: - iter_violated_criteria.append("curvature") - visualize_refinement(niter, "curvature", refine_flags) + with ProcessLogger(logger, + "checking scaled max curvature threshold"): + from pytential.qbx.utils import to_last_dim_length + from pytential import sym, bind + scaled_max_curv = to_last_dim_length( + lpot_source.density_discr, + bind(lpot_source, + sym.ElementwiseMax( + sym._scaled_max_curvature( + lpot_source.density_discr.ambient_dim))) + (wrangler.queue), "npanels") + + violates_scaled_max_curv = \ + wrangler.check_element_prop_threshold( + element_property=scaled_max_curv, + threshold=scaled_max_curvature_threshold, + refine_flags=refine_flags, debug=debug) + + if violates_scaled_max_curv: + iter_violated_criteria.append("curvature") + visualize_refinement(niter, "curvature", refine_flags) if not iter_violated_criteria: # Only start building trees once the simple length-based criteria diff --git a/pytential/qbx/target_assoc.py b/pytential/qbx/target_assoc.py index 7b9736ce..bf15bf6e 100644 --- a/pytential/qbx/target_assoc.py +++ b/pytential/qbx/target_assoc.py @@ -43,6 +43,7 @@ from pytential.qbx.utils import ( unwrap_args = AreaQueryElementwiseTemplate.unwrap_args +from pytools import log_process import logging logger = logging.getLogger(__name__) @@ -438,6 +439,7 @@ class TargetAssociationWrangler(TreeWranglerBase): self.code_container = code_container self.queue = queue + @log_process(logger) def mark_targets(self, tree, peer_lists, lpot_source, target_status, debug, wait_for=None): # Round up level count--this gets included in the kernel as @@ -497,8 +499,6 @@ class TargetAssociationWrangler(TreeWranglerBase): wait_for=wait_for) wait_for = [evt] - logger.info("target association: marking targets close to panels") - 
tunnel_radius_by_source = lpot_source._close_target_tunnel_radius("nsources") evt = knl( @@ -527,10 +527,9 @@ class TargetAssociationWrangler(TreeWranglerBase): cl.wait_for_events([evt]) - logger.info("target association: done marking targets close to panels") - return (found_target_close_to_panel == 1).all().get() + @log_process(logger) def try_find_centers(self, tree, peer_lists, lpot_source, target_status, target_flags, target_assoc, target_association_tolerance, debug, wait_for=None): @@ -582,8 +581,6 @@ class TargetAssociationWrangler(TreeWranglerBase): wait_for.extend(min_dist_to_center.events) - logger.info("target association: finding centers for targets") - evt = knl( *unwrap_args( tree, peer_lists, @@ -613,9 +610,9 @@ class TargetAssociationWrangler(TreeWranglerBase): .format(ntargets_associated)) cl.wait_for_events([evt]) - logger.info("target association: done finding centers for targets") return + @log_process(logger) def mark_panels_for_refinement(self, tree, peer_lists, lpot_source, target_status, refine_flags, debug, wait_for=None): @@ -653,8 +650,6 @@ class TargetAssociationWrangler(TreeWranglerBase): wait_for=wait_for) wait_for = [evt] - logger.info("target association: marking panels for refinement") - evt = knl( *unwrap_args( tree, peer_lists, @@ -684,8 +679,6 @@ class TargetAssociationWrangler(TreeWranglerBase): cl.wait_for_events([evt]) - logger.info("target association: done marking panels for refinement") - return (found_panel_to_refine == 1).all().get() def make_target_flags(self, target_discrs_and_qbx_sides): diff --git a/pytential/qbx/utils.py b/pytential/qbx/utils.py index 84626479..b0ffc066 100644 --- a/pytential/qbx/utils.py +++ b/pytential/qbx/utils.py @@ -37,6 +37,8 @@ from loopy.version import MOST_RECENT_LANGUAGE_VERSION import logging logger = logging.getLogger(__name__) +from pytools import log_process + # {{{ c and mako snippets @@ -443,6 +445,7 @@ class TreeWithQBXMetadata(Tree): MAX_REFINE_WEIGHT = 64 +@log_process(logger) def build_tree_with_qbx_metadata( queue, tree_builder, particle_list_filter, lpot_source, targets_list=(), use_stage2_discr=False): @@ -472,8 +475,6 @@ def build_tree_with_qbx_metadata( # - then panels (=centers of mass) # - then targets - logger.info("start building tree with qbx metadata") - sources = ( lpot_source.density_discr.nodes() if not use_stage2_discr @@ -585,8 +586,6 @@ def build_tree_with_qbx_metadata( tree_attrs.update(particle_classes) - logger.info("done building tree with qbx metadata") - return TreeWithQBXMetadata( qbx_panel_to_source_starts=qbx_panel_to_source_starts, qbx_panel_to_center_starts=qbx_panel_to_center_starts, -- GitLab From a061bc7b5415989e7ce2e41d3699908a00f6e062 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Fri, 20 Apr 2018 19:03:31 -0500 Subject: [PATCH 41/41] Remove another stray logging statement --- pytential/qbx/fmm.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/pytential/qbx/fmm.py b/pytential/qbx/fmm.py index ede7ede5..d3622e59 100644 --- a/pytential/qbx/fmm.py +++ b/pytential/qbx/fmm.py @@ -494,8 +494,6 @@ def drive_fmm(expansion_wrangler, src_weights): # {{{ reorder potentials - logger.info("reorder potentials") - nqbtl = geo_data.non_qbx_box_target_lists() all_potentials_in_tree_order = wrangler.full_output_zeros() -- GitLab
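
For reference, the pytools logging helpers that the last two patches switch to
are used as in the following sketch. This is illustrative only (the stub
functions are hypothetical); it mirrors the ``@log_process`` decorator and the
``ProcessLogger`` usage visible in the diffs above::

    import logging

    from pytools import ProcessLogger, log_process

    logger = logging.getLogger(__name__)


    @log_process(logger)
    def form_multipoles_stub():
        # Stand-in for an expensive FMM stage: the decorator logs entry and
        # completion (including elapsed time) under the given logger.
        return sum(i*i for i in range(10**6))


    def drive_fmm_stub():
        # ProcessLogger brackets an explicit region; .done() marks completion,
        # mirroring the fmm_proc = ProcessLogger(logger, "qbx fmm") pattern.
        fmm_proc = ProcessLogger(logger, "qbx fmm stub")
        form_multipoles_stub()
        fmm_proc.done()

        # It can also be used as a context manager, as in the refinement
        # wrangler's "refine mesh" block.
        with ProcessLogger(logger, "postprocessing stub"):
            pass


    if __name__ == "__main__":
        logging.basicConfig(level=logging.INFO)
        drive_fmm_stub()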