diff --git a/pytential/muller.py b/pytential/muller.py index 22fe5458e895236e3ec0fcc6ab7634290e0a84ed..5fc6a82378af0e8c86ac5858187014399cf8c42d 100644 --- a/pytential/muller.py +++ b/pytential/muller.py @@ -25,7 +25,7 @@ THE SOFTWARE. import numpy as np -def muller_deflate(f, n, maxiter=100, eps=1e-14): +def muller_deflate(f, n, maxiter=100, eps=1e-14, z_start=None): """ :arg n: number of zeros sought :returns: (roots, niter) - roots of the given function; number of @@ -52,7 +52,7 @@ def muller_deflate(f, n, maxiter=100, eps=1e-14): niter.append(niter0) while (np.isnan(roots[i]) or niter[i] == maxiter) and miter < 50: - roots0, niter0 = muller(f_deflated, maxiter, eps) + roots0, niter0 = muller(f_deflated, maxiter, eps, z_start=z_start) roots[i] = roots0 niter[i] = niter0 miter = miter+1 diff --git a/pytential/qbx/__init__.py b/pytential/qbx/__init__.py index 9c9b4ea399f56895c10b80003752c56f6fe95e98..0d058c820944180ed27bea27579051cb71fc9b5b 100644 --- a/pytential/qbx/__init__.py +++ b/pytential/qbx/__init__.py @@ -25,7 +25,6 @@ THE SOFTWARE. import six -import loopy as lp import numpy as np from pytools import memoize_method from meshmode.discretization import Discretization @@ -45,45 +44,6 @@ __doc__ = """ """ -# {{{ jump term interface helper - -class _JumpTermArgumentProvider(object): - def __init__(self, discr, density, ds_direction, side=None): - self.discr = discr - self.density = density - self.ds_direction = ds_direction - self.side = side - - @property - def normal(self): - return self.discr.curve.normals.reshape(2, -1).T - - @property - def tangent(self): - return self.discr.curve.tangents.reshape(2, -1).T - - @property - def src_derivative_dir(self): - return self.ds_direction - - @property - def mean_curvature(self): - return self.discr.curve.curvature.reshape(-1) - - @property - def density_0(self): - return self.density.reshape(-1) - - @property - @memoize_method - def density_0_prime(self): - diff_mat = self.discr.curve.expansion.get_differentiation_matrix() - return (2 * np.dot(diff_mat, self.density.T).T.reshape(-1) - / self.discr.curve.speed.reshape(-1)) - -# }}} - - class LayerPotentialSource(PotentialSource): pass @@ -128,13 +88,14 @@ class QBXLayerPotentialSource(LayerPotentialSource): .. attribute :: qbx_order .. attribute :: fmm_order .. attribute :: cl_context - .. automethod :: centers - .. automethod :: panel_sizes .. automethod :: weights_and_area_elements .. automethod :: with_refinement See :ref:`qbxguts` for some information on the inner workings of this. 
""" + + # {{{ constructor / copy + def __init__(self, density_discr, fine_order, qbx_order=None, fmm_order=None, @@ -146,6 +107,7 @@ class QBXLayerPotentialSource(LayerPotentialSource): # FIXME default debug=False once everything works debug=True, refined_for_global_qbx=False, + expansion_disks_in_tree_have_extent=False, performance_data_file=None): """ :arg fine_order: The total degree to which the (upsampled) @@ -184,6 +146,8 @@ class QBXLayerPotentialSource(LayerPotentialSource): self.debug = debug self.refined_for_global_qbx = refined_for_global_qbx + self.expansion_disks_in_tree_have_extent = \ + expansion_disks_in_tree_have_extent self.performance_data_file = performance_data_file def copy( @@ -216,12 +180,17 @@ class QBXLayerPotentialSource(LayerPotentialSource): refined_for_global_qbx=( refined_for_global_qbx if refined_for_global_qbx is not None else self.refined_for_global_qbx), + expansion_disks_in_tree_have_extent=( + self.expansion_disks_in_tree_have_extent), performance_data_file=self.performance_data_file) + # }}} + @property def base_fine_density_discr(self): - """The fine density discretization before upsampling is applied. + """The refined, interpolation-focused density discretization (no oversampling). """ + # FIXME: Maybe rename refined_interp_density_discr return (self._base_resampler.to_discr if self._base_resampler is not None else self.density_discr) @@ -229,6 +198,9 @@ class QBXLayerPotentialSource(LayerPotentialSource): @property @memoize_method def fine_density_discr(self): + """The refined, quadrature-focused density discretization (with upsampling). + """ + # FIXME: Maybe rename refined_quad_density_discr from meshmode.discretization.poly_element import ( QuadratureSimplexGroupFactory) @@ -251,22 +223,6 @@ class QBXLayerPotentialSource(LayerPotentialSource): return conn - def el_view(self, discr, group_nr, global_array): - """Return a view of *global_array* of shape - ``(..., discr.groups[group_nr].nelements)`` - where *global_array* is of shape ``(..., nelements)``, - where *nelements* is the global (per-discretization) node count. - """ - - group = discr.groups[group_nr] - el_nr_base = sum(group.nelements for group in discr.groups[:group_nr]) - - return global_array[ - ..., el_nr_base:el_nr_base + group.nelements] \ - .reshape( - global_array.shape[:-1] - + (group.nelements,)) - @property @memoize_method def refiner_code_container(self): @@ -305,7 +261,7 @@ class QBXLayerPotentialSource(LayerPotentialSource): @memoize_method def h_max(self): with cl.CommandQueue(self.cl_context) as queue: - panel_sizes = self.panel_sizes("npanels").with_queue(queue) + panel_sizes = self._panel_sizes("npanels").with_queue(queue) return np.asscalar(cl.array.max(panel_sizes).get()) @property @@ -328,121 +284,72 @@ class QBXLayerPotentialSource(LayerPotentialSource): def complex_dtype(self): return self.density_discr.complex_dtype - def _centers_of_mass_for_discr(self, discr): - knl = lp.make_kernel( - """{[dim,k,i]: - 0<=dim targets lookup table: done") @@ -927,31 +718,6 @@ class QBXFMMGeometryData(object): return result - @memoize_method - def global_qbx_centers_box_target_lists(self): - """Build a list of targets per box consisting only of global QBX centers. - Returns a :class:`boxtree.tree.FilteredTargetListsInUserOrder`. - (I.e. no new target order is created for these targets, as we expect - there to be (relatively) not many of them.) 
- - |cached| - """ - - center_info = self.center_info() - with cl.CommandQueue(self.cl_context) as queue: - logger.info("find global qbx centers box target list: start") - - flags = cl.array.zeros(queue, self.tree().ntargets, np.int8) - - flags[:center_info.ncenters] = self.global_qbx_flags() - - from boxtree.tree import filter_target_lists_in_user_order - result = filter_target_lists_in_user_order(queue, self.tree(), flags) - - logger.info("find global qbx centers box target list: done") - - return result.with_queue(None) - @memoize_method def non_qbx_box_target_lists(self): """Build a list of targets per box that don't need to bother with QBX. @@ -973,7 +739,7 @@ class QBXFMMGeometryData(object): # 'flags' is in user order, and should be. - nqbx_centers = self.center_info().ncenters + nqbx_centers = self.ncenters flags[:nqbx_centers] = 0 from boxtree.tree import filter_target_lists_in_tree_order @@ -1012,22 +778,22 @@ class QBXFMMGeometryData(object): # {{{ draw centers and circles - center_info = self.center_info() + centers = self.centers() centers = [ - center_info.centers[0].get(queue), - center_info.centers[1].get(queue)] + centers[0].get(queue), + centers[1].get(queue)] pt.plot(centers[0][global_flags == 0], centers[1][global_flags == 0], "oc", label="centers needing local qbx") ax = pt.gca() - for icenter, (cx, cy, r) in enumerate(zip( - centers[0], centers[1], center_info.radii.get(queue))): - ax.add_artist( - pt.Circle((cx, cy), r, fill=False, ls="dotted", lw=1)) - pt.text(cx, cy, - str(icenter), fontsize=8, - ha="left", va="center", - bbox=dict(facecolor='white', alpha=0.5, lw=0)) + # for icenter, (cx, cy, r) in enumerate(zip( + # centers[0], centers[1], center_info.radii.get(queue))): + # ax.add_artist( + # pt.Circle((cx, cy), r, fill=False, ls="dotted", lw=1)) + # pt.text(cx, cy, + # str(icenter), fontsize=8, + # ha="left", va="center", + # bbox=dict(facecolor='white', alpha=0.5, lw=0)) # }}} @@ -1054,9 +820,9 @@ class QBXFMMGeometryData(object): tccount = 0 checked = 0 for tx, ty, tcenter in zip( - targets[0][center_info.ncenters:], - targets[1][center_info.ncenters:], - ttc[center_info.ncenters:]): + targets[0][self.ncenters:], + targets[1][self.ncenters:], + ttc[self.ncenters:]): checked += 1 if tcenter >= 0: tccount += 1 diff --git a/pytential/qbx/refinement.py b/pytential/qbx/refinement.py index 9ae54bb681bc96f98c31a2027b2b466868094531..8efd9a8800f614f9d54a5d30285f108c42072ba1 100644 --- a/pytential/qbx/refinement.py +++ b/pytential/qbx/refinement.py @@ -270,6 +270,8 @@ class RefinerWrangler(object): self.code_container = code_container self.queue = queue + # {{{ check subroutines for conditions 1-3 + def check_expansion_disks_undisturbed_by_sources(self, lpot_source, tree, peer_lists, refine_flags, debug, wait_for=None): @@ -295,9 +297,9 @@ class RefinerWrangler(object): found_panel_to_refine.finish() unwrap_args = AreaQueryElementwiseTemplate.unwrap_args - center_danger_zone_radii_by_panel = ( - lpot_source.panel_sizes("npanels") - .with_queue(self.queue) / 2) + # FIXME: This shouldn't be by panel + center_danger_zone_radii_by_panel = \ + lpot_source._expansion_radii("npanels") evt = knl( *unwrap_args( @@ -354,7 +356,7 @@ class RefinerWrangler(object): found_panel_to_refine.finish() source_danger_zone_radii_by_panel = ( - lpot_source.fine_panel_sizes("npanels") + lpot_source._fine_panel_sizes("npanels") .with_queue(self.queue) / 4) unwrap_args = AreaQueryElementwiseTemplate.unwrap_args @@ -398,7 +400,7 @@ class RefinerWrangler(object): npanels_to_refine_prev = 
cl.array.sum(refine_flags).get() evt, out = knl(self.queue, - panel_sizes=lpot_source.panel_sizes("npanels"), + panel_sizes=lpot_source._panel_sizes("npanels"), refine_flags=refine_flags, refine_flags_updated=np.array(0), kernel_length_scale=np.array(kernel_length_scale), @@ -416,6 +418,8 @@ class RefinerWrangler(object): return (out["refine_flags_updated"].get() == 1).all() + # }}} + def build_tree(self, lpot_source, use_base_fine_discr=False): tb = self.code_container.tree_builder() from pytential.qbx.utils import build_tree_with_qbx_metadata @@ -470,6 +474,8 @@ def make_empty_refine_flags(queue, lpot_source, use_base_fine_discr=False): return result +# {{{ main entry point + def refine_for_global_qbx(lpot_source, code_container, group_factory, fine_group_factory, kernel_length_scale=None, # FIXME: Set debug=False once everything works. @@ -621,6 +627,7 @@ def refine_for_global_qbx(lpot_source, code_container, lpot_source = lpot_source.copy(debug=debug, refined_for_global_qbx=True) if len(connections) == 0: + # FIXME: This is inefficient connection = make_same_mesh_connection( lpot_source.density_discr, lpot_source.density_discr) @@ -629,5 +636,7 @@ def refine_for_global_qbx(lpot_source, code_container, return lpot_source, connection +# }}} + # vim: foldmethod=marker:filetype=pyopencl diff --git a/pytential/qbx/target_assoc.py b/pytential/qbx/target_assoc.py index b5f6b153e2e340f8e4f2315be2543017a469fe2f..4255d865f753da087e1cca1e7942248d14b684b0 100644 --- a/pytential/qbx/target_assoc.py +++ b/pytential/qbx/target_assoc.py @@ -128,7 +128,7 @@ QBX_TARGET_MARKER = AreaQueryElementwiseTemplate( particle_id_t source_offset, particle_id_t target_offset, particle_id_t *sorted_target_ids, - coord_t *panel_sizes, + coord_t *tunnel_radius_by_source, coord_t *box_to_search_dist, /* output */ @@ -140,7 +140,7 @@ QBX_TARGET_MARKER = AreaQueryElementwiseTemplate( coord_t *particles_${ax}, %endfor """, - ball_center_and_radius_expr=QBX_TREE_C_PREAMBLE + QBX_TREE_MAKO_DEFS + r""" + ball_center_and_radius_expr=QBX_TREE_C_PREAMBLE + QBX_TREE_MAKO_DEFS + r"""//CL// coord_vec_t tgt_coords; ${load_particle("INDEX_FOR_TARGET_PARTICLE(i)", "tgt_coords")} { @@ -149,7 +149,7 @@ QBX_TARGET_MARKER = AreaQueryElementwiseTemplate( ${ball_radius} = box_to_search_dist[my_box]; } """, - leaf_found_op=QBX_TREE_MAKO_DEFS + r""" + leaf_found_op=QBX_TREE_MAKO_DEFS + r"""//CL// for (particle_id_t source_idx = box_to_source_starts[${leaf_box_id}]; source_idx < box_to_source_starts[${leaf_box_id} + 1]; ++source_idx) @@ -158,7 +158,8 @@ QBX_TARGET_MARKER = AreaQueryElementwiseTemplate( coord_vec_t source_coords; ${load_particle("INDEX_FOR_SOURCE_PARTICLE(source)", "source_coords")} - if (distance(source_coords, tgt_coords) <= panel_sizes[source] / 2) + if (distance(source_coords, tgt_coords) + <= tunnel_radius_by_source[source]) { target_status[i] = MARKED_QBX_CENTER_PENDING; *found_target_close_to_panel = 1; @@ -177,9 +178,8 @@ QBX_CENTER_FINDER = AreaQueryElementwiseTemplate( particle_id_t center_offset, particle_id_t target_offset, particle_id_t *sorted_target_ids, - coord_t *panel_sizes, + coord_t *expansion_radii_by_center_with_stick_out, coord_t *box_to_search_dist, - coord_t stick_out_factor, int *target_flags, /* input/output */ @@ -222,7 +222,7 @@ QBX_CENTER_FINDER = AreaQueryElementwiseTemplate( coord_t my_dist_to_center = distance(tgt_coords, center_coords); if (my_dist_to_center - <= (panel_sizes[center] / 2) * (1 + stick_out_factor) + <= expansion_radii_by_center_with_stick_out[center] && my_dist_to_center < 
min_dist_to_center[i]) { target_status[i] = MARKED_QBX_CENTER_FOUND; @@ -245,7 +245,7 @@ QBX_FAILED_TARGET_ASSOCIATION_REFINER = AreaQueryElementwiseTemplate( particle_id_t target_offset, int npanels, particle_id_t *sorted_target_ids, - coord_t *panel_sizes, + coord_t *tunnel_radius_by_source, int *target_status, coord_t *box_to_search_dist, @@ -258,7 +258,7 @@ QBX_FAILED_TARGET_ASSOCIATION_REFINER = AreaQueryElementwiseTemplate( coord_t *particles_${ax}, %endfor """, - ball_center_and_radius_expr=QBX_TREE_C_PREAMBLE + QBX_TREE_MAKO_DEFS + r""" + ball_center_and_radius_expr=QBX_TREE_C_PREAMBLE + QBX_TREE_MAKO_DEFS + r"""//CL// coord_vec_t target_coords; ${load_particle("INDEX_FOR_TARGET_PARTICLE(i)", "target_coords")} { @@ -267,7 +267,7 @@ QBX_FAILED_TARGET_ASSOCIATION_REFINER = AreaQueryElementwiseTemplate( ${ball_radius} = box_to_search_dist[my_box]; } """, - leaf_found_op=QBX_TREE_MAKO_DEFS + r""" + leaf_found_op=QBX_TREE_MAKO_DEFS + r"""//CL// for (particle_id_t source_idx = box_to_source_starts[${leaf_box_id}]; source_idx < box_to_source_starts[${leaf_box_id} + 1]; ++source_idx) @@ -279,7 +279,7 @@ QBX_FAILED_TARGET_ASSOCIATION_REFINER = AreaQueryElementwiseTemplate( bool is_close = distance(target_coords, source_coords) - <= panel_sizes[source] / 2; + <= tunnel_radius_by_source[source]; if (is_close && target_status[i] == MARKED_QBX_CENTER_PENDING) { @@ -398,19 +398,37 @@ class QBXTargetAssociator(object): # Perform a space invader query over the sources. source_slice = tree.user_source_ids[tree.qbx_user_source_slice] sources = [axis.with_queue(queue)[source_slice] for axis in tree.sources] - panel_sizes = lpot_source.panel_sizes("nsources").with_queue(queue) + tunnel_radius_by_source = \ + lpot_source._close_target_tunnel_radius("nsources").with_queue(queue) + + # Target-marking algorithm (TGTMARK): + # + # (1) Use a space invader query to tag each leaf box that intersects with the + # "near-source-detection tunnel" with the distance to the closest source. + # + # (2) Do an area query around all targets with the radius resulting + # from the space invader query, enumerate sources in that vicinity. + # If a source is found whose distance to the target is less than the + # source's tunnel radius, mark that target as pending. + # (or below: mark the source for refinement) + + # Note that this comment is referred to below by "TGTMARK". If you + # remove this comment or change the algorithm here, make sure that + # the reference below is still accurate. 
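+        # In QBX_TARGET_MARKER above, a target is flagged as
+        # MARKED_QBX_CENTER_PENDING as soon as some source satisfies
+        # distance(source, target) <= tunnel_radius_by_source[source];
+        # box_to_search_dist supplies the per-box ball radius that the
+        # area query uses to find those sources.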
box_to_search_dist, evt = self.space_invader_query( queue, tree, sources, - panel_sizes / 2, + tunnel_radius_by_source, peer_lists, wait_for=wait_for) wait_for = [evt] logger.info("target association: marking targets close to panels") + tunnel_radius_by_source = lpot_source._close_target_tunnel_radius("nsources") + evt = knl( *unwrap_args( tree, peer_lists, @@ -419,7 +437,7 @@ class QBXTargetAssociator(object): tree.qbx_user_source_slice.start, tree.qbx_user_target_slice.start, tree.sorted_target_ids, - panel_sizes, + tunnel_radius_by_source, box_to_search_dist, target_status, found_target_close_to_panel, @@ -463,13 +481,22 @@ class QBXTargetAssociator(object): center_slice = \ tree.sorted_target_ids[tree.qbx_user_center_slice].with_queue(queue) centers = [axis.with_queue(queue)[center_slice] for axis in tree.sources] - panel_sizes = lpot_source.panel_sizes("ncenters").with_queue(queue) + expansion_radii_by_center = \ + lpot_source._expansion_radii("ncenters").with_queue(queue) + expansion_radii_by_center_with_stick_out = \ + expansion_radii_by_center * (1 + stick_out_factor) + + # Idea: + # + # (1) Tag leaf boxes around centers with max distance to usable center. + # (2) Area query from targets with those radii to find closest eligible + # center. box_to_search_dist, evt = self.space_invader_query( queue, tree, centers, - panel_sizes * ((1 + stick_out_factor) / 2), + expansion_radii_by_center_with_stick_out, peer_lists, wait_for=wait_for) wait_for = [evt] @@ -490,9 +517,8 @@ class QBXTargetAssociator(object): tree.qbx_user_center_slice.start, tree.qbx_user_target_slice.start, tree.sorted_target_ids, - panel_sizes, + expansion_radii_by_center_with_stick_out, box_to_search_dist, - stick_out_factor, target_flags, target_status, target_assoc.target_to_center, @@ -535,13 +561,16 @@ class QBXTargetAssociator(object): # Perform a space invader query over the sources. source_slice = tree.user_source_ids[tree.qbx_user_source_slice] sources = [axis.with_queue(queue)[source_slice] for axis in tree.sources] - panel_sizes = lpot_source.panel_sizes("nsources").with_queue(queue) + tunnel_radius_by_source = \ + lpot_source._close_target_tunnel_radius("nsources").with_queue(queue) + + # See (TGTMARK) above for algorithm. box_to_search_dist, evt = self.space_invader_query( queue, tree, sources, - panel_sizes / 2, + tunnel_radius_by_source, peer_lists, wait_for=wait_for) wait_for = [evt] @@ -558,7 +587,7 @@ class QBXTargetAssociator(object): tree.qbx_user_target_slice.start, tree.nqbxpanels, tree.sorted_target_ids, - lpot_source.panel_sizes("nsources"), + lpot_source._close_target_tunnel_radius("nsources"), target_status, box_to_search_dist, refine_flags, diff --git a/pytential/qbx/utils.py b/pytential/qbx/utils.py index 8b363250d644bfaebb65d738208ebab057c677aa..02a19e007eae86e90066224f97b0b261a02a7e6a 100644 --- a/pytential/qbx/utils.py +++ b/pytential/qbx/utils.py @@ -99,8 +99,8 @@ def get_interleaved_centers(queue, lpot_source): next to corresponding exterior centers. 
""" knl = get_interleaver_kernel(lpot_source.density_discr.real_dtype) - int_centers = lpot_source.centers(-1) - ext_centers = lpot_source.centers(+1) + int_centers = get_centers_on_side(lpot_source, -1) + ext_centers = get_centers_on_side(lpot_source, +1) result = [] wait_for = [] @@ -118,6 +118,160 @@ def get_interleaved_centers(queue, lpot_source): # }}} +# {{{ make interleaved radii + +def get_interleaved_radii(queue, lpot_source): + """ + Return an array of shape (dim, ncenters) in which interior centers are placed + next to corresponding exterior centers. + """ + knl = get_interleaver_kernel(lpot_source.density_discr.real_dtype) + radii = lpot_source._expansion_radii("nsources") + + result = cl.array.empty(queue, len(radii) * 2, radii.dtype) + evt, _ = knl(queue, src1=radii, src2=radii, dst=result) + evt.wait() + + return result + +# }}} + + +# {{{ panel sizes + +def panel_sizes(discr, last_dim_length): + if last_dim_length not in ("nsources", "ncenters", "npanels"): + raise ValueError( + "invalid value of last_dim_length: %s" % last_dim_length) + + # To get the panel size this does the equivalent of (∫ 1 ds)**(1/dim). + # FIXME: Kernel optimizations + + if last_dim_length == "nsources" or last_dim_length == "ncenters": + knl = lp.make_kernel( + "{[i,j,k]: 0<=i 0 _, (mat,) = mat_gen(self.queue, target_discr.nodes(), source.fine_density_discr.nodes(), - source.centers(expr.qbx_forced_limit), + get_centers_on_side(source, expr.qbx_forced_limit), **kernel_args) mat = mat.get() diff --git a/pytential/symbolic/pde/maxwell/waveguide.py.orig b/pytential/symbolic/pde/maxwell/waveguide.py.orig new file mode 100644 index 0000000000000000000000000000000000000000..4b5e192f698d55c36240305f3f3d7bc810669417 --- /dev/null +++ b/pytential/symbolic/pde/maxwell/waveguide.py.orig @@ -0,0 +1,989 @@ +from __future__ import division, absolute_import, print_function + +__copyright__ = "Copyright (C) 2013-2016 Andreas Kloeckner" + +__license__ = """ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. +""" + + +__doc__ = """ +Second-Kind Waveguide +^^^^^^^^^^^^^^^^^^^^^ + +.. autoclass:: SecondKindInfZMuellerOperator + +2D Dielectric (old-style) +^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. autoclass:: DielectricSRep2DBoundaryOperator +.. 
autoclass:: DielectricSDRep2DBoundaryOperator +""" + +import numpy as np +from collections import namedtuple +from six.moves import range + +from pytential import sym +from pytools import memoize_method +from pytential.symbolic.pde.scalar import L2WeightedPDEOperator + + +# {{{ second-kind infinite-z Mueller operator + +class SecondKindInfZMuellerOperator(L2WeightedPDEOperator): + """ + Second-kind IE representation by + `Lai and Jiang `_. + """ + + def __init__(self, domain_n_exprs, ne, + interfaces, use_l2_weighting=None): + """ + :attr interfaces: a tuple of tuples + ``(outer_domain, inner_domain, interface_id)``, + where *outer_domain* and *inner_domain* are indices into + *domain_k_names*, + and *interface_id* is a symbolic name for the discretization of the + interface. 'outer' designates the side of the interface to which + the normal points. + :attr domain_n_exprs: a tuple of variable names of the Helmholtz + parameter *k*, to be used inside each part of the source geometry. + May also be a tuple of strings, which will be transformed into + variable references of the corresponding names. + :attr beta: A symbolic expression for the wave number in the :math:`z` + direction. May be a string, which will be interpreted as a variable + name. + """ + + self.interfaces = interfaces + + ne = sym.var(ne) + self.ne = sym.cse(ne, "ne") + + self.domain_n_exprs = [ + sym.var(n_expr) + for idom, n_expr in enumerate(domain_n_exprs)] + del domain_n_exprs + + import pymbolic.primitives as p + + def upper_half_square_root(x): + return p.If( + p.Comparison( + (x**0.5).a.imag, + "<", 0), + 1j*(-x)**0.5, + x**0.5) + + self.domain_K_exprs = [ + sym.cse( + upper_half_square_root(n_expr**2-ne**2), + "K%d" % i) + for i, n_expr in enumerate(self.domain_n_exprs)] + + from sumpy.kernel import HelmholtzKernel + self.kernel = HelmholtzKernel(2, allow_evanescent=True) + + def make_unknown(self, name): + return sym.make_sym_vector(name, len(self.unknown_index)) + + @property + @memoize_method + def unknown_index(self): + result = {} + for i in range(len(self.interfaces)): + for current in ["j", "m"]: + for cur_dir in ["z", "t"]: + result[(current + cur_dir, i)] = len(result) + + return result + + def tangent(self, where): + return sym.cse( + (sym.pseudoscalar(2, 1, where) + / sym.area_element(2, 1, where)), + "tangent") + + def S(self, dom_idx, density, qbx_forced_limit=+1): # noqa + return sym.S( + self.kernel, density, + k=self.domain_K_exprs[dom_idx], + qbx_forced_limit=qbx_forced_limit) + + def D(self, dom_idx, density, qbx_forced_limit="avg"): # noqa + return sym.D( + self.kernel, density, + k=self.domain_K_exprs[dom_idx], + qbx_forced_limit=qbx_forced_limit) + + def T(self, where, dom_idx, density): # noqa + return sym.int_g_dsource( + 2, + self.tangent(where), + self.kernel, + density, + k=self.domain_K_exprs[dom_idx], + # ???? + qbx_forced_limit=1).xproject(0) + + def representation(self, domain_idx, unknown): + """"Return a tuple of vectors [Ex, Ey, Ez] and [Hx, Hy, Hz] + representing the solution to Maxwell's equation on domain + *domain_idx*. 
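+
+        The entries of the returned array are intended to be ordered
+        (Ex, Ey, Ez, Hx, Hy, Hz), matching the per-component comments below.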
+ """ + result = np.zeros(4, dtype=object) + + unk_idx = self.unknown_index + k_v = self.k_vacuum + + for i in range(len(self.interfaces)): + dom0i, dom1i, where = self.interfaces[i] + + if domain_idx == dom0i: + domi = dom0i + elif domain_idx == dom1i: + domi = dom1i + else: + # Interface does not border the requested domain + continue + + beta = self.ne*k_v + k = sym.cse(k_v * self.domain_n_exprs[domi], + "k%d" % domi) + + jt = unknown[unk_idx["jz", i]] + jz = unknown[unk_idx["jt", i]] + mt = unknown[unk_idx["mz", i]] + mz = unknown[unk_idx["mt", i]] + + def diff_axis(iaxis, operand): + v = np.array(2, dtype=np.float64) + v[iaxis] = 1 + d = sym.Derivative() + return d.resolve( + (sym.MultiVector(v).scalar_product(d.dnabla(2))) + * d(operand)) + + from functools import partial + dx = partial(diff_axis, 1) + dy = partial(diff_axis, 1) + + tangent = self.tangent(where) + tau1 = tangent[0] + tau2 = tangent[1] + + S = self.S # noqa + D = self.D # noqa + T = self.T # noqa + + # Ex + result[0] += ( + -1/(1j*k_v) * dx(T(where, domi, jt)) + + beta/k_v * dx(S(domi, jz)) + + k**2/(1j*k_v) * S(domi, jt * tau1) + - dy(S(domi, mz)) + + 1j*beta * S(domi, mt*tau2) + ) + + # Ey + result[1] += ( + - 1/(1j*k_v) * dy(T(where, domi, jt)) + + beta/k_v * dy(S(domi, jz)) + + k**2/(1j*k_v) * S(domi, jt * tau2) + + dx(S(domi, mz)) + - 1j*beta*S(domi, mt * tau1) + ) + + # Ez + result[2] += ( + - beta/k_v * T(where, domi, jt) + + (k**2 - beta**2)/(1j*k_v) * S(domi, jz) + + D(domi, mt) + ) + + # Hx + result[3] += ( + 1/(1j*k_v) * dx(T(where, domi, mt)) + - beta/k_v * dx(S(domi, mz)) + - k**2/(1j*k_v) * S(domi, mt*tau1) + - k**2/k_v**2 * dy(S(domi, jz)) + + 1j * beta * k**2/k_v**2 * S(domi, jt*tau2) + ) + + # Hy + result[4] += ( + 1/(1j*k_v) * dy(T(where, domi, mt)) + - beta/k_v * dy(S(domi, mz)) + - k**2/(1j*k_v) * S(domi, mt * tau2) + + k**2/k_v**2 * dx(S(domi, jz)) + - 1j*beta * k**2/k_v**2 * S(domi, jt*tau1) + ) + + # Hz + result[5] += ( + beta/k_v * T(where, domi, mt) + - (k**2 - beta**2)/(1j*k_v) * S(domi, mz) + + k**2/k_v**2 * D(domi, jt) + ) + + return result + + def operator(self, unknown): + result = np.zeros(4*len(self.interfaces), dtype=object) + + unk_idx = self.unknown_index + + for i in range(len(self.interfaces)): + idx_jt = unk_idx["jz", i] + idx_jz = unk_idx["jt", i] + idx_mt = unk_idx["mz", i] + idx_mz = unk_idx["mt", i] + + phi1 = unknown[idx_jt] + phi2 = unknown[idx_jz] + phi3 = unknown[idx_mt] + phi4 = unknown[idx_mz] + + ne = self.ne + + dom0i, dom1i, where = self.interfaces[i] + + tangent = self.tangent(where) + normal = sym.cse( + sym.normal(2, 1, where), + "normal") + + S = self.S # noqa + D = self.D # noqa + T = self.T # noqa + + def Tt(where, dom, density): # noqa + return sym.tangential_derivative( + 2, T(where, dom, density)).xproject(0) + + def Sn(dom, density): # noqa + return sym.normal_derivative( + 2, + S(dom, density, + qbx_forced_limit="avg")) + + def St(dom, density): # noqa + return sym.tangential_derivative(2, S(dom, density)).xproject(0) + + n0 = self.domain_n_exprs[dom0i] + n1 = self.domain_n_exprs[dom1i] + + a11 = sym.cse(n0**2 * D(dom0i, phi1) - n1**2 * D(dom1i, phi1), "a11") + a22 = sym.cse(-n0**2 * Sn(dom0i, phi2) + n1**2 * Sn(dom1i, phi2), "a22") + a33 = sym.cse(D(dom0i, phi3)-D(dom1i, phi3), "a33") + a44 = sym.cse(-Sn(dom0i, phi4) + Sn(dom1i, phi4), "a44") + + a21 = sym.cse(-1j * ne * ( + n0**2 * tangent.scalar_product( + S(dom0i, normal * phi1)) + - n1**2 * tangent.scalar_product( + S(dom1i, normal * phi1))), "a21") + + a43 = sym.cse(-1j * ne * ( + 
tangent.scalar_product( + S(dom0i, normal * phi3)) + - tangent.scalar_product( + S(dom1i, normal * phi3))), "a43") + + a13 = +1*sym.cse( + ne*(T(where, dom0i, phi3) - T(where, dom1i, phi3)), "a13") + a31 = -1*sym.cse( + ne*(T(where, dom0i, phi1) - T(where, dom1i, phi1)), "a31") + + a24 = +1*sym.cse(ne*(St(dom0i, phi4) - St(dom1i, phi4)), "a24") + a42 = -1*sym.cse(ne*(St(dom0i, phi2) - St(dom1i, phi2)), "a42") + + a14 = sym.cse(1j*( + (n0**2 - ne**2) * S(dom0i, phi4) + - (n1**2 - ne**2) * S(dom1i, phi4) + ), "a14") + a32 = -sym.cse(1j*( + (n0**2 - ne**2) * S(dom0i, phi2) + - (n1**2 - ne**2) * S(dom1i, phi2) + ), "a32") + + def a23_expr(phi): + return ( + 1j * (Tt(where, dom0i, phi) - Tt(where, dom1i, phi)) + - 1j * ( + n0**2 * tangent.scalar_product( +<<<<<<< HEAD + S(dom0i, normal * phi)) + - n1**2 * tangent.scalar_product( + S(dom1i, normal * phi)))) +======= + S(0, tangent * phi)) + - n1**2 * tangent.scalar_product( + S(1, tangent * phi)))) +>>>>>>> 9af8f2bde0070ee8488c6bc58c8ece5114004a71 + + a23 = +1*sym.cse(a23_expr(phi3), "a23") + a41 = -1*sym.cse(a23_expr(phi1), "a41") + + d1 = (n0**2 + n1**2)/2 * phi1 + d2 = (n0**2 + n1**2)/2 * phi2 + d3 = phi3 + d4 = phi4 + + result[idx_jt] += d1 + a11 + 000 + a13 + a14 + result[idx_jz] += d2 + a21 + a22 + a23 + a24 + result[idx_mt] += d3 + a31 + a32 + a33 + 0 + result[idx_mz] += d4 + a41 + a42 + a43 + a44 + + # TODO: L2 weighting + # TODO: Add representation contributions to other boundaries + # abutting the domain + return result + +# }}} + + +# {{{ old-style waveguide + +class Dielectric2DBoundaryOperatorBase(L2WeightedPDEOperator): + r""" + Solves the following system of BVPs on :math:`\mathbb{R}^2`, in which + a disjoint family of domains :math:`\Omega_i` is embedded: + + .. math:: + + \triangle E + (k_i^2-\beta^2) E = 0\quad \text{on $\Omega_i$}\\ + \triangle H + (k_i^2-\beta^2) H = 0\quad \text{on $\Omega_i$}\\ + [H] = 0 \text{ on $\partial \Omega_i$},\quad + [E] = 0 \text{ on $\partial \Omega_i$}\\ + \left[ \frac{k_0}{k^2-\beta^2} \partial_{\hat n}H\right] = 0 + \quad\text{on $\partial \Omega_i$},\quad\\ + \left[ \frac{k_0}{k^2-\beta^2} \partial_{\hat n}E\right] = 0 + \quad\text{on $\partial \Omega_i$} + + :math:`E` and :math:`H` are assumed to be of the form + + .. math:: + + E(x,y,z,t)=E(x,y)e^{i(\beta z-\omega t) + H(x,y,z,t)=H(x,y)e^{i(\beta z-\omega t) + + where :math:`[\cdot]` denotes the jump across an interface, and :math:`k` + (without an index) denotes the value of :math:`k` on either side of the + interface, for the purpose of computing the jump. :math:`\hat n` denotes + the unit normal of the interface. + + .. automethod:: make_unknown + .. automethod:: representation_outer + .. automethod:: representation_inner + .. automethod:: operator + """ + + field_kind_e = 0 + field_kind_h = 1 + field_kinds = [field_kind_e, field_kind_h] + + side_in = 0 + side_out = 1 + sides = [side_in, side_out] + side_to_sign = { + side_in: -1, + side_out: 1, + } + + dir_none = 0 + dir_normal = 1 + dir_tangential = 2 + + BCTermDescriptor = namedtuple("BCDescriptor", + "i_interface direction field_kind coeff_inner coeff_outer".split()) + + # {{{ constructor + + def __init__(self, mode, k_vacuum, domain_k_exprs, beta, + interfaces, use_l2_weighting=None): + """ + :attr mode: one of 'te', 'tm', 'tem' + :attr k_vacuum: A symbolic expression for the wave number in vacuum. + May be a string, which will be interpreted as a variable name. 
+ :attr interfaces: a tuple of tuples + ``(outer_domain, inner_domain, interface_id)``, + where *outer_domain* and *inner_domain* are indices into + *domain_k_names*, + and *interface_id* is a symbolic name for the discretization of the + interface. 'outer' designates the side of the interface to which + the normal points. + :attr domain_k_exprs: a tuple of variable names of the Helmholtz + parameter *k*, to be used inside each part of the source geometry. + May also be a tuple of strings, which will be transformed into + variable references of the corresponding names. + :attr beta: A symbolic expression for the wave number in the :math:`z` + direction. May be a string, which will be interpreted as a variable + name. + """ + + if use_l2_weighting is None: + use_l2_weighting = False + + super(Dielectric2DBoundaryOperatorBase, self).__init__( + use_l2_weighting=use_l2_weighting) + + if mode == "te": + self.ez_enabled = False + self.hz_enabled = True + elif mode == "tm": + self.ez_enabled = True + self.hz_enabled = False + elif mode == "tem": + self.ez_enabled = True + self.hz_enabled = True + else: + raise ValueError("invalid mode '%s'" % mode) + + self.interfaces = interfaces + + fk_e = self.field_kind_e + fk_h = self.field_kind_h + + dir_none = self.dir_none + dir_normal = self.dir_normal + dir_tangential = self.dir_tangential + + if isinstance(beta, str): + beta = sym.var(beta) + beta = sym.cse(beta, "beta") + + if isinstance(k_vacuum, str): + k_vacuum = sym.var(k_vacuum) + k_vacuum = sym.cse(k_vacuum, "k_vac") + + self.domain_k_exprs = [ + sym.var(k_expr) + if isinstance(k_expr, str) + else sym.cse(k_expr, "k%d" % idom) + for idom, k_expr in enumerate(domain_k_exprs)] + del domain_k_exprs + + # Note the case of k/K! + # "K" is the 2D Helmholtz parameter. + # "k" is the 3D Helmholtz parameter. 
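+        # For fields of the form E(x, y) exp(i (beta z - omega t)), the 3D
+        # Helmholtz operator with parameter k reduces to a 2D Helmholtz
+        # operator with parameter K = sqrt(k**2 - beta**2); that is the
+        # quantity stored per domain in domain_K_exprs below.
+        # (E.g. for k = 2.0 and beta = 1.2, K = sqrt(4.0 - 1.44) = 1.6.)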
+ + self.domain_K_exprs = [ + sym.cse((k_expr**2-beta**2)**0.5, "K%d" % i) + for i, k_expr in enumerate(self.domain_k_exprs)] + + from sumpy.kernel import HelmholtzKernel + self.kernel = HelmholtzKernel(2, allow_evanescent=True) + + # {{{ build bc list + + # list of tuples, where each tuple consists of BCTermDescriptor instances + + all_bcs = [] + for i_interface, (outer_domain, inner_domain, _) in ( + enumerate(self.interfaces)): + k_outer = self.domain_k_exprs[outer_domain] + k_inner = self.domain_k_exprs[inner_domain] + + all_bcs += [ + ( # [E] = 0 + self.BCTermDescriptor( + i_interface=i_interface, + direction=dir_none, + field_kind=fk_e, + coeff_outer=1, + coeff_inner=-1), + ), + ( # [H] = 0 + self.BCTermDescriptor( + i_interface=i_interface, + direction=dir_none, + field_kind=fk_h, + coeff_outer=1, + coeff_inner=-1), + ), + ( + self.BCTermDescriptor( + i_interface=i_interface, + direction=dir_tangential, + field_kind=fk_e, + coeff_outer=beta/(k_outer**2-beta**2), + coeff_inner=-beta/(k_inner**2-beta**2)), + self.BCTermDescriptor( + i_interface=i_interface, + direction=dir_normal, + field_kind=fk_h, + coeff_outer=sym.cse(-k_vacuum/(k_outer**2-beta**2)), + coeff_inner=sym.cse(k_vacuum/(k_inner**2-beta**2))), + ), + ( + self.BCTermDescriptor( + i_interface=i_interface, + direction=dir_tangential, + field_kind=fk_h, + coeff_outer=beta/(k_outer**2-beta**2), + coeff_inner=-beta/(k_inner**2-beta**2)), + self.BCTermDescriptor( + i_interface=i_interface, + direction=dir_normal, + field_kind=fk_e, + coeff_outer=sym.cse( + (k_outer**2/k_vacuum)/(k_outer**2-beta**2)), + coeff_inner=sym.cse( + -(k_inner**2/k_vacuum) + / (k_inner**2-beta**2))) + ), + ] + + del k_outer + del k_inner + + self.bcs = [] + for bc in all_bcs: + any_significant_e = any( + term.field_kind == fk_e + and term.direction in [dir_normal, dir_none] + for term in bc) + any_significant_h = any( + term.field_kind == fk_h + and term.direction in [dir_normal, dir_none] + for term in bc) + is_necessary = ( + (self.ez_enabled and any_significant_e) + or + (self.hz_enabled and any_significant_h)) + + # Only keep tangential modes for TEM. Otherwise, + # no jump in H already implies jump condition on + # tangential derivative. 
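+        # Each interface contributes four candidate conditions in all_bcs;
+        # after the filtering below, two survive for a pure TE or TM run and
+        # all four for TEM, which is what the assertion further down checks.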
+ is_tem = self.ez_enabled and self.hz_enabled + terms = tuple( + term + for term in bc + if term.direction != dir_tangential + or is_tem) + + if is_necessary: + self.bcs.append(terms) + + assert (len(all_bcs) + * (int(self.ez_enabled) + int(self.hz_enabled)) // 2 + == len(self.bcs)) + + # }}} + + # }}} + + def is_field_present(self, field_kind): + return ( + (field_kind == self.field_kind_e and self.ez_enabled) + or + (field_kind == self.field_kind_h and self.hz_enabled)) + + def make_unknown(self, name): + num_densities = ( + 2 + * (int(self.ez_enabled) + int(self.hz_enabled)) + * len(self.interfaces)) + + assert num_densities == len(self.bcs) + + return sym.make_sym_vector(name, num_densities) + + def bc_term_to_operator_contrib(self, term, side, raw_potential_op, + density, discrete): + potential_op = raw_potential_op + + side_sign = self.side_to_sign[side] + + domain_outer, domain_inner, interface_id = \ + self.interfaces[term.i_interface] + if side == self.side_in: + K_expr = self.domain_K_exprs[domain_inner] # noqa + bc_coeff = term.coeff_inner + elif side == self.side_out: + K_expr = self.domain_K_exprs[domain_outer] # noqa + bc_coeff = term.coeff_outer + else: + raise ValueError("invalid value of 'side'") + + potential_op = potential_op( + self.kernel, density, source=interface_id, + k=K_expr) + + if term.direction == self.dir_none: + if raw_potential_op is sym.S: + jump_term = 0 + elif raw_potential_op is sym.D: + jump_term = (side_sign*0.5) * discrete + else: + assert False, raw_potential_op + elif term.direction == self.dir_normal: + potential_op = sym.normal_derivative( + potential_op, interface_id) + + if raw_potential_op is sym.S: + # S' + jump_term = (-side_sign*0.5) * discrete + elif raw_potential_op is sym.D: + jump_term = 0 + else: + assert False, raw_potential_op + + elif term.direction == self.dir_tangential: + potential_op = sym.tangential_derivative( + raw_potential_op( + self.kernel, density, source=interface_id, + k=K_expr, qbx_forced_limit=side_sign), + interface_id).a.as_scalar() + + # Some of these may have jumps, but QBX does the dirty + # work here by directly computing the limit. + jump_term = 0 + + else: + raise ValueError("invalid direction") + + potential_op = ( + jump_term + + self.get_sqrt_weight(interface_id)*potential_op) + + del jump_term + + contrib = bc_coeff * potential_op + + if (raw_potential_op is sym.D + and term.direction == self.dir_normal): + # FIXME The hypersingular part should perhaps be + # treated specially to avoid cancellation. + pass + + return contrib + + +# {{{ single-layer representation + +class DielectricSRep2DBoundaryOperator(Dielectric2DBoundaryOperatorBase): + def _structured_unknown(self, unknown, with_l2_weights): + """ + :arg with_l2_weights: If True, return the 'bare' unknowns + that do not have the :math:`L^2` weights divided out. + Note: Those unknowns should *not* be interpreted as + point values of a density. + :returns: an array of unknowns, with the following index axes: + ``[side, field_kind, i_interface]``, where + ``side`` is o for the outside part and i for the interior part, + ``field_kind`` is 0 for the E-field and 1 for the H-field part, + ``i_interface`` is the number of the enclosed domain, starting from 0. 
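+
+        The flat *unknown* vector is ordered with ``side`` varying slowest
+        (interior side first), then ``field_kind``, then ``i_interface``;
+        densities of a field that is not enabled are skipped in *unknown*
+        and appear as 0 in the returned array.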
+ """ + result = np.zeros((2, 2, len(self.interfaces)), dtype=np.object) + + i_unknown = 0 + for side in self.sides: + for field_kind in self.field_kinds: + for i_interface in range(len(self.interfaces)): + + if self.is_field_present(field_kind): + dens = unknown[i_unknown] + i_unknown += 1 + else: + dens = 0 + + _, _, interface_id = self.interfaces[i_interface] + + if not with_l2_weights: + dens = sym.cse( + dens/self.get_sqrt_weight(interface_id), + "dens_{side}_{field}_{dom}".format( + side={ + self.side_out: "o", + self.side_in: "i"} + [side], + field={ + self.field_kind_e: "E", + self.field_kind_h: "H" + } + [field_kind], + dom=i_interface)) + + result[side, field_kind, i_interface] = dens + + assert i_unknown == len(unknown) + return result + + def representation(self, unknown, i_domain): + """ + :return: a symbolic expression for the representation of the PDE solution + in domain number *i_domain*. + """ + unk = self._structured_unknown(unknown, with_l2_weights=False) + + result = [] + + for field_kind in self.field_kinds: + if not self.is_field_present(field_kind): + continue + + field_result = 0 + for i_interface, (i_domain_outer, i_domain_inner, interface_id) in ( + enumerate(self.interfaces)): + if i_domain_outer == i_domain: + side = self.side_out + elif i_domain_inner == i_domain: + side = self.side_in + else: + continue + + my_unk = unk[side, field_kind, i_interface] + if my_unk: + field_result += sym.S( + self.kernel, + my_unk, + source=interface_id, + k=self.domain_K_exprs[i_domain]) + + result.append(field_result) + + from pytools.obj_array import make_obj_array + return make_obj_array(result) + + def operator(self, unknown): + density_unk = self._structured_unknown(unknown, with_l2_weights=False) + discrete_unk = self._structured_unknown(unknown, with_l2_weights=True) + + result = [] + for bc in self.bcs: + op = 0 + + for side in self.sides: + for term in bc: + unk_index = (side, term.field_kind, term.i_interface) + density = density_unk[unk_index] + discrete = discrete_unk[unk_index] + + op += self.bc_term_to_operator_contrib( + term, side, sym.S, density, discrete) + + result.append(op) + + return np.array(result, dtype=np.object) + +# }}} + + +# {{{ single + double layer representation + +class DielectricSDRep2DBoundaryOperator(Dielectric2DBoundaryOperatorBase): + pot_kind_S = 0 + pot_kind_D = 1 + pot_kinds = [pot_kind_S, pot_kind_D] + potential_ops = { + pot_kind_S: sym.S, + pot_kind_D: sym.D, + } + + def __init__(self, mode, k_vacuum, domain_k_exprs, beta, + interfaces, use_l2_weighting=None): + + super(DielectricSDRep2DBoundaryOperator, self).__init__( + mode, k_vacuum, domain_k_exprs, beta, + interfaces, use_l2_weighting=use_l2_weighting) + + side_in = self.side_in + side_out = self.side_out + + def find_normal_derivative_bc_coeff(field_kind, i_interface, side): + result = 0 + for bc in self.bcs: + for term in bc: + if term.field_kind != field_kind: + continue + if term.i_interface != i_interface: + continue + if term.direction != self.dir_normal: + continue + + if side == side_in: + result += term.coeff_inner + elif side == side_out: + result += term.coeff_outer + else: + raise ValueError("invalid side") + + return result + + self.density_coeffs = np.zeros( + (len(self.pot_kinds), len(self.field_kinds), + len(self.interfaces), len(self.sides)), + dtype=np.object) + for field_kind in self.field_kinds: + for i_interface in range(len(self.interfaces)): + self.density_coeffs[ + self.pot_kind_S, field_kind, i_interface, side_in] = 1 + self.density_coeffs[ + 
self.pot_kind_S, field_kind, i_interface, side_out] = 1 + + # These need to satisfy + # + # [dens_coeff_D * bc_coeff * dn D] + # = dens_coeff_D_out * bc_coeff_out * (dn D) + # + dens_coeff_D_in * bc_coeff_in * dn D + # = 0 + # + # (because dn D is hypersingular, which we'd like to cancel out) + # + # NB: bc_coeff_{in,out} already contain the signs to realize + # the subtraction for the jump. (So the "+" above is as it + # should be.) + + dens_coeff_D_in = find_normal_derivative_bc_coeff( # noqa + field_kind, i_interface, side_out) + dens_coeff_D_out = - find_normal_derivative_bc_coeff( # noqa + field_kind, i_interface, side_in) + + self.density_coeffs[ + self.pot_kind_D, field_kind, i_interface, side_in] \ + = dens_coeff_D_in + self.density_coeffs[ + self.pot_kind_D, field_kind, i_interface, side_out] \ + = dens_coeff_D_out + + def _structured_unknown(self, unknown, with_l2_weights): + """ + :arg with_l2_weights: If True, return the 'bare' unknowns + that do not have the :math:`L^2` weights divided out. + Note: Those unknowns should *not* be interpreted as + point values of a density. + :returns: an array of unknowns, with the following index axes: + ``[pot_kind, field_kind, i_interface]``, where + ``pot_kind`` is 0 for the single-layer part and 1 for the double-layer + part, + ``field_kind`` is 0 for the E-field and 1 for the H-field part, + ``i_interface`` is the number of the enclosed domain, starting from 0. + """ + result = np.zeros((2, 2, len(self.interfaces)), dtype=np.object) + + i_unknown = 0 + for pot_kind in self.pot_kinds: + for field_kind in self.field_kinds: + for i_interface in range(len(self.interfaces)): + + if self.is_field_present(field_kind): + dens = unknown[i_unknown] + i_unknown += 1 + else: + dens = 0 + + _, _, interface_id = self.interfaces[i_interface] + + if not with_l2_weights: + dens = sym.cse( + dens/self.get_sqrt_weight(interface_id), + "dens_{pot}_{field}_{intf}".format( + pot={0: "S", 1: "D"}[pot_kind], + field={ + self.field_kind_e: "E", + self.field_kind_h: "H" + } + [field_kind], + intf=i_interface)) + + result[pot_kind, field_kind, i_interface] = dens + + assert i_unknown == len(unknown) + return result + + def representation(self, unknown, i_domain): + """ + :return: a symbolic expression for the representation of the PDE solution + in domain number *i_domain*. 
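+
+        The result sums, over every interface bordering domain *i_domain*,
+        the single- and double-layer potentials of the corresponding
+        densities, each scaled by the matching *density_coeffs* entry.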
+ """ + unk = self._structured_unknown(unknown, with_l2_weights=False) + + result = [] + + for field_kind in self.field_kinds: + if not self.is_field_present(field_kind): + continue + + field_result = 0 + for pot_kind in self.pot_kinds: + for i_interface, (i_domain_outer, i_domain_inner, interface_id) in ( + enumerate(self.interfaces)): + if i_domain_outer == i_domain: + side = self.side_out + elif i_domain_inner == i_domain: + side = self.side_in + else: + continue + + my_unk = unk[pot_kind, field_kind, i_interface] + if my_unk: + field_result += ( + self.density_coeffs[ + pot_kind, field_kind, i_interface, side] + * self.potential_ops[pot_kind]( + self.kernel, + my_unk, + source=interface_id, + k=self.domain_K_exprs[i_domain] + )) + + result.append(field_result) + + from pytools.obj_array import make_obj_array + return make_obj_array(result) + + def operator(self, unknown): + density_unk = self._structured_unknown(unknown, with_l2_weights=False) + discrete_unk = self._structured_unknown(unknown, with_l2_weights=True) + + result = [] + for bc in self.bcs: + op = 0 + + for pot_kind in self.pot_kinds: + for term in bc: + + for side in self.sides: + raw_potential_op = \ + self.potential_ops[pot_kind] + + unk_index = (pot_kind, term.field_kind, term.i_interface) + density = density_unk[unk_index] + discrete = discrete_unk[unk_index] + + op += ( + self.density_coeffs[ + pot_kind, term.field_kind, term.i_interface, + side] + * self.bc_term_to_operator_contrib( + term, side, raw_potential_op, density, discrete) + ) + + result.append(op) + + return np.array(result, dtype=np.object) + +# }}} + +# }}} + +# vim: foldmethod=marker diff --git a/test/test_global_qbx.py b/test/test_global_qbx.py index 4013b8f31081d6e0d4deda9a846fd4bf0e0af552..7326efc13d212be30c83adecedc7a3e579722605 100644 --- a/test/test_global_qbx.py +++ b/test/test_global_qbx.py @@ -107,20 +107,21 @@ def run_source_refinement_test(ctx_getter, mesh, order, helmholtz_k=None): lpot_source, RefinerCodeContainer(cl_ctx), factory, fine_factory, **refiner_extra_kwargs) + from pytential.qbx.utils import get_centers_on_side + discr_nodes = lpot_source.density_discr.nodes().get(queue) fine_discr_nodes = lpot_source.fine_density_discr.nodes().get(queue) - int_centers = lpot_source.centers(-1) + int_centers = get_centers_on_side(lpot_source, -1) int_centers = np.array([axis.get(queue) for axis in int_centers]) - ext_centers = lpot_source.centers(+1) + ext_centers = get_centers_on_side(lpot_source, +1) ext_centers = np.array([axis.get(queue) for axis in ext_centers]) - panel_sizes = lpot_source.panel_sizes("npanels").get(queue) - fine_panel_sizes = lpot_source.fine_panel_sizes("npanels").get(queue) + expansion_radii = lpot_source._expansion_radii("npanels").get(queue) + panel_sizes = lpot_source._panel_sizes("npanels").get(queue) + fine_panel_sizes = lpot_source._fine_panel_sizes("npanels").get(queue) # {{{ check if satisfying criteria def check_disk_undisturbed_by_sources(centers_panel, sources_panel): - h = panel_sizes[centers_panel.element_nr] - if centers_panel.element_nr == sources_panel.element_nr: # Same panel return @@ -143,8 +144,9 @@ def run_source_refinement_test(ctx_getter, mesh, order, helmholtz_k=None): # A center cannot be closer to another panel than to its originating # panel. 
- assert dist >= h / 2, \ - (dist, h, centers_panel.element_nr, sources_panel.element_nr) + rad = expansion_radii[centers_panel.element_nr] + assert dist >= rad, \ + (dist, rad, centers_panel.element_nr, sources_panel.element_nr) def check_sufficient_quadrature_resolution(centers_panel, sources_panel): h = fine_panel_sizes[sources_panel.element_nr] @@ -232,8 +234,10 @@ def test_target_association(ctx_getter, curve_name, curve_f, nelements): lpot_source, conn = QBXLayerPotentialSource(discr, order).with_refinement() del discr - int_centers = lpot_source.centers(-1) - ext_centers = lpot_source.centers(+1) + from pytential.qbx.utils import get_centers_on_side + + int_centers = get_centers_on_side(lpot_source, -1) + ext_centers = get_centers_on_side(lpot_source, +1) # }}} @@ -243,7 +247,8 @@ def test_target_association(ctx_getter, curve_name, curve_f, nelements): rng = PhiloxGenerator(cl_ctx, seed=RNG_SEED) nsources = lpot_source.density_discr.nnodes noise = rng.uniform(queue, nsources, dtype=np.float, a=0.01, b=1.0) - panel_sizes = lpot_source.panel_sizes("nsources").with_queue(queue) + tunnel_radius = \ + lpot_source._close_target_tunnel_radius("nsources").with_queue(queue) def targets_from_sources(sign, dist): from pytential import sym, bind @@ -254,8 +259,8 @@ def test_target_association(ctx_getter, curve_name, curve_f, nelements): from pytential.target import PointsTarget - int_targets = PointsTarget(targets_from_sources(-1, noise * panel_sizes / 2)) - ext_targets = PointsTarget(targets_from_sources(+1, noise * panel_sizes / 2)) + int_targets = PointsTarget(targets_from_sources(-1, noise * tunnel_radius)) + ext_targets = PointsTarget(targets_from_sources(+1, noise * tunnel_radius)) far_targets = PointsTarget(targets_from_sources(+1, FAR_TARGET_DIST_FROM_SOURCE)) # Create target discretizations. @@ -290,7 +295,7 @@ def test_target_association(ctx_getter, curve_name, curve_f, nelements): QBXTargetAssociator(cl_ctx)(lpot_source, target_discrs) .get(queue=queue)) - panel_sizes = lpot_source.panel_sizes("nsources").get(queue) + expansion_radii = lpot_source._expansion_radii("nsources").get(queue) int_centers = np.array([axis.get(queue) for axis in int_centers]) ext_centers = np.array([axis.get(queue) for axis in ext_centers]) @@ -310,7 +315,7 @@ def test_target_association(ctx_getter, curve_name, curve_f, nelements): target_to_source_result, target_to_side_result): assert (target_to_side_result == true_side).all() dists = la.norm((targets.T - centers.T[target_to_source_result]), axis=1) - assert (dists <= panel_sizes[target_to_source_result] / 2).all() + assert (dists <= expansion_radii[target_to_source_result]).all() # Checks that far targets are not assigned a center. 
def check_far_targets(target_to_source_result): diff --git a/test/test_layer_pot.py b/test/test_layer_pot.py index 870cc6f0438502e767dc23426d839243619a3023..af43bb94307ebce188706ea072a11ee419f69592 100644 --- a/test/test_layer_pot.py +++ b/test/test_layer_pot.py @@ -160,7 +160,10 @@ def test_ellipse_eigenvalues(ctx_getter, ellipse_aspect, mode_nr, qbx_order): if 0: # plot geometry, centers, normals - centers = qbx.centers(density_discr, 1) + + from pytential.qbx.utils import get_centers_on_side + centers = get_centers_on_side(qbx, 1) + nodes_h = nodes.get() centers_h = [centers[0].get(), centers[1].get()] pt.plot(nodes_h[0], nodes_h[1], "x-") diff --git a/test/test_muller.py b/test/test_muller.py index 30d1a47d8564f607a125bdfd0df5f6e67f811606..fb23e911d32c1c720a3284004014cb83088d75d7 100644 --- a/test/test_muller.py +++ b/test/test_muller.py @@ -37,10 +37,14 @@ def test_muller(true_roots): :arg n: number of zeros sought :return: (roots, niter) """ + np.random.seed(15) + z_start = np.random.rand(3) + 1j*np.random.rand(3) + eps = 1e-12 from pytential.muller import muller_deflate roots, niter = muller_deflate( - lambda z: poly_with_roots(z, true_roots), len(true_roots)) + lambda z: poly_with_roots(z, true_roots), len(true_roots), + z_start=z_start) for r_i in roots: min_dist, true_root = min(