diff --git a/pytential/linalg/proxy.py b/pytential/linalg/proxy.py
index abb8e8dec7bc5b4b132258b4e8334221762c143c..ffc3e0aa9786e1db5b4ec3ab49c86d4e1273cabc 100644
--- a/pytential/linalg/proxy.py
+++ b/pytential/linalg/proxy.py
@@ -94,8 +94,8 @@ def partition_by_nodes(discr,
                 max_particles_in_box=max_nodes_in_box)
 
         tree = tree.get(queue)
-        leaf_boxes, = (tree.box_flags &
-                box_flags_enum.HAS_CHILDREN == 0).nonzero()
+        leaf_boxes, = (tree.box_flags
+                & box_flags_enum.HAS_CHILDREN == 0).nonzero()
 
         indices = np.empty(len(leaf_boxes), dtype=np.object)
         for i, ibox in enumerate(leaf_boxes):
@@ -161,8 +161,8 @@ def partition_by_elements(discr,
         groups = discr.groups
 
         tree = tree.get(queue)
-        leaf_boxes, = (tree.box_flags &
-                box_flags_enum.HAS_CHILDREN == 0).nonzero()
+        leaf_boxes, = (tree.box_flags
+                & box_flags_enum.HAS_CHILDREN == 0).nonzero()
 
         indices = np.empty(len(leaf_boxes), dtype=np.object)
         for i, ibox in enumerate(leaf_boxes):
@@ -225,8 +225,8 @@ def partition_from_coarse(resampler, from_indices):
 
     # construct ranges
     from_discr = resampler.from_discr
-    from_grp_ranges = np.cumsum([0] +
-            [grp.nelements for grp in from_discr.mesh.groups])
+    from_grp_ranges = np.cumsum(
+            [0] + [grp.nelements for grp in from_discr.mesh.groups])
     from_el_ranges = np.hstack([
         np.arange(grp.node_nr_base, grp.nnodes + 1, grp.nunit_nodes)
         for grp in from_discr.groups])
@@ -574,9 +574,9 @@ def gather_block_neighbor_points(discr, indices, pxycenters, pxyradii,
 
         # get nodes inside the ball but outside the current range
         center = pxycenters[:, iproxy].reshape(-1, 1)
        radius = pxyradii[iproxy]
-        mask = (la.norm(nodes - center, axis=0) < radius) & \
-                ((isources < indices.ranges[iproxy]) |
-                (indices.ranges[iproxy + 1] <= isources))
+        mask = ((la.norm(nodes - center, axis=0) < radius)
+                & ((isources < indices.ranges[iproxy])
+                    | (indices.ranges[iproxy + 1] <= isources)))
 
         nbrindices[iproxy] = indices.indices[isources[mask]]
diff --git a/pytential/qbx/direct.py b/pytential/qbx/direct.py
index 7ad1782c7478f1d6394a613e1542fe1aaa799d31..496259c921f7e6a6fdba59ceb42eda48a805bcd1 100644
--- a/pytential/qbx/direct.py
+++ b/pytential/qbx/direct.py
@@ -45,8 +45,8 @@ class LayerPotentialOnTargetAndCenterSubset(LayerPotentialBase):
 
         from sumpy.tools import gather_loopy_source_arguments
         arguments = (
-                gather_loopy_source_arguments(self.kernels) +
-                [
+                gather_loopy_source_arguments(self.kernels)
+                + [
                     lp.GlobalArg("src", None,
                         shape=(self.dim, "nsources"), order="C"),
                     lp.GlobalArg("tgt", None,
@@ -62,11 +62,11 @@ class LayerPotentialOnTargetAndCenterSubset(LayerPotentialBase):
                     lp.ValueArg("nsources", np.int32),
                     lp.ValueArg("ntargets", np.int32),
                     lp.ValueArg("ntargets_total", np.int32),
-                    lp.ValueArg("ncenters_total", np.int32)] +
-                [lp.GlobalArg("strength_%d" % i, None,
+                    lp.ValueArg("ncenters_total", np.int32)]
+                + [lp.GlobalArg("strength_%d" % i, None,
                     shape="nsources", order="C")
-                for i in range(self.strength_count)] +
-                [lp.GlobalArg("result_%d" % i, self.value_dtypes[i],
+                for i in range(self.strength_count)]
+                + [lp.GlobalArg("result_%d" % i, self.value_dtypes[i],
                     shape="ntargets_total", order="C")
                 for i in range(len(self.kernels))])
 
diff --git a/pytential/qbx/fmmlib.py b/pytential/qbx/fmmlib.py
index bf233b76a16680b84436e993cc85703deb546bbd..2d21a3d2ac6865d0d8d3c3d3f41c388b0fe160b0 100644
--- a/pytential/qbx/fmmlib.py
+++ b/pytential/qbx/fmmlib.py
@@ -385,15 +385,15 @@ class QBXFMMLibExpansionWrangler(FMMLibExpansionWrangler):
 
         kwargs = {}
         if self.dim == 3 and self.eqn_letter == "h":
-            kwargs["radius"] = (0.5 *
-                    geo_data.expansion_radii()[geo_data.global_qbx_centers()])
+            kwargs["radius"] = (0.5
+                    * geo_data.expansion_radii()[geo_data.global_qbx_centers()])
 
         nsrc_boxes_per_gqbx_center = np.zeros(icontaining_tgt_box_vec.shape,
                 dtype=traversal.tree.box_id_dtype)
         mask = (icontaining_tgt_box_vec != -1)
 
         nsrc_boxes_per_gqbx_center[mask] = (
-            ssn.starts[icontaining_tgt_box_vec[mask] + 1] -
-            ssn.starts[icontaining_tgt_box_vec[mask]]
+            ssn.starts[icontaining_tgt_box_vec[mask] + 1]
+            - ssn.starts[icontaining_tgt_box_vec[mask]]
             )
         nsrc_boxes = np.sum(nsrc_boxes_per_gqbx_center)
diff --git a/pytential/solve.py b/pytential/solve.py
index e46fa5ea5e57b4d7f77510e5ea7b70fea6e4798a..dce9d8a31082bf815936d47f98c8e8d06d6c7f35 100644
--- a/pytential/solve.py
+++ b/pytential/solve.py
@@ -208,9 +208,9 @@ def _gmres(A, b, restart=None, tol=None, x0=None, dot=None, # noqa
             else:
                 print("*** WARNING: non-monotonic residuals in GMRES")
 
-        if (stall_iterations and
-                len(residual_norms) > stall_iterations and
-                norm_r > (
+        if (stall_iterations
+                and len(residual_norms) > stall_iterations
+                and norm_r > (
                     residual_norms[-stall_iterations]
                     / no_progress_factor)):
diff --git a/pytential/symbolic/pde/cahn_hilliard.py b/pytential/symbolic/pde/cahn_hilliard.py
index fd6ec4824cc107fb5d4ea4de8520997f59557ca5..3263f18e1b9c144a743335107339770a12425c30 100644
--- a/pytential/symbolic/pde/cahn_hilliard.py
+++ b/pytential/symbolic/pde/cahn_hilliard.py
@@ -54,12 +54,10 @@ class CahnHilliardOperator(L2WeightedPDEOperator):
                 return (
                         # FIXME: Verify scaling
                         -1/(2*np.pi*(lam1**2-lam2**2)) / hhk_scaling
-                        *
-                        (
+                        * (
                             op_map(sym.S(hhk, density, k=1j*lam1,
                                 qbx_forced_limit=qbx_forced_limit))
-                            -
-                            op_map(sym.S(hhk, density, k=1j*lam2,
+                            - op_map(sym.S(hhk, density, k=1j*lam2,
                                 qbx_forced_limit=qbx_forced_limit))))
             else:
                 return (
diff --git a/pytential/symbolic/pde/maxwell/__init__.py b/pytential/symbolic/pde/maxwell/__init__.py
index 86fdd993b813d5593d73b2d7be81e25ce64250be..8bdab9df9fc837962cade74bf0af9d14a1d88b11 100644
--- a/pytential/symbolic/pde/maxwell/__init__.py
+++ b/pytential/symbolic/pde/maxwell/__init__.py
@@ -42,7 +42,7 @@ __doc__ = """
 # {{{ point source
 
 def get_sym_maxwell_point_source(kernel, jxyz, k):
-    """Return a symbolic expression that, when bound to a
+    r"""Return a symbolic expression that, when bound to a
     :class:`pytential.source.PointPotentialSource` will yield
     a field satisfying Maxwell's equations.
 
@@ -71,7 +71,7 @@ def get_sym_maxwell_point_source(kernel, jxyz, k):
 # {{{ plane wave
 
 def get_sym_maxwell_plane_wave(amplitude_vec, v, omega, epsilon=1, mu=1, where=None):
-    """Return a symbolic expression that, when bound to a
+    r"""Return a symbolic expression that, when bound to a
     :class:`pytential.source.PointPotentialSource` will yield
     a field satisfying Maxwell's equations.
 
@@ -230,14 +230,14 @@ class MuellerAugmentedMFIEOperator(object):
 
         grad = partial(sym.grad, 3)
 
-        E0 = sym.cse(1j*omega*mu0*eps0*S(Jxyz, k=k0) +
-            mu0*curl_S(Mxyz, k=k0) - grad(S(u.rho_e, k=k0)), "E0")
-        H0 = sym.cse(-1j*omega*mu0*eps0*S(Mxyz, k=k0) +
-            eps0*curl_S(Jxyz, k=k0) + grad(S(u.rho_m, k=k0)), "H0")
-        E1 = sym.cse(1j*omega*mu1*eps1*S(Jxyz, k=k1) +
-            mu1*curl_S(Mxyz, k=k1) - grad(S(u.rho_e, k=k1)), "E1")
-        H1 = sym.cse(-1j*omega*mu1*eps1*S(Mxyz, k=k1) +
-            eps1*curl_S(Jxyz, k=k1) + grad(S(u.rho_m, k=k1)), "H1")
+        E0 = sym.cse(1j*omega*mu0*eps0*S(Jxyz, k=k0)
+            + mu0*curl_S(Mxyz, k=k0) - grad(S(u.rho_e, k=k0)), "E0")
+        H0 = sym.cse(-1j*omega*mu0*eps0*S(Mxyz, k=k0)
+            + eps0*curl_S(Jxyz, k=k0) + grad(S(u.rho_m, k=k0)), "H0")
+        E1 = sym.cse(1j*omega*mu1*eps1*S(Jxyz, k=k1)
+            + mu1*curl_S(Mxyz, k=k1) - grad(S(u.rho_e, k=k1)), "E1")
+        H1 = sym.cse(-1j*omega*mu1*eps1*S(Mxyz, k=k1)
+            + eps1*curl_S(Jxyz, k=k1) + grad(S(u.rho_m, k=k1)), "H1")
 
         F1 = (xyz_to_tangential(sym.n_cross(H1-H0) + 0.5*(eps0+eps1)*Jxyz))
         F2 = (sym.n_dot(eps1*E1-eps0*E0) + 0.5*(eps1+eps0)*u.rho_e)
diff --git a/pytential/symbolic/pde/maxwell/waveguide.py b/pytential/symbolic/pde/maxwell/waveguide.py
index 6a303570d853cd0c9203fa66c930b5000efd5bfd..4049cf17eb5555b048aaa5a9490d4c8c3e85b6fc 100644
--- a/pytential/symbolic/pde/maxwell/waveguide.py
+++ b/pytential/symbolic/pde/maxwell/waveguide.py
@@ -561,8 +561,7 @@ class Dielectric2DBoundaryOperatorBase(L2WeightedPDEOperator):
                     for term in bc)
             is_necessary = (
                     (self.ez_enabled and any_significant_e)
-                    or
-                    (self.hz_enabled and any_significant_h))
+                    or (self.hz_enabled and any_significant_h))
 
             # Only keep tangential modes for TEM. Otherwise,
             # no jump in H already implies jump condition on
@@ -588,8 +587,7 @@ class Dielectric2DBoundaryOperatorBase(L2WeightedPDEOperator):
     def is_field_present(self, field_kind):
         return (
                 (field_kind == self.field_kind_e and self.ez_enabled)
-                or
-                (field_kind == self.field_kind_h and self.hz_enabled))
+                or (field_kind == self.field_kind_h and self.hz_enabled))
 
     def make_unknown(self, name):
         num_densities = (
diff --git a/pytential/symbolic/primitives.py b/pytential/symbolic/primitives.py
index 5ee758a44982ed4f1ef51534ab3c4077a5e230a7..de679e68c428c62f536209a1a3b885b3f052c465 100644
--- a/pytential/symbolic/primitives.py
+++ b/pytential/symbolic/primitives.py
@@ -287,8 +287,7 @@ def make_sym_surface_mv(name, ambient_dim, dim, where=None):
 
     return sum(
             var("%s%d" % (name, i))
-            *
-            cse(MultiVector(vec), "tangent%d" % i, cse_scope.DISCRETIZATION)
+            * cse(MultiVector(vec), "tangent%d" % i, cse_scope.DISCRETIZATION)
             for i, vec in enumerate(par_grad.T))
 
 
@@ -901,8 +900,7 @@ def area(ambient_dim, dim, where=None):
 def mean(ambient_dim, dim, operand, where=None):
     return (
             integral(ambient_dim, dim, operand, where)
-            /
-            area(ambient_dim, dim, where))
+            / area(ambient_dim, dim, where))
 
 
 class IterativeInverse(Expression):
@@ -1105,8 +1103,7 @@ class IntG(Expression):
                 karg.loopy_arg.name
                 for karg in (
                     kernel.get_args()
-                    +
-                    kernel.get_source_args()))
+                    + kernel.get_source_args()))
 
         kernel_arguments = kernel_arguments.copy()
         if kwargs:
diff --git a/pytential/symbolic/stokes.py b/pytential/symbolic/stokes.py
index adfc23d5eaa5e50144b242e23ada9d6b8e23024d..7e47bdb222a106faaeb97d36b525c056286156bf 100644
--- a/pytential/symbolic/stokes.py
+++ b/pytential/symbolic/stokes.py
@@ -476,8 +476,8 @@ class StressletWrapper(object):
             for j in range(self.dim):
                 sym_expr[comp] = sym_expr[comp] + (
                     dir_vec_sym[j] * mu_sym * (
-                        sym_grad_matrix[comp][j] +
-                        sym_grad_matrix[j][comp])
+                        sym_grad_matrix[comp][j]
+                        + sym_grad_matrix[j][comp])
                     )
 
         return sym_expr
diff --git a/test/extra_curve_data.py b/test/extra_curve_data.py
index c6679953f523ad5aac6bb56ed4307c41fec97bf8..4d2dacca6bbae937119f59757cdd738b4f0f95e3 100644
--- a/test/extra_curve_data.py
+++ b/test/extra_curve_data.py
@@ -84,8 +84,8 @@ class Segment(Curve):
 
     def __call__(self, ts):
         return (
-            self.start[:, np.newaxis] +
-            ts * (self.end - self.start)[:, np.newaxis])
+            self.start[:, np.newaxis]
+            + ts * (self.end - self.start)[:, np.newaxis])
 
 
 class Arc(Curve):
@@ -134,12 +134,12 @@ class Arc(Curve):
 
     def __call__(self, t):
         if self.theta_increasing:
             thetas = (
-                self.theta_range[0] +
-                t * (self.theta_range[1] - self.theta_range[0]))
+                self.theta_range[0]
+                + t * (self.theta_range[1] - self.theta_range[0]))
         else:
             thetas = (
-                self.theta_range[1] -
-                t * (self.theta_range[1] - self.theta_range[0]))
+                self.theta_range[1]
+                - t * (self.theta_range[1] - self.theta_range[0]))
         val = (self.r * np.exp(1j * thetas)) + self.center
         return np.array([val.real, val.imag])
@@ -149,19 +149,19 @@
 # To avoid issues with crossing non-smooth regions, make sure the number of
 # panels given to this function (for make_curve_mesh) is a multiple of 8.
 horseshoe = (
-    Segment((0, 0), (-5, 0)) +
-    Arc((-5, 0), (-5.5, -0.5), (-5, -1)) +
-    Segment((-5, -1), (0, -1)) +
-    Arc((0, -1), (1.5, 0.5), (0, 2)) +
-    Segment((0, 2), (-5, 2)) +
-    Arc((-5, 2), (-5.5, 1.5), (-5, 1)) +
-    Segment((-5, 1), (0, 1)) +
-    Arc((0, 1), (0.5, 0.5), (0, 0))
+    Segment((0, 0), (-5, 0))
+    + Arc((-5, 0), (-5.5, -0.5), (-5, -1))
+    + Segment((-5, -1), (0, -1))
+    + Arc((0, -1), (1.5, 0.5), (0, 2))
+    + Segment((0, 2), (-5, 2))
+    + Arc((-5, 2), (-5.5, 1.5), (-5, 1))
+    + Segment((-5, 1), (0, 1))
+    + Arc((0, 1), (0.5, 0.5), (0, 0))
     )
 
 # unit square
 unit_square = (
-    Segment((1, -1), (1, 1)) +
-    Segment((1, 1), (-1, 1)) +
-    Segment((-1, 1), (-1, -1)) +
-    Segment((-1, -1), (1, -1)))
+    Segment((1, -1), (1, 1))
+    + Segment((1, 1), (-1, 1))
+    + Segment((-1, 1), (-1, -1))
+    + Segment((-1, -1), (1, -1)))
diff --git a/test/test_global_qbx.py b/test/test_global_qbx.py
index 86832d6eb2fb33b7ada862a96e1f748d5f5da050..7f700fa104b2bc10e99416d594aecd090eca69eb 100644
--- a/test/test_global_qbx.py
+++ b/test/test_global_qbx.py
@@ -144,8 +144,8 @@ def run_source_refinement_test(ctx_getter, mesh, order, helmholtz_k=None):
         # =distance(centers of panel 1, panel 2)
         dist = (
             la.norm((
-                    all_centers[..., np.newaxis] -
-                    nodes[:, np.newaxis, ...]).T,
+                    all_centers[..., np.newaxis]
+                    - nodes[:, np.newaxis, ...]).T,
                 axis=-1)
             .min())
 
@@ -169,8 +169,8 @@ def run_source_refinement_test(ctx_getter, mesh, order, helmholtz_k=None):
         # =distance(centers of panel 1, panel 2)
         dist = (
             la.norm((
-                    all_centers[..., np.newaxis] -
-                    nodes[:, np.newaxis, ...]).T,
+                    all_centers[..., np.newaxis]
+                    - nodes[:, np.newaxis, ...]).T,
                 axis=-1)
             .min())
 
diff --git a/test/test_layer_pot.py b/test/test_layer_pot.py
index b4f4a02fe0dede451c6172d747b5a0357ab66333..4b5d70b675871031023d3f5cf40c049c008d0f5b 100644
--- a/test/test_layer_pot.py
+++ b/test/test_layer_pot.py
@@ -488,8 +488,7 @@ def test_3d_jump_relations(ctx_factory, relation, visualize=False):
 
         err = (
                 norm(qbx, queue, jump_identity, np.inf)
-                /
-                norm(qbx, queue, density, np.inf))
+                / norm(qbx, queue, density, np.inf))
 
         print("ERROR", qbx.h_max, err)
         eoc_rec.add_data_point(qbx.h_max, err)
diff --git a/test/test_layer_pot_eigenvalues.py b/test/test_layer_pot_eigenvalues.py
index c5f927c2880892786f3077ce8654ca16a6c8bd76..b4e986d4af6e16cc149effac6a08403d11668b4c 100644
--- a/test/test_layer_pot_eigenvalues.py
+++ b/test/test_layer_pot_eigenvalues.py
@@ -166,8 +166,7 @@ def test_ellipse_eigenvalues(ctx_getter, ellipse_aspect, mode_nr, qbx_order,
 
         s_err = (
                 norm(density_discr, queue, s_sigma - s_sigma_ref)
-                /
-                norm(density_discr, queue, s_sigma_ref))
+                / norm(density_discr, queue, s_sigma_ref))
         s_eoc_rec.add_data_point(qbx.h_max, s_err)
 
         # }}}
@@ -198,8 +197,7 @@ def test_ellipse_eigenvalues(ctx_getter, ellipse_aspect, mode_nr, qbx_order,
 
        d_err = (
                 norm(density_discr, queue, d_sigma - d_sigma_ref)
-                /
-                d_ref_norm)
+                / d_ref_norm)
         d_eoc_rec.add_data_point(qbx.h_max, d_err)
 
         # }}}
@@ -218,8 +216,7 @@ def test_ellipse_eigenvalues(ctx_getter, ellipse_aspect, mode_nr, qbx_order,
 
         sp_err = (
                 norm(density_discr, queue, sp_sigma - sp_sigma_ref)
-                /
-                norm(density_discr, queue, sigma))
+                / norm(density_discr, queue, sigma))
         sp_eoc_rec.add_data_point(qbx.h_max, sp_err)
 
         # }}}
@@ -277,8 +274,7 @@ def test_sphere_eigenvalues(ctx_getter, mode_m, mode_n, qbx_order,
     def rel_err(comp, ref):
         return (
                 norm(density_discr, queue, comp - ref)
-                /
-                norm(density_discr, queue, ref))
+                / norm(density_discr, queue, ref))
 
     for nrefinements in [0, 1]:
         from meshmode.mesh.generation import generate_icosphere
diff --git a/test/test_scalar_int_eq.py b/test/test_scalar_int_eq.py
index 8830e84b037beaa9159d6355d1bee9106c471ac4..72d08fcf3dcee46ebbdbd03f960c40eb1839992f 100644
--- a/test/test_scalar_int_eq.py
+++ b/test/test_scalar_int_eq.py
@@ -713,8 +713,7 @@ def run_int_eq_test(cl_ctx, queue, case, resolution, visualize):
 
        rel_grad_err_inf = (
                 la.norm(grad_err[0].get(), np.inf)
-                /
-                la.norm(grad_ref[0].get(), np.inf))
+                / la.norm(grad_ref[0].get(), np.inf))
 
         print("rel_grad_err_inf: %g" % rel_grad_err_inf)
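
Every hunk in this patch makes the same whitespace-only change: wrapped expressions now break before a binary operator (+, -, *, /, &, |, and, or) instead of after it, the break-before-operator style recommended by current PEP 8 (pycodestyle reports the old trailing-operator form as W504). A minimal, self-contained sketch of the convention follows; the function and names are illustrative only and are not taken from pytential:

    def relative_error(err_norm, ref_norm, weight=1.0):
        # Old style wrapped with the operator at the end of the line:
        #     return (
        #             weight * err_norm /
        #             ref_norm)
        # New style: the continuation line leads with the operator.
        return (
                weight * err_norm
                / ref_norm)

    print(relative_error(1.0, 8.0))  # prints 0.125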