From 80c914c351ba16aee96dcc71d362d4c1381e76e1 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Andreas=20Kl=C3=B6ckner?=
Date: Wed, 9 Jun 2021 23:25:20 -0500
Subject: [PATCH] Enable flake8-bugbear (#405)

* Enable flake8-bugbear

* Fix temp_var_type arg handling in Assignment
---
 examples/python/ispc-stream-harness.py   |  2 +-
 loopy/auto_test.py                       | 10 ++++++----
 loopy/check.py                           |  2 +-
 loopy/codegen/__init__.py                |  2 +-
 loopy/codegen/control.py                 |  2 +-
 loopy/kernel/__init__.py                 | 16 +++++++++-------
 loopy/kernel/creation.py                 |  4 +++-
 loopy/kernel/function_interface.py       |  2 +-
 loopy/kernel/instruction.py              | 11 +++++++++--
 loopy/kernel/tools.py                    |  7 ++++---
 loopy/schedule/__init__.py               | 17 +++++++++++------
 loopy/statistics.py                      | 11 +++++------
 loopy/symbolic.py                        |  2 +-
 loopy/target/c/c_execution.py            | 25 +++++++++++++++++++------
 loopy/target/c/codegen/expression.py     |  2 +-
 loopy/target/cuda.py                     |  2 +-
 loopy/target/ispc.py                     |  2 +-
 loopy/target/opencl.py                   |  2 +-
 loopy/target/pyopencl.py                 |  2 +-
 loopy/tools.py                           | 11 +++++++++--
 loopy/transform/arithmetic.py            |  2 +-
 loopy/transform/array_buffer_map.py      |  3 +--
 loopy/transform/data.py                  |  4 +++-
 loopy/transform/diff.py                  |  4 ++--
 loopy/transform/iname.py                 |  8 +++++---
 loopy/transform/instruction.py           |  2 +-
 loopy/transform/pack_and_unpack_args.py  |  3 +--
 loopy/transform/precompute.py            |  9 +++++++--
 loopy/transform/save.py                  |  6 +++---
 loopy/translation_unit.py                | 13 ++++++++++---
 loopy/type_inference.py                  |  5 ++++-
 setup.cfg                                |  2 ++
 test/test_expression.py                  |  8 ++++----
 test/test_statistics.py                  |  4 ++--
 34 files changed, 132 insertions(+), 75 deletions(-)

diff --git a/examples/python/ispc-stream-harness.py b/examples/python/ispc-stream-harness.py
index ce40487b1..722cd917c 100644
--- a/examples/python/ispc-stream-harness.py
+++ b/examples/python/ispc-stream-harness.py
@@ -136,7 +136,7 @@ def main():
 
         start_time = time()
 
-        for irun in range(NRUNS):
+        for _irun in range(NRUNS):
             call_kernel()
 
         elapsed = time() - start_time
diff --git a/loopy/auto_test.py b/loopy/auto_test.py
index e3e41beef..d89294a9c 100644
--- a/loopy/auto_test.py
+++ b/loopy/auto_test.py
@@ -365,12 +365,12 @@ def _enumerate_cl_devices_for_ref_test(blacklist_ref_vendors, need_image_support
 # {{{ main automatic testing entrypoint
 
 def auto_test_vs_ref(
-        ref_prog, ctx, test_prog=None, op_count=[], op_label=[], parameters={},
+        ref_prog, ctx, test_prog=None, op_count=(), op_label=(), parameters=None,
         print_ref_code=False, print_code=True, warmup_rounds=2,
         dump_binary=False,
         fills_entire_output=None, do_check=True, check_result=None,
         max_test_kernel_count=1,
-        quiet=False, blacklist_ref_vendors=[], ref_entrypoint=None,
+        quiet=False, blacklist_ref_vendors=(), ref_entrypoint=None,
         test_entrypoint=None):
    """Compare results of `ref_knl` to the kernels generated by scheduling
    *test_knl*.
@@ -380,6 +380,8 @@ def auto_test_vs_ref(
        message) indicating correctness/acceptability of the result
    :arg max_test_kernel_count: Stop testing after this many *test_knl*
    """
+    if parameters is None:
+        parameters = {}
 
    import pyopencl as cl
 
@@ -565,7 +567,7 @@ def auto_test_vs_ref(
 
        logger.info("%s: run warmup" % (test_entrypoint))
 
-        for i in range(warmup_rounds):
+        for _i in range(warmup_rounds):
            if not AUTO_TEST_SKIP_RUN:
                test_prog(queue, **args)
 
@@ -610,7 +612,7 @@ def auto_test_vs_ref(
 
        evt_start = cl.enqueue_marker(queue)
 
-        for i in range(timing_rounds):
+        for _i in range(timing_rounds):
            if not AUTO_TEST_SKIP_RUN:
                evt, _ = test_prog(queue, **args)
                events.append(evt)
diff --git a/loopy/check.py b/loopy/check.py
index b1a60a68d..5adb295be 100644
--- a/loopy/check.py
+++ b/loopy/check.py
@@ -1413,7 +1413,7 @@ def pre_codegen_checks(t_unit):
        for e in t_unit.entrypoints:
            pre_codegen_entrypoint_checks(t_unit[e], t_unit.callables_table)
 
-        for name, clbl in t_unit.callables_table.items():
+        for clbl in t_unit.callables_table.values():
            if isinstance(clbl, CallableKernel):
                pre_codegen_callable_checks(clbl.subkernel, t_unit.callables_table)
    except Exception:
diff --git a/loopy/codegen/__init__.py b/loopy/codegen/__init__.py
index e6f9e9c34..077819239 100644
--- a/loopy/codegen/__init__.py
+++ b/loopy/codegen/__init__.py
@@ -792,7 +792,7 @@ def generate_code_v2(program):
 
    # {{{ collect preambles
 
-    for func_id, clbl in program.callables_table.items():
+    for clbl in program.callables_table.values():
        device_preambles.extend(list(clbl.generate_preambles(program.target)))
 
    # }}}
diff --git a/loopy/codegen/control.py b/loopy/codegen/control.py
index 7c5fa2fbc..1867c7b18 100644
--- a/loopy/codegen/control.py
+++ b/loopy/codegen/control.py
@@ -332,7 +332,7 @@ def build_loop_nest(codegen_state, schedule_index):
                    check_inames, self.impl_domain, self.kernel.cache_manager)
 
    def build_insn_group(sched_index_info_entries, codegen_state,
-            done_group_lengths=set()):
+            done_group_lengths=frozenset()):
        """
        :arg done_group_lengths: A set of group lengths (integers) that grows
            from empty to include the longest found group and downwards with every
diff --git a/loopy/kernel/__init__.py b/loopy/kernel/__init__.py
index 9fbeaae90..7510ff634 100644
--- a/loopy/kernel/__init__.py
+++ b/loopy/kernel/__init__.py
@@ -46,7 +46,7 @@ from warnings import warn
 
 
 class _UniqueVarNameGenerator(UniqueNameGenerator):
-    def __init__(self, existing_names=set(), forced_prefix=""):
+    def __init__(self, existing_names=frozenset(), forced_prefix=""):
        super().__init__(existing_names, forced_prefix)
        array_prefix_pattern = re.compile("(.*)_s[0-9]+$")
 
@@ -264,7 +264,7 @@ class LoopKernel(ImmutableRecordWithoutPickling, Taggable):
            inames=None, iname_to_tags=None,
 
            substitutions=None,
-            symbol_manglers=[],
+            symbol_manglers=None,
 
            iname_slab_increments=None,
            loop_priority=frozenset(),
@@ -301,6 +301,8 @@ class LoopKernel(ImmutableRecordWithoutPickling, Taggable):
            temporary_variables = {}
        if substitutions is None:
            substitutions = {}
+        if symbol_manglers is None:
+            symbol_manglers = []
 
        if iname_slab_increments is None:
            iname_slab_increments = {}
@@ -466,7 +468,7 @@ class LoopKernel(ImmutableRecordWithoutPickling, Taggable):
        return UniqueNameGenerator(used_ids)
 
    def make_unique_instruction_id(self, insns=None, based_on="insn",
-            extra_used_ids=set()):
+            extra_used_ids=frozenset()):
        if insns is None:
            insns = self.instructions
 
@@ -573,7 +575,7 @@ class LoopKernel(ImmutableRecordWithoutPickling, Taggable):
            else:
                parent = None
 
-            for i in range(discard_level_count):
+            for _i in range(discard_level_count):
                assert parent is not None
                parent = result[parent]
 
@@ -600,7 +602,7 @@ class LoopKernel(ImmutableRecordWithoutPickling, Taggable):
        result = []
        ppd = self.parents_per_domain()
 
-        for dom, parent in zip(self.domains, ppd):
+        for parent in ppd:
            # keep walking up tree to find *all* parents
            dom_result = []
            while parent is not None:
@@ -626,7 +628,7 @@ class LoopKernel(ImmutableRecordWithoutPickling, Taggable):
 
        for dom in self.domains:
            return dom.get_ctx()
-        assert False
+        raise AssertionError()
 
    @memoize_method
    def combine_domains(self, domains):
@@ -1479,7 +1481,7 @@ class LoopKernel(ImmutableRecordWithoutPickling, Taggable):
                result[arg.name] = arg
                continue
 
-            for index, sub_arg_name in subscripts_and_names:
+            for _index, sub_arg_name in subscripts_and_names:
                result[sub_arg_name] = arg
 
        return result
diff --git a/loopy/kernel/creation.py b/loopy/kernel/creation.py
index b9cf234c6..f7c296b74 100644
--- a/loopy/kernel/creation.py
+++ b/loopy/kernel/creation.py
@@ -2101,7 +2101,7 @@ def realize_slices_array_inputs_as_sub_array_refs(kernel):
 
 # {{{ kernel creation top-level
 
-def make_function(domains, instructions, kernel_data=["..."], **kwargs):
+def make_function(domains, instructions, kernel_data=None, **kwargs):
    """User-facing kernel creation entrypoint.
 
    :arg domains:
@@ -2215,6 +2215,8 @@
            logger, "%s: instantiate" % kwargs.get("name", "(unnamed)"))
 
+    if kernel_data is None:
+        kernel_data = [...]
    defines = kwargs.pop("defines", {})
    default_order = kwargs.pop("default_order", "C")
    default_offset = kwargs.pop("default_offset", 0)
diff --git a/loopy/kernel/function_interface.py b/loopy/kernel/function_interface.py
index 55a38f3e8..4ebbf3dbc 100644
--- a/loopy/kernel/function_interface.py
+++ b/loopy/kernel/function_interface.py
@@ -639,7 +639,7 @@ class ScalarCallable(InKernelCallable):
                par_dtypes, arg_dtypes)]
 
-        for i, (a, tgt_dtype) in enumerate(zip(assignees, assignee_dtypes)):
+        for a, tgt_dtype in zip(assignees, assignee_dtypes):
            if tgt_dtype != expression_to_code_mapper.infer_type(a):
                raise LoopyError("Type Mismatch in function %s. Expected: %s"
                        "Got: %s" % (self.name, tgt_dtype,
diff --git a/loopy/kernel/instruction.py b/loopy/kernel/instruction.py
index e561dd030..9fb9757a6 100644
--- a/loopy/kernel/instruction.py
+++ b/loopy/kernel/instruction.py
@@ -822,6 +822,10 @@ class MultiAssignmentBase(InstructionBase):
 
 # {{{ instruction: assignment
 
+class _not_provided:  # noqa: N801
+    pass
+
+
 class Assignment(MultiAssignmentBase):
    """
    .. attribute:: assignee
@@ -889,9 +893,12 @@ class Assignment(MultiAssignmentBase):
            within_inames_is_final=None,
            within_inames=None,
            tags=None,
-            temp_var_type=Optional(), atomicity=(),
+            temp_var_type=_not_provided, atomicity=(),
            priority=0, predicates=frozenset()):
+        if temp_var_type is _not_provided:
+            temp_var_type = Optional()
+
        super().__init__(
                id=id,
                depends_on=depends_on,
@@ -1394,7 +1401,7 @@ class CInstruction(InstructionBase):
                | frozenset(self.read_variables))
 
        from loopy.symbolic import get_dependencies
-        for name, iname_expr in self.iname_exprs:
+        for _name, iname_expr in self.iname_exprs:
            result = result | get_dependencies(iname_expr)
 
        for subscript_deps in self.assignee_subscript_deps():
diff --git a/loopy/kernel/tools.py b/loopy/kernel/tools.py
index 7724bb005..d0148c432 100644
--- a/loopy/kernel/tools.py
+++ b/loopy/kernel/tools.py
@@ -1143,7 +1143,7 @@ class SetTrie:
        if len(key) == 0:
            return
 
-        for child_key, child in self.children.items():
+        for child_key, child in self.children.items():  # noqa: B007
            common = child_key & key
            if common:
                break
@@ -1539,7 +1539,7 @@ def stringify_instruction_list(kernel):
            elif not is_in_new and is_in_current:
                removed.append(iname)
            else:
-                assert False
+                raise AssertionError()
 
        if removed:
            indent_level[0] -= indent_increment * len(removed)
@@ -1550,7 +1550,8 @@
 
        current_inames[0] = new_inames
 
-    for insn, (arrows, extender) in zip(printed_insn_order, arrows_and_extenders):
+    for insn, (arrows, extender) in zip(  # noqa: B007
+            printed_insn_order, arrows_and_extenders):
        if isinstance(insn, lp.MultiAssignmentBase):
            lhs = ", ".join(str(a) for a in insn.assignees)
            rhs = str(insn.expression)
diff --git a/loopy/schedule/__init__.py b/loopy/schedule/__init__.py
index 745d996f7..6e046da81 100644
--- a/loopy/schedule/__init__.py
+++ b/loopy/schedule/__init__.py
@@ -132,7 +132,7 @@ def gather_schedule_block(schedule, start_idx):
 
        i += 1
 
-    assert False
+    raise AssertionError()
 
 
 def generate_sub_sched_items(schedule, start_idx):
@@ -157,7 +157,7 @@ def generate_sub_sched_items(schedule, start_idx):
 
        i += 1
 
-    assert False
+    raise AssertionError()
 
 
 def get_insn_ids_for_block_at(schedule, start_idx):
@@ -241,7 +241,7 @@ def find_loop_nest_around_map(kernel):
                if iname_to_insns[inner_iname] < iname_to_insns[outer_iname]:
                    result[inner_iname].add(outer_iname)
 
-    for dom_idx, dom in enumerate(kernel.domains):
+    for dom in kernel.domains:
        for outer_iname in dom.get_var_names(isl.dim_type.param):
            if outer_iname not in all_inames:
                continue
@@ -470,7 +470,7 @@ def dump_schedule(kernel, schedule):
            lines.append(indent + "... %sbarrier"
                    % sched_item.synchronization_kind[0])
        else:
-            assert False
+            raise AssertionError()
 
    return "\n".join(
            "% 4d: %s" % (i, line)
@@ -1929,7 +1929,7 @@ class MinRecursionLimitForScheduling(MinRecursionLimit):
 
 # {{{ main scheduling entrypoint
 
-def generate_loop_schedules(kernel, callables_table, debug_args={}):
+def generate_loop_schedules(kernel, callables_table, debug_args=None):
    """
    .. warning::
 
@@ -1940,13 +1940,18 @@
        generator chain may not be successfully garbage-collected and cause an
        internal error in the Python runtime.
    """
+    if debug_args is None:
+        debug_args = {}
 
    with MinRecursionLimitForScheduling(kernel):
        yield from generate_loop_schedules_inner(kernel, callables_table,
                debug_args=debug_args)
 
 
-def generate_loop_schedules_inner(kernel, callables_table, debug_args={}):
+def generate_loop_schedules_inner(kernel, callables_table, debug_args=None):
+    if debug_args is None:
+        debug_args = {}
+
    from loopy.kernel import KernelState
    if kernel.state not in (KernelState.PREPROCESSED, KernelState.LINEARIZED):
        raise LoopyError("cannot schedule a kernel that has not been "
diff --git a/loopy/statistics.py b/loopy/statistics.py
index 59fb88e6e..9b6b411d8 100755
--- a/loopy/statistics.py
+++ b/loopy/statistics.py
@@ -450,7 +450,7 @@ class ToCountMap:
 
        total = self._zero()
 
-        for k, v in self.count_map.items():
+        for v in self.count_map.values():
            total = v + total
 
        return total
@@ -478,7 +478,7 @@ class ToCountPolynomialMap(ToCountMap):
 
        space_param_tuple = _get_param_tuple(space)
 
-        for key, val in count_map.items():
+        for val in count_map.values():
            if isinstance(val, isl.PwQPolynomial):
                assert val.dim(dim_type.out) == 1
            elif isinstance(val, GuardedPwQPolynomial):
@@ -1163,7 +1163,7 @@ def _get_lid_and_gid_strides(knl, array, index):
    else:
        dim_tags = array.dim_tags
 
-    for tag, iname in tag_to_iname_dict.items():
+    for tag in tag_to_iname_dict:
        total_iname_stride = 0
        # find total stride of this iname for each axis
        for idx, axis_tag in zip(index, dim_tags):
@@ -1621,7 +1621,7 @@ def _get_insn_count(knl, callables_table, insn_id, subgroup_size,
                "get_insn_count: No count granularity passed, "
                "assuming %s granularity." % (CountGranularity.WORKITEM))
 
-        count_granularity == CountGranularity.WORKITEM
+        count_granularity = CountGranularity.WORKITEM
 
    if count_granularity == CountGranularity.WORKITEM:
        return count_insn_runs(
@@ -2153,8 +2153,7 @@ def _gather_access_footprints_for_single_kernel(kernel, ignore_uncountable):
            afg = AccessFootprintGatherer(kernel, domain,
                    ignore_uncountable=ignore_uncountable)
 
-            for assignee in insn.assignees:
-                write_footprints.append(afg(insn.assignees))
+            write_footprints.append(afg(insn.assignees))
            read_footprints.append(afg(insn.expression))
 
    return write_footprints, read_footprints
diff --git a/loopy/symbolic.py b/loopy/symbolic.py
index 9917de098..841bb6333 100644
--- a/loopy/symbolic.py
+++ b/loopy/symbolic.py
@@ -2079,7 +2079,7 @@ class AffineConditionToISLSetMapper(IdentityMapper):
        elif expr.operator == "<":
            cnst = isl.Constraint.inequality_from_aff(right_aff-left_aff-1)
        else:
-            assert False
+            raise AssertionError()
 
        return isl.Set.universe(self.space).add_constraint(cnst)
 
diff --git a/loopy/target/c/c_execution.py b/loopy/target/c/c_execution.py
index 70430d0e5..55ce85093 100644
--- a/loopy/target/c/c_execution.py
+++ b/loopy/target/c/c_execution.py
@@ -212,10 +212,23 @@ class CCompiler:
    """
 
    def __init__(self, toolchain=None,
-            cc="gcc", cflags="-std=c99 -O3 -fPIC".split(),
-            ldflags="-shared".split(), libraries=[],
-            include_dirs=[], library_dirs=[], defines=[],
+            cc="gcc", cflags=None,
+            ldflags=None, libraries=None,
+            include_dirs=None, library_dirs=None, defines=None,
            source_suffix="c"):
+        if cflags is None:
+            cflags = "-std=c99 -O3 -fPIC".split()
+        if ldflags is None:
+            ldflags = "-shared".split()
+        if libraries is None:
+            libraries = []
+        if include_dirs is None:
+            include_dirs = []
+        if library_dirs is None:
+            library_dirs = []
+        if defines is None:
+            defines = []
+
        # try to get a default toolchain
        # or subclass supplied version if available
        self.toolchain = toolchain
@@ -289,9 +302,9 @@ class CPlusPlusCompiler(CCompiler):
    """Subclass of CCompiler to invoke a C++ compiler."""
 
    def __init__(self, toolchain=None,
-            cc="g++", cflags="-std=c++98 -O3 -fPIC".split(),
-            ldflags=[], libraries=[],
-            include_dirs=[], library_dirs=[], defines=[],
+            cc="g++", cflags=None,
+            ldflags=None, libraries=None,
+            include_dirs=None, library_dirs=None, defines=None,
            source_suffix="cpp"):
 
        super().__init__(
diff --git a/loopy/target/c/codegen/expression.py b/loopy/target/c/codegen/expression.py
index f7fe79c98..5b2ccdcdc 100644
--- a/loopy/target/c/codegen/expression.py
+++ b/loopy/target/c/codegen/expression.py
@@ -275,7 +275,7 @@ class ExpressionToCExpressionMapper(IdentityMapper):
 
                return result
        else:
-            assert False
+            raise AssertionError()
 
    def map_linear_subscript(self, expr, type_context):
        from pymbolic.primitives import Variable
diff --git a/loopy/target/cuda.py b/loopy/target/cuda.py
index 06518e1d0..396bbd3b1 100644
--- a/loopy/target/cuda.py
+++ b/loopy/target/cuda.py
@@ -492,7 +492,7 @@ class CUDACASTBuilder(CFamilyASTBuilder):
            elif lhs_dtype.numpy_dtype == np.float64:
                ctype = "long"
            else:
-                assert False
+                raise AssertionError()
 
            old_val = "*(%s *) &" % ctype + old_val
            new_val = "*(%s *) &" % ctype + new_val
diff --git a/loopy/target/ispc.py b/loopy/target/ispc.py
index 67af90a24..0af1d09d1 100644
--- a/loopy/target/ispc.py
+++ b/loopy/target/ispc.py
@@ -175,7 +175,7 @@ class ISPCTarget(CFamilyTarget):
        gsize, lsize = kernel.get_grid_size_upper_bounds_as_exprs(
                callables_table)
        if len(lsize) > 1:
-            for i, ls_i in enumerate(lsize[1:]):
+            for ls_i in lsize[1:]:
                if ls_i != 1:
                    raise LoopyError("local axis %d (0-based) "
                            "has length > 1, which is unsupported "
diff --git a/loopy/target/opencl.py b/loopy/target/opencl.py
index a1705f9c6..38fccc6e9 100644
--- a/loopy/target/opencl.py
+++ b/loopy/target/opencl.py
@@ -799,7 +799,7 @@ class OpenCLCASTBuilder(CFamilyASTBuilder):
            elif lhs_dtype.numpy_dtype == np.float64:
                ctype = "long"
            else:
-                assert False
+                raise AssertionError()
 
            from loopy.kernel.data import (TemporaryVariable, ArrayArg)
            if (
diff --git a/loopy/target/pyopencl.py b/loopy/target/pyopencl.py
index d389a434a..d0bcbdc25 100644
--- a/loopy/target/pyopencl.py
+++ b/loopy/target/pyopencl.py
@@ -487,7 +487,7 @@ class ExpressionToPyOpenCLCExpressionMapper(ExpressionToOpenCLCExpressionMapper)
 
            if expr.exponent in [2, 3, 4]:
                value = expr.base
-                for i in range(expr.exponent-1):
+                for _i in range(expr.exponent-1):
                    value = value * expr.base
                return self.rec(value, type_context)
            else:
diff --git a/loopy/tools.py b/loopy/tools.py
index 644082ed6..90d7c587a 100644
--- a/loopy/tools.py
+++ b/loopy/tools.py
@@ -209,7 +209,9 @@ def remove_common_indentation(code, require_leading_newline=True,
        return code
 
    # accommodate pyopencl-ish syntax highlighting
-    code = code.lstrip("//CL//")
+    cl_prefix = "//CL//"
+    if code.startswith(cl_prefix):
+        code = code[len(cl_prefix):]
 
    if require_leading_newline and not code.startswith("\n"):
        return code
@@ -262,10 +264,15 @@ def remove_common_indentation(code, require_leading_newline=True,
 
 
 def build_ispc_shared_lib(
        cwd, ispc_sources, cxx_sources,
-        ispc_options=[], cxx_options=[],
+        ispc_options=None, cxx_options=None,
        ispc_bin="ispc", cxx_bin="g++",
        quiet=True):
+    if ispc_options is None:
+        ispc_options = []
+    if cxx_options is None:
+        cxx_options = []
+
    from os.path import join
 
    ispc_source_names = []
diff --git a/loopy/transform/arithmetic.py b/loopy/transform/arithmetic.py
index 8203f0d52..f6ad10826 100644
--- a/loopy/transform/arithmetic.py
+++ b/loopy/transform/arithmetic.py
@@ -115,7 +115,7 @@ def collect_common_factors_on_increment(kernel, var_name, vary_by_axes=()):
    common_factors = []
 
    def find_unifiable_cf_index(index_key):
-        for i, (key, val) in enumerate(common_factors):
+        for i, (key, _val) in enumerate(common_factors):
            unif = UnidirectionalUnifier(
                    lhs_mapping_candidates=get_dependencies(key))
 
diff --git a/loopy/transform/array_buffer_map.py b/loopy/transform/array_buffer_map.py
index 4ef5fac77..3c4092b74 100644
--- a/loopy/transform/array_buffer_map.py
+++ b/loopy/transform/array_buffer_map.py
@@ -237,8 +237,7 @@ class ArrayToBufferMap:
        non1_storage_axis_flags = []
        non1_storage_shape = []
 
-        for saxis, bi, saxis_len in zip(
-                storage_axis_names, storage_base_indices, storage_shape):
+        for saxis_len in storage_shape:
            has_length_non1 = saxis_len != 1
 
            non1_storage_axis_flags.append(has_length_non1)
diff --git a/loopy/transform/data.py b/loopy/transform/data.py
index d866f8a5e..42ca0f9d9 100644
--- a/loopy/transform/data.py
+++ b/loopy/transform/data.py
@@ -141,7 +141,7 @@ class _not_provided:  # noqa: N801
 
 
 def add_prefetch_for_single_kernel(kernel, callables_table, var_name,
-        sweep_inames=[], dim_arg_names=None,
+        sweep_inames=None, dim_arg_names=None,
 
        # "None" is a valid value here, distinct from the default.
        default_tag=_not_provided,
@@ -241,6 +241,8 @@ def add_prefetch_for_single_kernel(kernel, callables_table, var_name,
    This function internally uses :func:`extract_subst` and :func:`precompute`.
    """
    assert isinstance(kernel, LoopKernel)
+    if sweep_inames is None:
+        sweep_inames = []
 
    # {{{ fish indexing out of var_name and into footprint_subscripts
 
diff --git a/loopy/transform/diff.py b/loopy/transform/diff.py
index 124568f45..6a5a3a710 100644
--- a/loopy/transform/diff.py
+++ b/loopy/transform/diff.py
@@ -133,7 +133,7 @@ class LoopyDiffMapper(DifferentiationMapper, RuleAwareIdentityMapper):
        elif len(conds) > 1:
            and_conds = p.LogicalAnd(tuple(conds))
        else:
-            assert False
+            raise AssertionError()
 
        return p.If(and_conds, 1, 0)
 
@@ -370,7 +370,7 @@ class DifferentiationContext:
 # {{{ entrypoint
 
 def diff_kernel(kernel, diff_outputs, by, diff_iname_prefix="diff_i",
-        batch_axes_in_by=frozenset(), copy_outputs=set()):
+        batch_axes_in_by=frozenset(), copy_outputs=frozenset()):
    """
    :arg batch_axes_in_by: a :class:`set` of axis indices in the
        variable named *by*
diff --git a/loopy/transform/iname.py b/loopy/transform/iname.py
index 8cb649b91..102756d2f 100644
--- a/loopy/transform/iname.py
+++ b/loopy/transform/iname.py
@@ -601,7 +601,7 @@ def join_inames(kernel, inames, new_iname=None, tag=None, within=None):
            isl.Constraint.equality_from_aff(
                iname_rel_aff(new_domain.get_space(), new_iname, "==", joint_aff)))
 
-    for i, iname in enumerate(inames):
+    for iname in inames:
        iname_to_dim = new_domain.get_space().get_var_dict()
        iname_dt, iname_idx = iname_to_dim[iname]
 
@@ -850,11 +850,13 @@ class _InameDuplicator(RuleAwareIdentityMapper):
 
 @for_each_kernel
 def duplicate_inames(kernel, inames, within, new_inames=None, suffix=None,
-        tags={}):
+        tags=None):
    """
    :arg within: a stack match as understood by
        :func:`loopy.match.parse_stack_match`.
    """
+    if tags is None:
+        tags = {}
 
    # {{{ normalize arguments, find unique new_inames
 
@@ -1328,7 +1330,7 @@ class _ReductionSplitter(RuleAwareIdentityMapper):
                            expr.allow_simultaneous),
                        expr.allow_simultaneous)
            else:
-                assert False
+                raise AssertionError()
        else:
            return super().map_reduction(expr, expn_state)
 
diff --git a/loopy/transform/instruction.py b/loopy/transform/instruction.py
index 287321e3e..0bb387bd8 100644
--- a/loopy/transform/instruction.py
+++ b/loopy/transform/instruction.py
@@ -203,7 +203,7 @@ def replace_instruction_ids(kernel, replacements):
 
    new_insns = []
 
-    for i, insn in enumerate(kernel.instructions):
+    for insn in kernel.instructions:
        changed = False
        new_depends_on = list(insn.depends_on)
        extra_depends_on = []
diff --git a/loopy/transform/pack_and_unpack_args.py b/loopy/transform/pack_and_unpack_args.py
index 9335bb0bb..c221c8235 100644
--- a/loopy/transform/pack_and_unpack_args.py
+++ b/loopy/transform/pack_and_unpack_args.py
@@ -250,8 +250,7 @@ def pack_and_unpack_args_for_call_for_single_kernel(kernel,
 
            updated_swept_inames = []
 
-            for i, _ in enumerate(
-                    in_knl_callable.arg_id_to_descr[arg_id].shape):
+            for _ in in_knl_callable.arg_id_to_descr[arg_id].shape:
                updated_swept_inames.append(var(vng("i_packsweep_"+arg)))
 
            ctx = kernel.isl_context
diff --git a/loopy/transform/precompute.py b/loopy/transform/precompute.py
index 9ba572efe..1d3ffe650 100644
--- a/loopy/transform/precompute.py
+++ b/loopy/transform/precompute.py
@@ -258,9 +258,9 @@ class _not_provided:  # noqa: N801
 
 
 def precompute_for_single_kernel(kernel, callables_table, subst_use,
-        sweep_inames=[], within=None, storage_axes=None, temporary_name=None,
+        sweep_inames=None, within=None, storage_axes=None, temporary_name=None,
        precompute_inames=None, precompute_outer_inames=None,
-        storage_axis_to_tag={},
+        storage_axis_to_tag=None,
 
        # "None" is a valid value here, distinct from the default.
        default_tag=_not_provided,
@@ -393,6 +393,11 @@ def precompute_for_single_kernel(kernel, callables_table, subst_use,
 
    # {{{ check, standardize arguments
 
+    if sweep_inames is None:
+        sweep_inames = []
+    if storage_axis_to_tag is None:
+        storage_axis_to_tag = {}
+
    if isinstance(sweep_inames, str):
        sweep_inames = [iname.strip() for iname in sweep_inames.split(",")]
 
diff --git a/loopy/transform/save.py b/loopy/transform/save.py
index 078147104..ead1d5571 100644
--- a/loopy/transform/save.py
+++ b/loopy/transform/save.py
@@ -157,13 +157,13 @@ class LivenessAnalysis:
    def print_liveness(self):
        print(75 * "-")
        print("LIVE IN:")
-        for sched_idx, sched_item in enumerate(self.schedule):
+        for sched_idx in range(len(self.schedule)):
            print("{item}: {{{vars}}}".format(
                item=sched_idx,
                vars=", ".join(sorted(self[sched_idx].live_in))))
        print(75 * "-")
        print("LIVE OUT:")
-        for sched_idx, sched_item in enumerate(self.schedule):
+        for sched_idx in range(len(self.schedule)):
            print("{item}: {{{vars}}}".format(
                item=sched_idx,
                vars=", ".join(sorted(self[sched_idx].live_out))))
@@ -329,7 +329,7 @@ class TemporarySaver:
        within_subkernel = False
        result = {}
 
-        for sched_item_idx, sched_item in enumerate(self.kernel.linearization):
+        for sched_item in self.kernel.linearization:
            if isinstance(sched_item, CallKernel):
                within_subkernel = True
                result[sched_item.kernel_name] = frozenset(current_outer_inames)
diff --git a/loopy/translation_unit.py b/loopy/translation_unit.py
index 83ceeef68..2a6bdb042 100644
--- a/loopy/translation_unit.py
+++ b/loopy/translation_unit.py
@@ -173,12 +173,17 @@ class TranslationUnit(ImmutableRecord):
    """
 
    def __init__(self, entrypoints=frozenset(),
-            callables_table=pmap(),
+            callables_table=None,
            target=None,
-            func_id_to_in_knl_callable_mappers=[]):
+            func_id_to_in_knl_callable_mappers=None):
 
        # {{{ sanity checks
 
+        if callables_table is None:
+            callables_table = pmap()
+        if func_id_to_in_knl_callable_mappers is None:
+            func_id_to_in_knl_callable_mappers = []
+
        assert isinstance(callables_table, collections.abc.Mapping)
        assert isinstance(entrypoints, frozenset)
 
@@ -462,8 +467,10 @@ class CallablesInferenceContext(ImmutableRecord):
    """
 
    def __init__(self, callables, clbl_name_gen,
-            renames=collections.defaultdict(frozenset),
+            renames=None,
            new_entrypoints=frozenset()):
+        if renames is None:
+            renames = collections.defaultdict(frozenset)
        assert isinstance(callables, collections.abc.Mapping)
 
        super().__init__(callables=dict(callables),
diff --git a/loopy/type_inference.py b/loopy/type_inference.py
index dd9135483..d7747253f 100644
--- a/loopy/type_inference.py
+++ b/loopy/type_inference.py
@@ -586,7 +586,10 @@ class TypeInferenceMapper(CombineMapper):
 # {{{ TypeReader
 
 class TypeReader(TypeInferenceMapper):
-    def __init__(self, kernel, callables, new_assignments={}):
+    def __init__(self, kernel, callables, new_assignments=None):
+        if new_assignments is None:
+            new_assignments = {}
+
        self.kernel = kernel
        self.callables = callables
        self.new_assignments = new_assignments
diff --git a/setup.cfg b/setup.cfg
index 9495d106c..f4df1c55e 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -8,3 +8,5 @@ exclude=
 inline-quotes = "
 docstring-quotes = """
 multiline-quotes = """
+
+# enable-flake8-bugbear
diff --git a/test/test_expression.py b/test/test_expression.py
index 444aab749..1aca17a9d 100644
--- a/test/test_expression.py
+++ b/test/test_expression.py
@@ -129,7 +129,7 @@ def make_random_fp_expression(prefix, var_values, size, use_complex):
                make_random_fp_expression(prefix, var_values, size, use_complex),
                make_random_fp_expression(prefix, var_values, size, use_complex))
    else:
-        assert False
+        raise AssertionError()
 
 
 def make_random_int_value(nonneg):
@@ -207,7 +207,7 @@ def make_nonzero_random_int_expression(prefix, var_values, size, nonneg):
 
        return result
 
-    assert False
+    raise AssertionError()
 
 
 def generate_random_fuzz_examples(expr_type):
@@ -275,7 +275,7 @@ def test_fuzz_expression_code_gen(ctx_factory, expr_type, random_seed, target):
    elif expr_type in ["int", "int_nonneg"]:
        result_type = np.int64
    else:
-        assert False
+        raise AssertionError()
 
    var_names = []
 
@@ -359,7 +359,7 @@ def test_fuzz_expression_code_gen(ctx_factory, expr_type, random_seed, target):
        elif expr_type in ["int", "int_nonneg"]:
            err = abs(ref_value-lp_value)
        else:
-            assert False
+            raise AssertionError()
 
        if abs(err) > 1e-10:
            print(80*"-")
diff --git a/test/test_statistics.py b/test/test_statistics.py
index 5982d220c..f4949a9f1 100644
--- a/test/test_statistics.py
+++ b/test/test_statistics.py
@@ -943,7 +943,7 @@ def test_count_granularity_val_checks():
        lp.MemAccess(count_granularity=None)
        assert True
        lp.MemAccess(count_granularity="bushel")
-        assert False
+        raise AssertionError()
    except ValueError:
        assert True
 
@@ -954,7 +954,7 @@ def test_count_granularity_val_checks():
        lp.Op(count_granularity=None)
        assert True
        lp.Op(count_granularity="bushel")
-        assert False
+        raise AssertionError()
    except ValueError:
        assert True
 
-- 
GitLab
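Most hunks above apply a small set of recurring flake8-bugbear fixes: mutable or call-expression defaults (B006/B008) are replaced by a `None` default or a sentinel that is resolved inside the function, unused loop variables are renamed with a leading underscore or dropped from the loop target (B007), and `assert False` becomes `raise AssertionError()` so the failure survives `python -O` (B011). The sketch below illustrates the default-argument pattern only; the function and names in it are made up for illustration and are not part of loopy's API.

# Minimal standalone sketch of the default-argument fixes, assuming nothing
# beyond plain Python. Names (frobnicate, _not_provided) are hypothetical.


class _not_provided:  # sentinel: "caller did not pass this argument at all"
    pass


def frobnicate(items=None, options=_not_provided):
    # B006: a mutable default such as `items=[]` is created once at function
    # definition time and shared by every call; use None and rebuild inside.
    if items is None:
        items = []

    # B008: a call in the signature (e.g. `options=Optional()`) also runs only
    # once; a sentinel distinguishes "not passed" from an explicit None.
    if options is _not_provided:
        options = {"defaults": True}

    items.append(options)
    return items


# Each call now gets a fresh list instead of mutating a shared default.
assert frobnicate() == [{"defaults": True}]
assert frobnicate() == [{"defaults": True}]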