From cdf8ad6d59fa6c18a9a2cb1ed1a80cd0dcee38ae Mon Sep 17 00:00:00 2001 From: Kaushik Kulkarni Date: Mon, 2 Nov 2020 14:31:06 -0600 Subject: [PATCH] run pyupgrade --py36-plus --- examples/python/call-external.py | 14 ++--- loopy/auto_test.py | 2 +- loopy/check.py | 12 ++-- loopy/kernel/creation.py | 4 +- loopy/kernel/function_interface.py | 49 +++++++--------- loopy/kernel/tools.py | 4 +- loopy/library/function.py | 8 +-- loopy/library/reduction.py | 60 +++++++++---------- loopy/preprocess.py | 8 +-- loopy/program.py | 77 ++++++++++++------------- loopy/statistics.py | 40 ++++++------- loopy/symbolic.py | 12 ++-- loopy/target/c/__init__.py | 5 +- loopy/target/c/compyte | 2 +- loopy/target/cuda.py | 10 ++-- loopy/target/opencl.py | 16 ++--- loopy/target/pyopencl.py | 4 +- loopy/target/python.py | 3 +- loopy/transform/callable.py | 43 ++++++-------- loopy/transform/diff.py | 2 +- loopy/transform/iname.py | 5 +- loopy/transform/make_scalar.py | 4 +- loopy/transform/pack_and_unpack_args.py | 26 ++++----- loopy/type_inference.py | 30 +++++----- test/test_callables.py | 2 - test/test_transform.py | 2 +- 26 files changed, 213 insertions(+), 231 deletions(-) diff --git a/examples/python/call-external.py b/examples/python/call-external.py index c13d99bd0..104d12f38 100644 --- a/examples/python/call-external.py +++ b/examples/python/call-external.py @@ -68,8 +68,8 @@ class BLASCallable(lp.ScalarCallable): par_dtype).expr for par, par_dtype in zip( parameters, par_dtypes)] - c_parameters.insert(0, var('CblasRowMajor')) - c_parameters.insert(1, var('CblasNoTrans')) + c_parameters.insert(0, var("CblasRowMajor")) + c_parameters.insert(1, var("CblasNoTrans")) c_parameters.insert(2, mat_descr.shape[0]) c_parameters.insert(3, mat_descr.shape[1]) c_parameters.insert(4, 1) @@ -85,8 +85,8 @@ class BLASCallable(lp.ScalarCallable): def blas_fn_lookup(target, identifier): - if identifier == 'gemv': - return BLASCallable(name='gemv') + if identifier == "gemv": + return BLASCallable(name="gemv") return None # }}} @@ -99,9 +99,9 @@ knl = lp.make_kernel( """ y[:] = gemv(A[:, :], x[:]) """, [ - lp.GlobalArg('A', dtype=np.float64, shape=(n, n)), - lp.GlobalArg('x', dtype=np.float64, shape=(n, )), - lp.GlobalArg('y', shape=(n, )), ...], + lp.GlobalArg("A", dtype=np.float64, shape=(n, n)), + lp.GlobalArg("x", dtype=np.float64, shape=(n, )), + lp.GlobalArg("y", shape=(n, )), ...], target=CTarget(), lang_version=(2018, 2)) diff --git a/loopy/auto_test.py b/loopy/auto_test.py index ff2bda7ef..dfcfe2a2f 100644 --- a/loopy/auto_test.py +++ b/loopy/auto_test.py @@ -641,7 +641,7 @@ def auto_test_vs_ref( rates = "" for cnt, lbl in zip(op_count, op_label): - rates += " %g %s/s" % (cnt/elapsed_wall, lbl) + rates += " {:g} {}/s".format(cnt/elapsed_wall, lbl) if not quiet: def format_float_or_none(v): diff --git a/loopy/check.py b/loopy/check.py index 32db02b65..44fbfe155 100644 --- a/loopy/check.py +++ b/loopy/check.py @@ -111,11 +111,11 @@ class UnscopedCallCollector(CombineMapper): def map_call_with_kwargs(self, expr): if not isinstance(expr.function, ResolvedFunction): return (frozenset([expr.function.name]) | - self.combine((self.rec(child) for child in expr.parameters - + tuple(expr.kw_parameters.values())))) + self.combine(self.rec(child) for child in expr.parameters + + tuple(expr.kw_parameters.values()))) else: - return self.combine((self.rec(child) for child in - expr.parameters+tuple(expr.kw_parameters.values()))) + return self.combine(self.rec(child) for child in + expr.parameters+tuple(expr.kw_parameters.values())) def 
map_constant(self, expr): return frozenset() @@ -262,9 +262,9 @@ def _get_all_unique_iname_tags(kernel): from itertools import chain iname_tags = list(chain(*(kernel.iname_to_tags.get(iname, []) for iname in kernel.all_inames()))) - return set( + return { tag for tag in iname_tags if - isinstance(tag, UniqueTag)) + isinstance(tag, UniqueTag)} def check_multiple_tags_allowed(kernel): diff --git a/loopy/kernel/creation.py b/loopy/kernel/creation.py index f73bf278f..a9665f354 100644 --- a/loopy/kernel/creation.py +++ b/loopy/kernel/creation.py @@ -2375,12 +2375,12 @@ def make_kernel(domains, instructions, kernel_data=["..."], **kwargs): def make_function(*args, **kwargs): - lang_version = kwargs.pop('lang_version', None) + lang_version = kwargs.pop("lang_version", None) if lang_version: raise LoopyError("lang_version should be set for program, not " "functions.") - kwargs['is_callee_kernel'] = True + kwargs["is_callee_kernel"] = True return make_kernel(*args, **kwargs) # }}} diff --git a/loopy/kernel/function_interface.py b/loopy/kernel/function_interface.py index 0cb610074..58f5f4db7 100644 --- a/loopy/kernel/function_interface.py +++ b/loopy/kernel/function_interface.py @@ -1,5 +1,3 @@ -from __future__ import division, absolute_import - __copyright__ = "Copyright (C) 2018 Andreas Kloeckner, Kaushik Kulkarni" __license__ = """ @@ -22,9 +20,6 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ - -from six.moves import zip - from pytools import ImmutableRecord from loopy.diagnostic import LoopyError @@ -82,7 +77,7 @@ class ArrayArgDescriptor(ImmutableRecord): A tuple of instances of :class:`loopy.kernel.array._StrideArrayDimTagBase` """ - fields = set(['shape', 'address_space', 'dim_tags']) + fields = {"shape", "address_space", "dim_tags"} def __init__(self, shape, address_space, dim_tags): @@ -99,7 +94,7 @@ class ArrayArgDescriptor(ImmutableRecord): # }}} - super(ArrayArgDescriptor, self).__init__( + super().__init__( shape=shape, address_space=address_space, dim_tags=dim_tags) @@ -264,7 +259,7 @@ class GridOverrideForCalleeKernel(ImmutableRecord): This class acts as a pseudo-callable and its significance lies in solving picklability issues. """ - fields = set(["local_size", "global_size"]) + fields = {"local_size", "global_size"} def __init__(self, global_size, local_size): self.global_size = global_size @@ -317,12 +312,12 @@ class InKernelCallable(ImmutableRecord): .. automethod:: is_ready_for_codegen """ - fields = set(["arg_id_to_dtype", "arg_id_to_descr"]) + fields = {"arg_id_to_dtype", "arg_id_to_descr"} init_arg_names = ("arg_id_to_dtype", "arg_id_to_descr") def __init__(self, arg_id_to_dtype=None, arg_id_to_descr=None): - super(InKernelCallable, self).__init__( + super().__init__( arg_id_to_dtype=arg_id_to_dtype, arg_id_to_descr=arg_id_to_descr) @@ -394,8 +389,8 @@ class InKernelCallable(ImmutableRecord): new_arg_id_to_dtype = None if self.arg_id_to_dtype is not None: - new_arg_id_to_dtype = dict((id, with_target_if_not_None(dtype)) for id, - dtype in self.arg_id_to_dtype.items()) + new_arg_id_to_dtype = {id: with_target_if_not_None(dtype) for id, + dtype in self.arg_id_to_dtype.items()} return self.copy(arg_id_to_dtype=new_arg_id_to_dtype) @@ -461,7 +456,7 @@ class ScalarCallable(InKernelCallable): derived subclasses. 
""" - fields = set(["name", "arg_id_to_dtype", "arg_id_to_descr", "name_in_target"]) + fields = {"name", "arg_id_to_dtype", "arg_id_to_descr", "name_in_target"} init_arg_names = ("name", "arg_id_to_dtype", "arg_id_to_descr", "name_in_target") hash_fields = fields @@ -469,7 +464,7 @@ class ScalarCallable(InKernelCallable): def __init__(self, name, arg_id_to_dtype=None, arg_id_to_descr=None, name_in_target=None): - super(ScalarCallable, self).__init__( + super().__init__( arg_id_to_dtype=arg_id_to_dtype, arg_id_to_descr=arg_id_to_descr) @@ -627,7 +622,7 @@ class CallableKernel(InKernelCallable): sizes for the :attr:`subkernel` of the callable. """ - fields = set(["subkernel", "arg_id_to_dtype", "arg_id_to_descr"]) + fields = {"subkernel", "arg_id_to_dtype", "arg_id_to_descr"} init_arg_names = ("subkernel", "arg_id_to_dtype", "arg_id_to_descr") hash_fields = fields @@ -635,7 +630,7 @@ class CallableKernel(InKernelCallable): arg_id_to_descr=None): assert isinstance(subkernel, LoopKernel) - super(CallableKernel, self).__init__( + super().__init__( arg_id_to_dtype=arg_id_to_dtype, arg_id_to_descr=arg_id_to_descr) @@ -729,8 +724,8 @@ class CallableKernel(InKernelCallable): subst_mapper = SubstitutionMapper(subst_func) - arg_id_to_descr = dict((arg_id, descr.map_expr(subst_mapper)) for - arg_id, descr in arg_id_to_descr.items()) + arg_id_to_descr = {arg_id: descr.map_expr(subst_mapper) for + arg_id, descr in arg_id_to_descr.items()} # }}} @@ -793,8 +788,8 @@ class CallableKernel(InKernelCallable): callables_table)) if assumptions: - args_added_knl = assume(args_added_knl, ' and '.join([ - '{0}={1}'.format(key, val) for key, val in assumptions.items()])) + args_added_knl = assume(args_added_knl, " and ".join([ + f"{key}={val}" for key, val in assumptions.items()])) return ( self.copy( @@ -900,19 +895,19 @@ class ManglerCallable(ScalarCallable): A function of signature ``(kernel, name , arg_dtypes)`` and returns an instance of ``loopy.CallMangleInfo``. 
""" - fields = set(["name", "function_mangler", "arg_id_to_dtype", "arg_id_to_descr", - "name_in_target"]) + fields = {"name", "function_mangler", "arg_id_to_dtype", "arg_id_to_descr", + "name_in_target"} init_arg_names = ("name", "function_mangler", "arg_id_to_dtype", "arg_id_to_descr", "name_in_target") - hash_fields = set(["name", "arg_id_to_dtype", "arg_id_to_descr", - "name_in_target"]) + hash_fields = {"name", "arg_id_to_dtype", "arg_id_to_descr", + "name_in_target"} def __init__(self, name, function_mangler, arg_id_to_dtype=None, arg_id_to_descr=None, name_in_target=None): self.function_mangler = function_mangler - super(ManglerCallable, self).__init__( + super().__init__( name=name, arg_id_to_dtype=arg_id_to_dtype, arg_id_to_descr=arg_id_to_descr, @@ -941,8 +936,8 @@ class ManglerCallable(ScalarCallable): arg_dtypes) if mangle_result: new_arg_id_to_dtype = dict(enumerate(mangle_result.arg_dtypes)) - new_arg_id_to_dtype.update(dict((-i-1, dtype) for i, dtype in - enumerate(mangle_result.result_dtypes))) + new_arg_id_to_dtype.update({-i-1: dtype for i, dtype in + enumerate(mangle_result.result_dtypes)}) return ( self.copy(name_in_target=mangle_result.target_name, arg_id_to_dtype=new_arg_id_to_dtype), diff --git a/loopy/kernel/tools.py b/loopy/kernel/tools.py index 725566c36..6f76f0144 100644 --- a/loopy/kernel/tools.py +++ b/loopy/kernel/tools.py @@ -1956,8 +1956,8 @@ class CallCollector(CombineMapper): def map_call_with_kwargs(self, expr): return (frozenset([expr.function.name]) | - self.combine((self.rec(child) for child in expr.parameters - + tuple(expr.kw_parameters.values())))) + self.combine(self.rec(child) for child in expr.parameters + + tuple(expr.kw_parameters.values()))) def map_constant(self, expr): return frozenset() diff --git a/loopy/library/function.py b/loopy/library/function.py index f0914189a..291f0c372 100644 --- a/loopy/library/function.py +++ b/loopy/library/function.py @@ -36,8 +36,8 @@ class MakeTupleCallable(ScalarCallable): def with_descrs(self, arg_id_to_descr, caller_kernel, callables_table, expr): from loopy.kernel.function_interface import ValueArgDescriptor - new_arg_id_to_descr = dict(((id, ValueArgDescriptor()), - (-id-1, ValueArgDescriptor())) for id in arg_id_to_descr.keys()) + new_arg_id_to_descr = {(id, ValueArgDescriptor()): + (-id-1, ValueArgDescriptor()) for id in arg_id_to_descr.keys()} return ( self.copy(arg_id_to_descr=new_arg_id_to_descr), @@ -46,8 +46,8 @@ class MakeTupleCallable(ScalarCallable): class IndexOfCallable(ScalarCallable): def with_types(self, arg_id_to_dtype, kernel, callables_table): - new_arg_id_to_dtype = dict((i, dtype) for i, dtype in - arg_id_to_dtype.items() if dtype is not None) + new_arg_id_to_dtype = {i: dtype for i, dtype in + arg_id_to_dtype.items() if dtype is not None} new_arg_id_to_dtype[-1] = kernel.index_dtype return (self.copy(arg_id_to_dtype=new_arg_id_to_dtype), diff --git a/loopy/library/reduction.py b/loopy/library/reduction.py index 28cfb8ba2..f44d24323 100644 --- a/loopy/library/reduction.py +++ b/loopy/library/reduction.py @@ -486,28 +486,28 @@ class ReductionCallable(ScalarCallable): prefix = op.prefix(scalar_dtype, index_dtype) yield (prefix, """ - inline %(scalar_t)s %(prefix)s_op( - %(scalar_t)s op1, %(index_t)s index1, - %(scalar_t)s op2, %(index_t)s index2, - %(index_t)s *index_out) - { - if (op2 %(comp)s op1) - { + inline {scalar_t} {prefix}_op( + {scalar_t} op1, {index_t} index1, + {scalar_t} op2, {index_t} index2, + {index_t} *index_out) + {{ + if (op2 {comp} op1) + {{ *index_out = index2; 
return op2; - } + }} else - { + {{ *index_out = index1; return op1; - } - } - """ % { - "scalar_t": target.dtype_to_typename(scalar_dtype), - "prefix": prefix, - "index_t": target.dtype_to_typename(index_dtype), - "comp": op.update_comparison, - }) + }} + }} + """.format( + scalar_t=target.dtype_to_typename(scalar_dtype), + prefix=prefix, + index_t=target.dtype_to_typename(index_dtype), + comp=op.update_comparison, + )) elif isinstance(self.name, SegmentedOp): op = self.name.reduction_op scalar_dtype = self.arg_id_to_dtype[-1] @@ -515,20 +515,20 @@ class ReductionCallable(ScalarCallable): prefix = op.prefix(scalar_dtype, segment_flag_dtype) yield (prefix, """ - inline %(scalar_t)s %(prefix)s_op( - %(scalar_t)s op1, %(segment_flag_t)s segment_flag1, - %(scalar_t)s op2, %(segment_flag_t)s segment_flag2, - %(segment_flag_t)s *segment_flag_out) - { + inline {scalar_t} {prefix}_op( + {scalar_t} op1, {segment_flag_t} segment_flag1, + {scalar_t} op2, {segment_flag_t} segment_flag2, + {segment_flag_t} *segment_flag_out) + {{ *segment_flag_out = segment_flag1 | segment_flag2; - return segment_flag2 ? op2 : %(combined)s; - } - """ % { - "scalar_t": target.dtype_to_typename(scalar_dtype), - "prefix": prefix, - "segment_flag_t": target.dtype_to_typename(segment_flag_dtype), - "combined": op.op % ("op1", "op2"), - }) + return segment_flag2 ? op2 : {combined}; + }} + """.format( + scalar_t=target.dtype_to_typename(scalar_dtype), + prefix=prefix, + segment_flag_t=target.dtype_to_typename(segment_flag_dtype), + combined=op.op % ("op1", "op2"), + )) return diff --git a/loopy/preprocess.py b/loopy/preprocess.py index b70be0816..365c30d7f 100644 --- a/loopy/preprocess.py +++ b/loopy/preprocess.py @@ -2049,7 +2049,7 @@ class ArgDescrInferenceMapper(RuleAwareIdentityMapper): def __init__(self, rule_mapping_context, caller_kernel, callables_table): - super(ArgDescrInferenceMapper, self).__init__( + super().__init__( rule_mapping_context) self.caller_kernel = caller_kernel self.callables_table = callables_table @@ -2060,15 +2060,15 @@ class ArgDescrInferenceMapper(RuleAwareIdentityMapper): if not isinstance(expr.function, ResolvedFunction): # ignore if the call is not to a ResolvedFunction - return super(ArgDescrInferenceMapper, self).map_call(expr, expn_state) + return super().map_call(expr, expn_state) arg_id_to_val = dict(enumerate(expr.parameters)) if isinstance(expr, CallWithKwargs): arg_id_to_val.update(expr.kw_parameters) - if 'assignees' in kwargs: + if "assignees" in kwargs: # If supplied with assignees then this is a CallInstruction - assignees = kwargs['assignees'] + assignees = kwargs["assignees"] for i, arg in enumerate(assignees): arg_id_to_val[-i-1] = arg diff --git a/loopy/program.py b/loopy/program.py index 1fb691531..7224a7bbe 100644 --- a/loopy/program.py +++ b/loopy/program.py @@ -1,5 +1,3 @@ -from __future__ import division, absolute_import - __copyright__ = "Copyright (C) 2018 Kaushik Kulkarni" __license__ = """ @@ -22,7 +20,6 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
""" -import six import re from pytools import ImmutableRecord, memoize_method @@ -76,7 +73,7 @@ class ResolvedFunctionMarker(RuleAwareIdentityMapper): """ def __init__(self, rule_mapping_context, kernel, callables_table, function_id_to_in_knl_callable_mappers): - super(ResolvedFunctionMarker, self).__init__(rule_mapping_context) + super().__init__(rule_mapping_context) self.kernel = kernel self.callables_table = callables_table self.function_id_to_in_knl_callable_mappers = ( @@ -131,13 +128,13 @@ class ResolvedFunctionMarker(RuleAwareIdentityMapper): ResolvedFunction(new_func_id), tuple(self.rec(child, expn_state) for child in expr.parameters), - dict( - (key, self.rec(val, expn_state)) - for key, val in six.iteritems(expr.kw_parameters)) + { + key: self.rec(val, expn_state) + for key, val in expr.kw_parameters.items()} ) # this is an unknown function as of yet, do not modify it - return super(ResolvedFunctionMarker, self).map_call_with_kwargs(expr, + return super().map_call_with_kwargs(expr, expn_state) def map_reduction(self, expr, expn_state): @@ -148,7 +145,7 @@ class ResolvedFunctionMarker(RuleAwareIdentityMapper): self.callables_table, _ = ( self.callables_table.with_added_callable(func_id, in_knl_callable)) - return super(ResolvedFunctionMarker, self).map_reduction(expr, expn_state) + return super().map_reduction(expr, expn_state) def _default_func_id_to_kernel_callable_mappers(target): @@ -243,7 +240,7 @@ class Program(ImmutableRecord): assert name in callables_table - super(Program, self).__init__( + super().__init__( name=name, callables_table=callables_table, target=target, @@ -260,10 +257,10 @@ class Program(ImmutableRecord): update_persistent_hash = update_persistent_hash def copy(self, **kwargs): - if 'target' in kwargs: + if "target" in kwargs: # target attribute of all the callable kernels should be updated. - target = kwargs['target'] - new_self = super(Program, self).copy(**kwargs) + target = kwargs["target"] + new_self = super().copy(**kwargs) new_resolved_functions = {} for func_id, in_knl_callable in ( new_self.callables_table.items()): @@ -280,7 +277,7 @@ class Program(ImmutableRecord): return super(Program, new_self).copy( callables_table=callables_table) else: - return super(Program, self).copy(**kwargs) + return super().copy(**kwargs) def get_grid_size_upper_bounds(self, ignore_auto=False): """Return a tuple (global_size, local_size) containing a grid that @@ -371,7 +368,7 @@ class Program(ImmutableRecord): resolved_functions=new_resolved_functions)) def __iter__(self): - return six.iterkeys(self.callables_table.resolved_functions) + return self.callables_table.resolved_functions.keys() def __getitem__(self, name): result = self.callables_table[name] @@ -427,13 +424,13 @@ def next_indexed_function_identifier(function_id): match = func_name.match(function_id) if match is None: - if function_id[-1] == '_': - return "{old_name}0".format(old_name=function_id) + if function_id[-1] == "_": + return f"{function_id}0" else: - return "{old_name}_0".format(old_name=function_id) + return f"{function_id}_0" - return "{alpha}_{num}".format(alpha=match.group('alpha'), - num=int(match.group('num'))+1) + return "{alpha}_{num}".format(alpha=match.group("alpha"), + num=int(match.group("num"))+1) class ResolvedFunctionRenamer(RuleAwareIdentityMapper): @@ -442,7 +439,7 @@ class ResolvedFunctionRenamer(RuleAwareIdentityMapper): *renaming_dict*. 
""" def __init__(self, rule_mapping_context, renaming_dict): - super(ResolvedFunctionRenamer, self).__init__( + super().__init__( rule_mapping_context) self.renaming_dict = renaming_dict @@ -450,7 +447,7 @@ class ResolvedFunctionRenamer(RuleAwareIdentityMapper): if expr.name in self.renaming_dict: return ResolvedFunction(self.renaming_dict[expr.name]) else: - return super(ResolvedFunctionRenamer, self).map_resolved_function( + return super().map_resolved_function( expr, expn_state) @@ -499,8 +496,8 @@ class CallablesCountingMapper(CombineMapper): in_knl_callable = self.callables_table[expr.function.name] if isinstance(in_knl_callable, ScalarCallable): return (Counter([expr.function.name]) + - self.combine((self.rec(child) for child in expr.parameters - + tuple(kw_parameters.values())))) + self.combine(self.rec(child) for child in expr.parameters + + tuple(kw_parameters.values()))) elif isinstance(in_knl_callable, CallableKernel): @@ -511,22 +508,22 @@ class CallablesCountingMapper(CombineMapper): self.callables_table)) return (Counter([expr.function.name]) + - self.combine((self.rec(child) for child in expr.parameters - + tuple(kw_parameters.values())))) + ( + self.combine(self.rec(child) for child in expr.parameters + + tuple(kw_parameters.values()))) + ( callables_count_in_subkernel) else: raise NotImplementedError("Unknown callable type %s." % ( type)) else: return ( - self.combine((self.rec(child) for child in expr.parameters - + tuple(kw_parameters.values())))) + self.combine(self.rec(child) for child in expr.parameters + + tuple(kw_parameters.values()))) map_call_with_kwargs = map_call def map_reduction(self, expr): return Counter(expr.operation.get_scalar_callables()) + ( - super(CallablesCountingMapper, self).map_reduction(expr)) + super().map_reduction(expr)) def map_constant(self, expr): return Counter() @@ -604,10 +601,10 @@ class CallablesTable(ImmutableRecord): history=None, is_being_edited=False): if history is None: - history = dict((func_id, frozenset([func_id])) for func_id in - resolved_functions) + history = {func_id: frozenset([func_id]) for func_id in + resolved_functions} - super(CallablesTable, self).__init__( + super().__init__( resolved_functions=resolved_functions, history=history, is_being_edited=is_being_edited) @@ -619,8 +616,8 @@ class CallablesTable(ImmutableRecord): def __hash__(self): return hash(( - frozenset(six.iteritems(self.resolved_functions)), - frozenset(six.iteritems(self.history)), + frozenset(self.resolved_functions.items()), + frozenset(self.history.items()), self.is_being_edited )) @@ -780,8 +777,8 @@ class CallablesTable(ImmutableRecord): # equal to the old version of the callable. return self, function else: - print('Old: ', self.resolved_functions[function.name]) - print('New: ', in_kernel_callable) + print("Old: ", self.resolved_functions[function.name]) + print("New: ", in_kernel_callable) raise LoopyError("Use 'with_enter_edit_callables_mode' first.") # }}} @@ -869,7 +866,7 @@ class CallablesTable(ImmutableRecord): # this implies that all the function instances having the name # "func_id" have been renamed to something else. 
for new_func_id in ( - six.viewkeys(new_callables_count)-six.viewkeys(renames_needed)): + new_callables_count.keys()-renames_needed.keys()): if old_func_id in self.history[new_func_id]: renames_needed[new_func_id] = old_func_id break @@ -926,13 +923,13 @@ class CallablesTable(ImmutableRecord): return item in self.resolved_functions def items(self): - return six.iteritems(self.resolved_functions) + return self.resolved_functions.items() def values(self): - return six.itervalues(self.resolved_functions) + return self.resolved_functions.values() def keys(self): - return six.iterkeys(self.resolved_functions) + return self.resolved_functions.keys() # }}} diff --git a/loopy/statistics.py b/loopy/statistics.py index 20b936ceb..a1c86d88b 100755 --- a/loopy/statistics.py +++ b/loopy/statistics.py @@ -164,7 +164,7 @@ class GuardedPwQPolynomial: # {{{ ToCountMap -class ToCountMap(object): +class ToCountMap: """A map from work descriptors like :class:`Op` and :class:`MemAccess` to any arithmetic type. @@ -215,9 +215,9 @@ class ToCountMap(object): def __mul__(self, other): if isinstance(other, GuardedPwQPolynomial): - return self.copy(dict( - (index, value*other) - for index, value in self.count_map.items())) + return self.copy({ + index: value*other + for index, value in self.count_map.items()}) else: raise ValueError("ToCountMap: Attempted to multiply " "ToCountMap by {} {}." @@ -233,7 +233,7 @@ class ToCountMap(object): def __str__(self): return "\n".join( - "%s: %s" % (k, v) + f"{k}: {v}" for k, v in sorted(self.count_map.items(), key=lambda k: str(k))) @@ -400,9 +400,9 @@ class ToCountMap(object): for self_key, self_val in self.count_map.items(): new_key = key_type( - **dict( - (field, getattr(self_key, field)) - for field in args)) + **{ + field: getattr(self_key, field) + for field in args}) new_count_map[new_key] = new_count_map.get(new_key, 0) + self_val @@ -487,7 +487,7 @@ class ToCountPolynomialMap(ToCountMap): assert _get_param_tuple(val.space) == space_param_tuple - super(ToCountPolynomialMap, self).__init__(count_map) + super().__init__(count_map) def _zero(self): space = self.space.insert_dims(dim_type.out, 0, 1) @@ -584,7 +584,7 @@ def stringify_stats_mapping(m): # {{{ CountGranularity -class CountGranularity(object): +class CountGranularity: """Strings specifying whether an operation should be counted once per *work-item*, *sub-group*, or *work-group*. 
@@ -658,7 +658,7 @@ class Op(ImmutableRecord): from loopy.types import to_loopy_type dtype = to_loopy_type(dtype) - super(Op, self).__init__(dtype=dtype, name=name, + super().__init__(dtype=dtype, name=name, count_granularity=count_granularity, kernel_name=kernel_name) @@ -752,7 +752,7 @@ class MemAccess(ImmutableRecord): from loopy.types import to_loopy_type dtype = to_loopy_type(dtype) - super(MemAccess, self).__init__(mtype=mtype, dtype=dtype, + super().__init__(mtype=mtype, dtype=dtype, lid_strides=lid_strides, gid_strides=gid_strides, direction=direction, variable=variable, variable_tag=variable_tag, @@ -797,11 +797,11 @@ class Sync(ImmutableRecord): """ def __init__(self, kind=None, kernel_name=None): - super(Sync, self).__init__(kind=kind, kernel_name=kernel_name) + super().__init__(kind=kind, kernel_name=kernel_name) def __repr__(self): # Record.__repr__ overridden for consistent ordering and conciseness - return "Sync(%s, %s)" % (self.kind, self.kernel_name) + return f"Sync({self.kind}, {self.kernel_name})" # }}} @@ -846,12 +846,12 @@ class CounterBase(CombineMapper): if isinstance(clbl, CallableKernel): sub_result = self.kernel_rec(clbl.subkernel) - arg_dict = dict( - (arg.name, value) + arg_dict = { + arg.name: value for arg, value in zip( clbl.subkernel.args, expr.parameters) - if isinstance(arg, ValueArg)) + if isinstance(arg, ValueArg)} return subst_into_to_count_map( self.param_space, @@ -911,7 +911,7 @@ class CounterBase(CombineMapper): class ExpressionOpCounter(CounterBase): def __init__(self, knl, callables_table, kernel_rec, count_within_subscripts=True): - super(ExpressionOpCounter, self).__init__( + super().__init__( knl, callables_table, kernel_rec) self.count_within_subscripts = count_within_subscripts @@ -940,7 +940,7 @@ class ExpressionOpCounter(CounterBase): kernel_name=self.knl.name): self.one} ) + self.rec(expr.parameters) else: - return super(ExpressionOpCounter, self).map_call(expr) + return super().map_call(expr) def map_subscript(self, expr): if self.count_within_subscripts: @@ -1190,7 +1190,7 @@ class MemAccessCounterBase(CounterBase): if not isinstance(clbl, CallableKernel): return self.rec(expr.parameters) else: - return super(MemAccessCounterBase, self).map_call(expr) + return super().map_call(expr) # }}} diff --git a/loopy/symbolic.py b/loopy/symbolic.py index a9c8ab172..0c9f8307b 100644 --- a/loopy/symbolic.py +++ b/loopy/symbolic.py @@ -289,7 +289,7 @@ class StringifyMapper(StringifyMapperBase): def map_sub_array_ref(self, expr, prec): return "[{inames}]: {subscr}".format( - inames=','.join(self.rec(iname, prec) for iname in + inames=",".join(self.rec(iname, prec) for iname in expr.swept_inames), subscr=self.rec(expr.subscript, prec)) @@ -386,7 +386,7 @@ class DependencyMapper(DependencyMapperBase): def map_sub_array_ref(self, expr, *args): deps = self.rec(expr.subscript, *args) - return deps - set(iname for iname in expr.swept_inames) + return deps - {iname for iname in expr.swept_inames} map_linear_subscript = DependencyMapperBase.map_subscript @@ -838,7 +838,7 @@ class SweptInameStrideCollector(CoefficientCollectorBase): or expr.aggregate.name not in self.target_names): return {1: expr} - return super(SweptInameStrideCollector, self).map_algebraic_leaf(expr) + return super().map_algebraic_leaf(expr) class SubArrayRef(p.Expression): @@ -888,8 +888,8 @@ class SubArrayRef(p.Expression): subscript would be ``a[0, j, 0, l]`` """ # TODO: Set the zero to the minimum value of the iname. 
- swept_inames_to_zeros = dict( - (swept_iname.name, 0) for swept_iname in self.swept_inames) + swept_inames_to_zeros = { + swept_iname.name: 0 for swept_iname in self.swept_inames} return EvaluatorWithDeficientContext(swept_inames_to_zeros)( self.subscript) @@ -2215,7 +2215,7 @@ class BatchedAccessRangeMapper(WalkMapper): return self.rec(expr.child, inames) def map_sub_array_ref(self, expr, inames): - total_inames = inames | set([iname.name for iname in expr.swept_inames]) + total_inames = inames | {iname.name for iname in expr.swept_inames} return self.rec(expr.subscript, total_inames) diff --git a/loopy/target/c/__init__.py b/loopy/target/c/__init__.py index e618d75a1..37997d7ab 100644 --- a/loopy/target/c/__init__.py +++ b/loopy/target/c/__init__.py @@ -464,7 +464,7 @@ class CMathCallable(ScalarCallable): elif dtype == np.float128: # pylint:disable=no-member name = name + "l" # fabsl else: - raise LoopyTypeError("%s does not support type %s" % (name, + raise LoopyTypeError("{} does not support type {}".format(name, dtype)) return ( @@ -553,8 +553,7 @@ class CFamilyASTBuilder(ASTBuilderBase): def function_id_in_knl_callable_mapper(self): return ( - super(CFamilyASTBuilder, - self).function_id_in_knl_callable_mapper() + [ + super().function_id_in_knl_callable_mapper() + [ scope_c_math_functions]) # }}} diff --git a/loopy/target/c/compyte b/loopy/target/c/compyte index 7e48e1166..d1f993dae 160000 --- a/loopy/target/c/compyte +++ b/loopy/target/c/compyte @@ -1 +1 @@ -Subproject commit 7e48e1166a13cfbb7b60f909b071f088034ffda1 +Subproject commit d1f993daecc03947d9e6e3e60d2a5145ecbf3786 diff --git a/loopy/target/cuda.py b/loopy/target/cuda.py index 64b401b8b..83697e601 100644 --- a/loopy/target/cuda.py +++ b/loopy/target/cuda.py @@ -170,8 +170,8 @@ class CudaCallable(ScalarCallable): raise LoopyError("%s does not support complex numbers" % name) - updated_arg_id_to_dtype = dict((id, NumpyType(dtype)) for id in range(-1, - num_args)) + updated_arg_id_to_dtype = {id: NumpyType(dtype) for id in range(-1, + num_args)} return ( self.copy(name_in_target=name, @@ -184,7 +184,7 @@ class CudaCallable(ScalarCallable): def scope_cuda_functions(target, identifier): - if identifier in set(["dot"]) | set( + if identifier in {"dot"} | set( _CUDA_SPECIFIC_FUNCTIONS): return CudaCallable(name=identifier) @@ -355,7 +355,7 @@ class CUDACASTBuilder(CFamilyASTBuilder): def preamble_generators(self): return ( - super(CUDACASTBuilder, self).preamble_generators() + [ + super().preamble_generators() + [ cuda_preamble_generator]) # }}} @@ -455,7 +455,7 @@ class CUDACASTBuilder(CFamilyASTBuilder): lhs_expr_code = ecm(lhs_expr) rhs_expr_code = ecm(new_rhs_expr) - return Statement("atomicAdd(&{0}, {1})".format( + return Statement("atomicAdd(&{}, {})".format( lhs_expr_code, rhs_expr_code)) else: from cgen import Block, DoWhile, Assign diff --git a/loopy/target/opencl.py b/loopy/target/opencl.py index 6455cacc9..0cc93ca28 100644 --- a/loopy/target/opencl.py +++ b/loopy/target/opencl.py @@ -186,9 +186,9 @@ class OpenCLCallable(ScalarCallable): [], [dtype.numpy_dtype for id, dtype in arg_id_to_dtype.items() if (id >= 0 and dtype is not None)]) - if dtype.kind in ['u', 'i', 'f']: - if dtype.kind == 'f': - name = 'f'+name + if dtype.kind in ["u", "i", "f"]: + if dtype.kind == "f": + name = "f"+name dtype = NumpyType(dtype) return ( self.copy(name_in_target=name, @@ -242,8 +242,8 @@ class OpenCLCallable(ScalarCallable): raise LoopyError("%s does not support complex numbers" % name) - updated_arg_id_to_dtype = dict((id, 
NumpyType(dtype)) for id in range(-1, - num_args)) + updated_arg_id_to_dtype = {id: NumpyType(dtype) for id in range(-1, + num_args)} return ( self.copy(name_in_target=name, @@ -266,8 +266,8 @@ class OpenCLCallable(ScalarCallable): self.copy(arg_id_to_dtype=arg_id_to_dtype), callables_table) - updated_arg_id_to_dtype = dict((id, NumpyType(dtype)) for id in - range(count)) + updated_arg_id_to_dtype = {id: NumpyType(dtype) for id in + range(count)} updated_arg_id_to_dtype[-1] = OpenCLTarget().vector_dtype( NumpyType(dtype), count) @@ -288,7 +288,7 @@ def scope_opencl_functions(target, identifier): Returns an instance of :class:`InKernelCallable` if the function defined by *identifier* is known in OpenCL. """ - opencl_function_ids = set(["max", "min", "dot"]) | set( + opencl_function_ids = {"max", "min", "dot"} | set( _CL_SIMPLE_MULTI_ARG_FUNCTIONS) | set(VECTOR_LITERAL_FUNCS) if identifier in opencl_function_ids: diff --git a/loopy/target/pyopencl.py b/loopy/target/pyopencl.py index 898d1323e..2008c9224 100644 --- a/loopy/target/pyopencl.py +++ b/loopy/target/pyopencl.py @@ -229,7 +229,7 @@ class PyOpenCLCallable(ScalarCallable): raise LoopyTypeError("unexpected complex type '%s'" % dtype) return ( - self.copy(name_in_target="%s_%s" % (tpname, name), + self.copy(name_in_target=f"{tpname}_{name}", arg_id_to_dtype={0: dtype, -1: NumpyType( np.dtype(dtype.numpy_dtype.type(0).real))}), callables_table) @@ -248,7 +248,7 @@ class PyOpenCLCallable(ScalarCallable): raise LoopyTypeError("unexpected complex type '%s'" % dtype) return ( - self.copy(name_in_target="%s_%s" % (tpname, name), + self.copy(name_in_target=f"{tpname}_{name}", arg_id_to_dtype={0: dtype, -1: dtype}), callables_table) else: diff --git a/loopy/target/python.py b/loopy/target/python.py index c02943fd6..c27b4484d 100644 --- a/loopy/target/python.py +++ b/loopy/target/python.py @@ -111,7 +111,8 @@ class ExpressionToPythonMapper(StringifyMapper): str_parameters = [self.rec(par, PREC_NONE) for par in expr.parameters] - return "%s(%s)" % (in_knl_callable.name_in_target, ", ".join(str_parameters)) + return "{}({})".format(in_knl_callable.name_in_target, + ", ".join(str_parameters)) def map_group_hw_index(self, expr, enclosing_prec): raise LoopyError("plain Python does not have group hw axes") diff --git a/loopy/transform/callable.py b/loopy/transform/callable.py index 479843697..6195f0b4c 100644 --- a/loopy/transform/callable.py +++ b/loopy/transform/callable.py @@ -1,5 +1,3 @@ -from __future__ import division, absolute_import - __copyright__ = "Copyright (C) 2018 Kaushik Kulkarni" __license__ = """ @@ -22,8 +20,6 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ -import six - import islpy as isl from pymbolic.primitives import CallWithKwargs @@ -63,10 +59,10 @@ def _resolved_callables_from_function_lookup(program, """ callables_table = program.callables_table - callable_knls = dict( - (func_id, in_knl_callable) for func_id, in_knl_callable in + callable_knls = { + func_id: in_knl_callable for func_id, in_knl_callable in callables_table.items() if isinstance(in_knl_callable, - CallableKernel)) + CallableKernel)} edited_callable_knls = {} for func_id, in_knl_callable in callable_knls.items(): @@ -143,7 +139,7 @@ class _RegisterCalleeKernel(ImmutableRecord): :func:`loopy.transform.register_callable_kernel` picklable. As python cannot pickle lexical closures. 
""" - fields = set(['callable_kernel']) + fields = {"callable_kernel"} def __init__(self, callable_kernel): self.callable_kernel = callable_kernel @@ -166,8 +162,7 @@ def register_callable_kernel(program, callee_kernel): # {{{ sanity checks assert isinstance(program, Program) - assert isinstance(callee_kernel, LoopKernel), ('{0} !=' - '{1}'.format(type(callee_kernel), LoopKernel)) + assert isinstance(callee_kernel, LoopKernel) # check to make sure that the variables with 'out' direction is equal to # the number of assigness in the callee kernel intructions. @@ -263,7 +258,7 @@ class KernelInliner(SubstitutionMapper): """ def __init__(self, subst_func, caller, arg_map, arg_dict): - super(KernelInliner, self).__init__(subst_func) + super().__init__(subst_func) self.caller = caller self.arg_map = arg_map self.arg_dict = arg_dict @@ -287,7 +282,7 @@ class KernelInliner(SubstitutionMapper): from numbers import Integral if not all(isinstance(d, Integral) for d in callee_arg.shape): raise LoopyError( - "Argument: {0} in callee kernel does not have " + "Argument: {} in callee kernel does not have " "constant shape.".format(callee_arg)) flatten_index = 0 @@ -311,7 +306,7 @@ class KernelInliner(SubstitutionMapper): return aggregate.index(tuple(new_indices)) else: - return super(KernelInliner, self).map_subscript(expr) + return super().map_subscript(expr) # }}} @@ -360,7 +355,7 @@ def _inline_call_instruction(caller_kernel, callee_knl, instruction): temp_map = {} new_temps = kernel.temporary_variables.copy() - for name, temp in six.iteritems(callee_knl.temporary_variables): + for name, temp in callee_knl.temporary_variables.items(): new_name = vng(callee_label+name) temp_map[name] = new_name new_temps[new_name] = temp.copy(name=new_name) @@ -404,11 +399,11 @@ def _inline_call_instruction(caller_kernel, callee_knl, instruction): import pymbolic.primitives as p from pymbolic.mapper.substitutor import make_subst_func - var_map = dict((p.Variable(k), p.Variable(v)) - for k, v in six.iteritems(iname_map)) - var_map.update(dict((p.Variable(k), p.Variable(v)) - for k, v in six.iteritems(temp_map))) - for k, v in six.iteritems(arg_map): + var_map = {p.Variable(k): p.Variable(v) + for k, v in iname_map.items()} + var_map.update({p.Variable(k): p.Variable(v) + for k, v in temp_map.items()}) + for k, v in arg_map.items(): if isinstance(v, SubArrayRef): var_map[p.Variable(k)] = v.subscript.aggregate else: @@ -425,10 +420,10 @@ def _inline_call_instruction(caller_kernel, callee_knl, instruction): dep_map = callee_knl.recursive_insn_dep_map() # roots depend on nothing - heads = set(insn for insn, deps in six.iteritems(dep_map) if not deps) + heads = {insn for insn, deps in dep_map.items() if not deps} # leaves have nothing that depends on them tails = set(dep_map.keys()) - for insn, deps in six.iteritems(dep_map): + for insn, deps in dep_map.items(): tails = tails - deps # }}} @@ -458,7 +453,7 @@ def _inline_call_instruction(caller_kernel, callee_knl, instruction): depends_on = frozenset(map(insn_id.get, insn.depends_on)) | ( instruction.depends_on) if insn.id in heads: - depends_on = depends_on | set([noop_start.id]) + depends_on = depends_on | {noop_start.id} new_atomicity = tuple( type(atomicity)(var_map[p.Variable(atomicity.var_name)].name) @@ -598,7 +593,7 @@ class DimChanger(IdentityMapper): def map_subscript(self, expr): if expr.aggregate.name not in self.callee_arg_dict: - return super(DimChanger, self).map_subscript(expr) + return super().map_subscript(expr) callee_arg_dim_tags = 
self.callee_arg_dict[expr.aggregate.name].dim_tags flattened_index = sum(dim_tag.stride*idx for dim_tag, idx in zip(callee_arg_dim_tags, expr.index_tuple)) @@ -645,7 +640,7 @@ def _match_caller_callee_argument_dimension_for_single_kernel( get_kw_pos_association) _, pos_to_kw = get_kw_pos_association(callee_knl) arg_id_to_shape = {} - for arg_id, arg in six.iteritems(insn.arg_id_to_val()): + for arg_id, arg in insn.arg_id_to_val().items(): arg_id = pos_to_kw[arg_id] arg_descr = get_arg_descriptor_for_expression(caller_knl, arg) diff --git a/loopy/transform/diff.py b/loopy/transform/diff.py index a85a8aa29..5a4297352 100644 --- a/loopy/transform/diff.py +++ b/loopy/transform/diff.py @@ -378,7 +378,7 @@ def diff_kernel(kernel, diff_outputs, by, diff_iname_prefix="diff_i", *diff_context.by_name*, or *None* if no dependency exists. """ - assert isinstance(knl, LoopKernel) + assert isinstance(kernel, LoopKernel) from loopy.kernel.creation import apply_single_writer_depencency_heuristic kernel = apply_single_writer_depencency_heuristic(kernel, warn_if_used=True) diff --git a/loopy/transform/iname.py b/loopy/transform/iname.py index 378b4f2f7..473dbbca7 100644 --- a/loopy/transform/iname.py +++ b/loopy/transform/iname.py @@ -1090,9 +1090,8 @@ def get_iname_duplication_options_for_single_kernel(kernel, def get_iname_duplication_options(program, use_boostable_into=False): for in_knl_callable in program.callables_table.values(): if isinstance(in_knl_callable, CallableKernel): - for option in get_iname_duplication_options_for_single_kernel( - in_knl_callable.subkernel, use_boostable_into): - yield option + yield from get_iname_duplication_options_for_single_kernel( + in_knl_callable.subkernel, use_boostable_into) elif isinstance(in_knl_callable, ScalarCallable): pass else: diff --git a/loopy/transform/make_scalar.py b/loopy/transform/make_scalar.py index ab91fdf78..9f33e8394 100644 --- a/loopy/transform/make_scalar.py +++ b/loopy/transform/make_scalar.py @@ -7,13 +7,13 @@ from loopy.transform.iname import remove_unused_inames class ScalarChanger(RuleAwareIdentityMapper): def __init__(self, rule_mapping_context, var_name): self.var_name = var_name - super(ScalarChanger, self).__init__(rule_mapping_context) + super().__init__(rule_mapping_context) def map_subscript(self, expr, expn_state): if expr.aggregate.name == self.var_name: return Variable(self.var_name) - return super(ScalarChanger, self).map_subscript(expr, expn_state) + return super().map_subscript(expr, expn_state) def make_scalar(kernel, var_name): diff --git a/loopy/transform/pack_and_unpack_args.py b/loopy/transform/pack_and_unpack_args.py index a18326187..6fb4988f0 100644 --- a/loopy/transform/pack_and_unpack_args.py +++ b/loopy/transform/pack_and_unpack_args.py @@ -1,5 +1,3 @@ -from __future__ import division, absolute_import - __copyright__ = "Copyright (C) 2018 Tianjiao Sun, Kaushik Kulkarni" __license__ = """ @@ -121,9 +119,9 @@ def pack_and_unpack_args_for_call_for_single_kernel(kernel, from pymbolic import var dim_type = isl.dim_type.set - ilp_inames = set(iname for iname in insn.within_inames + ilp_inames = {iname for iname in insn.within_inames if all(isinstance(tag, (IlpBaseTag, VectorizeTag)) - for tag in kernel.iname_to_tags.get(iname, []))) + for tag in kernel.iname_to_tags.get(iname, []))} new_ilp_inames = set() ilp_inames_map = {} for iname in ilp_inames: @@ -156,10 +154,10 @@ def pack_and_unpack_args_for_call_for_single_kernel(kernel, new_pack_inames = ilp_inames_map.copy() # packing-specific inames new_unpack_inames = 
ilp_inames_map.copy() # unpacking-specific iname - new_pack_inames = dict((iname, var(vng(iname.name + - "_pack"))) for iname in p.swept_inames) - new_unpack_inames = dict((iname, var(vng(iname.name + - "_unpack"))) for iname in p.swept_inames) + new_pack_inames = {iname: var(vng(iname.name + + "_pack")) for iname in p.swept_inames} + new_unpack_inames = {iname: var(vng(iname.name + + "_unpack")) for iname in p.swept_inames} # Updating the domains corresponding to the new inames. for iname in p.swept_inames: @@ -228,8 +226,8 @@ def pack_and_unpack_args_for_call_for_single_kernel(kernel, packing_insns.append(Assignment( assignee=pack_lhs_assignee, expression=pack_subst_mapper.map_subscript(p.subscript), - within_inames=insn.within_inames - ilp_inames | set( - new_pack_inames[i].name for i in p.swept_inames) | ( + within_inames=insn.within_inames - ilp_inames | { + new_pack_inames[i].name for i in p.swept_inames} | ( new_ilp_inames), depends_on=insn.depends_on, id=ing(insn.id+"_pack"), @@ -240,8 +238,8 @@ def pack_and_unpack_args_for_call_for_single_kernel(kernel, unpacking_insns.append(Assignment( expression=unpack_rhs, assignee=unpack_subst_mapper.map_subscript(p.subscript), - within_inames=insn.within_inames - ilp_inames | set( - new_unpack_inames[i].name for i in p.swept_inames) | ( + within_inames=insn.within_inames - ilp_inames | { + new_unpack_inames[i].name for i in p.swept_inames} | ( new_ilp_inames), id=ing(insn.id+"_unpack"), depends_on=frozenset([insn.id]), @@ -282,8 +280,8 @@ def pack_and_unpack_args_for_call_for_single_kernel(kernel, new_assignees = tuple(subst_mapper(new_id_to_parameters[-i-1]) for i, _ in enumerate(insn.assignees)) new_call_insn = new_call_insn.copy( - depends_on=new_call_insn.depends_on | set( - pack.id for pack in packing_insns), + depends_on=new_call_insn.depends_on | { + pack.id for pack in packing_insns}, within_inames=new_call_insn.within_inames - ilp_inames | ( new_ilp_inames), expression=new_call_insn.expression.function(*new_params), diff --git a/loopy/type_inference.py b/loopy/type_inference.py index e95146349..ac4afaac7 100644 --- a/loopy/type_inference.py +++ b/loopy/type_inference.py @@ -54,8 +54,8 @@ def get_return_types_as_tuple(arg_id_to_dtype): :arg arg_id_to_dtype: An instance of :class:`dict` which denotes a mapping from the arguments to their inferred types. 
""" - return_arg_id_to_dtype = dict((id, dtype) for id, dtype in - arg_id_to_dtype.items() if (isinstance(id, int) and id < 0)) + return_arg_id_to_dtype = {id: dtype for id, dtype in + arg_id_to_dtype.items() if (isinstance(id, int) and id < 0)} return_arg_pos = sorted(return_arg_id_to_dtype.keys(), reverse=True) return tuple(return_arg_id_to_dtype[id] for id in return_arg_pos) @@ -71,7 +71,7 @@ class FunctionNameChanger(RuleAwareIdentityMapper): def __init__(self, rule_mapping_context, calls_to_new_names, subst_expander): - super(FunctionNameChanger, self).__init__(rule_mapping_context) + super().__init__(rule_mapping_context) self.calls_to_new_names = calls_to_new_names self.subst_expander = subst_expander @@ -94,7 +94,7 @@ class FunctionNameChanger(RuleAwareIdentityMapper): tuple(self.rec(child, expn_state) for child in expanded_expr.parameters)) else: - return super(FunctionNameChanger, self).map_call( + return super().map_call( expr, expn_state) else: return self.map_substitution(name, tag, expr.parameters, expn_state) @@ -106,12 +106,12 @@ class FunctionNameChanger(RuleAwareIdentityMapper): ResolvedFunction(self.calls_to_new_names[expr]), tuple(self.rec(child, expn_state) for child in expr.parameters), - dict( - (key, self.rec(val, expn_state)) - for key, val in six.iteritems(expr.kw_parameters)) + { + key: self.rec(val, expn_state) + for key, val in expr.kw_parameters.items()} ) else: - return super(FunctionNameChanger, self).map_call_with_kwargs( + return super().map_call_with_kwargs( expr, expn_state) @@ -422,8 +422,8 @@ class TypeInferenceMapper(CombineMapper): else: return None - arg_id_to_dtype = dict((i, none_if_empty(self.rec(par))) for (i, par) in - tuple(enumerate(expr.parameters)) + tuple(kw_parameters.items())) + arg_id_to_dtype = {i: none_if_empty(self.rec(par)) for (i, par) in + tuple(enumerate(expr.parameters)) + tuple(kw_parameters.items())} # specializing the known function wrt type if isinstance(expr.function, ResolvedFunction): @@ -521,11 +521,11 @@ class TypeInferenceMapper(CombineMapper): ValueArgDescriptor) # creating arg_id_to_dtype, arg_id_to_descr from arg_dtypes - arg_id_to_dtype = dict((i, dt.with_target(self.kernel.target)) - for i, dt in enumerate(mangle_result.arg_dtypes)) - arg_id_to_dtype.update(dict((-i-1, - dtype.with_target(self.kernel.target)) for i, dtype in enumerate( - mangle_result.result_dtypes))) + arg_id_to_dtype = {i: dt.with_target(self.kernel.target) + for i, dt in enumerate(mangle_result.arg_dtypes)} + arg_id_to_dtype.update({-i-1: + dtype.with_target(self.kernel.target) for i, dtype in enumerate( + mangle_result.result_dtypes)}) arg_descrs = tuple((i, ValueArgDescriptor()) for i, _ in enumerate(mangle_result.arg_dtypes)) res_descrs = tuple((-i-1, ValueArgDescriptor()) for i, _ in diff --git a/test/test_callables.py b/test/test_callables.py index f2f3acbd6..efb1e5e72 100644 --- a/test/test_callables.py +++ b/test/test_callables.py @@ -1,5 +1,3 @@ -from __future__ import division, absolute_import, print_function - __copyright__ = "Copyright (C) 2018 Kaushik Kulkarni" __license__ = """ diff --git a/test/test_transform.py b/test/test_transform.py index 684381c52..ff593a0c8 100644 --- a/test/test_transform.py +++ b/test/test_transform.py @@ -588,7 +588,7 @@ def test_nested_substs_in_insns(ctx_factory): prg = lp.expand_subst(ref_prg) assert not any( cknl.subkernel.substitutions - for cknl in six.itervalues(prg.callables_table.resolved_functions)) + for cknl in prg.callables_table.resolved_functions.values()) lp.auto_test_vs_ref(ref_prg, ctx, 
prg) -- GitLab
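
The hunks above are the output of a mechanical `pyupgrade --py36-plus` pass over the loopy tree (plus an incidental compyte submodule bump). The short sketch below is an illustrative, standalone example only — the `Base`/`Greeter` classes and the `data`/`counts` names are made up and do not appear in loopy — but it summarizes the idioms this patch rewrites throughout: argument-free `super()`, dict/set comprehensions in place of `dict(...)`/`set([...])` constructor calls, f-strings or `str.format` in place of `%`-formatting, plain `dict.items()` in place of `six.iteritems`, and removal of `from __future__` imports.

    # Before (Python 2/3-compatible style removed by this patch):
    #
    #   from __future__ import division, absolute_import
    #   import six
    #
    #   class Greeter(Base):
    #       def __init__(self, name):
    #           super(Greeter, self).__init__(name=name)
    #
    #   counts = dict((k, len(v)) for k, v in six.iteritems(data))
    #   names = set([k for k in counts])
    #   msg = "total: %d" % (total,)

    # After (Python 3.6+ style, as produced by pyupgrade --py36-plus):

    class Base:
        def __init__(self, name):
            self.name = name


    class Greeter(Base):
        def __init__(self, name):
            # zero-argument super() resolves to the enclosing class and instance
            super().__init__(name=name)


    data = {"a": [1, 2], "b": [3]}

    # dict/set comprehensions replace dict(...)/set([...]) constructor calls
    counts = {k: len(v) for k, v in data.items()}
    names = {k for k in counts}

    # f-strings replace %-formatting where the conversion is safe
    total = sum(counts.values())
    msg = f"total: {total}"

    if __name__ == "__main__":
        print(Greeter("loopy").name, names, msg)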