From ecbe138aa3c374cfa23d2d22150ff9e1e82cd379 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 6 May 2019 23:22:57 -0500 Subject: [PATCH 001/415] made package importable --- __init__.py | 0 dependency.py | 195 ++++++++++++++++++++++ dependency_check_experiments.py | 148 +++++++++++++++++ example_lex_map_creation.py | 137 ++++++++++++++++ lexicographic_order_map.py | 120 ++++++++++++++ sched_check_utils.py | 4 + schedule.py | 271 +++++++++++++++++++++++++++++++ schedule_creation_experiments.py | 219 +++++++++++++++++++++++++ version.py | 1 + 9 files changed, 1095 insertions(+) create mode 100644 __init__.py create mode 100644 dependency.py create mode 100644 dependency_check_experiments.py create mode 100644 example_lex_map_creation.py create mode 100644 lexicographic_order_map.py create mode 100644 sched_check_utils.py create mode 100644 schedule.py create mode 100644 schedule_creation_experiments.py create mode 100644 version.py diff --git a/__init__.py b/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/dependency.py b/dependency.py new file mode 100644 index 000000000..40ebaa99a --- /dev/null +++ b/dependency.py @@ -0,0 +1,195 @@ +import islpy as isl + + +class DependencyType: + NONE = "none" + SAME = "same" + PRIOR = "prior" + ALL = "all" + + +class Dependency(object): + def __init__( + self, + statement_before, + statement_after, + dep_type, + iname, + ): + self.statement_before = statement_before + self.statement_after = statement_after + self.dep_type = dep_type + self.iname = iname + + + def __str__(self): + return "%s -> %s {%s dep: %s}" % ( + self.statement_before, + self.statement_after, + self.iname, + self.dep_type) + + +def append_apostrophes(strings): + if not isinstance(strings, list): + raise ValueError("append_apostrophes did not receive a list") + else: + return [s+"'" for s in strings] + + +def create_equality_conjunction_set(names0, names1, islvars): + + # initialize set with constraint that is always true + eq_set = islvars[0].eq_set(islvars[0]) + for n0, n1 in zip(names0, names1): + eq_set = eq_set & islvars[n0].eq_set(islvars[n1]) + + return eq_set + + +def _convert_constraint_set_to_map(constraint_set, mv_count, src_position=None): + dim_type = isl.dim_type + constraint_map = isl.Map.from_domain(constraint_set) + if src_position: + return constraint_map.move_dims(dim_type.out, 0, dim_type.in_, src_position, mv_count) + else: + return constraint_map.move_dims(dim_type.out, 0, dim_type.in_, mv_count, mv_count) + + +def _make_islvars_with_var_primes(var_names, param_names): + return isl.make_zero_and_vars( + var_names+append_apostrophes(var_names), param_names) + + +def _create_bounded_set_for_dependency_constraints( + var_names, param_names, upper_bounds): + + # TODO assumes lower bound is zero + islvars = _make_islvars_with_var_primes(var_names, param_names) + + bounded_set = islvars[0].eq_set(islvars[0]) # initialize to True + + for v, p, b in zip(var_names, param_names, upper_bounds): + # create constraint 0 <= v,v'< p = b + v_prime = v+"'" + bounded_set = bounded_set \ + & islvars[v].lt_set(islvars[p]) \ + & islvars[v_prime].lt_set(islvars[p]) \ + & (islvars[0]-1).lt_set(islvars[v]) \ + & (islvars[0]-1).lt_set(islvars[v_prime]) \ + & islvars[p].eq_set(islvars[0]+b) + + return bounded_set + + +class DependencyConstraintVars(object): + def __init__( + self, + inames, + param_names, + param_vals, + statement_var, + statement_param, + statement_param_val, + ): + self.inames = inames + self.param_names = param_names + self.param_vals = 
param_vals + self.statement_var = statement_var + self.statement_param = statement_param + self.statement_param_val = statement_param_val + + def get_bounds_constraint_set(self): + var_names = [self.statement_var]+self.inames + param_names = [self.statement_param]+self.param_names + param_vals = [self.statement_param_val]+self.param_vals + # TODO assumes lower bound is zero + islvars = _make_islvars_with_var_primes( + var_names, param_names) + + bounded_set = islvars[0].eq_set(islvars[0]) # initialize to True + + for v, p, b in zip(var_names, param_names, param_vals): + # create constraint 0 <= v,v'< p = b + v_prime = v+"'" + bounded_set = bounded_set \ + & islvars[v].lt_set(islvars[p]) \ + & islvars[v_prime].lt_set(islvars[p]) \ + & (islvars[0]-1).lt_set(islvars[v]) \ + & (islvars[0]-1).lt_set(islvars[v_prime]) \ + & islvars[p].eq_set(islvars[0]+b) + + return bounded_set + + def __str__(self): + return str(self.get_bounds_constraint_set()) + + +def create_dependency_constraint( + dependencies, + dep_constraint_vars, + ): + # This function uses the dependencies given to create the following constraint: + # Statement [s,i,j] comes before statement [s',i',j'] iff + + # assumes statements are numbered sequentially + # (statement_bound = max statement id + 1) + + statement_param = dep_constraint_vars.statement_param + param_names = dep_constraint_vars.param_names + all_inames = dep_constraint_vars.inames + statement_var = dep_constraint_vars.statement_var + + # make sure all dependencies involve same two statements + if len(set([dep.statement_before for dep in dependencies])) != 1 or \ + len(set([dep.statement_after for dep in dependencies])) != 1: + raise ValueError("All depencencies must be between same two statements.") + # make sure all dependencies involve different inames # TODO upate after allowing prior(i,k) + if len(set([dep.iname for dep in dependencies])) != len(dependencies): + raise ValueError("All depencencies must apply to different inames.") + + DT = DependencyType + statement_var_prime = statement_var+"'" + islvars = _make_islvars_with_var_primes( + [statement_var]+all_inames, + [statement_param]+param_names) + + # initialize constraints to False + # this will disappear as soon as we add a constraint that is not DT.NONE + all_constraints_set = islvars[0].eq_set(islvars[0] + 1) + + for dep in dependencies: + iname = dep.iname + dep_type = dep.dep_type + if dep_type == DT.NONE: + continue + + iname_prime = iname+"'" # i' + other_inames = all_inames.copy() + other_inames.remove(iname) # remaining inames, e.g., [j, k] + other_inames_prime = append_apostrophes(other_inames) # e.g., [j', k'] + + # initialize constraint set with what we know about other inames (e.g., j = j', k = k') + constraint_set = create_equality_conjunction_set(other_inames, other_inames_prime, islvars) + if dep_type == DT.SAME: + constraint_set = constraint_set & islvars[iname].eq_set(islvars[iname_prime]) + elif dep_type == DT.PRIOR: + constraint_set = constraint_set & islvars[iname].lt_set(islvars[iname_prime]) + elif dep_type == DT.ALL: + constraint_set = constraint_set & islvars[0].eq_set(islvars[0]) # True + + constraint_set = constraint_set & islvars[statement_var].eq_set(islvars[0]+dep.statement_before) + constraint_set = constraint_set & islvars[statement_var_prime].eq_set(islvars[0]+dep.statement_after) + # TODO get this working + # add 'or' to indicate that this constraint doesn't apply to other statements + #remainder_set = islvars[statement_var].ne_set(islvars[0]+dep.statement_before) \ + # | 
islvars[statement_var_prime].ne_set(islvars[0]+dep.statement_after) + #print("remainder_set", remainder_set) + #constraint_set = constraint_set | remainder_set + + + all_constraints_set = all_constraints_set | constraint_set + + all_constraints_set = all_constraints_set & dep_constraint_vars.get_bounds_constraint_set() + + return _convert_constraint_set_to_map(all_constraints_set, len(dep_constraint_vars.inames)+1) diff --git a/dependency_check_experiments.py b/dependency_check_experiments.py new file mode 100644 index 000000000..2ef0bfce9 --- /dev/null +++ b/dependency_check_experiments.py @@ -0,0 +1,148 @@ +import islpy as isl +import loopy as lp +from schedule_checker.dependency import ( + Dependency, + DependencyType as DT, + create_dependency_constraint, + append_apostrophes, + DependencyConstraintVars, +) +from schedule_checker.lexicographic_order_map import ( + make_lex_mapping_tuple_pairs, + create_explicit_map_from_tuples, + get_statement_ordering_map, + set_space_names, + get_space, + create_symbolic_lex_mapping, +) +from schedule_checker.sched_check_utils import prettier_map_string + + +# make example kernel +knl = lp.make_kernel( + "{[i,j]: 0<=i,j<2}", + [ + "a[i,j] = b[i,j] {id=0}", + "a[i,j] = a[i,j] + 1 {id=1,dep=0}", + ], + name="example", + ) +knl = lp.tag_inames(knl, {"i": "l.0"}) +print("Kernel:") +print(knl) + +all_inames = ['i', 'j'] +iname_params = ['p0', 'p1'] +iname_param_vals = [2, 2] +statement_var = 's' +statement_param = 'ps' +statement_bound = 2 + +# example sched: +print("---------------------------------------------------------------------------") + +# i is parallel, suppose we want to enforce the following: +# for a given i, statement 0 happens before statement 1 + +params_sched = [statement_param]+iname_params +in_names_sched = [statement_var]+all_inames +out_names_sched = ['l0', 'l1'] +sched_space = get_space(params_sched, in_names_sched, out_names_sched) + +example_sched_valid = create_explicit_map_from_tuples( + [ + ((0,0,0), (0, 0)), + ((0,1,0), (0, 0)), + ((1,0,0), (0, 1)), + ((1,1,0), (0, 1)), + ((0,0,1), (1, 0)), + ((0,1,1), (1, 0)), + ((1,0,1), (1, 1)), + ((1,1,1), (1, 1)), + ], + sched_space, + ) +print("example sched (valid):") +print(prettier_map_string(example_sched_valid)) + +example_sched_invalid = create_explicit_map_from_tuples( + [ + ((0,0,0), (0, 0)), + ((0,1,0), (1, 1)), # these two are out of order, violation + ((1,0,0), (0, 1)), + ((1,1,0), (0, 1)), + ((0,0,1), (1, 0)), + ((0,1,1), (1, 0)), + ((1,0,1), (1, 1)), + ((1,1,1), (0, 0)), # these two are out of order, violation + ], + sched_space, + ) +print("example sched (invalid):") +print(prettier_map_string(example_sched_invalid)) + +# *Explicit* lexicographic mapping- map each tuple to all tuples occuring later +print("---------------------------------------------------------------------------") +lex_dim_bounds = [(0,2), (0,2)] # max vals for each dim (e.g., 0 <= i0 < max0 ...) 
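+# Illustration (hand-worked, assuming the bounds above): with
+# lex_dim_bounds = [(0,2), (0,2)], the lex points in order are
+# (0,0), (0,1), (1,0), (1,1), so make_lex_mapping_tuple_pairs below is
+# expected to pair each point with every later point:
+#   ((0,0),(0,1)), ((0,0),(1,0)), ((0,0),(1,1)),
+#   ((0,1),(1,0)), ((0,1),(1,1)), ((1,0),(1,1))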
+lex_params = [] +lex_in_names = out_names_sched +lex_out_names = append_apostrophes(out_names_sched) + +explicit_lex_map_pairs = make_lex_mapping_tuple_pairs(lex_dim_bounds) +# for pair in explicit_lex_map_pairs: +# print(pair[0], pair[1]) +lex_space_explicit = get_space(lex_params, lex_in_names, lex_out_names) +lex_map_explicit = create_explicit_map_from_tuples(explicit_lex_map_pairs, + lex_space_explicit) +print("lex_map (explicit):") +print(prettier_map_string(lex_map_explicit)) + +# Statement instance ordering (valid sched) +print("----------------------------------------------------------------------") +SIO_explicit_valid = get_statement_ordering_map( + example_sched_valid, lex_map_explicit) +print("statement instance ordering explicit (valid_sched):") +print(prettier_map_string(SIO_explicit_valid)) +# Statement instance ordering (invalid sched) +print("----------------------------------------------------------------------") +SIO_explicit_invalid = get_statement_ordering_map( + example_sched_invalid, lex_map_explicit) +print("statement instance ordering explicit (invalid_sched):") +print(prettier_map_string(SIO_explicit_invalid)) + +# Dependencies and constraints: +print("----------------------------------------------------------------------") + +dep_constraint_vars = DependencyConstraintVars( + all_inames, + iname_params, + iname_param_vals, + statement_var, + statement_param, + statement_bound, + ) + +# i is parallel, suppose we want to enforce the following: +# for a given i, statement 0 happens before statement 1 +# i dependency is none, j dependency is `prior` + +deps = [ + #Dependency(0, 1, DT.NONE, 'i'), + Dependency(0, 1, DT.SAME, 'i'), + Dependency(0, 1, DT.SAME, 'j'), + ] +print([str(dep) for dep in deps]) +constraint_map = create_dependency_constraint( + deps, dep_constraint_vars) +assert constraint_map.space == SIO_explicit_valid.space +print("constraint map:") +print(prettier_map_string(constraint_map)) + +print("is valid sched valid?") +print(constraint_map.is_subset(SIO_explicit_valid)) +#print(SIO_explicit_valid.is_subset(constraint_map)) + +print("is invalid sched valid?") +print(constraint_map.is_subset(SIO_explicit_invalid)) +#print(SIO_explicit_invalid.is_subset(constraint_map)) + diff --git a/example_lex_map_creation.py b/example_lex_map_creation.py new file mode 100644 index 000000000..fec169ea1 --- /dev/null +++ b/example_lex_map_creation.py @@ -0,0 +1,137 @@ +import islpy as isl +from schedule_checker.dependency import ( + Dependency, + DependencyType as DT, + create_dependency_constraint, +) +from schedule_checker.lexicographic_order_map import ( + make_lex_mapping_tuple_pairs, + create_explicit_map_from_tuples, + get_statement_ordering_map, + set_space_names, + get_space, + create_symbolic_lex_mapping, +) + + +# *Symbolic* lexicographic mapping- map each tuple to all tuples occuring later + +#dim_bounds = [3, 2, 2] # max vals for each dim (e.g., 0 <= i0 <= max0 ...) +#param_names = ["p0", "p1", "p2"] +#in_names = ["i0", "i1", "i2"] +#out_names = ["o0", "o1", "o2"] +dim_bounds = [(0,2), (0,2)] # max vals for each dim (e.g., 0 <= i0 < max0 ...) 
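+# Sketch of the intended result (illustrative, for the two dims above):
+# the symbolic lex map should relate (i,j) -> (i',j') exactly when
+#   i < i'  or  (i = i' and j < j'),
+# with every variable additionally bounded by 0 <= . < 2.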
+param_names = ["p0", "p1"] +in_names = ["i", "j"] +out_names = ["i'", "j'"] + +lex_map_symbolic = create_symbolic_lex_mapping(param_names, in_names, out_names, dim_bounds) +print("lex_map (symbolic):") +print(lex_map_symbolic) + + +# *Explicit* lexicographic mapping- map each tuple to all tuples occuring later + +explicit_lex_map_pairs = make_lex_mapping_tuple_pairs(dim_bounds) +# for pair in explicit_lex_map_pairs: +# print(pair[0], pair[1]) +lex_map_explicit = create_explicit_map_from_tuples(explicit_lex_map_pairs, + lex_map_symbolic.space) +print("lex_map (explicit):") +print(lex_map_explicit) + + +# Example *explicit* schedule (map statement instances to lex time) + +param_names_sched = [] +in_names_sched = ["s"] +out_names_sched = ["i", "j"] +sched_space = get_space(param_names_sched, in_names_sched, out_names_sched) +example_sched = create_explicit_map_from_tuples( + [ + #((0,), (2, 0, 0)), + #((1,), (2, 0, 1)), + #((2,), (2, 1, 0)), + #((3,), (2, 1, 1)), + ((0,), (0, 0)), + ((1,), (0, 1)), + ((2,), (1, 0)), + ((3,), (1, 1)), + ], + sched_space, + ) +print("example sched:") +print(example_sched) + +# statement ordering: +# map each statement instance to all statement instances that occur later +# S -> L -> S^-1 + +statement_instance_ordering_explicit = get_statement_ordering_map( + example_sched, lex_map_explicit) +print("statement instance ordering explicit:") +print(statement_instance_ordering_explicit) + +# TODO figure out where these "p0 >= 2 and p1 >= 2" are coming from: +statement_instance_ordering_symbolic = get_statement_ordering_map( + example_sched, lex_map_symbolic) +print("statement instance ordering symbolic:") +print(statement_instance_ordering_symbolic) + + +# example constraint test: +print("---------------------------------------------------------------------------") +""" +param_names_sched = ["ps", "p0", "p1"] +in_names_sched = ["s"] +out_names_sched = ["i", "j"] +sched_space = isl.Space.alloc(isl.DEFAULT_CONTEXT, 3, 1, 2) +sched_space = set_space_names( + sched_space, + param_names=param_names_sched, + in_names=in_names_sched, + out_names=out_names_sched) +example_sched = create_explicit_map_from_tuples( + [ + #((0,0), (0, 0)), + #((1,0), (0, 1)), + #((2,1), (1, 0)), + #((3,1), (1, 1)), + ((0,), (0, 0)), + ((1,), (0, 1)), + ((2,), (1, 0)), + ((3,), (1, 1)), + ], + sched_space, + ) +print("example sched:") +print(example_sched) +""" +param_names_sched = ["ps", "p0", "p1"] +in_names_sched = ["s","i","j"] +out_names_sched = ["l0","l1"] +sched_space = get_space(param_names_sched, in_names_sched, out_names_sched) +example_sched = create_explicit_map_from_tuples( + [ + ((0,0,0), (0, 0)), + ((0,1,0), (0, 0)), + ((1,0,0), (0, 1)), + ((1,1,0), (0, 1)), + ((0,0,1), (1, 0)), + ((0,1,1), (1, 0)), + ((1,0,1), (1, 1)), + ((1,1,1), (1, 1)), + ], + sched_space, + ) +print("example sched:") +print(example_sched) + +print("lex map explicit:") +print(lex_map_explicit) + +statement_instance_ordering_explicit = get_statement_ordering_map( + example_sched, lex_map_explicit) +print("statement instance ordering explicit:") +print(statement_instance_ordering_explicit) + diff --git a/lexicographic_order_map.py b/lexicographic_order_map.py new file mode 100644 index 000000000..f1fe02655 --- /dev/null +++ b/lexicographic_order_map.py @@ -0,0 +1,120 @@ +import islpy as isl + + +def make_lex_mapping_tuple_pairs(dim_bounds): + + import itertools + # all lex tuples in order: + lex_tuples = list( + itertools.product(*[range(l,u) for l,u in dim_bounds])) + # TODO: is itertools.product ordering 
guaranteed? + + map_pairs = [] + for i, l_before in enumerate(lex_tuples): + for l_after in lex_tuples[i+1:]: + map_pairs.append((l_before, l_after)) + return map_pairs + + +def create_explicit_map_from_tuples(tuple_pairs, space): + + dim_type = isl.dim_type + individual_maps = [] + + for tup_in, tup_out in tuple_pairs: + constraints = [] + for i, val_in in enumerate(tup_in): + constraints.append( + isl.Constraint.equality_alloc(space) + .set_coefficient_val(dim_type.in_, i, 1) + .set_constant_val(-1*val_in)) + for i, val_out in enumerate(tup_out): + constraints.append( + isl.Constraint.equality_alloc(space) + .set_coefficient_val(dim_type.out, i, 1) + .set_constant_val(-1*val_out)) + individual_maps.append( + isl.Map.universe(space).add_constraints(constraints)) + + union_map = individual_maps[0] + for m in individual_maps[1:]: + union_map = union_map.union(m) + + return union_map + + +def get_statement_ordering_map(sched_map, lex_map): + # statement ordering: + # map each statement instance to all statement instances that occur later + # S -> L -> S^-1 + return sched_map.apply_range(lex_map).apply_range(sched_map.reverse()) + + +def set_space_names(space, param_names=None, in_names=None, out_names=None): + new_space = space.copy() + dim_type = isl.dim_type + if param_names: + for i, p in enumerate(param_names): + new_space = new_space.set_dim_name(dim_type.param, i, p) + else: + for i in range(len(space.get_var_names(dim_type.param))): + new_space = new_space.set_dim_name(dim_type.param, i, "p%d" % (i)) + if in_names: + for i, p in enumerate(in_names): + new_space = new_space.set_dim_name(dim_type.in_, i, p) + else: + for i in range(len(space.get_var_names(dim_type.in_))): + new_space = new_space.set_dim_name(dim_type.in_, i, "i%d" % (i)) + if out_names: + for i, p in enumerate(out_names): + new_space = new_space.set_dim_name(dim_type.out, i, p) + else: + for i in range(len(space.get_var_names(dim_type.out))): + new_space = new_space.set_dim_name(dim_type.out, i, "o%d" % (i)) + return new_space + + +def get_space(param_names, in_names, out_names): + space = isl.Space.alloc(isl.DEFAULT_CONTEXT, len(param_names), len(in_names), len(out_names)) + return set_space_names(space, param_names=param_names, in_names=in_names, out_names=out_names) + + +def create_symbolic_lex_mapping(param_names, in_names, out_names, + dim_bounds): + # assumes dim vars are bounded between 0 and corresponding dim_bound + assert len(in_names) == len(out_names) + dim_type = isl.dim_type + + islvars = isl.make_zero_and_vars(in_names+out_names, param_names) + + # initialize set with constraint that is always true + lex_set_outer_bounds = islvars[0].eq_set(islvars[0]) + # make constraints to bound dim vars 0 <= ix < dim_bound_x + for i, dim_bound in enumerate(dim_bounds): + lex_set_outer_bounds = lex_set_outer_bounds \ + & islvars[0].le_set(islvars[in_names[i]]) \ + & islvars[in_names[i]].le_set(islvars[param_names[i]]-1) \ + & islvars[0].le_set(islvars[out_names[i]]) \ + & islvars[out_names[i]].le_set(islvars[param_names[i]]-1) + + # create constraint enforcing lex ordering, e.g., in the 3-dim case: + # i0 < o0 or ((i0 = o0) and (i1 < o1)) + # or ((i0 = o0) and (i1 = o1) and (i2 < o2)) + lex_set_order_bound = islvars[in_names[0]].le_set(islvars[out_names[0]]-1) + for i in range(1, len(in_names)): + lex_set_order_bound_conj = islvars[in_names[i]].le_set( + islvars[out_names[i]]-1) + for j in range(i): + lex_set_order_bound_conj = lex_set_order_bound_conj & \ + islvars[in_names[j]].eq_set(islvars[out_names[j]]) + 
lex_set_order_bound = lex_set_order_bound | lex_set_order_bound_conj + + lex_set = lex_set_outer_bounds & lex_set_order_bound + lex_map = isl.Map.from_domain(lex_set) + lex_map = lex_map.move_dims( + dim_type.out, 0, dim_type.in_, + len(in_names), len(out_names)) + + return lex_map + + diff --git a/sched_check_utils.py b/sched_check_utils.py new file mode 100644 index 000000000..878d42183 --- /dev/null +++ b/sched_check_utils.py @@ -0,0 +1,4 @@ + + +def prettier_map_string(isl_map): + return str(isl_map).replace("{ ", "{\n").replace(" }","\n}").replace("; ",";\n") diff --git a/schedule.py b/schedule.py new file mode 100644 index 000000000..410489d5f --- /dev/null +++ b/schedule.py @@ -0,0 +1,271 @@ +import islpy as isl +from collections import OrderedDict + + +class Statement(object): + def __init__( + self, + statement_id, + active_inames, + ): + self.statement_id = statement_id # string + self.active_inames = active_inames # [string, ] + + def __str__(self): + return "%s {%s}" % ( + self.statement_id, ",".join(self.active_inames)) + + +class StatementInstance(object): + def __init__( + self, + statement, + iname_vals, + ): + assert all( + [iname in statement.active_inames + for iname, val in iname_vals.items()]) + self.statement = statement # statement + self.iname_vals = iname_vals # dict{string:int} + + def __str__(self): + import six + return "[%s,%s]" % ( + self.statement.statement_id, ",".join( + ["%d" % (v) for k, v in sorted(six.iteritems(self.iname_vals))])) + + def __eq__(self, other): + return self.iname_vals == other.iname_vals and \ + self.statement.statement_id == other.statement.statement_id + + def __hash__(self): + return hash(str(self)) + + +class LexSchedule(object): + # TODO this should hold a map of statement instances to lex order space + def __init__( + self, + knl, + iname_bounds, + ): + self.lex_schedule = OrderedDict() # statement instance: lex point + self.inames_enumerated = [] # symbolic inames in sched that have been enumerated into explicit statement instances + self.lp_insnid_to_id = {} + + from loopy.schedule import (EnterLoop, LeaveLoop, Barrier, RunInstruction) + cur_nest_lex_prefix = [] + for sched_item in knl.schedule: + if isinstance(sched_item, EnterLoop): + iname = sched_item.iname + #conc_dict = get_iname_concurrency_dict([iname], knl) + #print("EnterLoop: %s" % (conc_dict)) + if self: + cur_nest_lex_prefix.append(self.get_last_lex_pt()[-1]) + else: + cur_nest_lex_prefix.append(0) + cur_nest_lex_prefix.append(iname) + elif isinstance(sched_item, LeaveLoop): + #conc_dict = get_iname_concurrency_dict([sched_item.iname], knl) + #print("LeaveLoop: %s" % (conc_dict)) + cur_nest_lex_prefix.pop() # pop loop variable + cur_nest_lex_prefix.pop() # pop insn ct variable + elif isinstance(sched_item, RunInstruction): + self.add_new_lp_insnid(sched_item.insn_id) + insn_id_int = self.lp_insnid_to_id[sched_item.insn_id] + #inames = knl.id_to_insn[insn_id].within_inames + #conc_dict = get_iname_concurrency_dict(inames, knl) + #print("RunInstruction: id: %s; inames: %s" % (sched_item.insn_id, conc_dict)) + self.append_item( + (insn_id_int,), + cur_nest_lex_prefix + [self.get_next_lex_val_in_series(cur_nest_lex_prefix, iname_bounds)]) + elif isinstance(sched_item, Barrier): + pass + else: + pass + self.pad_lex_pts_with_zeros() + + def max_lex_dims(self): + return max(len(lex_pt) for insn, lex_pt in self.items()) + + def pad_lex_pts_with_zeros(self): + max_lex_dim = self.max_lex_dims() + new_sched = OrderedDict() + for insn, lex_pt in self.items(): + new_sched[insn] 
= lex_pt + [0]*(max_lex_dim-len(lex_pt)) + self.lex_schedule = new_sched + + def enumerate_iname(self, iname, bound): + new_sched = OrderedDict() + iname_found = False + for insn, lex_pt in self.lex_schedule.items(): + if iname in lex_pt: + for v in range(bound[0],bound[1]): + new_sched[tuple(list(insn)+[v])] = [l if l != iname else v for l in lex_pt] + iname_found = True + else: + new_sched[insn] = lex_pt + self.lex_schedule = new_sched + if iname_found: + self.inames_enumerated.append(iname) + + def enumerate_inames(self, iname_bounds): + inames_found = [] + for iname, bound in iname_bounds.items(): + self.enumerate_iname(iname, bound) + + def add_new_lp_insnid(self, lp_insnid): + if self.lp_insnid_to_id: + self.lp_insnid_to_id[lp_insnid] = max(self.lp_insnid_to_id.values()) + 1 + else: + self.lp_insnid_to_id[lp_insnid] = 0 + + def get_sched_space(self): + params_sched = ["ps"] + ["p"+iname for iname in self.inames_enumerated] + in_names_sched = ["s"] + self.inames_enumerated + out_names_sched = ["l"+str(i) for i in range(self.max_lex_dims())] + from schedule_checker.lexicographic_order_map import get_space + return get_space(params_sched, in_names_sched, out_names_sched) + + def get_max_lex_dim_vals(self): + return [max(dim_pts) for dim_pts in zip(*self.lex_schedule.values())] + + def get_min_lex_dim_vals(self): + return [min(dim_pts) for dim_pts in zip(*self.lex_schedule.values())] + + def append_item(self, sched_item, lex_pt): + self.lex_schedule[sched_item] = lex_pt + + def get_last_schedule_item(self): + return next(reversed(self.lex_schedule)) + + def get_last_lex_pt(self): + return self.lex_schedule[self.get_last_schedule_item()] + + def get_next_lex_val_in_series(self, cur_nest_lex_prefix, iname_bounds): + if not self.lex_schedule: + return 0 + last_lex_pt = self.get_last_lex_pt() + #print(last_lex_pt) + if len(last_lex_pt) == len(cur_nest_lex_prefix) + 1: + # we're still in same loop, increment current lex dim val + return last_lex_pt[-1] + 1 + elif len(last_lex_pt) > len(cur_nest_lex_prefix) + 1: + # we just ended one or more loops, increment appropriate lex dim val + return last_lex_pt[len(cur_nest_lex_prefix)] + 1 + else: # len(last_lex_pt) < cur_nest_lex_prefix + 1: + # we just entered one or more loops + #return 0 + return iname_bounds[cur_nest_lex_prefix[-1]][0] + + def create_explicit_isl_map(self, sched_space): + from schedule_checker.lexicographic_order_map import create_explicit_map_from_tuples + return create_explicit_map_from_tuples(list(self.items()), sched_space) + + def enumerate_symbolic_inames_and_create_explicit_isl_map(self, iname_bounds): + self.enumerate_inames(iname_bounds) + sched_space = self.get_sched_space() + return self.create_explicit_isl_map(sched_space) + + def get_lex_map_explicit(self): + + from schedule_checker.lexicographic_order_map import ( + make_lex_mapping_tuple_pairs, + create_explicit_map_from_tuples, + get_space, + ) + from schedule_checker.dependency import append_apostrophes + + # TODO lower bound may not be zero + lex_dim_bounds = list(zip(self.get_min_lex_dim_vals(), + [1 + v for v in self.get_max_lex_dim_vals()])) + sched_space = self.get_sched_space() + + lex_in_names = sched_space.get_var_names(isl.dim_type.out) + lex_out_names = append_apostrophes(lex_in_names) + lex_params = [] + + # TODO lex map routines currently assume lower bound is zero, fix this + explicit_lex_map_pairs = make_lex_mapping_tuple_pairs(lex_dim_bounds) + lex_space_explicit = get_space(lex_params, lex_in_names, lex_out_names) + + return 
create_explicit_map_from_tuples(explicit_lex_map_pairs, + lex_space_explicit) + + #def get_isl_map(self): + def get_isl_map_str(self): + map_str = "{" + for state_inst, lex in self.lex_schedule.items(): + domain_elem = "[s=%s,%s]" % ( + state_inst.statement.statement_id, ",".join( + ["%s=%d" % (iname, val) for iname, val in state_inst.iname_vals.items()])) + range_elem = "[%s]" % (",".join("%s" % (l) for l in lex)) + map_str += "%s -> %s; " % (domain_elem, range_elem) + map_str += "}" + #TODO return map not string + return map_str + + def __bool__(self): + return bool(self.lex_schedule) + + def __nonzero__(self): + return self.__bool__() + + def __eq__(self, other): + return self.lex_schedule == other.lex_schedule + + def __iter__(self): + return iter(self.lex_schedule) + + def keys(self): + return self.lex_schedule.keys() + + def items(self): + return self.lex_schedule.items() + + def values(self): + return self.lex_schedule.values() + + #def __str__(self): + # #return str(self.get_isl_map()) + # return str(self.get_isl_map_str()) + + def __str__(self): + return str(list(self.lex_schedule)) + + # TODO remove after stripping useful parts: + """ + def add_run_instructions_within_loop_nesting( + self, + insn_ids_ordered, + nest_order, # sequential lex dims in nest order (other lex dims assumed parallel) + iname_bounds, # dict w/bounds for sequential lex dims + concurrent_inames, + ): + # TODO don't pass explicit iname bounds, get them from kernel + + # TODO for now, assuming loop nestings are not re-encountered + + # create a lex dim for this set of (sequential) insns + self.add_lex_dim("s"+"".join(str(i) for i in insn_ids_ordered)) + + nested_iname_bounds_ordered = [iname_bounds[i] for i in nest_order] + import itertools + all_iname_val_sets = list( + itertools.product(*[range(b) for b in nested_iname_bounds_ordered])) + #TODO is there an order guarantee with product? 
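+        # (note: itertools.product does iterate in lexicographic order of
+        # its input iterables -- the rightmost factor advances fastest --
+        # so the tuple ordering here is deterministic)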
+ + for n_insn, insn_id in enumerate(insn_ids_ordered): # for each statement + st = Statement(insn_id, concurrent_inames+nest_order) + new_st_instances = [] + for iname_vals in all_iname_val_sets: + iname_vals = list(iname_vals) + # TODO handle concurrent inames + concurrent_iname_vals = [-1 for iname in range(len(concurrent_inames))] + st_i = StatementInstance( + st, + dict(zip(concurrent_inames+nest_order, + concurrent_iname_vals+iname_vals))) + self.lex_schedule[st_i] = iname_vals+[n_insn] + """ + diff --git a/schedule_creation_experiments.py b/schedule_creation_experiments.py new file mode 100644 index 000000000..dc5fa5535 --- /dev/null +++ b/schedule_creation_experiments.py @@ -0,0 +1,219 @@ +import islpy as isl +import loopy as lp +import numpy as np +from schedule_checker.dependency import ( + Dependency, + DependencyType, + append_apostrophes, +) +from schedule_checker.schedule import Statement, StatementInstance, LexSchedule +from schedule_checker.sched_check_utils import prettier_map_string +from schedule_checker.lexicographic_order_map import ( + create_explicit_map_from_tuples, + get_statement_ordering_map, + #set_space_names, + get_space, + #create_symbolic_lex_mapping, +) +from schedule_checker.sched_check_utils import prettier_map_string + + +def get_iname_bounds_dict(knl, _set_arbitrary_bounds=None): + # TODO don't require explicit bounds + + if _set_arbitrary_bounds: + return dict((iname, _set_arbitrary_bounds) for iname in knl.all_inames()) + + from loopy.symbolic import aff_to_expr + int_bounds = {} + for iname in knl.all_inames(): + bounds_record = knl.get_iname_bounds(iname, constants_only=True) + (_, iname_min_aff), = bounds_record.lower_bound_pw_aff.get_pieces() + (_, iname_max_aff), = bounds_record.upper_bound_pw_aff.get_pieces() + int_bounds[iname] = [ + aff_to_expr(iname_min_aff), + aff_to_expr(iname_max_aff) + 1, + ] + assert all(isinstance(i,int) for i in int_bounds[iname]) + return int_bounds + +# make example kernel +knl = lp.make_kernel( + #"{[i,j]: 0<=i,j<2}", + "{[i,j]: 0<=i<2 and 1<=j<3}", + [ + "<>temp = b[i,j] {id=0}", + "a[i,j] = temp + 1 {id=1,dep=0}", + "c[i,j] = d[i,j] {id=2}" + ], + name="example", + lang_version=(2018, 2) + ) +knl = lp.add_and_infer_dtypes(knl, {"b": np.float32, "d": np.float32}) +knl = lp.tag_inames(knl, {"i": "l.0"}) +knl = lp.preprocess_kernel(knl) +knl = lp.get_one_scheduled_kernel(knl) + +# make some dependencies manually for now: +s0 = Statement("0", ["i", "j"]) +s1 = Statement("1", ["i", "j"]) +s2 = Statement("2", ["i", "j"]) +dep_s1_i = Dependency(s0, s1, "i", DependencyType.SAME) +dep_s1_j = Dependency(s0, s1, "j", DependencyType.SAME) +insn_to_deps = {"0":[], "1":[dep_s1_i, dep_s1_j], "2":[]} + +# enforce explicit iname bounds for now TODO +print("Kernel:") +print(knl) +print(lp.generate_code_v2(knl).device_code()) +print("="*80) +print("Iname tags: %s" % (knl.iname_to_tags)) +print("="*80) +print("Loopy schedule:") +for sched_item in knl.schedule: + print(sched_item) +print("="*80) + +def get_iname_concurrency_dict(inames, knl): + from loopy.kernel.data import LocalIndexTag, GroupIndexTag + conc_dict = {} + for iname in inames: + iname_tags = knl.iname_to_tags.get(iname, None) + concurrent = False + if iname_tags: + if len(iname_tags) > 1: + 1/0 + else: + iname_tag = list(iname_tags)[0] + if isinstance(iname_tag, (LocalIndexTag, GroupIndexTag)): + concurrent = True + conc_dict[iname] = "concurrent" if concurrent else "sequential" + return conc_dict + +# Get schedule ------------------------------------------------------ 
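+# Rough sketch of what follows (illustrative, not computed here): LexSchedule
+# walks knl.schedule and assigns each RunInstruction a lex point built from
+# alternating (statement-count, loop-iname) entries, e.g. statement 0 inside
+# the j loop gets something like [0, 'j', 0]; the points are zero-padded to a
+# common length, then the symbolic iname entries are enumerated to build an
+# explicit isl map from statement instances to lex time.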
+iname_bounds = get_iname_bounds_dict(knl) +#iname_bounds = get_iname_bounds_dict(knl, _set_arbitrary_bounds=[0,2]) +print(iname_bounds) +sched = LexSchedule(knl, iname_bounds) +example_sched_valid = sched.enumerate_symbolic_inames_and_create_explicit_isl_map(iname_bounds) +# ------------------------------------------------------------------- + +print("example LexSched (valid):") +print(prettier_map_string(example_sched_valid)) + +# *Explicit* lexicographic mapping- map each tuple to all tuples occuring later +print("---------------------------------------------------------------------------") +lex_map_explicit = sched.get_lex_map_explicit() + +print("lex map explicit:") +print(prettier_map_string(lex_map_explicit)) + +# Statement instance ordering +print("----------------------------------------------------------------------") +SIO_explicit_valid = get_statement_ordering_map( + example_sched_valid, lex_map_explicit) +print("statement instance ordering explicit (valid_sched):") +print(prettier_map_string(SIO_explicit_valid)) + +''' +all_inames = ['i', 'j'] +iname_params = ['p0', 'p1'] +iname_param_vals = [2, 2] +statement_var = 's' +statement_param = 'ps' +statement_bound = 2 + + + +s0 = Statement("0", ["i", "j"]) +s1 = Statement("1", ["i", "j"]) +print("Statements:") +print(s0) +print(s1) + +s0_00 = StatementInstance(s0, {"i": 0, "j": 0}) +s0_10 = StatementInstance(s0, {"i": 1, "j": 0}) +s0_01 = StatementInstance(s0, {"i": 0, "j": 1}) +s0_11 = StatementInstance(s0, {"i": 1, "j": 1}) +s1_00 = StatementInstance(s1, {"i": 0, "j": 0}) +s1_10 = StatementInstance(s1, {"i": 1, "j": 0}) +s1_01 = StatementInstance(s1, {"i": 0, "j": 1}) +s1_11 = StatementInstance(s1, {"i": 1, "j": 1}) +print("Statement instances:") +print(s0_00) +print(s0_10) +print(s0_01) +print(s0_11) +print(s1_00) +print(s1_10) +print(s1_01) +print(s1_11) + +state_inst_to_lex_time_dict = { + s0_00: (0,0), + s1_00: (0,1), + s0_10: (0,0), + s1_10: (0,1), + s0_01: (1,0), + s1_01: (1,1), + s0_11: (1,0), + s1_11: (1,1), + } + +sched = LexSchedule(state_inst_to_lex_time_dict) +print("LexSchedule:") +print(sched) + +# sched map should be this: +schedule_explicit_map = isl.Map( + """{ + [s,i,j] -> [0,0] : s = 0 and i = 0 and j = 0; + [s,i,j] -> [0,1] : s = 1 and i = 0 and j = 0; + [s,i,j] -> [0,0] : s = 0 and i = 1 and j = 0; + [s,i,j] -> [0,1] : s = 1 and i = 1 and j = 0; + [s,i,j] -> [1,0] : s = 0 and i = 0 and j = 1; + [s,i,j] -> [1,1] : s = 1 and i = 0 and j = 1; + [s,i,j] -> [1,0] : s = 0 and i = 1 and j = 1; + [s,i,j] -> [1,1] : s = 1 and i = 1 and j = 1; + }""") + +schedule_general_map = isl.Map("{[s,i,j] -> [j,s]}") + +print("Map representing schedule generally:") +print(schedule_general_map) + +# the following is equivalent to explicit map above: +schedule_explicit_map2 = isl.Map( + """{ + [s=0,i=0,j=0] -> [0,0]; + [s=1,i=0,j=0] -> [0,1]; + [s=0,i=1,j=0] -> [0,0]; + [s=1,i=1,j=0] -> [0,1]; + [s=0,i=0,j=1] -> [1,0]; + [s=1,i=0,j=1] -> [1,1]; + [s=0,i=1,j=1] -> [1,0]; + [s=1,i=1,j=1] -> [1,1]; + }""") +assert schedule_explicit_map2 == schedule_explicit_map == sched.get_isl_map() + +''' + +""" +dep_i_same = Dependency(s0, s1, "i", DependencyType.SAME) +dep_i_none = Dependency(s0, s1, "i", DependencyType.NONE) +dep_i_prior = Dependency(s0, s1, "i", DependencyType.PRIOR) +dep_i_all = Dependency(s0, s1, "i", DependencyType.ALL) +dep_j_same = Dependency(s0, s1, "j", DependencyType.SAME) +dep_j_none = Dependency(s0, s1, "j", DependencyType.NONE) +dep_j_prior = Dependency(s0, s1, "j", DependencyType.PRIOR) +dep_j_all = Dependency(s0, 
s1, "j", DependencyType.ALL) +print("Example dependencies: ") +print(dep_i_same) +print(dep_i_none) +print(dep_i_prior) +print(dep_i_all) +print(dep_j_same) +print(dep_j_none) +print(dep_j_prior) +print(dep_j_all) +""" diff --git a/version.py b/version.py new file mode 100644 index 000000000..b6a75f587 --- /dev/null +++ b/version.py @@ -0,0 +1 @@ +VERSION_TEXT = "0.1" -- GitLab From 535c3001eb759fa507b80fe0a42a65eddcd8181f Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sun, 12 May 2019 16:51:24 -0500 Subject: [PATCH 002/415] create_symbolic_lex_mapping allows variable params and no longer requires user to provide map variable names --- example_lex_map_creation.py | 22 +++++++++++-------- lexicographic_order_map.py | 43 +++++++++++++++++++++++++++---------- 2 files changed, 45 insertions(+), 20 deletions(-) diff --git a/example_lex_map_creation.py b/example_lex_map_creation.py index fec169ea1..92bfe2930 100644 --- a/example_lex_map_creation.py +++ b/example_lex_map_creation.py @@ -3,6 +3,7 @@ from schedule_checker.dependency import ( Dependency, DependencyType as DT, create_dependency_constraint, + append_apostrophes, ) from schedule_checker.lexicographic_order_map import ( make_lex_mapping_tuple_pairs, @@ -16,22 +17,19 @@ from schedule_checker.lexicographic_order_map import ( # *Symbolic* lexicographic mapping- map each tuple to all tuples occuring later -#dim_bounds = [3, 2, 2] # max vals for each dim (e.g., 0 <= i0 <= max0 ...) -#param_names = ["p0", "p1", "p2"] -#in_names = ["i0", "i1", "i2"] -#out_names = ["o0", "o1", "o2"] dim_bounds = [(0,2), (0,2)] # max vals for each dim (e.g., 0 <= i0 < max0 ...) -param_names = ["p0", "p1"] -in_names = ["i", "j"] -out_names = ["i'", "j'"] - -lex_map_symbolic = create_symbolic_lex_mapping(param_names, in_names, out_names, dim_bounds) +#in_names = ["i", "j"] +#out_names = append_apostrophes(in_names) +n_dims = 2 #len(in_names) +lex_map_symbolic = create_symbolic_lex_mapping( + n_dims, dim_bound_vals=dim_bounds) print("lex_map (symbolic):") print(lex_map_symbolic) # *Explicit* lexicographic mapping- map each tuple to all tuples occuring later +""" explicit_lex_map_pairs = make_lex_mapping_tuple_pairs(dim_bounds) # for pair in explicit_lex_map_pairs: # print(pair[0], pair[1]) @@ -39,6 +37,7 @@ lex_map_explicit = create_explicit_map_from_tuples(explicit_lex_map_pairs, lex_map_symbolic.space) print("lex_map (explicit):") print(lex_map_explicit) +""" # Example *explicit* schedule (map statement instances to lex time) @@ -67,10 +66,12 @@ print(example_sched) # map each statement instance to all statement instances that occur later # S -> L -> S^-1 +""" statement_instance_ordering_explicit = get_statement_ordering_map( example_sched, lex_map_explicit) print("statement instance ordering explicit:") print(statement_instance_ordering_explicit) +""" # TODO figure out where these "p0 >= 2 and p1 >= 2" are coming from: statement_instance_ordering_symbolic = get_statement_ordering_map( @@ -106,6 +107,8 @@ example_sched = create_explicit_map_from_tuples( ) print("example sched:") print(example_sched) +""" + """ param_names_sched = ["ps", "p0", "p1"] in_names_sched = ["s","i","j"] @@ -134,4 +137,5 @@ statement_instance_ordering_explicit = get_statement_ordering_map( example_sched, lex_map_explicit) print("statement instance ordering explicit:") print(statement_instance_ordering_explicit) +""" diff --git a/lexicographic_order_map.py b/lexicographic_order_map.py index f1fe02655..df1b0f894 100644 --- a/lexicographic_order_map.py +++ b/lexicographic_order_map.py @@ 
-79,23 +79,44 @@ def get_space(param_names, in_names, out_names): return set_space_names(space, param_names=param_names, in_names=in_names, out_names=out_names) -def create_symbolic_lex_mapping(param_names, in_names, out_names, - dim_bounds): - # assumes dim vars are bounded between 0 and corresponding dim_bound - assert len(in_names) == len(out_names) +def create_symbolic_lex_mapping( + n_dims, + param_names=None, + in_names=None, + out_names=None, + dim_bound_vals=None, + ): + if param_names is None: + param_names = [["lo%s" % (i), "up%s" % (i)] for i in range(n_dims)] + if in_names is None: + in_names = ["i%s" % (i) for i in range(n_dims)] + if out_names is None: + from schedule_checker.dependency import append_apostrophes + out_names = append_apostrophes(in_names) + if dim_bound_vals is None: + raise NotImplementedError("dim_bound_vals cannot be None") + + assert len(in_names) == len(out_names) == len(param_names) == len(dim_bound_vals) == n_dims dim_type = isl.dim_type - islvars = isl.make_zero_and_vars(in_names+out_names, param_names) + islvars = isl.make_zero_and_vars( + in_names+out_names, + [param for param_pair in param_names for param in param_pair]) # initialize set with constraint that is always true lex_set_outer_bounds = islvars[0].eq_set(islvars[0]) - # make constraints to bound dim vars 0 <= ix < dim_bound_x - for i, dim_bound in enumerate(dim_bounds): + # make constraints to bound dim vars dim_bound[0] <= ix < dim_bound[1] + #for i, dim_bound in enumerate(dim_bound_vals): + for i in range(n_dims): lex_set_outer_bounds = lex_set_outer_bounds \ - & islvars[0].le_set(islvars[in_names[i]]) \ - & islvars[in_names[i]].le_set(islvars[param_names[i]]-1) \ - & islvars[0].le_set(islvars[out_names[i]]) \ - & islvars[out_names[i]].le_set(islvars[param_names[i]]-1) + & islvars[in_names[i]].ge_set(islvars[param_names[i][0]]) \ + & islvars[in_names[i]].le_set(islvars[param_names[i][1]]-1) \ + & islvars[out_names[i]].ge_set(islvars[param_names[i][0]]) \ + & islvars[out_names[i]].le_set(islvars[param_names[i][1]]-1) + if dim_bound_vals: + lex_set_outer_bounds = lex_set_outer_bounds \ + & islvars[param_names[i][0]].eq_set(islvars[0]+dim_bound_vals[i][0]) \ + & islvars[param_names[i][1]].eq_set(islvars[0]+dim_bound_vals[i][1]) # create constraint enforcing lex ordering, e.g., in the 3-dim case: # i0 < o0 or ((i0 = o0) and (i1 < o1)) -- GitLab From 103abfccebca7c81323180015ba3f8c78f48e2b9 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sun, 12 May 2019 16:54:08 -0500 Subject: [PATCH 003/415] renaming example/experimental code files for consistency --- ...ndency_check_experiments.py => example_dependency_checking.py | 1 - schedule_creation_experiments.py => example_schedule_creation.py | 0 2 files changed, 1 deletion(-) rename dependency_check_experiments.py => example_dependency_checking.py (99%) rename schedule_creation_experiments.py => example_schedule_creation.py (100%) diff --git a/dependency_check_experiments.py b/example_dependency_checking.py similarity index 99% rename from dependency_check_experiments.py rename to example_dependency_checking.py index 2ef0bfce9..fffbd23fa 100644 --- a/dependency_check_experiments.py +++ b/example_dependency_checking.py @@ -13,7 +13,6 @@ from schedule_checker.lexicographic_order_map import ( get_statement_ordering_map, set_space_names, get_space, - create_symbolic_lex_mapping, ) from schedule_checker.sched_check_utils import prettier_map_string diff --git a/schedule_creation_experiments.py b/example_schedule_creation.py similarity index 100% rename 
from schedule_creation_experiments.py rename to example_schedule_creation.py -- GitLab From e93b1518093514059dafe241b404cde45ff57fe0 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 13 May 2019 11:57:57 -0500 Subject: [PATCH 004/415] temporary hack for getting symbolic iname bounds from kernel --- example_schedule_creation.py | 48 ++++++++++++++++++++++++++++++------ schedule.py | 14 +++++++---- 2 files changed, 49 insertions(+), 13 deletions(-) diff --git a/example_schedule_creation.py b/example_schedule_creation.py index dc5fa5535..483bfe656 100644 --- a/example_schedule_creation.py +++ b/example_schedule_creation.py @@ -25,28 +25,55 @@ def get_iname_bounds_dict(knl, _set_arbitrary_bounds=None): return dict((iname, _set_arbitrary_bounds) for iname in knl.all_inames()) from loopy.symbolic import aff_to_expr - int_bounds = {} + from loopy.isl_helpers import static_max_of_pw_aff + from loopy.isl_helpers import static_value_of_pw_aff + + def _param_in_expr_hack(expr, all_params): + expr_str = str(expr) + for p in all_params: + if p in expr_str: + return p + return None + + bounds = {} + all_params = knl.all_params() for iname in knl.all_inames(): - bounds_record = knl.get_iname_bounds(iname, constants_only=True) + #bounds_record = knl.get_iname_bounds(iname, constants_only=True) + bounds_record = knl.get_iname_bounds(iname) (_, iname_min_aff), = bounds_record.lower_bound_pw_aff.get_pieces() (_, iname_max_aff), = bounds_record.upper_bound_pw_aff.get_pieces() - int_bounds[iname] = [ - aff_to_expr(iname_min_aff), - aff_to_expr(iname_max_aff) + 1, + iname_min_aff = aff_to_expr(iname_min_aff) + iname_max_aff = aff_to_expr(iname_max_aff) + param_bound_min = _param_in_expr_hack(iname_min_aff, all_params) + param_bound_max = _param_in_expr_hack(iname_max_aff, all_params) + + if param_bound_min is None: + param_bound_min = int(iname_min_aff) # TODO what if this fails? + if param_bound_max is None: + param_bound_max = int(iname_max_aff) # TODO what if this fails? 
+ + #int_bounds[iname] = [ + bounds[iname] = [ + param_bound_min, + param_bound_max, ] - assert all(isinstance(i,int) for i in int_bounds[iname]) - return int_bounds + #assert all(isinstance(i,int) for i in int_bounds[iname]) + return bounds # make example kernel knl = lp.make_kernel( #"{[i,j]: 0<=i,j<2}", - "{[i,j]: 0<=i<2 and 1<=j<3}", + #"{[i,j]: 0<=i<2 and 1<=j<3}", + #"{[i,j]: loi<=itemp = b[i,j] {id=0}", "a[i,j] = temp + 1 {id=1,dep=0}", "c[i,j] = d[i,j] {id=2}" ], name="example", + #assumptions="loi,upi,loj,upj >= 1", + assumptions="upi,upj >= 1", lang_version=(2018, 2) ) knl = lp.add_and_infer_dtypes(knl, {"b": np.float32, "d": np.float32}) @@ -92,9 +119,14 @@ def get_iname_concurrency_dict(inames, knl): # Get schedule ------------------------------------------------------ iname_bounds = get_iname_bounds_dict(knl) + #iname_bounds = get_iname_bounds_dict(knl, _set_arbitrary_bounds=[0,2]) print(iname_bounds) sched = LexSchedule(knl, iname_bounds) + +print(sched) +1/0 + example_sched_valid = sched.enumerate_symbolic_inames_and_create_explicit_isl_map(iname_bounds) # ------------------------------------------------------------------- diff --git a/schedule.py b/schedule.py index 410489d5f..14ac242cf 100644 --- a/schedule.py +++ b/schedule.py @@ -43,7 +43,7 @@ class StatementInstance(object): class LexSchedule(object): - # TODO this should hold a map of statement instances to lex order space + # TODO this should hold a map from statement instances to lex order space def __init__( self, knl, @@ -120,7 +120,7 @@ class LexSchedule(object): else: self.lp_insnid_to_id[lp_insnid] = 0 - def get_sched_space(self): + def get_space_for_explicit_sched(self): params_sched = ["ps"] + ["p"+iname for iname in self.inames_enumerated] in_names_sched = ["s"] + self.inames_enumerated out_names_sched = ["l"+str(i) for i in range(self.max_lex_dims())] @@ -164,9 +164,13 @@ class LexSchedule(object): def enumerate_symbolic_inames_and_create_explicit_isl_map(self, iname_bounds): self.enumerate_inames(iname_bounds) - sched_space = self.get_sched_space() + sched_space = self.get_space_for_explicit_sched() return self.create_explicit_isl_map(sched_space) + def create_symbolic_isl_map(self, iname_bounds): + sched_space = self.get_space_for_symbolic_sched() + return None + def get_lex_map_explicit(self): from schedule_checker.lexicographic_order_map import ( @@ -179,7 +183,7 @@ class LexSchedule(object): # TODO lower bound may not be zero lex_dim_bounds = list(zip(self.get_min_lex_dim_vals(), [1 + v for v in self.get_max_lex_dim_vals()])) - sched_space = self.get_sched_space() + sched_space = self.get_space_for_explicit_sched() lex_in_names = sched_space.get_var_names(isl.dim_type.out) lex_out_names = append_apostrophes(lex_in_names) @@ -231,7 +235,7 @@ class LexSchedule(object): # return str(self.get_isl_map_str()) def __str__(self): - return str(list(self.lex_schedule)) + return str(list(self.lex_schedule.items())) # TODO remove after stripping useful parts: """ -- GitLab From d3cddf5bbeb6121e82229385166ff1ddc27c2f2a Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 13 May 2019 14:07:48 -0500 Subject: [PATCH 005/415] added flatten_2d_list --- sched_check_utils.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/sched_check_utils.py b/sched_check_utils.py index 878d42183..50233e5bc 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -2,3 +2,7 @@ def prettier_map_string(isl_map): return str(isl_map).replace("{ ", "{\n").replace(" }","\n}").replace("; ",";\n") + + +def flatten_2d_list(list2d): + 
return [item for inner_list in list2d for item in inner_list] -- GitLab From 169e4a4c2bdc618cb94bffc2b0936dd910c3c7ba Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 13 May 2019 14:08:48 -0500 Subject: [PATCH 006/415] (WIP) started symbolic schedule creation (rather than explicit) and broke everything... --- example_schedule_creation.py | 44 +++++++++++++++++++++++------------ lexicographic_order_map.py | 39 ++++++++++++++++++++++++++++++- schedule.py | 45 +++++++++++++++++++++++++++++++++--- 3 files changed, 109 insertions(+), 19 deletions(-) diff --git a/example_schedule_creation.py b/example_schedule_creation.py index 483bfe656..5f67232c7 100644 --- a/example_schedule_creation.py +++ b/example_schedule_creation.py @@ -50,7 +50,7 @@ def get_iname_bounds_dict(knl, _set_arbitrary_bounds=None): if param_bound_min is None: param_bound_min = int(iname_min_aff) # TODO what if this fails? if param_bound_max is None: - param_bound_max = int(iname_max_aff) # TODO what if this fails? + param_bound_max = int(iname_max_aff)+1 # TODO what if this fails? #int_bounds[iname] = [ bounds[iname] = [ @@ -64,16 +64,16 @@ def get_iname_bounds_dict(knl, _set_arbitrary_bounds=None): knl = lp.make_kernel( #"{[i,j]: 0<=i,j<2}", #"{[i,j]: 0<=i<2 and 1<=j<3}", - #"{[i,j]: loi<=itemp = b[i,j] {id=0}", "a[i,j] = temp + 1 {id=1,dep=0}", "c[i,j] = d[i,j] {id=2}" ], name="example", - #assumptions="loi,upi,loj,upj >= 1", - assumptions="upi,upj >= 1", + #assumptions="pi_lo,pi_up,pj_lo,pj_up >= 1", + assumptions="pi_up,pj_up >= 1", lang_version=(2018, 2) ) knl = lp.add_and_infer_dtypes(knl, {"b": np.float32, "d": np.float32}) @@ -121,31 +121,45 @@ def get_iname_concurrency_dict(inames, knl): iname_bounds = get_iname_bounds_dict(knl) #iname_bounds = get_iname_bounds_dict(knl, _set_arbitrary_bounds=[0,2]) +print("iname bounds:") print(iname_bounds) sched = LexSchedule(knl, iname_bounds) - +print("LexSchedule:") print(sched) + + +#example_sched_explicit = sched.enumerate_symbolic_inames_and_create_explicit_isl_map(iname_bounds) +example_sched_symbolic = sched.create_symbolic_isl_map(iname_bounds) + 1/0 -example_sched_valid = sched.enumerate_symbolic_inames_and_create_explicit_isl_map(iname_bounds) # ------------------------------------------------------------------- print("example LexSched (valid):") -print(prettier_map_string(example_sched_valid)) +#print(prettier_map_string(example_sched_explicit)) +print(prettier_map_string(example_sched_symbolic)) +1/0 # *Explicit* lexicographic mapping- map each tuple to all tuples occuring later print("---------------------------------------------------------------------------") -lex_map_explicit = sched.get_lex_map_explicit() +#lex_map_explicit = sched.get_lex_map_explicit() +lex_map_symbolic = sched.get_lex_map_symbolic() -print("lex map explicit:") -print(prettier_map_string(lex_map_explicit)) +#print("lex map explicit:") +#print(prettier_map_string(lex_map_explicit)) +print("lex map symbolic:") +print(prettier_map_string(lex_map_symbolic)) # Statement instance ordering print("----------------------------------------------------------------------") -SIO_explicit_valid = get_statement_ordering_map( - example_sched_valid, lex_map_explicit) -print("statement instance ordering explicit (valid_sched):") -print(prettier_map_string(SIO_explicit_valid)) +#SIO_explicit_valid = get_statement_ordering_map( +# example_sched_explicit, lex_map_explicit) +#print("statement instance ordering explicit (valid_sched):") +#print(prettier_map_string(SIO_explicit_valid)) +SIO_symbolic_valid = 
get_statement_ordering_map( + example_sched_symbolic, lex_map_symbolic) +print("statement instance ordering symbolic (valid_sched):") +print(prettier_map_string(SIO_symbolic_valid)) ''' all_inames = ['i', 'j'] diff --git a/lexicographic_order_map.py b/lexicographic_order_map.py index df1b0f894..1ef8c741e 100644 --- a/lexicographic_order_map.py +++ b/lexicographic_order_map.py @@ -43,6 +43,41 @@ def create_explicit_map_from_tuples(tuple_pairs, space): return union_map +def create_symbolic_map_from_tuples(tuple_pairs, space, var_bounds): + + dim_type = isl.dim_type + individual_maps = [] + print(tuple_pairs) + print(space) + + for tup_in, tup_out in tuple_pairs: + constraints = [] + for i, val_in in enumerate(tup_in): + if isinstance(val_in, int): + constraints.append( + isl.Constraint.equality_alloc(space) + .set_coefficient_val(dim_type.in_, i, 1) + .set_constant_val(-1*val_in)) + for i, val_out in enumerate(tup_out): + if isinstance(val_out, int): + constraints.append( + isl.Constraint.equality_alloc(space) + .set_coefficient_val(dim_type.out, i, 1) + .set_constant_val(-1*val_out)) + # TODO left off here, problem: need to match up symbolic inames with corresponding space names and add bound constraints + # TODO maybe rewrite this code with w/more convenient islvars approach + individual_maps.append( + isl.Map.universe(space).add_constraints(constraints)) + + union_map = individual_maps[0] + for m in individual_maps[1:]: + union_map = union_map.union(m) + print(union_map) + 1/0 + + return union_map + + def get_statement_ordering_map(sched_map, lex_map): # statement ordering: # map each statement instance to all statement instances that occur later @@ -99,9 +134,11 @@ def create_symbolic_lex_mapping( assert len(in_names) == len(out_names) == len(param_names) == len(dim_bound_vals) == n_dims dim_type = isl.dim_type + from schedule_checker.sched_check_utils import flatten_2d_list islvars = isl.make_zero_and_vars( in_names+out_names, - [param for param_pair in param_names for param in param_pair]) + flatten_2d_list(param_names)) + # [param for param_pair in param_names for param in param_pair]) # initialize set with constraint that is always true lex_set_outer_bounds = islvars[0].eq_set(islvars[0]) diff --git a/schedule.py b/schedule.py index 14ac242cf..7c590c7e4 100644 --- a/schedule.py +++ b/schedule.py @@ -51,6 +51,7 @@ class LexSchedule(object): ): self.lex_schedule = OrderedDict() # statement instance: lex point self.inames_enumerated = [] # symbolic inames in sched that have been enumerated into explicit statement instances + self.inames_not_enumerated = [] # TODO better way to do this self.lp_insnid_to_id = {} from loopy.schedule import (EnterLoop, LeaveLoop, Barrier, RunInstruction) @@ -110,10 +111,23 @@ class LexSchedule(object): self.inames_enumerated.append(iname) def enumerate_inames(self, iname_bounds): - inames_found = [] for iname, bound in iname_bounds.items(): self.enumerate_iname(iname, bound) + def add_symbolic_inames_to_statement_instances(self, inames): + for iname in inames: + new_sched = OrderedDict() + iname_found = False + for insn, lex_pt in self.lex_schedule.items(): + if iname in lex_pt: + new_sched[tuple(list(insn)+[iname])] = lex_pt + iname_found = True + else: + new_sched[insn] = lex_pt + self.lex_schedule = new_sched + if iname_found: + self.inames_not_enumerated.append(iname) + def add_new_lp_insnid(self, lp_insnid): if self.lp_insnid_to_id: self.lp_insnid_to_id[lp_insnid] = max(self.lp_insnid_to_id.values()) + 1 @@ -127,6 +141,23 @@ class LexSchedule(object): 
from schedule_checker.lexicographic_order_map import get_space return get_space(params_sched, in_names_sched, out_names_sched) + def get_space_for_symbolic_sched(self, iname_bounds): + iname_bound_params = [] + for iname in self.inames_not_enumerated: + lo, up = iname_bounds[iname] + if not isinstance(lo, int): + #iname_bound_params.append("p"+iname+"up") + iname_bound_params.append(lo) + if not isinstance(up, int): + #iname_bound_params.append("p"+iname+"up") + iname_bound_params.append(up) + + params_sched = ["ps"] + iname_bound_params + in_names_sched = ["s"] + self.inames_not_enumerated + out_names_sched = ["l"+str(i) for i in range(self.max_lex_dims())] + from schedule_checker.lexicographic_order_map import get_space + return get_space(params_sched, in_names_sched, out_names_sched) + def get_max_lex_dim_vals(self): return [max(dim_pts) for dim_pts in zip(*self.lex_schedule.values())] @@ -168,8 +199,16 @@ class LexSchedule(object): return self.create_explicit_isl_map(sched_space) def create_symbolic_isl_map(self, iname_bounds): - sched_space = self.get_space_for_symbolic_sched() - return None + from schedule_checker.lexicographic_order_map import create_symbolic_map_from_tuples + #from schedule_checker.lexicographic_order_map import create_explicit_map_from_tuples + from schedule_checker.sched_check_utils import flatten_2d_list + self.add_symbolic_inames_to_statement_instances(iname_bounds.keys()) + #print(self) + extra_params = [b for b in flatten_2d_list(iname_bounds.values()) + if isinstance(b,str)] + sched_space = self.get_space_for_symbolic_sched(iname_bounds) + #sched_space = self.get_space_for_explicit_sched() + return create_symbolic_map_from_tuples(list(self.items()), sched_space, iname_bounds) def get_lex_map_explicit(self): -- GitLab From e23336ed321ec68a3ccf80c559ed327a6705b24e Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 13 May 2019 19:23:23 -0500 Subject: [PATCH 007/415] added get_islvars_from_space() --- sched_check_utils.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/sched_check_utils.py b/sched_check_utils.py index 50233e5bc..6c370e5f2 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -6,3 +6,12 @@ def prettier_map_string(isl_map): def flatten_2d_list(list2d): return [item for inner_list in list2d for item in inner_list] + + +def get_islvars_from_space(space): + import islpy as isl + param_names = space.get_var_names(isl.dim_type.param) + in_names = space.get_var_names(isl.dim_type.in_) + out_names = space.get_var_names(isl.dim_type.out) + return isl.make_zero_and_vars(in_names+out_names, param_names) + -- GitLab From 0d7db89017f0412fdc6b3cf541c776a98e7cc117 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 13 May 2019 19:24:07 -0500 Subject: [PATCH 008/415] (WIP) more work on symbolic schedule,;still broken --- lexicographic_order_map.py | 42 +++++++++++++++++++++++++++++++++++--- 1 file changed, 39 insertions(+), 3 deletions(-) diff --git a/lexicographic_order_map.py b/lexicographic_order_map.py index 1ef8c741e..d8a7c54f0 100644 --- a/lexicographic_order_map.py +++ b/lexicographic_order_map.py @@ -50,6 +50,37 @@ def create_symbolic_map_from_tuples(tuple_pairs, space, var_bounds): print(tuple_pairs) print(space) + from schedule_checker.sched_check_utils import get_islvars_from_space + #param_names = space.get_var_names(isl.dim_type.param) + out_names = space.get_var_names(dim_type.out) + in_names = space.get_var_names(isl.dim_type.in_) + + islvars = get_islvars_from_space(space) + + # initialize set with constraint that is always 
false + constraints_set = islvars[0].eq_set(islvars[0] + 1) + for tup_in, tup_out in tuple_pairs: + # initialize set with constraint that is always true + constraint = islvars[0].eq_set(islvars[0]) + for i, val_in in enumerate(tup_in): + if isinstance(val_in, int): + constraint = constraint \ + & islvars[in_names[i]].eq_set(islvars[0]+val_in) + #& islvars[out_names[i]].eq_set(islvars[0]+val_in) + else: + constraint = constraint \ + & islvars[in_names[i]].eq_set(islvars[val_in]) + #& islvars[out_names[i]].eq_set(islvars[val_in]) + for i, val_out in enumerate(tup_out): + if isinstance(val_out, int): + constraint = constraint \ + & islvars[out_names[i]].eq_set(islvars[0]+val_out) + else: + constraint = constraint \ + & islvars[out_names[i]].eq_set(islvars[val_out]) + print(constraint) + constraints_set = constraints_set | constraint + """ for tup_in, tup_out in tuple_pairs: constraints = [] for i, val_in in enumerate(tup_in): @@ -64,15 +95,20 @@ def create_symbolic_map_from_tuples(tuple_pairs, space, var_bounds): isl.Constraint.equality_alloc(space) .set_coefficient_val(dim_type.out, i, 1) .set_constant_val(-1*val_out)) - # TODO left off here, problem: need to match up symbolic inames with corresponding space names and add bound constraints - # TODO maybe rewrite this code with w/more convenient islvars approach individual_maps.append( isl.Map.universe(space).add_constraints(constraints)) - union_map = individual_maps[0] for m in individual_maps[1:]: union_map = union_map.union(m) print(union_map) + """ + # TODO left off here, problem: need to match up symbolic inames with corresponding space names and add bound constraints + + result_map = isl.Map.from_domain(constraints_set) + result_map = result_map.move_dims( + dim_type.out, 0, dim_type.in_, + len(in_names), len(out_names)) + print(result_map) 1/0 return union_map -- GitLab From 9274af76e60905d0c3d11857c3bf61aec3951980 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sun, 19 May 2019 22:31:00 -0500 Subject: [PATCH 009/415] got symbolic schedule creation working...? --- example_schedule_creation.py | 39 +++++++++++++++++++++++++++++------- lexicographic_order_map.py | 35 +++----------------------------- sched_check_utils.py | 5 +++-- schedule.py | 35 ++++++++++++++++++++++++++------ 4 files changed, 67 insertions(+), 47 deletions(-) diff --git a/example_schedule_creation.py b/example_schedule_creation.py index 5f67232c7..cd787029d 100644 --- a/example_schedule_creation.py +++ b/example_schedule_creation.py @@ -52,6 +52,8 @@ def get_iname_bounds_dict(knl, _set_arbitrary_bounds=None): if param_bound_max is None: param_bound_max = int(iname_max_aff)+1 # TODO what if this fails? 
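For comparison, isl can also report the extreme values of an iname over its domain directly as piecewise affine expressions, which avoids the int() conversion when a bound is symbolic. A small self-contained sketch (the domain and the parameter n below are placeholders, not the kernel used in this example):

import islpy as isl
dom = isl.Set("[n] -> { [i, j] : 0 <= i < n and 0 <= j < n }")
j_pos = 1  # index of j among the set dimensions of dom
print(dom.dim_min(j_pos))  # piecewise affine lower bound of j over dom (0 here)
print(dom.dim_max(j_pos))  # piecewise affine upper bound of j over dom (n - 1 when n >= 1)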
+ dom = knl.get_inames_domain(iname) + #int_bounds[iname] = [ bounds[iname] = [ param_bound_min, @@ -60,6 +62,26 @@ def get_iname_bounds_dict(knl, _set_arbitrary_bounds=None): #assert all(isinstance(i,int) for i in int_bounds[iname]) return bounds +def get_iname_to_param_dict(knl): + from loopy.symbolic import aff_to_expr + bounds = {} + all_params = knl.all_params() + for iname in knl.all_inames(): + #bounds_record = knl.get_iname_bounds(iname, constants_only=True) + bounds_record = knl.get_iname_bounds(iname) + (_, iname_min_aff), = bounds_record.lower_bound_pw_aff.get_pieces() + (_, iname_max_aff), = bounds_record.upper_bound_pw_aff.get_pieces() + iname_min_aff = aff_to_expr(iname_min_aff) + iname_max_aff = aff_to_expr(iname_max_aff) + bounds_strs = str(iname_min_aff)+str(iname_max_aff) + params_found = [] + for param in all_params: + if param in bounds_strs: + params_found.append(param) + + bounds[iname] = params_found + return bounds + # make example kernel knl = lp.make_kernel( #"{[i,j]: 0<=i,j<2}", @@ -120,25 +142,26 @@ def get_iname_concurrency_dict(inames, knl): # Get schedule ------------------------------------------------------ iname_bounds = get_iname_bounds_dict(knl) +domains = {} +for iname in knl.all_inames(): + domains[iname] = knl.get_inames_domain(iname) + #iname_bounds = get_iname_bounds_dict(knl, _set_arbitrary_bounds=[0,2]) print("iname bounds:") print(iname_bounds) sched = LexSchedule(knl, iname_bounds) -print("LexSchedule:") +print("LexSchedule before processing:") print(sched) - +iname_to_params_dict = get_iname_to_param_dict(knl) #example_sched_explicit = sched.enumerate_symbolic_inames_and_create_explicit_isl_map(iname_bounds) -example_sched_symbolic = sched.create_symbolic_isl_map(iname_bounds) - -1/0 +example_sched_symbolic = sched.create_symbolic_isl_map(iname_bounds, domains, iname_to_params_dict) # TODO don't need all of these # ------------------------------------------------------------------- -print("example LexSched (valid):") +print("LexSched (valid):") #print(prettier_map_string(example_sched_explicit)) print(prettier_map_string(example_sched_symbolic)) -1/0 # *Explicit* lexicographic mapping- map each tuple to all tuples occuring later print("---------------------------------------------------------------------------") @@ -150,6 +173,8 @@ lex_map_symbolic = sched.get_lex_map_symbolic() print("lex map symbolic:") print(prettier_map_string(lex_map_symbolic)) +1/0 + # Statement instance ordering print("----------------------------------------------------------------------") #SIO_explicit_valid = get_statement_ordering_map( diff --git a/lexicographic_order_map.py b/lexicographic_order_map.py index d8a7c54f0..3aea1bdfc 100644 --- a/lexicographic_order_map.py +++ b/lexicographic_order_map.py @@ -43,12 +43,11 @@ def create_explicit_map_from_tuples(tuple_pairs, space): return union_map -def create_symbolic_map_from_tuples(tuple_pairs, space, var_bounds): +#def create_symbolic_map_from_tuples(tuple_pairs, space, var_bounds, var_to_domain_dict, var_to_params_dict): +def create_symbolic_map_from_tuples(tuple_pairs, space, var_bounds, domain_to_intersect): dim_type = isl.dim_type individual_maps = [] - print(tuple_pairs) - print(space) from schedule_checker.sched_check_utils import get_islvars_from_space #param_names = space.get_var_names(isl.dim_type.param) @@ -66,11 +65,9 @@ def create_symbolic_map_from_tuples(tuple_pairs, space, var_bounds): if isinstance(val_in, int): constraint = constraint \ & islvars[in_names[i]].eq_set(islvars[0]+val_in) - #& 
islvars[out_names[i]].eq_set(islvars[0]+val_in) else: constraint = constraint \ & islvars[in_names[i]].eq_set(islvars[val_in]) - #& islvars[out_names[i]].eq_set(islvars[val_in]) for i, val_out in enumerate(tup_out): if isinstance(val_out, int): constraint = constraint \ @@ -78,40 +75,14 @@ def create_symbolic_map_from_tuples(tuple_pairs, space, var_bounds): else: constraint = constraint \ & islvars[out_names[i]].eq_set(islvars[val_out]) - print(constraint) constraints_set = constraints_set | constraint - """ - for tup_in, tup_out in tuple_pairs: - constraints = [] - for i, val_in in enumerate(tup_in): - if isinstance(val_in, int): - constraints.append( - isl.Constraint.equality_alloc(space) - .set_coefficient_val(dim_type.in_, i, 1) - .set_constant_val(-1*val_in)) - for i, val_out in enumerate(tup_out): - if isinstance(val_out, int): - constraints.append( - isl.Constraint.equality_alloc(space) - .set_coefficient_val(dim_type.out, i, 1) - .set_constant_val(-1*val_out)) - individual_maps.append( - isl.Map.universe(space).add_constraints(constraints)) - union_map = individual_maps[0] - for m in individual_maps[1:]: - union_map = union_map.union(m) - print(union_map) - """ - # TODO left off here, problem: need to match up symbolic inames with corresponding space names and add bound constraints result_map = isl.Map.from_domain(constraints_set) result_map = result_map.move_dims( dim_type.out, 0, dim_type.in_, len(in_names), len(out_names)) - print(result_map) - 1/0 - return union_map + return result_map.intersect_domain(domain_to_intersect) def get_statement_ordering_map(sched_map, lex_map): diff --git a/sched_check_utils.py b/sched_check_utils.py index 6c370e5f2..4e9faeb67 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -1,4 +1,4 @@ - +import islpy as isl def prettier_map_string(isl_map): return str(isl_map).replace("{ ", "{\n").replace(" }","\n}").replace("; ",";\n") @@ -9,9 +9,10 @@ def flatten_2d_list(list2d): def get_islvars_from_space(space): - import islpy as isl param_names = space.get_var_names(isl.dim_type.param) in_names = space.get_var_names(isl.dim_type.in_) out_names = space.get_var_names(isl.dim_type.out) return isl.make_zero_and_vars(in_names+out_names, param_names) +def get_dim_for_isl_space_param(space, param): + return space.get_var_names(isl.dim_type.param).index(param) diff --git a/schedule.py b/schedule.py index 7c590c7e4..6baf4c99d 100644 --- a/schedule.py +++ b/schedule.py @@ -198,17 +198,40 @@ class LexSchedule(object): sched_space = self.get_space_for_explicit_sched() return self.create_explicit_isl_map(sched_space) - def create_symbolic_isl_map(self, iname_bounds): + def create_symbolic_isl_map(self, iname_bounds, domains, iname_to_params_dict): # TODO don't need all of these + # TODO assumes all knl inames included in iname_bounds from schedule_checker.lexicographic_order_map import create_symbolic_map_from_tuples - #from schedule_checker.lexicographic_order_map import create_explicit_map_from_tuples - from schedule_checker.sched_check_utils import flatten_2d_list - self.add_symbolic_inames_to_statement_instances(iname_bounds.keys()) - #print(self) + from schedule_checker.sched_check_utils import flatten_2d_list, get_dim_for_isl_space_param + all_inames = list(iname_bounds.keys()) + self.add_symbolic_inames_to_statement_instances(all_inames) extra_params = [b for b in flatten_2d_list(iname_bounds.values()) if isinstance(b,str)] sched_space = self.get_space_for_symbolic_sched(iname_bounds) #sched_space = self.get_space_for_explicit_sched() - return 
create_symbolic_map_from_tuples(list(self.items()), sched_space, iname_bounds) + + # intersect all domains for symbolic (non-enumerated) inames found in statement instances + domain_intersection = domains[self.inames_not_enumerated[0]] + #TODO what if self.inames_not_enumerated is empty? + for iname in self.inames_not_enumerated[1:]: + domain_intersection = domain_intersection.intersect(domains[iname]) + + # inames not found in statement instance tuples should be removed + inames_to_remove_from_domains = all_inames.copy() + for iname in self.inames_not_enumerated: + inames_to_remove_from_domains.remove(iname) + + #dom = domains['j'] # TODO which domains(s) do we use? + #vars_to_remove = ['i'] # TODO where do we get this? in_names? + domain_stripped = domain_intersection.copy() + for iname in inames_to_remove_from_domains: + for p in iname_to_params_dict[iname]: + domain_stripped = domain_intersection.remove_dims( + isl.dim_type.param, + get_dim_for_isl_space_param(domain_intersection.space, p), + 1) + # TODO is projecting out iname necessary? + + return create_symbolic_map_from_tuples(list(self.items()), sched_space, iname_bounds, domain_stripped) def get_lex_map_explicit(self): -- GitLab From bba229d9b5fab602f5d63235a6632af33455ca91 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 20 May 2019 18:39:21 -0500 Subject: [PATCH 010/415] got symbolic lex map with variable iname bounds working --- example_schedule_creation.py | 3 ++- lexicographic_order_map.py | 48 ++++++++++++++++++++++++++++-------- schedule.py | 43 ++++++++++++++++++++++++++------ 3 files changed, 76 insertions(+), 18 deletions(-) diff --git a/example_schedule_creation.py b/example_schedule_creation.py index cd787029d..4ab2fe106 100644 --- a/example_schedule_creation.py +++ b/example_schedule_creation.py @@ -166,13 +166,14 @@ print(prettier_map_string(example_sched_symbolic)) # *Explicit* lexicographic mapping- map each tuple to all tuples occuring later print("---------------------------------------------------------------------------") #lex_map_explicit = sched.get_lex_map_explicit() -lex_map_symbolic = sched.get_lex_map_symbolic() +lex_map_symbolic = sched.get_lex_map_symbolic(iname_bounds) #print("lex map explicit:") #print(prettier_map_string(lex_map_explicit)) print("lex map symbolic:") print(prettier_map_string(lex_map_symbolic)) +# TODO left off here 1/0 # Statement instance ordering diff --git a/lexicographic_order_map.py b/lexicographic_order_map.py index 3aea1bdfc..c8b5f2789 100644 --- a/lexicographic_order_map.py +++ b/lexicographic_order_map.py @@ -7,6 +7,7 @@ def make_lex_mapping_tuple_pairs(dim_bounds): # all lex tuples in order: lex_tuples = list( itertools.product(*[range(l,u) for l,u in dim_bounds])) + # goes up to u-1 because u is a non-inclusive upper bound # TODO: is itertools.product ordering guaranteed? map_pairs = [] @@ -121,6 +122,10 @@ def get_space(param_names, in_names, out_names): return set_space_names(space, param_names=param_names, in_names=in_names, out_names=out_names) +#TODO rename these functions for clarity +#(distinguish betwen map representing lex order from all before pts to all after pts +# from map representing a schedule +# from other things...) 
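For reference, a minimal self-contained sketch of the kind of "all earlier points -> all later points" lex order map meant here, hard-coded for two lex dimensions (the names i0, i1, o0, o1 are placeholders):

import islpy as isl
islvars = isl.make_zero_and_vars(["i0", "i1", "o0", "o1"], [])
# (i0, i1) precedes (o0, o1) lexicographically iff i0 < o0, or i0 = o0 and i1 < o1
lex_set = islvars["i0"].lt_set(islvars["o0"]) | (
    islvars["i0"].eq_set(islvars["o0"]) & islvars["i1"].lt_set(islvars["o1"]))
lex_map = isl.Map.from_domain(lex_set).move_dims(
    isl.dim_type.out, 0, isl.dim_type.in_, 2, 2)
print(lex_map)  # roughly { [i0, i1] -> [o0, o1] : i0 < o0 or (i0 = o0 and i1 < o1) }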
def create_symbolic_lex_mapping( n_dims, param_names=None, @@ -140,11 +145,18 @@ def create_symbolic_lex_mapping( assert len(in_names) == len(out_names) == len(param_names) == len(dim_bound_vals) == n_dims dim_type = isl.dim_type - from schedule_checker.sched_check_utils import flatten_2d_list + + #TODO left off here, need to add params from dim_bounds to islvars? + params_in_dim_bounds = [] + for v in flatten_2d_list(dim_bound_vals): + if not isinstance(v, int): + params_in_dim_bounds.append(v) + islvars = isl.make_zero_and_vars( in_names+out_names, - flatten_2d_list(param_names)) + #flatten_2d_list(param_names)) + flatten_2d_list(param_names)+params_in_dim_bounds) # [param for param_pair in param_names for param in param_pair]) # initialize set with constraint that is always true @@ -154,21 +166,37 @@ def create_symbolic_lex_mapping( for i in range(n_dims): lex_set_outer_bounds = lex_set_outer_bounds \ & islvars[in_names[i]].ge_set(islvars[param_names[i][0]]) \ - & islvars[in_names[i]].le_set(islvars[param_names[i][1]]-1) \ + & islvars[in_names[i]].lt_set(islvars[param_names[i][1]]) \ & islvars[out_names[i]].ge_set(islvars[param_names[i][0]]) \ - & islvars[out_names[i]].le_set(islvars[param_names[i][1]]-1) + & islvars[out_names[i]].lt_set(islvars[param_names[i][1]]) if dim_bound_vals: - lex_set_outer_bounds = lex_set_outer_bounds \ - & islvars[param_names[i][0]].eq_set(islvars[0]+dim_bound_vals[i][0]) \ - & islvars[param_names[i][1]].eq_set(islvars[0]+dim_bound_vals[i][1]) + #lex_set_outer_bounds = lex_set_outer_bounds \ + # & islvars[param_names[i][0]].eq_set(islvars[0]+dim_bound_vals[i][0]) \ + # & islvars[param_names[i][1]].eq_set(islvars[0]+dim_bound_vals[i][1]) + lower_bound = dim_bound_vals[i][0] + upper_bound = dim_bound_vals[i][1] + if isinstance(lower_bound, int): + lex_set_outer_bounds = lex_set_outer_bounds \ + & islvars[param_names[i][0]].eq_set(islvars[0]+lower_bound) + else: + # lower bound is variable + lex_set_outer_bounds = lex_set_outer_bounds \ + & islvars[param_names[i][0]].eq_set(islvars[lower_bound]) + if isinstance(upper_bound, int): + lex_set_outer_bounds = lex_set_outer_bounds \ + & islvars[param_names[i][1]].eq_set(islvars[0]+upper_bound) + else: + # upper bound is variable + lex_set_outer_bounds = lex_set_outer_bounds \ + & islvars[param_names[i][1]].eq_set(islvars[upper_bound]) # create constraint enforcing lex ordering, e.g., in the 3-dim case: # i0 < o0 or ((i0 = o0) and (i1 < o1)) # or ((i0 = o0) and (i1 = o1) and (i2 < o2)) - lex_set_order_bound = islvars[in_names[0]].le_set(islvars[out_names[0]]-1) + lex_set_order_bound = islvars[in_names[0]].lt_set(islvars[out_names[0]]) for i in range(1, len(in_names)): - lex_set_order_bound_conj = islvars[in_names[i]].le_set( - islvars[out_names[i]]-1) + lex_set_order_bound_conj = islvars[in_names[i]].lt_set( + islvars[out_names[i]]) for j in range(i): lex_set_order_bound_conj = lex_set_order_bound_conj & \ islvars[in_names[j]].eq_set(islvars[out_names[j]]) diff --git a/schedule.py b/schedule.py index 6baf4c99d..992a0726d 100644 --- a/schedule.py +++ b/schedule.py @@ -158,11 +158,29 @@ class LexSchedule(object): from schedule_checker.lexicographic_order_map import get_space return get_space(params_sched, in_names_sched, out_names_sched) - def get_max_lex_dim_vals(self): - return [max(dim_pts) for dim_pts in zip(*self.lex_schedule.values())] - - def get_min_lex_dim_vals(self): - return [min(dim_pts) for dim_pts in zip(*self.lex_schedule.values())] + def get_max_lex_dim_bounds(self, var_bounds_dict): + # this only works 
for integer lex pts (no symbolic vars) + #return [max(dim_pts) for dim_pts in zip(*self.lex_schedule.values())] + result = [] + for dim_pts in zip(*self.lex_schedule.values()): + if all(isinstance(pt, int) for pt in dim_pts): + result.append(max(dim_pts) + 1) # +1 because this is the non-inclusive upper bound + else: + assert all(pt == dim_pts[0] for pt in dim_pts) + result.append(var_bounds_dict[dim_pts[0]][1]) # upper bound for this variable + return result + + def get_min_lex_dim_vals(self, var_bounds_dict): + # this only works for integer lex pts (no symbolic vars) + #return [min(dim_pts) for dim_pts in zip(*self.lex_schedule.values())] + result = [] + for dim_pts in zip(*self.lex_schedule.values()): + if all(isinstance(pt, int) for pt in dim_pts): + result.append(min(dim_pts)) + else: + assert all(pt == dim_pts[0] for pt in dim_pts) + result.append(var_bounds_dict[dim_pts[0]][0]) # lower bound for this variable + return result def append_item(self, sched_item, lex_pt): self.lex_schedule[sched_item] = lex_pt @@ -244,20 +262,31 @@ class LexSchedule(object): # TODO lower bound may not be zero lex_dim_bounds = list(zip(self.get_min_lex_dim_vals(), - [1 + v for v in self.get_max_lex_dim_vals()])) + self.get_max_lex_dim_vals())) sched_space = self.get_space_for_explicit_sched() lex_in_names = sched_space.get_var_names(isl.dim_type.out) lex_out_names = append_apostrophes(lex_in_names) lex_params = [] - # TODO lex map routines currently assume lower bound is zero, fix this explicit_lex_map_pairs = make_lex_mapping_tuple_pairs(lex_dim_bounds) lex_space_explicit = get_space(lex_params, lex_in_names, lex_out_names) return create_explicit_map_from_tuples(explicit_lex_map_pairs, lex_space_explicit) + def get_lex_map_symbolic(self, var_bounds_dict): + from schedule_checker.lexicographic_order_map import ( + create_symbolic_lex_mapping, + ) + + n_dims = self.max_lex_dims() + #lex_dim_bounds = list(zip(self.get_min_lex_dim_vals(), + # [1 + v for v in self.get_max_lex_dim_vals()])) + lex_dim_bounds = list(zip(self.get_min_lex_dim_vals(var_bounds_dict), + self.get_max_lex_dim_bounds(var_bounds_dict))) + return create_symbolic_lex_mapping(n_dims, dim_bound_vals=lex_dim_bounds) + #def get_isl_map(self): def get_isl_map_str(self): map_str = "{" -- GitLab From c76305b4fd88b59e04cf52e3bae8448bb3ba88fb Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 20 May 2019 21:34:41 -0500 Subject: [PATCH 011/415] statement instance ordering creation from symbolic schedule w/variable bounds seems to be working --- example_schedule_creation.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/example_schedule_creation.py b/example_schedule_creation.py index 4ab2fe106..fac0fedff 100644 --- a/example_schedule_creation.py +++ b/example_schedule_creation.py @@ -173,8 +173,6 @@ lex_map_symbolic = sched.get_lex_map_symbolic(iname_bounds) print("lex map symbolic:") print(prettier_map_string(lex_map_symbolic)) -# TODO left off here -1/0 # Statement instance ordering print("----------------------------------------------------------------------") @@ -187,6 +185,8 @@ SIO_symbolic_valid = get_statement_ordering_map( print("statement instance ordering symbolic (valid_sched):") print(prettier_map_string(SIO_symbolic_valid)) +# TODO left off here + ''' all_inames = ['i', 'j'] iname_params = ['p0', 'p1'] -- GitLab From 85071a7343b30166aa3624f8aad81ac44f772c51 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 20 May 2019 22:54:52 -0500 Subject: [PATCH 012/415] dependency logic now allows upper+lower symbolic bounds 
on variables --- dependency.py | 56 +++++++++++++++++++++++++++++----- example_dependency_checking.py | 8 +++-- 2 files changed, 54 insertions(+), 10 deletions(-) diff --git a/dependency.py b/dependency.py index 40ebaa99a..76ec7d321 100644 --- a/dependency.py +++ b/dependency.py @@ -93,23 +93,22 @@ class DependencyConstraintVars(object): statement_param_val, ): self.inames = inames - self.param_names = param_names - self.param_vals = param_vals + self.param_names = param_names # TODO rename, these are pairs of bound vars + self.param_vals = param_vals # TODO rename, these are pairs of bound vals self.statement_var = statement_var self.statement_param = statement_param self.statement_param_val = statement_param_val def get_bounds_constraint_set(self): var_names = [self.statement_var]+self.inames - param_names = [self.statement_param]+self.param_names - param_vals = [self.statement_param_val]+self.param_vals - # TODO assumes lower bound is zero + param_names = [self.statement_param]+self.list_param_names() islvars = _make_islvars_with_var_primes( var_names, param_names) bounded_set = islvars[0].eq_set(islvars[0]) # initialize to True - + """ for v, p, b in zip(var_names, param_names, param_vals): + #TODO each iname could have multiple param names # create constraint 0 <= v,v'< p = b v_prime = v+"'" bounded_set = bounded_set \ @@ -118,9 +117,51 @@ class DependencyConstraintVars(object): & (islvars[0]-1).lt_set(islvars[v]) \ & (islvars[0]-1).lt_set(islvars[v_prime]) \ & islvars[p].eq_set(islvars[0]+b) + """ + v = self.statement_var + v_prime = self.statement_var+"'" + p = self.statement_param + b = self.statement_param_val + + bounded_set = bounded_set \ + & islvars[v].lt_set(islvars[p]) \ + & islvars[v_prime].lt_set(islvars[p]) \ + & (islvars[0]).le_set(islvars[v]) \ + & (islvars[0]).le_set(islvars[v_prime]) \ + & islvars[p].eq_set(islvars[0]+b) + + for v, (p_low, p_up), (pval_low, pval_up) in zip( + self.inames, self.param_names, self.param_vals): + # create constraint pval_low = p_low <= v,v'< p_up = pval_up + if p_low is None: + assert isinstance(pval_low, int) + lower_bound = islvars[0] + pval_low + else: + lower_bound = islvars[p_low] + if not pval_low is None: + bounded_set = bounded_set & lower_bound.eq_set(islvars[0]+pval_low) + + if p_up is None: + assert isinstance(pval_up, int) + upper_bound = islvars[0] + pval_up + else: + upper_bound = islvars[p_up] + if not pval_up is None: + bounded_set = bounded_set & upper_bound.eq_set(islvars[0]+pval_up) + + v_prime = v+"'" + bounded_set = bounded_set \ + & islvars[v].lt_set(upper_bound) \ + & islvars[v_prime].lt_set(upper_bound) \ + & lower_bound.le_set(islvars[v]) \ + & lower_bound.le_set(islvars[v_prime]) return bounded_set + def list_param_names(self): + from schedule_checker.sched_check_utils import flatten_2d_list + return [p for p in flatten_2d_list(self.param_names) if not p is None] + def __str__(self): return str(self.get_bounds_constraint_set()) @@ -136,7 +177,8 @@ def create_dependency_constraint( # (statement_bound = max statement id + 1) statement_param = dep_constraint_vars.statement_param - param_names = dep_constraint_vars.param_names + #param_names = dep_constraint_vars.param_names + param_names = dep_constraint_vars.list_param_names() all_inames = dep_constraint_vars.inames statement_var = dep_constraint_vars.statement_var diff --git a/example_dependency_checking.py b/example_dependency_checking.py index fffbd23fa..01cb9b013 100644 --- a/example_dependency_checking.py +++ b/example_dependency_checking.py @@ -30,9 +30,11 @@ 
knl = lp.tag_inames(knl, {"i": "l.0"}) print("Kernel:") print(knl) +from schedule_checker.sched_check_utils import flatten_2d_list all_inames = ['i', 'j'] -iname_params = ['p0', 'p1'] -iname_param_vals = [2, 2] +iname_params = [(None, 'p0'), (None, 'p1')] +param_names_listed = [p for p in flatten_2d_list(iname_params) if not p is None] +iname_param_vals = [(0, 2), (0, 2)] statement_var = 's' statement_param = 'ps' statement_bound = 2 @@ -43,7 +45,7 @@ print("------------------------------------------------------------------------- # i is parallel, suppose we want to enforce the following: # for a given i, statement 0 happens before statement 1 -params_sched = [statement_param]+iname_params +params_sched = [statement_param]+param_names_listed in_names_sched = [statement_var]+all_inames out_names_sched = ['l0', 'l1'] sched_space = get_space(params_sched, in_names_sched, out_names_sched) -- GitLab From 72cbdf94ec3f8697801cc2d7aed087e331c60500 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 20 May 2019 23:04:12 -0500 Subject: [PATCH 013/415] creating example constraint map to test new symbolic statement instance ordering; constraint map space and SIO space don't match yet so can't compare... --- dependency.py | 18 ++++--------- example_schedule_creation.py | 51 +++++++++++++++++++++++++++++++++++- 2 files changed, 55 insertions(+), 14 deletions(-) diff --git a/dependency.py b/dependency.py index 76ec7d321..b2688ebbe 100644 --- a/dependency.py +++ b/dependency.py @@ -106,23 +106,12 @@ class DependencyConstraintVars(object): var_names, param_names) bounded_set = islvars[0].eq_set(islvars[0]) # initialize to True - """ - for v, p, b in zip(var_names, param_names, param_vals): - #TODO each iname could have multiple param names - # create constraint 0 <= v,v'< p = b - v_prime = v+"'" - bounded_set = bounded_set \ - & islvars[v].lt_set(islvars[p]) \ - & islvars[v_prime].lt_set(islvars[p]) \ - & (islvars[0]-1).lt_set(islvars[v]) \ - & (islvars[0]-1).lt_set(islvars[v_prime]) \ - & islvars[p].eq_set(islvars[0]+b) - """ + + # bound the statement variable v = self.statement_var v_prime = self.statement_var+"'" p = self.statement_param b = self.statement_param_val - bounded_set = bounded_set \ & islvars[v].lt_set(islvars[p]) \ & islvars[v_prime].lt_set(islvars[p]) \ @@ -130,9 +119,12 @@ class DependencyConstraintVars(object): & (islvars[0]).le_set(islvars[v_prime]) \ & islvars[p].eq_set(islvars[0]+b) + # bound the other variables for v, (p_low, p_up), (pval_low, pval_up) in zip( self.inames, self.param_names, self.param_vals): + # create constraint pval_low = p_low <= v,v'< p_up = pval_up + if p_low is None: assert isinstance(pval_low, int) lower_bound = islvars[0] + pval_low diff --git a/example_schedule_creation.py b/example_schedule_creation.py index fac0fedff..31e361bab 100644 --- a/example_schedule_creation.py +++ b/example_schedule_creation.py @@ -153,9 +153,11 @@ sched = LexSchedule(knl, iname_bounds) print("LexSchedule before processing:") print(sched) +# TODO a lot of this could be cleaner if we just create a set for each iname domain and pass it around instead of messing with all these individual bounds + iname_to_params_dict = get_iname_to_param_dict(knl) #example_sched_explicit = sched.enumerate_symbolic_inames_and_create_explicit_isl_map(iname_bounds) -example_sched_symbolic = sched.create_symbolic_isl_map(iname_bounds, domains, iname_to_params_dict) # TODO don't need all of these +example_sched_symbolic = sched.create_symbolic_isl_map(iname_bounds, domains, iname_to_params_dict) # TODO 
don't need all of these args # ------------------------------------------------------------------- @@ -186,6 +188,53 @@ print("statement instance ordering symbolic (valid_sched):") print(prettier_map_string(SIO_symbolic_valid)) # TODO left off here +sched_inames = ['j'] +iname_params = [(None, 'pj_up')] +iname_param_vals = [(0, None)] +statement_var = 's' +statement_param = 'ps' +statement_bound = 3 + +from schedule_checker.dependency import ( + Dependency, + DependencyType as DT, + create_dependency_constraint, + append_apostrophes, + DependencyConstraintVars, +) + +dep_constraint_vars = DependencyConstraintVars( + sched_inames, + iname_params, + iname_param_vals, + statement_var, + statement_param, + statement_bound, + ) + +# i is parallel, suppose we want to enforce the following: +# for a given i, statement 0 happens before statement 1 +# i dependency is none, j dependency is `prior` + +deps = [ + Dependency(0, 1, DT.SAME, 'j'), + ] +print([str(dep) for dep in deps]) +constraint_map = create_dependency_constraint( + deps, dep_constraint_vars) +print("constraint map:") +print(prettier_map_string(constraint_map)) + +# TODO left off here, these spaces need to match and they don't + +#assert constraint_map.space == SIO_symbolic_valid.space +#1/0 + +#print("is valid sched valid?") +#print(constraint_map.is_subset(SIO_symbolic_valid)) + + + ''' all_inames = ['i', 'j'] -- GitLab From aaecadb0071839848265dafacf291de939345422 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 20 May 2019 23:28:50 -0500 Subject: [PATCH 014/415] changing example_sched_symbolic->sched-map-symbolic for clarity --- example_schedule_creation.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/example_schedule_creation.py b/example_schedule_creation.py index 31e361bab..26670937d 100644 --- a/example_schedule_creation.py +++ b/example_schedule_creation.py @@ -157,13 +157,13 @@ print(sched) iname_to_params_dict = get_iname_to_param_dict(knl) #example_sched_explicit = sched.enumerate_symbolic_inames_and_create_explicit_isl_map(iname_bounds) -example_sched_symbolic = sched.create_symbolic_isl_map(iname_bounds, domains, iname_to_params_dict) # TODO don't need all of these args +sched_map_symbolic = sched.create_symbolic_isl_map(iname_bounds, domains, iname_to_params_dict) # TODO don't need all of these args # ------------------------------------------------------------------- print("LexSched (valid):") #print(prettier_map_string(example_sched_explicit)) -print(prettier_map_string(example_sched_symbolic)) +print(prettier_map_string(sched_map_symbolic)) # *Explicit* lexicographic mapping- map each tuple to all tuples occuring later print("---------------------------------------------------------------------------") @@ -183,7 +183,7 @@ print("----------------------------------------------------------------------") #print("statement instance ordering explicit (valid_sched):") #print(prettier_map_string(SIO_explicit_valid)) SIO_symbolic_valid = get_statement_ordering_map( - example_sched_symbolic, lex_map_symbolic) + sched_map_symbolic, lex_map_symbolic) print("statement instance ordering symbolic (valid_sched):") print(prettier_map_string(SIO_symbolic_valid)) @@ -217,7 +217,7 @@ dep_constraint_vars = DependencyConstraintVars( # i dependency is none, j dependency is `prior` deps = [ - Dependency(0, 1, DT.SAME, 'j'), + Dependency(1, 0, DT.SAME, 'j'), ] print([str(dep) for dep in deps]) constraint_map = create_dependency_constraint( @@ -229,7 +229,6 @@ print(prettier_map_string(constraint_map)) #assert 
constraint_map.space == SIO_symbolic_valid.space #1/0 - #print("is valid sched valid?") #print(constraint_map.is_subset(SIO_symbolic_valid)) -- GitLab From 2a9047505138e18798d37cace470b908464f72a1 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sun, 26 May 2019 18:54:03 -0500 Subject: [PATCH 015/415] using only kernel domain (rather than iname bound variables) to convert schedule tuples into symbolic schedule map --- example_schedule_creation.py | 37 +++++++++++++++-------- lexicographic_order_map.py | 6 ++-- sched_check_utils.py | 10 +++++-- schedule.py | 58 +++++++++++++++++------------------- 4 files changed, 65 insertions(+), 46 deletions(-) diff --git a/example_schedule_creation.py b/example_schedule_creation.py index 26670937d..31e307556 100644 --- a/example_schedule_creation.py +++ b/example_schedule_creation.py @@ -112,9 +112,9 @@ dep_s1_j = Dependency(s0, s1, "j", DependencyType.SAME) insn_to_deps = {"0":[], "1":[dep_s1_i, dep_s1_j], "2":[]} # enforce explicit iname bounds for now TODO -print("Kernel:") -print(knl) -print(lp.generate_code_v2(knl).device_code()) +#print("Kernel:") +#print(knl) +#print(lp.generate_code_v2(knl).device_code()) print("="*80) print("Iname tags: %s" % (knl.iname_to_tags)) print("="*80) @@ -145,25 +145,30 @@ iname_bounds = get_iname_bounds_dict(knl) domains = {} for iname in knl.all_inames(): domains[iname] = knl.get_inames_domain(iname) +print("domains:") +print(domains) -#iname_bounds = get_iname_bounds_dict(knl, _set_arbitrary_bounds=[0,2]) print("iname bounds:") print(iname_bounds) -sched = LexSchedule(knl, iname_bounds) -print("LexSchedule before processing:") -print(sched) - -# TODO a lot of this could be cleaner if we just create a set for each iname domain and pass it around instead of messing with all these individual bounds +sched = LexSchedule(knl, iname_bounds) # TODO do we really need iname bounds here? 
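Further down, the statement instance ordering maps each statement instance to the instances scheduled after it. One way to form such a map from a schedule map and a lex order map is sketched below with made-up toy maps (these are not the maps this script builds):

import islpy as isl
sched = isl.Map("{ [s] -> [t] : t = s and 0 <= s <= 1 }")  # toy schedule: statement s runs at lex time s
lex = isl.Map("{ [t] -> [t2] : t2 > t }")                  # each lex time precedes every later time
sio = sched.apply_range(lex).apply_range(sched.reverse())
print(sio)  # maps statement 0's instance to statement 1's, the only later instance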
+#print("LexSchedule before processing:") +#print(sched) iname_to_params_dict = get_iname_to_param_dict(knl) #example_sched_explicit = sched.enumerate_symbolic_inames_and_create_explicit_isl_map(iname_bounds) -sched_map_symbolic = sched.create_symbolic_isl_map(iname_bounds, domains, iname_to_params_dict) # TODO don't need all of these args +sched_map_symbolic = sched.create_symbolic_isl_map(domains) +print("LexSchedule after processing:") +print(sched) # ------------------------------------------------------------------- print("LexSched (valid):") #print(prettier_map_string(example_sched_explicit)) print(prettier_map_string(sched_map_symbolic)) +print("space (statement instances -> lex time):") +print(sched_map_symbolic.space) + +1/0 # left off here # *Explicit* lexicographic mapping- map each tuple to all tuples occuring later print("---------------------------------------------------------------------------") @@ -174,6 +179,8 @@ lex_map_symbolic = sched.get_lex_map_symbolic(iname_bounds) #print(prettier_map_string(lex_map_explicit)) print("lex map symbolic:") print(prettier_map_string(lex_map_symbolic)) +print("space (lex time -> lex time):") +print(lex_map_symbolic.space) # Statement instance ordering @@ -186,6 +193,8 @@ SIO_symbolic_valid = get_statement_ordering_map( sched_map_symbolic, lex_map_symbolic) print("statement instance ordering symbolic (valid_sched):") print(prettier_map_string(SIO_symbolic_valid)) +print("space (statement instances -> statement instances):") +print(SIO_symbolic_valid.space) # TODO left off here sched_inames = ['j'] @@ -218,19 +227,23 @@ dep_constraint_vars = DependencyConstraintVars( deps = [ Dependency(1, 0, DT.SAME, 'j'), + #Dependency(1, 0, DT.NONE, 'j'), ] +print("----------------------------------------------------------------------") print([str(dep) for dep in deps]) constraint_map = create_dependency_constraint( deps, dep_constraint_vars) print("constraint map:") print(prettier_map_string(constraint_map)) +print("space (statment instances -> statement instances):") +print(constraint_map.space) # TODO left off here, these spaces need to match and they don't #assert constraint_map.space == SIO_symbolic_valid.space #1/0 -#print("is valid sched valid?") -#print(constraint_map.is_subset(SIO_symbolic_valid)) +print("is valid sched valid? 
constraint map subset of SIO?") +print(constraint_map.is_subset(SIO_symbolic_valid)) diff --git a/lexicographic_order_map.py b/lexicographic_order_map.py index c8b5f2789..a33a50967 100644 --- a/lexicographic_order_map.py +++ b/lexicographic_order_map.py @@ -44,8 +44,8 @@ def create_explicit_map_from_tuples(tuple_pairs, space): return union_map -#def create_symbolic_map_from_tuples(tuple_pairs, space, var_bounds, var_to_domain_dict, var_to_params_dict): -def create_symbolic_map_from_tuples(tuple_pairs, space, var_bounds, domain_to_intersect): +#def create_symbolic_map_from_tuples(tuple_pairs, space, var_bounds, domain_to_intersect): +def create_symbolic_map_from_tuples(tuple_pairs, space, domain_to_intersect): dim_type = isl.dim_type individual_maps = [] @@ -77,6 +77,8 @@ def create_symbolic_map_from_tuples(tuple_pairs, space, var_bounds, domain_to_in constraint = constraint \ & islvars[out_names[i]].eq_set(islvars[val_out]) constraints_set = constraints_set | constraint + # TODO temp hack for testing: + #constraints_set = constraints_set & islvars['ps'].eq_set(islvars[0]+3) # TODO remove result_map = isl.Map.from_domain(constraints_set) result_map = result_map.move_dims( diff --git a/sched_check_utils.py b/sched_check_utils.py index 4e9faeb67..2d5a454ee 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -14,5 +14,11 @@ def get_islvars_from_space(space): out_names = space.get_var_names(isl.dim_type.out) return isl.make_zero_and_vars(in_names+out_names, param_names) -def get_dim_for_isl_space_param(space, param): - return space.get_var_names(isl.dim_type.param).index(param) +def get_dim_for_isl_space_var(space, dim_type, var): + return space.get_var_names(dim_type).index(param) + +def add_and_name_dims_to_isl_set(isl_set, dim_type, names, new_pose_start): + new_set = isl_set.insert_dims(dim_type, new_pose_start, len(names)).set_dim_name(dim_type, new_pose_start, names[0]) + for i, name in enumerate(names[1:]): + new_set = new_set.set_dim_name(dim_type, new_pose_start+1+i, name) + return new_set diff --git a/schedule.py b/schedule.py index 992a0726d..528d99b63 100644 --- a/schedule.py +++ b/schedule.py @@ -141,7 +141,9 @@ class LexSchedule(object): from schedule_checker.lexicographic_order_map import get_space return get_space(params_sched, in_names_sched, out_names_sched) - def get_space_for_symbolic_sched(self, iname_bounds): + #def get_space_for_symbolic_sched(self, iname_bounds): + def get_space_for_symbolic_sched(self): + """ iname_bound_params = [] for iname in self.inames_not_enumerated: lo, up = iname_bounds[iname] @@ -151,8 +153,9 @@ class LexSchedule(object): if not isinstance(up, int): #iname_bound_params.append("p"+iname+"up") iname_bound_params.append(up) - - params_sched = ["ps"] + iname_bound_params + """ + #params_sched = ["ps"] + iname_bound_params + params_sched = [] in_names_sched = ["s"] + self.inames_not_enumerated out_names_sched = ["l"+str(i) for i in range(self.max_lex_dims())] from schedule_checker.lexicographic_order_map import get_space @@ -216,40 +219,35 @@ class LexSchedule(object): sched_space = self.get_space_for_explicit_sched() return self.create_explicit_isl_map(sched_space) - def create_symbolic_isl_map(self, iname_bounds, domains, iname_to_params_dict): # TODO don't need all of these - # TODO assumes all knl inames included in iname_bounds - from schedule_checker.lexicographic_order_map import create_symbolic_map_from_tuples - from schedule_checker.sched_check_utils import flatten_2d_list, get_dim_for_isl_space_param - all_inames = 
list(iname_bounds.keys()) + def create_symbolic_isl_map(self, domains): + from schedule_checker.lexicographic_order_map import ( + create_symbolic_map_from_tuples, + ) + from schedule_checker.sched_check_utils import ( + add_and_name_dims_to_isl_set + ) + all_inames = list(domains.keys()) self.add_symbolic_inames_to_statement_instances(all_inames) - extra_params = [b for b in flatten_2d_list(iname_bounds.values()) - if isinstance(b,str)] - sched_space = self.get_space_for_symbolic_sched(iname_bounds) - #sched_space = self.get_space_for_explicit_sched() + sched_space = self.get_space_for_symbolic_sched() - # intersect all domains for symbolic (non-enumerated) inames found in statement instances + # intersect all domains for symbolic (non-enumerated) + # inames found in statement instances domain_intersection = domains[self.inames_not_enumerated[0]] #TODO what if self.inames_not_enumerated is empty? for iname in self.inames_not_enumerated[1:]: domain_intersection = domain_intersection.intersect(domains[iname]) - # inames not found in statement instance tuples should be removed - inames_to_remove_from_domains = all_inames.copy() - for iname in self.inames_not_enumerated: - inames_to_remove_from_domains.remove(iname) - - #dom = domains['j'] # TODO which domains(s) do we use? - #vars_to_remove = ['i'] # TODO where do we get this? in_names? - domain_stripped = domain_intersection.copy() - for iname in inames_to_remove_from_domains: - for p in iname_to_params_dict[iname]: - domain_stripped = domain_intersection.remove_dims( - isl.dim_type.param, - get_dim_for_isl_space_param(domain_intersection.space, p), - 1) - # TODO is projecting out iname necessary? - - return create_symbolic_map_from_tuples(list(self.items()), sched_space, iname_bounds, domain_stripped) + domain_stripped = domain_intersection.project_out_except( + self.inames_not_enumerated, + [isl.dim_type.set] + ) + new_pose = 0 # insert 's' at beginning # TODO don't hardcode 's' + domain_to_intersect = add_and_name_dims_to_isl_set( + domain_stripped, isl.dim_type.out, ['s'], new_pose) # TODO don't hardcode 's' + + # TODO this map needs 'ps' = # ?? 
+ return create_symbolic_map_from_tuples( + list(self.items()), sched_space, domain_to_intersect) def get_lex_map_explicit(self): -- GitLab From abac82deff8cb2792ca527132d40d93372291697 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sun, 26 May 2019 19:03:24 -0500 Subject: [PATCH 016/415] removed bounds variables from symbolic lex order map --- example_schedule_creation.py | 4 ++-- lexicographic_order_map.py | 22 +++++++++++++--------- 2 files changed, 15 insertions(+), 11 deletions(-) diff --git a/example_schedule_creation.py b/example_schedule_creation.py index 31e307556..4c80cbd5f 100644 --- a/example_schedule_creation.py +++ b/example_schedule_creation.py @@ -168,8 +168,6 @@ print(prettier_map_string(sched_map_symbolic)) print("space (statement instances -> lex time):") print(sched_map_symbolic.space) -1/0 # left off here - # *Explicit* lexicographic mapping- map each tuple to all tuples occuring later print("---------------------------------------------------------------------------") #lex_map_explicit = sched.get_lex_map_explicit() @@ -182,6 +180,8 @@ print(prettier_map_string(lex_map_symbolic)) print("space (lex time -> lex time):") print(lex_map_symbolic.space) +1/0 # left off here + # Statement instance ordering print("----------------------------------------------------------------------") diff --git a/lexicographic_order_map.py b/lexicographic_order_map.py index a33a50967..a212e2027 100644 --- a/lexicographic_order_map.py +++ b/lexicographic_order_map.py @@ -44,7 +44,6 @@ def create_explicit_map_from_tuples(tuple_pairs, space): return union_map -#def create_symbolic_map_from_tuples(tuple_pairs, space, var_bounds, domain_to_intersect): def create_symbolic_map_from_tuples(tuple_pairs, space, domain_to_intersect): dim_type = isl.dim_type @@ -130,13 +129,13 @@ def get_space(param_names, in_names, out_names): # from other things...) 
def create_symbolic_lex_mapping( n_dims, - param_names=None, + #param_names=None, in_names=None, out_names=None, dim_bound_vals=None, ): - if param_names is None: - param_names = [["lo%s" % (i), "up%s" % (i)] for i in range(n_dims)] + #if param_names is None: + # param_names = [["lo%s" % (i), "up%s" % (i)] for i in range(n_dims)] if in_names is None: in_names = ["i%s" % (i) for i in range(n_dims)] if out_names is None: @@ -145,7 +144,8 @@ def create_symbolic_lex_mapping( if dim_bound_vals is None: raise NotImplementedError("dim_bound_vals cannot be None") - assert len(in_names) == len(out_names) == len(param_names) == len(dim_bound_vals) == n_dims + #assert len(in_names) == len(out_names) == len(param_names) == len(dim_bound_vals) == n_dims + assert len(in_names) == len(out_names) == len(dim_bound_vals) == n_dims dim_type = isl.dim_type from schedule_checker.sched_check_utils import flatten_2d_list @@ -158,13 +158,15 @@ def create_symbolic_lex_mapping( islvars = isl.make_zero_and_vars( in_names+out_names, #flatten_2d_list(param_names)) - flatten_2d_list(param_names)+params_in_dim_bounds) + #flatten_2d_list(param_names)+params_in_dim_bounds) + params_in_dim_bounds) # [param for param_pair in param_names for param in param_pair]) # initialize set with constraint that is always true - lex_set_outer_bounds = islvars[0].eq_set(islvars[0]) + #lex_set_outer_bounds = islvars[0].eq_set(islvars[0]) # make constraints to bound dim vars dim_bound[0] <= ix < dim_bound[1] #for i, dim_bound in enumerate(dim_bound_vals): + """ for i in range(n_dims): lex_set_outer_bounds = lex_set_outer_bounds \ & islvars[in_names[i]].ge_set(islvars[param_names[i][0]]) \ @@ -191,6 +193,7 @@ def create_symbolic_lex_mapping( # upper bound is variable lex_set_outer_bounds = lex_set_outer_bounds \ & islvars[param_names[i][1]].eq_set(islvars[upper_bound]) + """ # create constraint enforcing lex ordering, e.g., in the 3-dim case: # i0 < o0 or ((i0 = o0) and (i1 < o1)) @@ -204,8 +207,9 @@ def create_symbolic_lex_mapping( islvars[in_names[j]].eq_set(islvars[out_names[j]]) lex_set_order_bound = lex_set_order_bound | lex_set_order_bound_conj - lex_set = lex_set_outer_bounds & lex_set_order_bound - lex_map = isl.Map.from_domain(lex_set) + #lex_set = lex_set_outer_bounds & lex_set_order_bound + #lex_map = isl.Map.from_domain(lex_set) + lex_map = isl.Map.from_domain(lex_set_order_bound) lex_map = lex_map.move_dims( dim_type.out, 0, dim_type.in_, len(in_names), len(out_names)) -- GitLab From f3bfc25e477731008aff9d677c79e86a89185e0c Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sun, 26 May 2019 19:07:45 -0500 Subject: [PATCH 017/415] no longer passing unnecessary iname bounds around for lex order map creation --- example_schedule_creation.py | 2 +- lexicographic_order_map.py | 22 +++++++++++----------- schedule.py | 12 ++++++------ 3 files changed, 18 insertions(+), 18 deletions(-) diff --git a/example_schedule_creation.py b/example_schedule_creation.py index 4c80cbd5f..af36b9764 100644 --- a/example_schedule_creation.py +++ b/example_schedule_creation.py @@ -171,7 +171,7 @@ print(sched_map_symbolic.space) # *Explicit* lexicographic mapping- map each tuple to all tuples occuring later print("---------------------------------------------------------------------------") #lex_map_explicit = sched.get_lex_map_explicit() -lex_map_symbolic = sched.get_lex_map_symbolic(iname_bounds) +lex_map_symbolic = sched.get_lex_map_symbolic(list(iname_bounds.keys())) #print("lex map explicit:") #print(prettier_map_string(lex_map_explicit)) diff --git 
a/lexicographic_order_map.py b/lexicographic_order_map.py index a212e2027..f75249a1f 100644 --- a/lexicographic_order_map.py +++ b/lexicographic_order_map.py @@ -132,7 +132,8 @@ def create_symbolic_lex_mapping( #param_names=None, in_names=None, out_names=None, - dim_bound_vals=None, + #dim_bound_vals=None, + extra_params=None, ): #if param_names is None: # param_names = [["lo%s" % (i), "up%s" % (i)] for i in range(n_dims)] @@ -141,25 +142,24 @@ def create_symbolic_lex_mapping( if out_names is None: from schedule_checker.dependency import append_apostrophes out_names = append_apostrophes(in_names) - if dim_bound_vals is None: - raise NotImplementedError("dim_bound_vals cannot be None") + #if dim_bound_vals is None: + # raise NotImplementedError("dim_bound_vals cannot be None") #assert len(in_names) == len(out_names) == len(param_names) == len(dim_bound_vals) == n_dims - assert len(in_names) == len(out_names) == len(dim_bound_vals) == n_dims + assert len(in_names) == len(out_names) == n_dims dim_type = isl.dim_type - from schedule_checker.sched_check_utils import flatten_2d_list + #from schedule_checker.sched_check_utils import flatten_2d_list - #TODO left off here, need to add params from dim_bounds to islvars? - params_in_dim_bounds = [] - for v in flatten_2d_list(dim_bound_vals): - if not isinstance(v, int): - params_in_dim_bounds.append(v) + #params_in_dim_bounds = [] + #for v in flatten_2d_list(dim_bound_vals): + # if not isinstance(v, int): + # params_in_dim_bounds.append(v) islvars = isl.make_zero_and_vars( in_names+out_names, #flatten_2d_list(param_names)) #flatten_2d_list(param_names)+params_in_dim_bounds) - params_in_dim_bounds) + extra_params) # [param for param_pair in param_names for param in param_pair]) # initialize set with constraint that is always true diff --git a/schedule.py b/schedule.py index 528d99b63..ce530924d 100644 --- a/schedule.py +++ b/schedule.py @@ -273,17 +273,17 @@ class LexSchedule(object): return create_explicit_map_from_tuples(explicit_lex_map_pairs, lex_space_explicit) - def get_lex_map_symbolic(self, var_bounds_dict): + #def get_lex_map_symbolic(self, var_bounds_dict): + def get_lex_map_symbolic(self, extra_params): from schedule_checker.lexicographic_order_map import ( create_symbolic_lex_mapping, ) n_dims = self.max_lex_dims() - #lex_dim_bounds = list(zip(self.get_min_lex_dim_vals(), - # [1 + v for v in self.get_max_lex_dim_vals()])) - lex_dim_bounds = list(zip(self.get_min_lex_dim_vals(var_bounds_dict), - self.get_max_lex_dim_bounds(var_bounds_dict))) - return create_symbolic_lex_mapping(n_dims, dim_bound_vals=lex_dim_bounds) + #lex_dim_bounds = list(zip(self.get_min_lex_dim_vals(var_bounds_dict), + # self.get_max_lex_dim_bounds(var_bounds_dict))) + #return create_symbolic_lex_mapping(n_dims, dim_bound_vals=lex_dim_bounds) + return create_symbolic_lex_mapping(n_dims, extra_params=extra_params) #def get_isl_map(self): def get_isl_map_str(self): -- GitLab From cb5cdd94b47708efb3d2e77001833f8529901aba Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sun, 26 May 2019 19:13:00 -0500 Subject: [PATCH 018/415] fixing bug- passing correct parameters to symbolic lex order map creator --- example_schedule_creation.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/example_schedule_creation.py b/example_schedule_creation.py index af36b9764..f095c9c54 100644 --- a/example_schedule_creation.py +++ b/example_schedule_creation.py @@ -171,7 +171,14 @@ print(sched_map_symbolic.space) # *Explicit* lexicographic mapping- map each tuple to all 
tuples occuring later print("---------------------------------------------------------------------------") #lex_map_explicit = sched.get_lex_map_explicit() -lex_map_symbolic = sched.get_lex_map_symbolic(list(iname_bounds.keys())) + +params_in_dim_bounds = [] +from schedule_checker.sched_check_utils import flatten_2d_list +# TODO need better way to incorporate these params into lex map... do we even need them? +for v in flatten_2d_list(dim_bound_vals): + if not isinstance(v, int): + params_in_dim_bounds.append(v) +lex_map_symbolic = sched.get_lex_map_symbolic(params_in_dim_bounds) #print("lex map explicit:") #print(prettier_map_string(lex_map_explicit)) -- GitLab From 4b5aa81b79f1d7a4cd364a0abb4de5313cca378f Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sun, 26 May 2019 19:22:42 -0500 Subject: [PATCH 019/415] completely removing extra params from symbolic lex ordering, don't think they're necessary --- example_schedule_creation.py | 19 +++++++++--------- lexicographic_order_map.py | 38 +++--------------------------------- schedule.py | 6 ++++-- 3 files changed, 16 insertions(+), 47 deletions(-) diff --git a/example_schedule_creation.py b/example_schedule_creation.py index f095c9c54..c5ee01f23 100644 --- a/example_schedule_creation.py +++ b/example_schedule_creation.py @@ -172,13 +172,13 @@ print(sched_map_symbolic.space) print("---------------------------------------------------------------------------") #lex_map_explicit = sched.get_lex_map_explicit() -params_in_dim_bounds = [] -from schedule_checker.sched_check_utils import flatten_2d_list -# TODO need better way to incorporate these params into lex map... do we even need them? -for v in flatten_2d_list(dim_bound_vals): - if not isinstance(v, int): - params_in_dim_bounds.append(v) -lex_map_symbolic = sched.get_lex_map_symbolic(params_in_dim_bounds) +#params_in_dim_bounds = [] +#from schedule_checker.sched_check_utils import flatten_2d_list +#for v in flatten_2d_list(iname_bounds.values()): +# if not isinstance(v, int): +# params_in_dim_bounds.append(v) +#lex_map_symbolic = sched.get_lex_map_symbolic(params_in_dim_bounds) +lex_map_symbolic = sched.get_lex_map_symbolic() #print("lex map explicit:") #print(prettier_map_string(lex_map_explicit)) @@ -187,9 +187,6 @@ print(prettier_map_string(lex_map_symbolic)) print("space (lex time -> lex time):") print(lex_map_symbolic.space) -1/0 # left off here - - # Statement instance ordering print("----------------------------------------------------------------------") #SIO_explicit_valid = get_statement_ordering_map( @@ -203,6 +200,8 @@ print(prettier_map_string(SIO_symbolic_valid)) print("space (statement instances -> statement instances):") print(SIO_symbolic_valid.space) +1/0 # left off here + # TODO left off here sched_inames = ['j'] iname_params = [(None, 'pj_up')] diff --git a/lexicographic_order_map.py b/lexicographic_order_map.py index f75249a1f..7b7721e98 100644 --- a/lexicographic_order_map.py +++ b/lexicographic_order_map.py @@ -133,7 +133,7 @@ def create_symbolic_lex_mapping( in_names=None, out_names=None, #dim_bound_vals=None, - extra_params=None, + #extra_params=None, ): #if param_names is None: # param_names = [["lo%s" % (i), "up%s" % (i)] for i in range(n_dims)] @@ -159,42 +159,10 @@ def create_symbolic_lex_mapping( in_names+out_names, #flatten_2d_list(param_names)) #flatten_2d_list(param_names)+params_in_dim_bounds) - extra_params) + #extra_params) + []) # [param for param_pair in param_names for param in param_pair]) - # initialize set with constraint that is always true - 
#lex_set_outer_bounds = islvars[0].eq_set(islvars[0]) - # make constraints to bound dim vars dim_bound[0] <= ix < dim_bound[1] - #for i, dim_bound in enumerate(dim_bound_vals): - """ - for i in range(n_dims): - lex_set_outer_bounds = lex_set_outer_bounds \ - & islvars[in_names[i]].ge_set(islvars[param_names[i][0]]) \ - & islvars[in_names[i]].lt_set(islvars[param_names[i][1]]) \ - & islvars[out_names[i]].ge_set(islvars[param_names[i][0]]) \ - & islvars[out_names[i]].lt_set(islvars[param_names[i][1]]) - if dim_bound_vals: - #lex_set_outer_bounds = lex_set_outer_bounds \ - # & islvars[param_names[i][0]].eq_set(islvars[0]+dim_bound_vals[i][0]) \ - # & islvars[param_names[i][1]].eq_set(islvars[0]+dim_bound_vals[i][1]) - lower_bound = dim_bound_vals[i][0] - upper_bound = dim_bound_vals[i][1] - if isinstance(lower_bound, int): - lex_set_outer_bounds = lex_set_outer_bounds \ - & islvars[param_names[i][0]].eq_set(islvars[0]+lower_bound) - else: - # lower bound is variable - lex_set_outer_bounds = lex_set_outer_bounds \ - & islvars[param_names[i][0]].eq_set(islvars[lower_bound]) - if isinstance(upper_bound, int): - lex_set_outer_bounds = lex_set_outer_bounds \ - & islvars[param_names[i][1]].eq_set(islvars[0]+upper_bound) - else: - # upper bound is variable - lex_set_outer_bounds = lex_set_outer_bounds \ - & islvars[param_names[i][1]].eq_set(islvars[upper_bound]) - """ - # create constraint enforcing lex ordering, e.g., in the 3-dim case: # i0 < o0 or ((i0 = o0) and (i1 < o1)) # or ((i0 = o0) and (i1 = o1) and (i2 < o2)) diff --git a/schedule.py b/schedule.py index ce530924d..a03f0d646 100644 --- a/schedule.py +++ b/schedule.py @@ -274,7 +274,8 @@ class LexSchedule(object): lex_space_explicit) #def get_lex_map_symbolic(self, var_bounds_dict): - def get_lex_map_symbolic(self, extra_params): + #def get_lex_map_symbolic(self, extra_params): + def get_lex_map_symbolic(self): from schedule_checker.lexicographic_order_map import ( create_symbolic_lex_mapping, ) @@ -283,7 +284,8 @@ class LexSchedule(object): #lex_dim_bounds = list(zip(self.get_min_lex_dim_vals(var_bounds_dict), # self.get_max_lex_dim_bounds(var_bounds_dict))) #return create_symbolic_lex_mapping(n_dims, dim_bound_vals=lex_dim_bounds) - return create_symbolic_lex_mapping(n_dims, extra_params=extra_params) + #return create_symbolic_lex_mapping(n_dims, extra_params=extra_params) + return create_symbolic_lex_mapping(n_dims) #def get_isl_map(self): def get_isl_map_str(self): -- GitLab From 21b310e5663050aa8ce23617ad3ed2ed760aa8b6 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 27 May 2019 20:30:07 -0500 Subject: [PATCH 020/415] added function create_new_set_with_primes() which just appends apostrophe to all set vars; added function add_missing_set_dims_to_map_indims(map,set) which adds dims to map so that its in-dims include the set dims --- sched_check_utils.py | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/sched_check_utils.py b/sched_check_utils.py index 2d5a454ee..8bb95c7d2 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -22,3 +22,33 @@ def add_and_name_dims_to_isl_set(isl_set, dim_type, names, new_pose_start): for i, name in enumerate(names[1:]): new_set = new_set.set_dim_name(dim_type, new_pose_start+1+i, name) return new_set + +def create_new_set_with_primes(old_set): + new_set = old_set.copy() + for i in range(old_set.n_dim()): + new_set = new_set.set_dim_name(isl.dim_type.out, i, old_set.get_dim_name(isl.dim_type.out, i)+"'") + return new_set + +def 
add_missing_set_dims_to_map_indims(islmap, islset): + new_map = islmap.copy() + for i in range(islset.n_dim()): + new_dim_name = islset.get_dim_name(isl.dim_type.out, i) + + old_map_in_names = new_map.get_var_names(isl.dim_type.in_) + if len(old_map_in_names) > i and old_map_in_names[i] == new_dim_name: + continue + else: + new_map = new_map.insert_dims(isl.dim_type.in_, i, 1) + new_map = new_map.set_dim_name(isl.dim_type.in_, i, new_dim_name) + """ + old_map_out_names = new_map.get_var_names(isl.dim_type.out) + if len(old_map_out_names) > i and old_map_out_names[i] == new_dim_name: + continue + else: + new_map = new_map.insert_dims(isl.dim_type.out, i, 1) + new_map = new_map.set_dim_name(isl.dim_type.out, i, new_dim_name) + """ + return new_map + + + -- GitLab From a54bae7ec3da8f66e57252d026324e219e9e3438 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 27 May 2019 20:30:25 -0500 Subject: [PATCH 021/415] added concurrent inames (ignored in initial sched creation) into lex order map --- lexicographic_order_map.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/lexicographic_order_map.py b/lexicographic_order_map.py index 7b7721e98..25ab708b9 100644 --- a/lexicographic_order_map.py +++ b/lexicographic_order_map.py @@ -76,14 +76,15 @@ def create_symbolic_map_from_tuples(tuple_pairs, space, domain_to_intersect): constraint = constraint \ & islvars[out_names[i]].eq_set(islvars[val_out]) constraints_set = constraints_set | constraint - # TODO temp hack for testing: - #constraints_set = constraints_set & islvars['ps'].eq_set(islvars[0]+3) # TODO remove result_map = isl.Map.from_domain(constraints_set) result_map = result_map.move_dims( dim_type.out, 0, dim_type.in_, len(in_names), len(out_names)) + from schedule_checker.sched_check_utils import add_missing_set_dims_to_map_indims + result_map = add_missing_set_dims_to_map_indims(result_map, domain_to_intersect) + return result_map.intersect_domain(domain_to_intersect) -- GitLab From ca934cfda91e192e96a1727f7e73580b3423f62c Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 27 May 2019 20:30:45 -0500 Subject: [PATCH 022/415] no longer projecting out concurrent inames (ignored in initial sched creation) when creating sched map --- schedule.py | 28 +++++++++++++++------------- 1 file changed, 15 insertions(+), 13 deletions(-) diff --git a/schedule.py b/schedule.py index a03f0d646..c9c7a0491 100644 --- a/schedule.py +++ b/schedule.py @@ -5,15 +5,15 @@ from collections import OrderedDict class Statement(object): def __init__( self, - statement_id, + sid, active_inames, ): - self.statement_id = statement_id # string + self.sid = sid # string self.active_inames = active_inames # [string, ] def __str__(self): return "%s {%s}" % ( - self.statement_id, ",".join(self.active_inames)) + self.sid, ",".join(self.active_inames)) class StatementInstance(object): @@ -31,12 +31,12 @@ class StatementInstance(object): def __str__(self): import six return "[%s,%s]" % ( - self.statement.statement_id, ",".join( + self.statement.sid, ",".join( ["%d" % (v) for k, v in sorted(six.iteritems(self.iname_vals))])) def __eq__(self, other): return self.iname_vals == other.iname_vals and \ - self.statement.statement_id == other.statement.statement_id + self.statement.sid == other.statement.sid def __hash__(self): return hash(str(self)) @@ -52,7 +52,7 @@ class LexSchedule(object): self.lex_schedule = OrderedDict() # statement instance: lex point self.inames_enumerated = [] # symbolic inames in sched that have been enumerated into explicit statement 
instances self.inames_not_enumerated = [] # TODO better way to do this - self.lp_insnid_to_id = {} + self.lp_insnid_to_int_sid = {} from loopy.schedule import (EnterLoop, LeaveLoop, Barrier, RunInstruction) cur_nest_lex_prefix = [] @@ -73,7 +73,7 @@ class LexSchedule(object): cur_nest_lex_prefix.pop() # pop insn ct variable elif isinstance(sched_item, RunInstruction): self.add_new_lp_insnid(sched_item.insn_id) - insn_id_int = self.lp_insnid_to_id[sched_item.insn_id] + insn_id_int = self.lp_insnid_to_int_sid[sched_item.insn_id] #inames = knl.id_to_insn[insn_id].within_inames #conc_dict = get_iname_concurrency_dict(inames, knl) #print("RunInstruction: id: %s; inames: %s" % (sched_item.insn_id, conc_dict)) @@ -129,10 +129,10 @@ class LexSchedule(object): self.inames_not_enumerated.append(iname) def add_new_lp_insnid(self, lp_insnid): - if self.lp_insnid_to_id: - self.lp_insnid_to_id[lp_insnid] = max(self.lp_insnid_to_id.values()) + 1 + if self.lp_insnid_to_int_sid: + self.lp_insnid_to_int_sid[lp_insnid] = max(self.lp_insnid_to_int_sid.values()) + 1 else: - self.lp_insnid_to_id[lp_insnid] = 0 + self.lp_insnid_to_int_sid[lp_insnid] = 0 def get_space_for_explicit_sched(self): params_sched = ["ps"] + ["p"+iname for iname in self.inames_enumerated] @@ -237,15 +237,17 @@ class LexSchedule(object): for iname in self.inames_not_enumerated[1:]: domain_intersection = domain_intersection.intersect(domains[iname]) + # TODO maybe don't project this out, constraints may involve any iname later... + """ domain_stripped = domain_intersection.project_out_except( self.inames_not_enumerated, [isl.dim_type.set] ) + """ new_pose = 0 # insert 's' at beginning # TODO don't hardcode 's' domain_to_intersect = add_and_name_dims_to_isl_set( - domain_stripped, isl.dim_type.out, ['s'], new_pose) # TODO don't hardcode 's' + domain_intersection, isl.dim_type.out, ['s'], new_pose) # TODO don't hardcode 's' - # TODO this map needs 'ps' = # ?? 
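# Illustrative sketch (not part of the patch above): the 's'-dim insertion done
# just above, shown standalone on a made-up domain. For isl sets, dim_type.set
# and dim_type.out name the same dimensions.
import islpy as isl
toy_dom = isl.Set("[pi_up] -> { [i, j] : 0 <= i < pi_up and 0 <= j < pi_up }")
toy_dom = toy_dom.insert_dims(isl.dim_type.set, 0, 1)
toy_dom = toy_dom.set_dim_name(isl.dim_type.set, 0, "s")
print(toy_dom)  # 's' is left unconstrained: [pi_up] -> { [s, i, j] : ... }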
return create_symbolic_map_from_tuples( list(self.items()), sched_space, domain_to_intersect) @@ -292,7 +294,7 @@ class LexSchedule(object): map_str = "{" for state_inst, lex in self.lex_schedule.items(): domain_elem = "[s=%s,%s]" % ( - state_inst.statement.statement_id, ",".join( + state_inst.statement.sid, ",".join( ["%s=%d" % (iname, val) for iname, val in state_inst.iname_vals.items()])) range_elem = "[%s]" % (",".join("%s" % (l) for l in lex)) map_str += "%s -> %s; " % (domain_elem, range_elem) -- GitLab From b77133caf5add075622fe0fa2772a254fc6a5e37 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 27 May 2019 20:31:31 -0500 Subject: [PATCH 023/415] removed unnecessary class DependencyConstraintVars, enforced consistent iname ordering, applied loopy kernel loop domain to dependency constraint map --- dependency.py | 131 ++++++++------------------------- example_dependency_checking.py | 60 +++++++++------ example_lex_map_creation.py | 4 +- example_schedule_creation.py | 107 ++++++++------------------- 4 files changed, 102 insertions(+), 200 deletions(-) diff --git a/dependency.py b/dependency.py index b2688ebbe..491e296ee 100644 --- a/dependency.py +++ b/dependency.py @@ -82,85 +82,12 @@ def _create_bounded_set_for_dependency_constraints( return bounded_set -class DependencyConstraintVars(object): - def __init__( - self, - inames, - param_names, - param_vals, - statement_var, - statement_param, - statement_param_val, - ): - self.inames = inames - self.param_names = param_names # TODO rename, these are pairs of bound vars - self.param_vals = param_vals # TODO rename, these are pairs of bound vals - self.statement_var = statement_var - self.statement_param = statement_param - self.statement_param_val = statement_param_val - - def get_bounds_constraint_set(self): - var_names = [self.statement_var]+self.inames - param_names = [self.statement_param]+self.list_param_names() - islvars = _make_islvars_with_var_primes( - var_names, param_names) - - bounded_set = islvars[0].eq_set(islvars[0]) # initialize to True - - # bound the statement variable - v = self.statement_var - v_prime = self.statement_var+"'" - p = self.statement_param - b = self.statement_param_val - bounded_set = bounded_set \ - & islvars[v].lt_set(islvars[p]) \ - & islvars[v_prime].lt_set(islvars[p]) \ - & (islvars[0]).le_set(islvars[v]) \ - & (islvars[0]).le_set(islvars[v_prime]) \ - & islvars[p].eq_set(islvars[0]+b) - - # bound the other variables - for v, (p_low, p_up), (pval_low, pval_up) in zip( - self.inames, self.param_names, self.param_vals): - - # create constraint pval_low = p_low <= v,v'< p_up = pval_up - - if p_low is None: - assert isinstance(pval_low, int) - lower_bound = islvars[0] + pval_low - else: - lower_bound = islvars[p_low] - if not pval_low is None: - bounded_set = bounded_set & lower_bound.eq_set(islvars[0]+pval_low) - - if p_up is None: - assert isinstance(pval_up, int) - upper_bound = islvars[0] + pval_up - else: - upper_bound = islvars[p_up] - if not pval_up is None: - bounded_set = bounded_set & upper_bound.eq_set(islvars[0]+pval_up) - - v_prime = v+"'" - bounded_set = bounded_set \ - & islvars[v].lt_set(upper_bound) \ - & islvars[v_prime].lt_set(upper_bound) \ - & lower_bound.le_set(islvars[v]) \ - & lower_bound.le_set(islvars[v_prime]) - - return bounded_set - - def list_param_names(self): - from schedule_checker.sched_check_utils import flatten_2d_list - return [p for p in flatten_2d_list(self.param_names) if not p is None] - - def __str__(self): - return str(self.get_bounds_constraint_set()) - - 
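# Illustrative sketch (not from the patch): the kind of map the refactored
# create_dependency_constraint below produces, written out directly for a toy
# SAME-on-i / PRIOR-on-j dependency from statement 0 to statement 1. The primed
# dims are spelled sp/ip/jp and the bound p is made up.
import islpy as isl
toy_dep = isl.Map(
    "[p] -> { [s, i, j] -> [sp, ip, jp] : s = 0 and sp = 1 and ip = i "
    "and jp > j and 0 <= i, j, ip, jp < p }")
# The schedule check then amounts to a subset test against the statement
# instance ordering implied by the schedule, e.g.:
toy_sio = isl.Map(
    "[p] -> { [s, i, j] -> [sp, ip, jp] : s = 0 and sp = 1 and ip = i "
    "and 0 <= i, j, ip, jp < p }")
print(toy_dep.is_subset(toy_sio))  # True for this toy pair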
def create_dependency_constraint( dependencies, - dep_constraint_vars, + all_inames_ordered, + statement_var, + domain_constraint_set, + sid_to_int, ): # This function uses the dependencies given to create the following constraint: # Statement [s,i,j] comes before statement [s',i',j'] iff @@ -168,25 +95,19 @@ def create_dependency_constraint( # assumes statements are numbered sequentially # (statement_bound = max statement id + 1) - statement_param = dep_constraint_vars.statement_param - #param_names = dep_constraint_vars.param_names - param_names = dep_constraint_vars.list_param_names() - all_inames = dep_constraint_vars.inames - statement_var = dep_constraint_vars.statement_var - # make sure all dependencies involve same two statements - if len(set([dep.statement_before for dep in dependencies])) != 1 or \ - len(set([dep.statement_after for dep in dependencies])) != 1: + if len(set([dep.statement_before.sid for dep in dependencies])) != 1 or \ + len(set([dep.statement_after.sid for dep in dependencies])) != 1: raise ValueError("All depencencies must be between same two statements.") # make sure all dependencies involve different inames # TODO upate after allowing prior(i,k) if len(set([dep.iname for dep in dependencies])) != len(dependencies): raise ValueError("All depencencies must apply to different inames.") - DT = DependencyType statement_var_prime = statement_var+"'" + DT = DependencyType islvars = _make_islvars_with_var_primes( - [statement_var]+all_inames, - [statement_param]+param_names) + [statement_var]+all_inames_ordered, + []) # initialize constraints to False # this will disappear as soon as we add a constraint that is not DT.NONE @@ -199,7 +120,7 @@ def create_dependency_constraint( continue iname_prime = iname+"'" # i' - other_inames = all_inames.copy() + other_inames = all_inames_ordered.copy() other_inames.remove(iname) # remaining inames, e.g., [j, k] other_inames_prime = append_apostrophes(other_inames) # e.g., [j', k'] @@ -212,18 +133,28 @@ def create_dependency_constraint( elif dep_type == DT.ALL: constraint_set = constraint_set & islvars[0].eq_set(islvars[0]) # True - constraint_set = constraint_set & islvars[statement_var].eq_set(islvars[0]+dep.statement_before) - constraint_set = constraint_set & islvars[statement_var_prime].eq_set(islvars[0]+dep.statement_after) - # TODO get this working - # add 'or' to indicate that this constraint doesn't apply to other statements - #remainder_set = islvars[statement_var].ne_set(islvars[0]+dep.statement_before) \ - # | islvars[statement_var_prime].ne_set(islvars[0]+dep.statement_after) - #print("remainder_set", remainder_set) - #constraint_set = constraint_set | remainder_set - + s_before_int = sid_to_int[dep.statement_before.sid] + s_after_int = sid_to_int[dep.statement_after.sid] + constraint_set = constraint_set & islvars[statement_var].eq_set(islvars[0]+s_before_int) + constraint_set = constraint_set & islvars[statement_var_prime].eq_set(islvars[0]+s_after_int) all_constraints_set = all_constraints_set | constraint_set - all_constraints_set = all_constraints_set & dep_constraint_vars.get_bounds_constraint_set() + all_constraints_map = _convert_constraint_set_to_map(all_constraints_set, len(all_inames_ordered)+1) + + from schedule_checker.sched_check_utils import create_new_set_with_primes + range_constraint_set = create_new_set_with_primes(domain_constraint_set) + + from schedule_checker.sched_check_utils import ( + add_and_name_dims_to_isl_set + ) + new_pose = 0 # insert 's' at beginning # TODO don't hardcode 's' + 
domain_to_intersect = add_and_name_dims_to_isl_set( + domain_constraint_set, isl.dim_type.out, ["s"], new_pose) # TODO don't hardcode 's' + range_to_intersect = add_and_name_dims_to_isl_set( + range_constraint_set, isl.dim_type.out, ["s'"], new_pose) # TODO don't hardcode 's' - return _convert_constraint_set_to_map(all_constraints_set, len(dep_constraint_vars.inames)+1) + map_with_loop_domain_constraints = all_constraints_map.intersect_domain(domain_to_intersect).intersect_range(range_to_intersect) + #blah2 = isl.Map("[pi_up, pj_up] -> { [s = 1, i, j] -> [s' = 0, i' = i, j'] : 0 <= i < pi_up and 0 <= j < pj_up and j' > j and 0 <= j' < pj_up}") + #assert blah2 == map_with_loop_domain_constraints + return map_with_loop_domain_constraints diff --git a/example_dependency_checking.py b/example_dependency_checking.py index 01cb9b013..0b3444c49 100644 --- a/example_dependency_checking.py +++ b/example_dependency_checking.py @@ -5,7 +5,6 @@ from schedule_checker.dependency import ( DependencyType as DT, create_dependency_constraint, append_apostrophes, - DependencyConstraintVars, ) from schedule_checker.lexicographic_order_map import ( make_lex_mapping_tuple_pairs, @@ -14,6 +13,7 @@ from schedule_checker.lexicographic_order_map import ( set_space_names, get_space, ) +from schedule_checker.schedule import Statement from schedule_checker.sched_check_utils import prettier_map_string @@ -31,13 +31,9 @@ print("Kernel:") print(knl) from schedule_checker.sched_check_utils import flatten_2d_list -all_inames = ['i', 'j'] -iname_params = [(None, 'p0'), (None, 'p1')] -param_names_listed = [p for p in flatten_2d_list(iname_params) if not p is None] -iname_param_vals = [(0, 2), (0, 2)] +all_inames_ordered = ['i', 'j'] +#all_inames_ordered = sorted(list(knl.all_inames())) statement_var = 's' -statement_param = 'ps' -statement_bound = 2 # example sched: print("---------------------------------------------------------------------------") @@ -45,8 +41,8 @@ print("------------------------------------------------------------------------- # i is parallel, suppose we want to enforce the following: # for a given i, statement 0 happens before statement 1 -params_sched = [statement_param]+param_names_listed -in_names_sched = [statement_var]+all_inames +params_sched = ['p0', 'p1'] +in_names_sched = [statement_var]+all_inames_ordered out_names_sched = ['l0', 'l1'] sched_space = get_space(params_sched, in_names_sched, out_names_sched) @@ -114,28 +110,46 @@ print(prettier_map_string(SIO_explicit_invalid)) # Dependencies and constraints: print("----------------------------------------------------------------------") -dep_constraint_vars = DependencyConstraintVars( - all_inames, - iname_params, - iname_param_vals, - statement_var, - statement_param, - statement_bound, - ) - # i is parallel, suppose we want to enforce the following: # for a given i, statement 0 happens before statement 1 # i dependency is none, j dependency is `prior` +statement_var = 's' + +domains = {} +for iname in all_inames_ordered: + domains[iname] = knl.get_inames_domain(iname) +domains_list = list(domains.values()) +domain_union = domains_list[0] +#TODO is union the right thing to do here? 
+for dom in domains_list[1:]: + domain_union = domain_union.union(dom) +print("domain union:") +print(domain_union) + +# make some dependencies manually for now: +s0 = Statement("0", ["i", "j"]) +s1 = Statement("1", ["i", "j"]) +insnid_to_int_sid = {"0": 0, "1": 1} + deps = [ - #Dependency(0, 1, DT.NONE, 'i'), - Dependency(0, 1, DT.SAME, 'i'), - Dependency(0, 1, DT.SAME, 'j'), + Dependency(s0, s1, DT.SAME, "i"), + Dependency(s0, s1, DT.SAME, "j"), ] + print([str(dep) for dep in deps]) constraint_map = create_dependency_constraint( - deps, dep_constraint_vars) -assert constraint_map.space == SIO_explicit_valid.space + deps, + all_inames_ordered, + statement_var, + domain_union, + insnid_to_int_sid, + ) +print("constraint map space:") +print(constraint_map.space) +print("SIO space:") +print(SIO_explicit_valid.space) +#assert constraint_map.space == SIO_explicit_valid.space print("constraint map:") print(prettier_map_string(constraint_map)) diff --git a/example_lex_map_creation.py b/example_lex_map_creation.py index 92bfe2930..d26b268a0 100644 --- a/example_lex_map_creation.py +++ b/example_lex_map_creation.py @@ -17,12 +17,11 @@ from schedule_checker.lexicographic_order_map import ( # *Symbolic* lexicographic mapping- map each tuple to all tuples occuring later -dim_bounds = [(0,2), (0,2)] # max vals for each dim (e.g., 0 <= i0 < max0 ...) #in_names = ["i", "j"] #out_names = append_apostrophes(in_names) n_dims = 2 #len(in_names) lex_map_symbolic = create_symbolic_lex_mapping( - n_dims, dim_bound_vals=dim_bounds) + n_dims) print("lex_map (symbolic):") print(lex_map_symbolic) @@ -30,6 +29,7 @@ print(lex_map_symbolic) # *Explicit* lexicographic mapping- map each tuple to all tuples occuring later """ +dim_bounds = [(0,2), (0,2)] # max vals for each dim (e.g., 0 <= i0 < max0 ...) 
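# Illustrative sketch (not from the patch): the symbolic 2-d lexicographic
# "happens before" relation used above can also be written down directly as an
# isl map; the dim names here are arbitrary.
import islpy as isl
toy_lex_lt = isl.Map(
    "{ [i0, i1] -> [o0, o1] : i0 < o0 or (i0 = o0 and i1 < o1) }")
print(toy_lex_lt)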
explicit_lex_map_pairs = make_lex_mapping_tuple_pairs(dim_bounds) # for pair in explicit_lex_map_pairs: # print(pair[0], pair[1]) diff --git a/example_schedule_creation.py b/example_schedule_creation.py index c5ee01f23..f8aa90788 100644 --- a/example_schedule_creation.py +++ b/example_schedule_creation.py @@ -3,26 +3,23 @@ import loopy as lp import numpy as np from schedule_checker.dependency import ( Dependency, - DependencyType, - append_apostrophes, + DependencyType as DT, ) from schedule_checker.schedule import Statement, StatementInstance, LexSchedule from schedule_checker.sched_check_utils import prettier_map_string from schedule_checker.lexicographic_order_map import ( create_explicit_map_from_tuples, get_statement_ordering_map, - #set_space_names, get_space, - #create_symbolic_lex_mapping, ) from schedule_checker.sched_check_utils import prettier_map_string -def get_iname_bounds_dict(knl, _set_arbitrary_bounds=None): +def get_iname_bounds_dict(knl, all_inames_ordered, _set_arbitrary_bounds=None): # TODO don't require explicit bounds if _set_arbitrary_bounds: - return dict((iname, _set_arbitrary_bounds) for iname in knl.all_inames()) + return dict((iname, _set_arbitrary_bounds) for iname in all_inames_ordered) from loopy.symbolic import aff_to_expr from loopy.isl_helpers import static_max_of_pw_aff @@ -37,7 +34,7 @@ def get_iname_bounds_dict(knl, _set_arbitrary_bounds=None): bounds = {} all_params = knl.all_params() - for iname in knl.all_inames(): + for iname in all_inames_ordered: #bounds_record = knl.get_iname_bounds(iname, constants_only=True) bounds_record = knl.get_iname_bounds(iname) (_, iname_min_aff), = bounds_record.lower_bound_pw_aff.get_pieces() @@ -62,26 +59,6 @@ def get_iname_bounds_dict(knl, _set_arbitrary_bounds=None): #assert all(isinstance(i,int) for i in int_bounds[iname]) return bounds -def get_iname_to_param_dict(knl): - from loopy.symbolic import aff_to_expr - bounds = {} - all_params = knl.all_params() - for iname in knl.all_inames(): - #bounds_record = knl.get_iname_bounds(iname, constants_only=True) - bounds_record = knl.get_iname_bounds(iname) - (_, iname_min_aff), = bounds_record.lower_bound_pw_aff.get_pieces() - (_, iname_max_aff), = bounds_record.upper_bound_pw_aff.get_pieces() - iname_min_aff = aff_to_expr(iname_min_aff) - iname_max_aff = aff_to_expr(iname_max_aff) - bounds_strs = str(iname_min_aff)+str(iname_max_aff) - params_found = [] - for param in all_params: - if param in bounds_strs: - params_found.append(param) - - bounds[iname] = params_found - return bounds - # make example kernel knl = lp.make_kernel( #"{[i,j]: 0<=i,j<2}", @@ -103,15 +80,9 @@ knl = lp.tag_inames(knl, {"i": "l.0"}) knl = lp.preprocess_kernel(knl) knl = lp.get_one_scheduled_kernel(knl) -# make some dependencies manually for now: -s0 = Statement("0", ["i", "j"]) -s1 = Statement("1", ["i", "j"]) -s2 = Statement("2", ["i", "j"]) -dep_s1_i = Dependency(s0, s1, "i", DependencyType.SAME) -dep_s1_j = Dependency(s0, s1, "j", DependencyType.SAME) -insn_to_deps = {"0":[], "1":[dep_s1_i, dep_s1_j], "2":[]} +# get all inames in consistent ordering: +all_inames_ordered = sorted(list(knl.all_inames())) -# enforce explicit iname bounds for now TODO #print("Kernel:") #print(knl) #print(lp.generate_code_v2(knl).device_code()) @@ -140,10 +111,10 @@ def get_iname_concurrency_dict(inames, knl): return conc_dict # Get schedule ------------------------------------------------------ -iname_bounds = get_iname_bounds_dict(knl) +iname_bounds = get_iname_bounds_dict(knl, all_inames_ordered) domains = {} 
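# Illustrative note (not from the patch) on the get_inames_domain calls in the
# loop just below: with the loopy version used in this series, the full domain
# containing the iname is returned, so inames sharing a domain yield the same
# set. Toy kernel; the names and bound n are made up.
import loopy as lp
toy_knl = lp.make_kernel(
    "{[i,j]: 0<=i,j<n}",
    "out[i,j] = i + j",
    lang_version=(2018, 2))
print(toy_knl.get_inames_domain("i"))  # roughly: [n] -> { [i, j] : 0 <= i, j < n }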
-for iname in knl.all_inames(): +for iname in all_inames_ordered: domains[iname] = knl.get_inames_domain(iname) print("domains:") print(domains) @@ -154,7 +125,6 @@ sched = LexSchedule(knl, iname_bounds) # TODO do we really need iname bounds he #print("LexSchedule before processing:") #print(sched) -iname_to_params_dict = get_iname_to_param_dict(knl) #example_sched_explicit = sched.enumerate_symbolic_inames_and_create_explicit_isl_map(iname_bounds) sched_map_symbolic = sched.create_symbolic_isl_map(domains) print("LexSchedule after processing:") @@ -172,12 +142,6 @@ print(sched_map_symbolic.space) print("---------------------------------------------------------------------------") #lex_map_explicit = sched.get_lex_map_explicit() -#params_in_dim_bounds = [] -#from schedule_checker.sched_check_utils import flatten_2d_list -#for v in flatten_2d_list(iname_bounds.values()): -# if not isinstance(v, int): -# params_in_dim_bounds.append(v) -#lex_map_symbolic = sched.get_lex_map_symbolic(params_in_dim_bounds) lex_map_symbolic = sched.get_lex_map_symbolic() #print("lex map explicit:") @@ -200,60 +164,53 @@ print(prettier_map_string(SIO_symbolic_valid)) print("space (statement instances -> statement instances):") print(SIO_symbolic_valid.space) -1/0 # left off here - -# TODO left off here -sched_inames = ['j'] -iname_params = [(None, 'pj_up')] -iname_param_vals = [(0, None)] -statement_var = 's' -statement_param = 'ps' -statement_bound = 3 - from schedule_checker.dependency import ( - Dependency, - DependencyType as DT, create_dependency_constraint, - append_apostrophes, - DependencyConstraintVars, ) -dep_constraint_vars = DependencyConstraintVars( - sched_inames, - iname_params, - iname_param_vals, - statement_var, - statement_param, - statement_bound, - ) +statement_var = 's' + +domains_list = list(domains.values()) +domain_union = domains_list[0] +#TODO is union the right thing to do here? +for dom in domains_list[1:]: + domain_union = domain_union.union(dom) # i is parallel, suppose we want to enforce the following: # for a given i, statement 0 happens before statement 1 # i dependency is none, j dependency is `prior` +# make some dependencies manually for now: +s0 = Statement("0", ["i", "j"]) +s1 = Statement("1", ["i", "j"]) +s2 = Statement("2", ["i", "j"]) +#dep_s1_i = Dependency(s0, s1, DT.NONE, "i") +#dep_s1_j = Dependency(s0, s1, DT.PRIOR, "j") +#insn_to_deps = {"0":[], "1":[dep_s1_i, dep_s1_j], "2":[]} + deps = [ - Dependency(1, 0, DT.SAME, 'j'), - #Dependency(1, 0, DT.NONE, 'j'), + Dependency(s0, s1, DT.NONE, "i"), + Dependency(s0, s1, DT.PRIOR, "j"), ] print("----------------------------------------------------------------------") print([str(dep) for dep in deps]) constraint_map = create_dependency_constraint( - deps, dep_constraint_vars) + deps, + all_inames_ordered, + statement_var, + domain_union, + sched.lp_insnid_to_int_sid, + ) print("constraint map:") print(prettier_map_string(constraint_map)) print("space (statment instances -> statement instances):") print(constraint_map.space) -# TODO left off here, these spaces need to match and they don't - -#assert constraint_map.space == SIO_symbolic_valid.space -#1/0 +assert constraint_map.space == SIO_symbolic_valid.space print("is valid sched valid? 
constraint map subset of SIO?") print(constraint_map.is_subset(SIO_symbolic_valid)) - - ''' all_inames = ['i', 'j'] iname_params = ['p0', 'p1'] -- GitLab From f368ed8a8c4db23fb99961775f1eb09e8f90d22b Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 27 May 2019 20:39:18 -0500 Subject: [PATCH 024/415] moved no-longer-used function to utils for now --- dependency.py | 21 --------------------- sched_check_utils.py | 18 ++++++++++++++++++ schedule.py | 7 ------- 3 files changed, 18 insertions(+), 28 deletions(-) diff --git a/dependency.py b/dependency.py index 491e296ee..f85ed2fdc 100644 --- a/dependency.py +++ b/dependency.py @@ -61,27 +61,6 @@ def _make_islvars_with_var_primes(var_names, param_names): var_names+append_apostrophes(var_names), param_names) -def _create_bounded_set_for_dependency_constraints( - var_names, param_names, upper_bounds): - - # TODO assumes lower bound is zero - islvars = _make_islvars_with_var_primes(var_names, param_names) - - bounded_set = islvars[0].eq_set(islvars[0]) # initialize to True - - for v, p, b in zip(var_names, param_names, upper_bounds): - # create constraint 0 <= v,v'< p = b - v_prime = v+"'" - bounded_set = bounded_set \ - & islvars[v].lt_set(islvars[p]) \ - & islvars[v_prime].lt_set(islvars[p]) \ - & (islvars[0]-1).lt_set(islvars[v]) \ - & (islvars[0]-1).lt_set(islvars[v_prime]) \ - & islvars[p].eq_set(islvars[0]+b) - - return bounded_set - - def create_dependency_constraint( dependencies, all_inames_ordered, diff --git a/sched_check_utils.py b/sched_check_utils.py index 8bb95c7d2..e1b9eb7cb 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -51,4 +51,22 @@ def add_missing_set_dims_to_map_indims(islmap, islset): return new_map +def _create_positive_set_with_bounds( + var_names, param_names, upper_bounds): + # TODO assumes lower bound is zero + islvars = _make_islvars_with_var_primes(var_names, param_names) + + bounded_set = islvars[0].eq_set(islvars[0]) # initialize to True + + for v, p, b in zip(var_names, param_names, upper_bounds): + # create constraint 0 <= v,v'< p = b + v_prime = v+"'" + bounded_set = bounded_set \ + & islvars[v].lt_set(islvars[p]) \ + & islvars[v_prime].lt_set(islvars[p]) \ + & (islvars[0]-1).lt_set(islvars[v]) \ + & (islvars[0]-1).lt_set(islvars[v_prime]) \ + & islvars[p].eq_set(islvars[0]+b) + + return bounded_set diff --git a/schedule.py b/schedule.py index c9c7a0491..1cf2a93a1 100644 --- a/schedule.py +++ b/schedule.py @@ -275,21 +275,14 @@ class LexSchedule(object): return create_explicit_map_from_tuples(explicit_lex_map_pairs, lex_space_explicit) - #def get_lex_map_symbolic(self, var_bounds_dict): - #def get_lex_map_symbolic(self, extra_params): def get_lex_map_symbolic(self): from schedule_checker.lexicographic_order_map import ( create_symbolic_lex_mapping, ) n_dims = self.max_lex_dims() - #lex_dim_bounds = list(zip(self.get_min_lex_dim_vals(var_bounds_dict), - # self.get_max_lex_dim_bounds(var_bounds_dict))) - #return create_symbolic_lex_mapping(n_dims, dim_bound_vals=lex_dim_bounds) - #return create_symbolic_lex_mapping(n_dims, extra_params=extra_params) return create_symbolic_lex_mapping(n_dims) - #def get_isl_map(self): def get_isl_map_str(self): map_str = "{" for state_inst, lex in self.lex_schedule.items(): -- GitLab From cefc8d6e7c90c35355b14850655e5571edc8a54a Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 27 May 2019 20:48:10 -0500 Subject: [PATCH 025/415] moved append_apostrophes() and make_islvars_with_var_primes() into utils --- dependency.py | 24 +++++++----------------- 
example_dependency_checking.py | 7 ++++--- example_lex_map_creation.py | 1 - lexicographic_order_map.py | 2 +- sched_check_utils.py | 14 +++++++++++++- schedule.py | 2 +- 6 files changed, 26 insertions(+), 24 deletions(-) diff --git a/dependency.py b/dependency.py index f85ed2fdc..2c9884f3f 100644 --- a/dependency.py +++ b/dependency.py @@ -30,13 +30,6 @@ class Dependency(object): self.dep_type) -def append_apostrophes(strings): - if not isinstance(strings, list): - raise ValueError("append_apostrophes did not receive a list") - else: - return [s+"'" for s in strings] - - def create_equality_conjunction_set(names0, names1, islvars): # initialize set with constraint that is always true @@ -56,11 +49,6 @@ def _convert_constraint_set_to_map(constraint_set, mv_count, src_position=None): return constraint_map.move_dims(dim_type.out, 0, dim_type.in_, mv_count, mv_count) -def _make_islvars_with_var_primes(var_names, param_names): - return isl.make_zero_and_vars( - var_names+append_apostrophes(var_names), param_names) - - def create_dependency_constraint( dependencies, all_inames_ordered, @@ -68,6 +56,12 @@ def create_dependency_constraint( domain_constraint_set, sid_to_int, ): + from schedule_checker.sched_check_utils import ( + make_islvars_with_var_primes, + append_apostrophes, + add_and_name_dims_to_isl_set, + create_new_set_with_primes, + ) # This function uses the dependencies given to create the following constraint: # Statement [s,i,j] comes before statement [s',i',j'] iff @@ -84,7 +78,7 @@ def create_dependency_constraint( statement_var_prime = statement_var+"'" DT = DependencyType - islvars = _make_islvars_with_var_primes( + islvars = make_islvars_with_var_primes( [statement_var]+all_inames_ordered, []) @@ -121,12 +115,8 @@ def create_dependency_constraint( all_constraints_map = _convert_constraint_set_to_map(all_constraints_set, len(all_inames_ordered)+1) - from schedule_checker.sched_check_utils import create_new_set_with_primes range_constraint_set = create_new_set_with_primes(domain_constraint_set) - from schedule_checker.sched_check_utils import ( - add_and_name_dims_to_isl_set - ) new_pose = 0 # insert 's' at beginning # TODO don't hardcode 's' domain_to_intersect = add_and_name_dims_to_isl_set( domain_constraint_set, isl.dim_type.out, ["s"], new_pose) # TODO don't hardcode 's' diff --git a/example_dependency_checking.py b/example_dependency_checking.py index 0b3444c49..e394e779b 100644 --- a/example_dependency_checking.py +++ b/example_dependency_checking.py @@ -4,7 +4,6 @@ from schedule_checker.dependency import ( Dependency, DependencyType as DT, create_dependency_constraint, - append_apostrophes, ) from schedule_checker.lexicographic_order_map import ( make_lex_mapping_tuple_pairs, @@ -14,8 +13,10 @@ from schedule_checker.lexicographic_order_map import ( get_space, ) from schedule_checker.schedule import Statement -from schedule_checker.sched_check_utils import prettier_map_string - +from schedule_checker.sched_check_utils import ( + prettier_map_string, + append_apostrophes, +) # make example kernel knl = lp.make_kernel( diff --git a/example_lex_map_creation.py b/example_lex_map_creation.py index d26b268a0..00c26e1b3 100644 --- a/example_lex_map_creation.py +++ b/example_lex_map_creation.py @@ -3,7 +3,6 @@ from schedule_checker.dependency import ( Dependency, DependencyType as DT, create_dependency_constraint, - append_apostrophes, ) from schedule_checker.lexicographic_order_map import ( make_lex_mapping_tuple_pairs, diff --git a/lexicographic_order_map.py 
b/lexicographic_order_map.py index 25ab708b9..41377485f 100644 --- a/lexicographic_order_map.py +++ b/lexicographic_order_map.py @@ -141,7 +141,7 @@ def create_symbolic_lex_mapping( if in_names is None: in_names = ["i%s" % (i) for i in range(n_dims)] if out_names is None: - from schedule_checker.dependency import append_apostrophes + from schedule_checker.sched_check_utils import append_apostrophes out_names = append_apostrophes(in_names) #if dim_bound_vals is None: # raise NotImplementedError("dim_bound_vals cannot be None") diff --git a/sched_check_utils.py b/sched_check_utils.py index e1b9eb7cb..37fb3843a 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -51,11 +51,16 @@ def add_missing_set_dims_to_map_indims(islmap, islset): return new_map +def make_islvars_with_var_primes(var_names, param_names): + return isl.make_zero_and_vars( + var_names+append_apostrophes(var_names), param_names) + + def _create_positive_set_with_bounds( var_names, param_names, upper_bounds): # TODO assumes lower bound is zero - islvars = _make_islvars_with_var_primes(var_names, param_names) + islvars = make_islvars_with_var_primes(var_names, param_names) bounded_set = islvars[0].eq_set(islvars[0]) # initialize to True @@ -70,3 +75,10 @@ def _create_positive_set_with_bounds( & islvars[p].eq_set(islvars[0]+b) return bounded_set + + +def append_apostrophes(strings): + if not isinstance(strings, list): + raise ValueError("append_apostrophes did not receive a list") + else: + return [s+"'" for s in strings] diff --git a/schedule.py b/schedule.py index 1cf2a93a1..d1613e7bf 100644 --- a/schedule.py +++ b/schedule.py @@ -258,7 +258,7 @@ class LexSchedule(object): create_explicit_map_from_tuples, get_space, ) - from schedule_checker.dependency import append_apostrophes + from schedule_checker.sched_check_utils import append_apostrophes # TODO lower bound may not be zero lex_dim_bounds = list(zip(self.get_min_lex_dim_vals(), -- GitLab From 3b8a3c4dcab7a1501e4b5c150ec2adad8f40d741 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 28 May 2019 02:20:57 -0500 Subject: [PATCH 026/415] fixed bugs in loopy schedule traversal for lex schedule creation, and removed final unnecessary iname bound gathering/usage --- example_schedule_creation.py | 72 +++++---------------------- schedule.py | 95 +++++------------------------------- 2 files changed, 25 insertions(+), 142 deletions(-) diff --git a/example_schedule_creation.py b/example_schedule_creation.py index f8aa90788..d8d2db29d 100644 --- a/example_schedule_creation.py +++ b/example_schedule_creation.py @@ -15,67 +15,27 @@ from schedule_checker.lexicographic_order_map import ( from schedule_checker.sched_check_utils import prettier_map_string -def get_iname_bounds_dict(knl, all_inames_ordered, _set_arbitrary_bounds=None): - # TODO don't require explicit bounds - - if _set_arbitrary_bounds: - return dict((iname, _set_arbitrary_bounds) for iname in all_inames_ordered) - - from loopy.symbolic import aff_to_expr - from loopy.isl_helpers import static_max_of_pw_aff - from loopy.isl_helpers import static_value_of_pw_aff - - def _param_in_expr_hack(expr, all_params): - expr_str = str(expr) - for p in all_params: - if p in expr_str: - return p - return None - - bounds = {} - all_params = knl.all_params() - for iname in all_inames_ordered: - #bounds_record = knl.get_iname_bounds(iname, constants_only=True) - bounds_record = knl.get_iname_bounds(iname) - (_, iname_min_aff), = bounds_record.lower_bound_pw_aff.get_pieces() - (_, iname_max_aff), = 
bounds_record.upper_bound_pw_aff.get_pieces() - iname_min_aff = aff_to_expr(iname_min_aff) - iname_max_aff = aff_to_expr(iname_max_aff) - param_bound_min = _param_in_expr_hack(iname_min_aff, all_params) - param_bound_max = _param_in_expr_hack(iname_max_aff, all_params) - - if param_bound_min is None: - param_bound_min = int(iname_min_aff) # TODO what if this fails? - if param_bound_max is None: - param_bound_max = int(iname_max_aff)+1 # TODO what if this fails? - - dom = knl.get_inames_domain(iname) - - #int_bounds[iname] = [ - bounds[iname] = [ - param_bound_min, - param_bound_max, - ] - #assert all(isinstance(i,int) for i in int_bounds[iname]) - return bounds - # make example kernel knl = lp.make_kernel( - #"{[i,j]: 0<=i,j<2}", #"{[i,j]: 0<=i<2 and 1<=j<3}", #"{[i,j]: pi_lo<=itemp = b[i,j] {id=0}", "a[i,j] = temp + 1 {id=1,dep=0}", - "c[i,j] = d[i,j] {id=2}" + "c[i,j] = d[i,j] {id=2}", + "out[t,tt] = in[t,tt] {id=3}", ], - name="example", + name="example_blah", #assumptions="pi_lo,pi_up,pj_lo,pj_up >= 1", - assumptions="pi_up,pj_up >= 1", + #assumptions="pi_up,pj_up >= 1", + #assumptions="pi_up,pj_up,pt_up,pt_lo >= 1 and pt_lo < pt_up", + assumptions="pi_up,pj_up,pt_up,pt_lo >= 1 and pt_lo < pt_up", lang_version=(2018, 2) ) -knl = lp.add_and_infer_dtypes(knl, {"b": np.float32, "d": np.float32}) +#knl = lp.add_and_infer_dtypes(knl, {"b": np.float32, "d": np.float32}) +knl = lp.add_and_infer_dtypes(knl, {"b": np.float32, "d": np.float32, "in": np.float32}) knl = lp.tag_inames(knl, {"i": "l.0"}) knl = lp.preprocess_kernel(knl) knl = lp.get_one_scheduled_kernel(knl) @@ -111,25 +71,19 @@ def get_iname_concurrency_dict(inames, knl): return conc_dict # Get schedule ------------------------------------------------------ -iname_bounds = get_iname_bounds_dict(knl, all_inames_ordered) - domains = {} for iname in all_inames_ordered: domains[iname] = knl.get_inames_domain(iname) print("domains:") print(domains) -print("iname bounds:") -print(iname_bounds) -sched = LexSchedule(knl, iname_bounds) # TODO do we really need iname bounds here? 
-#print("LexSchedule before processing:") -#print(sched) +sched = LexSchedule(knl) +print("LexSchedule before processing:") +print(sched) -#example_sched_explicit = sched.enumerate_symbolic_inames_and_create_explicit_isl_map(iname_bounds) sched_map_symbolic = sched.create_symbolic_isl_map(domains) print("LexSchedule after processing:") print(sched) - # ------------------------------------------------------------------- print("LexSched (valid):") diff --git a/schedule.py b/schedule.py index d1613e7bf..34ce1553f 100644 --- a/schedule.py +++ b/schedule.py @@ -47,39 +47,33 @@ class LexSchedule(object): def __init__( self, knl, - iname_bounds, ): self.lex_schedule = OrderedDict() # statement instance: lex point self.inames_enumerated = [] # symbolic inames in sched that have been enumerated into explicit statement instances self.inames_not_enumerated = [] # TODO better way to do this self.lp_insnid_to_int_sid = {} + assert not any(iname == 's' for iname in knl.all_inames()) from loopy.schedule import (EnterLoop, LeaveLoop, Barrier, RunInstruction) - cur_nest_lex_prefix = [] + next_insn_lex_pt = [0] + # TODO assumes perfect loop nesting for sched_item in knl.schedule: if isinstance(sched_item, EnterLoop): iname = sched_item.iname - #conc_dict = get_iname_concurrency_dict([iname], knl) - #print("EnterLoop: %s" % (conc_dict)) if self: - cur_nest_lex_prefix.append(self.get_last_lex_pt()[-1]) - else: - cur_nest_lex_prefix.append(0) - cur_nest_lex_prefix.append(iname) + next_insn_lex_pt[-1] = next_insn_lex_pt[-1]+1 + next_insn_lex_pt.append(iname) + next_insn_lex_pt.append(0) elif isinstance(sched_item, LeaveLoop): - #conc_dict = get_iname_concurrency_dict([sched_item.iname], knl) - #print("LeaveLoop: %s" % (conc_dict)) - cur_nest_lex_prefix.pop() # pop loop variable - cur_nest_lex_prefix.pop() # pop insn ct variable + next_insn_lex_pt.pop() + next_insn_lex_pt.pop() + next_insn_lex_pt[-1] = next_insn_lex_pt[-1]+1 elif isinstance(sched_item, RunInstruction): self.add_new_lp_insnid(sched_item.insn_id) insn_id_int = self.lp_insnid_to_int_sid[sched_item.insn_id] - #inames = knl.id_to_insn[insn_id].within_inames - #conc_dict = get_iname_concurrency_dict(inames, knl) - #print("RunInstruction: id: %s; inames: %s" % (sched_item.insn_id, conc_dict)) - self.append_item( - (insn_id_int,), - cur_nest_lex_prefix + [self.get_next_lex_val_in_series(cur_nest_lex_prefix, iname_bounds)]) + + self.append_item((insn_id_int,), next_insn_lex_pt[:]) + next_insn_lex_pt[-1] = next_insn_lex_pt[-1] + 1 elif isinstance(sched_item, Barrier): pass else: @@ -141,20 +135,7 @@ class LexSchedule(object): from schedule_checker.lexicographic_order_map import get_space return get_space(params_sched, in_names_sched, out_names_sched) - #def get_space_for_symbolic_sched(self, iname_bounds): def get_space_for_symbolic_sched(self): - """ - iname_bound_params = [] - for iname in self.inames_not_enumerated: - lo, up = iname_bounds[iname] - if not isinstance(lo, int): - #iname_bound_params.append("p"+iname+"up") - iname_bound_params.append(lo) - if not isinstance(up, int): - #iname_bound_params.append("p"+iname+"up") - iname_bound_params.append(up) - """ - #params_sched = ["ps"] + iname_bound_params params_sched = [] in_names_sched = ["s"] + self.inames_not_enumerated out_names_sched = ["l"+str(i) for i in range(self.max_lex_dims())] @@ -194,22 +175,6 @@ class LexSchedule(object): def get_last_lex_pt(self): return self.lex_schedule[self.get_last_schedule_item()] - def get_next_lex_val_in_series(self, cur_nest_lex_prefix, iname_bounds): - if not 
self.lex_schedule: - return 0 - last_lex_pt = self.get_last_lex_pt() - #print(last_lex_pt) - if len(last_lex_pt) == len(cur_nest_lex_prefix) + 1: - # we're still in same loop, increment current lex dim val - return last_lex_pt[-1] + 1 - elif len(last_lex_pt) > len(cur_nest_lex_prefix) + 1: - # we just ended one or more loops, increment appropriate lex dim val - return last_lex_pt[len(cur_nest_lex_prefix)] + 1 - else: # len(last_lex_pt) < cur_nest_lex_prefix + 1: - # we just entered one or more loops - #return 0 - return iname_bounds[cur_nest_lex_prefix[-1]][0] - def create_explicit_isl_map(self, sched_space): from schedule_checker.lexicographic_order_map import create_explicit_map_from_tuples return create_explicit_map_from_tuples(list(self.items()), sched_space) @@ -323,39 +288,3 @@ class LexSchedule(object): def __str__(self): return str(list(self.lex_schedule.items())) - # TODO remove after stripping useful parts: - """ - def add_run_instructions_within_loop_nesting( - self, - insn_ids_ordered, - nest_order, # sequential lex dims in nest order (other lex dims assumed parallel) - iname_bounds, # dict w/bounds for sequential lex dims - concurrent_inames, - ): - # TODO don't pass explicit iname bounds, get them from kernel - - # TODO for now, assuming loop nestings are not re-encountered - - # create a lex dim for this set of (sequential) insns - self.add_lex_dim("s"+"".join(str(i) for i in insn_ids_ordered)) - - nested_iname_bounds_ordered = [iname_bounds[i] for i in nest_order] - import itertools - all_iname_val_sets = list( - itertools.product(*[range(b) for b in nested_iname_bounds_ordered])) - #TODO is there an order guarantee with product? - - for n_insn, insn_id in enumerate(insn_ids_ordered): # for each statement - st = Statement(insn_id, concurrent_inames+nest_order) - new_st_instances = [] - for iname_vals in all_iname_val_sets: - iname_vals = list(iname_vals) - # TODO handle concurrent inames - concurrent_iname_vals = [-1 for iname in range(len(concurrent_inames))] - st_i = StatementInstance( - st, - dict(zip(concurrent_inames+nest_order, - concurrent_iname_vals+iname_vals))) - self.lex_schedule[st_i] = iname_vals+[n_insn] - """ - -- GitLab From 1a8c83b1555979811e9a1862a1dde23412fefdd6 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 28 May 2019 05:48:21 -0500 Subject: [PATCH 027/415] removed get_dim_for_isl_space_var() because found redundant islpy function; improved add_missing_set_dims_to_map_indims() (error when names out of order); added _get_knl_domain_for_sched_checking() and order_var_names_to_match_islset() --- sched_check_utils.py | 47 +++++++++++++++++++++++++++++--------------- 1 file changed, 31 insertions(+), 16 deletions(-) diff --git a/sched_check_utils.py b/sched_check_utils.py index 37fb3843a..3b4a28d0d 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -14,10 +14,7 @@ def get_islvars_from_space(space): out_names = space.get_var_names(isl.dim_type.out) return isl.make_zero_and_vars(in_names+out_names, param_names) -def get_dim_for_isl_space_var(space, dim_type, var): - return space.get_var_names(dim_type).index(param) - -def add_and_name_dims_to_isl_set(isl_set, dim_type, names, new_pose_start): +def add_dims_to_isl_set(isl_set, dim_type, names, new_pose_start): new_set = isl_set.insert_dims(dim_type, new_pose_start, len(names)).set_dim_name(dim_type, new_pose_start, names[0]) for i, name in enumerate(names[1:]): new_set = new_set.set_dim_name(dim_type, new_pose_start+1+i, name) @@ -33,21 +30,22 @@ def add_missing_set_dims_to_map_indims(islmap, 
islset): new_map = islmap.copy() for i in range(islset.n_dim()): new_dim_name = islset.get_dim_name(isl.dim_type.out, i) - - old_map_in_names = new_map.get_var_names(isl.dim_type.in_) - if len(old_map_in_names) > i and old_map_in_names[i] == new_dim_name: - continue - else: + # does new_dim_name already exist in map? + dim_idx = new_map.find_dim_by_name(isl.dim_type.in_, new_dim_name) + if dim_idx == -1: + # new map needs dim, insert it new_map = new_map.insert_dims(isl.dim_type.in_, i, 1) new_map = new_map.set_dim_name(isl.dim_type.in_, i, new_dim_name) - """ - old_map_out_names = new_map.get_var_names(isl.dim_type.out) - if len(old_map_out_names) > i and old_map_out_names[i] == new_dim_name: - continue else: - new_map = new_map.insert_dims(isl.dim_type.out, i, 1) - new_map = new_map.set_dim_name(isl.dim_type.out, i, new_dim_name) - """ + # new_map already has new_dim_name + if dim_idx == i: + # and it's already in the right spot + continue + else: + # move it + # TODO how do we move these? move_dims doesn't work for same dim_type + print("%s not in right spot" % (new_dim_name)) + raise ValueError("(this should not happen)") return new_map @@ -82,3 +80,20 @@ def append_apostrophes(strings): raise ValueError("append_apostrophes did not receive a list") else: return [s+"'" for s in strings] + + +def _get_knl_domain_for_sched_checking(knl): + all_inames = list(knl.all_inames()) + domain_union = knl.get_inames_domain(all_inames[0]) + for iname in all_inames[1:]: + domain_union = domain_union.union(knl.get_inames_domain(iname)) + return domain_union + + +def order_var_names_to_match_islset(var_names, islset): + name_order = islset.get_var_names(isl.dim_type.out) + names_ordered_to_match_islset = [] + for v in name_order: + if v in var_names: + names_ordered_to_match_islset.append(v) + return names_ordered_to_match_islset -- GitLab From 4014ed06c4a38eb3f231cb9962a688898016e2e1 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 28 May 2019 05:49:07 -0500 Subject: [PATCH 028/415] make sure iname ordering in lex sched matches iname ordering in domain; also new example knl (mm) for sched testing --- dependency.py | 6 +-- example_schedule_creation.py | 100 ++++++++++++++++++++--------------- lexicographic_order_map.py | 2 +- schedule.py | 25 +++++---- 4 files changed, 73 insertions(+), 60 deletions(-) diff --git a/dependency.py b/dependency.py index 2c9884f3f..4eb518809 100644 --- a/dependency.py +++ b/dependency.py @@ -59,7 +59,7 @@ def create_dependency_constraint( from schedule_checker.sched_check_utils import ( make_islvars_with_var_primes, append_apostrophes, - add_and_name_dims_to_isl_set, + add_dims_to_isl_set, create_new_set_with_primes, ) # This function uses the dependencies given to create the following constraint: @@ -118,9 +118,9 @@ def create_dependency_constraint( range_constraint_set = create_new_set_with_primes(domain_constraint_set) new_pose = 0 # insert 's' at beginning # TODO don't hardcode 's' - domain_to_intersect = add_and_name_dims_to_isl_set( + domain_to_intersect = add_dims_to_isl_set( domain_constraint_set, isl.dim_type.out, ["s"], new_pose) # TODO don't hardcode 's' - range_to_intersect = add_and_name_dims_to_isl_set( + range_to_intersect = add_dims_to_isl_set( range_constraint_set, isl.dim_type.out, ["s'"], new_pose) # TODO don't hardcode 's' map_with_loop_domain_constraints = all_constraints_map.intersect_domain(domain_to_intersect).intersect_range(range_to_intersect) diff --git a/example_schedule_creation.py b/example_schedule_creation.py index d8d2db29d..c231227b9 
100644 --- a/example_schedule_creation.py +++ b/example_schedule_creation.py @@ -12,36 +12,60 @@ from schedule_checker.lexicographic_order_map import ( get_statement_ordering_map, get_space, ) -from schedule_checker.sched_check_utils import prettier_map_string - +from schedule_checker.sched_check_utils import ( + prettier_map_string, + _get_knl_domain_for_sched_checking, + order_var_names_to_match_islset, +) -# make example kernel -knl = lp.make_kernel( - #"{[i,j]: 0<=i<2 and 1<=j<3}", - #"{[i,j]: pi_lo<=itemp = b[i,j] {id=0}", - "a[i,j] = temp + 1 {id=1,dep=0}", - "c[i,j] = d[i,j] {id=2}", - "out[t,tt] = in[t,tt] {id=3}", - ], - name="example_blah", - #assumptions="pi_lo,pi_up,pj_lo,pj_up >= 1", - #assumptions="pi_up,pj_up >= 1", - #assumptions="pi_up,pj_up,pt_up,pt_lo >= 1 and pt_lo < pt_up", - assumptions="pi_up,pj_up,pt_up,pt_lo >= 1 and pt_lo < pt_up", - lang_version=(2018, 2) - ) -#knl = lp.add_and_infer_dtypes(knl, {"b": np.float32, "d": np.float32}) -knl = lp.add_and_infer_dtypes(knl, {"b": np.float32, "d": np.float32, "in": np.float32}) -knl = lp.tag_inames(knl, {"i": "l.0"}) -knl = lp.preprocess_kernel(knl) -knl = lp.get_one_scheduled_kernel(knl) +knl_choice = "example" +#knl_choice = "matmul" + +if knl_choice == "example": + # make example kernel + knl = lp.make_kernel( + #"{[i,j]: 0<=i<2 and 1<=j<3}", + #"{[i,j]: pi_lo<=itemp = b[i,j] {id=0}", + "a[i,j] = temp + 1 {id=1,dep=0}", + "c[i,j] = d[i,j] {id=2}", + "out[t,tt] = in[t,tt] {id=3}", + ], + name="example", + #assumptions="pi_lo,pi_up,pj_lo,pj_up >= 1", + #assumptions="pi_up,pj_up >= 1", + #assumptions="pi_up,pj_up,pt_up,pt_lo >= 1 and pt_lo < pt_up", + assumptions="pi_up,pj_up,pt_up,pt_lo >= 1 and pt_lo < pt_up", + lang_version=(2018, 2) + ) + #knl = lp.add_and_infer_dtypes(knl, {"b": np.float32, "d": np.float32}) + knl = lp.add_and_infer_dtypes(knl, {"b": np.float32, "d": np.float32, "in": np.float32}) + knl = lp.tag_inames(knl, {"i": "l.0"}) + knl = lp.preprocess_kernel(knl) + knl = lp.get_one_scheduled_kernel(knl) +elif knl_choice == "matmul": + bsize = 16 + knl = lp.make_kernel( + "{[i,k,j]: 0<=i lex time):") print(sched_map_symbolic.space) @@ -98,8 +120,6 @@ print("------------------------------------------------------------------------- lex_map_symbolic = sched.get_lex_map_symbolic() -#print("lex map explicit:") -#print(prettier_map_string(lex_map_explicit)) print("lex map symbolic:") print(prettier_map_string(lex_map_symbolic)) print("space (lex time -> lex time):") @@ -124,12 +144,6 @@ from schedule_checker.dependency import ( statement_var = 's' -domains_list = list(domains.values()) -domain_union = domains_list[0] -#TODO is union the right thing to do here? 
-for dom in domains_list[1:]: - domain_union = domain_union.union(dom) - # i is parallel, suppose we want to enforce the following: # for a given i, statement 0 happens before statement 1 # i dependency is none, j dependency is `prior` diff --git a/lexicographic_order_map.py b/lexicographic_order_map.py index 41377485f..cfbe938e2 100644 --- a/lexicographic_order_map.py +++ b/lexicographic_order_map.py @@ -83,8 +83,8 @@ def create_symbolic_map_from_tuples(tuple_pairs, space, domain_to_intersect): len(in_names), len(out_names)) from schedule_checker.sched_check_utils import add_missing_set_dims_to_map_indims + # TODO make sure these always align properly result_map = add_missing_set_dims_to_map_indims(result_map, domain_to_intersect) - return result_map.intersect_domain(domain_to_intersect) diff --git a/schedule.py b/schedule.py index 34ce1553f..481c7125f 100644 --- a/schedule.py +++ b/schedule.py @@ -184,24 +184,23 @@ class LexSchedule(object): sched_space = self.get_space_for_explicit_sched() return self.create_explicit_isl_map(sched_space) - def create_symbolic_isl_map(self, domains): + def create_symbolic_isl_map(self, domain, inames): + # TODO if inames will always match domain out vars, don't need to pass them from schedule_checker.lexicographic_order_map import ( create_symbolic_map_from_tuples, ) from schedule_checker.sched_check_utils import ( - add_and_name_dims_to_isl_set + add_dims_to_isl_set ) - all_inames = list(domains.keys()) - self.add_symbolic_inames_to_statement_instances(all_inames) + domain_iname_order = domain.get_var_names(isl.dim_type.out) + inames_ordered_to_match_domain = [] + for iname in domain_iname_order: + if iname in inames: + inames_ordered_to_match_domain.append(iname) + self.add_symbolic_inames_to_statement_instances( + inames_ordered_to_match_domain) sched_space = self.get_space_for_symbolic_sched() - # intersect all domains for symbolic (non-enumerated) - # inames found in statement instances - domain_intersection = domains[self.inames_not_enumerated[0]] - #TODO what if self.inames_not_enumerated is empty? - for iname in self.inames_not_enumerated[1:]: - domain_intersection = domain_intersection.intersect(domains[iname]) - # TODO maybe don't project this out, constraints may involve any iname later... 
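# Illustrative note (not from the patch): the iname reordering added above keys
# off the domain's own dim order, which isl reports directly; toy set:
import islpy as isl
toy_dom = isl.Set("[n] -> { [j, i] : 0 <= i, j < n }")
print(toy_dom.get_var_names(isl.dim_type.out))  # ['j', 'i']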
""" domain_stripped = domain_intersection.project_out_except( @@ -210,8 +209,8 @@ class LexSchedule(object): ) """ new_pose = 0 # insert 's' at beginning # TODO don't hardcode 's' - domain_to_intersect = add_and_name_dims_to_isl_set( - domain_intersection, isl.dim_type.out, ['s'], new_pose) # TODO don't hardcode 's' + domain_to_intersect = add_dims_to_isl_set( + domain, isl.dim_type.out, ['s'], new_pose) # TODO don't hardcode 's' return create_symbolic_map_from_tuples( list(self.items()), sched_space, domain_to_intersect) -- GitLab From fb412e90c5bb36c4eda9521399831d32b8a50fb0 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 28 May 2019 07:32:34 -0500 Subject: [PATCH 029/415] made mechanism for gathering dependencies from legacy loopy kernels --- dependency.py | 44 ++++++++++++++++++ example_lex_map_creation.py | 1 - example_schedule_creation.py | 87 +++++++++++++++++++----------------- 3 files changed, 90 insertions(+), 42 deletions(-) diff --git a/dependency.py b/dependency.py index 4eb518809..0f5a0cb48 100644 --- a/dependency.py +++ b/dependency.py @@ -49,6 +49,7 @@ def _convert_constraint_set_to_map(constraint_set, mv_count, src_position=None): return constraint_map.move_dims(dim_type.out, 0, dim_type.in_, mv_count, mv_count) +# TODO make this take in a set of dep sets and intersect all the constraints def create_dependency_constraint( dependencies, all_inames_ordered, @@ -127,3 +128,46 @@ def create_dependency_constraint( #blah2 = isl.Map("[pi_up, pj_up] -> { [s = 1, i, j] -> [s' = 0, i' = i, j'] : 0 <= i < pi_up and 0 <= j < pj_up and j' > j and 0 <= j' < pj_up}") #assert blah2 == map_with_loop_domain_constraints return map_with_loop_domain_constraints + + +def get_concurrent_inames(knl): + from loopy.kernel.data import LocalIndexTag, GroupIndexTag + conc_inames = set() + all_inames = knl.all_inames() + for iname in all_inames: + iname_tags = knl.iname_to_tags.get(iname, None) + if iname_tags and any( + isinstance(tag, (LocalIndexTag, GroupIndexTag)) for tag in iname_tags): + conc_inames.add(iname) + return conc_inames, all_inames-conc_inames + + +def create_dependencies_from_legacy_knl(knl): + from schedule_checker.schedule import Statement + from schedule_checker.dependency import ( + Dependency, + DependencyType as DT, + ) + conc_inames, non_conc_inames = get_concurrent_inames(knl) + all_inames = list(knl.all_inames()) + dep_sets = [] + for insn_after in knl.instructions: + for insn_before_id in insn_after.depends_on: + dep_set = [] + insn_before = knl.id_to_insn[insn_before_id] + insn_before_inames = insn_before.within_inames + insn_after_inames = insn_after.within_inames + #print("%s (%s) -> %s (%s)" % ( + # insn_before.id, insn_before_inames, insn_after.id, insn_after_inames)) + shared_inames = insn_before_inames & insn_after_inames + shared_conc_inames = shared_inames & conc_inames + shared_non_conc_inames = shared_inames & non_conc_inames + #print("shared conc/non-conc %s/%s" % (shared_conc_inames, shared_non_conc_inames)) + s_before = Statement(insn_before.id, all_inames) + s_after = Statement(insn_after.id, all_inames) + for non_conc_iname in shared_non_conc_inames: + dep_set.append(Dependency(s_before, s_after, DT.SAME, non_conc_iname)) + for conc_iname in shared_conc_inames: + dep_set.append(Dependency(s_before, s_after, DT.ALL, conc_iname)) + dep_sets.append(dep_set) + return dep_sets diff --git a/example_lex_map_creation.py b/example_lex_map_creation.py index 00c26e1b3..d94d4b313 100644 --- a/example_lex_map_creation.py +++ b/example_lex_map_creation.py @@ -2,7 +2,6 
@@ import islpy as isl from schedule_checker.dependency import ( Dependency, DependencyType as DT, - create_dependency_constraint, ) from schedule_checker.lexicographic_order_map import ( make_lex_mapping_tuple_pairs, diff --git a/example_schedule_creation.py b/example_schedule_creation.py index c231227b9..b598ff99f 100644 --- a/example_schedule_creation.py +++ b/example_schedule_creation.py @@ -4,6 +4,8 @@ import numpy as np from schedule_checker.dependency import ( Dependency, DependencyType as DT, + create_dependencies_from_legacy_knl, + create_dependency_constraint, ) from schedule_checker.schedule import Statement, StatementInstance, LexSchedule from schedule_checker.sched_check_utils import prettier_map_string @@ -18,8 +20,8 @@ from schedule_checker.sched_check_utils import ( order_var_names_to_match_islset, ) -knl_choice = "example" -#knl_choice = "matmul" +#knl_choice = "example" +knl_choice = "matmul" if knl_choice == "example": # make example kernel @@ -29,10 +31,10 @@ if knl_choice == "example": #"{[i,j]: 0<=itemp = b[i,j] {id=0}", - "a[i,j] = temp + 1 {id=1,dep=0}", - "c[i,j] = d[i,j] {id=2}", - "out[t,tt] = in[t,tt] {id=3}", + "<>temp = b[i,j] {id=insn_a}", + "a[i,j] = temp + 1 {id=insn_b,dep=insn_a}", + "c[i,j] = d[i,j] {id=insn_c}", + "out[t,tt] = in[t,tt] {id=insn_d}", ], name="example", #assumptions="pi_lo,pi_up,pj_lo,pj_up >= 1", @@ -78,22 +80,6 @@ for sched_item in knl.schedule: print(sched_item) print("="*80) -def get_iname_concurrency_dict(inames, knl): - from loopy.kernel.data import LocalIndexTag, GroupIndexTag - conc_dict = {} - for iname in inames: - iname_tags = knl.iname_to_tags.get(iname, None) - concurrent = False - if iname_tags: - if len(iname_tags) > 1: - 1/0 - else: - iname_tag = list(iname_tags)[0] - if isinstance(iname_tag, (LocalIndexTag, GroupIndexTag)): - concurrent = True - conc_dict[iname] = "concurrent" if concurrent else "sequential" - return conc_dict - # Get schedule ------------------------------------------------------ domain_union = _get_knl_domain_for_sched_checking(knl) @@ -138,12 +124,9 @@ print(prettier_map_string(SIO_symbolic_valid)) print("space (statement instances -> statement instances):") print(SIO_symbolic_valid.space) -from schedule_checker.dependency import ( - create_dependency_constraint, -) -statement_var = 's' +""" # i is parallel, suppose we want to enforce the following: # for a given i, statement 0 happens before statement 1 # i dependency is none, j dependency is `prior` @@ -160,23 +143,45 @@ deps = [ Dependency(s0, s1, DT.NONE, "i"), Dependency(s0, s1, DT.PRIOR, "j"), ] +""" + +#For every shared (between depender and dependee) non-concurrent iname Introduce a same dep +# (Perform voodoo guesswork to determine whether a ‘prior’ dep is needed) +#For every shared (between depender and dependee) concurrent iname Introduce an all dep + +print("----------------------------------------------------------------------") +dep_sets = create_dependencies_from_legacy_knl(knl) +print("Dependency sets:") +for dep_set in dep_sets: + for dep in dep_set: + print(dep) + print("") print("----------------------------------------------------------------------") -print([str(dep) for dep in deps]) -constraint_map = create_dependency_constraint( - deps, - all_inames_ordered, - statement_var, - domain_union, - sched.lp_insnid_to_int_sid, - ) -print("constraint map:") -print(prettier_map_string(constraint_map)) -print("space (statment instances -> statement instances):") -print(constraint_map.space) - -assert constraint_map.space == 
SIO_symbolic_valid.space +print("dict{lp insn id : sched sid int}:") +print(sched.lp_insnid_to_int_sid) +print("----------------------------------------------------------------------") +statement_var = 's' +sched_is_valid = True +for dep_set in dep_sets: + # TODO make create_dep_constraint accept whole set of dep_sets + constraint_map = create_dependency_constraint( + dep_set, + all_inames_ordered, + statement_var, + domain_union, + sched.lp_insnid_to_int_sid, + ) + #print("constraint map:") + #print(prettier_map_string(constraint_map)) + #print("space (statment instances -> statement instances):") + #print(constraint_map.space) + + assert constraint_map.space == SIO_symbolic_valid.space + if not constraint_map.is_subset(SIO_symbolic_valid): + sched_is_valid = False + print("is valid sched valid? constraint map subset of SIO?") -print(constraint_map.is_subset(SIO_symbolic_valid)) +print(sched_is_valid) ''' -- GitLab From 4174d9349dd9fd05bb9bf86928f9490379a13130 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 10 Jun 2019 23:54:11 -0500 Subject: [PATCH 030/415] removed unused code from symbolic lex map creatoin --- lexicographic_order_map.py | 24 +++++------------------- 1 file changed, 5 insertions(+), 19 deletions(-) diff --git a/lexicographic_order_map.py b/lexicographic_order_map.py index cfbe938e2..f4c51f68c 100644 --- a/lexicographic_order_map.py +++ b/lexicographic_order_map.py @@ -81,7 +81,11 @@ def create_symbolic_map_from_tuples(tuple_pairs, space, domain_to_intersect): result_map = result_map.move_dims( dim_type.out, 0, dim_type.in_, len(in_names), len(out_names)) - + """ + result_map_vars_in = result_map.space.get_var_names(isl.dim_type.in_) + domain_stripped = domain_to_intersect.project_out_except(result_map_vars_in, [isl.dim_type.set]) + return result_map.intersect_domain(domain_stripped) + """ from schedule_checker.sched_check_utils import add_missing_set_dims_to_map_indims # TODO make sure these always align properly result_map = add_missing_set_dims_to_map_indims(result_map, domain_to_intersect) @@ -130,39 +134,21 @@ def get_space(param_names, in_names, out_names): # from other things...) 
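# Illustrative sketch (not from the patch): the constraint-building pattern in
# create_symbolic_lex_mapping below, unrolled by hand for n_dims = 2; the dim
# names are arbitrary.
import islpy as isl
lexvars = isl.make_zero_and_vars(["i0", "i1", "o0", "o1"], [])
toy_lex_set = (
    lexvars["i0"].lt_set(lexvars["o0"])
    | (lexvars["i0"].eq_set(lexvars["o0"]) & lexvars["i1"].lt_set(lexvars["o1"])))
toy_lex_map = isl.Map.from_domain(toy_lex_set).move_dims(
    isl.dim_type.out, 0, isl.dim_type.in_, 2, 2)
print(toy_lex_map)  # { [i0, i1] -> [o0, o1] : ... } enforcing lex order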
def create_symbolic_lex_mapping( n_dims, - #param_names=None, in_names=None, out_names=None, - #dim_bound_vals=None, - #extra_params=None, ): - #if param_names is None: - # param_names = [["lo%s" % (i), "up%s" % (i)] for i in range(n_dims)] if in_names is None: in_names = ["i%s" % (i) for i in range(n_dims)] if out_names is None: from schedule_checker.sched_check_utils import append_apostrophes out_names = append_apostrophes(in_names) - #if dim_bound_vals is None: - # raise NotImplementedError("dim_bound_vals cannot be None") - #assert len(in_names) == len(out_names) == len(param_names) == len(dim_bound_vals) == n_dims assert len(in_names) == len(out_names) == n_dims dim_type = isl.dim_type - #from schedule_checker.sched_check_utils import flatten_2d_list - - #params_in_dim_bounds = [] - #for v in flatten_2d_list(dim_bound_vals): - # if not isinstance(v, int): - # params_in_dim_bounds.append(v) islvars = isl.make_zero_and_vars( in_names+out_names, - #flatten_2d_list(param_names)) - #flatten_2d_list(param_names)+params_in_dim_bounds) - #extra_params) []) - # [param for param_pair in param_names for param in param_pair]) # create constraint enforcing lex ordering, e.g., in the 3-dim case: # i0 < o0 or ((i0 = o0) and (i1 < o1)) -- GitLab From c1576ce8eb330264fcda096a9cf1c12dc6942c99 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 10 Jun 2019 23:55:33 -0500 Subject: [PATCH 031/415] added option in sched creation to only include specific insns --- schedule.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/schedule.py b/schedule.py index 481c7125f..a314b8b51 100644 --- a/schedule.py +++ b/schedule.py @@ -47,8 +47,9 @@ class LexSchedule(object): def __init__( self, knl, + include_only_insn_ids=None, ): - self.lex_schedule = OrderedDict() # statement instance: lex point + self.lex_schedule = OrderedDict() # statement instance: lex point self.inames_enumerated = [] # symbolic inames in sched that have been enumerated into explicit statement instances self.inames_not_enumerated = [] # TODO better way to do this self.lp_insnid_to_int_sid = {} @@ -69,11 +70,12 @@ class LexSchedule(object): next_insn_lex_pt.pop() next_insn_lex_pt[-1] = next_insn_lex_pt[-1]+1 elif isinstance(sched_item, RunInstruction): - self.add_new_lp_insnid(sched_item.insn_id) - insn_id_int = self.lp_insnid_to_int_sid[sched_item.insn_id] + if include_only_insn_ids is None or sched_item.insn_id in include_only_insn_ids: + self.add_new_lp_insnid(sched_item.insn_id) + insn_id_int = self.lp_insnid_to_int_sid[sched_item.insn_id] - self.append_item((insn_id_int,), next_insn_lex_pt[:]) - next_insn_lex_pt[-1] = next_insn_lex_pt[-1] + 1 + self.append_item((insn_id_int,), next_insn_lex_pt[:]) + next_insn_lex_pt[-1] = next_insn_lex_pt[-1] + 1 elif isinstance(sched_item, Barrier): pass else: -- GitLab From 2d6c1c30b83e35587e0630d874911388ab4785c4 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 10 Jun 2019 23:56:41 -0500 Subject: [PATCH 032/415] added all_iname_domains_equal(knl) fn --- sched_check_utils.py | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/sched_check_utils.py b/sched_check_utils.py index 3b4a28d0d..48109a301 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -82,7 +82,14 @@ def append_apostrophes(strings): return [s+"'" for s in strings] -def _get_knl_domain_for_sched_checking(knl): +def _union_of_sets(set_list): + union = set_list[0] + for s in set_list[1:]: + union = union.union(s) + return union + + +def _union_inames_domains(knl): 
all_inames = list(knl.all_inames()) domain_union = knl.get_inames_domain(all_inames[0]) for iname in all_inames[1:]: @@ -90,6 +97,16 @@ def _get_knl_domain_for_sched_checking(knl): return domain_union +def all_iname_domains_equal(knl): + all_inames = list(knl.all_inames()) + + first = knl.get_inames_domain(all_inames[0]) + for iname in all_inames[1:]: + if knl.get_inames_domain(iname) != first: + return False + return True + + def order_var_names_to_match_islset(var_names, islset): name_order = islset.get_var_names(isl.dim_type.out) names_ordered_to_match_islset = [] -- GitLab From c73839bfd341329ad714d0289d5bb6631347fa0a Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 11 Jun 2019 00:02:06 -0500 Subject: [PATCH 033/415] created StatementDependency (and updated other relevant functions), possibly to replace Dependency, which holds all individual iname deps for a given pair of statements --- dependency.py | 135 +++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 133 insertions(+), 2 deletions(-) diff --git a/dependency.py b/dependency.py index 0f5a0cb48..0e2ad13e2 100644 --- a/dependency.py +++ b/dependency.py @@ -8,6 +8,7 @@ class DependencyType: ALL = "all" +# TODO remove old dep class class Dependency(object): def __init__( self, @@ -30,6 +31,26 @@ class Dependency(object): self.dep_type) +class StatementDependency(object): + def __init__( + self, + statement_before, + statement_after, + iname_deps, # {iname: dep_type} + ): + self.statement_before = statement_before + self.statement_after = statement_after + self.iname_deps = iname_deps + + + def __str__(self): + result = "%s --before->\n%s iff\n " % ( + self.statement_before, self.statement_after) + return result + " and\n ".join( + ["(%s dep: %s)" % (iname, dep_type) + for iname, dep_type in self.iname_deps.items()]) + + def create_equality_conjunction_set(names0, names1, islvars): # initialize set with constraint that is always true @@ -50,7 +71,7 @@ def _convert_constraint_set_to_map(constraint_set, mv_count, src_position=None): # TODO make this take in a set of dep sets and intersect all the constraints -def create_dependency_constraint( +def create_dependency_constraint_old( dependencies, all_inames_ordered, statement_var, @@ -130,6 +151,81 @@ def create_dependency_constraint( return map_with_loop_domain_constraints +def create_dependency_constraint( + statement_dep, + all_inames_ordered, + statement_var, + domain_constraint_set, + sid_to_int, + ): + from schedule_checker.sched_check_utils import ( + make_islvars_with_var_primes, + append_apostrophes, + add_dims_to_isl_set, + create_new_set_with_primes, + ) + # This function uses the dependency given to create the following constraint: + # Statement [s,i,j] comes before statement [s',i',j'] iff + + # assumes statements are numbered sequentially + # (statement_bound = max statement id + 1) + + # make sure all dependencies involve different inames # TODO upate after allowing prior(i,k) + if len(set(statement_dep.iname_deps.keys()) + ) != len(statement_dep.iname_deps.keys()): + raise ValueError("All depencencies must apply to different inames.") + + statement_var_prime = statement_var+"'" + DT = DependencyType + islvars = make_islvars_with_var_primes( + [statement_var]+all_inames_ordered, + []) + + # initialize constraints to False + # this will disappear as soon as we add a constraint that is not DT.NONE + all_constraints_set = islvars[0].eq_set(islvars[0] + 1) + + for iname, dep_type in statement_dep.iname_deps.items(): + if dep_type == DT.NONE: + continue + + 
iname_prime = iname+"'" # i' + other_inames = all_inames_ordered.copy() + other_inames.remove(iname) # remaining inames, e.g., [j, k] + other_inames_prime = append_apostrophes(other_inames) # e.g., [j', k'] + + # initialize constraint set with what we know about other inames (e.g., j = j', k = k') + constraint_set = create_equality_conjunction_set(other_inames, other_inames_prime, islvars) + if dep_type == DT.SAME: + constraint_set = constraint_set & islvars[iname].eq_set(islvars[iname_prime]) + elif dep_type == DT.PRIOR: + constraint_set = constraint_set & islvars[iname].lt_set(islvars[iname_prime]) + elif dep_type == DT.ALL: + constraint_set = constraint_set & islvars[0].eq_set(islvars[0]) # True + + s_before_int = sid_to_int[statement_dep.statement_before.sid] + s_after_int = sid_to_int[statement_dep.statement_after.sid] + constraint_set = constraint_set & islvars[statement_var].eq_set(islvars[0]+s_before_int) + constraint_set = constraint_set & islvars[statement_var_prime].eq_set(islvars[0]+s_after_int) + + all_constraints_set = all_constraints_set | constraint_set + + all_constraints_map = _convert_constraint_set_to_map(all_constraints_set, len(all_inames_ordered)+1) + + range_constraint_set = create_new_set_with_primes(domain_constraint_set) + + new_pose = 0 # insert 's' at beginning # TODO don't hardcode 's' + domain_to_intersect = add_dims_to_isl_set( + domain_constraint_set, isl.dim_type.out, ["s"], new_pose) # TODO don't hardcode 's' + range_to_intersect = add_dims_to_isl_set( + range_constraint_set, isl.dim_type.out, ["s'"], new_pose) # TODO don't hardcode 's' + + map_with_loop_domain_constraints = all_constraints_map.intersect_domain(domain_to_intersect).intersect_range(range_to_intersect) + #blah2 = isl.Map("[pi_up, pj_up] -> { [s = 1, i, j] -> [s' = 0, i' = i, j'] : 0 <= i < pi_up and 0 <= j < pj_up and j' > j and 0 <= j' < pj_up}") + #assert blah2 == map_with_loop_domain_constraints + return map_with_loop_domain_constraints + + def get_concurrent_inames(knl): from loopy.kernel.data import LocalIndexTag, GroupIndexTag conc_inames = set() @@ -142,7 +238,7 @@ def get_concurrent_inames(knl): return conc_inames, all_inames-conc_inames -def create_dependencies_from_legacy_knl(knl): +def create_dependencies_from_legacy_knl_old(knl): from schedule_checker.schedule import Statement from schedule_checker.dependency import ( Dependency, @@ -171,3 +267,38 @@ def create_dependencies_from_legacy_knl(knl): dep_set.append(Dependency(s_before, s_after, DT.ALL, conc_iname)) dep_sets.append(dep_set) return dep_sets + + +def create_dependencies_from_legacy_knl(knl): + from schedule_checker.schedule import Statement + DT = DependencyType + conc_inames, non_conc_inames = get_concurrent_inames(knl) + all_inames = list(knl.all_inames()) + deps = [] + for insn_after in knl.instructions: + for insn_before_id in insn_after.depends_on: + iname_deps = {} + insn_before = knl.id_to_insn[insn_before_id] + insn_before_inames = insn_before.within_inames + insn_after_inames = insn_after.within_inames + #print("%s (%s) -> %s (%s)" % ( + # insn_before.id, insn_before_inames, insn_after.id, insn_after_inames)) + shared_inames = insn_before_inames & insn_after_inames + shared_conc_inames = shared_inames & conc_inames + shared_non_conc_inames = shared_inames & non_conc_inames + #print("shared conc/non-conc %s/%s" % (shared_conc_inames, shared_non_conc_inames)) + s_before = Statement(insn_before.id, all_inames) + s_after = Statement(insn_after.id, all_inames) + #TODO should this be all_inames or within_inames? 
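
# Rough sketch (assuming only islpy, imported as isl) of what the SAME / PRIOR /
# ALL iname dependencies above amount to for a single statement pair
# (statement 0 before statement 1) and one iname i; "sp" and "ip" stand in for
# the primed s'/i' variables used by the checker.
import islpy as isl

same_dep = isl.Map("{ [s, i] -> [sp, ip] : s = 0 and sp = 1 and ip = i }")
prior_dep = isl.Map("{ [s, i] -> [sp, ip] : s = 0 and sp = 1 and ip > i }")
all_dep = isl.Map("{ [s, i] -> [sp, ip] : s = 0 and sp = 1 }")

# SAME and PRIOR are both refinements of ALL:
assert same_dep.is_subset(all_dep)
assert prior_dep.is_subset(all_dep)
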
+ #s_before = Statement(insn_before.id, insn_before_inames) + #s_after = Statement(insn_after.id, insn_after_inames) + # TODO or union? + #s_before = Statement(insn_before.id, insn_before_inames | insn_after_inames) + #s_after = Statement(insn_after.id, insn_before_inames | insn_after_inames) + + for non_conc_iname in shared_non_conc_inames: + iname_deps[non_conc_iname] = DT.SAME + for conc_iname in shared_conc_inames: + iname_deps[conc_iname] = DT.ALL + deps.append(StatementDependency(s_before, s_after, iname_deps)) + return deps -- GitLab From 3ef3a863989cc23ea2090c50a3e0fa7b92add151 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 11 Jun 2019 00:04:54 -0500 Subject: [PATCH 034/415] renamed example_sched_creation.py -> example_schedule_creation_old.py --- ...ion.py => example_schedule_creation_old.py | 50 +++++++++++++++---- 1 file changed, 40 insertions(+), 10 deletions(-) rename example_schedule_creation.py => example_schedule_creation_old.py (85%) diff --git a/example_schedule_creation.py b/example_schedule_creation_old.py similarity index 85% rename from example_schedule_creation.py rename to example_schedule_creation_old.py index b598ff99f..47876b51c 100644 --- a/example_schedule_creation.py +++ b/example_schedule_creation_old.py @@ -4,8 +4,8 @@ import numpy as np from schedule_checker.dependency import ( Dependency, DependencyType as DT, - create_dependencies_from_legacy_knl, - create_dependency_constraint, + create_dependencies_from_legacy_knl_old, + create_dependency_constraint_old, ) from schedule_checker.schedule import Statement, StatementInstance, LexSchedule from schedule_checker.sched_check_utils import prettier_map_string @@ -16,12 +16,14 @@ from schedule_checker.lexicographic_order_map import ( ) from schedule_checker.sched_check_utils import ( prettier_map_string, - _get_knl_domain_for_sched_checking, + _union_inames_domains, + all_iname_domains_equal, order_var_names_to_match_islset, ) -#knl_choice = "example" -knl_choice = "matmul" +knl_choice = "example" +#knl_choice = "matmul" +#knl_choice = "scan" if knl_choice == "example": # make example kernel @@ -67,7 +69,22 @@ elif knl_choice == "matmul": knl = lp.add_prefetch(knl, "b", ["j_inner", "k_inner"], default_tag="l.auto") knl = lp.preprocess_kernel(knl) knl = lp.get_one_scheduled_kernel(knl) +elif knl_choice == "scan": + stride = 1 + n_scan = 16 + knl = lp.make_kernel( + "[n] -> {[i,j]: 0<=i statement instances):") #print(constraint_map.space) -- GitLab From 51074677a01d75506351193088f2b1d8ed6b26d5 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 11 Jun 2019 00:05:55 -0500 Subject: [PATCH 035/415] new exampleschedule creation where pairs of statements with dependencies are tested individually --- example_dep_pairwise_schedule_creation.py | 349 ++++++++++++++++++++++ 1 file changed, 349 insertions(+) create mode 100644 example_dep_pairwise_schedule_creation.py diff --git a/example_dep_pairwise_schedule_creation.py b/example_dep_pairwise_schedule_creation.py new file mode 100644 index 000000000..8fc92aebf --- /dev/null +++ b/example_dep_pairwise_schedule_creation.py @@ -0,0 +1,349 @@ +import islpy as isl +import loopy as lp +import numpy as np +from schedule_checker.dependency import ( + Dependency, + DependencyType as DT, + create_dependencies_from_legacy_knl, + create_dependency_constraint, +) +from schedule_checker.schedule import Statement, StatementInstance, LexSchedule +from schedule_checker.sched_check_utils import prettier_map_string +from schedule_checker.lexicographic_order_map import ( + 
create_explicit_map_from_tuples, + get_statement_ordering_map, + get_space, +) +from schedule_checker.sched_check_utils import ( + prettier_map_string, + _union_inames_domains, + all_iname_domains_equal, + order_var_names_to_match_islset, +) + +#knl_choice = "example" +#knl_choice = "matmul" +knl_choice = "scan" +#knl_choice = "dependent_domain" + +if knl_choice == "example": + # make example kernel + knl = lp.make_kernel( + #"{[i,j]: 0<=i<2 and 1<=j<3}", + #"{[i,j]: pi_lo<=itemp = b[i,j] {id=insn_a}", + "a[i,j] = temp + 1 {id=insn_b,dep=insn_a}", + "c[i,j] = d[i,j] {id=insn_c}", + "out[t,tt] = in[t,tt] {id=insn_d}", + ], + name="example", + #assumptions="pi_lo,pi_up,pj_lo,pj_up >= 1", + #assumptions="pi_up,pj_up >= 1", + #assumptions="pi_up,pj_up,pt_up,pt_lo >= 1 and pt_lo < pt_up", + assumptions="pi_up,pj_up,pt_up,pt_lo >= 1 and pt_lo < pt_up", + lang_version=(2018, 2) + ) + #knl = lp.add_and_infer_dtypes(knl, {"b": np.float32, "d": np.float32}) + knl = lp.add_and_infer_dtypes(knl, {"b": np.float32, "d": np.float32, "in": np.float32}) + knl = lp.tag_inames(knl, {"i": "l.0"}) + knl = lp.preprocess_kernel(knl) + knl = lp.get_one_scheduled_kernel(knl) +elif knl_choice == "matmul": + bsize = 16 + knl = lp.make_kernel( + "{[i,k,j]: 0<=i {[i,j]: 0<=i {[i]: 0<=i lex time):") + print(sched_map_symbolic.space) + + # *Explicit* lexicographic mapping- map each tuple to all tuples occuring later + print("---------------------------------------------------------------------------") + #lex_map_explicit = sched.get_lex_map_explicit() + + lex_map_symbolic = sched.get_lex_map_symbolic() + + print("lex map symbolic:") + print(prettier_map_string(lex_map_symbolic)) + print("space (lex time -> lex time):") + print(lex_map_symbolic.space) + + # Statement instance ordering + print("----------------------------------------------------------------------") + #SIO_explicit_valid = get_statement_ordering_map( + # example_sched_explicit, lex_map_explicit) + #print("statement instance ordering explicit (valid_sched):") + #print(prettier_map_string(SIO_explicit_valid)) + SIO_symbolic_valid = get_statement_ordering_map( + sched_map_symbolic, lex_map_symbolic) + print("statement instance ordering symbolic (valid_sched):") + print(prettier_map_string(SIO_symbolic_valid)) + print("space (statement instances -> statement instances):") + print(SIO_symbolic_valid.space) + """ + # i is parallel, suppose we want to enforce the following: + # for a given i, statement 0 happens before statement 1 + # i dependency is none, j dependency is `prior` + + # make some dependencies manually for now: + s0 = Statement("0", ["i", "j"]) + s1 = Statement("1", ["i", "j"]) + s2 = Statement("2", ["i", "j"]) + #dep_s1_i = Dependency(s0, s1, DT.NONE, "i") + #dep_s1_j = Dependency(s0, s1, DT.PRIOR, "j") + #insn_to_deps = {"0":[], "1":[dep_s1_i, dep_s1_j], "2":[]} + + deps = [ + Dependency(s0, s1, DT.NONE, "i"), + Dependency(s0, s1, DT.PRIOR, "j"), + ] + """ + + print("----------------------------------------------------------------------") + print("dict{lp insn id : sched sid int}:") + print(sched.lp_insnid_to_int_sid) + print("----------------------------------------------------------------------") + + statement_var = 's' + # TODO make create_dep_constraint accept whole set of dep_sets + constraint_map = create_dependency_constraint( + statement_dep, + all_inames_ordered, + statement_var, + dom, + sched.lp_insnid_to_int_sid, + ) + print("constraint map:") + print(prettier_map_string(constraint_map)) + #print("space (statment instances -> 
statement instances):") + #print(constraint_map.space) + + assert constraint_map.space == SIO_symbolic_valid.space + if not constraint_map.is_subset(SIO_symbolic_valid): + sched_is_valid = False + +print("is valid sched valid? constraint map subset of SIO?") +print(sched_is_valid) + + +''' +all_inames = ['i', 'j'] +iname_params = ['p0', 'p1'] +iname_param_vals = [2, 2] +statement_var = 's' +statement_param = 'ps' +statement_bound = 2 + + + +s0 = Statement("0", ["i", "j"]) +s1 = Statement("1", ["i", "j"]) +print("Statements:") +print(s0) +print(s1) + +s0_00 = StatementInstance(s0, {"i": 0, "j": 0}) +s0_10 = StatementInstance(s0, {"i": 1, "j": 0}) +s0_01 = StatementInstance(s0, {"i": 0, "j": 1}) +s0_11 = StatementInstance(s0, {"i": 1, "j": 1}) +s1_00 = StatementInstance(s1, {"i": 0, "j": 0}) +s1_10 = StatementInstance(s1, {"i": 1, "j": 0}) +s1_01 = StatementInstance(s1, {"i": 0, "j": 1}) +s1_11 = StatementInstance(s1, {"i": 1, "j": 1}) +print("Statement instances:") +print(s0_00) +print(s0_10) +print(s0_01) +print(s0_11) +print(s1_00) +print(s1_10) +print(s1_01) +print(s1_11) + +state_inst_to_lex_time_dict = { + s0_00: (0,0), + s1_00: (0,1), + s0_10: (0,0), + s1_10: (0,1), + s0_01: (1,0), + s1_01: (1,1), + s0_11: (1,0), + s1_11: (1,1), + } + +sched = LexSchedule(state_inst_to_lex_time_dict) +print("LexSchedule:") +print(sched) + +# sched map should be this: +schedule_explicit_map = isl.Map( + """{ + [s,i,j] -> [0,0] : s = 0 and i = 0 and j = 0; + [s,i,j] -> [0,1] : s = 1 and i = 0 and j = 0; + [s,i,j] -> [0,0] : s = 0 and i = 1 and j = 0; + [s,i,j] -> [0,1] : s = 1 and i = 1 and j = 0; + [s,i,j] -> [1,0] : s = 0 and i = 0 and j = 1; + [s,i,j] -> [1,1] : s = 1 and i = 0 and j = 1; + [s,i,j] -> [1,0] : s = 0 and i = 1 and j = 1; + [s,i,j] -> [1,1] : s = 1 and i = 1 and j = 1; + }""") + +schedule_general_map = isl.Map("{[s,i,j] -> [j,s]}") + +print("Map representing schedule generally:") +print(schedule_general_map) + +# the following is equivalent to explicit map above: +schedule_explicit_map2 = isl.Map( + """{ + [s=0,i=0,j=0] -> [0,0]; + [s=1,i=0,j=0] -> [0,1]; + [s=0,i=1,j=0] -> [0,0]; + [s=1,i=1,j=0] -> [0,1]; + [s=0,i=0,j=1] -> [1,0]; + [s=1,i=0,j=1] -> [1,1]; + [s=0,i=1,j=1] -> [1,0]; + [s=1,i=1,j=1] -> [1,1]; + }""") +assert schedule_explicit_map2 == schedule_explicit_map == sched.get_isl_map() + +''' + +""" +dep_i_same = Dependency(s0, s1, "i", DependencyType.SAME) +dep_i_none = Dependency(s0, s1, "i", DependencyType.NONE) +dep_i_prior = Dependency(s0, s1, "i", DependencyType.PRIOR) +dep_i_all = Dependency(s0, s1, "i", DependencyType.ALL) +dep_j_same = Dependency(s0, s1, "j", DependencyType.SAME) +dep_j_none = Dependency(s0, s1, "j", DependencyType.NONE) +dep_j_prior = Dependency(s0, s1, "j", DependencyType.PRIOR) +dep_j_all = Dependency(s0, s1, "j", DependencyType.ALL) +print("Example dependencies: ") +print(dep_i_same) +print(dep_i_none) +print(dep_i_prior) +print(dep_i_all) +print(dep_j_same) +print(dep_j_none) +print(dep_j_prior) +print(dep_j_all) +""" -- GitLab From a0b4f9293943737cebd3b0cae69673c28e6340f6 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Fri, 21 Jun 2019 17:59:06 -0500 Subject: [PATCH 036/415] for creating constraints for deps in legacy kernels, apply existing dep constraint creation logic only to set of inames that is *shared* between both instructions; for any non-shared inames, create a pseudo-ALL dep constraint that requires insn0 before insn1 iff True and s0=s0' and ... 
sn=sn' for all shared inames s0...sn; as a result, if there are no shared inames, insn0 always happens before insn1 --- dependency.py | 46 +++++++++++++++-------- example_dep_pairwise_schedule_creation.py | 13 ++++--- 2 files changed, 37 insertions(+), 22 deletions(-) diff --git a/dependency.py b/dependency.py index 0e2ad13e2..10f090798 100644 --- a/dependency.py +++ b/dependency.py @@ -185,24 +185,40 @@ def create_dependency_constraint( # this will disappear as soon as we add a constraint that is not DT.NONE all_constraints_set = islvars[0].eq_set(islvars[0] + 1) + before_inames = statement_dep.statement_before.active_inames + after_inames = statement_dep.statement_after.active_inames + shared_inames = before_inames & after_inames + #non_shared_inames = (before_inames | after_inames) - shared_inames + + # for each (iname, dep_type) pair, create a constraint, + # all_constraints_set will be the union of all these constraints for iname, dep_type in statement_dep.iname_deps.items(): if dep_type == DT.NONE: continue iname_prime = iname+"'" # i' - other_inames = all_inames_ordered.copy() - other_inames.remove(iname) # remaining inames, e.g., [j, k] - other_inames_prime = append_apostrophes(other_inames) # e.g., [j', k'] - # initialize constraint set with what we know about other inames (e.g., j = j', k = k') - constraint_set = create_equality_conjunction_set(other_inames, other_inames_prime, islvars) + #other_inames = all_inames_ordered.copy() + #other_inames.remove(iname) # remaining inames, e.g., [j, k] + #other_inames_prime = append_apostrophes(other_inames) # e.g., [j', k'] + other_shared_inames = list(shared_inames - {iname}) # remaining shared inames, e.g., [j, k] + other_shared_inames_prime = append_apostrophes(other_shared_inames) # e.g., [j', k'] + + # initialize constraint set with what we know about other shared inames (e.g., j = j', k = k') + # will be True if no shared inames + constraint_set = create_equality_conjunction_set( + other_shared_inames, other_shared_inames_prime, islvars) if dep_type == DT.SAME: - constraint_set = constraint_set & islvars[iname].eq_set(islvars[iname_prime]) + constraint_set = constraint_set & islvars[iname].eq_set( + islvars[iname_prime]) elif dep_type == DT.PRIOR: - constraint_set = constraint_set & islvars[iname].lt_set(islvars[iname_prime]) + constraint_set = constraint_set & islvars[iname].lt_set( + islvars[iname_prime]) elif dep_type == DT.ALL: - constraint_set = constraint_set & islvars[0].eq_set(islvars[0]) # True + constraint_set = constraint_set & islvars[0].eq_set( + islvars[0]) # True + # enforce statement_var == statement # s_before_int = sid_to_int[statement_dep.statement_before.sid] s_after_int = sid_to_int[statement_dep.statement_after.sid] constraint_set = constraint_set & islvars[statement_var].eq_set(islvars[0]+s_before_int) @@ -284,21 +300,19 @@ def create_dependencies_from_legacy_knl(knl): #print("%s (%s) -> %s (%s)" % ( # insn_before.id, insn_before_inames, insn_after.id, insn_after_inames)) shared_inames = insn_before_inames & insn_after_inames + non_shared_inames = (insn_before_inames | insn_after_inames) - shared_inames shared_conc_inames = shared_inames & conc_inames shared_non_conc_inames = shared_inames & non_conc_inames #print("shared conc/non-conc %s/%s" % (shared_conc_inames, shared_non_conc_inames)) - s_before = Statement(insn_before.id, all_inames) - s_after = Statement(insn_after.id, all_inames) - #TODO should this be all_inames or within_inames? 
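
# Minimal sketch of the iname classification this commit describes, using plain
# Python sets as stand-ins for the kernel's iname sets (all names illustrative;
# the strings "same"/"all" stand in for DependencyType.SAME/ALL):
before_inames = {"i", "j"}   # inames of the depended-upon instruction
after_inames = {"i", "k"}    # inames of the depending instruction
conc_inames = {"i"}          # concurrently-tagged inames

shared = before_inames & after_inames                  # {"i"}
non_shared = (before_inames | after_inames) - shared   # {"j", "k"}

iname_deps = {}
for iname in shared - conc_inames:
    iname_deps[iname] = "same"   # shared sequential iname -> SAME dep
for iname in shared & conc_inames:
    iname_deps[iname] = "all"    # shared concurrent iname -> ALL dep
for iname in non_shared:
    iname_deps[iname] = "all"    # non-shared iname -> pseudo-ALL dep
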
- #s_before = Statement(insn_before.id, insn_before_inames) - #s_after = Statement(insn_after.id, insn_after_inames) - # TODO or union? - #s_before = Statement(insn_before.id, insn_before_inames | insn_after_inames) - #s_after = Statement(insn_after.id, insn_before_inames | insn_after_inames) + s_before = Statement(insn_before.id, insn_before_inames) + s_after = Statement(insn_after.id, insn_after_inames) for non_conc_iname in shared_non_conc_inames: iname_deps[non_conc_iname] = DT.SAME for conc_iname in shared_conc_inames: iname_deps[conc_iname] = DT.ALL + for non_shared_iname in non_shared_inames: + iname_deps[non_shared_iname] = DT.ALL + deps.append(StatementDependency(s_before, s_after, iname_deps)) return deps diff --git a/example_dep_pairwise_schedule_creation.py b/example_dep_pairwise_schedule_creation.py index 8fc92aebf..561d05ae7 100644 --- a/example_dep_pairwise_schedule_creation.py +++ b/example_dep_pairwise_schedule_creation.py @@ -23,8 +23,8 @@ from schedule_checker.sched_check_utils import ( #knl_choice = "example" #knl_choice = "matmul" -knl_choice = "scan" -#knl_choice = "dependent_domain" +#knl_choice = "scan" +knl_choice = "dependent_domain" if knl_choice == "example": # make example kernel @@ -103,8 +103,8 @@ elif knl_choice == "dependent_domain": knl = lp.get_one_scheduled_kernel(knl) -#print("Kernel:") -#print(knl) +print("Kernel:") +print(knl) #print(lp.generate_code_v2(knl).device_code()) print("="*80) print("Iname tags: %s" % (knl.iname_to_tags)) @@ -134,8 +134,9 @@ for sd in statement_deps: deps_and_domains = [] for sd in statement_deps: - assert sd.statement_before.active_inames == sd.statement_after.active_inames # TODO does this need to be true? - deps_and_domains.append([sd, knl.get_inames_domain(sd.statement_before.active_inames)]) + deps_and_domains.append([sd, knl.get_inames_domain(sd.statement_before.active_inames | sd.statement_after.active_inames)]) + # TODO need to have separate domains for separate instructions? 
...domain for after distinct from before + #1/0 print("----------------------------------------------------------------------") print("StatementDependencies w/domains:") -- GitLab From f65a9791b7993cf864ddca717dd08b06019fec3a Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Fri, 21 Jun 2019 18:10:04 -0500 Subject: [PATCH 037/415] added stroud test kernel --- example_dep_pairwise_schedule_creation.py | 48 ++++++++++++++++++++++- 1 file changed, 47 insertions(+), 1 deletion(-) diff --git a/example_dep_pairwise_schedule_creation.py b/example_dep_pairwise_schedule_creation.py index 561d05ae7..c17ab9d27 100644 --- a/example_dep_pairwise_schedule_creation.py +++ b/example_dep_pairwise_schedule_creation.py @@ -24,7 +24,8 @@ from schedule_checker.sched_check_utils import ( #knl_choice = "example" #knl_choice = "matmul" #knl_choice = "scan" -knl_choice = "dependent_domain" +#knl_choice = "dependent_domain" +knl_choice = "stroud" if knl_choice == "example": # make example kernel @@ -101,6 +102,51 @@ elif knl_choice == "dependent_domain": knl = lp.realize_reduction(knl, force_scan=True) knl = lp.preprocess_kernel(knl) knl = lp.get_one_scheduled_kernel(knl) +elif knl_choice == "stroud": + knl = lp.make_kernel( + "{[el, i2, alpha1,alpha2]: \ + 0 <= el < nels and \ + 0 <= i2 < nqp1d and \ + 0 <= alpha1 <= deg and 0 <= alpha2 <= deg-alpha1 }", + """ + for el,i2 + <> xi = qpts[1, i2] + <> s = 1-xi + <> r = xi/s + <> aind = 0 {id=aind_init} + + for alpha1 + <> w = s**(deg-alpha1) {id=init_w} + + for alpha2 + tmp[el,alpha1,i2] = tmp[el,alpha1,i2] + w * coeffs[aind] \ + {id=write_tmp,dep=init_w:aind_init} + w = w * r * ( deg - alpha1 - alpha2 ) / (1 + alpha2) \ + {id=update_w,dep=init_w:write_tmp} + aind = aind + 1 \ + {id=aind_incr,dep=aind_init:write_tmp:update_w} + end + end + end + """, + [ + # Must declare coeffs to have "no" shape, to keep loopy + # from trying to figure it out the shape automatically. + + lp.GlobalArg("coeffs", None, shape=None), + "..." 
+ ], + name="stroud", + assumptions="deg>=0 and nels>=1" + ) + + knl = lp.fix_parameters(knl, nqp1d=7, deg=4) + knl = lp.split_iname(knl, "el", 16, inner_tag="l.0") + knl = lp.split_iname(knl, "el_outer", 2, outer_tag="g.0", inner_tag="ilp", + slabs=(0, 1)) + knl = lp.tag_inames(knl, dict(i2="l.1", alpha1="unr", alpha2="unr")) + knl = lp.preprocess_kernel(knl) + knl = lp.get_one_scheduled_kernel(knl) print("Kernel:") -- GitLab From 1ba3d784f72cc00a0dededf75ceceaad45ab56c7 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Fri, 21 Jun 2019 18:44:57 -0500 Subject: [PATCH 038/415] printing info about invalid schedules --- dependency.py | 1 + example_dep_pairwise_schedule_creation.py | 22 +++++++++++++++++++++- 2 files changed, 22 insertions(+), 1 deletion(-) diff --git a/dependency.py b/dependency.py index 10f090798..efcb776f7 100644 --- a/dependency.py +++ b/dependency.py @@ -228,6 +228,7 @@ def create_dependency_constraint( all_constraints_map = _convert_constraint_set_to_map(all_constraints_set, len(all_inames_ordered)+1) + # TODO use separate domain for before and after insns range_constraint_set = create_new_set_with_primes(domain_constraint_set) new_pose = 0 # insert 's' at beginning # TODO don't hardcode 's' diff --git a/example_dep_pairwise_schedule_creation.py b/example_dep_pairwise_schedule_creation.py index c17ab9d27..6cd5c0977 100644 --- a/example_dep_pairwise_schedule_creation.py +++ b/example_dep_pairwise_schedule_creation.py @@ -6,6 +6,7 @@ from schedule_checker.dependency import ( DependencyType as DT, create_dependencies_from_legacy_knl, create_dependency_constraint, + get_concurrent_inames, ) from schedule_checker.schedule import Statement, StatementInstance, LexSchedule from schedule_checker.sched_check_utils import prettier_map_string @@ -193,6 +194,7 @@ for sd, dom in deps_and_domains: sched_is_valid = True # check each statement pair individually for statement_dep, dom in deps_and_domains: + # TODO separate dom for before and after insns s_before = statement_dep.statement_before s_after = statement_dep.statement_after @@ -244,7 +246,7 @@ for statement_dep, dom in deps_and_domains: sched_map_symbolic, lex_map_symbolic) print("statement instance ordering symbolic (valid_sched):") print(prettier_map_string(SIO_symbolic_valid)) - print("space (statement instances -> statement instances):") + print("SIO space (statement instances -> statement instances):") print(SIO_symbolic_valid.space) """ # i is parallel, suppose we want to enforce the following: @@ -287,6 +289,24 @@ for statement_dep, dom in deps_and_domains: assert constraint_map.space == SIO_symbolic_valid.space if not constraint_map.is_subset(SIO_symbolic_valid): sched_is_valid = False + conc_inames, non_conc_inames = get_concurrent_inames(knl) + print("================ constraint check failure =================") + print("constraint map not subset of SIO") + print("dependency:") + print(statement_dep) + print("concurrent inames:", conc_inames) + print("sequential inames:", non_conc_inames) + print("constraint map space (statment instances -> statement instances):") + print(constraint_map.space) + print("SIO space (statement instances -> statement instances):") + print(SIO_symbolic_valid.space) + print("constraint map:") + print(prettier_map_string(constraint_map)) + print("statement instance ordering:") + print(prettier_map_string(SIO_symbolic_valid)) + print("{insn id -> sched sid int} dict:") + print(sched.lp_insnid_to_int_sid) + print("===========================================================") print("is valid sched 
valid? constraint map subset of SIO?") print(sched_is_valid) -- GitLab From db06cbee6c99439f83a8c140b38969b306267eb9 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Fri, 21 Jun 2019 23:10:14 -0500 Subject: [PATCH 039/415] started separating depender vs. dependee domains, then commented out most changes because not sure if this is right approach --- dependency.py | 12 +++-- example_dep_pairwise_schedule_creation.py | 58 +++++++++++++++++++---- schedule.py | 12 ++++- 3 files changed, 67 insertions(+), 15 deletions(-) diff --git a/dependency.py b/dependency.py index efcb776f7..dbf35073b 100644 --- a/dependency.py +++ b/dependency.py @@ -70,7 +70,6 @@ def _convert_constraint_set_to_map(constraint_set, mv_count, src_position=None): return constraint_map.move_dims(dim_type.out, 0, dim_type.in_, mv_count, mv_count) -# TODO make this take in a set of dep sets and intersect all the constraints def create_dependency_constraint_old( dependencies, all_inames_ordered, @@ -156,6 +155,8 @@ def create_dependency_constraint( all_inames_ordered, statement_var, domain_constraint_set, + #dom_before_constraint_set, + #dom_after_constraint_set, sid_to_int, ): from schedule_checker.sched_check_utils import ( @@ -228,14 +229,19 @@ def create_dependency_constraint( all_constraints_map = _convert_constraint_set_to_map(all_constraints_set, len(all_inames_ordered)+1) - # TODO use separate domain for before and after insns + # TODO use separate domain for before and after insns? range_constraint_set = create_new_set_with_primes(domain_constraint_set) - new_pose = 0 # insert 's' at beginning # TODO don't hardcode 's' domain_to_intersect = add_dims_to_isl_set( domain_constraint_set, isl.dim_type.out, ["s"], new_pose) # TODO don't hardcode 's' range_to_intersect = add_dims_to_isl_set( range_constraint_set, isl.dim_type.out, ["s'"], new_pose) # TODO don't hardcode 's' + #new_pose = 0 # insert 's' at beginning # TODO don't hardcode 's' + #domain_to_intersect = add_dims_to_isl_set( + # dom_before_constraint_set, isl.dim_type.out, ["s"], new_pose) # TODO don't hardcode 's' + #range_constraint_set = create_new_set_with_primes(dom_after_constraint_set) + #range_to_intersect = add_dims_to_isl_set( + # range_constraint_set, isl.dim_type.out, ["s'"], new_pose) # TODO don't hardcode 's' map_with_loop_domain_constraints = all_constraints_map.intersect_domain(domain_to_intersect).intersect_range(range_to_intersect) #blah2 = isl.Map("[pi_up, pj_up] -> { [s = 1, i, j] -> [s' = 0, i' = i, j'] : 0 <= i < pi_up and 0 <= j < pj_up and j' > j and 0 <= j' < pj_up}") diff --git a/example_dep_pairwise_schedule_creation.py b/example_dep_pairwise_schedule_creation.py index 6cd5c0977..40f5004ab 100644 --- a/example_dep_pairwise_schedule_creation.py +++ b/example_dep_pairwise_schedule_creation.py @@ -148,6 +148,28 @@ elif knl_choice == "stroud": knl = lp.tag_inames(knl, dict(i2="l.1", alpha1="unr", alpha2="unr")) knl = lp.preprocess_kernel(knl) knl = lp.get_one_scheduled_kernel(knl) +if knl_choice == "add_barrier": + np.random.seed(17) + a = np.random.randn(16) + cnst = np.random.randn(16) + knl = lp.make_kernel( + "{[i, ii]: 0<=i, ii Date: Fri, 21 Jun 2019 23:46:50 -0500 Subject: [PATCH 040/415] barriers can be part of a dependency pair; treating them just like a RunInstruction --- example_dep_pairwise_schedule_creation.py | 5 +++-- schedule.py | 8 +++++++- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/example_dep_pairwise_schedule_creation.py b/example_dep_pairwise_schedule_creation.py index 40f5004ab..d8550c15b 100644 --- 
a/example_dep_pairwise_schedule_creation.py +++ b/example_dep_pairwise_schedule_creation.py @@ -22,11 +22,12 @@ from schedule_checker.sched_check_utils import ( order_var_names_to_match_islset, ) -#knl_choice = "example" +knl_choice = "example" #knl_choice = "matmul" #knl_choice = "scan" #knl_choice = "dependent_domain" -knl_choice = "stroud" +#knl_choice = "stroud" +#knl_choice = "add_barrier" if knl_choice == "example": # make example kernel diff --git a/schedule.py b/schedule.py index fde888649..2fa8eb54c 100644 --- a/schedule.py +++ b/schedule.py @@ -77,7 +77,13 @@ class LexSchedule(object): self.append_item((insn_id_int,), next_insn_lex_pt[:]) next_insn_lex_pt[-1] = next_insn_lex_pt[-1] + 1 elif isinstance(sched_item, Barrier): - pass + # TODO barriers can be part of a dependency... how should these be handled? + if include_only_insn_ids is None or sched_item.originating_insn_id in include_only_insn_ids: + self.add_new_lp_insnid(sched_item.originating_insn_id) + insn_id_int = self.lp_insnid_to_int_sid[sched_item.originating_insn_id] + + self.append_item((insn_id_int,), next_insn_lex_pt[:]) + next_insn_lex_pt[-1] = next_insn_lex_pt[-1] + 1 else: pass self.pad_lex_pts_with_zeros() -- GitLab From 4b7163667a3e73bb133530218fc28f1ca9e867a0 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sat, 22 Jun 2019 01:12:30 -0500 Subject: [PATCH 041/415] added nop and nest example kernels --- example_dep_pairwise_schedule_creation.py | 55 ++++++++++++++++++++++- 1 file changed, 54 insertions(+), 1 deletion(-) diff --git a/example_dep_pairwise_schedule_creation.py b/example_dep_pairwise_schedule_creation.py index d8550c15b..b060bf8cb 100644 --- a/example_dep_pairwise_schedule_creation.py +++ b/example_dep_pairwise_schedule_creation.py @@ -22,12 +22,14 @@ from schedule_checker.sched_check_utils import ( order_var_names_to_match_islset, ) -knl_choice = "example" +#knl_choice = "example" #knl_choice = "matmul" #knl_choice = "scan" #knl_choice = "dependent_domain" #knl_choice = "stroud" #knl_choice = "add_barrier" +#knl_choice = "nop" #TODO +knl_choice = "nest" if knl_choice == "example": # make example kernel @@ -170,7 +172,58 @@ if knl_choice == "add_barrier": knl = lp.split_iname(knl, "ii", 2, outer_tag="g.0", inner_tag="l.0") knl = lp.preprocess_kernel(knl) knl = lp.get_one_scheduled_kernel(knl) +if knl_choice == "nop": + knl = lp.make_kernel( + [ + "{[a]: 0<=a<10}", + "{[b]: b_start<=b b_start = 1 + <> b_end = 2 + for b + <> c_start = 1 + <> c_end = 2 + + for c + ... 
nop + end + + <>t[idim] = 1 + end + end + """, + "...", + seq_dependencies=True) + knl = lp.fix_parameters(knl, dim=3) + knl = lp.preprocess_kernel(knl) + knl = lp.get_one_scheduled_kernel(knl) +if knl_choice == "nest": + knl = lp.make_kernel( + "{[i,j,k]: 0<=i,j,kfoo = 0 {id=insn0} + for i + <>acc = 0 {id=insn1} + for j + for k + acc = acc + j + k {id=insn2,dep=insn1} + end + end + foo = foo + acc {id=insn3,dep=insn2} + end + <>bar = foo {id=insn4,dep=insn3} + """, + name="nest", + assumptions="n >= 1", + lang_version=(2018, 2) + ) + + knl = lp.preprocess_kernel(knl) + knl = lp.get_one_scheduled_kernel(knl) print("Kernel:") -- GitLab From a48db945c0266a44a97a409d85dff5f178b0d3f6 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 24 Jun 2019 19:23:14 -0500 Subject: [PATCH 042/415] fixed pep8 issues --- example_dep_pairwise_schedule_creation.py | 60 ++++++++++++----------- 1 file changed, 31 insertions(+), 29 deletions(-) diff --git a/example_dep_pairwise_schedule_creation.py b/example_dep_pairwise_schedule_creation.py index b060bf8cb..2f995eb3e 100644 --- a/example_dep_pairwise_schedule_creation.py +++ b/example_dep_pairwise_schedule_creation.py @@ -1,35 +1,28 @@ -import islpy as isl import loopy as lp import numpy as np from schedule_checker.dependency import ( - Dependency, - DependencyType as DT, create_dependencies_from_legacy_knl, create_dependency_constraint, get_concurrent_inames, ) -from schedule_checker.schedule import Statement, StatementInstance, LexSchedule -from schedule_checker.sched_check_utils import prettier_map_string +from schedule_checker.schedule import LexSchedule from schedule_checker.lexicographic_order_map import ( - create_explicit_map_from_tuples, + #create_explicit_map_from_tuples, get_statement_ordering_map, - get_space, ) from schedule_checker.sched_check_utils import ( prettier_map_string, - _union_inames_domains, - all_iname_domains_equal, order_var_names_to_match_islset, ) #knl_choice = "example" #knl_choice = "matmul" -#knl_choice = "scan" +knl_choice = "scan" #knl_choice = "dependent_domain" #knl_choice = "stroud" #knl_choice = "add_barrier" #knl_choice = "nop" #TODO -knl_choice = "nest" +#knl_choice = "nest" if knl_choice == "example": # make example kernel @@ -52,7 +45,9 @@ if knl_choice == "example": lang_version=(2018, 2) ) #knl = lp.add_and_infer_dtypes(knl, {"b": np.float32, "d": np.float32}) - knl = lp.add_and_infer_dtypes(knl, {"b": np.float32, "d": np.float32, "in": np.float32}) + knl = lp.add_and_infer_dtypes( + knl, + {"b": np.float32, "d": np.float32, "in": np.float32}) knl = lp.tag_inames(knl, {"i": "l.0"}) knl = lp.preprocess_kernel(knl) knl = lp.get_one_scheduled_kernel(knl) @@ -71,7 +66,7 @@ elif knl_choice == "matmul": knl = lp.split_iname(knl, "i", bsize, outer_tag="g.0", inner_tag="l.1") knl = lp.split_iname(knl, "j", bsize, outer_tag="g.1", inner_tag="l.0") knl = lp.split_iname(knl, "k", bsize) - knl = lp.add_prefetch(knl, "a", ["k_inner", "i_inner"], default_tag="l.auto") + knl = lp.add_prefetch(knl, "a", ["k_inner", "i_inner"], default_tag="l.auto") knl = lp.add_prefetch(knl, "b", ["j_inner", "k_inner"], default_tag="l.auto") knl = lp.preprocess_kernel(knl) knl = lp.get_one_scheduled_kernel(knl) @@ -244,11 +239,11 @@ if not all_iname_domains_equal(knl): "get_inames_domain(iname) is not same for all inames") """ -#For every shared (between depender and dependee) non-concurrent iname Introduce a same dep +#For every shared (b/t depender and dependee) non-concurrent iname Introduce SAME dep # (Perform voodoo guesswork to determine 
whether a ‘prior’ dep is needed) -#For every shared (between depender and dependee) concurrent iname Introduce an all dep +#For every shared (b/t depender and dependee) concurrent iname Introduce an ALL dep -print("----------------------------------------------------------------------") +print("-"*85) statement_deps = create_dependencies_from_legacy_knl(knl) print("Statement Dependencies:") for sd in statement_deps: @@ -257,15 +252,18 @@ for sd in statement_deps: deps_and_domains = [] for sd in statement_deps: - #deps_and_domains.append([sd, knl.get_inames_domain(sd.statement_before.active_inames | sd.statement_after.active_inames)]) - # TODO need to have separate domains for separate instructions? ...domain for after distinct from before + #deps_and_domains.append([ + # sd, knl.get_inames_domain( + # sd.statement_before.active_inames | sd.statement_after.active_inames)]) + # TODO need to have separate domains for separate instructions? + # ...domain for after distinct from before deps_and_domains.append([ sd, knl.get_inames_domain(sd.statement_before.active_inames), knl.get_inames_domain(sd.statement_after.active_inames) ]) -print("----------------------------------------------------------------------") +print("-"*85) print("StatementDependencies w/domains:") #for sd, dom in deps_and_domains: for sd, dom_before, dom_after in deps_and_domains: @@ -289,15 +287,18 @@ for statement_dep, dom_before, dom_after in deps_and_domains: ) # get all inames in consistent ordering: - all_inames_ordered = order_var_names_to_match_islset(knl.all_inames(), combined_doms) # should separate doms? + all_inames_ordered = order_var_names_to_match_islset( + knl.all_inames(), combined_doms) # should separate doms? sched = LexSchedule(knl, include_only_insn_ids=[s_before.sid, s_after.sid]) - print("----------------------------------------------------------------------") + print("-"*85) print("LexSchedule before processing:") print(sched) - sched_map_symbolic = sched.create_symbolic_isl_map(combined_doms, all_inames_ordered) # should separate doms? - #sched_map_symbolic = sched.create_symbolic_isl_map(dom_before, dom_after, all_inames_ordered) + sched_map_symbolic = sched.create_symbolic_isl_map( + combined_doms, all_inames_ordered) # should separate doms? 
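
# Hedged sketch (assuming only islpy, imported as isl) of the
# statement-instance-ordering idea used just below; this composes the maps
# directly and is not necessarily identical to what get_statement_ordering_map
# does internally.
import islpy as isl

# toy schedule: statement s in {0, 1} at iteration i runs at lex time (i, s)
sched = isl.Map("{ [s, i] -> [i, s] : 0 <= s < 2 and 0 <= i < 3 }")
lex_lt = isl.Map("{ [a, b] -> [c, d] : c > a or (c = a and d > b) }")

# instance -> its lex time -> all later lex times -> instances at those times
sio = sched.apply_range(lex_lt).apply_range(sched.reverse())

# statement 0 at i=0 precedes statement 1 at i=0:
assert not sio.intersect(isl.Map("{ [0, 0] -> [1, 0] }")).is_empty()
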
+ #sched_map_symbolic = sched.create_symbolic_isl_map( + # dom_before, dom_after, all_inames_ordered) print("LexSchedule after processing:") print(sched) # ------------------------------------------------------------------- @@ -307,7 +308,8 @@ for statement_dep, dom_before, dom_after in deps_and_domains: domain_w_s = add_dims_to_isl_set( domain_union, isl.dim_type.out, ['s'], new_pose) # TODO don't hardcode 's' sched_map_vars_in = sched_map_symbolic.space.get_var_names(isl.dim_type.in_) - domain_stripped = domain_w_s.project_out_except(sched_map_vars_in, [isl.dim_type.set]) + domain_stripped = domain_w_s.project_out_except( + sched_map_vars_in, [isl.dim_type.set]) """ # ------------------------------------------------------------------- @@ -317,7 +319,7 @@ for statement_dep, dom_before, dom_after in deps_and_domains: print(sched_map_symbolic.space) # *Explicit* lexicographic mapping- map each tuple to all tuples occuring later - print("---------------------------------------------------------------------------") + print("-"*85) #lex_map_explicit = sched.get_lex_map_explicit() lex_map_symbolic = sched.get_lex_map_symbolic() @@ -328,7 +330,7 @@ for statement_dep, dom_before, dom_after in deps_and_domains: print(lex_map_symbolic.space) # Statement instance ordering - print("----------------------------------------------------------------------") + print("-"*85) #SIO_explicit_valid = get_statement_ordering_map( # example_sched_explicit, lex_map_explicit) #print("statement instance ordering explicit (valid_sched):") @@ -358,17 +360,17 @@ for statement_dep, dom_before, dom_after in deps_and_domains: ] """ - print("----------------------------------------------------------------------") + print("-"*85) print("dict{lp insn id : sched sid int}:") print(sched.lp_insnid_to_int_sid) - print("----------------------------------------------------------------------") + print("-"*85) statement_var = 's' constraint_map = create_dependency_constraint( statement_dep, all_inames_ordered, # TODO separate lists for separate doms? 
statement_var, - combined_doms, # TODO separate domains for before/after + combined_doms, # TODO separate domains for before/after #dom_before, #dom_after, sched.lp_insnid_to_int_sid, -- GitLab From 20e1b99e485959453ac8d34b8482afa4c2df3539 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 24 Jun 2019 19:37:20 -0500 Subject: [PATCH 043/415] fixing pep8 issues --- dependency.py | 148 ++++++++++++++++++++++++++------------------------ 1 file changed, 78 insertions(+), 70 deletions(-) diff --git a/dependency.py b/dependency.py index dbf35073b..07ad15112 100644 --- a/dependency.py +++ b/dependency.py @@ -22,7 +22,6 @@ class Dependency(object): self.dep_type = dep_type self.iname = iname - def __str__(self): return "%s -> %s {%s dep: %s}" % ( self.statement_before, @@ -36,13 +35,12 @@ class StatementDependency(object): self, statement_before, statement_after, - iname_deps, # {iname: dep_type} + iname_deps, # {iname: dep_type} ): self.statement_before = statement_before self.statement_after = statement_after self.iname_deps = iname_deps - def __str__(self): result = "%s --before->\n%s iff\n " % ( self.statement_before, self.statement_after) @@ -65,9 +63,11 @@ def _convert_constraint_set_to_map(constraint_set, mv_count, src_position=None): dim_type = isl.dim_type constraint_map = isl.Map.from_domain(constraint_set) if src_position: - return constraint_map.move_dims(dim_type.out, 0, dim_type.in_, src_position, mv_count) + return constraint_map.move_dims( + dim_type.out, 0, dim_type.in_, src_position, mv_count) else: - return constraint_map.move_dims(dim_type.out, 0, dim_type.in_, mv_count, mv_count) + return constraint_map.move_dims( + dim_type.out, 0, dim_type.in_, mv_count, mv_count) def create_dependency_constraint_old( @@ -91,26 +91,26 @@ def create_dependency_constraint_old( # make sure all dependencies involve same two statements if len(set([dep.statement_before.sid for dep in dependencies])) != 1 or \ - len(set([dep.statement_after.sid for dep in dependencies])) != 1: + len(set([dep.statement_after.sid for dep in dependencies])) != 1: raise ValueError("All depencencies must be between same two statements.") - # make sure all dependencies involve different inames # TODO upate after allowing prior(i,k) + # make sure all dependencies involve different inames if len(set([dep.iname for dep in dependencies])) != len(dependencies): raise ValueError("All depencencies must apply to different inames.") statement_var_prime = statement_var+"'" - DT = DependencyType + dt = DependencyType islvars = make_islvars_with_var_primes( [statement_var]+all_inames_ordered, []) # initialize constraints to False - # this will disappear as soon as we add a constraint that is not DT.NONE + # this will disappear as soon as we add a constraint that is not dt.NONE all_constraints_set = islvars[0].eq_set(islvars[0] + 1) for dep in dependencies: iname = dep.iname dep_type = dep.dep_type - if dep_type == DT.NONE: + if dep_type == dt.NONE: continue iname_prime = iname+"'" # i' @@ -118,35 +118,41 @@ def create_dependency_constraint_old( other_inames.remove(iname) # remaining inames, e.g., [j, k] other_inames_prime = append_apostrophes(other_inames) # e.g., [j', k'] - # initialize constraint set with what we know about other inames (e.g., j = j', k = k') - constraint_set = create_equality_conjunction_set(other_inames, other_inames_prime, islvars) - if dep_type == DT.SAME: - constraint_set = constraint_set & islvars[iname].eq_set(islvars[iname_prime]) - elif dep_type == DT.PRIOR: - constraint_set = constraint_set & 
islvars[iname].lt_set(islvars[iname_prime]) - elif dep_type == DT.ALL: - constraint_set = constraint_set & islvars[0].eq_set(islvars[0]) # True + # initialize constraint set with what we know about other inames + # (e.g., j = j', k = k') + constraint_set = create_equality_conjunction_set( + other_inames, other_inames_prime, islvars) + if dep_type == dt.SAME: + constraint_set = constraint_set & islvars[iname].eq_set( + islvars[iname_prime]) + elif dep_type == dt.PRIOR: + constraint_set = constraint_set & islvars[iname].lt_set( + islvars[iname_prime]) + elif dep_type == dt.ALL: + constraint_set = constraint_set & islvars[0].eq_set(islvars[0]) # True s_before_int = sid_to_int[dep.statement_before.sid] s_after_int = sid_to_int[dep.statement_after.sid] - constraint_set = constraint_set & islvars[statement_var].eq_set(islvars[0]+s_before_int) - constraint_set = constraint_set & islvars[statement_var_prime].eq_set(islvars[0]+s_after_int) + constraint_set = constraint_set & islvars[statement_var].eq_set( + islvars[0]+s_before_int) + constraint_set = constraint_set & islvars[statement_var_prime].eq_set( + islvars[0]+s_after_int) all_constraints_set = all_constraints_set | constraint_set - all_constraints_map = _convert_constraint_set_to_map(all_constraints_set, len(all_inames_ordered)+1) + all_constraints_map = _convert_constraint_set_to_map( + all_constraints_set, len(all_inames_ordered)+1) range_constraint_set = create_new_set_with_primes(domain_constraint_set) - new_pose = 0 # insert 's' at beginning # TODO don't hardcode 's' + new_pose = 0 # insert 's' at beginning # TODO don't hardcode 's' domain_to_intersect = add_dims_to_isl_set( - domain_constraint_set, isl.dim_type.out, ["s"], new_pose) # TODO don't hardcode 's' + domain_constraint_set, isl.dim_type.out, ["s"], new_pose) range_to_intersect = add_dims_to_isl_set( - range_constraint_set, isl.dim_type.out, ["s'"], new_pose) # TODO don't hardcode 's' + range_constraint_set, isl.dim_type.out, ["s'"], new_pose) - map_with_loop_domain_constraints = all_constraints_map.intersect_domain(domain_to_intersect).intersect_range(range_to_intersect) - #blah2 = isl.Map("[pi_up, pj_up] -> { [s = 1, i, j] -> [s' = 0, i' = i, j'] : 0 <= i < pi_up and 0 <= j < pj_up and j' > j and 0 <= j' < pj_up}") - #assert blah2 == map_with_loop_domain_constraints + map_with_loop_domain_constraints = all_constraints_map.intersect_domain( + domain_to_intersect).intersect_range(range_to_intersect) return map_with_loop_domain_constraints @@ -171,19 +177,19 @@ def create_dependency_constraint( # assumes statements are numbered sequentially # (statement_bound = max statement id + 1) - # make sure all dependencies involve different inames # TODO upate after allowing prior(i,k) - if len(set(statement_dep.iname_deps.keys()) - ) != len(statement_dep.iname_deps.keys()): + # make sure all dependencies involve different inames + if len(set(statement_dep.iname_deps.keys())) != len( + statement_dep.iname_deps.keys()): raise ValueError("All depencencies must apply to different inames.") statement_var_prime = statement_var+"'" - DT = DependencyType + dt = DependencyType islvars = make_islvars_with_var_primes( [statement_var]+all_inames_ordered, []) # initialize constraints to False - # this will disappear as soon as we add a constraint that is not DT.NONE + # this will disappear as soon as we add a constraint that is not dt.NONE all_constraints_set = islvars[0].eq_set(islvars[0] + 1) before_inames = statement_dep.statement_before.active_inames @@ -194,7 +200,7 @@ def 
create_dependency_constraint( # for each (iname, dep_type) pair, create a constraint, # all_constraints_set will be the union of all these constraints for iname, dep_type in statement_dep.iname_deps.items(): - if dep_type == DT.NONE: + if dep_type == dt.NONE: continue iname_prime = iname+"'" # i' @@ -202,50 +208,57 @@ def create_dependency_constraint( #other_inames = all_inames_ordered.copy() #other_inames.remove(iname) # remaining inames, e.g., [j, k] #other_inames_prime = append_apostrophes(other_inames) # e.g., [j', k'] - other_shared_inames = list(shared_inames - {iname}) # remaining shared inames, e.g., [j, k] - other_shared_inames_prime = append_apostrophes(other_shared_inames) # e.g., [j', k'] - # initialize constraint set with what we know about other shared inames (e.g., j = j', k = k') + # remaining shared inames, e.g., [j, k] + other_shared_inames = list(shared_inames - {iname}) + + other_shared_inames_prime = append_apostrophes(other_shared_inames) + # e.g., [j', k'] + + # initialize constraint set with what we know about other shared inames + # (e.g., j = j', k = k') # will be True if no shared inames constraint_set = create_equality_conjunction_set( other_shared_inames, other_shared_inames_prime, islvars) - if dep_type == DT.SAME: + if dep_type == dt.SAME: constraint_set = constraint_set & islvars[iname].eq_set( islvars[iname_prime]) - elif dep_type == DT.PRIOR: + elif dep_type == dt.PRIOR: constraint_set = constraint_set & islvars[iname].lt_set( islvars[iname_prime]) - elif dep_type == DT.ALL: + elif dep_type == dt.ALL: constraint_set = constraint_set & islvars[0].eq_set( - islvars[0]) # True + islvars[0]) # True # enforce statement_var == statement # s_before_int = sid_to_int[statement_dep.statement_before.sid] s_after_int = sid_to_int[statement_dep.statement_after.sid] - constraint_set = constraint_set & islvars[statement_var].eq_set(islvars[0]+s_before_int) - constraint_set = constraint_set & islvars[statement_var_prime].eq_set(islvars[0]+s_after_int) + constraint_set = constraint_set & islvars[statement_var].eq_set( + islvars[0]+s_before_int) + constraint_set = constraint_set & islvars[statement_var_prime].eq_set( + islvars[0]+s_after_int) all_constraints_set = all_constraints_set | constraint_set - all_constraints_map = _convert_constraint_set_to_map(all_constraints_set, len(all_inames_ordered)+1) + all_constraints_map = _convert_constraint_set_to_map( + all_constraints_set, len(all_inames_ordered)+1) # TODO use separate domain for before and after insns? 
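
# Sketch of the domain/range restriction step that follows (assuming only
# islpy, imported as isl): the raw dependency constraints are intersected on
# both sides with the loop domain, extended by a leading statement dimension;
# the names s/i/sp/ip and the parameter n are illustrative only.
import islpy as isl

constraints = isl.Map(
    "[n] -> { [s, i] -> [sp, ip] : s = 0 and sp = 1 and ip > i }")
dom_with_s = isl.Set("[n] -> { [s, i] : 0 <= i < n }")      # unprimed side
ran_with_sp = isl.Set("[n] -> { [sp, ip] : 0 <= ip < n }")  # primed side

restricted = constraints.intersect_domain(dom_with_s).intersect_range(ran_with_sp)
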
range_constraint_set = create_new_set_with_primes(domain_constraint_set) - new_pose = 0 # insert 's' at beginning # TODO don't hardcode 's' + new_pose = 0 # insert 's' at beginning # TODO don't hardcode 's' domain_to_intersect = add_dims_to_isl_set( - domain_constraint_set, isl.dim_type.out, ["s"], new_pose) # TODO don't hardcode 's' + domain_constraint_set, isl.dim_type.out, ["s"], new_pose) range_to_intersect = add_dims_to_isl_set( - range_constraint_set, isl.dim_type.out, ["s'"], new_pose) # TODO don't hardcode 's' + range_constraint_set, isl.dim_type.out, ["s'"], new_pose) #new_pose = 0 # insert 's' at beginning # TODO don't hardcode 's' #domain_to_intersect = add_dims_to_isl_set( - # dom_before_constraint_set, isl.dim_type.out, ["s"], new_pose) # TODO don't hardcode 's' + # dom_before_constraint_set, isl.dim_type.out, ["s"], new_pose) #range_constraint_set = create_new_set_with_primes(dom_after_constraint_set) #range_to_intersect = add_dims_to_isl_set( - # range_constraint_set, isl.dim_type.out, ["s'"], new_pose) # TODO don't hardcode 's' + # range_constraint_set, isl.dim_type.out, ["s'"], new_pose) - map_with_loop_domain_constraints = all_constraints_map.intersect_domain(domain_to_intersect).intersect_range(range_to_intersect) - #blah2 = isl.Map("[pi_up, pj_up] -> { [s = 1, i, j] -> [s' = 0, i' = i, j'] : 0 <= i < pi_up and 0 <= j < pj_up and j' > j and 0 <= j' < pj_up}") - #assert blah2 == map_with_loop_domain_constraints + map_with_loop_domain_constraints = all_constraints_map.intersect_domain( + domain_to_intersect).intersect_range(range_to_intersect) return map_with_loop_domain_constraints @@ -256,17 +269,15 @@ def get_concurrent_inames(knl): for iname in all_inames: iname_tags = knl.iname_to_tags.get(iname, None) if iname_tags and any( - isinstance(tag, (LocalIndexTag, GroupIndexTag)) for tag in iname_tags): + isinstance(tag, (LocalIndexTag, GroupIndexTag)) + for tag in iname_tags): conc_inames.add(iname) return conc_inames, all_inames-conc_inames def create_dependencies_from_legacy_knl_old(knl): from schedule_checker.schedule import Statement - from schedule_checker.dependency import ( - Dependency, - DependencyType as DT, - ) + dt = DependencyType conc_inames, non_conc_inames = get_concurrent_inames(knl) all_inames = list(knl.all_inames()) dep_sets = [] @@ -276,27 +287,26 @@ def create_dependencies_from_legacy_knl_old(knl): insn_before = knl.id_to_insn[insn_before_id] insn_before_inames = insn_before.within_inames insn_after_inames = insn_after.within_inames - #print("%s (%s) -> %s (%s)" % ( - # insn_before.id, insn_before_inames, insn_after.id, insn_after_inames)) shared_inames = insn_before_inames & insn_after_inames shared_conc_inames = shared_inames & conc_inames shared_non_conc_inames = shared_inames & non_conc_inames - #print("shared conc/non-conc %s/%s" % (shared_conc_inames, shared_non_conc_inames)) s_before = Statement(insn_before.id, all_inames) s_after = Statement(insn_after.id, all_inames) for non_conc_iname in shared_non_conc_inames: - dep_set.append(Dependency(s_before, s_after, DT.SAME, non_conc_iname)) + dep_set.append( + Dependency(s_before, s_after, dt.SAME, non_conc_iname)) for conc_iname in shared_conc_inames: - dep_set.append(Dependency(s_before, s_after, DT.ALL, conc_iname)) + dep_set.append( + Dependency(s_before, s_after, dt.ALL, conc_iname)) dep_sets.append(dep_set) return dep_sets def create_dependencies_from_legacy_knl(knl): from schedule_checker.schedule import Statement - DT = DependencyType + dt = DependencyType conc_inames, non_conc_inames = 
get_concurrent_inames(knl) - all_inames = list(knl.all_inames()) + #all_inames = list(knl.all_inames()) deps = [] for insn_after in knl.instructions: for insn_before_id in insn_after.depends_on: @@ -304,22 +314,20 @@ def create_dependencies_from_legacy_knl(knl): insn_before = knl.id_to_insn[insn_before_id] insn_before_inames = insn_before.within_inames insn_after_inames = insn_after.within_inames - #print("%s (%s) -> %s (%s)" % ( - # insn_before.id, insn_before_inames, insn_after.id, insn_after_inames)) shared_inames = insn_before_inames & insn_after_inames - non_shared_inames = (insn_before_inames | insn_after_inames) - shared_inames + non_shared_inames = (insn_before_inames | insn_after_inames + ) - shared_inames shared_conc_inames = shared_inames & conc_inames shared_non_conc_inames = shared_inames & non_conc_inames - #print("shared conc/non-conc %s/%s" % (shared_conc_inames, shared_non_conc_inames)) s_before = Statement(insn_before.id, insn_before_inames) s_after = Statement(insn_after.id, insn_after_inames) for non_conc_iname in shared_non_conc_inames: - iname_deps[non_conc_iname] = DT.SAME + iname_deps[non_conc_iname] = dt.SAME for conc_iname in shared_conc_inames: - iname_deps[conc_iname] = DT.ALL + iname_deps[conc_iname] = dt.ALL for non_shared_iname in non_shared_inames: - iname_deps[non_shared_iname] = DT.ALL + iname_deps[non_shared_iname] = dt.ALL deps.append(StatementDependency(s_before, s_after, iname_deps)) return deps -- GitLab From e1f2bee691b8530f8a26c298b7bd4bfa8c50fef3 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 24 Jun 2019 19:49:11 -0500 Subject: [PATCH 044/415] fixing pep8 issues --- schedule.py | 60 +++++++++++++++++++++++++++++++++-------------------- 1 file changed, 37 insertions(+), 23 deletions(-) diff --git a/schedule.py b/schedule.py index 2fa8eb54c..2371c40a1 100644 --- a/schedule.py +++ b/schedule.py @@ -43,21 +43,26 @@ class StatementInstance(object): class LexSchedule(object): - # TODO this should hold a map from statement instances to lex order space def __init__( self, knl, include_only_insn_ids=None, ): - self.lex_schedule = OrderedDict() # statement instance: lex point - self.inames_enumerated = [] # symbolic inames in sched that have been enumerated into explicit statement instances + self.lex_schedule = OrderedDict() # {statement instance: lex point} + + # symbolic inames in sched that have been enumerated + # into explicit statement instances + self.inames_enumerated = [] + self.inames_not_enumerated = [] # TODO better way to do this + self.lp_insnid_to_int_sid = {} + assert not any(iname == 's' for iname in knl.all_inames()) from loopy.schedule import (EnterLoop, LeaveLoop, Barrier, RunInstruction) next_insn_lex_pt = [0] - # TODO assumes perfect loop nesting + # TODO originally assumed perfect loop nesting, still the case? for sched_item in knl.schedule: if isinstance(sched_item, EnterLoop): iname = sched_item.iname @@ -70,17 +75,19 @@ class LexSchedule(object): next_insn_lex_pt.pop() next_insn_lex_pt[-1] = next_insn_lex_pt[-1]+1 elif isinstance(sched_item, RunInstruction): - if include_only_insn_ids is None or sched_item.insn_id in include_only_insn_ids: + if (include_only_insn_ids is None + or sched_item.insn_id in include_only_insn_ids): self.add_new_lp_insnid(sched_item.insn_id) insn_id_int = self.lp_insnid_to_int_sid[sched_item.insn_id] self.append_item((insn_id_int,), next_insn_lex_pt[:]) next_insn_lex_pt[-1] = next_insn_lex_pt[-1] + 1 elif isinstance(sched_item, Barrier): - # TODO barriers can be part of a dependency... 
how should these be handled? - if include_only_insn_ids is None or sched_item.originating_insn_id in include_only_insn_ids: + if (include_only_insn_ids is None + or sched_item.originating_insn_id in include_only_insn_ids): self.add_new_lp_insnid(sched_item.originating_insn_id) - insn_id_int = self.lp_insnid_to_int_sid[sched_item.originating_insn_id] + insn_id_int = self.lp_insnid_to_int_sid[ + sched_item.originating_insn_id] self.append_item((insn_id_int,), next_insn_lex_pt[:]) next_insn_lex_pt[-1] = next_insn_lex_pt[-1] + 1 @@ -89,7 +96,7 @@ class LexSchedule(object): self.pad_lex_pts_with_zeros() def max_lex_dims(self): - return max(len(lex_pt) for insn, lex_pt in self.items()) + return max(len(lex_pt) for insn, lex_pt in self.items()) def pad_lex_pts_with_zeros(self): max_lex_dim = self.max_lex_dims() @@ -103,8 +110,9 @@ class LexSchedule(object): iname_found = False for insn, lex_pt in self.lex_schedule.items(): if iname in lex_pt: - for v in range(bound[0],bound[1]): - new_sched[tuple(list(insn)+[v])] = [l if l != iname else v for l in lex_pt] + for v in range(bound[0], bound[1]): + new_sched[tuple(list(insn)+[v])] = [ + lx if lx != iname else v for lx in lex_pt] iname_found = True else: new_sched[insn] = lex_pt @@ -132,7 +140,8 @@ class LexSchedule(object): def add_new_lp_insnid(self, lp_insnid): if self.lp_insnid_to_int_sid: - self.lp_insnid_to_int_sid[lp_insnid] = max(self.lp_insnid_to_int_sid.values()) + 1 + self.lp_insnid_to_int_sid[lp_insnid] = max( + self.lp_insnid_to_int_sid.values()) + 1 else: self.lp_insnid_to_int_sid[lp_insnid] = 0 @@ -156,10 +165,12 @@ class LexSchedule(object): result = [] for dim_pts in zip(*self.lex_schedule.values()): if all(isinstance(pt, int) for pt in dim_pts): - result.append(max(dim_pts) + 1) # +1 because this is the non-inclusive upper bound + result.append(max(dim_pts) + 1) + # +1 b/c this is the non-inclusive upper bound else: assert all(pt == dim_pts[0] for pt in dim_pts) - result.append(var_bounds_dict[dim_pts[0]][1]) # upper bound for this variable + # append upper bound for this variable + result.append(var_bounds_dict[dim_pts[0]][1]) return result def get_min_lex_dim_vals(self, var_bounds_dict): @@ -171,7 +182,8 @@ class LexSchedule(object): result.append(min(dim_pts)) else: assert all(pt == dim_pts[0] for pt in dim_pts) - result.append(var_bounds_dict[dim_pts[0]][0]) # lower bound for this variable + # append lower bound for this variable + result.append(var_bounds_dict[dim_pts[0]][0]) return result def append_item(self, sched_item, lex_pt): @@ -184,7 +196,9 @@ class LexSchedule(object): return self.lex_schedule[self.get_last_schedule_item()] def create_explicit_isl_map(self, sched_space): - from schedule_checker.lexicographic_order_map import create_explicit_map_from_tuples + from schedule_checker.lexicographic_order_map import ( + create_explicit_map_from_tuples + ) return create_explicit_map_from_tuples(list(self.items()), sched_space) def enumerate_symbolic_inames_and_create_explicit_isl_map(self, iname_bounds): @@ -192,8 +206,8 @@ class LexSchedule(object): sched_space = self.get_space_for_explicit_sched() return self.create_explicit_isl_map(sched_space) - def create_symbolic_isl_map(self, domain, inames): #def create_symbolic_isl_map(self, dom_before, dom_after, inames): + def create_symbolic_isl_map(self, domain, inames): # TODO if inames will always match domain out vars, don't need to pass them from schedule_checker.lexicographic_order_map import ( create_symbolic_map_from_tuples, @@ -211,25 +225,25 @@ class LexSchedule(object): 
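# Illustrative sketch (hypothetical two-iname domain; not part of the patch):
# the add_dims_to_isl_set call just below prepends an unconstrained statement
# dimension 's' to the domain, roughly as follows.

import islpy as isl
from schedule_checker.sched_check_utils import add_dims_to_isl_set

dom = isl.Set("[p] -> { [i, j] : 0 <= i < p and 0 <= j < p }")
dom_with_s = add_dims_to_isl_set(dom, isl.dim_type.out, ["s"], 0)
# dom_with_s is roughly "[p] -> { [s, i, j] : 0 <= i < p and 0 <= j < p }",
# with s left unconstrained until the schedule map pins it down.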
sched_space = self.get_space_for_symbolic_sched() """ - # TODO maybe don't project this out, constraints may involve any iname later... + # TODO maybe don't project this out, constraints may involve any iname later? domain_stripped = domain_intersection.project_out_except( self.inames_not_enumerated, [isl.dim_type.set] ) """ # TODO first need to make sure statement var name isn't already being used - new_pose = 0 # insert 's' at beginning # TODO don't hardcode 's' + new_pose = 0 # insert 's' at beginning domain_to_intersect = add_dims_to_isl_set( - domain, isl.dim_type.out, ['s'], new_pose) # TODO don't hardcode 's' + domain, isl.dim_type.out, ['s'], new_pose) #dom_before_to_intersect = add_dims_to_isl_set( # dom_before, isl.dim_type.out, ['s'], new_pose) #dom_after_to_intersect = add_dims_to_isl_set( # dom_before, isl.dim_type.out, ['s'], new_pose) return create_symbolic_map_from_tuples( - list(self.items()), sched_space, domain_to_intersect) #list(self.items()), sched_space, #dom_before_to_intersect, dom_after_to_intersect) + list(self.items()), sched_space, domain_to_intersect) def get_lex_map_explicit(self): @@ -268,7 +282,8 @@ class LexSchedule(object): for state_inst, lex in self.lex_schedule.items(): domain_elem = "[s=%s,%s]" % ( state_inst.statement.sid, ",".join( - ["%s=%d" % (iname, val) for iname, val in state_inst.iname_vals.items()])) + ["%s=%d" % (iname, val) + for iname, val in state_inst.iname_vals.items()])) range_elem = "[%s]" % (",".join("%s" % (l) for l in lex)) map_str += "%s -> %s; " % (domain_elem, range_elem) map_str += "}" @@ -302,4 +317,3 @@ class LexSchedule(object): def __str__(self): return str(list(self.lex_schedule.items())) - -- GitLab From b3577727f05bc7f72ef1a0933b07d42ad738e101 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 24 Jun 2019 19:51:15 -0500 Subject: [PATCH 045/415] fixing pep8 issues --- lexicographic_order_map.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/lexicographic_order_map.py b/lexicographic_order_map.py index f4c51f68c..32c33cbf6 100644 --- a/lexicographic_order_map.py +++ b/lexicographic_order_map.py @@ -6,7 +6,7 @@ def make_lex_mapping_tuple_pairs(dim_bounds): import itertools # all lex tuples in order: lex_tuples = list( - itertools.product(*[range(l,u) for l,u in dim_bounds])) + itertools.product(*[range(l, u) for l, u in dim_bounds])) # goes up to u-1 because u is a non-inclusive upper bound # TODO: is itertools.product ordering guaranteed? 
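# Note on the TODO above: itertools.product's ordering is guaranteed -- it is
# documented to iterate like nested for-loops with the rightmost input
# advancing fastest, so ascending range() inputs yield lexicographic order.
# A self-contained sketch (the pairing step is one plausible reading of what
# make_lex_mapping_tuple_pairs is meant to return, not necessarily the exact
# implementation):

import itertools

dim_bounds = [(0, 2), (0, 2)]  # hypothetical bounds: 0 <= l0 < 2, 0 <= l1 < 2
lex_tuples = list(itertools.product(*[range(l, u) for l, u in dim_bounds]))
# lex_tuples == [(0, 0), (0, 1), (1, 0), (1, 1)]

# pair each point with every lexicographically later point
map_pairs = [(before, after)
             for i, before in enumerate(lex_tuples)
             for after in lex_tuples[i+1:]]
# map_pairs[:3] == [((0, 0), (0, 1)), ((0, 0), (1, 0)), ((0, 0), (1, 1))]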
@@ -47,7 +47,6 @@ def create_explicit_map_from_tuples(tuple_pairs, space): def create_symbolic_map_from_tuples(tuple_pairs, space, domain_to_intersect): dim_type = isl.dim_type - individual_maps = [] from schedule_checker.sched_check_utils import get_islvars_from_space #param_names = space.get_var_names(isl.dim_type.param) @@ -83,7 +82,8 @@ def create_symbolic_map_from_tuples(tuple_pairs, space, domain_to_intersect): len(in_names), len(out_names)) """ result_map_vars_in = result_map.space.get_var_names(isl.dim_type.in_) - domain_stripped = domain_to_intersect.project_out_except(result_map_vars_in, [isl.dim_type.set]) + domain_stripped = domain_to_intersect.project_out_except( + result_map_vars_in, [isl.dim_type.set]) return result_map.intersect_domain(domain_stripped) """ from schedule_checker.sched_check_utils import add_missing_set_dims_to_map_indims @@ -124,8 +124,10 @@ def set_space_names(space, param_names=None, in_names=None, out_names=None): def get_space(param_names, in_names, out_names): - space = isl.Space.alloc(isl.DEFAULT_CONTEXT, len(param_names), len(in_names), len(out_names)) - return set_space_names(space, param_names=param_names, in_names=in_names, out_names=out_names) + space = isl.Space.alloc( + isl.DEFAULT_CONTEXT, len(param_names), len(in_names), len(out_names)) + return set_space_names( + space, param_names=param_names, in_names=in_names, out_names=out_names) #TODO rename these functions for clarity @@ -170,5 +172,3 @@ def create_symbolic_lex_mapping( len(in_names), len(out_names)) return lex_map - - -- GitLab From b26b3b1479ec216566fd4b0def61200fd1eb16e0 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 24 Jun 2019 19:53:19 -0500 Subject: [PATCH 046/415] fixing pep8 issues --- sched_check_utils.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/sched_check_utils.py b/sched_check_utils.py index 48109a301..dd9d636ab 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -1,7 +1,9 @@ import islpy as isl + def prettier_map_string(isl_map): - return str(isl_map).replace("{ ", "{\n").replace(" }","\n}").replace("; ",";\n") + return str(isl_map + ).replace("{ ", "{\n").replace(" }", "\n}").replace("; ", ";\n") def flatten_2d_list(list2d): @@ -14,18 +16,23 @@ def get_islvars_from_space(space): out_names = space.get_var_names(isl.dim_type.out) return isl.make_zero_and_vars(in_names+out_names, param_names) + def add_dims_to_isl_set(isl_set, dim_type, names, new_pose_start): - new_set = isl_set.insert_dims(dim_type, new_pose_start, len(names)).set_dim_name(dim_type, new_pose_start, names[0]) + new_set = isl_set.insert_dims(dim_type, new_pose_start, len(names)).set_dim_name( + dim_type, new_pose_start, names[0]) for i, name in enumerate(names[1:]): new_set = new_set.set_dim_name(dim_type, new_pose_start+1+i, name) return new_set + def create_new_set_with_primes(old_set): new_set = old_set.copy() for i in range(old_set.n_dim()): - new_set = new_set.set_dim_name(isl.dim_type.out, i, old_set.get_dim_name(isl.dim_type.out, i)+"'") + new_set = new_set.set_dim_name(isl.dim_type.out, i, old_set.get_dim_name( + isl.dim_type.out, i)+"'") return new_set + def add_missing_set_dims_to_map_indims(islmap, islset): new_map = islmap.copy() for i in range(islset.n_dim()): -- GitLab From 6c6d08b881f5189c3120905632f05d9709f5582f Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 24 Jun 2019 20:07:21 -0500 Subject: [PATCH 047/415] added some todos to clean up examples --- dependency.py | 2 ++ example_dep_pairwise_schedule_creation.py | 1 + 
example_dependency_checking.py | 3 ++- example_lex_map_creation.py | 1 + example_schedule_creation_old.py | 3 +++ 5 files changed, 9 insertions(+), 1 deletion(-) diff --git a/dependency.py b/dependency.py index 07ad15112..a491ff347 100644 --- a/dependency.py +++ b/dependency.py @@ -70,6 +70,7 @@ def _convert_constraint_set_to_map(constraint_set, mv_count, src_position=None): dim_type.out, 0, dim_type.in_, mv_count, mv_count) +""" def create_dependency_constraint_old( dependencies, all_inames_ordered, @@ -154,6 +155,7 @@ def create_dependency_constraint_old( map_with_loop_domain_constraints = all_constraints_map.intersect_domain( domain_to_intersect).intersect_range(range_to_intersect) return map_with_loop_domain_constraints +""" def create_dependency_constraint( diff --git a/example_dep_pairwise_schedule_creation.py b/example_dep_pairwise_schedule_creation.py index 2f995eb3e..1befbce3b 100644 --- a/example_dep_pairwise_schedule_creation.py +++ b/example_dep_pairwise_schedule_creation.py @@ -406,6 +406,7 @@ print("is valid sched valid? constraint map subset of SIO?") print(sched_is_valid) +# TODO create example with simple explicit sched ''' all_inames = ['i', 'j'] iname_params = ['p0', 'p1'] diff --git a/example_dependency_checking.py b/example_dependency_checking.py index e394e779b..b9d2b96dc 100644 --- a/example_dependency_checking.py +++ b/example_dependency_checking.py @@ -1,4 +1,3 @@ -import islpy as isl import loopy as lp from schedule_checker.dependency import ( Dependency, @@ -18,6 +17,8 @@ from schedule_checker.sched_check_utils import ( append_apostrophes, ) +# TODO update these examples to work with dep code changes + # make example kernel knl = lp.make_kernel( "{[i,j]: 0<=i,j<2}", diff --git a/example_lex_map_creation.py b/example_lex_map_creation.py index d94d4b313..aebe48cd0 100644 --- a/example_lex_map_creation.py +++ b/example_lex_map_creation.py @@ -12,6 +12,7 @@ from schedule_checker.lexicographic_order_map import ( create_symbolic_lex_mapping, ) +# TODO update to work with new changes to lex code # *Symbolic* lexicographic mapping- map each tuple to all tuples occuring later diff --git a/example_schedule_creation_old.py b/example_schedule_creation_old.py index 47876b51c..53337ac65 100644 --- a/example_schedule_creation_old.py +++ b/example_schedule_creation_old.py @@ -21,6 +21,9 @@ from schedule_checker.sched_check_utils import ( order_var_names_to_match_islset, ) +# TODO either remove this file or update and keep as an example of full schedule creation +# (rather than the usual pairwise schedule creation) + knl_choice = "example" #knl_choice = "matmul" #knl_choice = "scan" -- GitLab From 12389aaf14e25ea55d6266d8987ad4b645556ed6 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 24 Jun 2019 20:47:44 -0500 Subject: [PATCH 048/415] removed unused function --- sched_check_utils.py | 21 --------------------- 1 file changed, 21 deletions(-) diff --git a/sched_check_utils.py b/sched_check_utils.py index dd9d636ab..e14c5e12c 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -61,27 +61,6 @@ def make_islvars_with_var_primes(var_names, param_names): var_names+append_apostrophes(var_names), param_names) -def _create_positive_set_with_bounds( - var_names, param_names, upper_bounds): - - # TODO assumes lower bound is zero - islvars = make_islvars_with_var_primes(var_names, param_names) - - bounded_set = islvars[0].eq_set(islvars[0]) # initialize to True - - for v, p, b in zip(var_names, param_names, upper_bounds): - # create constraint 0 <= v,v'< p = b - v_prime = v+"'" 
- bounded_set = bounded_set \ - & islvars[v].lt_set(islvars[p]) \ - & islvars[v_prime].lt_set(islvars[p]) \ - & (islvars[0]-1).lt_set(islvars[v]) \ - & (islvars[0]-1).lt_set(islvars[v_prime]) \ - & islvars[p].eq_set(islvars[0]+b) - - return bounded_set - - def append_apostrophes(strings): if not isinstance(strings, list): raise ValueError("append_apostrophes did not receive a list") -- GitLab From 1f81864635e0d1fbb6fee032c01c74ed0c0af9f1 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 24 Jun 2019 20:49:01 -0500 Subject: [PATCH 049/415] temporarily uncommenting out old dep creation function to keep old example working; will remove later --- dependency.py | 175 ++++++++++++++++----------------- example_dependency_checking.py | 4 +- 2 files changed, 89 insertions(+), 90 deletions(-) diff --git a/dependency.py b/dependency.py index a491ff347..7a35cd901 100644 --- a/dependency.py +++ b/dependency.py @@ -70,94 +70,6 @@ def _convert_constraint_set_to_map(constraint_set, mv_count, src_position=None): dim_type.out, 0, dim_type.in_, mv_count, mv_count) -""" -def create_dependency_constraint_old( - dependencies, - all_inames_ordered, - statement_var, - domain_constraint_set, - sid_to_int, - ): - from schedule_checker.sched_check_utils import ( - make_islvars_with_var_primes, - append_apostrophes, - add_dims_to_isl_set, - create_new_set_with_primes, - ) - # This function uses the dependencies given to create the following constraint: - # Statement [s,i,j] comes before statement [s',i',j'] iff - - # assumes statements are numbered sequentially - # (statement_bound = max statement id + 1) - - # make sure all dependencies involve same two statements - if len(set([dep.statement_before.sid for dep in dependencies])) != 1 or \ - len(set([dep.statement_after.sid for dep in dependencies])) != 1: - raise ValueError("All depencencies must be between same two statements.") - # make sure all dependencies involve different inames - if len(set([dep.iname for dep in dependencies])) != len(dependencies): - raise ValueError("All depencencies must apply to different inames.") - - statement_var_prime = statement_var+"'" - dt = DependencyType - islvars = make_islvars_with_var_primes( - [statement_var]+all_inames_ordered, - []) - - # initialize constraints to False - # this will disappear as soon as we add a constraint that is not dt.NONE - all_constraints_set = islvars[0].eq_set(islvars[0] + 1) - - for dep in dependencies: - iname = dep.iname - dep_type = dep.dep_type - if dep_type == dt.NONE: - continue - - iname_prime = iname+"'" # i' - other_inames = all_inames_ordered.copy() - other_inames.remove(iname) # remaining inames, e.g., [j, k] - other_inames_prime = append_apostrophes(other_inames) # e.g., [j', k'] - - # initialize constraint set with what we know about other inames - # (e.g., j = j', k = k') - constraint_set = create_equality_conjunction_set( - other_inames, other_inames_prime, islvars) - if dep_type == dt.SAME: - constraint_set = constraint_set & islvars[iname].eq_set( - islvars[iname_prime]) - elif dep_type == dt.PRIOR: - constraint_set = constraint_set & islvars[iname].lt_set( - islvars[iname_prime]) - elif dep_type == dt.ALL: - constraint_set = constraint_set & islvars[0].eq_set(islvars[0]) # True - - s_before_int = sid_to_int[dep.statement_before.sid] - s_after_int = sid_to_int[dep.statement_after.sid] - constraint_set = constraint_set & islvars[statement_var].eq_set( - islvars[0]+s_before_int) - constraint_set = constraint_set & islvars[statement_var_prime].eq_set( - islvars[0]+s_after_int) - - 
all_constraints_set = all_constraints_set | constraint_set - - all_constraints_map = _convert_constraint_set_to_map( - all_constraints_set, len(all_inames_ordered)+1) - - range_constraint_set = create_new_set_with_primes(domain_constraint_set) - - new_pose = 0 # insert 's' at beginning # TODO don't hardcode 's' - domain_to_intersect = add_dims_to_isl_set( - domain_constraint_set, isl.dim_type.out, ["s"], new_pose) - range_to_intersect = add_dims_to_isl_set( - range_constraint_set, isl.dim_type.out, ["s'"], new_pose) - - map_with_loop_domain_constraints = all_constraints_map.intersect_domain( - domain_to_intersect).intersect_range(range_to_intersect) - return map_with_loop_domain_constraints -""" - - def create_dependency_constraint( statement_dep, all_inames_ordered, @@ -333,3 +245,90 @@ def create_dependencies_from_legacy_knl(knl): deps.append(StatementDependency(s_before, s_after, iname_deps)) return deps + + +# TODO update previous calls to this to use new function, then remove this +def create_dependency_constraint_old( + dependencies, + all_inames_ordered, + statement_var, + domain_constraint_set, + sid_to_int, + ): + from schedule_checker.sched_check_utils import ( + make_islvars_with_var_primes, + append_apostrophes, + add_dims_to_isl_set, + create_new_set_with_primes, + ) + # This function uses the dependencies given to create the following constraint: + # Statement [s,i,j] comes before statement [s',i',j'] iff + + # assumes statements are numbered sequentially + # (statement_bound = max statement id + 1) + + # make sure all dependencies involve same two statements + if len(set([dep.statement_before.sid for dep in dependencies])) != 1 or \ + len(set([dep.statement_after.sid for dep in dependencies])) != 1: + raise ValueError("All depencencies must be between same two statements.") + # make sure all dependencies involve different inames + if len(set([dep.iname for dep in dependencies])) != len(dependencies): + raise ValueError("All depencencies must apply to different inames.") + + statement_var_prime = statement_var+"'" + dt = DependencyType + islvars = make_islvars_with_var_primes( + [statement_var]+all_inames_ordered, + []) + + # initialize constraints to False + # this will disappear as soon as we add a constraint that is not dt.NONE + all_constraints_set = islvars[0].eq_set(islvars[0] + 1) + + for dep in dependencies: + iname = dep.iname + dep_type = dep.dep_type + if dep_type == dt.NONE: + continue + + iname_prime = iname+"'" # i' + other_inames = all_inames_ordered.copy() + other_inames.remove(iname) # remaining inames, e.g., [j, k] + other_inames_prime = append_apostrophes(other_inames) # e.g., [j', k'] + + # initialize constraint set with what we know about other inames + # (e.g., j = j', k = k') + constraint_set = create_equality_conjunction_set( + other_inames, other_inames_prime, islvars) + if dep_type == dt.SAME: + constraint_set = constraint_set & islvars[iname].eq_set( + islvars[iname_prime]) + elif dep_type == dt.PRIOR: + constraint_set = constraint_set & islvars[iname].lt_set( + islvars[iname_prime]) + elif dep_type == dt.ALL: + constraint_set = constraint_set & islvars[0].eq_set(islvars[0]) # True + + s_before_int = sid_to_int[dep.statement_before.sid] + s_after_int = sid_to_int[dep.statement_after.sid] + constraint_set = constraint_set & islvars[statement_var].eq_set( + islvars[0]+s_before_int) + constraint_set = constraint_set & islvars[statement_var_prime].eq_set( + islvars[0]+s_after_int) + + all_constraints_set = all_constraints_set | constraint_set + + 
all_constraints_map = _convert_constraint_set_to_map( + all_constraints_set, len(all_inames_ordered)+1) + + range_constraint_set = create_new_set_with_primes(domain_constraint_set) + + new_pose = 0 # insert 's' at beginning # TODO don't hardcode 's' + domain_to_intersect = add_dims_to_isl_set( + domain_constraint_set, isl.dim_type.out, ["s"], new_pose) + range_to_intersect = add_dims_to_isl_set( + range_constraint_set, isl.dim_type.out, ["s'"], new_pose) + + map_with_loop_domain_constraints = all_constraints_map.intersect_domain( + domain_to_intersect).intersect_range(range_to_intersect) + return map_with_loop_domain_constraints diff --git a/example_dependency_checking.py b/example_dependency_checking.py index b9d2b96dc..d91facebd 100644 --- a/example_dependency_checking.py +++ b/example_dependency_checking.py @@ -2,7 +2,7 @@ import loopy as lp from schedule_checker.dependency import ( Dependency, DependencyType as DT, - create_dependency_constraint, + create_dependency_constraint_old, ) from schedule_checker.lexicographic_order_map import ( make_lex_mapping_tuple_pairs, @@ -140,7 +140,7 @@ deps = [ ] print([str(dep) for dep in deps]) -constraint_map = create_dependency_constraint( +constraint_map = create_dependency_constraint_old( deps, all_inames_ordered, statement_var, -- GitLab From b72cf2d8eaa267d7168c89a31964e0b9ecf27ca6 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 24 Jun 2019 20:49:55 -0500 Subject: [PATCH 050/415] clarifying make_lex_mapping_tuple_pairs() with some comments and better variable names --- lexicographic_order_map.py | 20 ++++++++++++++------ 1 file changed, 14 insertions(+), 6 deletions(-) diff --git a/lexicographic_order_map.py b/lexicographic_order_map.py index 32c33cbf6..ad99db010 100644 --- a/lexicographic_order_map.py +++ b/lexicographic_order_map.py @@ -3,6 +3,14 @@ import islpy as isl def make_lex_mapping_tuple_pairs(dim_bounds): + # Given list of integer dimension bound pairs + # [(lower0, upper0), (lower1, upper1) ... 
], + # create a list of tuple pairs [(x0, x1, ...), (y0, y1, ...)] + # representing a relation that maps from each point + # to every point that comes after that point in a lexicographic ordering + + # lower bounds are inclusive, upper bounds are exclusive + import itertools # all lex tuples in order: lex_tuples = list( @@ -155,18 +163,18 @@ def create_symbolic_lex_mapping( # create constraint enforcing lex ordering, e.g., in the 3-dim case: # i0 < o0 or ((i0 = o0) and (i1 < o1)) # or ((i0 = o0) and (i1 = o1) and (i2 < o2)) - lex_set_order_bound = islvars[in_names[0]].lt_set(islvars[out_names[0]]) + lex_order_constraint = islvars[in_names[0]].lt_set(islvars[out_names[0]]) for i in range(1, len(in_names)): - lex_set_order_bound_conj = islvars[in_names[i]].lt_set( + lex_order_constraint_conj = islvars[in_names[i]].lt_set( islvars[out_names[i]]) for j in range(i): - lex_set_order_bound_conj = lex_set_order_bound_conj & \ + lex_order_constraint_conj = lex_order_constraint_conj & \ islvars[in_names[j]].eq_set(islvars[out_names[j]]) - lex_set_order_bound = lex_set_order_bound | lex_set_order_bound_conj + lex_order_constraint = lex_order_constraint | lex_order_constraint_conj - #lex_set = lex_set_outer_bounds & lex_set_order_bound + #lex_set = lex_set_outer_bounds & lex_order_constraint #lex_map = isl.Map.from_domain(lex_set) - lex_map = isl.Map.from_domain(lex_set_order_bound) + lex_map = isl.Map.from_domain(lex_order_constraint) lex_map = lex_map.move_dims( dim_type.out, 0, dim_type.in_, len(in_names), len(out_names)) -- GitLab From 37475dcb8bd3a4dcfe3630c3d91f2bb1c3c2d930 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 24 Jun 2019 20:50:38 -0500 Subject: [PATCH 051/415] moving methods for creating explicit schedules to end of file; may remove soon --- schedule.py | 166 ++++++++++++++++++++++++---------------------------- 1 file changed, 78 insertions(+), 88 deletions(-) diff --git a/schedule.py b/schedule.py index 2371c40a1..306ffd5de 100644 --- a/schedule.py +++ b/schedule.py @@ -105,25 +105,6 @@ class LexSchedule(object): new_sched[insn] = lex_pt + [0]*(max_lex_dim-len(lex_pt)) self.lex_schedule = new_sched - def enumerate_iname(self, iname, bound): - new_sched = OrderedDict() - iname_found = False - for insn, lex_pt in self.lex_schedule.items(): - if iname in lex_pt: - for v in range(bound[0], bound[1]): - new_sched[tuple(list(insn)+[v])] = [ - lx if lx != iname else v for lx in lex_pt] - iname_found = True - else: - new_sched[insn] = lex_pt - self.lex_schedule = new_sched - if iname_found: - self.inames_enumerated.append(iname) - - def enumerate_inames(self, iname_bounds): - for iname, bound in iname_bounds.items(): - self.enumerate_iname(iname, bound) - def add_symbolic_inames_to_statement_instances(self, inames): for iname in inames: new_sched = OrderedDict() @@ -145,13 +126,6 @@ class LexSchedule(object): else: self.lp_insnid_to_int_sid[lp_insnid] = 0 - def get_space_for_explicit_sched(self): - params_sched = ["ps"] + ["p"+iname for iname in self.inames_enumerated] - in_names_sched = ["s"] + self.inames_enumerated - out_names_sched = ["l"+str(i) for i in range(self.max_lex_dims())] - from schedule_checker.lexicographic_order_map import get_space - return get_space(params_sched, in_names_sched, out_names_sched) - def get_space_for_symbolic_sched(self): params_sched = [] in_names_sched = ["s"] + self.inames_not_enumerated @@ -159,33 +133,6 @@ class LexSchedule(object): from schedule_checker.lexicographic_order_map import get_space return get_space(params_sched, in_names_sched, 
out_names_sched) - def get_max_lex_dim_bounds(self, var_bounds_dict): - # this only works for integer lex pts (no symbolic vars) - #return [max(dim_pts) for dim_pts in zip(*self.lex_schedule.values())] - result = [] - for dim_pts in zip(*self.lex_schedule.values()): - if all(isinstance(pt, int) for pt in dim_pts): - result.append(max(dim_pts) + 1) - # +1 b/c this is the non-inclusive upper bound - else: - assert all(pt == dim_pts[0] for pt in dim_pts) - # append upper bound for this variable - result.append(var_bounds_dict[dim_pts[0]][1]) - return result - - def get_min_lex_dim_vals(self, var_bounds_dict): - # this only works for integer lex pts (no symbolic vars) - #return [min(dim_pts) for dim_pts in zip(*self.lex_schedule.values())] - result = [] - for dim_pts in zip(*self.lex_schedule.values()): - if all(isinstance(pt, int) for pt in dim_pts): - result.append(min(dim_pts)) - else: - assert all(pt == dim_pts[0] for pt in dim_pts) - # append lower bound for this variable - result.append(var_bounds_dict[dim_pts[0]][0]) - return result - def append_item(self, sched_item, lex_pt): self.lex_schedule[sched_item] = lex_pt @@ -195,17 +142,6 @@ class LexSchedule(object): def get_last_lex_pt(self): return self.lex_schedule[self.get_last_schedule_item()] - def create_explicit_isl_map(self, sched_space): - from schedule_checker.lexicographic_order_map import ( - create_explicit_map_from_tuples - ) - return create_explicit_map_from_tuples(list(self.items()), sched_space) - - def enumerate_symbolic_inames_and_create_explicit_isl_map(self, iname_bounds): - self.enumerate_inames(iname_bounds) - sched_space = self.get_space_for_explicit_sched() - return self.create_explicit_isl_map(sched_space) - #def create_symbolic_isl_map(self, dom_before, dom_after, inames): def create_symbolic_isl_map(self, domain, inames): # TODO if inames will always match domain out vars, don't need to pass them @@ -245,30 +181,6 @@ class LexSchedule(object): #dom_before_to_intersect, dom_after_to_intersect) list(self.items()), sched_space, domain_to_intersect) - def get_lex_map_explicit(self): - - from schedule_checker.lexicographic_order_map import ( - make_lex_mapping_tuple_pairs, - create_explicit_map_from_tuples, - get_space, - ) - from schedule_checker.sched_check_utils import append_apostrophes - - # TODO lower bound may not be zero - lex_dim_bounds = list(zip(self.get_min_lex_dim_vals(), - self.get_max_lex_dim_vals())) - sched_space = self.get_space_for_explicit_sched() - - lex_in_names = sched_space.get_var_names(isl.dim_type.out) - lex_out_names = append_apostrophes(lex_in_names) - lex_params = [] - - explicit_lex_map_pairs = make_lex_mapping_tuple_pairs(lex_dim_bounds) - lex_space_explicit = get_space(lex_params, lex_in_names, lex_out_names) - - return create_explicit_map_from_tuples(explicit_lex_map_pairs, - lex_space_explicit) - def get_lex_map_symbolic(self): from schedule_checker.lexicographic_order_map import ( create_symbolic_lex_mapping, @@ -317,3 +229,81 @@ class LexSchedule(object): def __str__(self): return str(list(self.lex_schedule.items())) + + # Methods related to *explicit* schedule/map creation ------------------ + # TODO consider removing these + + def get_min_lex_dim_vals(self, var_bounds_dict): + + # this only works for integer lex pts (no symbolic vars): + #return [min(dim_pts) for dim_pts in zip(*self.lex_schedule.values())] + result = [] + for dim_pts in zip(*self.lex_schedule.values()): + if all(isinstance(pt, int) for pt in dim_pts): + result.append(min(dim_pts)) + else: + assert all(pt == 
dim_pts[0] for pt in dim_pts) + # append lower bound for this variable + result.append(var_bounds_dict[dim_pts[0]][0]) + return result + + def enumerate_iname(self, iname, bound): + new_sched = OrderedDict() + iname_found = False + for insn, lex_pt in self.lex_schedule.items(): + if iname in lex_pt: + for v in range(bound[0], bound[1]): + new_sched[tuple(list(insn)+[v])] = [ + lx if lx != iname else v for lx in lex_pt] + iname_found = True + else: + new_sched[insn] = lex_pt + self.lex_schedule = new_sched + if iname_found: + self.inames_enumerated.append(iname) + + def enumerate_inames(self, iname_bounds): + for iname, bound in iname_bounds.items(): + self.enumerate_iname(iname, bound) + + def get_space_for_explicit_sched(self): + params_sched = ["ps"] + ["p"+iname for iname in self.inames_enumerated] + in_names_sched = ["s"] + self.inames_enumerated + out_names_sched = ["l"+str(i) for i in range(self.max_lex_dims())] + from schedule_checker.lexicographic_order_map import get_space + return get_space(params_sched, in_names_sched, out_names_sched) + + def create_explicit_isl_map(self, sched_space): + from schedule_checker.lexicographic_order_map import ( + create_explicit_map_from_tuples + ) + return create_explicit_map_from_tuples(list(self.items()), sched_space) + + def enumerate_symbolic_inames_and_create_explicit_isl_map(self, iname_bounds): + self.enumerate_inames(iname_bounds) + sched_space = self.get_space_for_explicit_sched() + return self.create_explicit_isl_map(sched_space) + + def get_lex_map_explicit(self): + + from schedule_checker.lexicographic_order_map import ( + make_lex_mapping_tuple_pairs, + create_explicit_map_from_tuples, + get_space, + ) + from schedule_checker.sched_check_utils import append_apostrophes + + # TODO lower bound may not be zero + lex_dim_bounds = list(zip(self.get_min_lex_dim_vals(), + self.get_max_lex_dim_vals())) + sched_space = self.get_space_for_explicit_sched() + + lex_in_names = sched_space.get_var_names(isl.dim_type.out) + lex_out_names = append_apostrophes(lex_in_names) + lex_params = [] + + explicit_lex_map_pairs = make_lex_mapping_tuple_pairs(lex_dim_bounds) + lex_space_explicit = get_space(lex_params, lex_in_names, lex_out_names) + + return create_explicit_map_from_tuples(explicit_lex_map_pairs, + lex_space_explicit) -- GitLab From 6b4f07a41083808a0e05344e5452687fd8086f36 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 24 Jun 2019 21:19:11 -0500 Subject: [PATCH 052/415] converted dependency example to use new dependency class --- dependency.py | 88 +------------------------------- example_dependency_checking.py | 25 ++++----- example_schedule_creation_old.py | 3 +- 3 files changed, 16 insertions(+), 100 deletions(-) diff --git a/dependency.py b/dependency.py index 7a35cd901..f186f95e2 100644 --- a/dependency.py +++ b/dependency.py @@ -189,6 +189,7 @@ def get_concurrent_inames(knl): return conc_inames, all_inames-conc_inames +# TODO remove after updating example def create_dependencies_from_legacy_knl_old(knl): from schedule_checker.schedule import Statement dt = DependencyType @@ -245,90 +246,3 @@ def create_dependencies_from_legacy_knl(knl): deps.append(StatementDependency(s_before, s_after, iname_deps)) return deps - - -# TODO update previous calls to this to use new function, then remove this -def create_dependency_constraint_old( - dependencies, - all_inames_ordered, - statement_var, - domain_constraint_set, - sid_to_int, - ): - from schedule_checker.sched_check_utils import ( - make_islvars_with_var_primes, - append_apostrophes, - 
add_dims_to_isl_set, - create_new_set_with_primes, - ) - # This function uses the dependencies given to create the following constraint: - # Statement [s,i,j] comes before statement [s',i',j'] iff - - # assumes statements are numbered sequentially - # (statement_bound = max statement id + 1) - - # make sure all dependencies involve same two statements - if len(set([dep.statement_before.sid for dep in dependencies])) != 1 or \ - len(set([dep.statement_after.sid for dep in dependencies])) != 1: - raise ValueError("All depencencies must be between same two statements.") - # make sure all dependencies involve different inames - if len(set([dep.iname for dep in dependencies])) != len(dependencies): - raise ValueError("All depencencies must apply to different inames.") - - statement_var_prime = statement_var+"'" - dt = DependencyType - islvars = make_islvars_with_var_primes( - [statement_var]+all_inames_ordered, - []) - - # initialize constraints to False - # this will disappear as soon as we add a constraint that is not dt.NONE - all_constraints_set = islvars[0].eq_set(islvars[0] + 1) - - for dep in dependencies: - iname = dep.iname - dep_type = dep.dep_type - if dep_type == dt.NONE: - continue - - iname_prime = iname+"'" # i' - other_inames = all_inames_ordered.copy() - other_inames.remove(iname) # remaining inames, e.g., [j, k] - other_inames_prime = append_apostrophes(other_inames) # e.g., [j', k'] - - # initialize constraint set with what we know about other inames - # (e.g., j = j', k = k') - constraint_set = create_equality_conjunction_set( - other_inames, other_inames_prime, islvars) - if dep_type == dt.SAME: - constraint_set = constraint_set & islvars[iname].eq_set( - islvars[iname_prime]) - elif dep_type == dt.PRIOR: - constraint_set = constraint_set & islvars[iname].lt_set( - islvars[iname_prime]) - elif dep_type == dt.ALL: - constraint_set = constraint_set & islvars[0].eq_set(islvars[0]) # True - - s_before_int = sid_to_int[dep.statement_before.sid] - s_after_int = sid_to_int[dep.statement_after.sid] - constraint_set = constraint_set & islvars[statement_var].eq_set( - islvars[0]+s_before_int) - constraint_set = constraint_set & islvars[statement_var_prime].eq_set( - islvars[0]+s_after_int) - - all_constraints_set = all_constraints_set | constraint_set - - all_constraints_map = _convert_constraint_set_to_map( - all_constraints_set, len(all_inames_ordered)+1) - - range_constraint_set = create_new_set_with_primes(domain_constraint_set) - - new_pose = 0 # insert 's' at beginning # TODO don't hardcode 's' - domain_to_intersect = add_dims_to_isl_set( - domain_constraint_set, isl.dim_type.out, ["s"], new_pose) - range_to_intersect = add_dims_to_isl_set( - range_constraint_set, isl.dim_type.out, ["s'"], new_pose) - - map_with_loop_domain_constraints = all_constraints_map.intersect_domain( - domain_to_intersect).intersect_range(range_to_intersect) - return map_with_loop_domain_constraints diff --git a/example_dependency_checking.py b/example_dependency_checking.py index d91facebd..373cd7d9d 100644 --- a/example_dependency_checking.py +++ b/example_dependency_checking.py @@ -1,8 +1,8 @@ import loopy as lp from schedule_checker.dependency import ( - Dependency, + StatementDependency, DependencyType as DT, - create_dependency_constraint_old, + create_dependency_constraint, ) from schedule_checker.lexicographic_order_map import ( make_lex_mapping_tuple_pairs, @@ -130,21 +130,22 @@ print("domain union:") print(domain_union) # make some dependencies manually for now: -s0 = Statement("0", ["i", 
"j"]) -s1 = Statement("1", ["i", "j"]) +s0 = Statement("0", {"i", "j"}) +s1 = Statement("1", {"i", "j"}) insnid_to_int_sid = {"0": 0, "1": 1} -deps = [ - Dependency(s0, s1, DT.SAME, "i"), - Dependency(s0, s1, DT.SAME, "j"), - ] +statement_dep = StatementDependency(s0, s1, {"i": DT.SAME, "j": DT.SAME}) +print(statement_dep) +combined_doms = knl.get_inames_domain( + statement_dep.statement_before.active_inames | + statement_dep.statement_after.active_inames + ) -print([str(dep) for dep in deps]) -constraint_map = create_dependency_constraint_old( - deps, +constraint_map = create_dependency_constraint( + statement_dep, all_inames_ordered, statement_var, - domain_union, + combined_doms, insnid_to_int_sid, ) print("constraint map space:") diff --git a/example_schedule_creation_old.py b/example_schedule_creation_old.py index 53337ac65..3dd456b84 100644 --- a/example_schedule_creation_old.py +++ b/example_schedule_creation_old.py @@ -5,7 +5,7 @@ from schedule_checker.dependency import ( Dependency, DependencyType as DT, create_dependencies_from_legacy_knl_old, - create_dependency_constraint_old, + create_dependency_constraint, ) from schedule_checker.schedule import Statement, StatementInstance, LexSchedule from schedule_checker.sched_check_utils import prettier_map_string @@ -183,6 +183,7 @@ deps = [ #For every shared (between depender and dependee) concurrent iname Introduce an all dep print("----------------------------------------------------------------------") +# TODO use new version of this function dep_sets = create_dependencies_from_legacy_knl_old(knl) print("Dependency sets:") for dep_set in dep_sets: -- GitLab From f867424a365c5876d09b68c5aed7f7ffa2d410e9 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 24 Jun 2019 22:05:30 -0500 Subject: [PATCH 053/415] updated old sched creation example to use new StatementDependency class --- example_schedule_creation_old.py | 187 +++++-------------------------- 1 file changed, 30 insertions(+), 157 deletions(-) diff --git a/example_schedule_creation_old.py b/example_schedule_creation_old.py index 3dd456b84..944c17b93 100644 --- a/example_schedule_creation_old.py +++ b/example_schedule_creation_old.py @@ -1,18 +1,12 @@ -import islpy as isl import loopy as lp import numpy as np from schedule_checker.dependency import ( - Dependency, - DependencyType as DT, - create_dependencies_from_legacy_knl_old, + create_dependencies_from_legacy_knl, create_dependency_constraint, ) -from schedule_checker.schedule import Statement, StatementInstance, LexSchedule -from schedule_checker.sched_check_utils import prettier_map_string +from schedule_checker.schedule import LexSchedule from schedule_checker.lexicographic_order_map import ( - create_explicit_map_from_tuples, get_statement_ordering_map, - get_space, ) from schedule_checker.sched_check_utils import ( prettier_map_string, @@ -21,7 +15,7 @@ from schedule_checker.sched_check_utils import ( order_var_names_to_match_islset, ) -# TODO either remove this file or update and keep as an example of full schedule creation +# TODO either remove this file or update as an example of full sched creation # (rather than the usual pairwise schedule creation) knl_choice = "example" @@ -49,7 +43,8 @@ if knl_choice == "example": lang_version=(2018, 2) ) #knl = lp.add_and_infer_dtypes(knl, {"b": np.float32, "d": np.float32}) - knl = lp.add_and_infer_dtypes(knl, {"b": np.float32, "d": np.float32, "in": np.float32}) + knl = lp.add_and_infer_dtypes(knl, + {"b": np.float32, "d": np.float32, "in": np.float32}) knl = 
lp.tag_inames(knl, {"i": "l.0"}) knl = lp.preprocess_kernel(knl) knl = lp.get_one_scheduled_kernel(knl) @@ -68,7 +63,7 @@ elif knl_choice == "matmul": knl = lp.split_iname(knl, "i", bsize, outer_tag="g.0", inner_tag="l.1") knl = lp.split_iname(knl, "j", bsize, outer_tag="g.1", inner_tag="l.0") knl = lp.split_iname(knl, "k", bsize) - knl = lp.add_prefetch(knl, "a", ["k_inner", "i_inner"], default_tag="l.auto") + knl = lp.add_prefetch(knl, "a", ["k_inner", "i_inner"], default_tag="l.auto") knl = lp.add_prefetch(knl, "b", ["j_inner", "k_inner"], default_tag="l.auto") knl = lp.preprocess_kernel(knl) knl = lp.get_one_scheduled_kernel(knl) @@ -118,15 +113,6 @@ sched_map_symbolic = sched.create_symbolic_isl_map(domain_union, all_inames_orde print("LexSchedule after processing:") print(sched) # ------------------------------------------------------------------- -""" -from schedule_checker.sched_check_utils import (add_dims_to_isl_set) -new_pose = 0 # insert 's' at beginning # TODO don't hardcode 's' -domain_w_s = add_dims_to_isl_set( - domain_union, isl.dim_type.out, ['s'], new_pose) # TODO don't hardcode 's' -sched_map_vars_in = sched_map_symbolic.space.get_var_names(isl.dim_type.in_) -domain_stripped = domain_w_s.project_out_except(sched_map_vars_in, [isl.dim_type.set]) -""" -# ------------------------------------------------------------------- print("LexSched (valid):") print(prettier_map_string(sched_map_symbolic)) @@ -157,38 +143,23 @@ print(prettier_map_string(SIO_symbolic_valid)) print("space (statement instances -> statement instances):") print(SIO_symbolic_valid.space) - - -""" -# i is parallel, suppose we want to enforce the following: -# for a given i, statement 0 happens before statement 1 -# i dependency is none, j dependency is `prior` - -# make some dependencies manually for now: -s0 = Statement("0", ["i", "j"]) -s1 = Statement("1", ["i", "j"]) -s2 = Statement("2", ["i", "j"]) -#dep_s1_i = Dependency(s0, s1, DT.NONE, "i") -#dep_s1_j = Dependency(s0, s1, DT.PRIOR, "j") -#insn_to_deps = {"0":[], "1":[dep_s1_i, dep_s1_j], "2":[]} - -deps = [ - Dependency(s0, s1, DT.NONE, "i"), - Dependency(s0, s1, DT.PRIOR, "j"), - ] -""" - -#For every shared (between depender and dependee) non-concurrent iname Introduce a same dep +# For every shared (between depender and dependee) non-concurrent iname, +# Introduce a same dep # (Perform voodoo guesswork to determine whether a ‘prior’ dep is needed) -#For every shared (between depender and dependee) concurrent iname Introduce an all dep +# For every shared (between depender and dependee) concurrent iname, +# Introduce an all dep print("----------------------------------------------------------------------") # TODO use new version of this function -dep_sets = create_dependencies_from_legacy_knl_old(knl) -print("Dependency sets:") -for dep_set in dep_sets: - for dep in dep_set: - print(dep) +statement_deps = create_dependencies_from_legacy_knl(knl) +#print("Dependency sets:") +#for dep_set in dep_sets: +# for dep in dep_set: +# print(dep) +# print("") +print("Statement Dependencies:") +for sd in statement_deps: + print(sd) print("") print("----------------------------------------------------------------------") print("dict{lp insn id : sched sid int}:") @@ -196,13 +167,19 @@ print(sched.lp_insnid_to_int_sid) print("----------------------------------------------------------------------") statement_var = 's' sched_is_valid = True -for dep_set in dep_sets: - # TODO make create_dep_constraint accept whole set of dep_sets - constraint_map = 
create_dependency_constraint_old( - dep_set, +for statement_dep in statement_deps: + + # TODO is using this union in creating schedule (not deps) okay? + combined_doms = knl.get_inames_domain( + statement_dep.statement_before.active_inames | + statement_dep.statement_after.active_inames + ) + + constraint_map = create_dependency_constraint( + statement_dep, all_inames_ordered, statement_var, - domain_union, + combined_doms, sched.lp_insnid_to_int_sid, ) print("constraint map:") @@ -216,107 +193,3 @@ for dep_set in dep_sets: print("is valid sched valid? constraint map subset of SIO?") print(sched_is_valid) - - -''' -all_inames = ['i', 'j'] -iname_params = ['p0', 'p1'] -iname_param_vals = [2, 2] -statement_var = 's' -statement_param = 'ps' -statement_bound = 2 - - - -s0 = Statement("0", ["i", "j"]) -s1 = Statement("1", ["i", "j"]) -print("Statements:") -print(s0) -print(s1) - -s0_00 = StatementInstance(s0, {"i": 0, "j": 0}) -s0_10 = StatementInstance(s0, {"i": 1, "j": 0}) -s0_01 = StatementInstance(s0, {"i": 0, "j": 1}) -s0_11 = StatementInstance(s0, {"i": 1, "j": 1}) -s1_00 = StatementInstance(s1, {"i": 0, "j": 0}) -s1_10 = StatementInstance(s1, {"i": 1, "j": 0}) -s1_01 = StatementInstance(s1, {"i": 0, "j": 1}) -s1_11 = StatementInstance(s1, {"i": 1, "j": 1}) -print("Statement instances:") -print(s0_00) -print(s0_10) -print(s0_01) -print(s0_11) -print(s1_00) -print(s1_10) -print(s1_01) -print(s1_11) - -state_inst_to_lex_time_dict = { - s0_00: (0,0), - s1_00: (0,1), - s0_10: (0,0), - s1_10: (0,1), - s0_01: (1,0), - s1_01: (1,1), - s0_11: (1,0), - s1_11: (1,1), - } - -sched = LexSchedule(state_inst_to_lex_time_dict) -print("LexSchedule:") -print(sched) - -# sched map should be this: -schedule_explicit_map = isl.Map( - """{ - [s,i,j] -> [0,0] : s = 0 and i = 0 and j = 0; - [s,i,j] -> [0,1] : s = 1 and i = 0 and j = 0; - [s,i,j] -> [0,0] : s = 0 and i = 1 and j = 0; - [s,i,j] -> [0,1] : s = 1 and i = 1 and j = 0; - [s,i,j] -> [1,0] : s = 0 and i = 0 and j = 1; - [s,i,j] -> [1,1] : s = 1 and i = 0 and j = 1; - [s,i,j] -> [1,0] : s = 0 and i = 1 and j = 1; - [s,i,j] -> [1,1] : s = 1 and i = 1 and j = 1; - }""") - -schedule_general_map = isl.Map("{[s,i,j] -> [j,s]}") - -print("Map representing schedule generally:") -print(schedule_general_map) - -# the following is equivalent to explicit map above: -schedule_explicit_map2 = isl.Map( - """{ - [s=0,i=0,j=0] -> [0,0]; - [s=1,i=0,j=0] -> [0,1]; - [s=0,i=1,j=0] -> [0,0]; - [s=1,i=1,j=0] -> [0,1]; - [s=0,i=0,j=1] -> [1,0]; - [s=1,i=0,j=1] -> [1,1]; - [s=0,i=1,j=1] -> [1,0]; - [s=1,i=1,j=1] -> [1,1]; - }""") -assert schedule_explicit_map2 == schedule_explicit_map == sched.get_isl_map() - -''' - -""" -dep_i_same = Dependency(s0, s1, "i", DependencyType.SAME) -dep_i_none = Dependency(s0, s1, "i", DependencyType.NONE) -dep_i_prior = Dependency(s0, s1, "i", DependencyType.PRIOR) -dep_i_all = Dependency(s0, s1, "i", DependencyType.ALL) -dep_j_same = Dependency(s0, s1, "j", DependencyType.SAME) -dep_j_none = Dependency(s0, s1, "j", DependencyType.NONE) -dep_j_prior = Dependency(s0, s1, "j", DependencyType.PRIOR) -dep_j_all = Dependency(s0, s1, "j", DependencyType.ALL) -print("Example dependencies: ") -print(dep_i_same) -print(dep_i_none) -print(dep_i_prior) -print(dep_i_all) -print(dep_j_same) -print(dep_j_none) -print(dep_j_prior) -print(dep_j_all) -""" -- GitLab From 16616622cc442e07d99d49ab507205a92b66d451 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 24 Jun 2019 22:08:15 -0500 Subject: [PATCH 054/415] removed create_dependencies_from_legacy_knl_old 
--- dependency.py | 28 ---------------------------- 1 file changed, 28 deletions(-) diff --git a/dependency.py b/dependency.py index f186f95e2..53b2bae0d 100644 --- a/dependency.py +++ b/dependency.py @@ -189,34 +189,6 @@ def get_concurrent_inames(knl): return conc_inames, all_inames-conc_inames -# TODO remove after updating example -def create_dependencies_from_legacy_knl_old(knl): - from schedule_checker.schedule import Statement - dt = DependencyType - conc_inames, non_conc_inames = get_concurrent_inames(knl) - all_inames = list(knl.all_inames()) - dep_sets = [] - for insn_after in knl.instructions: - for insn_before_id in insn_after.depends_on: - dep_set = [] - insn_before = knl.id_to_insn[insn_before_id] - insn_before_inames = insn_before.within_inames - insn_after_inames = insn_after.within_inames - shared_inames = insn_before_inames & insn_after_inames - shared_conc_inames = shared_inames & conc_inames - shared_non_conc_inames = shared_inames & non_conc_inames - s_before = Statement(insn_before.id, all_inames) - s_after = Statement(insn_after.id, all_inames) - for non_conc_iname in shared_non_conc_inames: - dep_set.append( - Dependency(s_before, s_after, dt.SAME, non_conc_iname)) - for conc_iname in shared_conc_inames: - dep_set.append( - Dependency(s_before, s_after, dt.ALL, conc_iname)) - dep_sets.append(dep_set) - return dep_sets - - def create_dependencies_from_legacy_knl(knl): from schedule_checker.schedule import Statement dt = DependencyType -- GitLab From 2724a066954e0485a34b08c9a5a7920539d32efc Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 24 Jun 2019 22:12:22 -0500 Subject: [PATCH 055/415] removed old Dependency class --- dependency.py | 22 ----------- example_dep_pairwise_schedule_creation.py | 47 ----------------------- 2 files changed, 69 deletions(-) diff --git a/dependency.py b/dependency.py index 53b2bae0d..b736474f2 100644 --- a/dependency.py +++ b/dependency.py @@ -8,28 +8,6 @@ class DependencyType: ALL = "all" -# TODO remove old dep class -class Dependency(object): - def __init__( - self, - statement_before, - statement_after, - dep_type, - iname, - ): - self.statement_before = statement_before - self.statement_after = statement_after - self.dep_type = dep_type - self.iname = iname - - def __str__(self): - return "%s -> %s {%s dep: %s}" % ( - self.statement_before, - self.statement_after, - self.iname, - self.dep_type) - - class StatementDependency(object): def __init__( self, diff --git a/example_dep_pairwise_schedule_creation.py b/example_dep_pairwise_schedule_creation.py index 1befbce3b..bc6d59ee6 100644 --- a/example_dep_pairwise_schedule_creation.py +++ b/example_dep_pairwise_schedule_creation.py @@ -302,16 +302,6 @@ for statement_dep, dom_before, dom_after in deps_and_domains: print("LexSchedule after processing:") print(sched) # ------------------------------------------------------------------- - """ - from schedule_checker.sched_check_utils import (add_dims_to_isl_set) - new_pose = 0 # insert 's' at beginning # TODO don't hardcode 's' - domain_w_s = add_dims_to_isl_set( - domain_union, isl.dim_type.out, ['s'], new_pose) # TODO don't hardcode 's' - sched_map_vars_in = sched_map_symbolic.space.get_var_names(isl.dim_type.in_) - domain_stripped = domain_w_s.project_out_except( - sched_map_vars_in, [isl.dim_type.set]) - """ - # ------------------------------------------------------------------- print("LexSched (valid):") print(prettier_map_string(sched_map_symbolic)) @@ -341,24 +331,6 @@ for statement_dep, dom_before, dom_after in deps_and_domains: 
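# Illustrative sketch (an assumption about get_statement_ordering_map, whose
# body is not shown in this patch): the statement instance ordering (SIO)
# printed below can be thought of as the composition
# sched^-1 o lex_order o sched -- map a statement instance to its lex point,
# step to all lexicographically later points, then map back to statement
# instances. In islpy terms, roughly:

def statement_ordering_sketch(sched_map, lex_order_map):
    # sched_map: statement instance -> lex point
    # lex_order_map: lex point -> all lexicographically later lex points
    return sched_map.apply_range(lex_order_map).apply_range(sched_map.reverse())

# Schedule validity then reduces to constraint_map.is_subset(SIO), as checked
# further below.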
print(prettier_map_string(SIO_symbolic_valid)) print("SIO space (statement instances -> statement instances):") print(SIO_symbolic_valid.space) - """ - # i is parallel, suppose we want to enforce the following: - # for a given i, statement 0 happens before statement 1 - # i dependency is none, j dependency is `prior` - - # make some dependencies manually for now: - s0 = Statement("0", ["i", "j"]) - s1 = Statement("1", ["i", "j"]) - s2 = Statement("2", ["i", "j"]) - #dep_s1_i = Dependency(s0, s1, DT.NONE, "i") - #dep_s1_j = Dependency(s0, s1, DT.PRIOR, "j") - #insn_to_deps = {"0":[], "1":[dep_s1_i, dep_s1_j], "2":[]} - - deps = [ - Dependency(s0, s1, DT.NONE, "i"), - Dependency(s0, s1, DT.PRIOR, "j"), - ] - """ print("-"*85) print("dict{lp insn id : sched sid int}:") @@ -490,22 +462,3 @@ assert schedule_explicit_map2 == schedule_explicit_map == sched.get_isl_map() ''' -""" -dep_i_same = Dependency(s0, s1, "i", DependencyType.SAME) -dep_i_none = Dependency(s0, s1, "i", DependencyType.NONE) -dep_i_prior = Dependency(s0, s1, "i", DependencyType.PRIOR) -dep_i_all = Dependency(s0, s1, "i", DependencyType.ALL) -dep_j_same = Dependency(s0, s1, "j", DependencyType.SAME) -dep_j_none = Dependency(s0, s1, "j", DependencyType.NONE) -dep_j_prior = Dependency(s0, s1, "j", DependencyType.PRIOR) -dep_j_all = Dependency(s0, s1, "j", DependencyType.ALL) -print("Example dependencies: ") -print(dep_i_same) -print(dep_i_none) -print(dep_i_prior) -print(dep_i_all) -print(dep_j_same) -print(dep_j_none) -print(dep_j_prior) -print(dep_j_all) -""" -- GitLab From a9c3f748f9b0619d7c08e661ba1aebb9ca5af0d5 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 24 Jun 2019 22:17:00 -0500 Subject: [PATCH 056/415] fixing pep8 issues --- example_dependency_checking.py | 42 +++++++++++++++------------------- 1 file changed, 19 insertions(+), 23 deletions(-) diff --git a/example_dependency_checking.py b/example_dependency_checking.py index 373cd7d9d..c236cc144 100644 --- a/example_dependency_checking.py +++ b/example_dependency_checking.py @@ -1,14 +1,13 @@ import loopy as lp from schedule_checker.dependency import ( StatementDependency, - DependencyType as DT, + DependencyType as dt, create_dependency_constraint, ) from schedule_checker.lexicographic_order_map import ( make_lex_mapping_tuple_pairs, create_explicit_map_from_tuples, get_statement_ordering_map, - set_space_names, get_space, ) from schedule_checker.schedule import Statement @@ -17,7 +16,6 @@ from schedule_checker.sched_check_utils import ( append_apostrophes, ) -# TODO update these examples to work with dep code changes # make example kernel knl = lp.make_kernel( @@ -32,7 +30,6 @@ knl = lp.tag_inames(knl, {"i": "l.0"}) print("Kernel:") print(knl) -from schedule_checker.sched_check_utils import flatten_2d_list all_inames_ordered = ['i', 'j'] #all_inames_ordered = sorted(list(knl.all_inames())) statement_var = 's' @@ -50,14 +47,14 @@ sched_space = get_space(params_sched, in_names_sched, out_names_sched) example_sched_valid = create_explicit_map_from_tuples( [ - ((0,0,0), (0, 0)), - ((0,1,0), (0, 0)), - ((1,0,0), (0, 1)), - ((1,1,0), (0, 1)), - ((0,0,1), (1, 0)), - ((0,1,1), (1, 0)), - ((1,0,1), (1, 1)), - ((1,1,1), (1, 1)), + ((0, 0, 0), (0, 0)), + ((0, 1, 0), (0, 0)), + ((1, 0, 0), (0, 1)), + ((1, 1, 0), (0, 1)), + ((0, 0, 1), (1, 0)), + ((0, 1, 1), (1, 0)), + ((1, 0, 1), (1, 1)), + ((1, 1, 1), (1, 1)), ], sched_space, ) @@ -66,14 +63,14 @@ print(prettier_map_string(example_sched_valid)) example_sched_invalid = create_explicit_map_from_tuples( [ - ((0,0,0), (0, 0)), 
- ((0,1,0), (1, 1)), # these two are out of order, violation - ((1,0,0), (0, 1)), - ((1,1,0), (0, 1)), - ((0,0,1), (1, 0)), - ((0,1,1), (1, 0)), - ((1,0,1), (1, 1)), - ((1,1,1), (0, 0)), # these two are out of order, violation + ((0, 0, 0), (0, 0)), + ((0, 1, 0), (1, 1)), # these two are out of order, violation + ((1, 0, 0), (0, 1)), + ((1, 1, 0), (0, 1)), + ((0, 0, 1), (1, 0)), + ((0, 1, 1), (1, 0)), + ((1, 0, 1), (1, 1)), + ((1, 1, 1), (0, 0)), # these two are out of order, violation ], sched_space, ) @@ -82,7 +79,7 @@ print(prettier_map_string(example_sched_invalid)) # *Explicit* lexicographic mapping- map each tuple to all tuples occuring later print("---------------------------------------------------------------------------") -lex_dim_bounds = [(0,2), (0,2)] # max vals for each dim (e.g., 0 <= i0 < max0 ...) +lex_dim_bounds = [(0, 2), (0, 2)] # max vals for each dim (e.g., 0 <= i0 < max0 ...) lex_params = [] lex_in_names = out_names_sched lex_out_names = append_apostrophes(out_names_sched) @@ -134,7 +131,7 @@ s0 = Statement("0", {"i", "j"}) s1 = Statement("1", {"i", "j"}) insnid_to_int_sid = {"0": 0, "1": 1} -statement_dep = StatementDependency(s0, s1, {"i": DT.SAME, "j": DT.SAME}) +statement_dep = StatementDependency(s0, s1, {"i": dt.SAME, "j": dt.SAME}) print(statement_dep) combined_doms = knl.get_inames_domain( statement_dep.statement_before.active_inames | @@ -163,4 +160,3 @@ print(constraint_map.is_subset(SIO_explicit_valid)) print("is invalid sched valid?") print(constraint_map.is_subset(SIO_explicit_invalid)) #print(SIO_explicit_invalid.is_subset(constraint_map)) - -- GitLab From dda8f89abcd7c47a202393ba96dbc4cc5322c2a9 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 24 Jun 2019 22:47:53 -0500 Subject: [PATCH 057/415] renamed some functions to clarify difference between lex ordering map and schedule (statement->lex_pt map); also cleaned up some TODOs --- example_dep_pairwise_schedule_creation.py | 4 ++-- example_dependency_checking.py | 14 ++++---------- example_lex_map_creation.py | 19 ++++--------------- example_schedule_creation_old.py | 10 ++-------- lexicographic_order_map.py | 8 ++------ schedule.py | 16 ++++++++-------- 6 files changed, 22 insertions(+), 49 deletions(-) diff --git a/example_dep_pairwise_schedule_creation.py b/example_dep_pairwise_schedule_creation.py index bc6d59ee6..a667638ea 100644 --- a/example_dep_pairwise_schedule_creation.py +++ b/example_dep_pairwise_schedule_creation.py @@ -310,9 +310,9 @@ for statement_dep, dom_before, dom_after in deps_and_domains: # *Explicit* lexicographic mapping- map each tuple to all tuples occuring later print("-"*85) - #lex_map_explicit = sched.get_lex_map_explicit() + #lex_map_explicit = sched.get_explicit_sched_map() - lex_map_symbolic = sched.get_lex_map_symbolic() + lex_map_symbolic = sched.get_symbolic_sched_map() print("lex map symbolic:") print(prettier_map_string(lex_map_symbolic)) diff --git a/example_dependency_checking.py b/example_dependency_checking.py index c236cc144..bd083947c 100644 --- a/example_dependency_checking.py +++ b/example_dependency_checking.py @@ -1,11 +1,11 @@ import loopy as lp -from schedule_checker.dependency import ( +from schedule_checker.dependency import ( # noqa StatementDependency, DependencyType as dt, create_dependency_constraint, ) from schedule_checker.lexicographic_order_map import ( - make_lex_mapping_tuple_pairs, + make_lex_order_map_tuple_pairs, create_explicit_map_from_tuples, get_statement_ordering_map, get_space, @@ -84,7 +84,7 @@ lex_params = [] lex_in_names = 
out_names_sched lex_out_names = append_apostrophes(out_names_sched) -explicit_lex_map_pairs = make_lex_mapping_tuple_pairs(lex_dim_bounds) +explicit_lex_map_pairs = make_lex_order_map_tuple_pairs(lex_dim_bounds) # for pair in explicit_lex_map_pairs: # print(pair[0], pair[1]) lex_space_explicit = get_space(lex_params, lex_in_names, lex_out_names) @@ -119,12 +119,6 @@ domains = {} for iname in all_inames_ordered: domains[iname] = knl.get_inames_domain(iname) domains_list = list(domains.values()) -domain_union = domains_list[0] -#TODO is union the right thing to do here? -for dom in domains_list[1:]: - domain_union = domain_union.union(dom) -print("domain union:") -print(domain_union) # make some dependencies manually for now: s0 = Statement("0", {"i", "j"}) @@ -134,7 +128,7 @@ insnid_to_int_sid = {"0": 0, "1": 1} statement_dep = StatementDependency(s0, s1, {"i": dt.SAME, "j": dt.SAME}) print(statement_dep) combined_doms = knl.get_inames_domain( - statement_dep.statement_before.active_inames | + statement_dep.statement_before.active_inames | # noqa statement_dep.statement_after.active_inames ) diff --git a/example_lex_map_creation.py b/example_lex_map_creation.py index aebe48cd0..fc9482a9f 100644 --- a/example_lex_map_creation.py +++ b/example_lex_map_creation.py @@ -1,25 +1,16 @@ -import islpy as isl -from schedule_checker.dependency import ( - Dependency, - DependencyType as DT, -) from schedule_checker.lexicographic_order_map import ( - make_lex_mapping_tuple_pairs, create_explicit_map_from_tuples, get_statement_ordering_map, - set_space_names, get_space, - create_symbolic_lex_mapping, + create_symbolic_lex_order_map, ) -# TODO update to work with new changes to lex code - # *Symbolic* lexicographic mapping- map each tuple to all tuples occuring later #in_names = ["i", "j"] #out_names = append_apostrophes(in_names) -n_dims = 2 #len(in_names) -lex_map_symbolic = create_symbolic_lex_mapping( +n_dims = 2 # len(in_names) +lex_map_symbolic = create_symbolic_lex_order_map( n_dims) print("lex_map (symbolic):") print(lex_map_symbolic) @@ -29,7 +20,7 @@ print(lex_map_symbolic) """ dim_bounds = [(0,2), (0,2)] # max vals for each dim (e.g., 0 <= i0 < max0 ...) 
-explicit_lex_map_pairs = make_lex_mapping_tuple_pairs(dim_bounds) +explicit_lex_map_pairs = make_lex_order_map_tuple_pairs(dim_bounds) # for pair in explicit_lex_map_pairs: # print(pair[0], pair[1]) lex_map_explicit = create_explicit_map_from_tuples(explicit_lex_map_pairs, @@ -72,7 +63,6 @@ print("statement instance ordering explicit:") print(statement_instance_ordering_explicit) """ -# TODO figure out where these "p0 >= 2 and p1 >= 2" are coming from: statement_instance_ordering_symbolic = get_statement_ordering_map( example_sched, lex_map_symbolic) print("statement instance ordering symbolic:") @@ -137,4 +127,3 @@ statement_instance_ordering_explicit = get_statement_ordering_map( print("statement instance ordering explicit:") print(statement_instance_ordering_explicit) """ - diff --git a/example_schedule_creation_old.py b/example_schedule_creation_old.py index 944c17b93..2656062c9 100644 --- a/example_schedule_creation_old.py +++ b/example_schedule_creation_old.py @@ -121,9 +121,9 @@ print(sched_map_symbolic.space) # *Explicit* lexicographic mapping- map each tuple to all tuples occuring later print("---------------------------------------------------------------------------") -#lex_map_explicit = sched.get_lex_map_explicit() +#lex_map_explicit = sched.get_explicit_sched_map() -lex_map_symbolic = sched.get_lex_map_symbolic() +lex_map_symbolic = sched.get_symbolic_sched_map() print("lex map symbolic:") print(prettier_map_string(lex_map_symbolic)) @@ -150,13 +150,7 @@ print(SIO_symbolic_valid.space) # Introduce an all dep print("----------------------------------------------------------------------") -# TODO use new version of this function statement_deps = create_dependencies_from_legacy_knl(knl) -#print("Dependency sets:") -#for dep_set in dep_sets: -# for dep in dep_set: -# print(dep) -# print("") print("Statement Dependencies:") for sd in statement_deps: print(sd) diff --git a/lexicographic_order_map.py b/lexicographic_order_map.py index ad99db010..e9596b1bc 100644 --- a/lexicographic_order_map.py +++ b/lexicographic_order_map.py @@ -1,7 +1,7 @@ import islpy as isl -def make_lex_mapping_tuple_pairs(dim_bounds): +def make_lex_order_map_tuple_pairs(dim_bounds): # Given list of integer dimension bound pairs # [(lower0, upper0), (lower1, upper1) ... ], @@ -138,11 +138,7 @@ def get_space(param_names, in_names, out_names): space, param_names=param_names, in_names=in_names, out_names=out_names) -#TODO rename these functions for clarity -#(distinguish betwen map representing lex order from all before pts to all after pts -# from map representing a schedule -# from other things...) 
-def create_symbolic_lex_mapping( +def create_symbolic_lex_order_map( n_dims, in_names=None, out_names=None, diff --git a/schedule.py b/schedule.py index 306ffd5de..ad2c970d7 100644 --- a/schedule.py +++ b/schedule.py @@ -181,22 +181,22 @@ class LexSchedule(object): #dom_before_to_intersect, dom_after_to_intersect) list(self.items()), sched_space, domain_to_intersect) - def get_lex_map_symbolic(self): + def get_symbolic_sched_map(self): from schedule_checker.lexicographic_order_map import ( - create_symbolic_lex_mapping, + create_symbolic_lex_order_map, ) n_dims = self.max_lex_dims() - return create_symbolic_lex_mapping(n_dims) + return create_symbolic_lex_order_map(n_dims) def get_isl_map_str(self): map_str = "{" - for state_inst, lex in self.lex_schedule.items(): + for state_inst, lex_pt in self.lex_schedule.items(): domain_elem = "[s=%s,%s]" % ( state_inst.statement.sid, ",".join( ["%s=%d" % (iname, val) for iname, val in state_inst.iname_vals.items()])) - range_elem = "[%s]" % (",".join("%s" % (l) for l in lex)) + range_elem = "[%s]" % (",".join("%s" % (lx) for lx in lex_pt)) map_str += "%s -> %s; " % (domain_elem, range_elem) map_str += "}" #TODO return map not string @@ -284,10 +284,10 @@ class LexSchedule(object): sched_space = self.get_space_for_explicit_sched() return self.create_explicit_isl_map(sched_space) - def get_lex_map_explicit(self): + def get_explicit_sched_map(self): from schedule_checker.lexicographic_order_map import ( - make_lex_mapping_tuple_pairs, + make_lex_order_map_tuple_pairs, create_explicit_map_from_tuples, get_space, ) @@ -302,7 +302,7 @@ class LexSchedule(object): lex_out_names = append_apostrophes(lex_in_names) lex_params = [] - explicit_lex_map_pairs = make_lex_mapping_tuple_pairs(lex_dim_bounds) + explicit_lex_map_pairs = make_lex_order_map_tuple_pairs(lex_dim_bounds) lex_space_explicit = get_space(lex_params, lex_in_names, lex_out_names) return create_explicit_map_from_tuples(explicit_lex_map_pairs, -- GitLab From aea7333f680ae26823137c2bd8a4dec5ab825b61 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 24 Jun 2019 23:12:43 -0500 Subject: [PATCH 058/415] moved functions that belong in utils to utils --- dependency.py | 14 +-- example_dep_pairwise_schedule_creation.py | 2 +- example_dependency_checking.py | 4 +- example_lex_map_creation.py | 6 +- lexicographic_order_map.py | 106 ------------------- sched_check_utils.py | 118 ++++++++++++++++++++++ schedule.py | 16 +-- 7 files changed, 134 insertions(+), 132 deletions(-) diff --git a/dependency.py b/dependency.py index b736474f2..be5651177 100644 --- a/dependency.py +++ b/dependency.py @@ -154,20 +154,8 @@ def create_dependency_constraint( return map_with_loop_domain_constraints -def get_concurrent_inames(knl): - from loopy.kernel.data import LocalIndexTag, GroupIndexTag - conc_inames = set() - all_inames = knl.all_inames() - for iname in all_inames: - iname_tags = knl.iname_to_tags.get(iname, None) - if iname_tags and any( - isinstance(tag, (LocalIndexTag, GroupIndexTag)) - for tag in iname_tags): - conc_inames.add(iname) - return conc_inames, all_inames-conc_inames - - def create_dependencies_from_legacy_knl(knl): + from schedule_checker.sched_check_utils import get_concurrent_inames from schedule_checker.schedule import Statement dt = DependencyType conc_inames, non_conc_inames = get_concurrent_inames(knl) diff --git a/example_dep_pairwise_schedule_creation.py b/example_dep_pairwise_schedule_creation.py index a667638ea..0a69d569e 100644 --- a/example_dep_pairwise_schedule_creation.py +++ 
b/example_dep_pairwise_schedule_creation.py @@ -3,7 +3,6 @@ import numpy as np from schedule_checker.dependency import ( create_dependencies_from_legacy_knl, create_dependency_constraint, - get_concurrent_inames, ) from schedule_checker.schedule import LexSchedule from schedule_checker.lexicographic_order_map import ( @@ -11,6 +10,7 @@ from schedule_checker.lexicographic_order_map import ( get_statement_ordering_map, ) from schedule_checker.sched_check_utils import ( + get_concurrent_inames, prettier_map_string, order_var_names_to_match_islset, ) diff --git a/example_dependency_checking.py b/example_dependency_checking.py index bd083947c..4c9c29e8c 100644 --- a/example_dependency_checking.py +++ b/example_dependency_checking.py @@ -6,14 +6,14 @@ from schedule_checker.dependency import ( # noqa ) from schedule_checker.lexicographic_order_map import ( make_lex_order_map_tuple_pairs, - create_explicit_map_from_tuples, get_statement_ordering_map, - get_space, ) from schedule_checker.schedule import Statement from schedule_checker.sched_check_utils import ( prettier_map_string, append_apostrophes, + create_explicit_map_from_tuples, + get_space, ) diff --git a/example_lex_map_creation.py b/example_lex_map_creation.py index fc9482a9f..8cf947fb9 100644 --- a/example_lex_map_creation.py +++ b/example_lex_map_creation.py @@ -1,9 +1,11 @@ from schedule_checker.lexicographic_order_map import ( - create_explicit_map_from_tuples, get_statement_ordering_map, - get_space, create_symbolic_lex_order_map, ) +from schedule_checker.sched_check_utils import( + create_explicit_map_from_tuples, + get_space, +) # *Symbolic* lexicographic mapping- map each tuple to all tuples occuring later diff --git a/lexicographic_order_map.py b/lexicographic_order_map.py index e9596b1bc..7b34d190b 100644 --- a/lexicographic_order_map.py +++ b/lexicographic_order_map.py @@ -25,81 +25,6 @@ def make_lex_order_map_tuple_pairs(dim_bounds): return map_pairs -def create_explicit_map_from_tuples(tuple_pairs, space): - - dim_type = isl.dim_type - individual_maps = [] - - for tup_in, tup_out in tuple_pairs: - constraints = [] - for i, val_in in enumerate(tup_in): - constraints.append( - isl.Constraint.equality_alloc(space) - .set_coefficient_val(dim_type.in_, i, 1) - .set_constant_val(-1*val_in)) - for i, val_out in enumerate(tup_out): - constraints.append( - isl.Constraint.equality_alloc(space) - .set_coefficient_val(dim_type.out, i, 1) - .set_constant_val(-1*val_out)) - individual_maps.append( - isl.Map.universe(space).add_constraints(constraints)) - - union_map = individual_maps[0] - for m in individual_maps[1:]: - union_map = union_map.union(m) - - return union_map - - -def create_symbolic_map_from_tuples(tuple_pairs, space, domain_to_intersect): - - dim_type = isl.dim_type - - from schedule_checker.sched_check_utils import get_islvars_from_space - #param_names = space.get_var_names(isl.dim_type.param) - out_names = space.get_var_names(dim_type.out) - in_names = space.get_var_names(isl.dim_type.in_) - - islvars = get_islvars_from_space(space) - - # initialize set with constraint that is always false - constraints_set = islvars[0].eq_set(islvars[0] + 1) - for tup_in, tup_out in tuple_pairs: - # initialize set with constraint that is always true - constraint = islvars[0].eq_set(islvars[0]) - for i, val_in in enumerate(tup_in): - if isinstance(val_in, int): - constraint = constraint \ - & islvars[in_names[i]].eq_set(islvars[0]+val_in) - else: - constraint = constraint \ - & islvars[in_names[i]].eq_set(islvars[val_in]) - for i, val_out 
in enumerate(tup_out): - if isinstance(val_out, int): - constraint = constraint \ - & islvars[out_names[i]].eq_set(islvars[0]+val_out) - else: - constraint = constraint \ - & islvars[out_names[i]].eq_set(islvars[val_out]) - constraints_set = constraints_set | constraint - - result_map = isl.Map.from_domain(constraints_set) - result_map = result_map.move_dims( - dim_type.out, 0, dim_type.in_, - len(in_names), len(out_names)) - """ - result_map_vars_in = result_map.space.get_var_names(isl.dim_type.in_) - domain_stripped = domain_to_intersect.project_out_except( - result_map_vars_in, [isl.dim_type.set]) - return result_map.intersect_domain(domain_stripped) - """ - from schedule_checker.sched_check_utils import add_missing_set_dims_to_map_indims - # TODO make sure these always align properly - result_map = add_missing_set_dims_to_map_indims(result_map, domain_to_intersect) - return result_map.intersect_domain(domain_to_intersect) - - def get_statement_ordering_map(sched_map, lex_map): # statement ordering: # map each statement instance to all statement instances that occur later @@ -107,37 +32,6 @@ def get_statement_ordering_map(sched_map, lex_map): return sched_map.apply_range(lex_map).apply_range(sched_map.reverse()) -def set_space_names(space, param_names=None, in_names=None, out_names=None): - new_space = space.copy() - dim_type = isl.dim_type - if param_names: - for i, p in enumerate(param_names): - new_space = new_space.set_dim_name(dim_type.param, i, p) - else: - for i in range(len(space.get_var_names(dim_type.param))): - new_space = new_space.set_dim_name(dim_type.param, i, "p%d" % (i)) - if in_names: - for i, p in enumerate(in_names): - new_space = new_space.set_dim_name(dim_type.in_, i, p) - else: - for i in range(len(space.get_var_names(dim_type.in_))): - new_space = new_space.set_dim_name(dim_type.in_, i, "i%d" % (i)) - if out_names: - for i, p in enumerate(out_names): - new_space = new_space.set_dim_name(dim_type.out, i, p) - else: - for i in range(len(space.get_var_names(dim_type.out))): - new_space = new_space.set_dim_name(dim_type.out, i, "o%d" % (i)) - return new_space - - -def get_space(param_names, in_names, out_names): - space = isl.Space.alloc( - isl.DEFAULT_CONTEXT, len(param_names), len(in_names), len(out_names)) - return set_space_names( - space, param_names=param_names, in_names=in_names, out_names=out_names) - - def create_symbolic_lex_order_map( n_dims, in_names=None, diff --git a/sched_check_utils.py b/sched_check_utils.py index e14c5e12c..764edce26 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -100,3 +100,121 @@ def order_var_names_to_match_islset(var_names, islset): if v in var_names: names_ordered_to_match_islset.append(v) return names_ordered_to_match_islset + + +def create_explicit_map_from_tuples(tuple_pairs, space): + + dim_type = isl.dim_type + individual_maps = [] + + for tup_in, tup_out in tuple_pairs: + constraints = [] + for i, val_in in enumerate(tup_in): + constraints.append( + isl.Constraint.equality_alloc(space) + .set_coefficient_val(dim_type.in_, i, 1) + .set_constant_val(-1*val_in)) + for i, val_out in enumerate(tup_out): + constraints.append( + isl.Constraint.equality_alloc(space) + .set_coefficient_val(dim_type.out, i, 1) + .set_constant_val(-1*val_out)) + individual_maps.append( + isl.Map.universe(space).add_constraints(constraints)) + + union_map = individual_maps[0] + for m in individual_maps[1:]: + union_map = union_map.union(m) + + return union_map + + +def create_symbolic_map_from_tuples(tuple_pairs, space, 
domain_to_intersect): + + dim_type = isl.dim_type + + from schedule_checker.sched_check_utils import get_islvars_from_space + #param_names = space.get_var_names(isl.dim_type.param) + out_names = space.get_var_names(dim_type.out) + in_names = space.get_var_names(isl.dim_type.in_) + + islvars = get_islvars_from_space(space) + + # initialize set with constraint that is always false + constraints_set = islvars[0].eq_set(islvars[0] + 1) + for tup_in, tup_out in tuple_pairs: + # initialize set with constraint that is always true + constraint = islvars[0].eq_set(islvars[0]) + for i, val_in in enumerate(tup_in): + if isinstance(val_in, int): + constraint = constraint \ + & islvars[in_names[i]].eq_set(islvars[0]+val_in) + else: + constraint = constraint \ + & islvars[in_names[i]].eq_set(islvars[val_in]) + for i, val_out in enumerate(tup_out): + if isinstance(val_out, int): + constraint = constraint \ + & islvars[out_names[i]].eq_set(islvars[0]+val_out) + else: + constraint = constraint \ + & islvars[out_names[i]].eq_set(islvars[val_out]) + constraints_set = constraints_set | constraint + + result_map = isl.Map.from_domain(constraints_set) + result_map = result_map.move_dims( + dim_type.out, 0, dim_type.in_, + len(in_names), len(out_names)) + ''' + result_map_vars_in = result_map.space.get_var_names(isl.dim_type.in_) + domain_stripped = domain_to_intersect.project_out_except( + result_map_vars_in, [isl.dim_type.set]) + return result_map.intersect_domain(domain_stripped) + ''' + from schedule_checker.sched_check_utils import add_missing_set_dims_to_map_indims + # TODO make sure these always align properly + result_map = add_missing_set_dims_to_map_indims(result_map, domain_to_intersect) + return result_map.intersect_domain(domain_to_intersect) + + +def set_space_names(space, param_names=None, in_names=None, out_names=None): + new_space = space.copy() + dim_type = isl.dim_type + if param_names: + for i, p in enumerate(param_names): + new_space = new_space.set_dim_name(dim_type.param, i, p) + else: + for i in range(len(space.get_var_names(dim_type.param))): + new_space = new_space.set_dim_name(dim_type.param, i, "p%d" % (i)) + if in_names: + for i, p in enumerate(in_names): + new_space = new_space.set_dim_name(dim_type.in_, i, p) + else: + for i in range(len(space.get_var_names(dim_type.in_))): + new_space = new_space.set_dim_name(dim_type.in_, i, "i%d" % (i)) + if out_names: + for i, p in enumerate(out_names): + new_space = new_space.set_dim_name(dim_type.out, i, p) + else: + for i in range(len(space.get_var_names(dim_type.out))): + new_space = new_space.set_dim_name(dim_type.out, i, "o%d" % (i)) + return new_space + + +def get_space(param_names, in_names, out_names): + space = isl.Space.alloc( + isl.DEFAULT_CONTEXT, len(param_names), len(in_names), len(out_names)) + return set_space_names( + space, param_names=param_names, in_names=in_names, out_names=out_names) + +def get_concurrent_inames(knl): + from loopy.kernel.data import LocalIndexTag, GroupIndexTag + conc_inames = set() + all_inames = knl.all_inames() + for iname in all_inames: + iname_tags = knl.iname_to_tags.get(iname, None) + if iname_tags and any( + isinstance(tag, (LocalIndexTag, GroupIndexTag)) + for tag in iname_tags): + conc_inames.add(iname) + return conc_inames, all_inames-conc_inames diff --git a/schedule.py b/schedule.py index ad2c970d7..08de95c75 100644 --- a/schedule.py +++ b/schedule.py @@ -130,7 +130,7 @@ class LexSchedule(object): params_sched = [] in_names_sched = ["s"] + self.inames_not_enumerated out_names_sched = 
["l"+str(i) for i in range(self.max_lex_dims())] - from schedule_checker.lexicographic_order_map import get_space + from schedule_checker.sched_check_utils import get_space return get_space(params_sched, in_names_sched, out_names_sched) def append_item(self, sched_item, lex_pt): @@ -145,11 +145,9 @@ class LexSchedule(object): #def create_symbolic_isl_map(self, dom_before, dom_after, inames): def create_symbolic_isl_map(self, domain, inames): # TODO if inames will always match domain out vars, don't need to pass them - from schedule_checker.lexicographic_order_map import ( - create_symbolic_map_from_tuples, - ) from schedule_checker.sched_check_utils import ( - add_dims_to_isl_set + create_symbolic_map_from_tuples, + add_dims_to_isl_set ) domain_iname_order = domain.get_var_names(isl.dim_type.out) inames_ordered_to_match_domain = [] @@ -270,11 +268,11 @@ class LexSchedule(object): params_sched = ["ps"] + ["p"+iname for iname in self.inames_enumerated] in_names_sched = ["s"] + self.inames_enumerated out_names_sched = ["l"+str(i) for i in range(self.max_lex_dims())] - from schedule_checker.lexicographic_order_map import get_space + from schedule_checker.sched_check_utils import get_space return get_space(params_sched, in_names_sched, out_names_sched) def create_explicit_isl_map(self, sched_space): - from schedule_checker.lexicographic_order_map import ( + from schedule_checker.sched_check_utils import ( create_explicit_map_from_tuples ) return create_explicit_map_from_tuples(list(self.items()), sched_space) @@ -288,10 +286,12 @@ class LexSchedule(object): from schedule_checker.lexicographic_order_map import ( make_lex_order_map_tuple_pairs, + ) + from schedule_checker.sched_check_utils import ( create_explicit_map_from_tuples, get_space, + append_apostrophes ) - from schedule_checker.sched_check_utils import append_apostrophes # TODO lower bound may not be zero lex_dim_bounds = list(zip(self.get_min_lex_dim_vals(), -- GitLab From 204c0cdf137484a4173b2647646f32baf7891391 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 25 Jun 2019 20:46:57 -0500 Subject: [PATCH 059/415] renamed example --- ..._schedule_creation.py => example_pairwise_schedule_validity.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename example_dep_pairwise_schedule_creation.py => example_pairwise_schedule_validity.py (100%) diff --git a/example_dep_pairwise_schedule_creation.py b/example_pairwise_schedule_validity.py similarity index 100% rename from example_dep_pairwise_schedule_creation.py rename to example_pairwise_schedule_validity.py -- GitLab From 3a6d624f5931edf21c81a97b8fd6b7c3bdba63c2 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Wed, 26 Jun 2019 00:20:30 -0500 Subject: [PATCH 060/415] removed unnecessary functions, changed func/var names for clarity, added lots of comments --- dependency.py | 25 +++-- example_dependency_checking.py | 6 +- example_lex_map_creation.py | 6 +- example_pairwise_schedule_validity.py | 92 +++++++++--------- example_schedule_creation_old.py | 2 +- sched_check_utils.py | 44 ++++++--- schedule.py | 128 ++++++++++++++++++-------- 7 files changed, 196 insertions(+), 107 deletions(-) diff --git a/dependency.py b/dependency.py index be5651177..77440cb52 100644 --- a/dependency.py +++ b/dependency.py @@ -67,15 +67,14 @@ def create_dependency_constraint( # Statement [s,i,j] comes before statement [s',i',j'] iff # assumes statements are numbered sequentially - # (statement_bound = max statement id + 1) # make sure all dependencies involve different inames if 
len(set(statement_dep.iname_deps.keys())) != len( statement_dep.iname_deps.keys()): raise ValueError("All depencencies must apply to different inames.") + # create some isl vars to use, e.g., {s, i, j, s', i', j'} statement_var_prime = statement_var+"'" - dt = DependencyType islvars = make_islvars_with_var_primes( [statement_var]+all_inames_ordered, []) @@ -84,13 +83,15 @@ def create_dependency_constraint( # this will disappear as soon as we add a constraint that is not dt.NONE all_constraints_set = islvars[0].eq_set(islvars[0] + 1) + # determine which inames are shared between instructions before_inames = statement_dep.statement_before.active_inames after_inames = statement_dep.statement_after.active_inames shared_inames = before_inames & after_inames #non_shared_inames = (before_inames | after_inames) - shared_inames - # for each (iname, dep_type) pair, create a constraint, + # for each (iname, dep_type) pair, create 'happens before' constraint, # all_constraints_set will be the union of all these constraints + dt = DependencyType for iname, dep_type in statement_dep.iname_deps.items(): if dep_type == dt.NONE: continue @@ -101,15 +102,14 @@ def create_dependency_constraint( #other_inames.remove(iname) # remaining inames, e.g., [j, k] #other_inames_prime = append_apostrophes(other_inames) # e.g., [j', k'] - # remaining shared inames, e.g., [j, k] + # get inames shared between instructions that are not this iname other_shared_inames = list(shared_inames - {iname}) - other_shared_inames_prime = append_apostrophes(other_shared_inames) # e.g., [j', k'] # initialize constraint set with what we know about other shared inames # (e.g., j = j', k = k') - # will be True if no shared inames + # if no shared inames present, constraint_set will be True constraint_set = create_equality_conjunction_set( other_shared_inames, other_shared_inames_prime, islvars) if dep_type == dt.SAME: @@ -122,7 +122,7 @@ def create_dependency_constraint( constraint_set = constraint_set & islvars[0].eq_set( islvars[0]) # True - # enforce statement_var == statement # + # set statement_var == statement # s_before_int = sid_to_int[statement_dep.statement_before.sid] s_after_int = sid_to_int[statement_dep.statement_after.sid] constraint_set = constraint_set & islvars[statement_var].eq_set( @@ -130,11 +130,15 @@ def create_dependency_constraint( constraint_set = constraint_set & islvars[statement_var_prime].eq_set( islvars[0]+s_after_int) + # union this constraint_set with all_constraints_set all_constraints_set = all_constraints_set | constraint_set + # convert constraint set to map all_constraints_map = _convert_constraint_set_to_map( all_constraints_set, len(all_inames_ordered)+1) + # now apply domain to constraint variables + # TODO use separate domain for before and after insns? 
range_constraint_set = create_new_set_with_primes(domain_constraint_set) new_pose = 0 # insert 's' at beginning # TODO don't hardcode 's' @@ -155,6 +159,12 @@ def create_dependency_constraint( def create_dependencies_from_legacy_knl(knl): + # Compare insn.within inames for each insn involved in the dep + # For every shared, non-concurrent iname, introduce SAME dep + # (Perform voodoo guesswork to determine whether a ‘prior’ dep is needed) + # For every shared, concurrent iname, introduce an ALL dep + # For every non-shared iname, introduce pseudo-ALL dep + from schedule_checker.sched_check_utils import get_concurrent_inames from schedule_checker.schedule import Statement dt = DependencyType @@ -183,4 +193,5 @@ def create_dependencies_from_legacy_knl(knl): iname_deps[non_shared_iname] = dt.ALL deps.append(StatementDependency(s_before, s_after, iname_deps)) + return deps diff --git a/example_dependency_checking.py b/example_dependency_checking.py index 4c9c29e8c..c718d6562 100644 --- a/example_dependency_checking.py +++ b/example_dependency_checking.py @@ -13,7 +13,7 @@ from schedule_checker.sched_check_utils import ( prettier_map_string, append_apostrophes, create_explicit_map_from_tuples, - get_space, + get_isl_space, ) @@ -43,7 +43,7 @@ print("------------------------------------------------------------------------- params_sched = ['p0', 'p1'] in_names_sched = [statement_var]+all_inames_ordered out_names_sched = ['l0', 'l1'] -sched_space = get_space(params_sched, in_names_sched, out_names_sched) +sched_space = get_isl_space(params_sched, in_names_sched, out_names_sched) example_sched_valid = create_explicit_map_from_tuples( [ @@ -87,7 +87,7 @@ lex_out_names = append_apostrophes(out_names_sched) explicit_lex_map_pairs = make_lex_order_map_tuple_pairs(lex_dim_bounds) # for pair in explicit_lex_map_pairs: # print(pair[0], pair[1]) -lex_space_explicit = get_space(lex_params, lex_in_names, lex_out_names) +lex_space_explicit = get_isl_space(lex_params, lex_in_names, lex_out_names) lex_map_explicit = create_explicit_map_from_tuples(explicit_lex_map_pairs, lex_space_explicit) print("lex_map (explicit):") diff --git a/example_lex_map_creation.py b/example_lex_map_creation.py index 8cf947fb9..527d97869 100644 --- a/example_lex_map_creation.py +++ b/example_lex_map_creation.py @@ -4,7 +4,7 @@ from schedule_checker.lexicographic_order_map import ( ) from schedule_checker.sched_check_utils import( create_explicit_map_from_tuples, - get_space, + get_isl_space, ) # *Symbolic* lexicographic mapping- map each tuple to all tuples occuring later @@ -37,7 +37,7 @@ print(lex_map_explicit) param_names_sched = [] in_names_sched = ["s"] out_names_sched = ["i", "j"] -sched_space = get_space(param_names_sched, in_names_sched, out_names_sched) +sched_space = get_isl_space(param_names_sched, in_names_sched, out_names_sched) example_sched = create_explicit_map_from_tuples( [ #((0,), (2, 0, 0)), @@ -104,7 +104,7 @@ print(example_sched) param_names_sched = ["ps", "p0", "p1"] in_names_sched = ["s","i","j"] out_names_sched = ["l0","l1"] -sched_space = get_space(param_names_sched, in_names_sched, out_names_sched) +sched_space = get_isl_space(param_names_sched, in_names_sched, out_names_sched) example_sched = create_explicit_map_from_tuples( [ ((0,0,0), (0, 0)), diff --git a/example_pairwise_schedule_validity.py b/example_pairwise_schedule_validity.py index 0a69d569e..66225fadb 100644 --- a/example_pairwise_schedule_validity.py +++ b/example_pairwise_schedule_validity.py @@ -6,7 +6,6 @@ from schedule_checker.dependency 
import ( ) from schedule_checker.schedule import LexSchedule from schedule_checker.lexicographic_order_map import ( - #create_explicit_map_from_tuples, get_statement_ordering_map, ) from schedule_checker.sched_check_utils import ( @@ -15,6 +14,8 @@ from schedule_checker.sched_check_utils import ( order_var_names_to_match_islset, ) +# Choose kernel ---------------------------------------------------------- + #knl_choice = "example" #knl_choice = "matmul" knl_choice = "scan" @@ -25,7 +26,6 @@ knl_choice = "scan" #knl_choice = "nest" if knl_choice == "example": - # make example kernel knl = lp.make_kernel( #"{[i,j]: 0<=i<2 and 1<=j<3}", #"{[i,j]: pi_lo<=i lex time):") print(sched_map_symbolic.space) + print("-"*85) # *Explicit* lexicographic mapping- map each tuple to all tuples occuring later - print("-"*85) #lex_map_explicit = sched.get_explicit_sched_map() - lex_map_symbolic = sched.get_symbolic_sched_map() - + # get map representing lexicographic ordering + lex_order_map_symbolic = sched.get_lex_order_map_for_symbolic_sched() print("lex map symbolic:") - print(prettier_map_string(lex_map_symbolic)) + print(prettier_map_string(lex_order_map_symbolic)) print("space (lex time -> lex time):") - print(lex_map_symbolic.space) - - # Statement instance ordering + print(lex_order_map_symbolic.space) print("-"*85) - #SIO_explicit_valid = get_statement_ordering_map( + + # create statement instance ordering, + # maps each statement instance to all statement instances occuring later + #SIO_explicit = get_statement_ordering_map( # example_sched_explicit, lex_map_explicit) - #print("statement instance ordering explicit (valid_sched):") - #print(prettier_map_string(SIO_explicit_valid)) - SIO_symbolic_valid = get_statement_ordering_map( - sched_map_symbolic, lex_map_symbolic) - print("statement instance ordering symbolic (valid_sched):") - print(prettier_map_string(SIO_symbolic_valid)) + #print("statement instance ordering explicit:") + #print(prettier_map_string(SIO_explicit)) + SIO_symbolic = get_statement_ordering_map( + sched_map_symbolic, lex_order_map_symbolic) + print("statement instance ordering symbolic:") + print(prettier_map_string(SIO_symbolic)) print("SIO space (statement instances -> statement instances):") - print(SIO_symbolic_valid.space) - + print(SIO_symbolic.space) print("-"*85) + print("dict{lp insn id : sched sid int}:") print(sched.lp_insnid_to_int_sid) print("-"*85) + # create a map representing constraints from the dependency, + # maps each statement instance to all statement instances that must occur later statement_var = 's' constraint_map = create_dependency_constraint( statement_dep, @@ -352,8 +356,8 @@ for statement_dep, dom_before, dom_after in deps_and_domains: #print("space (statment instances -> statement instances):") #print(constraint_map.space) - assert constraint_map.space == SIO_symbolic_valid.space - if not constraint_map.is_subset(SIO_symbolic_valid): + assert constraint_map.space == SIO_symbolic.space + if not constraint_map.is_subset(SIO_symbolic): # TODO is this the right question? 
sched_is_valid = False conc_inames, non_conc_inames = get_concurrent_inames(knl) print("================ constraint check failure =================") @@ -365,16 +369,16 @@ for statement_dep, dom_before, dom_after in deps_and_domains: print("constraint map space (statment instances -> statement instances):") print(constraint_map.space) print("SIO space (statement instances -> statement instances):") - print(SIO_symbolic_valid.space) + print(SIO_symbolic.space) print("constraint map:") print(prettier_map_string(constraint_map)) print("statement instance ordering:") - print(prettier_map_string(SIO_symbolic_valid)) + print(prettier_map_string(SIO_symbolic)) print("{insn id -> sched sid int} dict:") print(sched.lp_insnid_to_int_sid) print("===========================================================") -print("is valid sched valid? constraint map subset of SIO?") +print("is sched valid? constraint map subset of SIO?") print(sched_is_valid) diff --git a/example_schedule_creation_old.py b/example_schedule_creation_old.py index 2656062c9..5f1af3a22 100644 --- a/example_schedule_creation_old.py +++ b/example_schedule_creation_old.py @@ -123,7 +123,7 @@ print(sched_map_symbolic.space) print("---------------------------------------------------------------------------") #lex_map_explicit = sched.get_explicit_sched_map() -lex_map_symbolic = sched.get_symbolic_sched_map() +lex_map_symbolic = sched.get_lex_order_map_for_symbolic_sched() print("lex map symbolic:") print(prettier_map_string(lex_map_symbolic)) diff --git a/sched_check_utils.py b/sched_check_utils.py index 764edce26..9be7ca4dd 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -94,6 +94,8 @@ def all_iname_domains_equal(knl): def order_var_names_to_match_islset(var_names, islset): + # returns subset of var_names found in islset in + # order matching the islset variables name_order = islset.get_var_names(isl.dim_type.out) names_ordered_to_match_islset = [] for v in name_order: @@ -129,7 +131,10 @@ def create_explicit_map_from_tuples(tuple_pairs, space): return union_map -def create_symbolic_map_from_tuples(tuple_pairs, space, domain_to_intersect): +def create_symbolic_isl_map_from_tuples(tuple_pairs, space, domain_to_intersect): + + # given a list of pairs of ((input), (output)) tuples, create an isl map + # and intersect that map with domain_to_intersect dim_type = isl.dim_type @@ -140,11 +145,16 @@ def create_symbolic_map_from_tuples(tuple_pairs, space, domain_to_intersect): islvars = get_islvars_from_space(space) + # loop through pairs and create a set that will later be converted to a map + # initialize set with constraint that is always false constraints_set = islvars[0].eq_set(islvars[0] + 1) for tup_in, tup_out in tuple_pairs: - # initialize set with constraint that is always true + + # initialize constraint with true constraint = islvars[0].eq_set(islvars[0]) + + # set values for 'in' dimension using tuple vals for i, val_in in enumerate(tup_in): if isinstance(val_in, int): constraint = constraint \ @@ -152,6 +162,7 @@ def create_symbolic_map_from_tuples(tuple_pairs, space, domain_to_intersect): else: constraint = constraint \ & islvars[in_names[i]].eq_set(islvars[val_in]) + # set values for 'out' dimension using tuple vals for i, val_out in enumerate(tup_out): if isinstance(val_out, int): constraint = constraint \ @@ -159,8 +170,11 @@ def create_symbolic_map_from_tuples(tuple_pairs, space, domain_to_intersect): else: constraint = constraint \ & islvars[out_names[i]].eq_set(islvars[val_out]) + + # union this constraint with 
full set of constraints constraints_set = constraints_set | constraint + # convert set to map by moving dimensions around result_map = isl.Map.from_domain(constraints_set) result_map = result_map.move_dims( dim_type.out, 0, dim_type.in_, @@ -171,6 +185,9 @@ def create_symbolic_map_from_tuples(tuple_pairs, space, domain_to_intersect): result_map_vars_in, [isl.dim_type.set]) return result_map.intersect_domain(domain_stripped) ''' + + # if there are any dimensions in domain_to_intersect that are missing from + # result_map, insert these dimensions so that we can intersect the domain from schedule_checker.sched_check_utils import add_missing_set_dims_to_map_indims # TODO make sure these always align properly result_map = add_missing_set_dims_to_map_indims(result_map, domain_to_intersect) @@ -201,20 +218,21 @@ def set_space_names(space, param_names=None, in_names=None, out_names=None): return new_space -def get_space(param_names, in_names, out_names): +def get_isl_space(param_names, in_names, out_names): space = isl.Space.alloc( isl.DEFAULT_CONTEXT, len(param_names), len(in_names), len(out_names)) return set_space_names( space, param_names=param_names, in_names=in_names, out_names=out_names) + def get_concurrent_inames(knl): - from loopy.kernel.data import LocalIndexTag, GroupIndexTag - conc_inames = set() - all_inames = knl.all_inames() - for iname in all_inames: - iname_tags = knl.iname_to_tags.get(iname, None) - if iname_tags and any( - isinstance(tag, (LocalIndexTag, GroupIndexTag)) - for tag in iname_tags): - conc_inames.add(iname) - return conc_inames, all_inames-conc_inames + from loopy.kernel.data import LocalIndexTag, GroupIndexTag + conc_inames = set() + all_inames = knl.all_inames() + for iname in all_inames: + iname_tags = knl.iname_to_tags.get(iname, None) + if iname_tags and any( + isinstance(tag, (LocalIndexTag, GroupIndexTag)) + for tag in iname_tags): + conc_inames.add(iname) + return conc_inames, all_inames-conc_inames diff --git a/schedule.py b/schedule.py index 08de95c75..9a46f3338 100644 --- a/schedule.py +++ b/schedule.py @@ -43,62 +43,100 @@ class StatementInstance(object): class LexSchedule(object): + + # contains a mapping of {statement instance: lex point} + def __init__( self, knl, include_only_insn_ids=None, ): - self.lex_schedule = OrderedDict() # {statement instance: lex point} + + # mapping of {statement instance: lex point} + self.lex_schedule = OrderedDict() # symbolic inames in sched that have been enumerated # into explicit statement instances self.inames_enumerated = [] - self.inames_not_enumerated = [] # TODO better way to do this + # symbolic inames added to statement instances in sched + # that have *not* been enumerated into explicit statement instances + self.inames_added_to_statement_instances = [] + # map from loopy insn_id strings to statement id ints self.lp_insnid_to_int_sid = {} + # since 's' will be used to represent statement numbering, make sure + # we don't have an iname named 's' + # TODO change to var less common than 's' and/or generate something unique? assert not any(iname == 's' for iname in knl.all_inames()) from loopy.schedule import (EnterLoop, LeaveLoop, Barrier, RunInstruction) + + # go through knl.schedule and generate self.lex_schedule + + # keep track of the next point in our lexicographic ordering + # initially this as a 1-d point with value 0 next_insn_lex_pt = [0] # TODO originally assumed perfect loop nesting, still the case? 
for sched_item in knl.schedule: if isinstance(sched_item, EnterLoop): iname = sched_item.iname - if self: + # if the schedule is empty, this is the first schedule item, so + # don't increment lex dim val enumerating items in current code block, + # otherwise, this loop is next item in current code block, so + # increment lex dim val enumerating items in current code block + if self.lex_schedule: # if the schedule is not empty + # this lex value will correspond to everything inside this loop + # we will add new lex dimensions to enuerate items inside loop next_insn_lex_pt[-1] = next_insn_lex_pt[-1]+1 + + # upon entering a loop, we enter a new (deeper) code block, so + # add one lex dimension for the loop variable, and + # add a second lex dim to enumerate code blocks within the new loop next_insn_lex_pt.append(iname) next_insn_lex_pt.append(0) elif isinstance(sched_item, LeaveLoop): + # upon leaving a loop, + # pop lex dimension for enumerating code blocks within this loop, and + # pop lex dimension for the loop variable, and + # increment lex dim val enumerating items in current code block next_insn_lex_pt.pop() next_insn_lex_pt.pop() next_insn_lex_pt[-1] = next_insn_lex_pt[-1]+1 - elif isinstance(sched_item, RunInstruction): + elif isinstance(sched_item, (RunInstruction, Barrier)): + if isinstance(sched_item, RunInstruction): + lp_insn_id = sched_item.insn_id + else: # Barrier + lp_insn_id = sched_item.originating_insn_id + + # if include_only_insn_ids list was passed, + # only process insns found in list, + # otherwise process all instructions if (include_only_insn_ids is None - or sched_item.insn_id in include_only_insn_ids): - self.add_new_lp_insnid(sched_item.insn_id) - insn_id_int = self.lp_insnid_to_int_sid[sched_item.insn_id] + or lp_insn_id in include_only_insn_ids): + # create an int representing this instruction and + # update the map from loopy insn_ids to statement ids + self.add_new_lp_insnid(lp_insn_id) + insn_id_int = self.lp_insnid_to_int_sid[lp_insn_id] - self.append_item((insn_id_int,), next_insn_lex_pt[:]) - next_insn_lex_pt[-1] = next_insn_lex_pt[-1] + 1 - elif isinstance(sched_item, Barrier): - if (include_only_insn_ids is None - or sched_item.originating_insn_id in include_only_insn_ids): - self.add_new_lp_insnid(sched_item.originating_insn_id) - insn_id_int = self.lp_insnid_to_int_sid[ - sched_item.originating_insn_id] + # add ((sid,), lex_pt) pair to lex schedule + self.lex_schedule[(insn_id_int,)] = next_insn_lex_pt[:] - self.append_item((insn_id_int,), next_insn_lex_pt[:]) + # increment lex dim val enumerating items in current code block next_insn_lex_pt[-1] = next_insn_lex_pt[-1] + 1 else: pass + + # at this point, lex_schedule may contain lex points missing dimensions, + # the values in these missing dims should be zero, so add them self.pad_lex_pts_with_zeros() def max_lex_dims(self): return max(len(lex_pt) for insn, lex_pt in self.items()) def pad_lex_pts_with_zeros(self): + # pad lex points with zeros so that all points have same number of dims max_lex_dim = self.max_lex_dims() new_sched = OrderedDict() for insn, lex_pt in self.items(): @@ -117,24 +155,29 @@ class LexSchedule(object): new_sched[insn] = lex_pt self.lex_schedule = new_sched if iname_found: - self.inames_not_enumerated.append(iname) + self.inames_added_to_statement_instances.append(iname) def add_new_lp_insnid(self, lp_insnid): + # create an int representing this instruction and + # update the map from loopy insn_ids to statement ids if self.lp_insnid_to_int_sid: 
self.lp_insnid_to_int_sid[lp_insnid] = max( self.lp_insnid_to_int_sid.values()) + 1 else: self.lp_insnid_to_int_sid[lp_insnid] = 0 - def get_space_for_symbolic_sched(self): + def get_isl_space_for_symbolic_sched(self): + # create an isl space + # {('s', ) -> + # (lexicographic ordering dims)} + params_sched = [] - in_names_sched = ["s"] + self.inames_not_enumerated + # TODO make "s" a variable for consistency + in_names_sched = ["s"] + self.inames_added_to_statement_instances + # TODO make "l" a variable for consistency out_names_sched = ["l"+str(i) for i in range(self.max_lex_dims())] - from schedule_checker.sched_check_utils import get_space - return get_space(params_sched, in_names_sched, out_names_sched) - - def append_item(self, sched_item, lex_pt): - self.lex_schedule[sched_item] = lex_pt + from schedule_checker.sched_check_utils import get_isl_space + return get_isl_space(params_sched, in_names_sched, out_names_sched) def get_last_schedule_item(self): return next(reversed(self.lex_schedule)) @@ -144,11 +187,17 @@ class LexSchedule(object): #def create_symbolic_isl_map(self, dom_before, dom_after, inames): def create_symbolic_isl_map(self, domain, inames): + + # create isl map representing lex schedule + # TODO if inames will always match domain out vars, don't need to pass them from schedule_checker.sched_check_utils import ( - create_symbolic_map_from_tuples, + create_symbolic_isl_map_from_tuples, add_dims_to_isl_set ) + + # Get all inames now in order to maintain list with consistent ordering + # This will help keep isl maps/sets compatible domain_iname_order = domain.get_var_names(isl.dim_type.out) inames_ordered_to_match_domain = [] for iname in domain_iname_order: @@ -157,14 +206,20 @@ class LexSchedule(object): self.add_symbolic_inames_to_statement_instances( inames_ordered_to_match_domain) - sched_space = self.get_space_for_symbolic_sched() + # create an isl space + # {('s', ) -> + # (lexicographic ordering dims)} + sched_space = self.get_isl_space_for_symbolic_sched() """ # TODO maybe don't project this out, constraints may involve any iname later? 
domain_stripped = domain_intersection.project_out_except( - self.inames_not_enumerated, + self.inames_added_to_statement_instances, [isl.dim_type.set] ) """ + + # insert 's' dim into domain so that its space allows for + # intersection with sched map later # TODO first need to make sure statement var name isn't already being used new_pose = 0 # insert 's' at beginning domain_to_intersect = add_dims_to_isl_set( @@ -174,12 +229,13 @@ class LexSchedule(object): #dom_after_to_intersect = add_dims_to_isl_set( # dom_before, isl.dim_type.out, ['s'], new_pose) - return create_symbolic_map_from_tuples( + # create isl map + return create_symbolic_isl_map_from_tuples( #list(self.items()), sched_space, #dom_before_to_intersect, dom_after_to_intersect) list(self.items()), sched_space, domain_to_intersect) - def get_symbolic_sched_map(self): + def get_lex_order_map_for_symbolic_sched(self): from schedule_checker.lexicographic_order_map import ( create_symbolic_lex_order_map, ) @@ -264,12 +320,12 @@ class LexSchedule(object): for iname, bound in iname_bounds.items(): self.enumerate_iname(iname, bound) - def get_space_for_explicit_sched(self): + def get_isl_space_for_explicit_sched(self): params_sched = ["ps"] + ["p"+iname for iname in self.inames_enumerated] in_names_sched = ["s"] + self.inames_enumerated out_names_sched = ["l"+str(i) for i in range(self.max_lex_dims())] - from schedule_checker.sched_check_utils import get_space - return get_space(params_sched, in_names_sched, out_names_sched) + from schedule_checker.sched_check_utils import get_isl_space + return get_isl_space(params_sched, in_names_sched, out_names_sched) def create_explicit_isl_map(self, sched_space): from schedule_checker.sched_check_utils import ( @@ -279,7 +335,7 @@ class LexSchedule(object): def enumerate_symbolic_inames_and_create_explicit_isl_map(self, iname_bounds): self.enumerate_inames(iname_bounds) - sched_space = self.get_space_for_explicit_sched() + sched_space = self.get_isl_space_for_explicit_sched() return self.create_explicit_isl_map(sched_space) def get_explicit_sched_map(self): @@ -289,21 +345,21 @@ class LexSchedule(object): ) from schedule_checker.sched_check_utils import ( create_explicit_map_from_tuples, - get_space, + get_isl_space, append_apostrophes ) # TODO lower bound may not be zero lex_dim_bounds = list(zip(self.get_min_lex_dim_vals(), self.get_max_lex_dim_vals())) - sched_space = self.get_space_for_explicit_sched() + sched_space = self.get_isl_space_for_explicit_sched() lex_in_names = sched_space.get_var_names(isl.dim_type.out) lex_out_names = append_apostrophes(lex_in_names) lex_params = [] explicit_lex_map_pairs = make_lex_order_map_tuple_pairs(lex_dim_bounds) - lex_space_explicit = get_space(lex_params, lex_in_names, lex_out_names) + lex_space_explicit = get_isl_space(lex_params, lex_in_names, lex_out_names) return create_explicit_map_from_tuples(explicit_lex_map_pairs, lex_space_explicit) -- GitLab From 37522e1e422d225b4c8fb23fa9824157c30bc962 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 1 Jul 2019 11:43:15 -0500 Subject: [PATCH 061/415] simplified legacy dep creation logic- got rid of PRIOR and ALL, now introducing SAME dep for set of shared non-concurrent inames; also renamed StatementDependency->StatementDependencySet (and related variable names) for clarity --- dependency.py | 97 +++++++++++---------------- example_dependency_checking.py | 12 ++-- example_pairwise_schedule_validity.py | 56 ++++++++-------- example_schedule_creation_old.py | 14 ++-- 4 files changed, 78 insertions(+), 101 
deletions(-) diff --git a/dependency.py b/dependency.py index 77440cb52..0cab987f7 100644 --- a/dependency.py +++ b/dependency.py @@ -4,30 +4,30 @@ import islpy as isl class DependencyType: NONE = "none" SAME = "same" - PRIOR = "prior" - ALL = "all" + #PRIOR = "prior" + #ALL = "all" -class StatementDependency(object): +class StatementDependencySet(object): def __init__( self, statement_before, statement_after, - iname_deps, # {iname: dep_type} + deps, # {dep_type: iname} ): self.statement_before = statement_before self.statement_after = statement_after - self.iname_deps = iname_deps + self.deps = deps def __str__(self): result = "%s --before->\n%s iff\n " % ( self.statement_before, self.statement_after) return result + " and\n ".join( - ["(%s dep: %s)" % (iname, dep_type) - for iname, dep_type in self.iname_deps.items()]) + ["(%s : %s)" % (dep_type, inames) + for dep_type, inames in self.deps.items()]) -def create_equality_conjunction_set(names0, names1, islvars): +def create_elementwise_equality_conjunction_set(names0, names1, islvars): # initialize set with constraint that is always true eq_set = islvars[0].eq_set(islvars[0]) @@ -49,7 +49,7 @@ def _convert_constraint_set_to_map(constraint_set, mv_count, src_position=None): def create_dependency_constraint( - statement_dep, + statement_dep_set, all_inames_ordered, statement_var, domain_constraint_set, @@ -68,11 +68,6 @@ def create_dependency_constraint( # assumes statements are numbered sequentially - # make sure all dependencies involve different inames - if len(set(statement_dep.iname_deps.keys())) != len( - statement_dep.iname_deps.keys()): - raise ValueError("All depencencies must apply to different inames.") - # create some isl vars to use, e.g., {s, i, j, s', i', j'} statement_var_prime = statement_var+"'" islvars = make_islvars_with_var_primes( @@ -83,48 +78,37 @@ def create_dependency_constraint( # this will disappear as soon as we add a constraint that is not dt.NONE all_constraints_set = islvars[0].eq_set(islvars[0] + 1) - # determine which inames are shared between instructions - before_inames = statement_dep.statement_before.active_inames - after_inames = statement_dep.statement_after.active_inames - shared_inames = before_inames & after_inames - #non_shared_inames = (before_inames | after_inames) - shared_inames - - # for each (iname, dep_type) pair, create 'happens before' constraint, + # for each (dep_type, inames) pair, create 'happens before' constraint, # all_constraints_set will be the union of all these constraints dt = DependencyType - for iname, dep_type in statement_dep.iname_deps.items(): + for dep_type, inames in statement_dep_set.deps.items(): if dep_type == dt.NONE: continue - iname_prime = iname+"'" # i' - - #other_inames = all_inames_ordered.copy() - #other_inames.remove(iname) # remaining inames, e.g., [j, k] - #other_inames_prime = append_apostrophes(other_inames) # e.g., [j', k'] + # need to put inames in a list so that order of inames and inames' + # matches when calling create_elementwise_equality_conj... 
+ if not isinstance(inames, list): + inames_list = list(inames) + else: + inames_list = inames[:] + inames_prime = append_apostrophes(inames_list) # e.g., [j', k'] - # get inames shared between instructions that are not this iname - other_shared_inames = list(shared_inames - {iname}) - other_shared_inames_prime = append_apostrophes(other_shared_inames) - # e.g., [j', k'] - - # initialize constraint set with what we know about other shared inames - # (e.g., j = j', k = k') - # if no shared inames present, constraint_set will be True - constraint_set = create_equality_conjunction_set( - other_shared_inames, other_shared_inames_prime, islvars) if dep_type == dt.SAME: - constraint_set = constraint_set & islvars[iname].eq_set( - islvars[iname_prime]) + constraint_set = create_elementwise_equality_conjunction_set( + inames_list, inames_prime, islvars) + """ + # TODO define these if useful, otherwise remove elif dep_type == dt.PRIOR: constraint_set = constraint_set & islvars[iname].lt_set( islvars[iname_prime]) elif dep_type == dt.ALL: constraint_set = constraint_set & islvars[0].eq_set( islvars[0]) # True + """ # set statement_var == statement # - s_before_int = sid_to_int[statement_dep.statement_before.sid] - s_after_int = sid_to_int[statement_dep.statement_after.sid] + s_before_int = sid_to_int[statement_dep_set.statement_before.sid] + s_after_int = sid_to_int[statement_dep_set.statement_after.sid] constraint_set = constraint_set & islvars[statement_var].eq_set( islvars[0]+s_before_int) constraint_set = constraint_set & islvars[statement_var_prime].eq_set( @@ -159,39 +143,34 @@ def create_dependency_constraint( def create_dependencies_from_legacy_knl(knl): - # Compare insn.within inames for each insn involved in the dep - # For every shared, non-concurrent iname, introduce SAME dep - # (Perform voodoo guesswork to determine whether a ‘prior’ dep is needed) - # For every shared, concurrent iname, introduce an ALL dep - # For every non-shared iname, introduce pseudo-ALL dep + # Introduce SAME dep for set of shared, non-concurrent inames from schedule_checker.sched_check_utils import get_concurrent_inames from schedule_checker.schedule import Statement dt = DependencyType conc_inames, non_conc_inames = get_concurrent_inames(knl) #all_inames = list(knl.all_inames()) - deps = [] + statement_dep_sets = [] for insn_after in knl.instructions: for insn_before_id in insn_after.depends_on: - iname_deps = {} + dep_dict = {} insn_before = knl.id_to_insn[insn_before_id] insn_before_inames = insn_before.within_inames insn_after_inames = insn_after.within_inames shared_inames = insn_before_inames & insn_after_inames - non_shared_inames = (insn_before_inames | insn_after_inames - ) - shared_inames - shared_conc_inames = shared_inames & conc_inames shared_non_conc_inames = shared_inames & non_conc_inames - s_before = Statement(insn_before.id, insn_before_inames) - s_after = Statement(insn_after.id, insn_after_inames) - for non_conc_iname in shared_non_conc_inames: - iname_deps[non_conc_iname] = dt.SAME + dep_dict[dt.SAME] = shared_non_conc_inames + """ for conc_iname in shared_conc_inames: - iname_deps[conc_iname] = dt.ALL + dep_dict[conc_iname] = dt.ALL for non_shared_iname in non_shared_inames: - iname_deps[non_shared_iname] = dt.ALL + dep_dict[non_shared_iname] = dt.ALL + """ - deps.append(StatementDependency(s_before, s_after, iname_deps)) + s_before = Statement(insn_before.id, insn_before_inames) + s_after = Statement(insn_after.id, insn_after_inames) + statement_dep_sets.append( + 
StatementDependencySet(s_before, s_after, dep_dict)) - return deps + return statement_dep_sets diff --git a/example_dependency_checking.py b/example_dependency_checking.py index c718d6562..739e01091 100644 --- a/example_dependency_checking.py +++ b/example_dependency_checking.py @@ -1,6 +1,6 @@ import loopy as lp from schedule_checker.dependency import ( # noqa - StatementDependency, + StatementDependencySet, DependencyType as dt, create_dependency_constraint, ) @@ -125,15 +125,15 @@ s0 = Statement("0", {"i", "j"}) s1 = Statement("1", {"i", "j"}) insnid_to_int_sid = {"0": 0, "1": 1} -statement_dep = StatementDependency(s0, s1, {"i": dt.SAME, "j": dt.SAME}) -print(statement_dep) +statement_dep_set = StatementDependencySet(s0, s1, {dt.SAME: ["i", "j"]}) +print(statement_dep_set) combined_doms = knl.get_inames_domain( - statement_dep.statement_before.active_inames | # noqa - statement_dep.statement_after.active_inames + statement_dep_set.statement_before.active_inames | # noqa + statement_dep_set.statement_after.active_inames ) constraint_map = create_dependency_constraint( - statement_dep, + statement_dep_set, all_inames_ordered, statement_var, combined_doms, diff --git a/example_pairwise_schedule_validity.py b/example_pairwise_schedule_validity.py index 66225fadb..cde2bfddb 100644 --- a/example_pairwise_schedule_validity.py +++ b/example_pairwise_schedule_validity.py @@ -16,13 +16,13 @@ from schedule_checker.sched_check_utils import ( # Choose kernel ---------------------------------------------------------- -#knl_choice = "example" +knl_choice = "example" #knl_choice = "matmul" -knl_choice = "scan" +#knl_choice = "scan" #knl_choice = "dependent_domain" -#knl_choice = "stroud" +#knl_choice = "stroud" # TODO invalid sched? #knl_choice = "add_barrier" -#knl_choice = "nop" #TODO +#knl_choice = "nop" #TODO nop not in sched... error #knl_choice = "nest" if knl_choice == "example": @@ -232,52 +232,50 @@ for sched_item in knl.schedule: print(sched_item) print("="*80) -# Create StatementDependency(s) from kernel dependencies ----------------- - -# Compare insn.within inames for each insn involved in the dep -# For every shared, non-concurrent iname, introduce SAME dep -# (Perform voodoo guesswork to determine whether a ‘prior’ dep is needed) -# For every shared, concurrent iname, introduce an ALL dep -# For every non-shared iname, introduce pseudo-ALL dep +# Create StatementDependencySet(s) from kernel dependencies ----------------- +# Introduce SAME dep for set of shared, non-concurrent inames print("-"*85) -statement_deps = create_dependencies_from_legacy_knl(knl) +statement_dep_sets = create_dependencies_from_legacy_knl(knl) print("Statement Dependencies:") -for sd in statement_deps: - print(sd) +for dep_set in statement_dep_sets: + print(dep_set) print("") # get separate domains for before.active_inames and after.active_inames deps_and_domains = [] -for sd in statement_deps: +for dep_set in statement_dep_sets: #deps_and_domains.append([ - # sd, knl.get_inames_domain( - # sd.statement_before.active_inames | sd.statement_after.active_inames)]) + # dep_set, knl.get_inames_domain( + # dep_set.statement_before.active_inames | + # dep_set.statement_after.active_inames)]) # TODO need to have separate domains for separate instructions? 
# ...domain for after distinct from before deps_and_domains.append([ - sd, - knl.get_inames_domain(sd.statement_before.active_inames), - knl.get_inames_domain(sd.statement_after.active_inames) + dep_set, + knl.get_inames_domain(dep_set.statement_before.active_inames), + knl.get_inames_domain(dep_set.statement_after.active_inames) ]) print("-"*85) print("StatementDependencies w/domains:") -for sd, dom_before, dom_after in deps_and_domains: - print(sd) +for dep_set, dom_before, dom_after in deps_and_domains: + print(dep_set) print(dom_before) print(dom_after) # For each dependency, create+test schedule containing pair of insns------ sched_is_valid = True -for statement_dep, dom_before, dom_after in deps_and_domains: +for statement_dep_set, dom_before, dom_after in deps_and_domains: + s_before = statement_dep_set.statement_before + s_after = statement_dep_set.statement_after # TODO separate dom for before and after insns # TODO is using this union in creating schedule (not deps) okay? combined_doms = knl.get_inames_domain( - statement_dep.statement_before.active_inames | - statement_dep.statement_after.active_inames + s_before.active_inames | + s_after.active_inames ) # Get all inames now in order to maintain list with consistent ordering @@ -289,8 +287,8 @@ for statement_dep, dom_before, dom_after in deps_and_domains: # Create a mapping of {statement instance: lex point} # including only instructions involved in this dependency sched = LexSchedule(knl, include_only_insn_ids=[ - statement_dep.statement_before.sid, - statement_dep.statement_after.sid + s_before.sid, + s_after.sid ]) print("-"*85) print("LexSchedule before processing:") @@ -343,7 +341,7 @@ for statement_dep, dom_before, dom_after in deps_and_domains: # maps each statement instance to all statement instances that must occur later statement_var = 's' constraint_map = create_dependency_constraint( - statement_dep, + statement_dep_set, all_inames_ordered, # TODO separate lists for separate doms? statement_var, combined_doms, # TODO separate domains for before/after @@ -363,7 +361,7 @@ for statement_dep, dom_before, dom_after in deps_and_domains: print("================ constraint check failure =================") print("constraint map not subset of SIO") print("dependency:") - print(statement_dep) + print(statement_dep_set) print("concurrent inames:", conc_inames) print("sequential inames:", non_conc_inames) print("constraint map space (statment instances -> statement instances):") diff --git a/example_schedule_creation_old.py b/example_schedule_creation_old.py index 5f1af3a22..c1589ec12 100644 --- a/example_schedule_creation_old.py +++ b/example_schedule_creation_old.py @@ -150,10 +150,10 @@ print(SIO_symbolic_valid.space) # Introduce an all dep print("----------------------------------------------------------------------") -statement_deps = create_dependencies_from_legacy_knl(knl) +statement_dep_sets = create_dependencies_from_legacy_knl(knl) print("Statement Dependencies:") -for sd in statement_deps: - print(sd) +for dep_set in statement_dep_sets: + print(dep_set) print("") print("----------------------------------------------------------------------") print("dict{lp insn id : sched sid int}:") @@ -161,16 +161,16 @@ print(sched.lp_insnid_to_int_sid) print("----------------------------------------------------------------------") statement_var = 's' sched_is_valid = True -for statement_dep in statement_deps: +for statement_dep_set in statement_dep_sets: # TODO is using this union in creating schedule (not deps) okay? 
combined_doms = knl.get_inames_domain( - statement_dep.statement_before.active_inames | - statement_dep.statement_after.active_inames + statement_dep_set.statement_before.active_inames | + statement_dep_set.statement_after.active_inames ) constraint_map = create_dependency_constraint( - statement_dep, + statement_dep_set, all_inames_ordered, statement_var, combined_doms, -- GitLab From 634ee89b76986ce5f19d6a02ce809d79dca4d13e Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sat, 6 Jul 2019 23:18:16 -0500 Subject: [PATCH 062/415] now keeping domains for insn_before and insn_after separate; insn instance tuples must have same space, so they require inames not present in insns/domains, so setting those inames in those statement instances to dummy parameter --- dependency.py | 54 +++++--- example_dependency_checking.py | 21 +++- example_pairwise_schedule_validity.py | 169 +++++++++++++++----------- example_schedule_creation_old.py | 33 +++-- sched_check_utils.py | 138 ++++++++++++++------- schedule.py | 94 +++++--------- 6 files changed, 302 insertions(+), 207 deletions(-) diff --git a/dependency.py b/dependency.py index 0cab987f7..810f2d62a 100644 --- a/dependency.py +++ b/dependency.py @@ -52,15 +52,16 @@ def create_dependency_constraint( statement_dep_set, all_inames_ordered, statement_var, - domain_constraint_set, - #dom_before_constraint_set, - #dom_after_constraint_set, + dom_before_constraint_set, + dom_after_constraint_set, sid_to_int, + unused_param_name, ): from schedule_checker.sched_check_utils import ( make_islvars_with_var_primes, append_apostrophes, add_dims_to_isl_set, + add_missing_dims_to_isl_set, create_new_set_with_primes, ) # This function uses the dependency given to create the following constraint: @@ -68,11 +69,21 @@ def create_dependency_constraint( # assumes statements are numbered sequentially - # create some isl vars to use, e.g., {s, i, j, s', i', j'} - statement_var_prime = statement_var+"'" + # create some (ordered) isl vars to use, e.g., {s, i, j, s', i', j'} islvars = make_islvars_with_var_primes( [statement_var]+all_inames_ordered, - []) + [unused_param_name]) + statement_var_prime = statement_var+"'" + + # get (ordered) list of unused before/after inames + inames_before_unused = [] + for iname in all_inames_ordered: + if iname not in dom_before_constraint_set.get_var_names(isl.dim_type.out): + inames_before_unused.append(iname) + inames_after_unused = [] + for iname in all_inames_ordered: + if iname not in dom_after_constraint_set.get_var_names(isl.dim_type.out): + inames_after_unused.append(iname + "'") # initialize constraints to False # this will disappear as soon as we add a constraint that is not dt.NONE @@ -106,6 +117,11 @@ def create_dependency_constraint( islvars[0]) # True """ + # set unused vars == unused dummy param + for iname in inames_before_unused+inames_after_unused: + constraint_set = constraint_set & islvars[iname].eq_set( + islvars[unused_param_name]) + # set statement_var == statement # s_before_int = sid_to_int[statement_dep_set.statement_before.sid] s_after_int = sid_to_int[statement_dep_set.statement_after.sid] @@ -119,26 +135,30 @@ def create_dependency_constraint( # convert constraint set to map all_constraints_map = _convert_constraint_set_to_map( - all_constraints_set, len(all_inames_ordered)+1) + all_constraints_set, len(all_inames_ordered) + 1) # +1 for statement var - # now apply domain to constraint variables + # now apply domain sets to constraint variables - # TODO use separate domain for before and after insns? 
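The commit message above explains the key idea of this patch: all statement-instance tuples must live in one common isl space, so an iname that does not occur in a particular statement's domain still appears as an input dimension and gets pinned to a dummy parameter rather than left unconstrained. A small isolated sketch of that pinning, with made-up iname names and the dummy parameter called unused as in the patch:

import islpy as isl

# statement id, two inames, and the dummy parameter
v = isl.make_zero_and_vars(["statement", "i", "j"], ["unused"])

# suppose statement 1 iterates over i only; j is not in its domain,
# so it is fixed to the dummy parameter instead of ranging freely
insn1_instances = (
    v["statement"].eq_set(v[0] + 1)
    & (v[0] - 1).lt_set(v["i"])     # 0 <= i, written as -1 < i
    & v["i"].lt_set(v[0] + 4)       # i < 4
    & v["j"].eq_set(v["unused"]))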
- range_constraint_set = create_new_set_with_primes(domain_constraint_set) + # add statement variable to doms to enable intersection new_pose = 0 # insert 's' at beginning # TODO don't hardcode 's' domain_to_intersect = add_dims_to_isl_set( - domain_constraint_set, isl.dim_type.out, ["s"], new_pose) + dom_before_constraint_set, isl.dim_type.out, ["s"], new_pose) + range_constraint_set = create_new_set_with_primes(dom_after_constraint_set) range_to_intersect = add_dims_to_isl_set( range_constraint_set, isl.dim_type.out, ["s'"], new_pose) - #new_pose = 0 # insert 's' at beginning # TODO don't hardcode 's' - #domain_to_intersect = add_dims_to_isl_set( - # dom_before_constraint_set, isl.dim_type.out, ["s"], new_pose) - #range_constraint_set = create_new_set_with_primes(dom_after_constraint_set) - #range_to_intersect = add_dims_to_isl_set( - # range_constraint_set, isl.dim_type.out, ["s'"], new_pose) + # insert inames missing from doms to enable intersection + domain_to_intersect = add_missing_dims_to_isl_set( + domain_to_intersect, isl.dim_type.out, ["s"] + all_inames_ordered) + range_to_intersect = add_missing_dims_to_isl_set( + range_to_intersect, + isl.dim_type.out, + append_apostrophes(["s"] + all_inames_ordered)) + + # intersect doms map_with_loop_domain_constraints = all_constraints_map.intersect_domain( domain_to_intersect).intersect_range(range_to_intersect) + return map_with_loop_domain_constraints diff --git a/example_dependency_checking.py b/example_dependency_checking.py index 739e01091..1168ddd46 100644 --- a/example_dependency_checking.py +++ b/example_dependency_checking.py @@ -30,8 +30,8 @@ knl = lp.tag_inames(knl, {"i": "l.0"}) print("Kernel:") print(knl) -all_inames_ordered = ['i', 'j'] -#all_inames_ordered = sorted(list(knl.all_inames())) +all_necessary_inames_ordered = ['i', 'j'] +#all_necessary_inames_ordered = sorted(list(knl.all_inames())) statement_var = 's' # example sched: @@ -41,7 +41,7 @@ print("------------------------------------------------------------------------- # for a given i, statement 0 happens before statement 1 params_sched = ['p0', 'p1'] -in_names_sched = [statement_var]+all_inames_ordered +in_names_sched = [statement_var]+all_necessary_inames_ordered out_names_sched = ['l0', 'l1'] sched_space = get_isl_space(params_sched, in_names_sched, out_names_sched) @@ -114,9 +114,10 @@ print("----------------------------------------------------------------------") # i dependency is none, j dependency is `prior` statement_var = 's' +unused_param_name = 'unused' domains = {} -for iname in all_inames_ordered: +for iname in all_necessary_inames_ordered: domains[iname] = knl.get_inames_domain(iname) domains_list = list(domains.values()) @@ -131,13 +132,21 @@ combined_doms = knl.get_inames_domain( statement_dep_set.statement_before.active_inames | # noqa statement_dep_set.statement_after.active_inames ) +dom_before = knl.get_inames_domain( + statement_dep_set.statement_before.active_inames + ) +dom_after = knl.get_inames_domain( + statement_dep_set.statement_after.active_inames + ) constraint_map = create_dependency_constraint( statement_dep_set, - all_inames_ordered, + all_necessary_inames_ordered, statement_var, - combined_doms, + dom_before, + dom_after, insnid_to_int_sid, + unused_param_name=unused_param_name, ) print("constraint map space:") print(constraint_map.space) diff --git a/example_pairwise_schedule_validity.py b/example_pairwise_schedule_validity.py index cde2bfddb..ec17c93df 100644 --- a/example_pairwise_schedule_validity.py +++ 
b/example_pairwise_schedule_validity.py @@ -20,34 +20,43 @@ knl_choice = "example" #knl_choice = "matmul" #knl_choice = "scan" #knl_choice = "dependent_domain" -#knl_choice = "stroud" # TODO invalid sched? +#knl_choice = "stroud" # invalid sched? #knl_choice = "add_barrier" #knl_choice = "nop" #TODO nop not in sched... error -#knl_choice = "nest" +#knl_choice = "nest_multi_dom" if knl_choice == "example": knl = lp.make_kernel( - #"{[i,j]: 0<=i<2 and 1<=j<3}", - #"{[i,j]: pi_lo<=itemp = b[i,j] {id=insn_a}", - "a[i,j] = temp + 1 {id=insn_b,dep=insn_a}", - "c[i,j] = d[i,j] {id=insn_c}", - "out[t,tt] = in[t,tt] {id=insn_d}", + "{[i,ii]: 0<=itemp = b[i,k] {id=insn_a} + end + for j + a[i,j] = temp + 1 {id=insn_b,dep=insn_a} + c[i,j] = d[i,j] {id=insn_c} + end + end + for t + e[t] = f[t] {id=insn_d} + end + """ ], name="example", - #assumptions="pi_lo,pi_up,pj_lo,pj_up >= 1", - #assumptions="pi_up,pj_up >= 1", - #assumptions="pi_up,pj_up,pt_up,pt_lo >= 1 and pt_lo < pt_up", - assumptions="pi_up,pj_up,pt_up,pt_lo >= 1 and pt_lo < pt_up", + assumptions="pi,pj,pk,pt >= 1", lang_version=(2018, 2) ) knl = lp.add_and_infer_dtypes( knl, - {"b": np.float32, "d": np.float32, "in": np.float32}) - knl = lp.tag_inames(knl, {"i": "l.0"}) + {"b": np.float32, "d": np.float32, "f": np.float32}) + #knl = lp.tag_inames(knl, {"i": "l.0"}) knl = lp.preprocess_kernel(knl) knl = lp.get_one_scheduled_kernel(knl) elif knl_choice == "matmul": @@ -195,34 +204,50 @@ if knl_choice == "nop": knl = lp.fix_parameters(knl, dim=3) knl = lp.preprocess_kernel(knl) knl = lp.get_one_scheduled_kernel(knl) -if knl_choice == "nest": +if knl_choice == "nest_multi_dom": + #"{[i,j,k]: 0<=i,j,kfoo = 0 {id=insn0} for i - <>acc = 0 {id=insn1} + <>acc = 0 {id=insn0} for j for k - acc = acc + j + k {id=insn2,dep=insn1} + acc = acc + j + k {id=insn1,dep=insn0} end end - foo = foo + acc {id=insn3,dep=insn2} end - <>bar = foo {id=insn4,dep=insn3} """, - name="nest", - assumptions="n >= 1", + name="nest_multi_dom", + #assumptions="n >= 1", + assumptions="ni,nj,nk >= 1", lang_version=(2018, 2) ) + """ + <>foo = 0 {id=insn0} + for i + <>acc = 0 {id=insn1} + for j + for k + acc = acc + j + k {id=insn2,dep=insn1} + end + end + foo = foo + acc {id=insn3,dep=insn2} + end + <>bar = foo {id=insn4,dep=insn3} + """ knl = lp.preprocess_kernel(knl) knl = lp.get_one_scheduled_kernel(knl) # Print kernel info ------------------------------------------------------ -print("Kernel:") -print(knl) +#print("Kernel:") +#print(knl) #print(lp.generate_code_v2(knl).device_code()) print("="*80) print("Iname tags: %s" % (knl.iname_to_tags)) @@ -245,17 +270,11 @@ for dep_set in statement_dep_sets: # get separate domains for before.active_inames and after.active_inames deps_and_domains = [] for dep_set in statement_dep_sets: - #deps_and_domains.append([ - # dep_set, knl.get_inames_domain( - # dep_set.statement_before.active_inames | - # dep_set.statement_after.active_inames)]) - # TODO need to have separate domains for separate instructions? 
- # ...domain for after distinct from before deps_and_domains.append([ - dep_set, - knl.get_inames_domain(dep_set.statement_before.active_inames), - knl.get_inames_domain(dep_set.statement_after.active_inames) - ]) + dep_set, + knl.get_inames_domain(dep_set.statement_before.active_inames), + knl.get_inames_domain(dep_set.statement_after.active_inames) + ]) print("-"*85) print("StatementDependencies w/domains:") @@ -270,19 +289,22 @@ sched_is_valid = True for statement_dep_set, dom_before, dom_after in deps_and_domains: s_before = statement_dep_set.statement_before s_after = statement_dep_set.statement_after - # TODO separate dom for before and after insns - # TODO is using this union in creating schedule (not deps) okay? + # The isl map representing the schedule maps + # statement instances -> lex time + # The 'in_' dim vars need to match for all sched items in the map, + # Instructions that use fewer inames will still need to + # have the unused inames in their 'in_' dim vars, so we'll + # include them and set them equal to a dummy variable. + + # Get all inames now in order to maintain list with consistent ordering + # This will help keep isl maps/sets compatible combined_doms = knl.get_inames_domain( s_before.active_inames | s_after.active_inames ) - - # Get all inames now in order to maintain list with consistent ordering - # This will help keep isl maps/sets compatible - # TODO is this still necessary? - all_inames_ordered = order_var_names_to_match_islset( - knl.all_inames(), combined_doms) # should separate doms? + all_necessary_inames_ordered = order_var_names_to_match_islset( + knl.all_inames(), combined_doms) # Create a mapping of {statement instance: lex point} # including only instructions involved in this dependency @@ -294,13 +316,22 @@ for statement_dep_set, dom_before, dom_after in deps_and_domains: print("LexSchedule before processing:") print(sched) - # get an isl map representing the LexSchedule - # this requires information about the iname domains + # Right now, statement tuples consist of single int. + # Add all inames from combined domains to statement tuples. + # This may include inames not used in every instruction, + # but all in-tuples need to match because they will become + # the in-dims for an isl map, so if an iname is needed in one + # statement tuple, then it is needed in all statement tuples. + sched.add_symbolic_inames_to_statement_instances( + all_necessary_inames_ordered) + print("LexSchedule with inames added:") + print(sched) + + # Get an isl map representing the LexSchedule; + # this requires the iname domains sched_map_symbolic = sched.create_symbolic_isl_map( - combined_doms, all_inames_ordered) # should separate doms? 
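The map produced by create_symbolic_isl_map sends each statement instance, a statement id together with iname values, to a point in lexicographic time. For intuition only, a hand-written map of that general shape for two statements nested in a loop over i might look like the following (the parameter pi and the exact lex-dimension layout are invented for this sketch):

import islpy as isl

# lex dim l0 follows the loop over i; l1 orders the two statements within it
sched_map = isl.Map(
    "[pi] -> { [statement, i] -> [l0, l1] : "
    "0 <= i < pi and 0 <= statement <= 1 and l0 = i and l1 = statement }")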
- #sched_map_symbolic = sched.create_symbolic_isl_map( - # dom_before, dom_after, all_inames_ordered) - print("LexSchedule after processing:") + [dom_before, dom_after], all_necessary_inames_ordered) + print("LexSchedule after creating symbolic isl map:") print(sched) print("LexSched:") print(prettier_map_string(sched_map_symbolic)) @@ -308,12 +339,9 @@ for statement_dep_set, dom_before, dom_after in deps_and_domains: print(sched_map_symbolic.space) print("-"*85) - # *Explicit* lexicographic mapping- map each tuple to all tuples occuring later - #lex_map_explicit = sched.get_explicit_sched_map() - # get map representing lexicographic ordering lex_order_map_symbolic = sched.get_lex_order_map_for_symbolic_sched() - print("lex map symbolic:") + print("lex order map symbolic:") print(prettier_map_string(lex_order_map_symbolic)) print("space (lex time -> lex time):") print(lex_order_map_symbolic.space) @@ -321,10 +349,6 @@ for statement_dep_set, dom_before, dom_after in deps_and_domains: # create statement instance ordering, # maps each statement instance to all statement instances occuring later - #SIO_explicit = get_statement_ordering_map( - # example_sched_explicit, lex_map_explicit) - #print("statement instance ordering explicit:") - #print(prettier_map_string(SIO_explicit)) SIO_symbolic = get_statement_ordering_map( sched_map_symbolic, lex_order_map_symbolic) print("statement instance ordering symbolic:") @@ -342,38 +366,44 @@ for statement_dep_set, dom_before, dom_after in deps_and_domains: statement_var = 's' constraint_map = create_dependency_constraint( statement_dep_set, - all_inames_ordered, # TODO separate lists for separate doms? + all_necessary_inames_ordered, statement_var, - combined_doms, # TODO separate domains for before/after - #dom_before, - #dom_after, + dom_before, + dom_after, sched.lp_insnid_to_int_sid, + sched.unused_param_name, ) print("constraint map:") print(prettier_map_string(constraint_map)) - #print("space (statment instances -> statement instances):") - #print(constraint_map.space) + aligned_constraint_map = constraint_map.align_params(SIO_symbolic.space) + print("aligned constraint map:") + print(prettier_map_string(aligned_constraint_map)) - assert constraint_map.space == SIO_symbolic.space - if not constraint_map.is_subset(SIO_symbolic): # TODO is this the right question? + assert aligned_constraint_map.space == SIO_symbolic.space + if not aligned_constraint_map.is_subset(SIO_symbolic): sched_is_valid = False conc_inames, non_conc_inames = get_concurrent_inames(knl) print("================ constraint check failure =================") print("constraint map not subset of SIO") print("dependency:") print(statement_dep_set) + """ print("concurrent inames:", conc_inames) print("sequential inames:", non_conc_inames) print("constraint map space (statment instances -> statement instances):") - print(constraint_map.space) + print(aligned_constraint_map.space) print("SIO space (statement instances -> statement instances):") print(SIO_symbolic.space) print("constraint map:") - print(prettier_map_string(constraint_map)) + print(prettier_map_string(aligned_constraint_map)) print("statement instance ordering:") print(prettier_map_string(SIO_symbolic)) print("{insn id -> sched sid int} dict:") print(sched.lp_insnid_to_int_sid) + print("gist") + print(aligned_constraint_map.gist(SIO_symbolic)) + print(SIO_symbolic.gist(aligned_constraint_map)) + """ print("===========================================================") print("is sched valid? 
constraint map subset of SIO?") @@ -463,4 +493,3 @@ schedule_explicit_map2 = isl.Map( assert schedule_explicit_map2 == schedule_explicit_map == sched.get_isl_map() ''' - diff --git a/example_schedule_creation_old.py b/example_schedule_creation_old.py index c1589ec12..5abda6748 100644 --- a/example_schedule_creation_old.py +++ b/example_schedule_creation_old.py @@ -102,14 +102,27 @@ if not all_iname_domains_equal(knl): "schedule checker does not yet handle kernels where " "get_inames_domain(iname) is not same for all inames") domain_union = _union_inames_domains(knl) -all_inames_ordered = order_var_names_to_match_islset(knl.all_inames(), domain_union) +all_necessary_inames_ordered = order_var_names_to_match_islset( + knl.all_inames(), domain_union) # get all inames in consistent ordering: sched = LexSchedule(knl) print("LexSchedule before processing:") print(sched) +# Right now, statement tuples consist of single int. +# Add all inames from combined domains to statement tuples. +# This may include inames not used in every instruction, +# but all in-tuples need to match because they will become +# the in-dims for an isl map, so if an iname is needed in one +# statement tuple, then it is needed in all statement tuples. +sched.add_symbolic_inames_to_statement_instances( + all_necessary_inames_ordered) +print("LexSchedule with inames added:") +print(sched) -sched_map_symbolic = sched.create_symbolic_isl_map(domain_union, all_inames_ordered) +sched_map_symbolic = sched.create_symbolic_isl_map( + [domain_union]*len(sched.lex_schedule), # due to changes, need one per insn + all_necessary_inames_ordered) print("LexSchedule after processing:") print(sched) # ------------------------------------------------------------------- @@ -163,18 +176,18 @@ statement_var = 's' sched_is_valid = True for statement_dep_set in statement_dep_sets: - # TODO is using this union in creating schedule (not deps) okay? 
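The check above reduces schedule validity to one subset test: after aligning parameters, the dependency's constraint map must be contained in the statement instance ordering (SIO). The SIO can be obtained by composing the schedule map with the lexicographic order map and the inverted schedule map, which is presumably what get_statement_ordering_map does. A self-contained toy version of the whole test, with all maps below made up for illustration:

import islpy as isl

# toy schedule: two statements inside a loop over i, statement 0 first
sched = isl.Map("{ [s, i] -> [i, s] : 0 <= i < 4 and 0 <= s <= 1 }")

# 'happens before' on two lexicographic dimensions
lex_lt = isl.Map(
    "{ [l0, l1] -> [l0_, l1_] : l0_ > l0 or (l0_ = l0 and l1_ > l1) }")

# statement instance ordering: instance -> all instances at later lex times
sio = sched.apply_range(lex_lt).apply_range(sched.reverse())

# SAME-style dependency: statement 0 must precede statement 1 for equal i
dep = isl.Map(
    "{ [s, i] -> [s_, i_] : s = 0 and s_ = 1 and i_ = i and 0 <= i < 4 }")

dep = dep.align_params(sio.space)  # a no-op here; matters once params differ
print(dep.is_subset(sio))          # True: this schedule respects the dependency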
- combined_doms = knl.get_inames_domain( - statement_dep_set.statement_before.active_inames | - statement_dep_set.statement_after.active_inames - ) - + dom_before = knl.get_inames_domain( + statement_dep_set.statement_before.active_inames) + dom_after = knl.get_inames_domain( + statement_dep_set.statement_after.active_inames) constraint_map = create_dependency_constraint( statement_dep_set, - all_inames_ordered, + all_necessary_inames_ordered, statement_var, - combined_doms, + dom_before, + dom_after, sched.lp_insnid_to_int_sid, + sched.unused_param_name, ) print("constraint map:") print(prettier_map_string(constraint_map)) diff --git a/sched_check_utils.py b/sched_check_utils.py index 9be7ca4dd..f436db247 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -18,13 +18,44 @@ def get_islvars_from_space(space): def add_dims_to_isl_set(isl_set, dim_type, names, new_pose_start): - new_set = isl_set.insert_dims(dim_type, new_pose_start, len(names)).set_dim_name( - dim_type, new_pose_start, names[0]) + new_set = isl_set.insert_dims( + dim_type, new_pose_start, len(names) + ).set_dim_name(dim_type, new_pose_start, names[0]) for i, name in enumerate(names[1:]): new_set = new_set.set_dim_name(dim_type, new_pose_start+1+i, name) return new_set +def is_ordered_sublist(sub_list, full_list): + full_idx = 0 + sub_idx = 0 + while sub_idx < len(sub_list) and full_idx < len(full_list): + if sub_list[sub_idx] == full_list[full_idx]: + sub_idx += 1 + full_idx += 1 + return sub_idx == len(sub_list) + + +def add_missing_dims_to_isl_set(isl_set, dim_type, all_dim_names): + # assumes vars in set are ordered subset of all_dim_names + assert is_ordered_sublist( + isl_set.get_var_names(dim_type), + all_dim_names, + ) + + new_set = isl_set.copy() + for i, name in enumerate(all_dim_names): + if i >= new_set.n_dim() or \ + new_set.get_dim_name(dim_type, i) != name: + # insert missing dim + new_set = new_set.insert_dims( + dim_type, i, 1 + ).set_dim_name( + dim_type, i, name) + + return new_set + + def create_new_set_with_primes(old_set): new_set = old_set.copy() for i in range(old_set.n_dim()): @@ -68,7 +99,7 @@ def append_apostrophes(strings): return [s+"'" for s in strings] -def _union_of_sets(set_list): +def _union_of_isl_sets_or_maps(set_list): union = set_list[0] for s in set_list[1:]: union = union.union(s) @@ -131,70 +162,95 @@ def create_explicit_map_from_tuples(tuple_pairs, space): return union_map -def create_symbolic_isl_map_from_tuples(tuple_pairs, space, domain_to_intersect): +def create_symbolic_isl_map_from_tuples( + tuple_pairs, + space, + domains_to_intersect, + unused_param_name, + ): # given a list of pairs of ((input), (output)) tuples, create an isl map - # and intersect that map with domain_to_intersect + # and intersect each pair with corresponding domain_to_intersect + #TODO allow None for domains + assert len(tuple_pairs) == len(domains_to_intersect) dim_type = isl.dim_type - from schedule_checker.sched_check_utils import get_islvars_from_space #param_names = space.get_var_names(isl.dim_type.param) - out_names = space.get_var_names(dim_type.out) - in_names = space.get_var_names(isl.dim_type.in_) + space_out_names = space.get_var_names(dim_type.out) + space_in_names = space.get_var_names(isl.dim_type.in_) islvars = get_islvars_from_space(space) # loop through pairs and create a set that will later be converted to a map # initialize set with constraint that is always false - constraints_set = islvars[0].eq_set(islvars[0] + 1) - for tup_in, tup_out in tuple_pairs: + #constraints_set = 
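Both add_dims_to_isl_set and the add_missing_dims_to_isl_set helper added in this patch come down to inserting named, unconstrained dimensions at chosen positions so that otherwise-incompatible sets can be intersected. In isolation that looks roughly like this; the set contents and names are arbitrary:

import islpy as isl

dom = isl.Set("{ [i, k] : 0 <= i < 8 and 0 <= k < 8 }")

# insert a 'statement' dim at position 0, then a missing 'j' dim at position 2;
# the inserted dims are unconstrained
dom = dom.insert_dims(isl.dim_type.out, 0, 1).set_dim_name(
    isl.dim_type.out, 0, "statement")
dom = dom.insert_dims(isl.dim_type.out, 2, 1).set_dim_name(
    isl.dim_type.out, 2, "j")
# dom now has dims [statement, i, j, k] with constraints only on i and k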
islvars[0].eq_set(islvars[0] + 1) + all_maps = [] + for (tup_in, tup_out), dom in zip(tuple_pairs, domains_to_intersect): # initialize constraint with true constraint = islvars[0].eq_set(islvars[0]) # set values for 'in' dimension using tuple vals - for i, val_in in enumerate(tup_in): + assert len(tup_in) == len(space_in_names) + for dim_name, val_in in zip(space_in_names, tup_in): if isinstance(val_in, int): constraint = constraint \ - & islvars[in_names[i]].eq_set(islvars[0]+val_in) + & islvars[dim_name].eq_set(islvars[0]+val_in) else: constraint = constraint \ - & islvars[in_names[i]].eq_set(islvars[val_in]) + & islvars[dim_name].eq_set(islvars[val_in]) + + # TODO we probably shouldn't rely on domains_to_intersect + # here for determing where to set inames equal to dummy vars, + # should instead determine before in LexSchedule and pass info in + dom_var_names = dom.get_var_names(dim_type.out) + assert set( + [var for var in tup_out if not isinstance(var, int)] + ).issubset(set(dom_var_names)) + unused_inames = set(space_in_names) - set(dom_var_names) - set(['s']) + for unused_iname in unused_inames: + constraint = constraint & islvars[unused_iname].eq_set( + islvars[unused_param_name]) + # set values for 'out' dimension using tuple vals - for i, val_out in enumerate(tup_out): + assert len(tup_out) == len(space_out_names) + for dim_name, val_out in zip(space_out_names, tup_out): if isinstance(val_out, int): constraint = constraint \ - & islvars[out_names[i]].eq_set(islvars[0]+val_out) + & islvars[dim_name].eq_set(islvars[0]+val_out) else: constraint = constraint \ - & islvars[out_names[i]].eq_set(islvars[val_out]) - - # union this constraint with full set of constraints - constraints_set = constraints_set | constraint - - # convert set to map by moving dimensions around - result_map = isl.Map.from_domain(constraints_set) - result_map = result_map.move_dims( - dim_type.out, 0, dim_type.in_, - len(in_names), len(out_names)) - ''' - result_map_vars_in = result_map.space.get_var_names(isl.dim_type.in_) - domain_stripped = domain_to_intersect.project_out_except( - result_map_vars_in, [isl.dim_type.set]) - return result_map.intersect_domain(domain_stripped) - ''' - - # if there are any dimensions in domain_to_intersect that are missing from - # result_map, insert these dimensions so that we can intersect the domain - from schedule_checker.sched_check_utils import add_missing_set_dims_to_map_indims - # TODO make sure these always align properly - result_map = add_missing_set_dims_to_map_indims(result_map, domain_to_intersect) - return result_map.intersect_domain(domain_to_intersect) - - -def set_space_names(space, param_names=None, in_names=None, out_names=None): + & islvars[dim_name].eq_set(islvars[val_out]) + + # convert set to map by moving dimensions around + map_from_set = isl.Map.from_domain(constraint) + map_from_set = map_from_set.move_dims( + dim_type.out, 0, dim_type.in_, + len(space_in_names), len(space_out_names)) + + # TODO remove: + assert space_in_names == map_from_set.get_var_names( + isl.dim_type.in_) + + # if there are any dimensions in dom that are missing from + # map_from_set, we have a problem I think? + # (assertion checks this in add_missing... 
+ dom_with_all_inames = add_missing_dims_to_isl_set( + dom, isl.dim_type.out, + space_in_names, + ) + + # intersect domain with this map + all_maps.append( + map_from_set.intersect_domain(dom_with_all_inames)) + + return _union_of_isl_sets_or_maps(all_maps) + + +def set_space_names( + space, param_names=None, in_names=None, out_names=None): new_space = space.copy() dim_type = isl.dim_type if param_names: diff --git a/schedule.py b/schedule.py index 9a46f3338..66327db08 100644 --- a/schedule.py +++ b/schedule.py @@ -45,6 +45,9 @@ class StatementInstance(object): class LexSchedule(object): # contains a mapping of {statement instance: lex point} + unused_param_name = "unused" + #TODO use statement var + #statement_variable = "statement" def __init__( self, @@ -59,10 +62,6 @@ class LexSchedule(object): # into explicit statement instances self.inames_enumerated = [] - # symbolic inames added to statement instances in sched - # that have *not* been enumerated into explicit statement instances - self.inames_added_to_statement_instances = [] - # map from loopy insn_id strings to statement id ints self.lp_insnid_to_int_sid = {} @@ -70,6 +69,7 @@ class LexSchedule(object): # we don't have an iname named 's' # TODO change to var less common than 's' and/or generate something unique? assert not any(iname == 's' for iname in knl.all_inames()) + assert not any(iname == self.unused_param_name for iname in knl.all_inames()) from loopy.schedule import (EnterLoop, LeaveLoop, Barrier, RunInstruction) @@ -83,7 +83,7 @@ class LexSchedule(object): if isinstance(sched_item, EnterLoop): iname = sched_item.iname # if the schedule is empty, this is the first schedule item, so - # don't increment lex dim val enumerating items in current code block, + # don't increment lex dim val enumerating items in current block, # otherwise, this loop is next item in current code block, so # increment lex dim val enumerating items in current code block if self.lex_schedule: # if the schedule is not empty @@ -108,6 +108,9 @@ class LexSchedule(object): if isinstance(sched_item, RunInstruction): lp_insn_id = sched_item.insn_id else: # Barrier + # TODO make sure it's okay to ignore barriers without id + # matmul example has barrier that fails this assertion... 
+ # assert sched_item.originating_insn_id is not None lp_insn_id = sched_item.originating_insn_id # if include_only_insn_ids list was passed, @@ -144,18 +147,11 @@ class LexSchedule(object): self.lex_schedule = new_sched def add_symbolic_inames_to_statement_instances(self, inames): - for iname in inames: - new_sched = OrderedDict() - iname_found = False - for insn, lex_pt in self.lex_schedule.items(): - if iname in lex_pt: - new_sched[tuple(list(insn)+[iname])] = lex_pt - iname_found = True - else: - new_sched[insn] = lex_pt - self.lex_schedule = new_sched - if iname_found: - self.inames_added_to_statement_instances.append(iname) + # append inames to lex tuples (matching specified order) + new_sched = OrderedDict() + for insn, lex_pt in self.lex_schedule.items(): + new_sched[tuple(list(insn)+inames[:])] = lex_pt + self.lex_schedule = new_sched def add_new_lp_insnid(self, lp_insnid): # create an int representing this instruction and @@ -166,74 +162,46 @@ class LexSchedule(object): else: self.lp_insnid_to_int_sid[lp_insnid] = 0 - def get_isl_space_for_symbolic_sched(self): - # create an isl space - # {('s', ) -> - # (lexicographic ordering dims)} - - params_sched = [] - # TODO make "s" a variable for consistency - in_names_sched = ["s"] + self.inames_added_to_statement_instances - # TODO make "l" a variable for consistency - out_names_sched = ["l"+str(i) for i in range(self.max_lex_dims())] - from schedule_checker.sched_check_utils import get_isl_space - return get_isl_space(params_sched, in_names_sched, out_names_sched) - def get_last_schedule_item(self): return next(reversed(self.lex_schedule)) def get_last_lex_pt(self): return self.lex_schedule[self.get_last_schedule_item()] - #def create_symbolic_isl_map(self, dom_before, dom_after, inames): - def create_symbolic_isl_map(self, domain, inames): - + def create_symbolic_isl_map(self, domains, inames_ordered): # create isl map representing lex schedule - # TODO if inames will always match domain out vars, don't need to pass them from schedule_checker.sched_check_utils import ( create_symbolic_isl_map_from_tuples, add_dims_to_isl_set ) - # Get all inames now in order to maintain list with consistent ordering - # This will help keep isl maps/sets compatible - domain_iname_order = domain.get_var_names(isl.dim_type.out) - inames_ordered_to_match_domain = [] - for iname in domain_iname_order: - if iname in inames: - inames_ordered_to_match_domain.append(iname) - self.add_symbolic_inames_to_statement_instances( - inames_ordered_to_match_domain) + assert len(domains) == len(self.lex_schedule) # create an isl space - # {('s', ) -> + # {('s', used in >=1 statement domain>) -> # (lexicographic ordering dims)} - sched_space = self.get_isl_space_for_symbolic_sched() - """ - # TODO maybe don't project this out, constraints may involve any iname later? 
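get_lex_order_map_for_symbolic_sched delegates to create_symbolic_lex_order_map in lexicographic_order_map.py, which is not shown in this series of hunks. One plausible way such a 'happens before' map over lexicographic time points can be assembled for two lex dimensions, sketched here with invented variable names:

import islpy as isl

v = isl.make_zero_and_vars(["l0", "l1", "l0'", "l1'"], [])

# (l0, l1) precedes (l0', l1') lexicographically
before = v["l0"].lt_set(v["l0'"]) | (
    v["l0"].eq_set(v["l0'"]) & v["l1"].lt_set(v["l1'"]))

lex_order_map = isl.Map.from_domain(before).move_dims(
    isl.dim_type.out, 0, isl.dim_type.in_, 2, 2)
# roughly { [l0, l1] -> [l0', l1'] : l0' > l0 or (l0' = l0 and l1' > l1) }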
- domain_stripped = domain_intersection.project_out_except( - self.inames_added_to_statement_instances, - [isl.dim_type.set] - ) - """ - - # insert 's' dim into domain so that its space allows for + params_sched = [self.unused_param_name] + # TODO make "s" a variable for consistency + in_names_sched = ["s"] + inames_ordered + # TODO make "l" a variable for consistency + out_names_sched = ["l"+str(i) for i in range(self.max_lex_dims())] + from schedule_checker.sched_check_utils import get_isl_space + sched_space = get_isl_space(params_sched, in_names_sched, out_names_sched) + + # Insert 's' dim into domain so that its space allows for # intersection with sched map later # TODO first need to make sure statement var name isn't already being used new_pose = 0 # insert 's' at beginning - domain_to_intersect = add_dims_to_isl_set( - domain, isl.dim_type.out, ['s'], new_pose) - #dom_before_to_intersect = add_dims_to_isl_set( - # dom_before, isl.dim_type.out, ['s'], new_pose) - #dom_after_to_intersect = add_dims_to_isl_set( - # dom_before, isl.dim_type.out, ['s'], new_pose) + doms_to_intersect = [] + for dom in domains: + doms_to_intersect.append(add_dims_to_isl_set( + dom, isl.dim_type.out, ['s'], new_pose)) # create isl map return create_symbolic_isl_map_from_tuples( - #list(self.items()), sched_space, - #dom_before_to_intersect, dom_after_to_intersect) - list(self.items()), sched_space, domain_to_intersect) + list(self.items()), sched_space, + doms_to_intersect, self.unused_param_name) def get_lex_order_map_for_symbolic_sched(self): from schedule_checker.lexicographic_order_map import ( -- GitLab From b7a0ca0d82bc23882a22187db5074420983adab3 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sat, 6 Jul 2019 23:52:44 -0500 Subject: [PATCH 063/415] changed hardcoded statement var (in multiple places) into lexsched class var --- dependency.py | 23 +++++++------- example_dependency_checking.py | 4 +-- example_pairwise_schedule_validity.py | 45 +++++++++++++-------------- example_schedule_creation_old.py | 3 +- sched_check_utils.py | 4 ++- schedule.py | 32 +++++++++---------- 6 files changed, 56 insertions(+), 55 deletions(-) diff --git a/dependency.py b/dependency.py index 810f2d62a..73161fb5d 100644 --- a/dependency.py +++ b/dependency.py @@ -51,11 +51,11 @@ def _convert_constraint_set_to_map(constraint_set, mv_count, src_position=None): def create_dependency_constraint( statement_dep_set, all_inames_ordered, - statement_var, dom_before_constraint_set, dom_after_constraint_set, sid_to_int, unused_param_name, + statement_var_name, ): from schedule_checker.sched_check_utils import ( make_islvars_with_var_primes, @@ -71,9 +71,9 @@ def create_dependency_constraint( # create some (ordered) isl vars to use, e.g., {s, i, j, s', i', j'} islvars = make_islvars_with_var_primes( - [statement_var]+all_inames_ordered, + [statement_var_name]+all_inames_ordered, [unused_param_name]) - statement_var_prime = statement_var+"'" + statement_var_name_prime = statement_var_name+"'" # get (ordered) list of unused before/after inames inames_before_unused = [] @@ -122,12 +122,12 @@ def create_dependency_constraint( constraint_set = constraint_set & islvars[iname].eq_set( islvars[unused_param_name]) - # set statement_var == statement # + # set statement_var_name == statement # s_before_int = sid_to_int[statement_dep_set.statement_before.sid] s_after_int = sid_to_int[statement_dep_set.statement_after.sid] - constraint_set = constraint_set & islvars[statement_var].eq_set( + constraint_set = constraint_set & 
islvars[statement_var_name].eq_set( islvars[0]+s_before_int) - constraint_set = constraint_set & islvars[statement_var_prime].eq_set( + constraint_set = constraint_set & islvars[statement_var_name_prime].eq_set( islvars[0]+s_after_int) # union this constraint_set with all_constraints_set @@ -140,20 +140,21 @@ def create_dependency_constraint( # now apply domain sets to constraint variables # add statement variable to doms to enable intersection - new_pose = 0 # insert 's' at beginning # TODO don't hardcode 's' + new_pose = 0 # insert 'statement' at beginning # TODO don't hardcode position domain_to_intersect = add_dims_to_isl_set( - dom_before_constraint_set, isl.dim_type.out, ["s"], new_pose) + dom_before_constraint_set, isl.dim_type.out, [statement_var_name], new_pose) range_constraint_set = create_new_set_with_primes(dom_after_constraint_set) range_to_intersect = add_dims_to_isl_set( - range_constraint_set, isl.dim_type.out, ["s'"], new_pose) + range_constraint_set, isl.dim_type.out, [statement_var_name_prime], new_pose) # insert inames missing from doms to enable intersection domain_to_intersect = add_missing_dims_to_isl_set( - domain_to_intersect, isl.dim_type.out, ["s"] + all_inames_ordered) + domain_to_intersect, isl.dim_type.out, + [statement_var_name] + all_inames_ordered) range_to_intersect = add_missing_dims_to_isl_set( range_to_intersect, isl.dim_type.out, - append_apostrophes(["s"] + all_inames_ordered)) + append_apostrophes([statement_var_name] + all_inames_ordered)) # intersect doms map_with_loop_domain_constraints = all_constraints_map.intersect_domain( diff --git a/example_dependency_checking.py b/example_dependency_checking.py index 1168ddd46..84adc8fdc 100644 --- a/example_dependency_checking.py +++ b/example_dependency_checking.py @@ -142,11 +142,11 @@ dom_after = knl.get_inames_domain( constraint_map = create_dependency_constraint( statement_dep_set, all_necessary_inames_ordered, - statement_var, dom_before, dom_after, insnid_to_int_sid, - unused_param_name=unused_param_name, + unused_param_name, + statement_var, ) print("constraint map space:") print(constraint_map.space) diff --git a/example_pairwise_schedule_validity.py b/example_pairwise_schedule_validity.py index ec17c93df..de3108e47 100644 --- a/example_pairwise_schedule_validity.py +++ b/example_pairwise_schedule_validity.py @@ -18,7 +18,7 @@ from schedule_checker.sched_check_utils import ( knl_choice = "example" #knl_choice = "matmul" -#knl_choice = "scan" +knl_choice = "scan" #knl_choice = "dependent_domain" #knl_choice = "stroud" # invalid sched? 
#knl_choice = "add_barrier" @@ -27,32 +27,32 @@ knl_choice = "example" if knl_choice == "example": knl = lp.make_kernel( - [ + [ "{[i,ii]: 0<=itemp = b[i,k] {id=insn_a} - end - for j - a[i,j] = temp + 1 {id=insn_b,dep=insn_a} - c[i,j] = d[i,j] {id=insn_c} - end + ], + [ + """ + for i + for k + <>temp = b[i,k] {id=insn_a} end - for t - e[t] = f[t] {id=insn_d} + for j + a[i,j] = temp + 1 {id=insn_b,dep=insn_a} + c[i,j] = d[i,j] {id=insn_c} end - """ - ], - name="example", - assumptions="pi,pj,pk,pt >= 1", - lang_version=(2018, 2) - ) + end + for t + e[t] = f[t] {id=insn_d} + end + """ + ], + name="example", + assumptions="pi,pj,pk,pt >= 1", + lang_version=(2018, 2) + ) knl = lp.add_and_infer_dtypes( knl, {"b": np.float32, "d": np.float32, "f": np.float32}) @@ -363,15 +363,14 @@ for statement_dep_set, dom_before, dom_after in deps_and_domains: # create a map representing constraints from the dependency, # maps each statement instance to all statement instances that must occur later - statement_var = 's' constraint_map = create_dependency_constraint( statement_dep_set, all_necessary_inames_ordered, - statement_var, dom_before, dom_after, sched.lp_insnid_to_int_sid, sched.unused_param_name, + sched.statement_var_name, ) print("constraint map:") print(prettier_map_string(constraint_map)) diff --git a/example_schedule_creation_old.py b/example_schedule_creation_old.py index 5abda6748..9e9599fe5 100644 --- a/example_schedule_creation_old.py +++ b/example_schedule_creation_old.py @@ -172,7 +172,6 @@ print("----------------------------------------------------------------------") print("dict{lp insn id : sched sid int}:") print(sched.lp_insnid_to_int_sid) print("----------------------------------------------------------------------") -statement_var = 's' sched_is_valid = True for statement_dep_set in statement_dep_sets: @@ -183,11 +182,11 @@ for statement_dep_set in statement_dep_sets: constraint_map = create_dependency_constraint( statement_dep_set, all_necessary_inames_ordered, - statement_var, dom_before, dom_after, sched.lp_insnid_to_int_sid, sched.unused_param_name, + sched.statement_var_name, ) print("constraint map:") print(prettier_map_string(constraint_map)) diff --git a/sched_check_utils.py b/sched_check_utils.py index f436db247..bf085048d 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -167,6 +167,7 @@ def create_symbolic_isl_map_from_tuples( space, domains_to_intersect, unused_param_name, + statement_var_name, # TODO can we not pass this? 
): # given a list of pairs of ((input), (output)) tuples, create an isl map @@ -209,7 +210,8 @@ def create_symbolic_isl_map_from_tuples( assert set( [var for var in tup_out if not isinstance(var, int)] ).issubset(set(dom_var_names)) - unused_inames = set(space_in_names) - set(dom_var_names) - set(['s']) + unused_inames = set(space_in_names) \ + - set(dom_var_names) - set([statement_var_name]) for unused_iname in unused_inames: constraint = constraint & islvars[unused_iname].eq_set( islvars[unused_param_name]) diff --git a/schedule.py b/schedule.py index 66327db08..dde3c2ea5 100644 --- a/schedule.py +++ b/schedule.py @@ -45,9 +45,9 @@ class StatementInstance(object): class LexSchedule(object): # contains a mapping of {statement instance: lex point} + unused_param_name = "unused" - #TODO use statement var - #statement_variable = "statement" + statement_var_name = "statement" def __init__( self, @@ -65,11 +65,11 @@ class LexSchedule(object): # map from loopy insn_id strings to statement id ints self.lp_insnid_to_int_sid = {} - # since 's' will be used to represent statement numbering, make sure - # we don't have an iname named 's' - # TODO change to var less common than 's' and/or generate something unique? - assert not any(iname == 's' for iname in knl.all_inames()) - assert not any(iname == self.unused_param_name for iname in knl.all_inames()) + # make sure we don't have an iname name conflict + assert not any( + iname == self.statement_var_name for iname in knl.all_inames()) + assert not any( + iname == self.unused_param_name for iname in knl.all_inames()) from loopy.schedule import (EnterLoop, LeaveLoop, Barrier, RunInstruction) @@ -179,29 +179,29 @@ class LexSchedule(object): assert len(domains) == len(self.lex_schedule) # create an isl space - # {('s', used in >=1 statement domain>) -> + # {('statement', used in >=1 statement domain>) -> # (lexicographic ordering dims)} params_sched = [self.unused_param_name] - # TODO make "s" a variable for consistency - in_names_sched = ["s"] + inames_ordered + in_names_sched = [self.statement_var_name] + inames_ordered # TODO make "l" a variable for consistency out_names_sched = ["l"+str(i) for i in range(self.max_lex_dims())] from schedule_checker.sched_check_utils import get_isl_space sched_space = get_isl_space(params_sched, in_names_sched, out_names_sched) - # Insert 's' dim into domain so that its space allows for + # Insert 'statement' dim into domain so that its space allows for # intersection with sched map later - # TODO first need to make sure statement var name isn't already being used - new_pose = 0 # insert 's' at beginning + new_pose = 0 # insert 'statement' dim at beginning + # TODO don't hardcode pose doms_to_intersect = [] for dom in domains: doms_to_intersect.append(add_dims_to_isl_set( - dom, isl.dim_type.out, ['s'], new_pose)) + dom, isl.dim_type.out, [self.statement_var_name], new_pose)) # create isl map return create_symbolic_isl_map_from_tuples( list(self.items()), sched_space, - doms_to_intersect, self.unused_param_name) + doms_to_intersect, + self.unused_param_name, self.statement_var_name) def get_lex_order_map_for_symbolic_sched(self): from schedule_checker.lexicographic_order_map import ( @@ -290,7 +290,7 @@ class LexSchedule(object): def get_isl_space_for_explicit_sched(self): params_sched = ["ps"] + ["p"+iname for iname in self.inames_enumerated] - in_names_sched = ["s"] + self.inames_enumerated + in_names_sched = [self.statement_var_name] + self.inames_enumerated out_names_sched = ["l"+str(i) for i in 
range(self.max_lex_dims())] from schedule_checker.sched_check_utils import get_isl_space return get_isl_space(params_sched, in_names_sched, out_names_sched) -- GitLab From 03b418023ca24f164400dde862e95e6a346c41ca Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sun, 7 Jul 2019 00:06:25 -0500 Subject: [PATCH 064/415] made lex time var names a lexsched class variable instead of hardcoding them in multiple places --- example_pairwise_schedule_validity.py | 2 +- schedule.py | 13 +++++++++---- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/example_pairwise_schedule_validity.py b/example_pairwise_schedule_validity.py index de3108e47..7be1ea588 100644 --- a/example_pairwise_schedule_validity.py +++ b/example_pairwise_schedule_validity.py @@ -18,7 +18,7 @@ from schedule_checker.sched_check_utils import ( knl_choice = "example" #knl_choice = "matmul" -knl_choice = "scan" +#knl_choice = "scan" #knl_choice = "dependent_domain" #knl_choice = "stroud" # invalid sched? #knl_choice = "add_barrier" diff --git a/schedule.py b/schedule.py index dde3c2ea5..f52fb819a 100644 --- a/schedule.py +++ b/schedule.py @@ -48,6 +48,7 @@ class LexSchedule(object): unused_param_name = "unused" statement_var_name = "statement" + lex_var_prefix = "l" def __init__( self, @@ -184,7 +185,7 @@ class LexSchedule(object): params_sched = [self.unused_param_name] in_names_sched = [self.statement_var_name] + inames_ordered # TODO make "l" a variable for consistency - out_names_sched = ["l"+str(i) for i in range(self.max_lex_dims())] + out_names_sched = self.get_lex_var_names() from schedule_checker.sched_check_utils import get_isl_space sched_space = get_isl_space(params_sched, in_names_sched, out_names_sched) @@ -203,13 +204,17 @@ class LexSchedule(object): doms_to_intersect, self.unused_param_name, self.statement_var_name) + def get_lex_var_names(self): + return [self.lex_var_prefix+str(i) + for i in range(self.max_lex_dims())] + def get_lex_order_map_for_symbolic_sched(self): from schedule_checker.lexicographic_order_map import ( create_symbolic_lex_order_map, ) - n_dims = self.max_lex_dims() - return create_symbolic_lex_order_map(n_dims) + return create_symbolic_lex_order_map( + n_dims, in_names=self.get_lex_var_names()) def get_isl_map_str(self): map_str = "{" @@ -291,7 +296,7 @@ class LexSchedule(object): def get_isl_space_for_explicit_sched(self): params_sched = ["ps"] + ["p"+iname for iname in self.inames_enumerated] in_names_sched = [self.statement_var_name] + self.inames_enumerated - out_names_sched = ["l"+str(i) for i in range(self.max_lex_dims())] + out_names_sched = self.get_lex_var_names() from schedule_checker.sched_check_utils import get_isl_space return get_isl_space(params_sched, in_names_sched, out_names_sched) -- GitLab From 4eac76a933b823320ba01ca4e4fd770b8227694c Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sun, 14 Jul 2019 19:13:58 -0500 Subject: [PATCH 065/415] simplified no-op kernel --- example_pairwise_schedule_validity.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/example_pairwise_schedule_validity.py b/example_pairwise_schedule_validity.py index 7be1ea588..bb2cbf9dc 100644 --- a/example_pairwise_schedule_validity.py +++ b/example_pairwise_schedule_validity.py @@ -178,29 +178,19 @@ if knl_choice == "add_barrier": if knl_choice == "nop": knl = lp.make_kernel( [ - "{[a]: 0<=a<10}", "{[b]: b_start<=b b_start = 1 - <> b_end = 2 for b - <> c_start = 1 <> c_end = 2 - for c ... 
nop end - - <>t[idim] = 1 end - end """, "...", seq_dependencies=True) - knl = lp.fix_parameters(knl, dim=3) knl = lp.preprocess_kernel(knl) knl = lp.get_one_scheduled_kernel(knl) -- GitLab From df77ff5aa4ee21647d02b8de5208f00aa9f79473 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sun, 14 Jul 2019 20:47:10 -0500 Subject: [PATCH 066/415] ignore barrier insns when originating_insn_id is None --- schedule.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/schedule.py b/schedule.py index f52fb819a..3f76e8807 100644 --- a/schedule.py +++ b/schedule.py @@ -110,9 +110,12 @@ class LexSchedule(object): lp_insn_id = sched_item.insn_id else: # Barrier # TODO make sure it's okay to ignore barriers without id + # (because they'll never be part of a dependency?) # matmul example has barrier that fails this assertion... # assert sched_item.originating_insn_id is not None lp_insn_id = sched_item.originating_insn_id + if lp_insn_id is None: + continue # if include_only_insn_ids list was passed, # only process insns found in list, -- GitLab From c85add94c5b9033cf7d86f88962cd36246c01fa7 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sun, 14 Jul 2019 20:48:07 -0500 Subject: [PATCH 067/415] added get_all_nonconcurrent_insn_iname_subsets(knl) and get_all_sched_items_within_inames(knl, inames) --- sched_check_utils.py | 61 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 61 insertions(+) diff --git a/sched_check_utils.py b/sched_check_utils.py index bf085048d..f15dcc99f 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -286,6 +286,10 @@ def get_isl_space(param_names, in_names, out_names): def get_concurrent_inames(knl): from loopy.kernel.data import LocalIndexTag, GroupIndexTag conc_inames = set() + + # TODO remove non-conc test + assertion + nonconc_inames = set() + all_inames = knl.all_inames() for iname in all_inames: iname_tags = knl.iname_to_tags.get(iname, None) @@ -293,4 +297,61 @@ def get_concurrent_inames(knl): isinstance(tag, (LocalIndexTag, GroupIndexTag)) for tag in iname_tags): conc_inames.add(iname) + else: + nonconc_inames.add(iname) + + # TODO remove non-conc test + assertion + assert all_inames-conc_inames == nonconc_inames + return conc_inames, all_inames-conc_inames + + +def get_all_nonconcurrent_insn_iname_subsets(knl, exclude_empty=False): + from loopy.schedule import Barrier, RunInstruction + + _, non_conc_inames = get_concurrent_inames(knl) + + iname_subsets = set() + #TODO do we need to check anything besides Barrer, RunInsn? + for sched_item in knl.schedule: + if isinstance(sched_item, (RunInstruction, Barrier)): + if isinstance(sched_item, RunInstruction): + insn_id = sched_item.insn_id + else: # Barrier + # TODO make sure it's okay to ignore barriers without id + # matmul example has barrier that fails this assertion... + # assert sched_item.originating_insn_id is not None + insn_id = sched_item.originating_insn_id + if insn_id is None: + continue + + insn = knl.id_to_insn[insn_id] + + iname_subsets.add(insn.within_inames & non_conc_inames) + + if exclude_empty: + iname_subsets.discard(frozenset()) + + return iname_subsets + + +def get_all_sched_items_within_inames(knl, inames): + from loopy.schedule import Barrier, RunInstruction + + sched_items = [] + for sched_item in knl.schedule: + if isinstance(sched_item, (RunInstruction, Barrier)): + if isinstance(sched_item, RunInstruction): + insn_id = sched_item.insn_id + else: # Barrier + # TODO make sure it's okay to ignore barriers without id + # matmul example has barrier that fails this assertion... 
+ # assert sched_item.originating_insn_id is not None + insn_id = sched_item.originating_insn_id + if insn_id is None: + continue + + insn = knl.id_to_insn[insn_id] + if inames.issubset(insn.within_inames): + sched_items.append(sched_item) + return sched_items -- GitLab From 20bcbcee5304efb47c7295faf30306e5e1c7ff7f Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sun, 14 Jul 2019 20:48:51 -0500 Subject: [PATCH 068/415] adding example to test loop carried dep logic --- example_pairwise_schedule_validity.py | 31 +++++++++++++++++++++++---- 1 file changed, 27 insertions(+), 4 deletions(-) diff --git a/example_pairwise_schedule_validity.py b/example_pairwise_schedule_validity.py index bb2cbf9dc..cb9f45601 100644 --- a/example_pairwise_schedule_validity.py +++ b/example_pairwise_schedule_validity.py @@ -16,7 +16,7 @@ from schedule_checker.sched_check_utils import ( # Choose kernel ---------------------------------------------------------- -knl_choice = "example" +#knl_choice = "example" #knl_choice = "matmul" #knl_choice = "scan" #knl_choice = "dependent_domain" @@ -24,6 +24,7 @@ knl_choice = "example" #knl_choice = "add_barrier" #knl_choice = "nop" #TODO nop not in sched... error #knl_choice = "nest_multi_dom" +knl_choice = "loop_carried_deps" if knl_choice == "example": knl = lp.make_kernel( @@ -147,6 +148,9 @@ elif knl_choice == "stroud": assumptions="deg>=0 and nels>=1" ) + knl = lp.add_and_infer_dtypes( + knl, + dict(coeffs=np.float32, qpts=np.int32)) knl = lp.fix_parameters(knl, nqp1d=7, deg=4) knl = lp.split_iname(knl, "el", 16, inner_tag="l.0") knl = lp.split_iname(knl, "el_outer", 2, outer_tag="g.0", inner_tag="ilp", @@ -168,6 +172,7 @@ if knl_choice == "add_barrier": 'cnst', shape=('n'), initializer=cnst, scope=lp.AddressSpace.GLOBAL, read_only=True), '...']) + knl = lp.add_and_infer_dtypes(knl, dict(a=np.float32)) knl = lp.fix_parameters(knl, n=16) knl = lp.add_barrier(knl, "id:first", "id:second") @@ -233,12 +238,30 @@ if knl_choice == "nest_multi_dom": knl = lp.preprocess_kernel(knl) knl = lp.get_one_scheduled_kernel(knl) +if knl_choice == "loop_carried_deps": + knl = lp.make_kernel( + "{[i]: 0<=iacc0 = 0 {id=insn0} + for i + acc0 = acc0 + i {id=insn1,dep=insn0} + <>acc2 = acc0 + i {id=insn2,dep=insn1} + <>acc3 = acc2 + i {id=insn3,dep=insn2} + <>acc4 = acc0 + i {id=insn4,dep=insn1} + end + """, + name="loop_carried_deps", + assumptions="n >= 1", + lang_version=(2018, 2) + ) + knl = lp.preprocess_kernel(knl) + knl = lp.get_one_scheduled_kernel(knl) # Print kernel info ------------------------------------------------------ -#print("Kernel:") -#print(knl) -#print(lp.generate_code_v2(knl).device_code()) +print("Kernel:") +print(knl) +print(lp.generate_code_v2(knl).device_code()) print("="*80) print("Iname tags: %s" % (knl.iname_to_tags)) print("="*80) -- GitLab From 14a62cbfc71d33072735d43d8cffca439f9f213f Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sun, 14 Jul 2019 22:44:19 -0500 Subject: [PATCH 069/415] created _get_insn_id_from_sched_item(); changed get_all_sched_items_within_inames() -> get_sched_item_ids_within_inames(), which returns ids instead of sched items --- sched_check_utils.py | 38 ++++++++++++++++++++------------------ schedule.py | 12 ++++++------ 2 files changed, 26 insertions(+), 24 deletions(-) diff --git a/sched_check_utils.py b/sched_check_utils.py index f15dcc99f..04a9041cc 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -306,6 +306,16 @@ def get_concurrent_inames(knl): return conc_inames, all_inames-conc_inames +def 
_get_insn_id_from_sched_item(knl, sched_item): + from loopy.schedule import Barrier + if isinstance(sched_item, Barrier): + return sched_item.originating_insn_id + else: + return sched_item.insn_id + + +# TODO for better performance, could combine these funcs so we don't +# loop over schedule more than once def get_all_nonconcurrent_insn_iname_subsets(knl, exclude_empty=False): from loopy.schedule import Barrier, RunInstruction @@ -315,18 +325,13 @@ def get_all_nonconcurrent_insn_iname_subsets(knl, exclude_empty=False): #TODO do we need to check anything besides Barrer, RunInsn? for sched_item in knl.schedule: if isinstance(sched_item, (RunInstruction, Barrier)): - if isinstance(sched_item, RunInstruction): - insn_id = sched_item.insn_id - else: # Barrier + insn_id = _get_insn_id_from_sched_item(knl, sched_item) + if insn_id is None: # TODO make sure it's okay to ignore barriers without id # matmul example has barrier that fails this assertion... # assert sched_item.originating_insn_id is not None - insn_id = sched_item.originating_insn_id - if insn_id is None: - continue - + continue insn = knl.id_to_insn[insn_id] - iname_subsets.add(insn.within_inames & non_conc_inames) if exclude_empty: @@ -335,23 +340,20 @@ def get_all_nonconcurrent_insn_iname_subsets(knl, exclude_empty=False): return iname_subsets -def get_all_sched_items_within_inames(knl, inames): +def get_sched_item_ids_within_inames(knl, inames): from loopy.schedule import Barrier, RunInstruction - sched_items = [] + sched_item_ids = [] for sched_item in knl.schedule: if isinstance(sched_item, (RunInstruction, Barrier)): - if isinstance(sched_item, RunInstruction): - insn_id = sched_item.insn_id - else: # Barrier + insn_id = _get_insn_id_from_sched_item(knl, sched_item) + if insn_id is None: # TODO make sure it's okay to ignore barriers without id # matmul example has barrier that fails this assertion... # assert sched_item.originating_insn_id is not None - insn_id = sched_item.originating_insn_id - if insn_id is None: - continue + continue insn = knl.id_to_insn[insn_id] if inames.issubset(insn.within_inames): - sched_items.append(sched_item) - return sched_items + sched_item_ids.append(insn_id) + return sched_item_ids diff --git a/schedule.py b/schedule.py index 3f76e8807..be60ca533 100644 --- a/schedule.py +++ b/schedule.py @@ -106,16 +106,16 @@ class LexSchedule(object): next_insn_lex_pt.pop() next_insn_lex_pt[-1] = next_insn_lex_pt[-1]+1 elif isinstance(sched_item, (RunInstruction, Barrier)): - if isinstance(sched_item, RunInstruction): - lp_insn_id = sched_item.insn_id - else: # Barrier + from schedule_checker.sched_check_utils import ( + _get_insn_id_from_sched_item, + ) + lp_insn_id = _get_insn_id_from_sched_item(knl, sched_item) + if lp_insn_id is None: # TODO make sure it's okay to ignore barriers without id # (because they'll never be part of a dependency?) # matmul example has barrier that fails this assertion... 
# assert sched_item.originating_insn_id is not None - lp_insn_id = sched_item.originating_insn_id - if lp_insn_id is None: - continue + continue # if include_only_insn_ids list was passed, # only process insns found in list, -- GitLab From cb57453d2ef70371e6355132527dcc27e0c1597e Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sun, 14 Jul 2019 22:52:45 -0500 Subject: [PATCH 070/415] returning set rather than list from get_sched_item_ids_within_inames() --- sched_check_utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sched_check_utils.py b/sched_check_utils.py index 04a9041cc..bbd4ae81f 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -343,7 +343,7 @@ def get_all_nonconcurrent_insn_iname_subsets(knl, exclude_empty=False): def get_sched_item_ids_within_inames(knl, inames): from loopy.schedule import Barrier, RunInstruction - sched_item_ids = [] + sched_item_ids = set() for sched_item in knl.schedule: if isinstance(sched_item, (RunInstruction, Barrier)): insn_id = _get_insn_id_from_sched_item(knl, sched_item) @@ -355,5 +355,5 @@ def get_sched_item_ids_within_inames(knl, inames): insn = knl.id_to_insn[insn_id] if inames.issubset(insn.within_inames): - sched_item_ids.append(insn_id) + sched_item_ids.add(insn_id) return sched_item_ids -- GitLab From 4bcd56d1ac25c234339c63fd5a94dcdd5180cee9 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sun, 14 Jul 2019 23:42:37 -0500 Subject: [PATCH 071/415] creating PRIOR deps (no corresponding constraint yet) for loop-carried dependencies --- dependency.py | 84 ++++++++++++++++++++++++++++++++++++++------ sched_check_utils.py | 12 ++++--- 2 files changed, 80 insertions(+), 16 deletions(-) diff --git a/dependency.py b/dependency.py index 73161fb5d..8a02de011 100644 --- a/dependency.py +++ b/dependency.py @@ -4,7 +4,7 @@ import islpy as isl class DependencyType: NONE = "none" SAME = "same" - #PRIOR = "prior" + PRIOR = "prior" #ALL = "all" @@ -107,15 +107,10 @@ def create_dependency_constraint( if dep_type == dt.SAME: constraint_set = create_elementwise_equality_conjunction_set( inames_list, inames_prime, islvars) - """ - # TODO define these if useful, otherwise remove - elif dep_type == dt.PRIOR: - constraint_set = constraint_set & islvars[iname].lt_set( - islvars[iname_prime]) - elif dep_type == dt.ALL: - constraint_set = constraint_set & islvars[0].eq_set( - islvars[0]) # True - """ + if dep_type == dt.PRIOR: + # TODO using false as placeholder + constraint_set = islvars[0].eq_set(islvars[0] + 1) + pass # set unused vars == unused dummy param for iname in inames_before_unused+inames_after_unused: @@ -166,7 +161,11 @@ def create_dependency_constraint( def create_dependencies_from_legacy_knl(knl): # Introduce SAME dep for set of shared, non-concurrent inames - from schedule_checker.sched_check_utils import get_concurrent_inames + from schedule_checker.sched_check_utils import ( + get_concurrent_inames, + get_all_nonconcurrent_insn_iname_subsets, + get_sched_item_ids_within_inames, + ) from schedule_checker.schedule import Statement dt = DependencyType conc_inames, non_conc_inames = get_concurrent_inames(knl) @@ -194,4 +193,67 @@ def create_dependencies_from_legacy_knl(knl): statement_dep_sets.append( StatementDependencySet(s_before, s_after, dep_dict)) + # loop-carried deps ------------------------------------------ + + # Go through insns and get all unique insn.depends_on iname sets + non_conc_iname_subsets = get_all_nonconcurrent_insn_iname_subsets( + knl, exclude_empty=True, non_conc_inames=non_conc_inames) + 
print("NONCONCURRENT INAME SUBSETS") + print(non_conc_iname_subsets) + + # For each set of insns within a given iname set, find sources and sinks. + # Then make PRIOR dep from all sinks to all sources at previous iterations + for iname_subset in non_conc_iname_subsets: + # find items within this iname set + sched_item_ids = get_sched_item_ids_within_inames(knl, iname_subset) + print("") + print("inames:", iname_subset) + print("matching sched items:", sched_item_ids) + + # find sources and sinks + sources, sinks = get_dependency_sources_and_sinks(knl, sched_item_ids) + # TODO this ignores deps connecting to items outside sched_item_ids, + # is that okay? + print("sources:", sources) + print("sinks:", sinks) + + # create prior deps + for source_id in sources: + for sink_id in sinks: + dep_dict = {} + sink_insn_inames = knl.id_to_insn[sink_id].within_inames + source_insn_inames = knl.id_to_insn[source_id].within_inames + shared_inames = sink_insn_inames & source_insn_inames + shared_non_conc_inames = shared_inames & non_conc_inames + + # TODO who tracks the iname nesting (needed for prior)? + dep_dict[dt.PRIOR] = shared_non_conc_inames + + s_before = Statement(sink_id, sink_insn_inames) + s_after = Statement(source_id, source_insn_inames) + statement_dep_sets.append( + StatementDependencySet(s_before, s_after, dep_dict)) + return statement_dep_sets + + +def get_dependency_sources_and_sinks(knl, sched_item_ids): + from schedule_checker.sched_check_utils import ( + _get_insn_id_from_sched_item, + ) + sources = set() + dep_heads = set() # all dependency heads (within sched_item_ids) + for item_id in sched_item_ids: + # find the deps within sched_item_ids + deps = knl.id_to_insn[item_id].depends_on & sched_item_ids + if deps: + # add deps to dep_heads + dep_heads.update(deps) + else: # has no deps (within sched_item_ids), this is a source + sources.add(item_id) + + # sinks don't point to anyone + sinks = sched_item_ids - dep_heads + + return sources, sinks + diff --git a/sched_check_utils.py b/sched_check_utils.py index bbd4ae81f..1d89df6d2 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -288,7 +288,7 @@ def get_concurrent_inames(knl): conc_inames = set() # TODO remove non-conc test + assertion - nonconc_inames = set() + non_conc_inames = set() all_inames = knl.all_inames() for iname in all_inames: @@ -298,10 +298,10 @@ def get_concurrent_inames(knl): for tag in iname_tags): conc_inames.add(iname) else: - nonconc_inames.add(iname) + non_conc_inames.add(iname) # TODO remove non-conc test + assertion - assert all_inames-conc_inames == nonconc_inames + assert all_inames-conc_inames == non_conc_inames return conc_inames, all_inames-conc_inames @@ -316,10 +316,12 @@ def _get_insn_id_from_sched_item(knl, sched_item): # TODO for better performance, could combine these funcs so we don't # loop over schedule more than once -def get_all_nonconcurrent_insn_iname_subsets(knl, exclude_empty=False): +def get_all_nonconcurrent_insn_iname_subsets( + knl, exclude_empty=False, non_conc_inames=None): from loopy.schedule import Barrier, RunInstruction - _, non_conc_inames = get_concurrent_inames(knl) + if non_conc_inames is None: + _, non_conc_inames = get_concurrent_inames(knl) iname_subsets = set() #TODO do we need to check anything besides Barrer, RunInsn? 
-- GitLab From 22bcccc6e53f5794684f35c189b2723681aaa179 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 15 Jul 2019 01:43:29 -0500 Subject: [PATCH 072/415] adding+testing PRIOR deps for loop carried dependencies; renamed statement.sid to statement.insn_id for clarity; fixed domain list order ambiguity in sched.create_symbolic_isl_map() --- dependency.py | 49 ++++++++++-------- example_pairwise_schedule_validity.py | 71 +++++++++++++++++---------- example_schedule_creation_old.py | 6 ++- sched_check_utils.py | 7 +-- schedule.py | 30 ++++++----- 5 files changed, 101 insertions(+), 62 deletions(-) diff --git a/dependency.py b/dependency.py index 8a02de011..e705a4ec8 100644 --- a/dependency.py +++ b/dependency.py @@ -27,14 +27,18 @@ class StatementDependencySet(object): for dep_type, inames in self.deps.items()]) -def create_elementwise_equality_conjunction_set(names0, names1, islvars): +def create_elementwise_comparison_conjunction_set( + names0, names1, islvars, op="eq"): # initialize set with constraint that is always true - eq_set = islvars[0].eq_set(islvars[0]) + conj_set = islvars[0].eq_set(islvars[0]) for n0, n1 in zip(names0, names1): - eq_set = eq_set & islvars[n0].eq_set(islvars[n1]) + if op == "eq": + conj_set = conj_set & islvars[n0].eq_set(islvars[n1]) + elif op == "lt": + conj_set = conj_set & islvars[n0].lt_set(islvars[n1]) - return eq_set + return conj_set def _convert_constraint_set_to_map(constraint_set, mv_count, src_position=None): @@ -53,7 +57,7 @@ def create_dependency_constraint( all_inames_ordered, dom_before_constraint_set, dom_after_constraint_set, - sid_to_int, + insn_id_to_int, unused_param_name, statement_var_name, ): @@ -97,7 +101,7 @@ def create_dependency_constraint( continue # need to put inames in a list so that order of inames and inames' - # matches when calling create_elementwise_equality_conj... + # matches when calling create_elementwise_comparison_conj... 
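+        # (e.g., if inames is {j, k}, the pairs compared are (j, j') and
+        #  (k, k'), so both lists must use one and the same ordering)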
if not isinstance(inames, list): inames_list = list(inames) else: @@ -105,12 +109,13 @@ def create_dependency_constraint( inames_prime = append_apostrophes(inames_list) # e.g., [j', k'] if dep_type == dt.SAME: - constraint_set = create_elementwise_equality_conjunction_set( - inames_list, inames_prime, islvars) - if dep_type == dt.PRIOR: - # TODO using false as placeholder - constraint_set = islvars[0].eq_set(islvars[0] + 1) - pass + constraint_set = create_elementwise_comparison_conjunction_set( + inames_list, inames_prime, islvars, op="eq") + elif dep_type == dt.PRIOR: + # TODO for now, PRIOR requires upper left quadrant happen before, + # but next need to switch this to ordering based on loop nest + constraint_set = create_elementwise_comparison_conjunction_set( + inames_list, inames_prime, islvars, op="lt") # set unused vars == unused dummy param for iname in inames_before_unused+inames_after_unused: @@ -118,8 +123,8 @@ def create_dependency_constraint( islvars[unused_param_name]) # set statement_var_name == statement # - s_before_int = sid_to_int[statement_dep_set.statement_before.sid] - s_after_int = sid_to_int[statement_dep_set.statement_after.sid] + s_before_int = insn_id_to_int[statement_dep_set.statement_before.insn_id] + s_after_int = insn_id_to_int[statement_dep_set.statement_after.insn_id] constraint_set = constraint_set & islvars[statement_var_name].eq_set( islvars[0]+s_before_int) constraint_set = constraint_set & islvars[statement_var_name_prime].eq_set( @@ -198,24 +203,25 @@ def create_dependencies_from_legacy_knl(knl): # Go through insns and get all unique insn.depends_on iname sets non_conc_iname_subsets = get_all_nonconcurrent_insn_iname_subsets( knl, exclude_empty=True, non_conc_inames=non_conc_inames) - print("NONCONCURRENT INAME SUBSETS") - print(non_conc_iname_subsets) + #print("-"*85) + #print("NONCONCURRENT INAME SUBSETS") + #print(non_conc_iname_subsets) # For each set of insns within a given iname set, find sources and sinks. # Then make PRIOR dep from all sinks to all sources at previous iterations for iname_subset in non_conc_iname_subsets: # find items within this iname set sched_item_ids = get_sched_item_ids_within_inames(knl, iname_subset) - print("") - print("inames:", iname_subset) - print("matching sched items:", sched_item_ids) + #print("") + #print("inames:", iname_subset) + #print("matching sched items:", sched_item_ids) # find sources and sinks sources, sinks = get_dependency_sources_and_sinks(knl, sched_item_ids) # TODO this ignores deps connecting to items outside sched_item_ids, # is that okay? - print("sources:", sources) - print("sinks:", sinks) + #print("sources:", sources) + #print("sinks:", sinks) # create prior deps for source_id in sources: @@ -233,6 +239,7 @@ def create_dependencies_from_legacy_knl(knl): s_after = Statement(source_id, source_insn_inames) statement_dep_sets.append( StatementDependencySet(s_before, s_after, dep_dict)) + #print("-"*85) return statement_dep_sets diff --git a/example_pairwise_schedule_validity.py b/example_pairwise_schedule_validity.py index cb9f45601..8b79cb551 100644 --- a/example_pairwise_schedule_validity.py +++ b/example_pairwise_schedule_validity.py @@ -20,7 +20,7 @@ from schedule_checker.sched_check_utils import ( #knl_choice = "matmul" #knl_choice = "scan" #knl_choice = "dependent_domain" -#knl_choice = "stroud" # invalid sched? +#knl_choice = "stroud" # TODO invalid sched? #knl_choice = "add_barrier" #knl_choice = "nop" #TODO nop not in sched... 
error #knl_choice = "nest_multi_dom" @@ -298,8 +298,16 @@ for dep_set, dom_before, dom_after in deps_and_domains: # For each dependency, create+test schedule containing pair of insns------ +print("="*85) +print("Looping through dep pairs...") + sched_is_valid = True for statement_dep_set, dom_before, dom_after in deps_and_domains: + print("="*85) + print(statement_dep_set) + print("dom_before:", dom_before) + print("dom_after:", dom_after) + s_before = statement_dep_set.statement_before s_after = statement_dep_set.statement_after @@ -322,12 +330,12 @@ for statement_dep_set, dom_before, dom_after in deps_and_domains: # Create a mapping of {statement instance: lex point} # including only instructions involved in this dependency sched = LexSchedule(knl, include_only_insn_ids=[ - s_before.sid, - s_after.sid + s_before.insn_id, + s_after.insn_id ]) - print("-"*85) - print("LexSchedule before processing:") - print(sched) + #print("-"*85) + #print("LexSchedule before processing:") + #print(sched) # Right now, statement tuples consist of single int. # Add all inames from combined domains to statement tuples. @@ -340,38 +348,51 @@ for statement_dep_set, dom_before, dom_after in deps_and_domains: print("LexSchedule with inames added:") print(sched) + print("dict{lp insn id : sched sid int}:") + print(sched.lp_insnid_to_int_sid) + print("-"*85) # Get an isl map representing the LexSchedule; # this requires the iname domains + + if len(sched) == 1: + assert dom_before == dom_after + sid_to_dom = { + sched.lp_insnid_to_int_sid[s_before.insn_id]: dom_before} + elif len(sched) == 2: + sid_to_dom = { + sched.lp_insnid_to_int_sid[s_before.insn_id]: dom_before, + sched.lp_insnid_to_int_sid[s_after.insn_id]: dom_after, + } + else: + assert False + print("sid_to_dom:\n",sid_to_dom) + sched_map_symbolic = sched.create_symbolic_isl_map( - [dom_before, dom_after], all_necessary_inames_ordered) + sid_to_dom, all_necessary_inames_ordered) print("LexSchedule after creating symbolic isl map:") print(sched) print("LexSched:") print(prettier_map_string(sched_map_symbolic)) - print("space (statement instances -> lex time):") - print(sched_map_symbolic.space) - print("-"*85) + #print("space (statement instances -> lex time):") + #print(sched_map_symbolic.space) + #print("-"*85) # get map representing lexicographic ordering lex_order_map_symbolic = sched.get_lex_order_map_for_symbolic_sched() - print("lex order map symbolic:") - print(prettier_map_string(lex_order_map_symbolic)) - print("space (lex time -> lex time):") - print(lex_order_map_symbolic.space) - print("-"*85) + #print("lex order map symbolic:") + #print(prettier_map_string(lex_order_map_symbolic)) + #print("space (lex time -> lex time):") + #print(lex_order_map_symbolic.space) + #print("-"*85) # create statement instance ordering, # maps each statement instance to all statement instances occuring later SIO_symbolic = get_statement_ordering_map( sched_map_symbolic, lex_order_map_symbolic) - print("statement instance ordering symbolic:") - print(prettier_map_string(SIO_symbolic)) - print("SIO space (statement instances -> statement instances):") - print(SIO_symbolic.space) - print("-"*85) - - print("dict{lp insn id : sched sid int}:") - print(sched.lp_insnid_to_int_sid) + #print("statement instance ordering symbolic:") + #print(prettier_map_string(SIO_symbolic)) + #print("SIO space (statement instances -> statement instances):") + #print(SIO_symbolic.space) print("-"*85) # create a map representing constraints from the dependency, @@ -385,8 +406,8 @@ for 
statement_dep_set, dom_before, dom_after in deps_and_domains: sched.unused_param_name, sched.statement_var_name, ) - print("constraint map:") - print(prettier_map_string(constraint_map)) + #print("constraint map:") + #print(prettier_map_string(constraint_map)) aligned_constraint_map = constraint_map.align_params(SIO_symbolic.space) print("aligned constraint map:") print(prettier_map_string(aligned_constraint_map)) diff --git a/example_schedule_creation_old.py b/example_schedule_creation_old.py index 9e9599fe5..ce157418d 100644 --- a/example_schedule_creation_old.py +++ b/example_schedule_creation_old.py @@ -120,8 +120,12 @@ sched.add_symbolic_inames_to_statement_instances( print("LexSchedule with inames added:") print(sched) +sid_to_dom = {} +for insn_id, sid in sched.lp_insnid_to_int_sid.items(): + sid_to_dom[sid] = domain_union + sched_map_symbolic = sched.create_symbolic_isl_map( - [domain_union]*len(sched.lex_schedule), # due to changes, need one per insn + sid_to_dom, all_necessary_inames_ordered) print("LexSchedule after processing:") print(sched) diff --git a/sched_check_utils.py b/sched_check_utils.py index 1d89df6d2..9804ec5d0 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -165,7 +165,7 @@ def create_explicit_map_from_tuples(tuple_pairs, space): def create_symbolic_isl_map_from_tuples( tuple_pairs, space, - domains_to_intersect, + domains_to_intersect, # TODO pass these zipped w/tuples? unused_param_name, statement_var_name, # TODO can we not pass this? ): @@ -207,9 +207,10 @@ def create_symbolic_isl_map_from_tuples( # here for determing where to set inames equal to dummy vars, # should instead determine before in LexSchedule and pass info in dom_var_names = dom.get_var_names(dim_type.out) - assert set( + if not set( [var for var in tup_out if not isinstance(var, int)] - ).issubset(set(dom_var_names)) + ).issubset(set(dom_var_names)): + assert False unused_inames = set(space_in_names) \ - set(dom_var_names) - set([statement_var_name]) for unused_iname in unused_inames: diff --git a/schedule.py b/schedule.py index be60ca533..ccda2e0af 100644 --- a/schedule.py +++ b/schedule.py @@ -5,15 +5,15 @@ from collections import OrderedDict class Statement(object): def __init__( self, - sid, + insn_id, active_inames, ): - self.sid = sid # string + self.insn_id = insn_id # string self.active_inames = active_inames # [string, ] def __str__(self): return "%s {%s}" % ( - self.sid, ",".join(self.active_inames)) + self.insn_id, ",".join(self.active_inames)) class StatementInstance(object): @@ -31,12 +31,12 @@ class StatementInstance(object): def __str__(self): import six return "[%s,%s]" % ( - self.statement.sid, ",".join( + self.statement.insn_id, ",".join( ["%d" % (v) for k, v in sorted(six.iteritems(self.iname_vals))])) def __eq__(self, other): return self.iname_vals == other.iname_vals and \ - self.statement.sid == other.statement.sid + self.statement.insn_id == other.statement.insn_id def __hash__(self): return hash(str(self)) @@ -172,7 +172,7 @@ class LexSchedule(object): def get_last_lex_pt(self): return self.lex_schedule[self.get_last_schedule_item()] - def create_symbolic_isl_map(self, domains, inames_ordered): + def create_symbolic_isl_map(self, sid_to_dom, inames_ordered): # create isl map representing lex schedule from schedule_checker.sched_check_utils import ( @@ -180,7 +180,7 @@ class LexSchedule(object): add_dims_to_isl_set ) - assert len(domains) == len(self.lex_schedule) + assert len(sid_to_dom) == len(self.lex_schedule) # create an isl space # {('statement', used 
in >=1 statement domain>) -> @@ -195,11 +195,14 @@ class LexSchedule(object): # Insert 'statement' dim into domain so that its space allows for # intersection with sched map later new_pose = 0 # insert 'statement' dim at beginning - # TODO don't hardcode pose + # TODO don't hardcode statement var pose doms_to_intersect = [] - for dom in domains: - doms_to_intersect.append(add_dims_to_isl_set( - dom, isl.dim_type.out, [self.statement_var_name], new_pose)) + for tup_in, tup_out in self.items(): + sid = tup_in[0] # TODO don't hardcode this + doms_to_intersect.append( + add_dims_to_isl_set( + sid_to_dom[sid], isl.dim_type.out, + [self.statement_var_name], new_pose)) # create isl map return create_symbolic_isl_map_from_tuples( @@ -223,7 +226,7 @@ class LexSchedule(object): map_str = "{" for state_inst, lex_pt in self.lex_schedule.items(): domain_elem = "[s=%s,%s]" % ( - state_inst.statement.sid, ",".join( + state_inst.statement.insn_id, ",".join( ["%s=%d" % (iname, val) for iname, val in state_inst.iname_vals.items()])) range_elem = "[%s]" % (",".join("%s" % (lx) for lx in lex_pt)) @@ -253,6 +256,9 @@ class LexSchedule(object): def values(self): return self.lex_schedule.values() + def __len__(self): + return len(self.lex_schedule) + #def __str__(self): # #return str(self.get_isl_map()) # return str(self.get_isl_map_str()) -- GitLab From e000ee0effe1febbec6275b3810481dcd7019b8b Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 15 Jul 2019 21:03:35 -0500 Subject: [PATCH 073/415] removed some old TODOs --- example_pairwise_schedule_validity.py | 87 +-------------------------- sched_check_utils.py | 4 +- schedule.py | 2 - 3 files changed, 3 insertions(+), 90 deletions(-) diff --git a/example_pairwise_schedule_validity.py b/example_pairwise_schedule_validity.py index 8b79cb551..220ce389d 100644 --- a/example_pairwise_schedule_validity.py +++ b/example_pairwise_schedule_validity.py @@ -365,7 +365,7 @@ for statement_dep_set, dom_before, dom_after in deps_and_domains: } else: assert False - print("sid_to_dom:\n",sid_to_dom) + print("sid_to_dom:\n", sid_to_dom) sched_map_symbolic = sched.create_symbolic_isl_map( sid_to_dom, all_necessary_inames_ordered) @@ -441,88 +441,3 @@ for statement_dep_set, dom_before, dom_after in deps_and_domains: print("is sched valid? 
constraint map subset of SIO?") print(sched_is_valid) - - -# TODO create example with simple explicit sched -''' -all_inames = ['i', 'j'] -iname_params = ['p0', 'p1'] -iname_param_vals = [2, 2] -statement_var = 's' -statement_param = 'ps' -statement_bound = 2 - - - -s0 = Statement("0", ["i", "j"]) -s1 = Statement("1", ["i", "j"]) -print("Statements:") -print(s0) -print(s1) - -s0_00 = StatementInstance(s0, {"i": 0, "j": 0}) -s0_10 = StatementInstance(s0, {"i": 1, "j": 0}) -s0_01 = StatementInstance(s0, {"i": 0, "j": 1}) -s0_11 = StatementInstance(s0, {"i": 1, "j": 1}) -s1_00 = StatementInstance(s1, {"i": 0, "j": 0}) -s1_10 = StatementInstance(s1, {"i": 1, "j": 0}) -s1_01 = StatementInstance(s1, {"i": 0, "j": 1}) -s1_11 = StatementInstance(s1, {"i": 1, "j": 1}) -print("Statement instances:") -print(s0_00) -print(s0_10) -print(s0_01) -print(s0_11) -print(s1_00) -print(s1_10) -print(s1_01) -print(s1_11) - -state_inst_to_lex_time_dict = { - s0_00: (0,0), - s1_00: (0,1), - s0_10: (0,0), - s1_10: (0,1), - s0_01: (1,0), - s1_01: (1,1), - s0_11: (1,0), - s1_11: (1,1), - } - -sched = LexSchedule(state_inst_to_lex_time_dict) -print("LexSchedule:") -print(sched) - -# sched map should be this: -schedule_explicit_map = isl.Map( - """{ - [s,i,j] -> [0,0] : s = 0 and i = 0 and j = 0; - [s,i,j] -> [0,1] : s = 1 and i = 0 and j = 0; - [s,i,j] -> [0,0] : s = 0 and i = 1 and j = 0; - [s,i,j] -> [0,1] : s = 1 and i = 1 and j = 0; - [s,i,j] -> [1,0] : s = 0 and i = 0 and j = 1; - [s,i,j] -> [1,1] : s = 1 and i = 0 and j = 1; - [s,i,j] -> [1,0] : s = 0 and i = 1 and j = 1; - [s,i,j] -> [1,1] : s = 1 and i = 1 and j = 1; - }""") - -schedule_general_map = isl.Map("{[s,i,j] -> [j,s]}") - -print("Map representing schedule generally:") -print(schedule_general_map) - -# the following is equivalent to explicit map above: -schedule_explicit_map2 = isl.Map( - """{ - [s=0,i=0,j=0] -> [0,0]; - [s=1,i=0,j=0] -> [0,1]; - [s=0,i=1,j=0] -> [0,0]; - [s=1,i=1,j=0] -> [0,1]; - [s=0,i=0,j=1] -> [1,0]; - [s=1,i=0,j=1] -> [1,1]; - [s=0,i=1,j=1] -> [1,0]; - [s=1,i=1,j=1] -> [1,1]; - }""") -assert schedule_explicit_map2 == schedule_explicit_map == sched.get_isl_map() - -''' diff --git a/sched_check_utils.py b/sched_check_utils.py index 9804ec5d0..697e0b033 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -208,8 +208,8 @@ def create_symbolic_isl_map_from_tuples( # should instead determine before in LexSchedule and pass info in dom_var_names = dom.get_var_names(dim_type.out) if not set( - [var for var in tup_out if not isinstance(var, int)] - ).issubset(set(dom_var_names)): + [var for var in tup_out if not isinstance(var, int)] + ).issubset(set(dom_var_names)): assert False unused_inames = set(space_in_names) \ - set(dom_var_names) - set([statement_var_name]) diff --git a/schedule.py b/schedule.py index ccda2e0af..0a5645130 100644 --- a/schedule.py +++ b/schedule.py @@ -79,7 +79,6 @@ class LexSchedule(object): # keep track of the next point in our lexicographic ordering # initially this as a 1-d point with value 0 next_insn_lex_pt = [0] - # TODO originally assumed perfect loop nesting, still the case? 
for sched_item in knl.schedule: if isinstance(sched_item, EnterLoop): iname = sched_item.iname @@ -187,7 +186,6 @@ class LexSchedule(object): # (lexicographic ordering dims)} params_sched = [self.unused_param_name] in_names_sched = [self.statement_var_name] + inames_ordered - # TODO make "l" a variable for consistency out_names_sched = self.get_lex_var_names() from schedule_checker.sched_check_utils import get_isl_space sched_space = get_isl_space(params_sched, in_names_sched, out_names_sched) -- GitLab From 571aa4893edf6902bd8f9d467801ffbec5c46528 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 15 Jul 2019 21:24:09 -0500 Subject: [PATCH 074/415] statement variable position within statement instance tuples now less hard coded --- dependency.py | 8 +++++--- example_dependency_checking.py | 2 ++ example_pairwise_schedule_validity.py | 1 + example_schedule_creation_old.py | 1 + schedule.py | 12 ++++++++---- 5 files changed, 17 insertions(+), 7 deletions(-) diff --git a/dependency.py b/dependency.py index e705a4ec8..8298528b1 100644 --- a/dependency.py +++ b/dependency.py @@ -60,6 +60,7 @@ def create_dependency_constraint( insn_id_to_int, unused_param_name, statement_var_name, + statement_var_pose, ): from schedule_checker.sched_check_utils import ( make_islvars_with_var_primes, @@ -140,12 +141,13 @@ def create_dependency_constraint( # now apply domain sets to constraint variables # add statement variable to doms to enable intersection - new_pose = 0 # insert 'statement' at beginning # TODO don't hardcode position domain_to_intersect = add_dims_to_isl_set( - dom_before_constraint_set, isl.dim_type.out, [statement_var_name], new_pose) + dom_before_constraint_set, isl.dim_type.out, + [statement_var_name], statement_var_pose) range_constraint_set = create_new_set_with_primes(dom_after_constraint_set) range_to_intersect = add_dims_to_isl_set( - range_constraint_set, isl.dim_type.out, [statement_var_name_prime], new_pose) + range_constraint_set, isl.dim_type.out, + [statement_var_name_prime], statement_var_pose) # insert inames missing from doms to enable intersection domain_to_intersect = add_missing_dims_to_isl_set( diff --git a/example_dependency_checking.py b/example_dependency_checking.py index 84adc8fdc..5c3e3281b 100644 --- a/example_dependency_checking.py +++ b/example_dependency_checking.py @@ -114,6 +114,7 @@ print("----------------------------------------------------------------------") # i dependency is none, j dependency is `prior` statement_var = 's' +statement_var_pose = 0 unused_param_name = 'unused' domains = {} @@ -147,6 +148,7 @@ constraint_map = create_dependency_constraint( insnid_to_int_sid, unused_param_name, statement_var, + statement_var_pose, ) print("constraint map space:") print(constraint_map.space) diff --git a/example_pairwise_schedule_validity.py b/example_pairwise_schedule_validity.py index 220ce389d..d30097422 100644 --- a/example_pairwise_schedule_validity.py +++ b/example_pairwise_schedule_validity.py @@ -405,6 +405,7 @@ for statement_dep_set, dom_before, dom_after in deps_and_domains: sched.lp_insnid_to_int_sid, sched.unused_param_name, sched.statement_var_name, + sched.statement_var_pose(), ) #print("constraint map:") #print(prettier_map_string(constraint_map)) diff --git a/example_schedule_creation_old.py b/example_schedule_creation_old.py index ce157418d..87678df56 100644 --- a/example_schedule_creation_old.py +++ b/example_schedule_creation_old.py @@ -191,6 +191,7 @@ for statement_dep_set in statement_dep_sets: sched.lp_insnid_to_int_sid, 
sched.unused_param_name, sched.statement_var_name, + sched.statement_var_pose(), ) print("constraint map:") print(prettier_map_string(constraint_map)) diff --git a/schedule.py b/schedule.py index 0a5645130..605d66ec4 100644 --- a/schedule.py +++ b/schedule.py @@ -171,6 +171,11 @@ class LexSchedule(object): def get_last_lex_pt(self): return self.lex_schedule[self.get_last_schedule_item()] + def statement_var_pose(self): + # TODO what is the proper way to provide this information + # while keeping it immutable? + return 0 # 1st position in statement instance tuple + def create_symbolic_isl_map(self, sid_to_dom, inames_ordered): # create isl map representing lex schedule @@ -192,15 +197,13 @@ class LexSchedule(object): # Insert 'statement' dim into domain so that its space allows for # intersection with sched map later - new_pose = 0 # insert 'statement' dim at beginning - # TODO don't hardcode statement var pose doms_to_intersect = [] for tup_in, tup_out in self.items(): - sid = tup_in[0] # TODO don't hardcode this + sid = tup_in[self.statement_var_pose()] doms_to_intersect.append( add_dims_to_isl_set( sid_to_dom[sid], isl.dim_type.out, - [self.statement_var_name], new_pose)) + [self.statement_var_name], self.statement_var_pose())) # create isl map return create_symbolic_isl_map_from_tuples( @@ -221,6 +224,7 @@ class LexSchedule(object): n_dims, in_names=self.get_lex_var_names()) def get_isl_map_str(self): + # TODO remove this and other unused functions map_str = "{" for state_inst, lex_pt in self.lex_schedule.items(): domain_elem = "[s=%s,%s]" % ( -- GitLab From 18ea86b720eb37d8d9f338ac447e8108fff19f55 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 15 Jul 2019 21:59:55 -0500 Subject: [PATCH 075/415] moved/removed unused code --- dependency.py | 4 -- example_dependency_checking.py | 1 - example_pairwise_schedule_validity.py | 8 ++-- sched_check_utils.py | 57 ++++++++++++++------------- schedule.py | 57 +++++---------------------- 5 files changed, 44 insertions(+), 83 deletions(-) diff --git a/dependency.py b/dependency.py index 8298528b1..2c22d3b49 100644 --- a/dependency.py +++ b/dependency.py @@ -247,9 +247,6 @@ def create_dependencies_from_legacy_knl(knl): def get_dependency_sources_and_sinks(knl, sched_item_ids): - from schedule_checker.sched_check_utils import ( - _get_insn_id_from_sched_item, - ) sources = set() dep_heads = set() # all dependency heads (within sched_item_ids) for item_id in sched_item_ids: @@ -265,4 +262,3 @@ def get_dependency_sources_and_sinks(knl, sched_item_ids): sinks = sched_item_ids - dep_heads return sources, sinks - diff --git a/example_dependency_checking.py b/example_dependency_checking.py index 5c3e3281b..ede094231 100644 --- a/example_dependency_checking.py +++ b/example_dependency_checking.py @@ -120,7 +120,6 @@ unused_param_name = 'unused' domains = {} for iname in all_necessary_inames_ordered: domains[iname] = knl.get_inames_domain(iname) -domains_list = list(domains.values()) # make some dependencies manually for now: s0 = Statement("0", {"i", "j"}) diff --git a/example_pairwise_schedule_validity.py b/example_pairwise_schedule_validity.py index d30097422..25afb3e7d 100644 --- a/example_pairwise_schedule_validity.py +++ b/example_pairwise_schedule_validity.py @@ -9,7 +9,6 @@ from schedule_checker.lexicographic_order_map import ( get_statement_ordering_map, ) from schedule_checker.sched_check_utils import ( - get_concurrent_inames, prettier_map_string, order_var_names_to_match_islset, ) @@ -160,7 +159,7 @@ elif knl_choice == "stroud": knl = 
lp.get_one_scheduled_kernel(knl) if knl_choice == "add_barrier": np.random.seed(17) - a = np.random.randn(16) + #a = np.random.randn(16) cnst = np.random.randn(16) knl = lp.make_kernel( "{[i, ii]: 0<=i, ii statement instances):") diff --git a/sched_check_utils.py b/sched_check_utils.py index 697e0b033..95fdf368f 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -6,10 +6,6 @@ def prettier_map_string(isl_map): ).replace("{ ", "{\n").replace(" }", "\n}").replace("; ", ";\n") -def flatten_2d_list(list2d): - return [item for inner_list in list2d for item in inner_list] - - def get_islvars_from_space(space): param_names = space.get_var_names(isl.dim_type.param) in_names = space.get_var_names(isl.dim_type.in_) @@ -64,29 +60,6 @@ def create_new_set_with_primes(old_set): return new_set -def add_missing_set_dims_to_map_indims(islmap, islset): - new_map = islmap.copy() - for i in range(islset.n_dim()): - new_dim_name = islset.get_dim_name(isl.dim_type.out, i) - # does new_dim_name already exist in map? - dim_idx = new_map.find_dim_by_name(isl.dim_type.in_, new_dim_name) - if dim_idx == -1: - # new map needs dim, insert it - new_map = new_map.insert_dims(isl.dim_type.in_, i, 1) - new_map = new_map.set_dim_name(isl.dim_type.in_, i, new_dim_name) - else: - # new_map already has new_dim_name - if dim_idx == i: - # and it's already in the right spot - continue - else: - # move it - # TODO how do we move these? move_dims doesn't work for same dim_type - print("%s not in right spot" % (new_dim_name)) - raise ValueError("(this should not happen)") - return new_map - - def make_islvars_with_var_primes(var_names, param_names): return isl.make_zero_and_vars( var_names+append_apostrophes(var_names), param_names) @@ -360,3 +333,33 @@ def get_sched_item_ids_within_inames(knl, inames): if inames.issubset(insn.within_inames): sched_item_ids.add(insn_id) return sched_item_ids + + +# currently unused: +""" +def flatten_2d_list(list2d): + return [item for inner_list in list2d for item in inner_list] + + +def add_missing_set_dims_to_map_indims(islmap, islset): + new_map = islmap.copy() + for i in range(islset.n_dim()): + new_dim_name = islset.get_dim_name(isl.dim_type.out, i) + # does new_dim_name already exist in map? + dim_idx = new_map.find_dim_by_name(isl.dim_type.in_, new_dim_name) + if dim_idx == -1: + # new map needs dim, insert it + new_map = new_map.insert_dims(isl.dim_type.in_, i, 1) + new_map = new_map.set_dim_name(isl.dim_type.in_, i, new_dim_name) + else: + # new_map already has new_dim_name + if dim_idx == i: + # and it's already in the right spot + continue + else: + # move it + # TODO how do we move these? 
move_dims doesn't work for same dim_type + print("%s not in right spot" % (new_dim_name)) + raise ValueError("(this should not happen)") + return new_map +""" diff --git a/schedule.py b/schedule.py index 605d66ec4..0e1ed7325 100644 --- a/schedule.py +++ b/schedule.py @@ -16,32 +16,6 @@ class Statement(object): self.insn_id, ",".join(self.active_inames)) -class StatementInstance(object): - def __init__( - self, - statement, - iname_vals, - ): - assert all( - [iname in statement.active_inames - for iname, val in iname_vals.items()]) - self.statement = statement # statement - self.iname_vals = iname_vals # dict{string:int} - - def __str__(self): - import six - return "[%s,%s]" % ( - self.statement.insn_id, ",".join( - ["%d" % (v) for k, v in sorted(six.iteritems(self.iname_vals))])) - - def __eq__(self, other): - return self.iname_vals == other.iname_vals and \ - self.statement.insn_id == other.statement.insn_id - - def __hash__(self): - return hash(str(self)) - - class LexSchedule(object): # contains a mapping of {statement instance: lex point} @@ -168,9 +142,6 @@ class LexSchedule(object): def get_last_schedule_item(self): return next(reversed(self.lex_schedule)) - def get_last_lex_pt(self): - return self.lex_schedule[self.get_last_schedule_item()] - def statement_var_pose(self): # TODO what is the proper way to provide this information # while keeping it immutable? @@ -223,20 +194,6 @@ class LexSchedule(object): return create_symbolic_lex_order_map( n_dims, in_names=self.get_lex_var_names()) - def get_isl_map_str(self): - # TODO remove this and other unused functions - map_str = "{" - for state_inst, lex_pt in self.lex_schedule.items(): - domain_elem = "[s=%s,%s]" % ( - state_inst.statement.insn_id, ",".join( - ["%s=%d" % (iname, val) - for iname, val in state_inst.iname_vals.items()])) - range_elem = "[%s]" % (",".join("%s" % (lx) for lx in lex_pt)) - map_str += "%s -> %s; " % (domain_elem, range_elem) - map_str += "}" - #TODO return map not string - return map_str - def __bool__(self): return bool(self.lex_schedule) @@ -261,12 +218,16 @@ class LexSchedule(object): def __len__(self): return len(self.lex_schedule) - #def __str__(self): - # #return str(self.get_isl_map()) - # return str(self.get_isl_map_str()) - def __str__(self): - return str(list(self.lex_schedule.items())) + sched_str = "{\n" + for state_tuple, lex_pt in self.lex_schedule.items(): + domain_elem = "[%s=%s,%s]" % ( + self.statement_var_name, + state_tuple[self.statement_var_pose()], + ",".join(state_tuple[1:])) + sched_str += "%s -> %s;\n" % (domain_elem, lex_pt) + sched_str += "}" + return sched_str # Methods related to *explicit* schedule/map creation ------------------ # TODO consider removing these -- GitLab From aec46dd2f7901ab4fa2e1676f2f65a6a64b6f8f9 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 15 Jul 2019 22:11:48 -0500 Subject: [PATCH 076/415] added get_inames_in_sched_order(scheduled_knl) func --- example_pairwise_schedule_validity.py | 3 +++ sched_check_utils.py | 6 ++++++ 2 files changed, 9 insertions(+) diff --git a/example_pairwise_schedule_validity.py b/example_pairwise_schedule_validity.py index 25afb3e7d..7e363b802 100644 --- a/example_pairwise_schedule_validity.py +++ b/example_pairwise_schedule_validity.py @@ -11,6 +11,7 @@ from schedule_checker.lexicographic_order_map import ( from schedule_checker.sched_check_utils import ( prettier_map_string, order_var_names_to_match_islset, + get_inames_in_sched_order, ) # Choose kernel ---------------------------------------------------------- @@ -267,6 
+268,8 @@ print("="*80) print("Loopy schedule:") for sched_item in knl.schedule: print(sched_item) +print("scheduled iname order:") +print(get_inames_in_sched_order(knl)) print("="*80) # Create StatementDependencySet(s) from kernel dependencies ----------------- diff --git a/sched_check_utils.py b/sched_check_utils.py index 95fdf368f..2fd4bb952 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -335,6 +335,12 @@ def get_sched_item_ids_within_inames(knl, inames): return sched_item_ids +def get_inames_in_sched_order(scheduled_knl): + # returns non-concurrent inames in order found in sched + from loopy.schedule import EnterLoop + return [sched_item.iname for sched_item in scheduled_knl.schedule + if isinstance(sched_item, EnterLoop)] + # currently unused: """ def flatten_2d_list(list2d): -- GitLab From fd91ef7b51fad4477d46d618e45989f3e68f5654 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 15 Jul 2019 23:06:21 -0500 Subject: [PATCH 077/415] now PRIOR requires statement_before complete previous *lexicographically ordered* iterations of nested loops before statement_after completes current iteration --- dependency.py | 66 ++++++++++++++++++++++----- example_pairwise_schedule_validity.py | 12 +++-- example_schedule_creation_old.py | 8 ++-- lexicographic_order_map.py | 24 ++++++---- schedule.py | 4 +- 5 files changed, 83 insertions(+), 31 deletions(-) diff --git a/dependency.py b/dependency.py index 2c22d3b49..0b15e7669 100644 --- a/dependency.py +++ b/dependency.py @@ -54,9 +54,10 @@ def _convert_constraint_set_to_map(constraint_set, mv_count, src_position=None): def create_dependency_constraint( statement_dep_set, - all_inames_ordered, + all_dom_inames_ordered, dom_before_constraint_set, dom_after_constraint_set, + sched_iname_order, insn_id_to_int, unused_param_name, statement_var_name, @@ -76,17 +77,17 @@ def create_dependency_constraint( # create some (ordered) isl vars to use, e.g., {s, i, j, s', i', j'} islvars = make_islvars_with_var_primes( - [statement_var_name]+all_inames_ordered, + [statement_var_name]+all_dom_inames_ordered, [unused_param_name]) statement_var_name_prime = statement_var_name+"'" # get (ordered) list of unused before/after inames inames_before_unused = [] - for iname in all_inames_ordered: + for iname in all_dom_inames_ordered: if iname not in dom_before_constraint_set.get_var_names(isl.dim_type.out): inames_before_unused.append(iname) inames_after_unused = [] - for iname in all_inames_ordered: + for iname in all_dom_inames_ordered: if iname not in dom_after_constraint_set.get_var_names(isl.dim_type.out): inames_after_unused.append(iname + "'") @@ -113,10 +114,30 @@ def create_dependency_constraint( constraint_set = create_elementwise_comparison_conjunction_set( inames_list, inames_prime, islvars, op="eq") elif dep_type == dt.PRIOR: - # TODO for now, PRIOR requires upper left quadrant happen before, - # but next need to switch this to ordering based on loop nest - constraint_set = create_elementwise_comparison_conjunction_set( - inames_list, inames_prime, islvars, op="lt") + # (old) PRIOR requires upper left quadrant happen before: + #constraint_set = create_elementwise_comparison_conjunction_set( + # inames_list, inames_prime, islvars, op="lt") + + # PRIOR requires statement_before complete previous iterations + # of (nested) loops before statement_after completes current iteration + inames_list_nest_ordered = [ + iname for iname in sched_iname_order + if iname in inames_list] + inames_list_nest_ordered_prime = append_apostrophes( + 
inames_list_nest_ordered) + if set(inames_list_nest_ordered) != set(inames_list): + # TODO when does this happen? + # TODO what do we do here? + assert False + + from schedule_checker.lexicographic_order_map import ( + get_lex_order_constraint + ) + constraint_set = get_lex_order_constraint( + islvars, + inames_list_nest_ordered, + inames_list_nest_ordered_prime, + ) # set unused vars == unused dummy param for iname in inames_before_unused+inames_after_unused: @@ -136,8 +157,19 @@ def create_dependency_constraint( # convert constraint set to map all_constraints_map = _convert_constraint_set_to_map( - all_constraints_set, len(all_inames_ordered) + 1) # +1 for statement var - + all_constraints_set, len(all_dom_inames_ordered) + 1) # +1 for statement var + + """ + # for debugging + if dt.PRIOR in statement_dep_set.deps.keys(): + print("!"*90) + print(inames_list_nest_ordered) + from schedule_checker.sched_check_utils import ( + prettier_map_string, + ) + print(prettier_map_string(all_constraints_map)) + print("."*90) + """ # now apply domain sets to constraint variables # add statement variable to doms to enable intersection @@ -152,15 +184,25 @@ def create_dependency_constraint( # insert inames missing from doms to enable intersection domain_to_intersect = add_missing_dims_to_isl_set( domain_to_intersect, isl.dim_type.out, - [statement_var_name] + all_inames_ordered) + [statement_var_name] + all_dom_inames_ordered) range_to_intersect = add_missing_dims_to_isl_set( range_to_intersect, isl.dim_type.out, - append_apostrophes([statement_var_name] + all_inames_ordered)) + append_apostrophes([statement_var_name] + all_dom_inames_ordered)) # intersect doms map_with_loop_domain_constraints = all_constraints_map.intersect_domain( domain_to_intersect).intersect_range(range_to_intersect) + """ + # for debugging + if dt.PRIOR in statement_dep_set.deps.keys(): + print(inames_list_nest_ordered) + from schedule_checker.sched_check_utils import ( + prettier_map_string, + ) + print(prettier_map_string(map_with_loop_domain_constraints)) + print("!"*90) + """ return map_with_loop_domain_constraints diff --git a/example_pairwise_schedule_validity.py b/example_pairwise_schedule_validity.py index 7e363b802..7a2c7dd52 100644 --- a/example_pairwise_schedule_validity.py +++ b/example_pairwise_schedule_validity.py @@ -268,8 +268,9 @@ print("="*80) print("Loopy schedule:") for sched_item in knl.schedule: print(sched_item) +sched_iname_order = get_inames_in_sched_order(knl) print("scheduled iname order:") -print(get_inames_in_sched_order(knl)) +print(sched_iname_order) print("="*80) # Create StatementDependencySet(s) from kernel dependencies ----------------- @@ -326,7 +327,7 @@ for statement_dep_set, dom_before, dom_after in deps_and_domains: s_before.active_inames | s_after.active_inames ) - all_necessary_inames_ordered = order_var_names_to_match_islset( + all_dom_inames_ordered = order_var_names_to_match_islset( knl.all_inames(), combined_doms) # Create a mapping of {statement instance: lex point} @@ -346,7 +347,7 @@ for statement_dep_set, dom_before, dom_after in deps_and_domains: # the in-dims for an isl map, so if an iname is needed in one # statement tuple, then it is needed in all statement tuples. 
sched.add_symbolic_inames_to_statement_instances( - all_necessary_inames_ordered) + all_dom_inames_ordered) print("LexSchedule with inames added:") print(sched) @@ -370,7 +371,7 @@ for statement_dep_set, dom_before, dom_after in deps_and_domains: print("sid_to_dom:\n", sid_to_dom) sched_map_symbolic = sched.create_symbolic_isl_map( - sid_to_dom, all_necessary_inames_ordered) + sid_to_dom, all_dom_inames_ordered) print("LexSchedule after creating symbolic isl map:") print(sched) print("LexSched:") @@ -401,9 +402,10 @@ for statement_dep_set, dom_before, dom_after in deps_and_domains: # maps each statement instance to all statement instances that must occur later constraint_map = create_dependency_constraint( statement_dep_set, - all_necessary_inames_ordered, + all_dom_inames_ordered, dom_before, dom_after, + sched_iname_order, sched.lp_insnid_to_int_sid, sched.unused_param_name, sched.statement_var_name, diff --git a/example_schedule_creation_old.py b/example_schedule_creation_old.py index 87678df56..e90609482 100644 --- a/example_schedule_creation_old.py +++ b/example_schedule_creation_old.py @@ -102,7 +102,7 @@ if not all_iname_domains_equal(knl): "schedule checker does not yet handle kernels where " "get_inames_domain(iname) is not same for all inames") domain_union = _union_inames_domains(knl) -all_necessary_inames_ordered = order_var_names_to_match_islset( +all_dom_inames_ordered = order_var_names_to_match_islset( knl.all_inames(), domain_union) # get all inames in consistent ordering: @@ -116,7 +116,7 @@ print(sched) # the in-dims for an isl map, so if an iname is needed in one # statement tuple, then it is needed in all statement tuples. sched.add_symbolic_inames_to_statement_instances( - all_necessary_inames_ordered) + all_dom_inames_ordered) print("LexSchedule with inames added:") print(sched) @@ -126,7 +126,7 @@ for insn_id, sid in sched.lp_insnid_to_int_sid.items(): sched_map_symbolic = sched.create_symbolic_isl_map( sid_to_dom, - all_necessary_inames_ordered) + all_dom_inames_ordered) print("LexSchedule after processing:") print(sched) # ------------------------------------------------------------------- @@ -185,7 +185,7 @@ for statement_dep_set in statement_dep_sets: statement_dep_set.statement_after.active_inames) constraint_map = create_dependency_constraint( statement_dep_set, - all_necessary_inames_ordered, + all_dom_inames_ordered, dom_before, dom_after, sched.lp_insnid_to_int_sid, diff --git a/lexicographic_order_map.py b/lexicographic_order_map.py index 7b34d190b..994bbdad4 100644 --- a/lexicographic_order_map.py +++ b/lexicographic_order_map.py @@ -32,6 +32,21 @@ def get_statement_ordering_map(sched_map, lex_map): return sched_map.apply_range(lex_map).apply_range(sched_map.reverse()) +def get_lex_order_constraint(islvars, in_names, out_names): + # create constraint enforcing lex ordering, e.g., in the 3-dim case: + # i0 < o0 or ((i0 = o0) and (i1 < o1)) + # or ((i0 = o0) and (i1 = o1) and (i2 < o2)) + lex_order_constraint = islvars[in_names[0]].lt_set(islvars[out_names[0]]) + for i in range(1, len(in_names)): + lex_order_constraint_conj = islvars[in_names[i]].lt_set( + islvars[out_names[i]]) + for j in range(i): + lex_order_constraint_conj = lex_order_constraint_conj & \ + islvars[in_names[j]].eq_set(islvars[out_names[j]]) + lex_order_constraint = lex_order_constraint | lex_order_constraint_conj + return lex_order_constraint + + def create_symbolic_lex_order_map( n_dims, in_names=None, @@ -53,14 +68,7 @@ def create_symbolic_lex_order_map( # create constraint enforcing 
lex ordering, e.g., in the 3-dim case: # i0 < o0 or ((i0 = o0) and (i1 < o1)) # or ((i0 = o0) and (i1 = o1) and (i2 < o2)) - lex_order_constraint = islvars[in_names[0]].lt_set(islvars[out_names[0]]) - for i in range(1, len(in_names)): - lex_order_constraint_conj = islvars[in_names[i]].lt_set( - islvars[out_names[i]]) - for j in range(i): - lex_order_constraint_conj = lex_order_constraint_conj & \ - islvars[in_names[j]].eq_set(islvars[out_names[j]]) - lex_order_constraint = lex_order_constraint | lex_order_constraint_conj + lex_order_constraint = get_lex_order_constraint(islvars, in_names, out_names) #lex_set = lex_set_outer_bounds & lex_order_constraint #lex_map = isl.Map.from_domain(lex_set) diff --git a/schedule.py b/schedule.py index 0e1ed7325..f47eb2705 100644 --- a/schedule.py +++ b/schedule.py @@ -147,7 +147,7 @@ class LexSchedule(object): # while keeping it immutable? return 0 # 1st position in statement instance tuple - def create_symbolic_isl_map(self, sid_to_dom, inames_ordered): + def create_symbolic_isl_map(self, sid_to_dom, dom_inames_ordered): # create isl map representing lex schedule from schedule_checker.sched_check_utils import ( @@ -161,7 +161,7 @@ class LexSchedule(object): # {('statement', used in >=1 statement domain>) -> # (lexicographic ordering dims)} params_sched = [self.unused_param_name] - in_names_sched = [self.statement_var_name] + inames_ordered + in_names_sched = [self.statement_var_name] + dom_inames_ordered out_names_sched = self.get_lex_var_names() from schedule_checker.sched_check_utils import get_isl_space sched_space = get_isl_space(params_sched, in_names_sched, out_names_sched) -- GitLab From 7d945266cab83e2ac6d81274a60870eb14bf7e3d Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 16 Jul 2019 01:47:42 -0500 Subject: [PATCH 078/415] encapsulated sched checking procedure into single function that takes kernel and returns bool --- __init__.py | 234 ++++++++++++++++++++++++++ example_pairwise_schedule_validity.py | 209 +---------------------- 2 files changed, 239 insertions(+), 204 deletions(-) diff --git a/__init__.py b/__init__.py index e69de29bb..fb3543494 100644 --- a/__init__.py +++ b/__init__.py @@ -0,0 +1,234 @@ +def check_schedule_validity(test_knl, verbose=False): + + from schedule_checker.dependency import ( + create_dependencies_from_legacy_knl, + create_dependency_constraint, + ) + from schedule_checker.schedule import LexSchedule + from schedule_checker.lexicographic_order_map import ( + get_statement_ordering_map, + ) + from schedule_checker.sched_check_utils import ( + prettier_map_string, + order_var_names_to_match_islset, + get_inames_in_sched_order, + ) + + if test_knl.schedule is None: + from loopy import preprocess_kernel, get_one_scheduled_kernel + knl = lp.preprocess_kernel(test_knl) + knl = lp.get_one_scheduled_kernel(knl) + else: + knl = test_knl + + sched_iname_order = get_inames_in_sched_order(knl) + + if verbose: + # Print kernel info ------------------------------------------------------ + print("="*80) + print("Kernel:") + print(knl) + from loopy import generate_code_v2 + print(generate_code_v2(knl).device_code()) + print("="*80) + print("Iname tags: %s" % (knl.iname_to_tags)) + print("="*80) + print("Loopy schedule:") + for sched_item in knl.schedule: + print(sched_item) + print("scheduled iname order:") + print(sched_iname_order) + + # Create StatementDependencySet(s) from kernel dependencies ----------------- + + # Introduce SAME dep for set of shared, non-concurrent inames. 
+ # For each set of insns within a given iname subset, find sources and sinks, + # then make PRIOR dep from all sinks to all sources at previous iterations. + statement_dep_sets = create_dependencies_from_legacy_knl(knl) + + # get separate domains for before.active_inames and after.active_inames + deps_and_domains = [] + for dep_set in statement_dep_sets: + deps_and_domains.append([ + dep_set, + knl.get_inames_domain(dep_set.statement_before.active_inames), + knl.get_inames_domain(dep_set.statement_after.active_inames) + ]) + + if verbose: + print("="*80) + print("StatementDependencies w/domains:") + for dep_set, dom_before, dom_after in deps_and_domains: + print(dep_set) + print(dom_before) + print(dom_after) + + # For each dependency, create+test schedule containing pair of insns------ + + if verbose: + print("="*80) + print("Looping through dep pairs...") + + sched_is_valid = True + for statement_dep_set, dom_before, dom_after in deps_and_domains: + if verbose: + print("="*80) + print("statement dep set:") + print(statement_dep_set) + print("dom_before:", dom_before) + print("dom_after:", dom_after) + + s_before = statement_dep_set.statement_before + s_after = statement_dep_set.statement_after + + # The isl map representing the schedule maps + # statement instances -> lex time + # The 'in_' dim vars need to match for all sched items in the map, + # Instructions that use fewer inames will still need to + # have the unused inames in their 'in_' dim vars, so we'll + # include them and set them equal to a dummy variable. + + # Get all inames now in order to maintain list with consistent ordering + # This will help keep isl maps/sets compatible + combined_doms = knl.get_inames_domain( + s_before.active_inames | + s_after.active_inames + ) + all_dom_inames_ordered = order_var_names_to_match_islset( + knl.all_inames(), combined_doms) + + # Create a mapping of {statement instance: lex point} + # including only instructions involved in this dependency + sched = LexSchedule(knl, include_only_insn_ids=[ + s_before.insn_id, + s_after.insn_id + ]) + + #print("-"*80) + #print("LexSchedule before processing:") + #print(sched) + + # Right now, statement tuples consist of single int. + # Add all inames from combined domains to statement tuples. + # This may include inames not used in every instruction, + # but all in-tuples need to match because they will become + # the in-dims for an isl map, so if an iname is needed in one + # statement tuple, then it is needed in all statement tuples. 
+ sched.add_symbolic_inames_to_statement_instances( + all_dom_inames_ordered) + if verbose: + print("-"*80) + print("LexSchedule with inames added:") + print(sched) + print("dict{lp insn id : sched sid int}:") + print(sched.lp_insnid_to_int_sid) + + # Get an isl map representing the LexSchedule; + # this requires the iname domains + + if len(sched) == 1: + assert dom_before == dom_after + sid_to_dom = { + sched.lp_insnid_to_int_sid[s_before.insn_id]: dom_before} + elif len(sched) == 2: + sid_to_dom = { + sched.lp_insnid_to_int_sid[s_before.insn_id]: dom_before, + sched.lp_insnid_to_int_sid[s_after.insn_id]: dom_after, + } + else: + assert False + + sched_map_symbolic = sched.create_symbolic_isl_map( + sid_to_dom, all_dom_inames_ordered) + + if verbose: + print("sid_to_dom:\n", sid_to_dom) + print("LexSchedule after creating symbolic isl map:") + print(sched) + print("LexSched:") + print(prettier_map_string(sched_map_symbolic)) + #print("space (statement instances -> lex time):") + #print(sched_map_symbolic.space) + #print("-"*80) + + # get map representing lexicographic ordering + lex_order_map_symbolic = sched.get_lex_order_map_for_symbolic_sched() + """ + if verbose: + print("lex order map symbolic:") + print(prettier_map_string(lex_order_map_symbolic)) + print("space (lex time -> lex time):") + print(lex_order_map_symbolic.space) + print("-"*80) + """ + + # create statement instance ordering, + # maps each statement instance to all statement instances occuring later + sio = get_statement_ordering_map( + sched_map_symbolic, lex_order_map_symbolic) + """ + if verbose: + print("statement instance ordering symbolic:") + print(prettier_map_string(sio)) + print("SIO space (statement instances -> statement instances):") + print(sio.space) + print("-"*80) + """ + + # create a map representing constraints from the dependency, + # maps each statement instance to all statement instances that must occur later + constraint_map = create_dependency_constraint( + statement_dep_set, + all_dom_inames_ordered, + dom_before, + dom_after, + sched_iname_order, + sched.lp_insnid_to_int_sid, + sched.unused_param_name, + sched.statement_var_name, + sched.statement_var_pose(), + ) + + aligned_constraint_map = constraint_map.align_params(sio.space) + if verbose: + print("constraint map:") + print(prettier_map_string(aligned_constraint_map)) + + assert aligned_constraint_map.space == sio.space + + if not aligned_constraint_map.is_subset(sio): + + sched_is_valid = False + + if verbose: + print("================ constraint check failure =================") + print("constraint map not subset of SIO") + print("dependency:") + print(statement_dep_set) + print("statement instance ordering:") + print(prettier_map_string(sio)) + print("constraint_map.gist(sio):") + print(aligned_constraint_map.gist(sio)) + print("sio.gist(constraint_map)") + print(sio.gist(aligned_constraint_map)) + """ + from schedule_checker.sched_check_utils import ( + get_concurrent_inames, + ) + conc_inames, non_conc_inames = get_concurrent_inames(knl) + print("concurrent inames:", conc_inames) + print("sequential inames:", non_conc_inames) + print("constraint map space (statment instances -> statement instances):") + print(aligned_constraint_map.space) + print("SIO space (statement instances -> statement instances):") + print(sio.space) + print("constraint map:") + print(prettier_map_string(aligned_constraint_map)) + print("statement instance ordering:") + print(prettier_map_string(sio)) + print("{insn id -> sched sid int} dict:") + 
print(sched.lp_insnid_to_int_sid) + """ + print("===========================================================") + + return sched_is_valid diff --git a/example_pairwise_schedule_validity.py b/example_pairwise_schedule_validity.py index 7a2c7dd52..2c4a01256 100644 --- a/example_pairwise_schedule_validity.py +++ b/example_pairwise_schedule_validity.py @@ -1,18 +1,6 @@ import loopy as lp import numpy as np -from schedule_checker.dependency import ( - create_dependencies_from_legacy_knl, - create_dependency_constraint, -) -from schedule_checker.schedule import LexSchedule -from schedule_checker.lexicographic_order_map import ( - get_statement_ordering_map, -) -from schedule_checker.sched_check_utils import ( - prettier_map_string, - order_var_names_to_match_islset, - get_inames_in_sched_order, -) +from schedule_checker import check_schedule_validity # Choose kernel ---------------------------------------------------------- @@ -20,7 +8,7 @@ from schedule_checker.sched_check_utils import ( #knl_choice = "matmul" #knl_choice = "scan" #knl_choice = "dependent_domain" -#knl_choice = "stroud" # TODO invalid sched? +#knl_choice = "stroud_bernstein" # TODO invalid sched? #knl_choice = "add_barrier" #knl_choice = "nop" #TODO nop not in sched... error #knl_choice = "nest_multi_dom" @@ -110,7 +98,7 @@ elif knl_choice == "dependent_domain": knl = lp.realize_reduction(knl, force_scan=True) knl = lp.preprocess_kernel(knl) knl = lp.get_one_scheduled_kernel(knl) -elif knl_choice == "stroud": +elif knl_choice == "stroud_bernstein": knl = lp.make_kernel( "{[el, i2, alpha1,alpha2]: \ 0 <= el < nels and \ @@ -144,7 +132,7 @@ elif knl_choice == "stroud": lp.GlobalArg("coeffs", None, shape=None), "..." ], - name="stroud", + name="stroud_bernstein", assumptions="deg>=0 and nels>=1" ) @@ -257,195 +245,8 @@ if knl_choice == "loop_carried_deps": knl = lp.preprocess_kernel(knl) knl = lp.get_one_scheduled_kernel(knl) -# Print kernel info ------------------------------------------------------ -print("Kernel:") -print(knl) -print(lp.generate_code_v2(knl).device_code()) -print("="*80) -print("Iname tags: %s" % (knl.iname_to_tags)) -print("="*80) -print("Loopy schedule:") -for sched_item in knl.schedule: - print(sched_item) -sched_iname_order = get_inames_in_sched_order(knl) -print("scheduled iname order:") -print(sched_iname_order) -print("="*80) - -# Create StatementDependencySet(s) from kernel dependencies ----------------- - -# Introduce SAME dep for set of shared, non-concurrent inames -print("-"*85) -statement_dep_sets = create_dependencies_from_legacy_knl(knl) -print("Statement Dependencies:") -for dep_set in statement_dep_sets: - print(dep_set) - print("") - -# get separate domains for before.active_inames and after.active_inames -deps_and_domains = [] -for dep_set in statement_dep_sets: - deps_and_domains.append([ - dep_set, - knl.get_inames_domain(dep_set.statement_before.active_inames), - knl.get_inames_domain(dep_set.statement_after.active_inames) - ]) - -print("-"*85) -print("StatementDependencies w/domains:") -for dep_set, dom_before, dom_after in deps_and_domains: - print(dep_set) - print(dom_before) - print(dom_after) - -# For each dependency, create+test schedule containing pair of insns------ - -print("="*85) -print("Looping through dep pairs...") - -sched_is_valid = True -for statement_dep_set, dom_before, dom_after in deps_and_domains: - print("="*85) - print(statement_dep_set) - print("dom_before:", dom_before) - print("dom_after:", dom_after) - - s_before = statement_dep_set.statement_before - s_after = 
statement_dep_set.statement_after - - # The isl map representing the schedule maps - # statement instances -> lex time - # The 'in_' dim vars need to match for all sched items in the map, - # Instructions that use fewer inames will still need to - # have the unused inames in their 'in_' dim vars, so we'll - # include them and set them equal to a dummy variable. - - # Get all inames now in order to maintain list with consistent ordering - # This will help keep isl maps/sets compatible - combined_doms = knl.get_inames_domain( - s_before.active_inames | - s_after.active_inames - ) - all_dom_inames_ordered = order_var_names_to_match_islset( - knl.all_inames(), combined_doms) - - # Create a mapping of {statement instance: lex point} - # including only instructions involved in this dependency - sched = LexSchedule(knl, include_only_insn_ids=[ - s_before.insn_id, - s_after.insn_id - ]) - #print("-"*85) - #print("LexSchedule before processing:") - #print(sched) - - # Right now, statement tuples consist of single int. - # Add all inames from combined domains to statement tuples. - # This may include inames not used in every instruction, - # but all in-tuples need to match because they will become - # the in-dims for an isl map, so if an iname is needed in one - # statement tuple, then it is needed in all statement tuples. - sched.add_symbolic_inames_to_statement_instances( - all_dom_inames_ordered) - print("LexSchedule with inames added:") - print(sched) - - print("dict{lp insn id : sched sid int}:") - print(sched.lp_insnid_to_int_sid) - print("-"*85) - # Get an isl map representing the LexSchedule; - # this requires the iname domains - - if len(sched) == 1: - assert dom_before == dom_after - sid_to_dom = { - sched.lp_insnid_to_int_sid[s_before.insn_id]: dom_before} - elif len(sched) == 2: - sid_to_dom = { - sched.lp_insnid_to_int_sid[s_before.insn_id]: dom_before, - sched.lp_insnid_to_int_sid[s_after.insn_id]: dom_after, - } - else: - assert False - print("sid_to_dom:\n", sid_to_dom) - - sched_map_symbolic = sched.create_symbolic_isl_map( - sid_to_dom, all_dom_inames_ordered) - print("LexSchedule after creating symbolic isl map:") - print(sched) - print("LexSched:") - print(prettier_map_string(sched_map_symbolic)) - #print("space (statement instances -> lex time):") - #print(sched_map_symbolic.space) - #print("-"*85) - - # get map representing lexicographic ordering - lex_order_map_symbolic = sched.get_lex_order_map_for_symbolic_sched() - #print("lex order map symbolic:") - #print(prettier_map_string(lex_order_map_symbolic)) - #print("space (lex time -> lex time):") - #print(lex_order_map_symbolic.space) - #print("-"*85) - - # create statement instance ordering, - # maps each statement instance to all statement instances occuring later - SIO_symbolic = get_statement_ordering_map( - sched_map_symbolic, lex_order_map_symbolic) - #print("statement instance ordering symbolic:") - #print(prettier_map_string(SIO_symbolic)) - #print("SIO space (statement instances -> statement instances):") - #print(SIO_symbolic.space) - print("-"*85) - - # create a map representing constraints from the dependency, - # maps each statement instance to all statement instances that must occur later - constraint_map = create_dependency_constraint( - statement_dep_set, - all_dom_inames_ordered, - dom_before, - dom_after, - sched_iname_order, - sched.lp_insnid_to_int_sid, - sched.unused_param_name, - sched.statement_var_name, - sched.statement_var_pose(), - ) - #print("constraint map:") - 
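# (Illustrative sketch, not from these patches: the core check the script is
#  about to perform -- "dependency constraint map is a subset of the statement
#  instance ordering" -- shown in miniature with plain islpy objects; both maps
#  below are made up for this example.)
import islpy as isl
# statement 0 at iteration i must happen before statement 1 at iteration i
dep = isl.Map("{ [s, i] -> [t, j] : s = 0 and t = 1 and j = i }")
# a two-level lexicographic "happens before" ordering on (statement, iteration)
sio = isl.Map("{ [s, i] -> [t, j] : t > s or (t = s and j > i) }")
assert dep.is_subset(sio)  # the ordering satisfies the dependency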
#print(prettier_map_string(constraint_map)) - aligned_constraint_map = constraint_map.align_params(SIO_symbolic.space) - print("aligned constraint map:") - print(prettier_map_string(aligned_constraint_map)) - - assert aligned_constraint_map.space == SIO_symbolic.space - if not aligned_constraint_map.is_subset(SIO_symbolic): - sched_is_valid = False - print("================ constraint check failure =================") - print("constraint map not subset of SIO") - print("dependency:") - print(statement_dep_set) - """ - from schedule_checker.sched_check_utils import ( - get_concurrent_inames, - ) - conc_inames, non_conc_inames = get_concurrent_inames(knl) - print("concurrent inames:", conc_inames) - print("sequential inames:", non_conc_inames) - print("constraint map space (statment instances -> statement instances):") - print(aligned_constraint_map.space) - print("SIO space (statement instances -> statement instances):") - print(SIO_symbolic.space) - print("constraint map:") - print(prettier_map_string(aligned_constraint_map)) - print("statement instance ordering:") - print(prettier_map_string(SIO_symbolic)) - print("{insn id -> sched sid int} dict:") - print(sched.lp_insnid_to_int_sid) - print("gist") - print(aligned_constraint_map.gist(SIO_symbolic)) - print(SIO_symbolic.gist(aligned_constraint_map)) - """ - print("===========================================================") +sched_is_valid = check_schedule_validity(knl, verbose=True) print("is sched valid? constraint map subset of SIO?") print(sched_is_valid) -- GitLab From ab0c1b16b6e0df4b71a2e0531fecc9086ececa03 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sat, 20 Jul 2019 12:11:54 -0500 Subject: [PATCH 079/415] added comment about unnecessary (but not problematic) lex point incrementation --- schedule.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/schedule.py b/schedule.py index f47eb2705..f4e42e9fb 100644 --- a/schedule.py +++ b/schedule.py @@ -78,6 +78,8 @@ class LexSchedule(object): next_insn_lex_pt.pop() next_insn_lex_pt.pop() next_insn_lex_pt[-1] = next_insn_lex_pt[-1]+1 + # if we didn't add any statements while in this loop, we might + # sometimes be able to skip this increment, but it's not hurting anything elif isinstance(sched_item, (RunInstruction, Barrier)): from schedule_checker.sched_check_utils import ( _get_insn_id_from_sched_item, -- GitLab From 25f2d436bdeb4958bb54d58c74560bc381685ebf Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sat, 20 Jul 2019 12:13:06 -0500 Subject: [PATCH 080/415] looping through instructions in unscheduled kernel, rather than sched items in sched kernel, in get_all_nonconcurrent_insn_iname_subsets() and get_sched_item_ids_within_inames() --- sched_check_utils.py | 31 ++++++------------------------- 1 file changed, 6 insertions(+), 25 deletions(-) diff --git a/sched_check_utils.py b/sched_check_utils.py index 2fd4bb952..f7b15827c 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -292,23 +292,13 @@ def _get_insn_id_from_sched_item(knl, sched_item): # loop over schedule more than once def get_all_nonconcurrent_insn_iname_subsets( knl, exclude_empty=False, non_conc_inames=None): - from loopy.schedule import Barrier, RunInstruction if non_conc_inames is None: _, non_conc_inames = get_concurrent_inames(knl) iname_subsets = set() - #TODO do we need to check anything besides Barrer, RunInsn? 
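# (Illustrative sketch, not from these patches: what the replacement loop just
#  below computes, on made-up data -- the set of non-concurrent iname subsets
#  actually used by instructions.)
insn_within_inames = [frozenset({"i", "j"}), frozenset({"i"}), frozenset()]
non_conc_inames = frozenset({"i", "j"})
iname_subsets = {w & non_conc_inames for w in insn_within_inames}
iname_subsets.discard(frozenset())  # the exclude_empty=True case
assert iname_subsets == {frozenset({"i", "j"}), frozenset({"i"})}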
- for sched_item in knl.schedule: - if isinstance(sched_item, (RunInstruction, Barrier)): - insn_id = _get_insn_id_from_sched_item(knl, sched_item) - if insn_id is None: - # TODO make sure it's okay to ignore barriers without id - # matmul example has barrier that fails this assertion... - # assert sched_item.originating_insn_id is not None - continue - insn = knl.id_to_insn[insn_id] - iname_subsets.add(insn.within_inames & non_conc_inames) + for insn in knl.instructions: + iname_subsets.add(insn.within_inames & non_conc_inames) if exclude_empty: iname_subsets.discard(frozenset()) @@ -317,21 +307,11 @@ def get_all_nonconcurrent_insn_iname_subsets( def get_sched_item_ids_within_inames(knl, inames): - from loopy.schedule import Barrier, RunInstruction sched_item_ids = set() - for sched_item in knl.schedule: - if isinstance(sched_item, (RunInstruction, Barrier)): - insn_id = _get_insn_id_from_sched_item(knl, sched_item) - if insn_id is None: - # TODO make sure it's okay to ignore barriers without id - # matmul example has barrier that fails this assertion... - # assert sched_item.originating_insn_id is not None - continue - - insn = knl.id_to_insn[insn_id] - if inames.issubset(insn.within_inames): - sched_item_ids.add(insn_id) + for insn in knl.instructions: + if inames.issubset(insn.within_inames): + sched_item_ids.add(insn.id) return sched_item_ids @@ -341,6 +321,7 @@ def get_inames_in_sched_order(scheduled_knl): return [sched_item.iname for sched_item in scheduled_knl.schedule if isinstance(sched_item, EnterLoop)] + # currently unused: """ def flatten_2d_list(list2d): -- GitLab From 66eb8d9984ed341e03b1b7cee0b0fecba98053d8 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sat, 20 Jul 2019 12:16:08 -0500 Subject: [PATCH 081/415] creating dependencies from *unscheduled* kernel; getting loop priority from knl.loop_priority for PRIOR deps; when priority is unknown, PRIOR requires that quadrant happen first --- __init__.py | 74 ++++++++++++++------------- dependency.py | 67 +++++++++++++++--------- example_dependency_checking.py | 3 +- example_pairwise_schedule_validity.py | 29 +++-------- example_schedule_creation_old.py | 2 + 5 files changed, 94 insertions(+), 81 deletions(-) diff --git a/__init__.py b/__init__.py index fb3543494..bf682da87 100644 --- a/__init__.py +++ b/__init__.py @@ -1,4 +1,4 @@ -def check_schedule_validity(test_knl, verbose=False): +def check_schedule_validity(unscheduled_knl, verbose=False): from schedule_checker.dependency import ( create_dependencies_from_legacy_knl, @@ -14,45 +14,24 @@ def check_schedule_validity(test_knl, verbose=False): get_inames_in_sched_order, ) - if test_knl.schedule is None: - from loopy import preprocess_kernel, get_one_scheduled_kernel - knl = lp.preprocess_kernel(test_knl) - knl = lp.get_one_scheduled_kernel(knl) - else: - knl = test_knl - - sched_iname_order = get_inames_in_sched_order(knl) - - if verbose: - # Print kernel info ------------------------------------------------------ - print("="*80) - print("Kernel:") - print(knl) - from loopy import generate_code_v2 - print(generate_code_v2(knl).device_code()) - print("="*80) - print("Iname tags: %s" % (knl.iname_to_tags)) - print("="*80) - print("Loopy schedule:") - for sched_item in knl.schedule: - print(sched_item) - print("scheduled iname order:") - print(sched_iname_order) + from loopy import preprocess_kernel + # TODO check to see if preprocessed already? 
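# (Sketch for the TODO above, not from these patches; it assumes loopy exposes
#  a kernel-state enum as loopy.kernel.KernelState and a LoopKernel.state
#  attribute -- verify against the loopy version in use before relying on it.)
#
#     from loopy.kernel import KernelState
#     if unscheduled_knl.state < KernelState.PREPROCESSED:
#         preprocessed_knl = preprocess_kernel(unscheduled_knl)
#     else:
#         preprocessed_knl = unscheduled_knl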
+ preprocessed_knl = preprocess_kernel(unscheduled_knl) # Create StatementDependencySet(s) from kernel dependencies ----------------- # Introduce SAME dep for set of shared, non-concurrent inames. # For each set of insns within a given iname subset, find sources and sinks, # then make PRIOR dep from all sinks to all sources at previous iterations. - statement_dep_sets = create_dependencies_from_legacy_knl(knl) + statement_dep_sets = create_dependencies_from_legacy_knl(preprocessed_knl) # get separate domains for before.active_inames and after.active_inames deps_and_domains = [] for dep_set in statement_dep_sets: deps_and_domains.append([ dep_set, - knl.get_inames_domain(dep_set.statement_before.active_inames), - knl.get_inames_domain(dep_set.statement_after.active_inames) + preprocessed_knl.get_inames_domain(dep_set.statement_before.active_inames), + preprocessed_knl.get_inames_domain(dep_set.statement_after.active_inames) ]) if verbose: @@ -63,6 +42,31 @@ def check_schedule_validity(test_knl, verbose=False): print(dom_before) print(dom_after) + # get a schedule to check + if preprocessed_knl.schedule is None: + from loopy import get_one_scheduled_kernel + scheduled_knl = get_one_scheduled_kernel(preprocessed_knl) + else: + scheduled_knl = preprocessed_knl + + sched_iname_order = get_inames_in_sched_order(scheduled_knl) + + if verbose: + # Print kernel info ------------------------------------------------------ + print("="*80) + print("Kernel:") + print(scheduled_knl) + from loopy import generate_code_v2 + print(generate_code_v2(scheduled_knl).device_code()) + print("="*80) + print("Iname tags: %s" % (scheduled_knl.iname_to_tags)) + print("="*80) + print("Loopy schedule:") + for sched_item in scheduled_knl.schedule: + print(sched_item) + print("scheduled iname order:") + print(sched_iname_order) + # For each dependency, create+test schedule containing pair of insns------ if verbose: @@ -90,16 +94,16 @@ def check_schedule_validity(test_knl, verbose=False): # Get all inames now in order to maintain list with consistent ordering # This will help keep isl maps/sets compatible - combined_doms = knl.get_inames_domain( + combined_doms = preprocessed_knl.get_inames_domain( s_before.active_inames | s_after.active_inames ) all_dom_inames_ordered = order_var_names_to_match_islset( - knl.all_inames(), combined_doms) + preprocessed_knl.all_inames(), combined_doms) # Create a mapping of {statement instance: lex point} # including only instructions involved in this dependency - sched = LexSchedule(knl, include_only_insn_ids=[ + sched = LexSchedule(scheduled_knl, include_only_insn_ids=[ s_before.insn_id, s_after.insn_id ]) @@ -176,13 +180,13 @@ def check_schedule_validity(test_knl, verbose=False): """ # create a map representing constraints from the dependency, - # maps each statement instance to all statement instances that must occur later + # maps statement instance to all statement instances that must occur later constraint_map = create_dependency_constraint( statement_dep_set, all_dom_inames_ordered, dom_before, dom_after, - sched_iname_order, + unscheduled_knl.loop_priority, sched.lp_insnid_to_int_sid, sched.unused_param_name, sched.statement_var_name, @@ -215,10 +219,10 @@ def check_schedule_validity(test_knl, verbose=False): from schedule_checker.sched_check_utils import ( get_concurrent_inames, ) - conc_inames, non_conc_inames = get_concurrent_inames(knl) + conc_inames, non_conc_inames = get_concurrent_inames(scheduled_knl) print("concurrent inames:", conc_inames) print("sequential inames:", 
non_conc_inames) - print("constraint map space (statment instances -> statement instances):") + print("constraint map space (stmt instances -> stmt instances):") print(aligned_constraint_map.space) print("SIO space (statement instances -> statement instances):") print(sio.space) diff --git a/dependency.py b/dependency.py index 0b15e7669..788854fb1 100644 --- a/dependency.py +++ b/dependency.py @@ -57,7 +57,7 @@ def create_dependency_constraint( all_dom_inames_ordered, dom_before_constraint_set, dom_after_constraint_set, - sched_iname_order, + loop_priorities, insn_id_to_int, unused_param_name, statement_var_name, @@ -114,30 +114,49 @@ def create_dependency_constraint( constraint_set = create_elementwise_comparison_conjunction_set( inames_list, inames_prime, islvars, op="eq") elif dep_type == dt.PRIOR: - # (old) PRIOR requires upper left quadrant happen before: - #constraint_set = create_elementwise_comparison_conjunction_set( - # inames_list, inames_prime, islvars, op="lt") - - # PRIOR requires statement_before complete previous iterations - # of (nested) loops before statement_after completes current iteration - inames_list_nest_ordered = [ - iname for iname in sched_iname_order - if iname in inames_list] - inames_list_nest_ordered_prime = append_apostrophes( - inames_list_nest_ordered) - if set(inames_list_nest_ordered) != set(inames_list): - # TODO when does this happen? - # TODO what do we do here? - assert False - - from schedule_checker.lexicographic_order_map import ( - get_lex_order_constraint - ) - constraint_set = get_lex_order_constraint( - islvars, - inames_list_nest_ordered, - inames_list_nest_ordered_prime, + + # if nesting is known: + # TODO there might be situations where we know the priority even + # though loop_priorities is None + priority_known = False + if loop_priorities: + for priority_tuple in loop_priorities: + # TODO might be able to deduce priority from multiple tuples + # even if all inames are not present in any single tuple + if set(inames_list).issubset(set(priority_tuple)): + priority_known = True + break + + # if only one loop, we know the priority + if not priority_known and len(inames_list) == 1: + priority_tuple = tuple(inames_list) + priority_known = True + + if priority_known: + # PRIOR requires statement_before complete previous iterations + # of loops before statement_after completes current iteration + # according to loop nest order + inames_list_nest_ordered = [ + iname for iname in priority_tuple + if iname in inames_list] + inames_list_nest_ordered_prime = append_apostrophes( + inames_list_nest_ordered) + if set(inames_list_nest_ordered) != set(inames_list): + # TODO could this happen? 
+ assert False + + from schedule_checker.lexicographic_order_map import ( + get_lex_order_constraint ) + constraint_set = get_lex_order_constraint( + islvars, + inames_list_nest_ordered, + inames_list_nest_ordered_prime, + ) + else: # priority not known + # PRIOR requires upper left quadrant happen before: + constraint_set = create_elementwise_comparison_conjunction_set( + inames_list, inames_prime, islvars, op="lt") # set unused vars == unused dummy param for iname in inames_before_unused+inames_after_unused: diff --git a/example_dependency_checking.py b/example_dependency_checking.py index ede094231..b037a02fa 100644 --- a/example_dependency_checking.py +++ b/example_dependency_checking.py @@ -138,12 +138,13 @@ dom_before = knl.get_inames_domain( dom_after = knl.get_inames_domain( statement_dep_set.statement_after.active_inames ) - +loop_priority = None # TODO constraint_map = create_dependency_constraint( statement_dep_set, all_necessary_inames_ordered, dom_before, dom_after, + loop_priority, insnid_to_int_sid, unused_param_name, statement_var, diff --git a/example_pairwise_schedule_validity.py b/example_pairwise_schedule_validity.py index 2c4a01256..64edcea44 100644 --- a/example_pairwise_schedule_validity.py +++ b/example_pairwise_schedule_validity.py @@ -4,15 +4,15 @@ from schedule_checker import check_schedule_validity # Choose kernel ---------------------------------------------------------- -#knl_choice = "example" -#knl_choice = "matmul" +knl_choice = "example" +knl_choice = "matmul" #knl_choice = "scan" #knl_choice = "dependent_domain" #knl_choice = "stroud_bernstein" # TODO invalid sched? #knl_choice = "add_barrier" #knl_choice = "nop" #TODO nop not in sched... error #knl_choice = "nest_multi_dom" -knl_choice = "loop_carried_deps" +#knl_choice = "loop_carried_deps" if knl_choice == "example": knl = lp.make_kernel( @@ -46,8 +46,9 @@ if knl_choice == "example": knl, {"b": np.float32, "d": np.float32, "f": np.float32}) #knl = lp.tag_inames(knl, {"i": "l.0"}) - knl = lp.preprocess_kernel(knl) - knl = lp.get_one_scheduled_kernel(knl) + #knl = lp.prioritize_loops(knl, "i,k,j") + knl = lp.prioritize_loops(knl, "i,k") + knl = lp.prioritize_loops(knl, "i,j") elif knl_choice == "matmul": bsize = 16 knl = lp.make_kernel( @@ -65,8 +66,7 @@ elif knl_choice == "matmul": knl = lp.split_iname(knl, "k", bsize) knl = lp.add_prefetch(knl, "a", ["k_inner", "i_inner"], default_tag="l.auto") knl = lp.add_prefetch(knl, "b", ["j_inner", "k_inner"], default_tag="l.auto") - knl = lp.preprocess_kernel(knl) - knl = lp.get_one_scheduled_kernel(knl) + knl = lp.prioritize_loops(knl, "k_outer,k_inner") elif knl_choice == "scan": stride = 1 n_scan = 16 @@ -81,8 +81,6 @@ elif knl_choice == "scan": knl = lp.fix_parameters(knl, n=n_scan) knl = lp.realize_reduction(knl, force_scan=True) - knl = lp.preprocess_kernel(knl) - knl = lp.get_one_scheduled_kernel(knl) elif knl_choice == "dependent_domain": knl = lp.make_kernel( [ @@ -96,8 +94,6 @@ elif knl_choice == "dependent_domain": lang_version=(2018, 2), ) knl = lp.realize_reduction(knl, force_scan=True) - knl = lp.preprocess_kernel(knl) - knl = lp.get_one_scheduled_kernel(knl) elif knl_choice == "stroud_bernstein": knl = lp.make_kernel( "{[el, i2, alpha1,alpha2]: \ @@ -144,8 +140,6 @@ elif knl_choice == "stroud_bernstein": knl = lp.split_iname(knl, "el_outer", 2, outer_tag="g.0", inner_tag="ilp", slabs=(0, 1)) knl = lp.tag_inames(knl, dict(i2="l.1", alpha1="unr", alpha2="unr")) - knl = lp.preprocess_kernel(knl) - knl = lp.get_one_scheduled_kernel(knl) if knl_choice 
== "add_barrier": np.random.seed(17) #a = np.random.randn(16) @@ -166,8 +160,6 @@ if knl_choice == "add_barrier": knl = lp.split_iname(knl, "i", 2, outer_tag="g.0", inner_tag="l.0") knl = lp.split_iname(knl, "ii", 2, outer_tag="g.0", inner_tag="l.0") - knl = lp.preprocess_kernel(knl) - knl = lp.get_one_scheduled_kernel(knl) if knl_choice == "nop": knl = lp.make_kernel( [ @@ -185,8 +177,6 @@ if knl_choice == "nop": "...", seq_dependencies=True) knl = lp.fix_parameters(knl, dim=3) - knl = lp.preprocess_kernel(knl) - knl = lp.get_one_scheduled_kernel(knl) if knl_choice == "nest_multi_dom": #"{[i,j,k]: 0<=i,j,kbar = foo {id=insn4,dep=insn3} """ + knl = lp.prioritize_loops(knl, "i,j,k") - knl = lp.preprocess_kernel(knl) - knl = lp.get_one_scheduled_kernel(knl) if knl_choice == "loop_carried_deps": knl = lp.make_kernel( "{[i]: 0<=i Date: Mon, 22 Jul 2019 06:12:17 -0500 Subject: [PATCH 082/415] when checking to see if loop priority is known, instead of requiring all relevant inames be present in single tuple within the knl.loop_priority set, determine whether the sets taken together fully specify the loop prority, e.g., if we ahve (a,b) and (b,c) then we know a->b->c --- __init__.py | 16 +++++--- dependency.py | 49 ++++++++++++++++++++++- example_pairwise_schedule_validity.py | 24 +++++++----- sched_check_utils.py | 56 +++++++++++++++++++++++++++ 4 files changed, 129 insertions(+), 16 deletions(-) diff --git a/__init__.py b/__init__.py index bf682da87..4019453ee 100644 --- a/__init__.py +++ b/__init__.py @@ -1,4 +1,7 @@ -def check_schedule_validity(unscheduled_knl, verbose=False): +def check_schedule_validity( + unscheduled_knl, + verbose=False, + _use_scheduled_kernel_to_obtain_loop_priority=False): from schedule_checker.dependency import ( create_dependencies_from_legacy_knl, @@ -94,12 +97,13 @@ def check_schedule_validity(unscheduled_knl, verbose=False): # Get all inames now in order to maintain list with consistent ordering # This will help keep isl maps/sets compatible - combined_doms = preprocessed_knl.get_inames_domain( - s_before.active_inames | - s_after.active_inames - ) all_dom_inames_ordered = order_var_names_to_match_islset( - preprocessed_knl.all_inames(), combined_doms) + preprocessed_knl.all_inames(), + preprocessed_knl.get_inames_domain( + s_before.active_inames | + s_after.active_inames + ) + ) # Create a mapping of {statement instance: lex point} # including only instructions involved in this dependency diff --git a/dependency.py b/dependency.py index 788854fb1..5a5e06189 100644 --- a/dependency.py +++ b/dependency.py @@ -120,12 +120,59 @@ def create_dependency_constraint( # though loop_priorities is None priority_known = False if loop_priorities: + # assumes all loop_priority tuples are consistent + + # with multiple priority tuples, determine whether the combined + # info they contain can give us a single, full proiritization, + # e.g., if prios={(a, b), (b, c), (c, d, e)}, then we know + # a -> b -> c -> d -> e + + # remove irrelevant inames from priority tuples (because we're + # about to perform a costly operation on remaining tuples) + relevant_priorities = set() + for p_tuple in loop_priorities: + new_tuple = [iname for iname in p_tuple if iname in inames_list] + # empty tuples and single tuples don't help us define + # a nesting, so ignore them (if we're dealing with a single + # iname, priorities will be ignored later anyway) + if len(new_tuple) > 1: + relevant_priorities.add(tuple(new_tuple)) + + nested_after = {} + for iname in inames_list: + comes_after_iname = 
set() + for p_tuple in relevant_priorities: + if iname in p_tuple: + comes_after_iname.update([ + iname for iname in + p_tuple[p_tuple.index(iname)+1:]]) + nested_after[iname] = comes_after_iname + + from schedule_checker.sched_check_utils import ( + get_orderings_of_length_n + ) + orders = get_orderings_of_length_n( + nested_after, + required_length=len(inames_list), + #return_first_found=True, # TODO might be faster + return_first_found=False, + ) + if orders: + assert len(orders) == 1 + # TODO can remove assert if return_first_found above + # (or if we trust that all iname priorities are consistent) + priority_known = True + priority_tuple = orders.pop() + + # old way + """ for priority_tuple in loop_priorities: - # TODO might be able to deduce priority from multiple tuples + # might be able to deduce priority from multiple tuples # even if all inames are not present in any single tuple if set(inames_list).issubset(set(priority_tuple)): priority_known = True break + """ # if only one loop, we know the priority if not priority_known and len(inames_list) == 1: diff --git a/example_pairwise_schedule_validity.py b/example_pairwise_schedule_validity.py index 64edcea44..4bdf9b3d2 100644 --- a/example_pairwise_schedule_validity.py +++ b/example_pairwise_schedule_validity.py @@ -4,13 +4,14 @@ from schedule_checker import check_schedule_validity # Choose kernel ---------------------------------------------------------- + knl_choice = "example" -knl_choice = "matmul" +#knl_choice = "matmul" #knl_choice = "scan" #knl_choice = "dependent_domain" #knl_choice = "stroud_bernstein" # TODO invalid sched? #knl_choice = "add_barrier" -#knl_choice = "nop" #TODO nop not in sched... error +#knl_choice = "nop" #knl_choice = "nest_multi_dom" #knl_choice = "loop_carried_deps" @@ -184,20 +185,23 @@ if knl_choice == "nest_multi_dom": "{[i]: 0<=iacc = 0 {id=insn0} - for j - for k - acc = acc + j + k {id=insn1,dep=insn0} + for x,xx + for i + <>acc = 0 {id=insn0} + for j + for k + acc = acc + j + k {id=insn1,dep=insn0} + end end end end """, name="nest_multi_dom", #assumptions="n >= 1", - assumptions="ni,nj,nk >= 1", + assumptions="ni,nj,nk,nx >= 1", lang_version=(2018, 2) ) """ @@ -213,7 +217,9 @@ if knl_choice == "nest_multi_dom": end <>bar = foo {id=insn4,dep=insn3} """ - knl = lp.prioritize_loops(knl, "i,j,k") + knl = lp.prioritize_loops(knl, "x,xx,i") + knl = lp.prioritize_loops(knl, "i,j") + knl = lp.prioritize_loops(knl, "j,k") if knl_choice == "loop_carried_deps": knl = lp.make_kernel( diff --git a/sched_check_utils.py b/sched_check_utils.py index f7b15827c..4e72f3c5d 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -322,6 +322,62 @@ def get_inames_in_sched_order(scheduled_knl): if isinstance(sched_item, EnterLoop)] +# TODO made a mess trying to make this as fast as possible, +# probably a better way +def _generate_orderings_starting_w_prefix( + allowed_after_dict, orderings, required_length=None, + start_prefix=(), return_first_found=False): + # comes after dict = {str: set(str)} + # start prefix = tuple(str) + # orderings = set + if start_prefix: + next_items = allowed_after_dict[start_prefix[-1]]-set(start_prefix) + else: + next_items = allowed_after_dict.keys() + + if required_length: + if len(start_prefix) == required_length: + orderings.add(start_prefix) + if return_first_found: + return + else: + orderings.add(start_prefix) + if return_first_found: + return + + # return if no more items left + if not next_items: + return + + for next_item in next_items: + new_prefix = start_prefix + 
(next_item,) + _generate_orderings_starting_w_prefix( + allowed_after_dict, + orderings, + required_length=required_length, + start_prefix=new_prefix, + return_first_found=return_first_found, + ) + if return_first_found and orderings: + return + return + + +def get_orderings_of_length_n( + allowed_after_dict, required_length, return_first_found=False): + # comes after dict = {str: set(str)} + # note: if the set for a dict key is empty, nothing allowed to come after + orderings = set() + _generate_orderings_starting_w_prefix( + allowed_after_dict, + orderings, + required_length=required_length, + start_prefix=(), + return_first_found=return_first_found, + ) + return orderings + + # currently unused: """ def flatten_2d_list(list2d): -- GitLab From 1bb00d4300365941bcdc8f28814c7afefb9ea156 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 23 Jul 2019 07:03:42 -0500 Subject: [PATCH 083/415] in get_concurrent_inames, check for ConcurrentTag rather than list of specific tags --- sched_check_utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sched_check_utils.py b/sched_check_utils.py index 4e72f3c5d..e116bc91c 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -258,7 +258,7 @@ def get_isl_space(param_names, in_names, out_names): def get_concurrent_inames(knl): - from loopy.kernel.data import LocalIndexTag, GroupIndexTag + from loopy.kernel.data import ConcurrentTag conc_inames = set() # TODO remove non-conc test + assertion @@ -268,7 +268,7 @@ def get_concurrent_inames(knl): for iname in all_inames: iname_tags = knl.iname_to_tags.get(iname, None) if iname_tags and any( - isinstance(tag, (LocalIndexTag, GroupIndexTag)) + isinstance(tag, ConcurrentTag) for tag in iname_tags): conc_inames.add(iname) else: -- GitLab From 9865c52224a572d2cb907015c3c3c5cd197c5162 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 23 Jul 2019 07:04:23 -0500 Subject: [PATCH 084/415] made simpler stroud_bernstein kernel for debugging --- __init__.py | 22 +++++++++++++-------- example_pairwise_schedule_validity.py | 28 ++++++++++++++++++++++++--- schedule.py | 2 +- 3 files changed, 40 insertions(+), 12 deletions(-) diff --git a/__init__.py b/__init__.py index 4019453ee..a9da6f457 100644 --- a/__init__.py +++ b/__init__.py @@ -1,3 +1,5 @@ + + def check_schedule_validity( unscheduled_knl, verbose=False, @@ -33,8 +35,10 @@ def check_schedule_validity( for dep_set in statement_dep_sets: deps_and_domains.append([ dep_set, - preprocessed_knl.get_inames_domain(dep_set.statement_before.active_inames), - preprocessed_knl.get_inames_domain(dep_set.statement_after.active_inames) + preprocessed_knl.get_inames_domain( + dep_set.statement_before.active_inames), + preprocessed_knl.get_inames_domain( + dep_set.statement_after.active_inames) ]) if verbose: @@ -95,15 +99,15 @@ def check_schedule_validity( # have the unused inames in their 'in_' dim vars, so we'll # include them and set them equal to a dummy variable. 
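# (Illustrative sketch, not from these patches: the loop-nesting deduction
#  described in PATCH 082 above, reproduced with a brute-force stand-in rather
#  than get_orderings_of_length_n -- from priorities {(a,b), (b,c), (c,d,e)}
#  only one full ordering survives.)
from itertools import permutations
priorities = {("a", "b"), ("b", "c"), ("c", "d", "e")}
full_orders = [
    p for p in permutations("abcde")
    if all(p.index(x) < p.index(y)
           for tup in priorities for x, y in zip(tup, tup[1:]))
]
assert full_orders == [("a", "b", "c", "d", "e")]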
+ # combined_doms is only used for printing (map.gist(dom)) + # and for getting a consistent iname ordering to use in our maps + combined_doms = preprocessed_knl.get_inames_domain( + s_before.active_inames | s_after.active_inames) + # Get all inames now in order to maintain list with consistent ordering # This will help keep isl maps/sets compatible all_dom_inames_ordered = order_var_names_to_match_islset( - preprocessed_knl.all_inames(), - preprocessed_knl.get_inames_domain( - s_before.active_inames | - s_after.active_inames - ) - ) + preprocessed_knl.all_inames(), combined_doms) # Create a mapping of {statement instance: lex point} # including only instructions involved in this dependency @@ -219,6 +223,8 @@ def check_schedule_validity( print(aligned_constraint_map.gist(sio)) print("sio.gist(constraint_map)") print(sio.gist(aligned_constraint_map)) + print("loop priority known:") + print(preprocessed_knl.loop_priority) """ from schedule_checker.sched_check_utils import ( get_concurrent_inames, diff --git a/example_pairwise_schedule_validity.py b/example_pairwise_schedule_validity.py index 4bdf9b3d2..0addf8209 100644 --- a/example_pairwise_schedule_validity.py +++ b/example_pairwise_schedule_validity.py @@ -5,11 +5,12 @@ from schedule_checker import check_schedule_validity # Choose kernel ---------------------------------------------------------- -knl_choice = "example" +#knl_choice = "example" #knl_choice = "matmul" #knl_choice = "scan" #knl_choice = "dependent_domain" -#knl_choice = "stroud_bernstein" # TODO invalid sched? +#knl_choice = "stroud_bernstein_orig" # TODO invalid sched? +knl_choice = "stroud_bernstein" # TODO invalid sched? #knl_choice = "add_barrier" #knl_choice = "nop" #knl_choice = "nest_multi_dom" @@ -95,7 +96,7 @@ elif knl_choice == "dependent_domain": lang_version=(2018, 2), ) knl = lp.realize_reduction(knl, force_scan=True) -elif knl_choice == "stroud_bernstein": +elif knl_choice == "stroud_bernstein_orig": knl = lp.make_kernel( "{[el, i2, alpha1,alpha2]: \ 0 <= el < nels and \ @@ -141,6 +142,27 @@ elif knl_choice == "stroud_bernstein": knl = lp.split_iname(knl, "el_outer", 2, outer_tag="g.0", inner_tag="ilp", slabs=(0, 1)) knl = lp.tag_inames(knl, dict(i2="l.1", alpha1="unr", alpha2="unr")) +elif knl_choice == "stroud_bernstein": + knl = lp.make_kernel( + "{[el]: 0 <= el < nels}", + """ + for el + tmp[el] = 3.14 {id=write_tmp} + aind = 1 {id=aind_incr,dep=write_tmp} + end + """, + name="stroud_bernstein", + assumptions="nels>=1 and nels mod 32 = 0", + ) + + knl = lp.split_iname(knl, "el", 16, + inner_tag="l.0", + ) + knl = lp.split_iname(knl, "el_outer", 2, + outer_tag="g.0", + inner_tag="ilp", + ) + #knl = lp.prioritize_loops(knl, "el_outer_outer,el_outer_inner,el_inner,a") if knl_choice == "add_barrier": np.random.seed(17) #a = np.random.randn(16) diff --git a/schedule.py b/schedule.py index f4e42e9fb..8d7626be0 100644 --- a/schedule.py +++ b/schedule.py @@ -79,7 +79,7 @@ class LexSchedule(object): next_insn_lex_pt.pop() next_insn_lex_pt[-1] = next_insn_lex_pt[-1]+1 # if we didn't add any statements while in this loop, we might - # sometimes be able to skip this increment, but it's not hurting anything + # sometimes be able to skip increment, but it's not hurting anything elif isinstance(sched_item, (RunInstruction, Barrier)): from schedule_checker.sched_check_utils import ( _get_insn_id_from_sched_item, -- GitLab From 3208f05c54e363a81e7a9c3655480418b7bfc56a Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 23 Jul 2019 07:10:12 -0500 Subject: [PATCH 085/415] 
added (commented out) unr tag to replace ilp tag in stroud kernel --- example_pairwise_schedule_validity.py | 1 + 1 file changed, 1 insertion(+) diff --git a/example_pairwise_schedule_validity.py b/example_pairwise_schedule_validity.py index 0addf8209..460f1ad14 100644 --- a/example_pairwise_schedule_validity.py +++ b/example_pairwise_schedule_validity.py @@ -161,6 +161,7 @@ elif knl_choice == "stroud_bernstein": knl = lp.split_iname(knl, "el_outer", 2, outer_tag="g.0", inner_tag="ilp", + #inner_tag="unr", ) #knl = lp.prioritize_loops(knl, "el_outer_outer,el_outer_inner,el_inner,a") if knl_choice == "add_barrier": -- GitLab From 55172e335b51cebfedb205d78f2bb1eef5db4362 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sat, 27 Jul 2019 15:21:11 -0500 Subject: [PATCH 086/415] for checking for concurrent tags, using existing loopy function rather than recreating it --- sched_check_utils.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/sched_check_utils.py b/sched_check_utils.py index e116bc91c..687b43e3c 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -259,23 +259,34 @@ def get_isl_space(param_names, in_names, out_names): def get_concurrent_inames(knl): from loopy.kernel.data import ConcurrentTag + conc_inames_old = set() conc_inames = set() # TODO remove non-conc test + assertion + non_conc_inames_old = set() non_conc_inames = set() all_inames = knl.all_inames() for iname in all_inames: + # TODO remove old version: iname_tags = knl.iname_to_tags.get(iname, None) if iname_tags and any( isinstance(tag, ConcurrentTag) for tag in iname_tags): + conc_inames_old.add(iname) + else: + non_conc_inames_old.add(iname) + + if knl.iname_tags_of_type(iname, ConcurrentTag): conc_inames.add(iname) else: non_conc_inames.add(iname) # TODO remove non-conc test + assertion - assert all_inames-conc_inames == non_conc_inames + assert all_inames-conc_inames_old == non_conc_inames_old + + assert conc_inames == conc_inames_old + assert non_conc_inames == non_conc_inames_old return conc_inames, all_inames-conc_inames -- GitLab From 9e325b33f1baa52ad4755540c2263e58bee9d7f3 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sat, 27 Jul 2019 15:21:43 -0500 Subject: [PATCH 087/415] ignoring loops with concurent tags when creating sched --- schedule.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/schedule.py b/schedule.py index 8d7626be0..b7c47bb31 100644 --- a/schedule.py +++ b/schedule.py @@ -47,6 +47,7 @@ class LexSchedule(object): iname == self.unused_param_name for iname in knl.all_inames()) from loopy.schedule import (EnterLoop, LeaveLoop, Barrier, RunInstruction) + from loopy.kernel.data import ConcurrentTag # go through knl.schedule and generate self.lex_schedule @@ -56,6 +57,10 @@ class LexSchedule(object): for sched_item in knl.schedule: if isinstance(sched_item, EnterLoop): iname = sched_item.iname + if knl.iname_tags_of_type(iname, ConcurrentTag): + # TODO in the future, this should be unnecessary because there + # won't be any inames with ConcurrentTags in the loopy sched + continue # if the schedule is empty, this is the first schedule item, so # don't increment lex dim val enumerating items in current block, # otherwise, this loop is next item in current code block, so @@ -71,6 +76,10 @@ class LexSchedule(object): next_insn_lex_pt.append(iname) next_insn_lex_pt.append(0) elif isinstance(sched_item, LeaveLoop): + if knl.iname_tags_of_type(sched_item.iname, ConcurrentTag): + # TODO in the future, this should be unnecessary because there + # won't be any 
inames with ConcurrentTags in the loopy sched + continue # upon leaving a loop, # pop lex dimension for enumerating code blocks within this loop, and # pop lex dimension for the loop variable, and -- GitLab From 8782ae7304d0e6383423df8aa0b806dacec7f30d Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 29 Jul 2019 18:02:21 -0500 Subject: [PATCH 088/415] comnew TODOs from code review with Matt --- __init__.py | 33 +++++++++++++------------ dependency.py | 23 +++++++++--------- example_dependency_checking.py | 16 ++++++------ example_pairwise_schedule_validity.py | 35 +++++++++++---------------- sched_check_utils.py | 7 ++++++ schedule.py | 2 ++ 6 files changed, 61 insertions(+), 55 deletions(-) diff --git a/__init__.py b/__init__.py index a9da6f457..b56957c5d 100644 --- a/__init__.py +++ b/__init__.py @@ -1,5 +1,9 @@ +# TODO create a set of broken and valid kernels to test against +# (small kernels to test a specific case) +# TODO work on granularity of encapsulation, encapsulate some of this in +# separate functions def check_schedule_validity( unscheduled_knl, verbose=False, @@ -16,23 +20,22 @@ def check_schedule_validity( from schedule_checker.sched_check_utils import ( prettier_map_string, order_var_names_to_match_islset, - get_inames_in_sched_order, ) from loopy import preprocess_kernel - # TODO check to see if preprocessed already? + # TODO check to see if preprocessed already? kernel.kernel_status attr? preprocessed_knl = preprocess_kernel(unscheduled_knl) - # Create StatementDependencySet(s) from kernel dependencies ----------------- + # Create StatementPairDependencySet(s) from kernel dependencies ----------------- # Introduce SAME dep for set of shared, non-concurrent inames. # For each set of insns within a given iname subset, find sources and sinks, # then make PRIOR dep from all sinks to all sources at previous iterations. 
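# (Illustrative sketch, not from these patches: the source/sink idea described
#  in the comment above, on a toy dependency graph that mirrors
#  get_dependency_sources_and_sinks in dependency.py; ids and edges are made up.)
ids = {"a", "b", "c"}
depends_on = {"a": set(), "b": {"a"}, "c": {"b"}}
sources = {i for i in ids if not (depends_on[i] & ids)}
dependees = set().union(*(depends_on[i] & ids for i in ids))
sinks = ids - dependees  # nothing in ids depends on a sink
assert (sources, sinks) == ({"a"}, {"c"})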
- statement_dep_sets = create_dependencies_from_legacy_knl(preprocessed_knl) + statement_pair_dep_sets = create_dependencies_from_legacy_knl(preprocessed_knl) # get separate domains for before.active_inames and after.active_inames deps_and_domains = [] - for dep_set in statement_dep_sets: + for dep_set in statement_pair_dep_sets: deps_and_domains.append([ dep_set, preprocessed_knl.get_inames_domain( @@ -56,8 +59,6 @@ def check_schedule_validity( else: scheduled_knl = preprocessed_knl - sched_iname_order = get_inames_in_sched_order(scheduled_knl) - if verbose: # Print kernel info ------------------------------------------------------ print("="*80) @@ -71,8 +72,8 @@ def check_schedule_validity( print("Loopy schedule:") for sched_item in scheduled_knl.schedule: print(sched_item) - print("scheduled iname order:") - print(sched_iname_order) + #print("scheduled iname order:") + #print(sched_iname_order) # For each dependency, create+test schedule containing pair of insns------ @@ -81,16 +82,16 @@ def check_schedule_validity( print("Looping through dep pairs...") sched_is_valid = True - for statement_dep_set, dom_before, dom_after in deps_and_domains: + for statement_pair_dep_set, dom_before, dom_after in deps_and_domains: if verbose: print("="*80) print("statement dep set:") - print(statement_dep_set) + print(statement_pair_dep_set) print("dom_before:", dom_before) print("dom_after:", dom_after) - s_before = statement_dep_set.statement_before - s_after = statement_dep_set.statement_after + s_before = statement_pair_dep_set.statement_before + s_after = statement_pair_dep_set.statement_after # The isl map representing the schedule maps # statement instances -> lex time @@ -103,6 +104,7 @@ def check_schedule_validity( # and for getting a consistent iname ordering to use in our maps combined_doms = preprocessed_knl.get_inames_domain( s_before.active_inames | s_after.active_inames) + # TODO not guaranteed to work # Get all inames now in order to maintain list with consistent ordering # This will help keep isl maps/sets compatible @@ -149,6 +151,7 @@ def check_schedule_validity( } else: assert False + # TODO maybe can just do len 2 case sched_map_symbolic = sched.create_symbolic_isl_map( sid_to_dom, all_dom_inames_ordered) @@ -190,7 +193,7 @@ def check_schedule_validity( # create a map representing constraints from the dependency, # maps statement instance to all statement instances that must occur later constraint_map = create_dependency_constraint( - statement_dep_set, + statement_pair_dep_set, all_dom_inames_ordered, dom_before, dom_after, @@ -216,7 +219,7 @@ def check_schedule_validity( print("================ constraint check failure =================") print("constraint map not subset of SIO") print("dependency:") - print(statement_dep_set) + print(statement_pair_dep_set) print("statement instance ordering:") print(prettier_map_string(sio)) print("constraint_map.gist(sio):") diff --git a/dependency.py b/dependency.py index 5a5e06189..a26d219c9 100644 --- a/dependency.py +++ b/dependency.py @@ -8,12 +8,12 @@ class DependencyType: #ALL = "all" -class StatementDependencySet(object): +class StatementPairDependencySet(object): def __init__( self, statement_before, statement_after, - deps, # {dep_type: iname} + deps, # {dep_type: iname_set} ): self.statement_before = statement_before self.statement_after = statement_after @@ -99,7 +99,7 @@ def create_dependency_constraint( # all_constraints_set will be the union of all these constraints dt = DependencyType for dep_type, inames in 
statement_dep_set.deps.items(): - if dep_type == dt.NONE: + if dep_type == dt.NONE: # TODO remove, not used continue # need to put inames in a list so that order of inames and inames' @@ -151,12 +151,14 @@ def create_dependency_constraint( from schedule_checker.sched_check_utils import ( get_orderings_of_length_n ) + # TODO explain how it only creates explicitly described orderings orders = get_orderings_of_length_n( nested_after, required_length=len(inames_list), #return_first_found=True, # TODO might be faster return_first_found=False, ) + # TODO make sure this handles a cycle (error) if orders: assert len(orders) == 1 # TODO can remove assert if return_first_found above @@ -306,7 +308,7 @@ def create_dependencies_from_legacy_knl(knl): s_before = Statement(insn_before.id, insn_before_inames) s_after = Statement(insn_after.id, insn_after_inames) statement_dep_sets.append( - StatementDependencySet(s_before, s_after, dep_dict)) + StatementPairDependencySet(s_before, s_after, dep_dict)) # loop-carried deps ------------------------------------------ @@ -328,11 +330,10 @@ def create_dependencies_from_legacy_knl(knl): # find sources and sinks sources, sinks = get_dependency_sources_and_sinks(knl, sched_item_ids) - # TODO this ignores deps connecting to items outside sched_item_ids, - # is that okay? #print("sources:", sources) #print("sinks:", sinks) + # TODO in future, consider putting in a single no-op source and sink # create prior deps for source_id in sources: for sink_id in sinks: @@ -348,7 +349,7 @@ def create_dependencies_from_legacy_knl(knl): s_before = Statement(sink_id, sink_insn_inames) s_after = Statement(source_id, source_insn_inames) statement_dep_sets.append( - StatementDependencySet(s_before, s_after, dep_dict)) + StatementPairDependencySet(s_before, s_after, dep_dict)) #print("-"*85) return statement_dep_sets @@ -356,17 +357,17 @@ def create_dependencies_from_legacy_knl(knl): def get_dependency_sources_and_sinks(knl, sched_item_ids): sources = set() - dep_heads = set() # all dependency heads (within sched_item_ids) + dependees = set() # all dependees (within sched_item_ids) for item_id in sched_item_ids: # find the deps within sched_item_ids deps = knl.id_to_insn[item_id].depends_on & sched_item_ids if deps: - # add deps to dep_heads - dep_heads.update(deps) + # add deps to dependees + dependees.update(deps) else: # has no deps (within sched_item_ids), this is a source sources.add(item_id) # sinks don't point to anyone - sinks = sched_item_ids - dep_heads + sinks = sched_item_ids - dependees return sources, sinks diff --git a/example_dependency_checking.py b/example_dependency_checking.py index b037a02fa..f7a4d51bd 100644 --- a/example_dependency_checking.py +++ b/example_dependency_checking.py @@ -1,6 +1,6 @@ import loopy as lp from schedule_checker.dependency import ( # noqa - StatementDependencySet, + StatementPairDependencySet, DependencyType as dt, create_dependency_constraint, ) @@ -126,21 +126,21 @@ s0 = Statement("0", {"i", "j"}) s1 = Statement("1", {"i", "j"}) insnid_to_int_sid = {"0": 0, "1": 1} -statement_dep_set = StatementDependencySet(s0, s1, {dt.SAME: ["i", "j"]}) -print(statement_dep_set) +statement_pair_dep_set = StatementPairDependencySet(s0, s1, {dt.SAME: ["i", "j"]}) +print(statement_pair_dep_set) combined_doms = knl.get_inames_domain( - statement_dep_set.statement_before.active_inames | # noqa - statement_dep_set.statement_after.active_inames + statement_pair_dep_set.statement_before.active_inames | # noqa + 
statement_pair_dep_set.statement_after.active_inames ) dom_before = knl.get_inames_domain( - statement_dep_set.statement_before.active_inames + statement_pair_dep_set.statement_before.active_inames ) dom_after = knl.get_inames_domain( - statement_dep_set.statement_after.active_inames + statement_pair_dep_set.statement_after.active_inames ) loop_priority = None # TODO constraint_map = create_dependency_constraint( - statement_dep_set, + statement_pair_dep_set, all_necessary_inames_ordered, dom_before, dom_after, diff --git a/example_pairwise_schedule_validity.py b/example_pairwise_schedule_validity.py index 460f1ad14..00fb969f8 100644 --- a/example_pairwise_schedule_validity.py +++ b/example_pairwise_schedule_validity.py @@ -5,12 +5,12 @@ from schedule_checker import check_schedule_validity # Choose kernel ---------------------------------------------------------- -#knl_choice = "example" +knl_choice = "example" #knl_choice = "matmul" #knl_choice = "scan" #knl_choice = "dependent_domain" #knl_choice = "stroud_bernstein_orig" # TODO invalid sched? -knl_choice = "stroud_bernstein" # TODO invalid sched? +#knl_choice = "stroud_bernstein" # TODO invalid sched? #knl_choice = "add_barrier" #knl_choice = "nop" #knl_choice = "nest_multi_dom" @@ -108,10 +108,8 @@ elif knl_choice == "stroud_bernstein_orig": <> s = 1-xi <> r = xi/s <> aind = 0 {id=aind_init} - for alpha1 <> w = s**(deg-alpha1) {id=init_w} - for alpha2 tmp[el,alpha1,i2] = tmp[el,alpha1,i2] + w * coeffs[aind] \ {id=write_tmp,dep=init_w:aind_init} @@ -123,25 +121,17 @@ elif knl_choice == "stroud_bernstein_orig": end end """, - [ - # Must declare coeffs to have "no" shape, to keep loopy - # from trying to figure it out the shape automatically. - - lp.GlobalArg("coeffs", None, shape=None), - "..." - ], - name="stroud_bernstein", - assumptions="deg>=0 and nels>=1" - ) - - knl = lp.add_and_infer_dtypes( - knl, + [lp.GlobalArg("coeffs", None, shape=None), "..."], + name="stroud_bernstein", assumptions="deg>=0 and nels>=1") + knl = lp.add_and_infer_dtypes(knl, dict(coeffs=np.float32, qpts=np.int32)) knl = lp.fix_parameters(knl, nqp1d=7, deg=4) knl = lp.split_iname(knl, "el", 16, inner_tag="l.0") - knl = lp.split_iname(knl, "el_outer", 2, outer_tag="g.0", inner_tag="ilp", - slabs=(0, 1)) + knl = lp.split_iname(knl, "el_outer", 2, outer_tag="g.0", + inner_tag="ilp", slabs=(0, 1)) knl = lp.tag_inames(knl, dict(i2="l.1", alpha1="unr", alpha2="unr")) + # Must declare coeffs to have "no" shape, to keep loopy + # from trying to figure it out the shape automatically. 
elif knl_choice == "stroud_bernstein": knl = lp.make_kernel( "{[el]: 0 <= el < nels}", @@ -155,13 +145,16 @@ elif knl_choice == "stroud_bernstein": assumptions="nels>=1 and nels mod 32 = 0", ) - knl = lp.split_iname(knl, "el", 16, + knl = lp.split_iname( + knl, "el", 16, inner_tag="l.0", ) - knl = lp.split_iname(knl, "el_outer", 2, + knl = lp.split_iname( + knl, "el_outer", 2, outer_tag="g.0", inner_tag="ilp", #inner_tag="unr", + #inner_tag="g.1", ) #knl = lp.prioritize_loops(knl, "el_outer_outer,el_outer_inner,el_inner,a") if knl_choice == "add_barrier": diff --git a/sched_check_utils.py b/sched_check_utils.py index 687b43e3c..8aeca4cbf 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -98,6 +98,7 @@ def all_iname_domains_equal(knl): def order_var_names_to_match_islset(var_names, islset): + # TODO specifiy isl dim # returns subset of var_names found in islset in # order matching the islset variables name_order = islset.get_var_names(isl.dim_type.out) @@ -143,6 +144,8 @@ def create_symbolic_isl_map_from_tuples( statement_var_name, # TODO can we not pass this? ): + # TODO clarify this with comments + # given a list of pairs of ((input), (output)) tuples, create an isl map # and intersect each pair with corresponding domain_to_intersect #TODO allow None for domains @@ -186,6 +189,8 @@ def create_symbolic_isl_map_from_tuples( assert False unused_inames = set(space_in_names) \ - set(dom_var_names) - set([statement_var_name]) + # TODO find another way to determine which inames should be unused and + # make an assertion to double check this for unused_iname in unused_inames: constraint = constraint & islvars[unused_iname].eq_set( islvars[unused_param_name]) @@ -334,7 +339,9 @@ def get_inames_in_sched_order(scheduled_knl): # TODO made a mess trying to make this as fast as possible, +# TODO use yield to clean this up # probably a better way +# TODO find topological sort in loopy, then find longest path in dag def _generate_orderings_starting_w_prefix( allowed_after_dict, orderings, required_length=None, start_prefix=(), return_first_found=False): diff --git a/schedule.py b/schedule.py index b7c47bb31..eab68891d 100644 --- a/schedule.py +++ b/schedule.py @@ -31,6 +31,7 @@ class LexSchedule(object): ): # mapping of {statement instance: lex point} + # TODO make the key a data type that knows the var names self.lex_schedule = OrderedDict() # symbolic inames in sched that have been enumerated @@ -60,6 +61,7 @@ class LexSchedule(object): if knl.iname_tags_of_type(iname, ConcurrentTag): # TODO in the future, this should be unnecessary because there # won't be any inames with ConcurrentTags in the loopy sched + # TODO warn continue # if the schedule is empty, this is the first schedule item, so # don't increment lex dim val enumerating items in current block, -- GitLab From 087ea62538a815f33dfcf8a5368fa161c7d3801a Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 30 Jul 2019 10:41:04 -0500 Subject: [PATCH 089/415] updated example ilp kernel; added isl set dim as parameter to order_var_names_to_match_islset() --- __init__.py | 3 ++- example_pairwise_schedule_validity.py | 36 +++++++++++++++++---------- example_schedule_creation_old.py | 3 ++- sched_check_utils.py | 4 +-- 4 files changed, 29 insertions(+), 17 deletions(-) diff --git a/__init__.py b/__init__.py index b56957c5d..923c137ec 100644 --- a/__init__.py +++ b/__init__.py @@ -108,8 +108,9 @@ def check_schedule_validity( # Get all inames now in order to maintain list with consistent ordering # This will help keep isl maps/sets 
compatible + import islpy as isl all_dom_inames_ordered = order_var_names_to_match_islset( - preprocessed_knl.all_inames(), combined_doms) + preprocessed_knl.all_inames(), combined_doms, isl.dim_type.out) # Create a mapping of {statement instance: lex point} # including only instructions involved in this dependency diff --git a/example_pairwise_schedule_validity.py b/example_pairwise_schedule_validity.py index 00fb969f8..3d5917c0d 100644 --- a/example_pairwise_schedule_validity.py +++ b/example_pairwise_schedule_validity.py @@ -10,7 +10,7 @@ knl_choice = "example" #knl_choice = "scan" #knl_choice = "dependent_domain" #knl_choice = "stroud_bernstein_orig" # TODO invalid sched? -#knl_choice = "stroud_bernstein" # TODO invalid sched? +#knl_choice = "ilp_kernel" #knl_choice = "add_barrier" #knl_choice = "nop" #knl_choice = "nest_multi_dom" @@ -122,7 +122,7 @@ elif knl_choice == "stroud_bernstein_orig": end """, [lp.GlobalArg("coeffs", None, shape=None), "..."], - name="stroud_bernstein", assumptions="deg>=0 and nels>=1") + name="stroud_bernstein_orig", assumptions="deg>=0 and nels>=1") knl = lp.add_and_infer_dtypes(knl, dict(coeffs=np.float32, qpts=np.int32)) knl = lp.fix_parameters(knl, nqp1d=7, deg=4) @@ -132,31 +132,41 @@ elif knl_choice == "stroud_bernstein_orig": knl = lp.tag_inames(knl, dict(i2="l.1", alpha1="unr", alpha2="unr")) # Must declare coeffs to have "no" shape, to keep loopy # from trying to figure it out the shape automatically. -elif knl_choice == "stroud_bernstein": +elif knl_choice == "ilp_kernel": knl = lp.make_kernel( - "{[el]: 0 <= el < nels}", + "{[i,j,ilp_iname]: 0 <= i,j < n and 0 <= ilp_iname < 4}", """ - for el - tmp[el] = 3.14 {id=write_tmp} - aind = 1 {id=aind_incr,dep=write_tmp} + for i + for j + for ilp_iname + tmp[i,j,ilp_iname] = 3.14 + end + end end """, - name="stroud_bernstein", - assumptions="nels>=1 and nels mod 32 = 0", + name="ilp_kernel", + assumptions="n>=1 and n mod 4 = 0", ) - + # TODO why is conditional on ilp_name? 
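One way to approach the TODO above is to look at which inames end up carrying ConcurrentTag-derived tags, since those are exactly the inames the schedule checker skips when it builds a LexSchedule. A rough sketch, assuming the ilp_kernel knl built just above and using only calls that already appear in this repo:

    from loopy.kernel.data import ConcurrentTag

    for iname in sorted(knl.all_inames()):
        print(iname, knl.iname_tags_of_type(iname, ConcurrentTag))

Whether the ilp-tagged iname shows up here depends on whether loopy's ilp tag derives from ConcurrentTag; if it does, the checker ignores its EnterLoop entirely.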
+ knl = lp.tag_inames(knl, {"j": "l.0","ilp_iname": "ilp"}) + """ + for i + tmp[i] = 3.14 {id=write_tmp} + aind = 1 {id=aind_incr,dep=write_tmp} + end knl = lp.split_iname( - knl, "el", 16, + knl, "i", 16, inner_tag="l.0", ) knl = lp.split_iname( - knl, "el_outer", 2, + knl, "i_outer", 2, outer_tag="g.0", inner_tag="ilp", #inner_tag="unr", #inner_tag="g.1", ) - #knl = lp.prioritize_loops(knl, "el_outer_outer,el_outer_inner,el_inner,a") + """ + #knl = lp.prioritize_loops(knl, "i_outer_outer,i_outer_inner,i_inner,a") if knl_choice == "add_barrier": np.random.seed(17) #a = np.random.randn(16) diff --git a/example_schedule_creation_old.py b/example_schedule_creation_old.py index 3dfa8edac..0a9f9abaf 100644 --- a/example_schedule_creation_old.py +++ b/example_schedule_creation_old.py @@ -1,5 +1,6 @@ import loopy as lp import numpy as np +import islpy as isl from schedule_checker.dependency import ( create_dependencies_from_legacy_knl, create_dependency_constraint, @@ -103,7 +104,7 @@ if not all_iname_domains_equal(knl): "get_inames_domain(iname) is not same for all inames") domain_union = _union_inames_domains(knl) all_dom_inames_ordered = order_var_names_to_match_islset( - knl.all_inames(), domain_union) + knl.all_inames(), domain_union, isl.dim_type.out) # get all inames in consistent ordering: sched = LexSchedule(knl) diff --git a/sched_check_utils.py b/sched_check_utils.py index 8aeca4cbf..b48c55a8d 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -97,11 +97,11 @@ def all_iname_domains_equal(knl): return True -def order_var_names_to_match_islset(var_names, islset): +def order_var_names_to_match_islset(var_names, islset, set_dim=isl.dim_type.out): # TODO specifiy isl dim # returns subset of var_names found in islset in # order matching the islset variables - name_order = islset.get_var_names(isl.dim_type.out) + name_order = islset.get_var_names(set_dim) names_ordered_to_match_islset = [] for v in name_order: if v in var_names: -- GitLab From f59dfcf0f5086f66ba7417b8408dcce37cbf08be Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 30 Jul 2019 10:42:38 -0500 Subject: [PATCH 090/415] removed test assertions from get_concurrent_inames() --- sched_check_utils.py | 20 -------------------- 1 file changed, 20 deletions(-) diff --git a/sched_check_utils.py b/sched_check_utils.py index b48c55a8d..1954b63b1 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -98,7 +98,6 @@ def all_iname_domains_equal(knl): def order_var_names_to_match_islset(var_names, islset, set_dim=isl.dim_type.out): - # TODO specifiy isl dim # returns subset of var_names found in islset in # order matching the islset variables name_order = islset.get_var_names(set_dim) @@ -264,35 +263,16 @@ def get_isl_space(param_names, in_names, out_names): def get_concurrent_inames(knl): from loopy.kernel.data import ConcurrentTag - conc_inames_old = set() conc_inames = set() - - # TODO remove non-conc test + assertion - non_conc_inames_old = set() non_conc_inames = set() all_inames = knl.all_inames() for iname in all_inames: - # TODO remove old version: - iname_tags = knl.iname_to_tags.get(iname, None) - if iname_tags and any( - isinstance(tag, ConcurrentTag) - for tag in iname_tags): - conc_inames_old.add(iname) - else: - non_conc_inames_old.add(iname) - if knl.iname_tags_of_type(iname, ConcurrentTag): conc_inames.add(iname) else: non_conc_inames.add(iname) - # TODO remove non-conc test + assertion - assert all_inames-conc_inames_old == non_conc_inames_old - - assert conc_inames == conc_inames_old - assert 
non_conc_inames == non_conc_inames_old - return conc_inames, all_inames-conc_inames -- GitLab From 8e0fb8184414ac00c85989629e220d57728b7387 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Wed, 31 Jul 2019 18:09:03 -0500 Subject: [PATCH 091/415] added valid sched tests --- test/test_valid_scheds.py | 276 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 276 insertions(+) create mode 100644 test/test_valid_scheds.py diff --git a/test/test_valid_scheds.py b/test/test_valid_scheds.py new file mode 100644 index 000000000..24855c455 --- /dev/null +++ b/test/test_valid_scheds.py @@ -0,0 +1,276 @@ +from __future__ import division, print_function + +__copyright__ = "Copyright (C) 2018 James Stevens" + +__license__ = """ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. +""" + +import sys +from pyopencl.tools import ( # noqa + pytest_generate_tests_for_pyopencl + as pytest_generate_tests) +import loopy as lp +import numpy as np +from schedule_checker import check_schedule_validity + + +def test_loop_prioritization(): + knl = lp.make_kernel( + [ + "{[i,ii]: 0<=itemp = b[i,k] {id=insn_a} + end + for j + a[i,j] = temp + 1 {id=insn_b,dep=insn_a} + c[i,j] = d[i,j] {id=insn_c} + end + end + for t + e[t] = f[t] {id=insn_d} + end + """ + ], + name="example", + assumptions="pi,pj,pk,pt >= 1", + lang_version=(2018, 2) + ) + knl = lp.add_and_infer_dtypes( + knl, + {"b": np.float32, "d": np.float32, "f": np.float32}) + knl = lp.prioritize_loops(knl, "i,k") + knl = lp.prioritize_loops(knl, "i,j") + assert check_schedule_validity(knl) + + +def test_matmul(): + bsize = 16 + knl = lp.make_kernel( + "{[i,k,j]: 0<=i {[i,j]: 0<=i {[i]: 0<=i xi = qpts[1, i2] + <> s = 1-xi + <> r = xi/s + <> aind = 0 {id=aind_init} + for alpha1 + <> w = s**(deg-alpha1) {id=init_w} + for alpha2 + tmp[el,alpha1,i2] = tmp[el,alpha1,i2] + w * coeffs[aind] \ + {id=write_tmp,dep=init_w:aind_init} + w = w * r * ( deg - alpha1 - alpha2 ) / (1 + alpha2) \ + {id=update_w,dep=init_w:write_tmp} + aind = aind + 1 \ + {id=aind_incr,dep=aind_init:write_tmp:update_w} + end + end + end + """, + [lp.GlobalArg("coeffs", None, shape=None), "..."], + name="stroud_bernstein_orig", assumptions="deg>=0 and nels>=1") + knl = lp.add_and_infer_dtypes(knl, + dict(coeffs=np.float32, qpts=np.int32)) + knl = lp.fix_parameters(knl, nqp1d=7, deg=4) + knl = lp.split_iname(knl, "el", 16, inner_tag="l.0") + knl = lp.split_iname(knl, "el_outer", 2, outer_tag="g.0", + inner_tag="ilp", slabs=(0, 1)) + knl = lp.tag_inames(knl, dict(i2="l.1", alpha1="unr", alpha2="unr")) + assert 
check_schedule_validity(knl) + + +def test_ilp(): + knl = lp.make_kernel( + "{[i,j,ilp_iname]: 0 <= i,j < n and 0 <= ilp_iname < 4}", + """ + for i + for j + for ilp_iname + tmp[i,j,ilp_iname] = 3.14 + end + end + end + """, + name="ilp_kernel", + assumptions="n>=1 and n mod 4 = 0", + ) + knl = lp.tag_inames(knl, {"j": "l.0", "ilp_iname": "ilp"}) + #knl = lp.prioritize_loops(knl, "i_outer_outer,i_outer_inner,i_inner,a") + assert check_schedule_validity(knl) + + +def test_barrier(): + np.random.seed(17) + cnst = np.random.randn(16) + knl = lp.make_kernel( + "{[i, ii]: 0<=i, ii c_end = 2 + for c + ... nop + end + end + """, + "...", + seq_dependencies=True) + knl = lp.fix_parameters(knl, dim=3) + assert check_schedule_validity(knl) + + +def test_multi_domain(): + knl = lp.make_kernel( + [ + "{[i]: 0<=iacc = 0 {id=insn0} + for j + for k + acc = acc + j + k {id=insn1,dep=insn0} + end + end + end + end + """, + name="nest_multi_dom", + assumptions="ni,nj,nk,nx >= 1", + lang_version=(2018, 2) + ) + knl = lp.prioritize_loops(knl, "x,xx,i") + knl = lp.prioritize_loops(knl, "i,j") + knl = lp.prioritize_loops(knl, "j,k") + assert check_schedule_validity(knl) + + +def test_loop_carried_deps(): + knl = lp.make_kernel( + "{[i]: 0<=iacc0 = 0 {id=insn0} + for i + acc0 = acc0 + i {id=insn1,dep=insn0} + <>acc2 = acc0 + i {id=insn2,dep=insn1} + <>acc3 = acc2 + i {id=insn3,dep=insn2} + <>acc4 = acc0 + i {id=insn4,dep=insn1} + end + """, + name="loop_carried_deps", + assumptions="n >= 1", + lang_version=(2018, 2) + ) + assert check_schedule_validity(knl) + + +if __name__ == "__main__": + if len(sys.argv) > 1: + exec(sys.argv[1]) + else: + from pytest import main + main([__file__]) -- GitLab From c95b3be4761e0257d525238c27792dde57c63f8b Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Thu, 8 Aug 2019 06:55:39 -0500 Subject: [PATCH 092/415] removed unused functions+example related to explicit schedule creation --- example_schedule_creation_old.py | 209 ------------------------------- schedule.py | 80 ------------ 2 files changed, 289 deletions(-) delete mode 100644 example_schedule_creation_old.py diff --git a/example_schedule_creation_old.py b/example_schedule_creation_old.py deleted file mode 100644 index 0a9f9abaf..000000000 --- a/example_schedule_creation_old.py +++ /dev/null @@ -1,209 +0,0 @@ -import loopy as lp -import numpy as np -import islpy as isl -from schedule_checker.dependency import ( - create_dependencies_from_legacy_knl, - create_dependency_constraint, -) -from schedule_checker.schedule import LexSchedule -from schedule_checker.lexicographic_order_map import ( - get_statement_ordering_map, -) -from schedule_checker.sched_check_utils import ( - prettier_map_string, - _union_inames_domains, - all_iname_domains_equal, - order_var_names_to_match_islset, -) - -# TODO either remove this file or update as an example of full sched creation -# (rather than the usual pairwise schedule creation) - -knl_choice = "example" -#knl_choice = "matmul" -#knl_choice = "scan" - -if knl_choice == "example": - # make example kernel - knl = lp.make_kernel( - #"{[i,j]: 0<=i<2 and 1<=j<3}", - #"{[i,j]: pi_lo<=itemp = b[i,j] {id=insn_a}", - "a[i,j] = temp + 1 {id=insn_b,dep=insn_a}", - "c[i,j] = d[i,j] {id=insn_c}", - "out[t,tt] = in[t,tt] {id=insn_d}", - ], - name="example", - #assumptions="pi_lo,pi_up,pj_lo,pj_up >= 1", - #assumptions="pi_up,pj_up >= 1", - #assumptions="pi_up,pj_up,pt_up,pt_lo >= 1 and pt_lo < pt_up", - assumptions="pi_up,pj_up,pt_up,pt_lo >= 1 and pt_lo < pt_up", - lang_version=(2018, 2) - ) - #knl = 
lp.add_and_infer_dtypes(knl, {"b": np.float32, "d": np.float32}) - knl = lp.add_and_infer_dtypes(knl, - {"b": np.float32, "d": np.float32, "in": np.float32}) - knl = lp.tag_inames(knl, {"i": "l.0"}) - knl = lp.preprocess_kernel(knl) - knl = lp.get_one_scheduled_kernel(knl) -elif knl_choice == "matmul": - bsize = 16 - knl = lp.make_kernel( - "{[i,k,j]: 0<=i {[i,j]: 0<=i lex time):") -print(sched_map_symbolic.space) - -# *Explicit* lexicographic mapping- map each tuple to all tuples occuring later -print("---------------------------------------------------------------------------") -#lex_map_explicit = sched.get_explicit_sched_map() - -lex_map_symbolic = sched.get_lex_order_map_for_symbolic_sched() - -print("lex map symbolic:") -print(prettier_map_string(lex_map_symbolic)) -print("space (lex time -> lex time):") -print(lex_map_symbolic.space) - -# Statement instance ordering -print("----------------------------------------------------------------------") -#SIO_explicit_valid = get_statement_ordering_map( -# example_sched_explicit, lex_map_explicit) -#print("statement instance ordering explicit (valid_sched):") -#print(prettier_map_string(SIO_explicit_valid)) -SIO_symbolic_valid = get_statement_ordering_map( - sched_map_symbolic, lex_map_symbolic) -print("statement instance ordering symbolic (valid_sched):") -print(prettier_map_string(SIO_symbolic_valid)) -print("space (statement instances -> statement instances):") -print(SIO_symbolic_valid.space) - -# For every shared (between depender and dependee) non-concurrent iname, -# Introduce a same dep -# (Perform voodoo guesswork to determine whether a ‘prior’ dep is needed) -# For every shared (between depender and dependee) concurrent iname, -# Introduce an all dep - -print("----------------------------------------------------------------------") -statement_dep_sets = create_dependencies_from_legacy_knl(knl) -print("Statement Dependencies:") -for dep_set in statement_dep_sets: - print(dep_set) - print("") -print("----------------------------------------------------------------------") -print("dict{lp insn id : sched sid int}:") -print(sched.lp_insnid_to_int_sid) -print("----------------------------------------------------------------------") -sched_is_valid = True -for statement_dep_set in statement_dep_sets: - - loop_priority = None # TODO - dom_before = knl.get_inames_domain( - statement_dep_set.statement_before.active_inames) - dom_after = knl.get_inames_domain( - statement_dep_set.statement_after.active_inames) - constraint_map = create_dependency_constraint( - statement_dep_set, - all_dom_inames_ordered, - dom_before, - dom_after, - loop_priority, - sched.lp_insnid_to_int_sid, - sched.unused_param_name, - sched.statement_var_name, - sched.statement_var_pose(), - ) - print("constraint map:") - print(prettier_map_string(constraint_map)) - #print("space (statment instances -> statement instances):") - #print(constraint_map.space) - - assert constraint_map.space == SIO_symbolic_valid.space - if not constraint_map.is_subset(SIO_symbolic_valid): - sched_is_valid = False - -print("is valid sched valid? 
constraint map subset of SIO?") -print(sched_is_valid) diff --git a/schedule.py b/schedule.py index eab68891d..0c551ca1a 100644 --- a/schedule.py +++ b/schedule.py @@ -241,83 +241,3 @@ class LexSchedule(object): sched_str += "%s -> %s;\n" % (domain_elem, lex_pt) sched_str += "}" return sched_str - - # Methods related to *explicit* schedule/map creation ------------------ - # TODO consider removing these - - def get_min_lex_dim_vals(self, var_bounds_dict): - - # this only works for integer lex pts (no symbolic vars): - #return [min(dim_pts) for dim_pts in zip(*self.lex_schedule.values())] - result = [] - for dim_pts in zip(*self.lex_schedule.values()): - if all(isinstance(pt, int) for pt in dim_pts): - result.append(min(dim_pts)) - else: - assert all(pt == dim_pts[0] for pt in dim_pts) - # append lower bound for this variable - result.append(var_bounds_dict[dim_pts[0]][0]) - return result - - def enumerate_iname(self, iname, bound): - new_sched = OrderedDict() - iname_found = False - for insn, lex_pt in self.lex_schedule.items(): - if iname in lex_pt: - for v in range(bound[0], bound[1]): - new_sched[tuple(list(insn)+[v])] = [ - lx if lx != iname else v for lx in lex_pt] - iname_found = True - else: - new_sched[insn] = lex_pt - self.lex_schedule = new_sched - if iname_found: - self.inames_enumerated.append(iname) - - def enumerate_inames(self, iname_bounds): - for iname, bound in iname_bounds.items(): - self.enumerate_iname(iname, bound) - - def get_isl_space_for_explicit_sched(self): - params_sched = ["ps"] + ["p"+iname for iname in self.inames_enumerated] - in_names_sched = [self.statement_var_name] + self.inames_enumerated - out_names_sched = self.get_lex_var_names() - from schedule_checker.sched_check_utils import get_isl_space - return get_isl_space(params_sched, in_names_sched, out_names_sched) - - def create_explicit_isl_map(self, sched_space): - from schedule_checker.sched_check_utils import ( - create_explicit_map_from_tuples - ) - return create_explicit_map_from_tuples(list(self.items()), sched_space) - - def enumerate_symbolic_inames_and_create_explicit_isl_map(self, iname_bounds): - self.enumerate_inames(iname_bounds) - sched_space = self.get_isl_space_for_explicit_sched() - return self.create_explicit_isl_map(sched_space) - - def get_explicit_sched_map(self): - - from schedule_checker.lexicographic_order_map import ( - make_lex_order_map_tuple_pairs, - ) - from schedule_checker.sched_check_utils import ( - create_explicit_map_from_tuples, - get_isl_space, - append_apostrophes - ) - - # TODO lower bound may not be zero - lex_dim_bounds = list(zip(self.get_min_lex_dim_vals(), - self.get_max_lex_dim_vals())) - sched_space = self.get_isl_space_for_explicit_sched() - - lex_in_names = sched_space.get_var_names(isl.dim_type.out) - lex_out_names = append_apostrophes(lex_in_names) - lex_params = [] - - explicit_lex_map_pairs = make_lex_order_map_tuple_pairs(lex_dim_bounds) - lex_space_explicit = get_isl_space(lex_params, lex_in_names, lex_out_names) - - return create_explicit_map_from_tuples(explicit_lex_map_pairs, - lex_space_explicit) -- GitLab From 4a2deff0a9260f4d0ea713f46678932665916d82 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Thu, 8 Aug 2019 07:02:37 -0500 Subject: [PATCH 093/415] warning when encountering+ignoring EnterLoop with ConcurrentTag in sched creation --- schedule.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/schedule.py b/schedule.py index 0c551ca1a..3c73036c5 100644 --- a/schedule.py +++ b/schedule.py @@ -59,9 +59,13 @@ class 
LexSchedule(object): if isinstance(sched_item, EnterLoop): iname = sched_item.iname if knl.iname_tags_of_type(iname, ConcurrentTag): - # TODO in the future, this should be unnecessary because there + # In the future, this should be unnecessary because there # won't be any inames with ConcurrentTags in the loopy sched - # TODO warn + from warnings import warn + warn( + "LexSchedule.__init__: Encountered EnterLoop for iname %s " + "with ConcurrentTag(s) in schedule for kernel %s. " + "Ignoring this loop." % (iname, kernel.name)) continue # if the schedule is empty, this is the first schedule item, so # don't increment lex dim val enumerating items in current block, @@ -79,7 +83,7 @@ class LexSchedule(object): next_insn_lex_pt.append(0) elif isinstance(sched_item, LeaveLoop): if knl.iname_tags_of_type(sched_item.iname, ConcurrentTag): - # TODO in the future, this should be unnecessary because there + # In the future, this should be unnecessary because there # won't be any inames with ConcurrentTags in the loopy sched continue # upon leaving a loop, -- GitLab From ac95ce64867fa79948efaffb954b6c4650b71f59 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Thu, 8 Aug 2019 07:11:12 -0500 Subject: [PATCH 094/415] removed a TODO --- sched_check_utils.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/sched_check_utils.py b/sched_check_utils.py index 1954b63b1..e4b9bcbf1 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -1,6 +1,9 @@ import islpy as isl +# TODO remove assertions once satisified they are unnecessary + + def prettier_map_string(isl_map): return str(isl_map ).replace("{ ", "{\n").replace(" }", "\n}").replace("; ", ";\n") @@ -140,7 +143,7 @@ def create_symbolic_isl_map_from_tuples( space, domains_to_intersect, # TODO pass these zipped w/tuples? unused_param_name, - statement_var_name, # TODO can we not pass this? + statement_var_name, ): # TODO clarify this with comments @@ -210,7 +213,6 @@ def create_symbolic_isl_map_from_tuples( dim_type.out, 0, dim_type.in_, len(space_in_names), len(space_out_names)) - # TODO remove: assert space_in_names == map_from_set.get_var_names( isl.dim_type.in_) -- GitLab From 1b13ca39b7144e92d9b098a2aa1b6c9d86183fc0 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Thu, 8 Aug 2019 07:18:43 -0500 Subject: [PATCH 095/415] zipping tuple pairs with corresponding domains before passing them to create_symbolic_isl_map_from_tuples() --- sched_check_utils.py | 8 +++----- schedule.py | 3 +-- 2 files changed, 4 insertions(+), 7 deletions(-) diff --git a/sched_check_utils.py b/sched_check_utils.py index e4b9bcbf1..ad7bbc351 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -139,9 +139,8 @@ def create_explicit_map_from_tuples(tuple_pairs, space): def create_symbolic_isl_map_from_tuples( - tuple_pairs, + tuple_pairs_with_domains, # list of ((tup_in, tup_out), dom_to_intersect) space, - domains_to_intersect, # TODO pass these zipped w/tuples? 
unused_param_name, statement_var_name, ): @@ -151,7 +150,6 @@ def create_symbolic_isl_map_from_tuples( # given a list of pairs of ((input), (output)) tuples, create an isl map # and intersect each pair with corresponding domain_to_intersect #TODO allow None for domains - assert len(tuple_pairs) == len(domains_to_intersect) dim_type = isl.dim_type @@ -166,7 +164,7 @@ def create_symbolic_isl_map_from_tuples( # initialize set with constraint that is always false #constraints_set = islvars[0].eq_set(islvars[0] + 1) all_maps = [] - for (tup_in, tup_out), dom in zip(tuple_pairs, domains_to_intersect): + for (tup_in, tup_out), dom in tuple_pairs_with_domains: # initialize constraint with true constraint = islvars[0].eq_set(islvars[0]) @@ -181,7 +179,7 @@ def create_symbolic_isl_map_from_tuples( constraint = constraint \ & islvars[dim_name].eq_set(islvars[val_in]) - # TODO we probably shouldn't rely on domains_to_intersect + # TODO we probably shouldn't rely on dom # here for determing where to set inames equal to dummy vars, # should instead determine before in LexSchedule and pass info in dom_var_names = dom.get_var_names(dim_type.out) diff --git a/schedule.py b/schedule.py index 3c73036c5..6e5cd3bbb 100644 --- a/schedule.py +++ b/schedule.py @@ -195,8 +195,7 @@ class LexSchedule(object): # create isl map return create_symbolic_isl_map_from_tuples( - list(self.items()), sched_space, - doms_to_intersect, + zip(list(self.items()), doms_to_intersect), sched_space, self.unused_param_name, self.statement_var_name) def get_lex_var_names(self): -- GitLab From fad96a45c2872061091ebbc74932b5ff0e6fee55 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Thu, 8 Aug 2019 07:31:19 -0500 Subject: [PATCH 096/415] renamed make_lex_order_map_tuple_pairs()->def lex_order_map_tuple_pairs_from_explicit_bounds() --- example_dependency_checking.py | 5 +++-- example_lex_map_creation.py | 4 ++-- lexicographic_order_map.py | 3 +-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/example_dependency_checking.py b/example_dependency_checking.py index f7a4d51bd..dee185a92 100644 --- a/example_dependency_checking.py +++ b/example_dependency_checking.py @@ -5,7 +5,7 @@ from schedule_checker.dependency import ( # noqa create_dependency_constraint, ) from schedule_checker.lexicographic_order_map import ( - make_lex_order_map_tuple_pairs, + lex_order_map_tuple_pairs_from_explicit_bounds, get_statement_ordering_map, ) from schedule_checker.schedule import Statement @@ -84,7 +84,8 @@ lex_params = [] lex_in_names = out_names_sched lex_out_names = append_apostrophes(out_names_sched) -explicit_lex_map_pairs = make_lex_order_map_tuple_pairs(lex_dim_bounds) +explicit_lex_map_pairs = lex_order_map_tuple_pairs_from_explicit_bounds( + lex_dim_bounds) # for pair in explicit_lex_map_pairs: # print(pair[0], pair[1]) lex_space_explicit = get_isl_space(lex_params, lex_in_names, lex_out_names) diff --git a/example_lex_map_creation.py b/example_lex_map_creation.py index 527d97869..79730d036 100644 --- a/example_lex_map_creation.py +++ b/example_lex_map_creation.py @@ -2,7 +2,7 @@ from schedule_checker.lexicographic_order_map import ( get_statement_ordering_map, create_symbolic_lex_order_map, ) -from schedule_checker.sched_check_utils import( +from schedule_checker.sched_check_utils import ( create_explicit_map_from_tuples, get_isl_space, ) @@ -22,7 +22,7 @@ print(lex_map_symbolic) """ dim_bounds = [(0,2), (0,2)] # max vals for each dim (e.g., 0 <= i0 < max0 ...) 
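For concreteness, the explicit enumeration quoted in the block above produces the following for dim_bounds = [(0, 2), (0, 2)]; this is a standalone sketch that mirrors lex_order_map_tuple_pairs_from_explicit_bounds rather than calling it:

    import itertools

    dim_bounds = [(0, 2), (0, 2)]
    lex_tuples = list(
        itertools.product(*[range(l, u) for l, u in dim_bounds]))
    # itertools.product yields the points in lexicographic order:
    # (0, 0), (0, 1), (1, 0), (1, 1)
    map_pairs = [(before, after)
                 for i, before in enumerate(lex_tuples)
                 for after in lex_tuples[i+1:]]
    # each point is paired with every point that comes after it:
    # ((0,0),(0,1)), ((0,0),(1,0)), ((0,0),(1,1)),
    # ((0,1),(1,0)), ((0,1),(1,1)), ((1,0),(1,1))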
-explicit_lex_map_pairs = make_lex_order_map_tuple_pairs(dim_bounds) +explicit_lex_map_pairs = lex_order_map_tuple_pairs_from_explicit_bounds(dim_bounds) # for pair in explicit_lex_map_pairs: # print(pair[0], pair[1]) lex_map_explicit = create_explicit_map_from_tuples(explicit_lex_map_pairs, diff --git a/lexicographic_order_map.py b/lexicographic_order_map.py index 994bbdad4..05d5111c0 100644 --- a/lexicographic_order_map.py +++ b/lexicographic_order_map.py @@ -1,7 +1,7 @@ import islpy as isl -def make_lex_order_map_tuple_pairs(dim_bounds): +def lex_order_map_tuple_pairs_from_explicit_bounds(dim_bounds): # Given list of integer dimension bound pairs # [(lower0, upper0), (lower1, upper1) ... ], @@ -16,7 +16,6 @@ def make_lex_order_map_tuple_pairs(dim_bounds): lex_tuples = list( itertools.product(*[range(l, u) for l, u in dim_bounds])) # goes up to u-1 because u is a non-inclusive upper bound - # TODO: is itertools.product ordering guaranteed? map_pairs = [] for i, l_before in enumerate(lex_tuples): -- GitLab From 3ab809fe85595538e95f39dd12f5100aad66c2f7 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Thu, 8 Aug 2019 13:05:37 -0500 Subject: [PATCH 097/415] refactored LexSchedule to contain list of LexScheduleItems instead of dictionary mapping statement instance tuples to lex time tuples; LexScheduleItems contain statement instance + lex order info --- __init__.py | 1 - dependency.py | 19 +++++-- example_dependency_checking.py | 4 +- schedule.py | 96 ++++++++++++++++------------------ 4 files changed, 62 insertions(+), 58 deletions(-) diff --git a/__init__.py b/__init__.py index 923c137ec..8e17898de 100644 --- a/__init__.py +++ b/__init__.py @@ -202,7 +202,6 @@ def check_schedule_validity( sched.lp_insnid_to_int_sid, sched.unused_param_name, sched.statement_var_name, - sched.statement_var_pose(), ) aligned_constraint_map = constraint_map.align_params(sio.space) diff --git a/dependency.py b/dependency.py index a26d219c9..d035134d3 100644 --- a/dependency.py +++ b/dependency.py @@ -8,6 +8,20 @@ class DependencyType: #ALL = "all" +class Statement(object): + def __init__( + self, + insn_id, # loopy insn id + active_inames, + ): + self.insn_id = insn_id # string + self.active_inames = active_inames # [string, ] + + def __str__(self): + return "%s {%s}" % ( + self.insn_id, ",".join(self.active_inames)) + + class StatementPairDependencySet(object): def __init__( self, @@ -61,7 +75,7 @@ def create_dependency_constraint( insn_id_to_int, unused_param_name, statement_var_name, - statement_var_pose, + statement_var_pose=0, ): from schedule_checker.sched_check_utils import ( make_islvars_with_var_primes, @@ -73,8 +87,6 @@ def create_dependency_constraint( # This function uses the dependency given to create the following constraint: # Statement [s,i,j] comes before statement [s',i',j'] iff - # assumes statements are numbered sequentially - # create some (ordered) isl vars to use, e.g., {s, i, j, s', i', j'} islvars = make_islvars_with_var_primes( [statement_var_name]+all_dom_inames_ordered, @@ -283,7 +295,6 @@ def create_dependencies_from_legacy_knl(knl): get_all_nonconcurrent_insn_iname_subsets, get_sched_item_ids_within_inames, ) - from schedule_checker.schedule import Statement dt = DependencyType conc_inames, non_conc_inames = get_concurrent_inames(knl) #all_inames = list(knl.all_inames()) diff --git a/example_dependency_checking.py b/example_dependency_checking.py index dee185a92..0ebb8244a 100644 --- a/example_dependency_checking.py +++ b/example_dependency_checking.py @@ -3,12 +3,12 @@ from 
schedule_checker.dependency import ( # noqa StatementPairDependencySet, DependencyType as dt, create_dependency_constraint, + Statement, ) from schedule_checker.lexicographic_order_map import ( lex_order_map_tuple_pairs_from_explicit_bounds, get_statement_ordering_map, ) -from schedule_checker.schedule import Statement from schedule_checker.sched_check_utils import ( prettier_map_string, append_apostrophes, @@ -115,7 +115,6 @@ print("----------------------------------------------------------------------") # i dependency is none, j dependency is `prior` statement_var = 's' -statement_var_pose = 0 unused_param_name = 'unused' domains = {} @@ -149,7 +148,6 @@ constraint_map = create_dependency_constraint( insnid_to_int_sid, unused_param_name, statement_var, - statement_var_pose, ) print("constraint map space:") print(constraint_map.space) diff --git a/schedule.py b/schedule.py index 6e5cd3bbb..74ff5c94b 100644 --- a/schedule.py +++ b/schedule.py @@ -2,23 +2,29 @@ import islpy as isl from collections import OrderedDict -class Statement(object): +class LexScheduleItem(object): def __init__( self, - insn_id, - active_inames, + insn_id, # loopy insn id + int_id, # sid int (statement id within LexSchedule) + inames, + lex_pt, # point in lexicographic ordering ): self.insn_id = insn_id # string - self.active_inames = active_inames # [string, ] + self.int_id = int_id + self.inames = inames # [string, ] + self.lex_pt = lex_pt def __str__(self): - return "%s {%s}" % ( - self.insn_id, ",".join(self.active_inames)) + return "%s:%d {%s} -> %s" % ( + self.insn_id, self.statment_id, ",".join(self.inames), + self.lex_pt) class LexSchedule(object): - # contains a mapping of {statement instance: lex point} + # contains list of LexScheduleItems + # representing a mapping of {statement instance: lex point} unused_param_name = "unused" statement_var_name = "statement" @@ -30,13 +36,8 @@ class LexSchedule(object): include_only_insn_ids=None, ): - # mapping of {statement instance: lex point} - # TODO make the key a data type that knows the var names - self.lex_schedule = OrderedDict() - - # symbolic inames in sched that have been enumerated - # into explicit statement instances - self.inames_enumerated = [] + # list of LexScheduleItems + self.lex_schedule = [] # map from loopy insn_id strings to statement id ints self.lp_insnid_to_int_sid = {} @@ -118,7 +119,12 @@ class LexSchedule(object): insn_id_int = self.lp_insnid_to_int_sid[lp_insn_id] # add ((sid,), lex_pt) pair to lex schedule - self.lex_schedule[(insn_id_int,)] = next_insn_lex_pt[:] + self.lex_schedule.append( + LexScheduleItem( + insn_id=lp_insn_id, + int_id=insn_id_int, + inames=None, + lex_pt=next_insn_lex_pt[:])) # increment lex dim val enumerating items in current code block next_insn_lex_pt[-1] = next_insn_lex_pt[-1] + 1 @@ -130,21 +136,25 @@ class LexSchedule(object): self.pad_lex_pts_with_zeros() def max_lex_dims(self): - return max(len(lex_pt) for insn, lex_pt in self.items()) + return max(len(stmt.lex_pt) for stmt in self.lex_schedule) def pad_lex_pts_with_zeros(self): # pad lex points with zeros so that all points have same number of dims max_lex_dim = self.max_lex_dims() - new_sched = OrderedDict() - for insn, lex_pt in self.items(): - new_sched[insn] = lex_pt + [0]*(max_lex_dim-len(lex_pt)) + new_sched = [] + for stmt in self.lex_schedule: + new_sched.append( + LexScheduleItem(stmt.insn_id, stmt.int_id, stmt.inames, + stmt.lex_pt + [0]*(max_lex_dim-len(stmt.lex_pt)))) self.lex_schedule = new_sched def 
add_symbolic_inames_to_statement_instances(self, inames): # append inames to lex tuples (matching specified order) - new_sched = OrderedDict() - for insn, lex_pt in self.lex_schedule.items(): - new_sched[tuple(list(insn)+inames[:])] = lex_pt + new_sched = [] + for stmt in self.lex_schedule: + new_sched.append( + LexScheduleItem( + stmt.insn_id, stmt.int_id, tuple(inames[:]), stmt.lex_pt)) self.lex_schedule = new_sched def add_new_lp_insnid(self, lp_insnid): @@ -156,14 +166,6 @@ class LexSchedule(object): else: self.lp_insnid_to_int_sid[lp_insnid] = 0 - def get_last_schedule_item(self): - return next(reversed(self.lex_schedule)) - - def statement_var_pose(self): - # TODO what is the proper way to provide this information - # while keeping it immutable? - return 0 # 1st position in statement instance tuple - def create_symbolic_isl_map(self, sid_to_dom, dom_inames_ordered): # create isl map representing lex schedule @@ -178,7 +180,7 @@ class LexSchedule(object): # {('statement', used in >=1 statement domain>) -> # (lexicographic ordering dims)} params_sched = [self.unused_param_name] - in_names_sched = [self.statement_var_name] + dom_inames_ordered + in_names_sched = [self.statement_var_name] + dom_inames_ordered[:] out_names_sched = self.get_lex_var_names() from schedule_checker.sched_check_utils import get_isl_space sched_space = get_isl_space(params_sched, in_names_sched, out_names_sched) @@ -186,17 +188,20 @@ class LexSchedule(object): # Insert 'statement' dim into domain so that its space allows for # intersection with sched map later doms_to_intersect = [] - for tup_in, tup_out in self.items(): - sid = tup_in[self.statement_var_pose()] + for stmt in self.lex_schedule: doms_to_intersect.append( add_dims_to_isl_set( - sid_to_dom[sid], isl.dim_type.out, - [self.statement_var_name], self.statement_var_pose())) + sid_to_dom[stmt.int_id], isl.dim_type.out, + [self.statement_var_name], 0)) # create isl map return create_symbolic_isl_map_from_tuples( - zip(list(self.items()), doms_to_intersect), sched_space, - self.unused_param_name, self.statement_var_name) + zip( + [((stmt.int_id,) + tuple(stmt.inames), stmt.lex_pt) + for stmt in self.lex_schedule], + doms_to_intersect + ), + sched_space, self.unused_param_name, self.statement_var_name) def get_lex_var_names(self): return [self.lex_var_prefix+str(i) @@ -222,25 +227,16 @@ class LexSchedule(object): def __iter__(self): return iter(self.lex_schedule) - def keys(self): - return self.lex_schedule.keys() - - def items(self): - return self.lex_schedule.items() - - def values(self): - return self.lex_schedule.values() - def __len__(self): return len(self.lex_schedule) def __str__(self): sched_str = "{\n" - for state_tuple, lex_pt in self.lex_schedule.items(): + for stmt in self.lex_schedule: domain_elem = "[%s=%s,%s]" % ( self.statement_var_name, - state_tuple[self.statement_var_pose()], - ",".join(state_tuple[1:])) - sched_str += "%s -> %s;\n" % (domain_elem, lex_pt) + stmt.int_id, + ",".join(stmt.inames)) + sched_str += "%s -> %s;\n" % (domain_elem, stmt.lex_pt) sched_str += "}" return sched_str -- GitLab From 4067a9c995394e750750392e3bca3739e6e6a904 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sun, 11 Aug 2019 09:48:27 -0500 Subject: [PATCH 098/415] fixed typo kernel->knl --- schedule.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/schedule.py b/schedule.py index 74ff5c94b..91d63a316 100644 --- a/schedule.py +++ b/schedule.py @@ -1,5 +1,4 @@ import islpy as isl -from collections import OrderedDict class 
LexScheduleItem(object): @@ -66,7 +65,7 @@ class LexSchedule(object): warn( "LexSchedule.__init__: Encountered EnterLoop for iname %s " "with ConcurrentTag(s) in schedule for kernel %s. " - "Ignoring this loop." % (iname, kernel.name)) + "Ignoring this loop." % (iname, knl.name)) continue # if the schedule is empty, this is the first schedule item, so # don't increment lex dim val enumerating items in current block, -- GitLab From 2cf845bd4dedc69cde847dc8a5e4257e96f7c962 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sun, 11 Aug 2019 09:49:35 -0500 Subject: [PATCH 099/415] fixed typo statment_id->int_id --- schedule.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schedule.py b/schedule.py index 91d63a316..57feacc95 100644 --- a/schedule.py +++ b/schedule.py @@ -16,7 +16,7 @@ class LexScheduleItem(object): def __str__(self): return "%s:%d {%s} -> %s" % ( - self.insn_id, self.statment_id, ",".join(self.inames), + self.insn_id, self.int_id, ",".join(self.inames), self.lex_pt) -- GitLab From c5a27eec8c0ac724631e029baeeae406b6d8763d Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sun, 11 Aug 2019 10:10:36 -0500 Subject: [PATCH 100/415] instead of holding map of {loopy insn id : lex sched int id}, create it from sched items if necessary using new member function --- __init__.py | 13 +++++++------ schedule.py | 25 ++++++------------------- 2 files changed, 13 insertions(+), 25 deletions(-) diff --git a/__init__.py b/__init__.py index 8e17898de..efc23a62d 100644 --- a/__init__.py +++ b/__init__.py @@ -131,12 +131,13 @@ def check_schedule_validity( # statement tuple, then it is needed in all statement tuples. sched.add_symbolic_inames_to_statement_instances( all_dom_inames_ordered) + lp_insn_id_to_lex_sched_id = sched.loopy_insn_id_to_lex_sched_id() if verbose: print("-"*80) print("LexSchedule with inames added:") print(sched) print("dict{lp insn id : sched sid int}:") - print(sched.lp_insnid_to_int_sid) + print(lp_insn_id_to_lex_sched_id) # Get an isl map representing the LexSchedule; # this requires the iname domains @@ -144,11 +145,11 @@ def check_schedule_validity( if len(sched) == 1: assert dom_before == dom_after sid_to_dom = { - sched.lp_insnid_to_int_sid[s_before.insn_id]: dom_before} + lp_insn_id_to_lex_sched_id[s_before.insn_id]: dom_before} elif len(sched) == 2: sid_to_dom = { - sched.lp_insnid_to_int_sid[s_before.insn_id]: dom_before, - sched.lp_insnid_to_int_sid[s_after.insn_id]: dom_after, + lp_insn_id_to_lex_sched_id[s_before.insn_id]: dom_before, + lp_insn_id_to_lex_sched_id[s_after.insn_id]: dom_after, } else: assert False @@ -199,7 +200,7 @@ def check_schedule_validity( dom_before, dom_after, unscheduled_knl.loop_priority, - sched.lp_insnid_to_int_sid, + lp_insn_id_to_lex_sched_id, sched.unused_param_name, sched.statement_var_name, ) @@ -244,7 +245,7 @@ def check_schedule_validity( print("statement instance ordering:") print(prettier_map_string(sio)) print("{insn id -> sched sid int} dict:") - print(sched.lp_insnid_to_int_sid) + print(lp_insn_id_to_lex_sched_id) """ print("===========================================================") diff --git a/schedule.py b/schedule.py index 57feacc95..bf28ba566 100644 --- a/schedule.py +++ b/schedule.py @@ -12,7 +12,7 @@ class LexScheduleItem(object): self.insn_id = insn_id # string self.int_id = int_id self.inames = inames # [string, ] - self.lex_pt = lex_pt + self.lex_pt = lex_pt # [int, ] def __str__(self): return "%s:%d {%s} -> %s" % ( @@ -38,9 +38,6 @@ class LexSchedule(object): # list of LexScheduleItems 
self.lex_schedule = [] - # map from loopy insn_id strings to statement id ints - self.lp_insnid_to_int_sid = {} - # make sure we don't have an iname name conflict assert not any( iname == self.statement_var_name for iname in knl.all_inames()) @@ -112,16 +109,12 @@ class LexSchedule(object): # otherwise process all instructions if (include_only_insn_ids is None or lp_insn_id in include_only_insn_ids): - # create an int representing this instruction and - # update the map from loopy insn_ids to statement ids - self.add_new_lp_insnid(lp_insn_id) - insn_id_int = self.lp_insnid_to_int_sid[lp_insn_id] - # add ((sid,), lex_pt) pair to lex schedule + # add sched item self.lex_schedule.append( LexScheduleItem( insn_id=lp_insn_id, - int_id=insn_id_int, + int_id=len(self.lex_schedule), # int representing insn inames=None, lex_pt=next_insn_lex_pt[:])) @@ -134,6 +127,9 @@ class LexSchedule(object): # the values in these missing dims should be zero, so add them self.pad_lex_pts_with_zeros() + def loopy_insn_id_to_lex_sched_id(self): + return dict([(lsi.insn_id, lsi.int_id) for lsi in self.lex_schedule]) + def max_lex_dims(self): return max(len(stmt.lex_pt) for stmt in self.lex_schedule) @@ -156,15 +152,6 @@ class LexSchedule(object): stmt.insn_id, stmt.int_id, tuple(inames[:]), stmt.lex_pt)) self.lex_schedule = new_sched - def add_new_lp_insnid(self, lp_insnid): - # create an int representing this instruction and - # update the map from loopy insn_ids to statement ids - if self.lp_insnid_to_int_sid: - self.lp_insnid_to_int_sid[lp_insnid] = max( - self.lp_insnid_to_int_sid.values()) + 1 - else: - self.lp_insnid_to_int_sid[lp_insnid] = 0 - def create_symbolic_isl_map(self, sid_to_dom, dom_inames_ordered): # create isl map representing lex schedule -- GitLab From 843f58e7f1979f0915ca96cdaa63b136282355fa Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sun, 11 Aug 2019 10:54:10 -0500 Subject: [PATCH 101/415] combined unnecessary separate cases for len-1 and len-2 schedules --- __init__.py | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/__init__.py b/__init__.py index efc23a62d..35e11f964 100644 --- a/__init__.py +++ b/__init__.py @@ -142,18 +142,15 @@ def check_schedule_validity( # Get an isl map representing the LexSchedule; # this requires the iname domains + assert len(sched) in [1, 2] if len(sched) == 1: assert dom_before == dom_after - sid_to_dom = { - lp_insn_id_to_lex_sched_id[s_before.insn_id]: dom_before} - elif len(sched) == 2: - sid_to_dom = { - lp_insn_id_to_lex_sched_id[s_before.insn_id]: dom_before, - lp_insn_id_to_lex_sched_id[s_after.insn_id]: dom_after, - } - else: - assert False - # TODO maybe can just do len 2 case + + # get a mapping from lex schedule id to relevant inames domain + sid_to_dom = { + lp_insn_id_to_lex_sched_id[s_before.insn_id]: dom_before, + lp_insn_id_to_lex_sched_id[s_after.insn_id]: dom_after, + } sched_map_symbolic = sched.create_symbolic_isl_map( sid_to_dom, all_dom_inames_ordered) -- GitLab From 6b531c40af1bb7ee84d312457bc5909cc60d519b Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sun, 11 Aug 2019 11:06:29 -0500 Subject: [PATCH 102/415] commenting out printing of code because calling generate_code_v2 at end of loopy sched generation causes problem with save_reload tests --- __init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/__init__.py b/__init__.py index 35e11f964..c12cb2214 100644 --- a/__init__.py +++ b/__init__.py @@ -65,7 +65,7 @@ def check_schedule_validity( print("Kernel:") print(scheduled_knl) from 
loopy import generate_code_v2 - print(generate_code_v2(scheduled_knl).device_code()) + #print(generate_code_v2(scheduled_knl).device_code()) print("="*80) print("Iname tags: %s" % (scheduled_knl.iname_to_tags)) print("="*80) -- GitLab From 927838fc4c553decf19a89933089e6e39d6f3a7f Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sun, 11 Aug 2019 11:14:37 -0500 Subject: [PATCH 103/415] only preprocess if not already preprocessed --- __init__.py | 16 ++++++++++------ schedule.py | 2 +- 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/__init__.py b/__init__.py index c12cb2214..0e15bd85a 100644 --- a/__init__.py +++ b/__init__.py @@ -1,11 +1,11 @@ -# TODO create a set of broken and valid kernels to test against +# TODO create a set of broken kernels to test against # (small kernels to test a specific case) # TODO work on granularity of encapsulation, encapsulate some of this in # separate functions def check_schedule_validity( - unscheduled_knl, + knl, verbose=False, _use_scheduled_kernel_to_obtain_loop_priority=False): @@ -22,9 +22,13 @@ def check_schedule_validity( order_var_names_to_match_islset, ) - from loopy import preprocess_kernel - # TODO check to see if preprocessed already? kernel.kernel_status attr? - preprocessed_knl = preprocess_kernel(unscheduled_knl) + # Preprocess if not already preprocessed + from loopy.kernel import KernelState + if knl.state < KernelState.PREPROCESSED: + from loopy import preprocess_kernel + preprocessed_knl = preprocess_kernel(knl) + else: + preprocessed_knl = knl # Create StatementPairDependencySet(s) from kernel dependencies ----------------- @@ -196,7 +200,7 @@ def check_schedule_validity( all_dom_inames_ordered, dom_before, dom_after, - unscheduled_knl.loop_priority, + knl.loop_priority, lp_insn_id_to_lex_sched_id, sched.unused_param_name, sched.statement_var_name, diff --git a/schedule.py b/schedule.py index bf28ba566..7c4b832ca 100644 --- a/schedule.py +++ b/schedule.py @@ -114,7 +114,7 @@ class LexSchedule(object): self.lex_schedule.append( LexScheduleItem( insn_id=lp_insn_id, - int_id=len(self.lex_schedule), # int representing insn + int_id=len(self.lex_schedule), # int representing insn inames=None, lex_pt=next_insn_lex_pt[:])) -- GitLab From 6b9638bd99267c57e50bb853afc69538a0eb2120 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sun, 11 Aug 2019 11:24:25 -0500 Subject: [PATCH 104/415] removing unused dependency types --- dependency.py | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) diff --git a/dependency.py b/dependency.py index d035134d3..be1077731 100644 --- a/dependency.py +++ b/dependency.py @@ -2,10 +2,8 @@ import islpy as isl class DependencyType: - NONE = "none" SAME = "same" PRIOR = "prior" - #ALL = "all" class Statement(object): @@ -104,16 +102,13 @@ def create_dependency_constraint( inames_after_unused.append(iname + "'") # initialize constraints to False - # this will disappear as soon as we add a constraint that is not dt.NONE + # this will disappear as soon as we add a constraint all_constraints_set = islvars[0].eq_set(islvars[0] + 1) # for each (dep_type, inames) pair, create 'happens before' constraint, # all_constraints_set will be the union of all these constraints dt = DependencyType for dep_type, inames in statement_dep_set.deps.items(): - if dep_type == dt.NONE: # TODO remove, not used - continue - # need to put inames in a list so that order of inames and inames' # matches when calling create_elementwise_comparison_conj... 
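As background for the SAME/PRIOR handling in the surrounding create_dependency_constraint hunk: the constraint sets are assembled from islpy piecewise affines created by make_zero_and_vars, in the same way the rest of this repo uses them. A minimal sketch for a single iname i and its primed copy i' (variable names arbitrary):

    import islpy as isl

    v = isl.make_zero_and_vars(["i", "i'"], [])
    always_true = v[0].eq_set(v[0])    # universe set, the usual starting point
    same = v["i"].eq_set(v["i'"])      # SAME:  i = i'
    prior = v["i"].lt_set(v["i'"])     # PRIOR: i < i'
    print(same & prior)                # empty -- the two cases are disjoint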
if not isinstance(inames, list): @@ -309,12 +304,6 @@ def create_dependencies_from_legacy_knl(knl): shared_non_conc_inames = shared_inames & non_conc_inames dep_dict[dt.SAME] = shared_non_conc_inames - """ - for conc_iname in shared_conc_inames: - dep_dict[conc_iname] = dt.ALL - for non_shared_iname in non_shared_inames: - dep_dict[non_shared_iname] = dt.ALL - """ s_before = Statement(insn_before.id, insn_before_inames) s_after = Statement(insn_after.id, insn_after_inames) -- GitLab From 39074d08c593c2d1d3cd4a9dd31db88405164a42 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sun, 11 Aug 2019 11:27:45 -0500 Subject: [PATCH 105/415] removed TODO that has been handled --- dependency.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/dependency.py b/dependency.py index be1077731..9945aeafd 100644 --- a/dependency.py +++ b/dependency.py @@ -122,10 +122,8 @@ def create_dependency_constraint( inames_list, inames_prime, islvars, op="eq") elif dep_type == dt.PRIOR: - # if nesting is known: - # TODO there might be situations where we know the priority even - # though loop_priorities is None priority_known = False + # if nesting info is provided: if loop_priorities: # assumes all loop_priority tuples are consistent -- GitLab From b88cc89bc5bb2982d0f2ce4ac8cc5ec8e3d8c058 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sun, 11 Aug 2019 11:38:03 -0500 Subject: [PATCH 106/415] removed commented-out code --- __init__.py | 2 +- dependency.py | 41 ------------------------ example_pairwise_schedule_validity.py | 45 ++++++++------------------- sched_check_utils.py | 4 --- test/test_valid_scheds.py | 26 +++++++--------- 5 files changed, 26 insertions(+), 92 deletions(-) diff --git a/__init__.py b/__init__.py index 0e15bd85a..c5bc0ca19 100644 --- a/__init__.py +++ b/__init__.py @@ -68,7 +68,7 @@ def check_schedule_validity( print("="*80) print("Kernel:") print(scheduled_knl) - from loopy import generate_code_v2 + #from loopy import generate_code_v2 #print(generate_code_v2(scheduled_knl).device_code()) print("="*80) print("Iname tags: %s" % (scheduled_knl.iname_to_tags)) diff --git a/dependency.py b/dependency.py index 9945aeafd..a3ae6067e 100644 --- a/dependency.py +++ b/dependency.py @@ -171,16 +171,6 @@ def create_dependency_constraint( priority_known = True priority_tuple = orders.pop() - # old way - """ - for priority_tuple in loop_priorities: - # might be able to deduce priority from multiple tuples - # even if all inames are not present in any single tuple - if set(inames_list).issubset(set(priority_tuple)): - priority_known = True - break - """ - # if only one loop, we know the priority if not priority_known and len(inames_list) == 1: priority_tuple = tuple(inames_list) @@ -232,17 +222,6 @@ def create_dependency_constraint( all_constraints_map = _convert_constraint_set_to_map( all_constraints_set, len(all_dom_inames_ordered) + 1) # +1 for statement var - """ - # for debugging - if dt.PRIOR in statement_dep_set.deps.keys(): - print("!"*90) - print(inames_list_nest_ordered) - from schedule_checker.sched_check_utils import ( - prettier_map_string, - ) - print(prettier_map_string(all_constraints_map)) - print("."*90) - """ # now apply domain sets to constraint variables # add statement variable to doms to enable intersection @@ -266,16 +245,6 @@ def create_dependency_constraint( # intersect doms map_with_loop_domain_constraints = all_constraints_map.intersect_domain( domain_to_intersect).intersect_range(range_to_intersect) - """ - # for debugging - if dt.PRIOR in 
statement_dep_set.deps.keys(): - print(inames_list_nest_ordered) - from schedule_checker.sched_check_utils import ( - prettier_map_string, - ) - print(prettier_map_string(map_with_loop_domain_constraints)) - print("!"*90) - """ return map_with_loop_domain_constraints @@ -290,7 +259,6 @@ def create_dependencies_from_legacy_knl(knl): ) dt = DependencyType conc_inames, non_conc_inames = get_concurrent_inames(knl) - #all_inames = list(knl.all_inames()) statement_dep_sets = [] for insn_after in knl.instructions: for insn_before_id in insn_after.depends_on: @@ -313,23 +281,15 @@ def create_dependencies_from_legacy_knl(knl): # Go through insns and get all unique insn.depends_on iname sets non_conc_iname_subsets = get_all_nonconcurrent_insn_iname_subsets( knl, exclude_empty=True, non_conc_inames=non_conc_inames) - #print("-"*85) - #print("NONCONCURRENT INAME SUBSETS") - #print(non_conc_iname_subsets) # For each set of insns within a given iname set, find sources and sinks. # Then make PRIOR dep from all sinks to all sources at previous iterations for iname_subset in non_conc_iname_subsets: # find items within this iname set sched_item_ids = get_sched_item_ids_within_inames(knl, iname_subset) - #print("") - #print("inames:", iname_subset) - #print("matching sched items:", sched_item_ids) # find sources and sinks sources, sinks = get_dependency_sources_and_sinks(knl, sched_item_ids) - #print("sources:", sources) - #print("sinks:", sinks) # TODO in future, consider putting in a single no-op source and sink # create prior deps @@ -348,7 +308,6 @@ def create_dependencies_from_legacy_knl(knl): s_after = Statement(source_id, source_insn_inames) statement_dep_sets.append( StatementPairDependencySet(s_before, s_after, dep_dict)) - #print("-"*85) return statement_dep_sets diff --git a/example_pairwise_schedule_validity.py b/example_pairwise_schedule_validity.py index 3d5917c0d..0cca2d18a 100644 --- a/example_pairwise_schedule_validity.py +++ b/example_pairwise_schedule_validity.py @@ -24,22 +24,20 @@ if knl_choice == "example": "{[j,jj]: 0<=jtemp = b[i,k] {id=insn_a} - end - for j - a[i,j] = temp + 1 {id=insn_b,dep=insn_a} - c[i,j] = d[i,j] {id=insn_c} - end + """ + for i + for k + <>temp = b[i,k] {id=insn_a} end - for t - e[t] = f[t] {id=insn_d} + for j + a[i,j] = temp + 1 {id=insn_b,dep=insn_a} + c[i,j] = d[i,j] {id=insn_c} end - """ - ], + end + for t + e[t] = f[t] {id=insn_d} + end + """, name="example", assumptions="pi,pj,pk,pt >= 1", lang_version=(2018, 2) @@ -148,24 +146,7 @@ elif knl_choice == "ilp_kernel": assumptions="n>=1 and n mod 4 = 0", ) # TODO why is conditional on ilp_name? 
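The debug blocks this patch removes from dependency.py leaned on prettier_map_string for readable isl output; the helper itself remains in sched_check_utils.py, so ad-hoc inspection stays one call away. A small usage sketch (the map string here is invented for illustration):

    import islpy as isl
    from schedule_checker.sched_check_utils import prettier_map_string

    m = isl.Map("[n] -> { [i] -> [i'] : 0 <= i < i' < n }")
    print(prettier_map_string(m))  # same text as str(m), just with line breaks added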
- knl = lp.tag_inames(knl, {"j": "l.0","ilp_iname": "ilp"}) - """ - for i - tmp[i] = 3.14 {id=write_tmp} - aind = 1 {id=aind_incr,dep=write_tmp} - end - knl = lp.split_iname( - knl, "i", 16, - inner_tag="l.0", - ) - knl = lp.split_iname( - knl, "i_outer", 2, - outer_tag="g.0", - inner_tag="ilp", - #inner_tag="unr", - #inner_tag="g.1", - ) - """ + knl = lp.tag_inames(knl, {"j": "l.0", "ilp_iname": "ilp"}) #knl = lp.prioritize_loops(knl, "i_outer_outer,i_outer_inner,i_inner,a") if knl_choice == "add_barrier": np.random.seed(17) diff --git a/sched_check_utils.py b/sched_check_utils.py index ad7bbc351..c93b49782 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -378,10 +378,6 @@ def get_orderings_of_length_n( # currently unused: """ -def flatten_2d_list(list2d): - return [item for inner_list in list2d for item in inner_list] - - def add_missing_set_dims_to_map_indims(islmap, islset): new_map = islmap.copy() for i in range(islset.n_dim()): diff --git a/test/test_valid_scheds.py b/test/test_valid_scheds.py index 24855c455..6603c7a93 100644 --- a/test/test_valid_scheds.py +++ b/test/test_valid_scheds.py @@ -39,22 +39,20 @@ def test_loop_prioritization(): "{[j,jj]: 0<=jtemp = b[i,k] {id=insn_a} - end - for j - a[i,j] = temp + 1 {id=insn_b,dep=insn_a} - c[i,j] = d[i,j] {id=insn_c} - end + """ + for i + for k + <>temp = b[i,k] {id=insn_a} end - for t - e[t] = f[t] {id=insn_d} + for j + a[i,j] = temp + 1 {id=insn_b,dep=insn_a} + c[i,j] = d[i,j] {id=insn_c} end - """ - ], + end + for t + e[t] = f[t] {id=insn_d} + end + """, name="example", assumptions="pi,pj,pk,pt >= 1", lang_version=(2018, 2) -- GitLab From d7b4b42d8c499e4f3b0b70976f452e10960a8cf4 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 12 Aug 2019 12:09:17 -0500 Subject: [PATCH 107/415] slightly better comments to explain what get_orderings_of_length_n() does --- dependency.py | 12 +++++++----- sched_check_utils.py | 13 ++++++++----- 2 files changed, 15 insertions(+), 10 deletions(-) diff --git a/dependency.py b/dependency.py index a3ae6067e..e5c71ef09 100644 --- a/dependency.py +++ b/dependency.py @@ -143,7 +143,9 @@ def create_dependency_constraint( if len(new_tuple) > 1: relevant_priorities.add(tuple(new_tuple)) - nested_after = {} + # create a mapping from each iname to inames that must be + # nested inside that iname + nested_inside = {} for iname in inames_list: comes_after_iname = set() for p_tuple in relevant_priorities: @@ -151,16 +153,16 @@ def create_dependency_constraint( comes_after_iname.update([ iname for iname in p_tuple[p_tuple.index(iname)+1:]]) - nested_after[iname] = comes_after_iname + nested_inside[iname] = comes_after_iname from schedule_checker.sched_check_utils import ( get_orderings_of_length_n ) - # TODO explain how it only creates explicitly described orderings + # get all orderings that are explicitly allowed by priorities orders = get_orderings_of_length_n( - nested_after, + nested_inside, required_length=len(inames_list), - #return_first_found=True, # TODO might be faster + #return_first_found=True, # faster; obviates assert test below return_first_found=False, ) # TODO make sure this handles a cycle (error) diff --git a/sched_check_utils.py b/sched_check_utils.py index c93b49782..f99c0b508 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -318,14 +318,12 @@ def get_inames_in_sched_order(scheduled_knl): if isinstance(sched_item, EnterLoop)] -# TODO made a mess trying to make this as fast as possible, # TODO use yield to clean this up -# probably a better way -# TODO find 
topological sort in loopy, then find longest path in dag +# TODO use topological sort from loopy, then find longest path in dag def _generate_orderings_starting_w_prefix( allowed_after_dict, orderings, required_length=None, start_prefix=(), return_first_found=False): - # comes after dict = {str: set(str)} + # alowed_after_dict = {str: set(str)} # start prefix = tuple(str) # orderings = set if start_prefix: @@ -363,8 +361,13 @@ def _generate_orderings_starting_w_prefix( def get_orderings_of_length_n( allowed_after_dict, required_length, return_first_found=False): - # comes after dict = {str: set(str)} + # get all orderings that are *explicitly* allowed by allowed_after_dict + # i.e., if we know a->b and c->b, we don't know enough to return a->c->b + # note: if the set for a dict key is empty, nothing allowed to come after + + # alowed_after_dict = {str: set(str)} + orderings = set() _generate_orderings_starting_w_prefix( allowed_after_dict, -- GitLab From 02ce094a52b88d13096a777ef6a006589862146a Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 12 Aug 2019 12:38:13 -0500 Subject: [PATCH 108/415] check+error for inconsistent priorities; also relevant test case --- dependency.py | 18 ++++---- test/test_invalid_scheds.py | 91 +++++++++++++++++++++++++++++++++++++ 2 files changed, 101 insertions(+), 8 deletions(-) create mode 100644 test/test_invalid_scheds.py diff --git a/dependency.py b/dependency.py index e5c71ef09..f94423a02 100644 --- a/dependency.py +++ b/dependency.py @@ -156,20 +156,22 @@ def create_dependency_constraint( nested_inside[iname] = comes_after_iname from schedule_checker.sched_check_utils import ( - get_orderings_of_length_n - ) + get_orderings_of_length_n) # get all orderings that are explicitly allowed by priorities orders = get_orderings_of_length_n( nested_inside, required_length=len(inames_list), - #return_first_found=True, # faster; obviates assert test below - return_first_found=False, + #return_first_found=True, + return_first_found=False, # slower; allows priorities test below ) - # TODO make sure this handles a cycle (error) + if orders: - assert len(orders) == 1 - # TODO can remove assert if return_first_found above - # (or if we trust that all iname priorities are consistent) + # test for invalid priorities (includes cycles) + if len(orders) != 1: + raise ValueError( + "create_dependency_constriant encountered invalid " + "priorities %s" + % (loop_priorities)) priority_known = True priority_tuple = orders.pop() diff --git a/test/test_invalid_scheds.py b/test/test_invalid_scheds.py new file mode 100644 index 000000000..db85e10c4 --- /dev/null +++ b/test/test_invalid_scheds.py @@ -0,0 +1,91 @@ +from __future__ import division, print_function + +__copyright__ = "Copyright (C) 2018 James Stevens" + +__license__ = """ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. +""" + +import sys +from pyopencl.tools import ( # noqa + pytest_generate_tests_for_pyopencl + as pytest_generate_tests) +import loopy as lp +import numpy as np +from schedule_checker import check_schedule_validity + + +def test_invalid_prioritiy_detection(): + ref_knl = lp.make_kernel( + [ + "{[h]: 0<=h acc = 0 + for h,i,j,k + acc = acc + h + i + j + k + end + """, + name="priorities", + assumptions="ni,nj,nk,nh >= 1", + lang_version=(2018, 2) + ) + + # no error: + knl0 = lp.prioritize_loops(ref_knl, "h,i") + knl0 = lp.prioritize_loops(ref_knl, "i,j") + knl0 = lp.prioritize_loops(knl0, "j,k") + assert check_schedule_validity(knl0) + + # no error: + knl1 = lp.prioritize_loops(ref_knl, "h,i,k") + knl1 = lp.prioritize_loops(knl1, "h,j,k") + assert check_schedule_validity(knl1) + + # error (cycle): + knl2 = lp.prioritize_loops(ref_knl, "h,i,j") + knl2 = lp.prioritize_loops(knl2, "j,k") + knl2 = lp.prioritize_loops(knl2, "k,i") + try: + check_schedule_validity(knl2) + # should raise error + assert False + except ValueError as e: + assert "invalid priorities" in str(e) + + # error (inconsistent priorities): + knl3 = lp.prioritize_loops(ref_knl, "h,i,j,k") + knl3 = lp.prioritize_loops(knl3, "h,j,i,k") + try: + check_schedule_validity(knl3) + # should raise error + assert False + except ValueError as e: + assert "invalid priorities" in str(e) + + +if __name__ == "__main__": + if len(sys.argv) > 1: + exec(sys.argv[1]) + else: + from pytest import main + main([__file__]) -- GitLab From 5d969b19d49364606426c30b4bd1aa96c52081a8 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 12 Aug 2019 12:43:30 -0500 Subject: [PATCH 109/415] removed two TODOs --- dependency.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dependency.py b/dependency.py index f94423a02..aa6d63eee 100644 --- a/dependency.py +++ b/dependency.py @@ -295,8 +295,9 @@ def create_dependencies_from_legacy_knl(knl): # find sources and sinks sources, sinks = get_dependency_sources_and_sinks(knl, sched_item_ids) - # TODO in future, consider putting in a single no-op source and sink # create prior deps + + # in future, consider inserting single no-op source and sink for source_id in sources: for sink_id in sinks: dep_dict = {} @@ -305,7 +306,6 @@ def create_dependencies_from_legacy_knl(knl): shared_inames = sink_insn_inames & source_insn_inames shared_non_conc_inames = shared_inames & non_conc_inames - # TODO who tracks the iname nesting (needed for prior)? 
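For illustration, the priority-consistency test above can be reproduced outside of loopy and isl with a brute-force enumeration. The helper below is a standalone sketch with a hypothetical name (it is not the patch's get_orderings_of_length_n): it collects the total orders of a set of inames that respect a collection of pairwise priorities, where zero survivors indicates a cycle and more than one indicates that the priorities do not determine a unique nesting.

from itertools import permutations

def consistent_total_orders(inames, before_pairs):
    # before_pairs: set of (x, y) pairs meaning "x must come before y"
    orders = []
    for perm in permutations(sorted(inames)):
        pos = {name: idx for idx, name in enumerate(perm)}
        if all(pos[x] < pos[y] for x, y in before_pairs):
            orders.append(perm)
    return orders

# the cyclic case from the test above: "h,i,j", then "j,k", then "k,i"
pairs = {("h", "i"), ("i", "j"), ("j", "k"), ("k", "i")}
print(consistent_total_orders({"h", "i", "j", "k"}, pairs))  # [] -> cycle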
dep_dict[dt.PRIOR] = shared_non_conc_inames s_before = Statement(sink_id, sink_insn_inames) -- GitLab From f7afb2828cb3ce9f373508300b143b70285771f0 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 12 Aug 2019 13:17:22 -0500 Subject: [PATCH 110/415] WIP-removing combined domain creation (was used for getting consistent iname order that matches domains) --- __init__.py | 30 +++++++++++++++++++++--------- sched_check_utils.py | 15 +++++++++++++++ 2 files changed, 36 insertions(+), 9 deletions(-) diff --git a/__init__.py b/__init__.py index c5bc0ca19..32b86c590 100644 --- a/__init__.py +++ b/__init__.py @@ -19,7 +19,7 @@ def check_schedule_validity( ) from schedule_checker.sched_check_utils import ( prettier_map_string, - order_var_names_to_match_islset, + order_var_names_to_match_islsets, ) # Preprocess if not already preprocessed @@ -33,8 +33,10 @@ def check_schedule_validity( # Create StatementPairDependencySet(s) from kernel dependencies ----------------- # Introduce SAME dep for set of shared, non-concurrent inames. + # For each set of insns within a given iname subset, find sources and sinks, # then make PRIOR dep from all sinks to all sources at previous iterations. + statement_pair_dep_sets = create_dependencies_from_legacy_knl(preprocessed_knl) # get separate domains for before.active_inames and after.active_inames @@ -104,17 +106,27 @@ def check_schedule_validity( # have the unused inames in their 'in_' dim vars, so we'll # include them and set them equal to a dummy variable. - # combined_doms is only used for printing (map.gist(dom)) - # and for getting a consistent iname ordering to use in our maps - combined_doms = preprocessed_knl.get_inames_domain( - s_before.active_inames | s_after.active_inames) - # TODO not guaranteed to work - - # Get all inames now in order to maintain list with consistent ordering + # Get a consistent iname ordering to use in our maps # This will help keep isl maps/sets compatible + + # TODO We're starting with an order matching the domains + # so that we don't have to worry about reordering isl sets/maps later + # and if we don't, assertions will fail. Later, improve this so we can + # start with arbitrary ordering of inames, or find some other way to + # make this more intuitive+robust. 
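As a minimal, standalone illustration of the iname-ordering concern discussed in the comments above (toy domains, not taken from a real kernel), the set-dim names of several isl sets can be collected into one ordered list roughly as follows:

import islpy as isl

# toy domains standing in for what get_inames_domain() would return;
# their dims overlap but are not ordered identically
dom_a = isl.BasicSet("[n] -> { [i, j] : 0 <= i < n and 0 <= j < n }")
dom_b = isl.BasicSet("[n] -> { [j, k] : 0 <= j < n and 0 <= k < n }")

# collect one ordered list of set-dim names covering both domains
names = []
for dom in [dom_a, dom_b]:
    for name in dom.get_var_names(isl.dim_type.set):
        if name not in names:
            names.append(name)
print(names)  # ['i', 'j', 'k']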
import islpy as isl - all_dom_inames_ordered = order_var_names_to_match_islset( + all_dom_inames_ordered = order_var_names_to_match_islsets( + preprocessed_knl.all_inames(), [dom_after, dom_before], isl.dim_type.out) + + combined_doms = preprocessed_knl.get_inames_domain( + s_before.active_inames | s_after.active_inames) + from schedule_checker.sched_check_utils import ( + order_var_names_to_match_islset) + _all_dom_inames_ordered = order_var_names_to_match_islset( preprocessed_knl.all_inames(), combined_doms, isl.dim_type.out) + print(all_dom_inames_ordered) + print(_all_dom_inames_ordered) + #assert all_dom_inames_ordered == _all_dom_inames_ordered # Create a mapping of {statement instance: lex point} # including only instructions involved in this dependency diff --git a/sched_check_utils.py b/sched_check_utils.py index f99c0b508..482762864 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -111,6 +111,21 @@ def order_var_names_to_match_islset(var_names, islset, set_dim=isl.dim_type.out) return names_ordered_to_match_islset +def order_var_names_to_match_islsets(var_names, islset_list, set_dim=isl.dim_type.out): + # returns subset of var_names found in islset in + # order matching the islset variables + name_order = [] + for islset in islset_list: + name_order.extend( + [v for v in islset.get_var_names(set_dim) + if v not in name_order]) + names_ordered_to_match_islsets = [] + for v in name_order: + if v in var_names: + names_ordered_to_match_islsets.append(v) + return names_ordered_to_match_islsets + + def create_explicit_map_from_tuples(tuple_pairs, space): dim_type = isl.dim_type -- GitLab From 4a33f708f788636926eb088abf636b4ed5c7f2cd Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 12 Aug 2019 16:09:11 -0500 Subject: [PATCH 111/415] refactored (again) so that LexSchedule now contains a list of (LexScheduleStatement, [lex point]) tuples --- dependency.py | 12 ++++---- example_dependency_checking.py | 6 ++-- schedule.py | 51 +++++++++++++++------------------- 3 files changed, 32 insertions(+), 37 deletions(-) diff --git a/dependency.py b/dependency.py index aa6d63eee..fefdc11da 100644 --- a/dependency.py +++ b/dependency.py @@ -6,7 +6,7 @@ class DependencyType: PRIOR = "prior" -class Statement(object): +class LoopyStatement(object): def __init__( self, insn_id, # loopy insn id @@ -162,7 +162,7 @@ def create_dependency_constraint( nested_inside, required_length=len(inames_list), #return_first_found=True, - return_first_found=False, # slower; allows priorities test below + return_first_found=False, # slower; allows priorities test below ) if orders: @@ -275,8 +275,8 @@ def create_dependencies_from_legacy_knl(knl): dep_dict[dt.SAME] = shared_non_conc_inames - s_before = Statement(insn_before.id, insn_before_inames) - s_after = Statement(insn_after.id, insn_after_inames) + s_before = LoopyStatement(insn_before.id, insn_before_inames) + s_after = LoopyStatement(insn_after.id, insn_after_inames) statement_dep_sets.append( StatementPairDependencySet(s_before, s_after, dep_dict)) @@ -308,8 +308,8 @@ def create_dependencies_from_legacy_knl(knl): dep_dict[dt.PRIOR] = shared_non_conc_inames - s_before = Statement(sink_id, sink_insn_inames) - s_after = Statement(source_id, source_insn_inames) + s_before = LoopyStatement(sink_id, sink_insn_inames) + s_after = LoopyStatement(source_id, source_insn_inames) statement_dep_sets.append( StatementPairDependencySet(s_before, s_after, dep_dict)) diff --git a/example_dependency_checking.py b/example_dependency_checking.py index 
0ebb8244a..f55540c44 100644 --- a/example_dependency_checking.py +++ b/example_dependency_checking.py @@ -3,7 +3,7 @@ from schedule_checker.dependency import ( # noqa StatementPairDependencySet, DependencyType as dt, create_dependency_constraint, - Statement, + LoopyStatement, ) from schedule_checker.lexicographic_order_map import ( lex_order_map_tuple_pairs_from_explicit_bounds, @@ -122,8 +122,8 @@ for iname in all_necessary_inames_ordered: domains[iname] = knl.get_inames_domain(iname) # make some dependencies manually for now: -s0 = Statement("0", {"i", "j"}) -s1 = Statement("1", {"i", "j"}) +s0 = LoopyStatement("0", {"i", "j"}) +s1 = LoopyStatement("1", {"i", "j"}) insnid_to_int_sid = {"0": 0, "1": 1} statement_pair_dep_set = StatementPairDependencySet(s0, s1, {dt.SAME: ["i", "j"]}) diff --git a/schedule.py b/schedule.py index 7c4b832ca..a5d90b010 100644 --- a/schedule.py +++ b/schedule.py @@ -1,30 +1,24 @@ import islpy as isl -class LexScheduleItem(object): +class LexScheduleStatement(object): def __init__( self, insn_id, # loopy insn id int_id, # sid int (statement id within LexSchedule) inames, - lex_pt, # point in lexicographic ordering ): self.insn_id = insn_id # string self.int_id = int_id self.inames = inames # [string, ] - self.lex_pt = lex_pt # [int, ] def __str__(self): - return "%s:%d {%s} -> %s" % ( - self.insn_id, self.int_id, ",".join(self.inames), - self.lex_pt) + return "%s:%d {%s}" % ( + self.insn_id, self.int_id, ",".join(self.inames)) class LexSchedule(object): - # contains list of LexScheduleItems - # representing a mapping of {statement instance: lex point} - unused_param_name = "unused" statement_var_name = "statement" lex_var_prefix = "l" @@ -35,7 +29,7 @@ class LexSchedule(object): include_only_insn_ids=None, ): - # list of LexScheduleItems + # list of LexScheduleStatements self.lex_schedule = [] # make sure we don't have an iname name conflict @@ -112,11 +106,13 @@ class LexSchedule(object): # add sched item self.lex_schedule.append( - LexScheduleItem( + ( + LexScheduleStatement( insn_id=lp_insn_id, int_id=len(self.lex_schedule), # int representing insn - inames=None, - lex_pt=next_insn_lex_pt[:])) + inames=None), + next_insn_lex_pt[:] + )) # increment lex dim val enumerating items in current code block next_insn_lex_pt[-1] = next_insn_lex_pt[-1] + 1 @@ -128,28 +124,27 @@ class LexSchedule(object): self.pad_lex_pts_with_zeros() def loopy_insn_id_to_lex_sched_id(self): - return dict([(lsi.insn_id, lsi.int_id) for lsi in self.lex_schedule]) + return dict([(stmt.insn_id, stmt.int_id) for stmt, _ in self.lex_schedule]) def max_lex_dims(self): - return max(len(stmt.lex_pt) for stmt in self.lex_schedule) + return max(len(lex_pt) for _, lex_pt in self.lex_schedule) def pad_lex_pts_with_zeros(self): # pad lex points with zeros so that all points have same number of dims max_lex_dim = self.max_lex_dims() new_sched = [] - for stmt in self.lex_schedule: - new_sched.append( - LexScheduleItem(stmt.insn_id, stmt.int_id, stmt.inames, - stmt.lex_pt + [0]*(max_lex_dim-len(stmt.lex_pt)))) + for stmt, lex_pt in self.lex_schedule: + new_sched.append((stmt, lex_pt + [0]*(max_lex_dim-len(lex_pt)))) self.lex_schedule = new_sched def add_symbolic_inames_to_statement_instances(self, inames): # append inames to lex tuples (matching specified order) new_sched = [] - for stmt in self.lex_schedule: - new_sched.append( - LexScheduleItem( - stmt.insn_id, stmt.int_id, tuple(inames[:]), stmt.lex_pt)) + for stmt, lex_pt in self.lex_schedule: + new_sched.append(( + 
LexScheduleStatement(stmt.insn_id, stmt.int_id, tuple(inames[:])), + lex_pt + )) self.lex_schedule = new_sched def create_symbolic_isl_map(self, sid_to_dom, dom_inames_ordered): @@ -174,7 +169,7 @@ class LexSchedule(object): # Insert 'statement' dim into domain so that its space allows for # intersection with sched map later doms_to_intersect = [] - for stmt in self.lex_schedule: + for stmt, _ in self.lex_schedule: doms_to_intersect.append( add_dims_to_isl_set( sid_to_dom[stmt.int_id], isl.dim_type.out, @@ -183,8 +178,8 @@ class LexSchedule(object): # create isl map return create_symbolic_isl_map_from_tuples( zip( - [((stmt.int_id,) + tuple(stmt.inames), stmt.lex_pt) - for stmt in self.lex_schedule], + [((stmt.int_id,) + tuple(stmt.inames), lex_pt) + for stmt, lex_pt in self.lex_schedule], doms_to_intersect ), sched_space, self.unused_param_name, self.statement_var_name) @@ -218,11 +213,11 @@ class LexSchedule(object): def __str__(self): sched_str = "{\n" - for stmt in self.lex_schedule: + for stmt, lex_pt in self.lex_schedule: domain_elem = "[%s=%s,%s]" % ( self.statement_var_name, stmt.int_id, ",".join(stmt.inames)) - sched_str += "%s -> %s;\n" % (domain_elem, stmt.lex_pt) + sched_str += "%s -> %s;\n" % (domain_elem, lex_pt) sched_str += "}" return sched_str -- GitLab From a80c31abf4d571616099f059e753f95070bc116a Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 12 Aug 2019 16:35:05 -0500 Subject: [PATCH 112/415] added initial docstrings for LexSchedule and LexScheduleStatement --- schedule.py | 57 +++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 57 insertions(+) diff --git a/schedule.py b/schedule.py index a5d90b010..b29cdd6e8 100644 --- a/schedule.py +++ b/schedule.py @@ -2,6 +2,26 @@ import islpy as isl class LexScheduleStatement(object): + """A representation of a Loopy statement instance or set of + instances used in a :class:`LexSchedule`. + + .. attribute:: insn_id + + A :class:`str` specifying the instruction id. + + .. attribute:: int_id + + A :class:`int` uniquely identifying the instruction. + + .. attribute:: inames + + A list of :class:`str` representing the inames applying to + this instruction, and possibly additional inames that do not + apply to this instruction but must be included because they + are used in another instruction in the program ordering. + + """ + def __init__( self, insn_id, # loopy insn id @@ -18,6 +38,41 @@ class LexScheduleStatement(object): class LexSchedule(object): + """A program ordering represented as a mapping from statement + instances to points in a lexicographic ordering. + + .. attribute:: lex_schedule + + A :class:`list` of (:class:`LexScheduleStatement`, :class:`list`) + tuples, representing the program ordering as a map from + statement instances to points in a lexicographic ordering. Points + in lexicographic ordering represented as list of :class:`int`. + + .. attribute:: unused_param_name + + A :class:`str` that specifies the name of a dummy isl parameter + assigned to variables in domain elements of the isl map that + represent inames unused in a particular statement instance. + The domain space of the generated isl map will have a dimension + for every iname used in any statement instance found in the + program ordering. An element in the domain of this map may + represent a statement instance that does not lie within + iname x, but will still need to assign a value to the x domain + variable. In this case, the parameter unused_param_name is + is assigned to x. + + .. 
attribute:: statement_var_name + + A :class:`str` specifying the name of the isl variable used + to represent the unique :class:`int` statement id. + + .. attribute:: lex_var_prefix + + A :class:`str` specifying the prefix to be used for the variables + representing the dimensions in the lexicographic ordering. E.g., + a prefix of "lex" might yield variables "lex0", "lex1", "lex2". + + """ unused_param_name = "unused" statement_var_name = "statement" @@ -138,6 +193,8 @@ class LexSchedule(object): self.lex_schedule = new_sched def add_symbolic_inames_to_statement_instances(self, inames): + # TODO if inames is same for all stmt instances, just keep single copy somewhere + # append inames to lex tuples (matching specified order) new_sched = [] for stmt, lex_pt in self.lex_schedule: -- GitLab From a16d516a04a67024816b0c93c08e8838986c2e67 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 12 Aug 2019 16:37:25 -0500 Subject: [PATCH 113/415] added todo --- schedule.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/schedule.py b/schedule.py index b29cdd6e8..0b24d5f85 100644 --- a/schedule.py +++ b/schedule.py @@ -1,6 +1,10 @@ import islpy as isl +# TODO if inames must be same for all stmt instances in lex sched, +# just keep single copy somewhere. After fixing this, combine +# LexScheduleStatement and LoopyStatement into single class + class LexScheduleStatement(object): """A representation of a Loopy statement instance or set of instances used in a :class:`LexSchedule`. -- GitLab From 32f4cf80e5c2433fc12caaa47fbccff3dd71bb5a Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 20 Aug 2019 02:28:10 -0500 Subject: [PATCH 114/415] keeping single copy of map_domain_inames in schedule rather than storing redundant copies in all statement instances; combined LoopyStatement with LexScheduleStatement --- __init__.py | 10 +++--- dependency.py | 31 +++++++--------- example_dependency_checking.py | 14 ++++---- schedule.py | 66 +++++++++++++++++----------------- 4 files changed, 59 insertions(+), 62 deletions(-) diff --git a/__init__.py b/__init__.py index c5bc0ca19..1a103a8a7 100644 --- a/__init__.py +++ b/__init__.py @@ -37,15 +37,15 @@ def check_schedule_validity( # then make PRIOR dep from all sinks to all sources at previous iterations. statement_pair_dep_sets = create_dependencies_from_legacy_knl(preprocessed_knl) - # get separate domains for before.active_inames and after.active_inames + # get separate domains for before.within_inames and after.within_inames deps_and_domains = [] for dep_set in statement_pair_dep_sets: deps_and_domains.append([ dep_set, preprocessed_knl.get_inames_domain( - dep_set.statement_before.active_inames), + dep_set.statement_before.within_inames), preprocessed_knl.get_inames_domain( - dep_set.statement_after.active_inames) + dep_set.statement_after.within_inames) ]) if verbose: @@ -107,7 +107,7 @@ def check_schedule_validity( # combined_doms is only used for printing (map.gist(dom)) # and for getting a consistent iname ordering to use in our maps combined_doms = preprocessed_knl.get_inames_domain( - s_before.active_inames | s_after.active_inames) + s_before.within_inames | s_after.within_inames) # TODO not guaranteed to work # Get all inames now in order to maintain list with consistent ordering @@ -133,7 +133,7 @@ def check_schedule_validity( # but all in-tuples need to match because they will become # the in-dims for an isl map, so if an iname is needed in one # statement tuple, then it is needed in all statement tuples. 
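A hand-written toy version of the kind of map described in these comments (illustrative values only, not output of LexSchedule): statement instances map to lexicographic points, and an iname that one statement does not lie within is pinned to a dummy parameter.

import islpy as isl

# two statements inside loop i; only statement 1 also lies inside loop j,
# so statement 0 pins j to the dummy parameter "unused"
sched = isl.Map(
    "[pi, pj, unused] -> {"
    " [statement, i, j] -> [0, i, 0] :"
    "   statement = 0 and 0 <= i < pi and j = unused;"
    " [statement, i, j] -> [0, i, 1] :"
    "   statement = 1 and 0 <= i < pi and 0 <= j < pj"
    " }")
print(sched)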
- sched.add_symbolic_inames_to_statement_instances( + sched.set_symbolic_inames_for_statement_instance_space( all_dom_inames_ordered) lp_insn_id_to_lex_sched_id = sched.loopy_insn_id_to_lex_sched_id() if verbose: diff --git a/dependency.py b/dependency.py index fefdc11da..e71e77b31 100644 --- a/dependency.py +++ b/dependency.py @@ -6,20 +6,6 @@ class DependencyType: PRIOR = "prior" -class LoopyStatement(object): - def __init__( - self, - insn_id, # loopy insn id - active_inames, - ): - self.insn_id = insn_id # string - self.active_inames = active_inames # [string, ] - - def __str__(self): - return "%s {%s}" % ( - self.insn_id, ",".join(self.active_inames)) - - class StatementPairDependencySet(object): def __init__( self, @@ -261,6 +247,7 @@ def create_dependencies_from_legacy_knl(knl): get_all_nonconcurrent_insn_iname_subsets, get_sched_item_ids_within_inames, ) + from schedule_checker.schedule import LexScheduleStatement dt = DependencyType conc_inames, non_conc_inames = get_concurrent_inames(knl) statement_dep_sets = [] @@ -275,8 +262,12 @@ def create_dependencies_from_legacy_knl(knl): dep_dict[dt.SAME] = shared_non_conc_inames - s_before = LoopyStatement(insn_before.id, insn_before_inames) - s_after = LoopyStatement(insn_after.id, insn_after_inames) + s_before = LexScheduleStatement( + insn_id=insn_before.id, + within_inames=insn_before_inames) + s_after = LexScheduleStatement( + insn_id=insn_after.id, + within_inames=insn_after_inames) statement_dep_sets.append( StatementPairDependencySet(s_before, s_after, dep_dict)) @@ -308,8 +299,12 @@ def create_dependencies_from_legacy_knl(knl): dep_dict[dt.PRIOR] = shared_non_conc_inames - s_before = LoopyStatement(sink_id, sink_insn_inames) - s_after = LoopyStatement(source_id, source_insn_inames) + s_before = LexScheduleStatement( + insn_id=sink_id, + within_inames=sink_insn_inames) + s_after = LexScheduleStatement( + insn_id=source_id, + within_inames=source_insn_inames) statement_dep_sets.append( StatementPairDependencySet(s_before, s_after, dep_dict)) diff --git a/example_dependency_checking.py b/example_dependency_checking.py index f55540c44..b81c52de7 100644 --- a/example_dependency_checking.py +++ b/example_dependency_checking.py @@ -3,7 +3,6 @@ from schedule_checker.dependency import ( # noqa StatementPairDependencySet, DependencyType as dt, create_dependency_constraint, - LoopyStatement, ) from schedule_checker.lexicographic_order_map import ( lex_order_map_tuple_pairs_from_explicit_bounds, @@ -15,6 +14,7 @@ from schedule_checker.sched_check_utils import ( create_explicit_map_from_tuples, get_isl_space, ) +from schedule_checker.schedule import LexScheduleStatement # make example kernel @@ -122,21 +122,21 @@ for iname in all_necessary_inames_ordered: domains[iname] = knl.get_inames_domain(iname) # make some dependencies manually for now: -s0 = LoopyStatement("0", {"i", "j"}) -s1 = LoopyStatement("1", {"i", "j"}) +s0 = LexScheduleStatement(insn_id="0", within_inames={"i", "j"}) +s1 = LexScheduleStatement(insn_id="1", within_inames={"i", "j"}) insnid_to_int_sid = {"0": 0, "1": 1} statement_pair_dep_set = StatementPairDependencySet(s0, s1, {dt.SAME: ["i", "j"]}) print(statement_pair_dep_set) combined_doms = knl.get_inames_domain( - statement_pair_dep_set.statement_before.active_inames | # noqa - statement_pair_dep_set.statement_after.active_inames + statement_pair_dep_set.statement_before.within_inames | # noqa + statement_pair_dep_set.statement_after.within_inames ) dom_before = knl.get_inames_domain( - 
statement_pair_dep_set.statement_before.active_inames + statement_pair_dep_set.statement_before.within_inames ) dom_after = knl.get_inames_domain( - statement_pair_dep_set.statement_after.active_inames + statement_pair_dep_set.statement_after.within_inames ) loop_priority = None # TODO constraint_map = create_dependency_constraint( diff --git a/schedule.py b/schedule.py index 0b24d5f85..0d068f98d 100644 --- a/schedule.py +++ b/schedule.py @@ -1,13 +1,8 @@ import islpy as isl -# TODO if inames must be same for all stmt instances in lex sched, -# just keep single copy somewhere. After fixing this, combine -# LexScheduleStatement and LoopyStatement into single class - class LexScheduleStatement(object): - """A representation of a Loopy statement instance or set of - instances used in a :class:`LexSchedule`. + """A representation of a Loopy statement instance. .. attribute:: insn_id @@ -17,28 +12,34 @@ class LexScheduleStatement(object): A :class:`int` uniquely identifying the instruction. - .. attribute:: inames + .. attribute:: within_inames - A list of :class:`str` representing the inames applying to - this instruction, and possibly additional inames that do not - apply to this instruction but must be included because they - are used in another instruction in the program ordering. + A :class:`list` of :class:`str` inames identifying the loops within + which this statement will be executed. """ def __init__( self, insn_id, # loopy insn id - int_id, # sid int (statement id within LexSchedule) - inames, + int_id=None, # sid int (statement id within LexSchedule) + within_inames=None, # [string, ] ): self.insn_id = insn_id # string self.int_id = int_id - self.inames = inames # [string, ] + self.within_inames = within_inames def __str__(self): - return "%s:%d {%s}" % ( - self.insn_id, self.int_id, ",".join(self.inames)) + if self.int_id: + int_id = ":%d" % (self.int_id) + else: + int_id = "" + if self.within_inames: + within_inames = " {%s}" % (",".join(self.within_inames)) + else: + within_inames = "" + return "%s%s%s" % ( + self.insn_id, int_id, within_inames) class LexSchedule(object): @@ -52,6 +53,13 @@ class LexSchedule(object): statement instances to points in a lexicographic ordering. Points in lexicographic ordering represented as list of :class:`int`. + .. attribute:: map_domain_inames + + A list of :class:`str` representing the union of inames used + in all statement instances. `statement_var_name` and + `map_domain_inames` are the names of the dims of the space of the + ISL map domain. + .. 
attribute:: unused_param_name A :class:`str` that specifies the name of a dummy isl parameter @@ -91,6 +99,9 @@ class LexSchedule(object): # list of LexScheduleStatements self.lex_schedule = [] + # inames for statement instance space + self.map_domain_inames = [] + # make sure we don't have an iname name conflict assert not any( iname == self.statement_var_name for iname in knl.all_inames()) @@ -164,12 +175,11 @@ class LexSchedule(object): or lp_insn_id in include_only_insn_ids): # add sched item - self.lex_schedule.append( - ( + self.lex_schedule.append(( LexScheduleStatement( insn_id=lp_insn_id, int_id=len(self.lex_schedule), # int representing insn - inames=None), + ), next_insn_lex_pt[:] )) @@ -196,17 +206,9 @@ class LexSchedule(object): new_sched.append((stmt, lex_pt + [0]*(max_lex_dim-len(lex_pt)))) self.lex_schedule = new_sched - def add_symbolic_inames_to_statement_instances(self, inames): - # TODO if inames is same for all stmt instances, just keep single copy somewhere - - # append inames to lex tuples (matching specified order) - new_sched = [] - for stmt, lex_pt in self.lex_schedule: - new_sched.append(( - LexScheduleStatement(stmt.insn_id, stmt.int_id, tuple(inames[:])), - lex_pt - )) - self.lex_schedule = new_sched + def set_symbolic_inames_for_statement_instance_space(self, inames): + # set map_domain_inames + self.map_domain_inames = inames[:] def create_symbolic_isl_map(self, sid_to_dom, dom_inames_ordered): # create isl map representing lex schedule @@ -239,7 +241,7 @@ class LexSchedule(object): # create isl map return create_symbolic_isl_map_from_tuples( zip( - [((stmt.int_id,) + tuple(stmt.inames), lex_pt) + [((stmt.int_id,) + tuple(self.map_domain_inames), lex_pt) for stmt, lex_pt in self.lex_schedule], doms_to_intersect ), @@ -278,7 +280,7 @@ class LexSchedule(object): domain_elem = "[%s=%s,%s]" % ( self.statement_var_name, stmt.int_id, - ",".join(stmt.inames)) + ",".join(self.map_domain_inames)) sched_str += "%s -> %s;\n" % (domain_elem, lex_pt) sched_str += "}" return sched_str -- GitLab From 2d653ea7346b9cbe8920e6065bec447ee5a9c9c5 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 26 Aug 2019 20:44:03 -0500 Subject: [PATCH 115/415] active_inames->within_inames --- __init__.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/__init__.py b/__init__.py index 5b71a0171..28153982a 100644 --- a/__init__.py +++ b/__init__.py @@ -127,7 +127,7 @@ def check_schedule_validity( preprocessed_knl.all_inames(), [dom_after, dom_before], isl.dim_type.out) combined_doms = preprocessed_knl.get_inames_domain( - s_before.active_inames | s_after.active_inames) + s_before.within_inames | s_after.within_inames) from schedule_checker.sched_check_utils import ( order_var_names_to_match_islset) _all_dom_inames_ordered = order_var_names_to_match_islset( @@ -154,7 +154,7 @@ def check_schedule_validity( # the in-dims for an isl map, so if an iname is needed in one # statement tuple, then it is needed in all statement tuples. 
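For reference, a standalone sketch of how a SAME-style relationship between two statements can be written down directly with islpy. The names sp, ip, jp stand in for the primed variables, and this is not the project's create_dependency_constraint, only an illustration of the underlying constraint.

import islpy as isl

# variables: s, i, j for the "before" instance and sp, ip, jp for the
# "after" instance; n is a parameter (unused in this toy constraint)
v = isl.make_zero_and_vars(["s", "i", "j", "sp", "ip", "jp"], ["n"])
same_constraint = (
    v["s"].eq_set(v[0])           # before-statement id is 0
    & v["sp"].eq_set(v[0] + 1)    # after-statement id is 1
    & v["i"].eq_set(v["ip"])      # i = i'
    & v["j"].eq_set(v["jp"]))     # j = j'
print(same_constraint)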
sched.set_symbolic_inames_for_statement_instance_space( - all_dom_inames_ordered) + _all_dom_inames_ordered) lp_insn_id_to_lex_sched_id = sched.loopy_insn_id_to_lex_sched_id() if verbose: print("-"*80) @@ -177,7 +177,7 @@ def check_schedule_validity( } sched_map_symbolic = sched.create_symbolic_isl_map( - sid_to_dom, all_dom_inames_ordered) + sid_to_dom, _all_dom_inames_ordered) if verbose: print("sid_to_dom:\n", sid_to_dom) @@ -217,7 +217,7 @@ def check_schedule_validity( # maps statement instance to all statement instances that must occur later constraint_map = create_dependency_constraint( statement_pair_dep_set, - all_dom_inames_ordered, + _all_dom_inames_ordered, dom_before, dom_after, knl.loop_priority, -- GitLab From 6f0512ef00f2fe050e1b0c98afcede2cd62ae12d Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 26 Aug 2019 23:22:07 -0500 Subject: [PATCH 116/415] changed add_missing_dims_to_isl_set()->align_and_add_missing_dims_to_isl_set(); now any arbitrary initial iname ordering can be used; removed now unused order_var_names_to_match_islset(s) --- __init__.py | 40 ++++---------- dependency.py | 6 +-- example_pairwise_schedule_validity.py | 31 ++++++++++- sched_check_utils.py | 78 +++++++++++---------------- 4 files changed, 74 insertions(+), 81 deletions(-) diff --git a/__init__.py b/__init__.py index 28153982a..958d3ff67 100644 --- a/__init__.py +++ b/__init__.py @@ -19,7 +19,6 @@ def check_schedule_validity( ) from schedule_checker.sched_check_utils import ( prettier_map_string, - order_var_names_to_match_islsets, ) # Preprocess if not already preprocessed @@ -107,34 +106,15 @@ def check_schedule_validity( # include them and set them equal to a dummy variable. # Get a consistent iname ordering to use in our maps - # combined_doms is only used for printing (map.gist(dom)) - # and for getting a consistent iname ordering to use in our maps - combined_doms = preprocessed_knl.get_inames_domain( - s_before.within_inames | s_after.within_inames) - # TODO not guaranteed to work - # TODO remove this and make domain processing more robust - - # Get all inames now in order to maintain list with consistent ordering # This will help keep isl maps/sets compatible - - # TODO We're starting with an order matching the domains - # so that we don't have to worry about reordering isl sets/maps later - # and if we don't, assertions will fail. Later, improve this so we can - # start with arbitrary ordering of inames, or find some other way to - # make this more intuitive+robust. 
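A minimal islpy sketch (toy domain, plain API calls rather than the project's helpers) of prepending a 'statement' dimension to an iname domain, which is what makes a domain compatible with the schedule map's [statement, inames] space:

import islpy as isl

dom = isl.BasicSet("[n] -> { [i, j] : 0 <= i < n and 0 <= j < n }")
dom = dom.insert_dims(isl.dim_type.set, 0, 1)
dom = dom.set_dim_name(isl.dim_type.set, 0, "statement")
print(dom)  # [n] -> { [statement, i, j] : 0 <= i < n and 0 <= j < n }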
- import islpy as isl - all_dom_inames_ordered = order_var_names_to_match_islsets( - preprocessed_knl.all_inames(), [dom_after, dom_before], isl.dim_type.out) - - combined_doms = preprocessed_knl.get_inames_domain( - s_before.within_inames | s_after.within_inames) + # TODO make it unnecessary to track this ordering from schedule_checker.sched_check_utils import ( - order_var_names_to_match_islset) - _all_dom_inames_ordered = order_var_names_to_match_islset( - preprocessed_knl.all_inames(), combined_doms, isl.dim_type.out) - print(all_dom_inames_ordered) - print(_all_dom_inames_ordered) - #assert all_dom_inames_ordered == _all_dom_inames_ordered + list_var_names_in_isl_sets, + ) + consistent_iname_ordering = list_var_names_in_isl_sets( + [dom_before, dom_after]) + print("iname ordering:", consistent_iname_ordering) + assert set(consistent_iname_ordering).issubset(knl.all_inames()) # Create a mapping of {statement instance: lex point} # including only instructions involved in this dependency @@ -154,7 +134,7 @@ def check_schedule_validity( # the in-dims for an isl map, so if an iname is needed in one # statement tuple, then it is needed in all statement tuples. sched.set_symbolic_inames_for_statement_instance_space( - _all_dom_inames_ordered) + consistent_iname_ordering) lp_insn_id_to_lex_sched_id = sched.loopy_insn_id_to_lex_sched_id() if verbose: print("-"*80) @@ -177,7 +157,7 @@ def check_schedule_validity( } sched_map_symbolic = sched.create_symbolic_isl_map( - sid_to_dom, _all_dom_inames_ordered) + sid_to_dom, consistent_iname_ordering) if verbose: print("sid_to_dom:\n", sid_to_dom) @@ -217,7 +197,7 @@ def check_schedule_validity( # maps statement instance to all statement instances that must occur later constraint_map = create_dependency_constraint( statement_pair_dep_set, - _all_dom_inames_ordered, + consistent_iname_ordering, dom_before, dom_after, knl.loop_priority, diff --git a/dependency.py b/dependency.py index e71e77b31..47dc32f86 100644 --- a/dependency.py +++ b/dependency.py @@ -65,7 +65,7 @@ def create_dependency_constraint( make_islvars_with_var_primes, append_apostrophes, add_dims_to_isl_set, - add_missing_dims_to_isl_set, + align_and_add_missing_dims_to_isl_set, create_new_set_with_primes, ) # This function uses the dependency given to create the following constraint: @@ -224,10 +224,10 @@ def create_dependency_constraint( [statement_var_name_prime], statement_var_pose) # insert inames missing from doms to enable intersection - domain_to_intersect = add_missing_dims_to_isl_set( + domain_to_intersect = align_and_add_missing_dims_to_isl_set( domain_to_intersect, isl.dim_type.out, [statement_var_name] + all_dom_inames_ordered) - range_to_intersect = add_missing_dims_to_isl_set( + range_to_intersect = align_and_add_missing_dims_to_isl_set( range_to_intersect, isl.dim_type.out, append_apostrophes([statement_var_name] + all_dom_inames_ordered)) diff --git a/example_pairwise_schedule_validity.py b/example_pairwise_schedule_validity.py index 0cca2d18a..4a62c791d 100644 --- a/example_pairwise_schedule_validity.py +++ b/example_pairwise_schedule_validity.py @@ -5,7 +5,8 @@ from schedule_checker import check_schedule_validity # Choose kernel ---------------------------------------------------------- -knl_choice = "example" +#knl_choice = "example" +knl_choice = "unused_inames" #knl_choice = "matmul" #knl_choice = "scan" #knl_choice = "dependent_domain" @@ -49,6 +50,34 @@ if knl_choice == "example": #knl = lp.prioritize_loops(knl, "i,k,j") knl = lp.prioritize_loops(knl, "i,k") knl = 
lp.prioritize_loops(knl, "i,j") +if knl_choice == "unused_inames": + knl = lp.make_kernel( + [ + "{[i,ii]: 0<=itemp = b[i,k] {id=insn_a} + end + for j + a[i,j] = temp + 1 {id=insn_b,dep=insn_a} + end + end + """, + name="unused_inames", + assumptions="pi,pj,pk >= 1", + lang_version=(2018, 2) + ) + knl = lp.add_and_infer_dtypes( + knl, + {"b": np.float32}) + #knl = lp.tag_inames(knl, {"i": "l.0"}) + #knl = lp.prioritize_loops(knl, "i,k,j") + knl = lp.prioritize_loops(knl, "i,k") + knl = lp.prioritize_loops(knl, "i,j") elif knl_choice == "matmul": bsize = 16 knl = lp.make_kernel( diff --git a/sched_check_utils.py b/sched_check_utils.py index 482762864..b081d9917 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -25,32 +25,33 @@ def add_dims_to_isl_set(isl_set, dim_type, names, new_pose_start): return new_set -def is_ordered_sublist(sub_list, full_list): - full_idx = 0 - sub_idx = 0 - while sub_idx < len(sub_list) and full_idx < len(full_list): - if sub_list[sub_idx] == full_list[full_idx]: - sub_idx += 1 - full_idx += 1 - return sub_idx == len(sub_list) - - -def add_missing_dims_to_isl_set(isl_set, dim_type, all_dim_names): - # assumes vars in set are ordered subset of all_dim_names - assert is_ordered_sublist( - isl_set.get_var_names(dim_type), - all_dim_names, - ) +def align_and_add_missing_dims_to_isl_set(isl_set, dim_type, desired_dims_ordered): + assert set(isl_set.get_var_names(dim_type)).issubset(desired_dims_ordered) + + other_dim_type = isl.dim_type.param + other_dim_len = len(isl_set.get_var_names(other_dim_type)) new_set = isl_set.copy() - for i, name in enumerate(all_dim_names): - if i >= new_set.n_dim() or \ - new_set.get_dim_name(dim_type, i) != name: - # insert missing dim + for desired_pose, name in enumerate(desired_dims_ordered): + # if iname doesn't exist in set, add dim: + if not name in new_set.get_var_names(dim_type): + # insert missing dim in correct location new_set = new_set.insert_dims( - dim_type, i, 1 + dim_type, desired_pose, 1 ).set_dim_name( - dim_type, i, name) + dim_type, desired_pose, name) + else: # iname exists in set + current_pose = new_set.find_dim_by_name(dim_type, name) + if current_pose != desired_pose: + # move_dims(dst_type, dst_pose, src_type, src_pose, n) + + # first move to other dim because isl is stupid + new_set = new_set.move_dims( + other_dim_type, other_dim_len, dim_type, current_pose, 1) + # TODO is this safe? 
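A standalone demonstration of the two-step move performed here, on a toy set: the dimension is first parked in the parameter space and then moved back into the desired position, since, as the comments note, moving a dim directly within the same dim_type does not work.

import islpy as isl

s = isl.Set("{ [j, i] : 0 <= i < 4 and 0 <= j < 4 }")
n_params = len(s.get_var_names(isl.dim_type.param))
j_pos = s.find_dim_by_name(isl.dim_type.set, "j")
# park "j" in the param space, then bring it back at position 1
s = s.move_dims(isl.dim_type.param, n_params, isl.dim_type.set, j_pos, 1)
s = s.move_dims(isl.dim_type.set, 1, isl.dim_type.param, n_params, 1)
print(s.get_var_names(isl.dim_type.set))  # ['i', 'j']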
+ # now move it where we actually want it + new_set = new_set.move_dims( + dim_type, desired_pose, other_dim_type, other_dim_len, 1) return new_set @@ -100,30 +101,13 @@ def all_iname_domains_equal(knl): return True -def order_var_names_to_match_islset(var_names, islset, set_dim=isl.dim_type.out): - # returns subset of var_names found in islset in - # order matching the islset variables - name_order = islset.get_var_names(set_dim) - names_ordered_to_match_islset = [] - for v in name_order: - if v in var_names: - names_ordered_to_match_islset.append(v) - return names_ordered_to_match_islset - - -def order_var_names_to_match_islsets(var_names, islset_list, set_dim=isl.dim_type.out): - # returns subset of var_names found in islset in - # order matching the islset variables - name_order = [] - for islset in islset_list: - name_order.extend( - [v for v in islset.get_var_names(set_dim) - if v not in name_order]) - names_ordered_to_match_islsets = [] - for v in name_order: - if v in var_names: - names_ordered_to_match_islsets.append(v) - return names_ordered_to_match_islsets +def list_var_names_in_isl_sets( + isl_sets, + set_dim=isl.dim_type.set): + inames = set() + for isl_set in isl_sets: + inames.update(isl_set.get_var_names(set_dim)) + return list(inames) def create_explicit_map_from_tuples(tuple_pairs, space): @@ -232,7 +216,7 @@ def create_symbolic_isl_map_from_tuples( # if there are any dimensions in dom that are missing from # map_from_set, we have a problem I think? # (assertion checks this in add_missing... - dom_with_all_inames = add_missing_dims_to_isl_set( + dom_with_all_inames = align_and_add_missing_dims_to_isl_set( dom, isl.dim_type.out, space_in_names, ) -- GitLab From 0e87b45b5a3c1238d65c2d2712f0f10b2af2529a Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 27 Aug 2019 01:48:19 -0500 Subject: [PATCH 117/415] removed unnecessary state (instance variable) from LexSchedule; now domain inames are passed directly to the isl map creation method rather than storing them as part of the LexSchedule --- __init__.py | 11 ++--------- schedule.py | 43 ++++++++++++++++++++++++------------------- 2 files changed, 26 insertions(+), 28 deletions(-) diff --git a/__init__.py b/__init__.py index 958d3ff67..7b201f0fb 100644 --- a/__init__.py +++ b/__init__.py @@ -114,7 +114,8 @@ def check_schedule_validity( consistent_iname_ordering = list_var_names_in_isl_sets( [dom_before, dom_after]) print("iname ordering:", consistent_iname_ordering) - assert set(consistent_iname_ordering).issubset(knl.all_inames()) + print("all inames:", knl.all_inames()) + assert set(consistent_iname_ordering).issubset(knl.all_inames()) # TODO remove assert # Create a mapping of {statement instance: lex point} # including only instructions involved in this dependency @@ -127,14 +128,6 @@ def check_schedule_validity( #print("LexSchedule before processing:") #print(sched) - # Right now, statement tuples consist of single int. - # Add all inames from combined domains to statement tuples. - # This may include inames not used in every instruction, - # but all in-tuples need to match because they will become - # the in-dims for an isl map, so if an iname is needed in one - # statement tuple, then it is needed in all statement tuples. 
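For context, an explicit two-dimensional version of the lexicographic ordering that these schedule maps target, written out by hand (the project builds the corresponding map symbolically for however many lex dims the schedule needs):

import islpy as isl

lex_order_2d = isl.Map(
    "{ [l0, l1] -> [m0, m1] : l0 < m0 or (l0 = m0 and l1 < m1) }")
print(isl.Map("{ [0, 5] -> [1, 0] }").is_subset(lex_order_2d))  # True
print(isl.Map("{ [1, 0] -> [0, 5] }").is_subset(lex_order_2d))  # False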
- sched.set_symbolic_inames_for_statement_instance_space( - consistent_iname_ordering) lp_insn_id_to_lex_sched_id = sched.loopy_insn_id_to_lex_sched_id() if verbose: print("-"*80) diff --git a/schedule.py b/schedule.py index 0d068f98d..9439227b5 100644 --- a/schedule.py +++ b/schedule.py @@ -53,13 +53,6 @@ class LexSchedule(object): statement instances to points in a lexicographic ordering. Points in lexicographic ordering represented as list of :class:`int`. - .. attribute:: map_domain_inames - - A list of :class:`str` representing the union of inames used - in all statement instances. `statement_var_name` and - `map_domain_inames` are the names of the dims of the space of the - ISL map domain. - .. attribute:: unused_param_name A :class:`str` that specifies the name of a dummy isl parameter @@ -99,9 +92,6 @@ class LexSchedule(object): # list of LexScheduleStatements self.lex_schedule = [] - # inames for statement instance space - self.map_domain_inames = [] - # make sure we don't have an iname name conflict assert not any( iname == self.statement_var_name for iname in knl.all_inames()) @@ -206,12 +196,21 @@ class LexSchedule(object): new_sched.append((stmt, lex_pt + [0]*(max_lex_dim-len(lex_pt)))) self.lex_schedule = new_sched - def set_symbolic_inames_for_statement_instance_space(self, inames): - # set map_domain_inames - self.map_domain_inames = inames[:] + def create_symbolic_isl_map( + self, + sid_to_dom, + dom_inames_ordered): + + """Create isl map representing lex schedule + + .. arg:: dom_inames_ordered - def create_symbolic_isl_map(self, sid_to_dom, dom_inames_ordered): - # create isl map representing lex schedule + A list of :class:`str` representing the union of inames used + in all statement instances. `statement_var_name` and + `dom_inames_ordered` are the names of the dims of the space of the + ISL map domain. + + """ from schedule_checker.sched_check_utils import ( create_symbolic_isl_map_from_tuples, @@ -238,10 +237,17 @@ class LexSchedule(object): sid_to_dom[stmt.int_id], isl.dim_type.out, [self.statement_var_name], 0)) + # Right now, statement tuples consist of single int. + # Add all inames from combined domains to domain tuples. + # This may include inames not used in every instruction, + # but all in-tuples need to match because they will become + # the in-dims for an isl map, so if an iname is needed in one + # statement tuple, then it is needed in all statement tuples. 
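A toy illustration (hand-written map and domain, not produced by this method) of restricting a schedule-style map to a statement's iname domain with intersect_domain:

import islpy as isl

sched = isl.Map(
    "[n] -> { [statement, i] -> [i, statement] : 0 <= statement <= 1 }")
dom = isl.Set("[n] -> { [statement, i] : 0 <= i < n }")
print(sched.intersect_domain(dom))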
+ # create isl map return create_symbolic_isl_map_from_tuples( zip( - [((stmt.int_id,) + tuple(self.map_domain_inames), lex_pt) + [((stmt.int_id,) + tuple(dom_inames_ordered), lex_pt) for stmt, lex_pt in self.lex_schedule], doms_to_intersect ), @@ -277,10 +283,9 @@ class LexSchedule(object): def __str__(self): sched_str = "{\n" for stmt, lex_pt in self.lex_schedule: - domain_elem = "[%s=%s,%s]" % ( + domain_elem = "[%s=%s,]" % ( self.statement_var_name, - stmt.int_id, - ",".join(self.map_domain_inames)) + stmt.int_id) sched_str += "%s -> %s;\n" % (domain_elem, lex_pt) sched_str += "}" return sched_str -- GitLab From 6a41be2db6035d2857b26b9f351fef9b18faf892 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 27 Aug 2019 03:46:40 -0500 Subject: [PATCH 118/415] eliminated need to keep consistently ordered list of inames for isl set/map compatibility; instead reordering dims by name when necessary; renamed align_and_add_missing_dims_to_isl_set()->reorder_dims_by_name() and added options to *not* add missing inames while reordering --- __init__.py | 52 ++++++++++++++++++++++++++---- dependency.py | 21 ++++++++---- example_dependency_checking.py | 2 +- lexicographic_order_map.py | 1 + sched_check_utils.py | 59 ++++++++++++++-------------------- schedule.py | 10 ++++-- 6 files changed, 95 insertions(+), 50 deletions(-) diff --git a/__init__.py b/__init__.py index 7b201f0fb..81c8a7e05 100644 --- a/__init__.py +++ b/__init__.py @@ -149,8 +149,7 @@ def check_schedule_validity( lp_insn_id_to_lex_sched_id[s_after.insn_id]: dom_after, } - sched_map_symbolic = sched.create_symbolic_isl_map( - sid_to_dom, consistent_iname_ordering) + sched_map_symbolic = sched.create_symbolic_isl_map(sid_to_dom) if verbose: print("sid_to_dom:\n", sid_to_dom) @@ -177,20 +176,18 @@ def check_schedule_validity( # maps each statement instance to all statement instances occuring later sio = get_statement_ordering_map( sched_map_symbolic, lex_order_map_symbolic) - """ + if verbose: - print("statement instance ordering symbolic:") + print("statement instance ordering:") print(prettier_map_string(sio)) print("SIO space (statement instances -> statement instances):") print(sio.space) print("-"*80) - """ # create a map representing constraints from the dependency, # maps statement instance to all statement instances that must occur later constraint_map = create_dependency_constraint( statement_pair_dep_set, - consistent_iname_ordering, dom_before, dom_after, knl.loop_priority, @@ -199,12 +196,55 @@ def check_schedule_validity( sched.statement_var_name, ) + # align constraint map spaces to match sio so we can compare them + if verbose: + print("constraint map space (before aligning):") + print(constraint_map.space) + + # align params aligned_constraint_map = constraint_map.align_params(sio.space) + + # align in_ dims + import islpy as isl + from schedule_checker.sched_check_utils import ( + reorder_dims_by_name, + append_apostrophes, + ) + sio_in_names = sio.space.get_var_names(isl.dim_type.in_) + aligned_constraint_map = reorder_dims_by_name( + aligned_constraint_map, + isl.dim_type.in_, + sio_in_names, + add_missing=False, + new_names_are_permutation_only=True, + ) + + # align out dims + aligned_constraint_map = reorder_dims_by_name( + aligned_constraint_map, + isl.dim_type.out, + append_apostrophes(sio_in_names), + # TODO sio out names are only pretending to have apostrophes; confusing + add_missing=False, + new_names_are_permutation_only=True, + ) + if verbose: + print("constraint map space (after aligning):") + 
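A small end-to-end toy of the overall check (hand-written maps, two statements sharing one loop): compose the schedule map with a lexicographic order map to obtain a statement-instance ordering, then verify that a dependency-style map is a subset of it.

import islpy as isl

# schedule: statement 0 at iteration i runs at lex point (i, 0),
#           statement 1 at iteration i runs at lex point (i, 1)
s = isl.Map("{ [stmt, i] -> [i, stmt] : 0 <= stmt <= 1 and 0 <= i < 3 }")
lex = isl.Map("{ [l0, l1] -> [m0, m1] : l0 < m0 or (l0 = m0 and l1 < m1) }")
sio = s.apply_range(lex).apply_range(s.reverse())

# dependency-style requirement: [0, i] must precede [1, i] for every i
dep = isl.Map("{ [0, i] -> [1, i] : 0 <= i < 3 }")
print(dep.is_subset(sio))  # True for this toy schedule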
print(aligned_constraint_map.space) print("constraint map:") print(prettier_map_string(aligned_constraint_map)) assert aligned_constraint_map.space == sio.space + assert ( + aligned_constraint_map.space.get_var_names(isl.dim_type.in_) + == sio.space.get_var_names(isl.dim_type.in_)) + assert ( + aligned_constraint_map.space.get_var_names(isl.dim_type.out) + == append_apostrophes(sio.space.get_var_names(isl.dim_type.out))) + assert ( + aligned_constraint_map.space.get_var_names(isl.dim_type.param) + == sio.space.get_var_names(isl.dim_type.param)) if not aligned_constraint_map.is_subset(sio): diff --git a/dependency.py b/dependency.py index 47dc32f86..1c9ee5720 100644 --- a/dependency.py +++ b/dependency.py @@ -52,7 +52,6 @@ def _convert_constraint_set_to_map(constraint_set, mv_count, src_position=None): def create_dependency_constraint( statement_dep_set, - all_dom_inames_ordered, dom_before_constraint_set, dom_after_constraint_set, loop_priorities, @@ -60,17 +59,25 @@ def create_dependency_constraint( unused_param_name, statement_var_name, statement_var_pose=0, + all_dom_inames_ordered=None, ): from schedule_checker.sched_check_utils import ( make_islvars_with_var_primes, append_apostrophes, add_dims_to_isl_set, - align_and_add_missing_dims_to_isl_set, + reorder_dims_by_name, create_new_set_with_primes, ) # This function uses the dependency given to create the following constraint: # Statement [s,i,j] comes before statement [s',i',j'] iff + from schedule_checker.sched_check_utils import ( + list_var_names_in_isl_sets, + ) + if all_dom_inames_ordered is None: + all_dom_inames_ordered = list_var_names_in_isl_sets( + [dom_before_constraint_set, dom_after_constraint_set]) + # create some (ordered) isl vars to use, e.g., {s, i, j, s', i', j'} islvars = make_islvars_with_var_primes( [statement_var_name]+all_dom_inames_ordered, @@ -224,13 +231,15 @@ def create_dependency_constraint( [statement_var_name_prime], statement_var_pose) # insert inames missing from doms to enable intersection - domain_to_intersect = align_and_add_missing_dims_to_isl_set( + domain_to_intersect = reorder_dims_by_name( domain_to_intersect, isl.dim_type.out, - [statement_var_name] + all_dom_inames_ordered) - range_to_intersect = align_and_add_missing_dims_to_isl_set( + [statement_var_name] + all_dom_inames_ordered, + add_missing=True) + range_to_intersect = reorder_dims_by_name( range_to_intersect, isl.dim_type.out, - append_apostrophes([statement_var_name] + all_dom_inames_ordered)) + append_apostrophes([statement_var_name] + all_dom_inames_ordered), + add_missing=True) # intersect doms map_with_loop_domain_constraints = all_constraints_map.intersect_domain( diff --git a/example_dependency_checking.py b/example_dependency_checking.py index b81c52de7..dec2d5abb 100644 --- a/example_dependency_checking.py +++ b/example_dependency_checking.py @@ -141,13 +141,13 @@ dom_after = knl.get_inames_domain( loop_priority = None # TODO constraint_map = create_dependency_constraint( statement_pair_dep_set, - all_necessary_inames_ordered, dom_before, dom_after, loop_priority, insnid_to_int_sid, unused_param_name, statement_var, + all_dom_inames_ordered=all_necessary_inames_ordered, ) print("constraint map space:") print(constraint_map.space) diff --git a/lexicographic_order_map.py b/lexicographic_order_map.py index 05d5111c0..730ffc081 100644 --- a/lexicographic_order_map.py +++ b/lexicographic_order_map.py @@ -28,6 +28,7 @@ def get_statement_ordering_map(sched_map, lex_map): # statement ordering: # map each statement instance to all 
statement instances that occur later # S -> L -> S^-1 + # TODO apostrophes aren't really there for range, this is confusing return sched_map.apply_range(lex_map).apply_range(sched_map.reverse()) diff --git a/sched_check_utils.py b/sched_check_utils.py index b081d9917..7f105ff38 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -25,8 +25,20 @@ def add_dims_to_isl_set(isl_set, dim_type, names, new_pose_start): return new_set -def align_and_add_missing_dims_to_isl_set(isl_set, dim_type, desired_dims_ordered): +def reorder_dims_by_name( + isl_set, dim_type, desired_dims_ordered, + add_missing=False, new_names_are_permutation_only=False): + assert set(isl_set.get_var_names(dim_type)).issubset(desired_dims_ordered) + assert dim_type != isl.dim_type.param + + if new_names_are_permutation_only and ( + set(isl_set.get_var_names(dim_type)) + != set(desired_dims_ordered)): + raise ValueError( + "Var name sets must match with new_names_are_permutation_only=True. " + "isl vars: %s, desired dims: %s" + % (isl_set.get_var_names(dim_type), desired_dims_ordered)) other_dim_type = isl.dim_type.param other_dim_len = len(isl_set.get_var_names(other_dim_type)) @@ -34,13 +46,14 @@ def align_and_add_missing_dims_to_isl_set(isl_set, dim_type, desired_dims_ordere new_set = isl_set.copy() for desired_pose, name in enumerate(desired_dims_ordered): # if iname doesn't exist in set, add dim: - if not name in new_set.get_var_names(dim_type): - # insert missing dim in correct location - new_set = new_set.insert_dims( - dim_type, desired_pose, 1 - ).set_dim_name( - dim_type, desired_pose, name) - else: # iname exists in set + if name not in new_set.get_var_names(dim_type): + if add_missing: + # insert missing dim in correct location + new_set = new_set.insert_dims( + dim_type, desired_pose, 1 + ).set_dim_name( + dim_type, desired_pose, name) + else: # iname exists in set current_pose = new_set.find_dim_by_name(dim_type, name) if current_pose != desired_pose: # move_dims(dst_type, dst_pose, src_type, src_pose, n) @@ -216,9 +229,11 @@ def create_symbolic_isl_map_from_tuples( # if there are any dimensions in dom that are missing from # map_from_set, we have a problem I think? # (assertion checks this in add_missing... - dom_with_all_inames = align_and_add_missing_dims_to_isl_set( + dom_with_all_inames = reorder_dims_by_name( dom, isl.dim_type.out, space_in_names, + add_missing=True, + new_names_are_permutation_only=False, ) # intersect domain with this map @@ -376,29 +391,3 @@ def get_orderings_of_length_n( return_first_found=return_first_found, ) return orderings - - -# currently unused: -""" -def add_missing_set_dims_to_map_indims(islmap, islset): - new_map = islmap.copy() - for i in range(islset.n_dim()): - new_dim_name = islset.get_dim_name(isl.dim_type.out, i) - # does new_dim_name already exist in map? - dim_idx = new_map.find_dim_by_name(isl.dim_type.in_, new_dim_name) - if dim_idx == -1: - # new map needs dim, insert it - new_map = new_map.insert_dims(isl.dim_type.in_, i, 1) - new_map = new_map.set_dim_name(isl.dim_type.in_, i, new_dim_name) - else: - # new_map already has new_dim_name - if dim_idx == i: - # and it's already in the right spot - continue - else: - # move it - # TODO how do we move these? 
move_dims doesn't work for same dim_type - print("%s not in right spot" % (new_dim_name)) - raise ValueError("(this should not happen)") - return new_map -""" diff --git a/schedule.py b/schedule.py index 9439227b5..5c8e4ac29 100644 --- a/schedule.py +++ b/schedule.py @@ -199,7 +199,7 @@ class LexSchedule(object): def create_symbolic_isl_map( self, sid_to_dom, - dom_inames_ordered): + dom_inames_ordered=None): """Create isl map representing lex schedule @@ -219,6 +219,12 @@ class LexSchedule(object): assert len(sid_to_dom) == len(self.lex_schedule) + from schedule_checker.sched_check_utils import ( + list_var_names_in_isl_sets, + ) + if dom_inames_ordered is None: + dom_inames_ordered = list_var_names_in_isl_sets(sid_to_dom.values()) + # create an isl space # {('statement', used in >=1 statement domain>) -> # (lexicographic ordering dims)} @@ -234,7 +240,7 @@ class LexSchedule(object): for stmt, _ in self.lex_schedule: doms_to_intersect.append( add_dims_to_isl_set( - sid_to_dom[stmt.int_id], isl.dim_type.out, + sid_to_dom[stmt.int_id], isl.dim_type.set, [self.statement_var_name], 0)) # Right now, statement tuples consist of single int. -- GitLab From d264a288789fc0163156ee8556cba4435a732a6e Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 27 Aug 2019 04:05:34 -0500 Subject: [PATCH 119/415] removing unnecessary creation of iname list --- __init__.py | 19 ------------------- schedule.py | 13 ++++++++----- 2 files changed, 8 insertions(+), 24 deletions(-) diff --git a/__init__.py b/__init__.py index 81c8a7e05..f0e953548 100644 --- a/__init__.py +++ b/__init__.py @@ -98,25 +98,6 @@ def check_schedule_validity( s_before = statement_pair_dep_set.statement_before s_after = statement_pair_dep_set.statement_after - # The isl map representing the schedule maps - # statement instances -> lex time - # The 'in_' dim vars need to match for all sched items in the map, - # Instructions that use fewer inames will still need to - # have the unused inames in their 'in_' dim vars, so we'll - # include them and set them equal to a dummy variable. - - # Get a consistent iname ordering to use in our maps - # This will help keep isl maps/sets compatible - # TODO make it unnecessary to track this ordering - from schedule_checker.sched_check_utils import ( - list_var_names_in_isl_sets, - ) - consistent_iname_ordering = list_var_names_in_isl_sets( - [dom_before, dom_after]) - print("iname ordering:", consistent_iname_ordering) - print("all inames:", knl.all_inames()) - assert set(consistent_iname_ordering).issubset(knl.all_inames()) # TODO remove assert - # Create a mapping of {statement instance: lex point} # including only instructions involved in this dependency sched = LexSchedule(scheduled_knl, include_only_insn_ids=[ diff --git a/schedule.py b/schedule.py index 5c8e4ac29..23ec14f0f 100644 --- a/schedule.py +++ b/schedule.py @@ -243,12 +243,15 @@ class LexSchedule(object): sid_to_dom[stmt.int_id], isl.dim_type.set, [self.statement_var_name], 0)) + # The isl map representing the schedule maps + # statement instances -> lex time + # The 'in_' dim vars need to match for all sched items in the map, + # Instructions that use fewer inames will still need to + # have the unused inames in their 'in_' dim vars, so we'll + # include them and set them equal to a dummy variable. + # Right now, statement tuples consist of single int. - # Add all inames from combined domains to domain tuples. 
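A pure-Python sketch of the lex-point padding idea that appears in the schedule code nearby: every lex point is brought up to the maximum dimensionality by appending zeros.

lex_points = [[0], [1, 0], [1, 1, 2]]
max_dims = max(len(p) for p in lex_points)
padded = [p + [0] * (max_dims - len(p)) for p in lex_points]
print(padded)  # [[0, 0, 0], [1, 0, 0], [1, 1, 2]]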
- # This may include inames not used in every instruction, - # but all in-tuples need to match because they will become - # the in-dims for an isl map, so if an iname is needed in one - # statement tuple, then it is needed in all statement tuples. + # Add all inames from combined domains to map domain tuples. # create isl map return create_symbolic_isl_map_from_tuples( -- GitLab From 2c54b29016b7f414fc651cc8b3bbeefad22daa8c Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 27 Aug 2019 11:05:46 -0500 Subject: [PATCH 120/415] added docstrings for LexSchedule and its methods --- sched_check_utils.py | 1 + schedule.py | 41 ++++++++++++++++++++++++++++++++++------- 2 files changed, 35 insertions(+), 7 deletions(-) diff --git a/sched_check_utils.py b/sched_check_utils.py index 7f105ff38..6c6e87332 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -291,6 +291,7 @@ def get_concurrent_inames(knl): def _get_insn_id_from_sched_item(knl, sched_item): + # TODO could use loopy's sched_item_to_insn_id() from loopy.schedule import Barrier if isinstance(sched_item, Barrier): return sched_item.originating_insn_id diff --git a/schedule.py b/schedule.py index 23ec14f0f..92f92e6b4 100644 --- a/schedule.py +++ b/schedule.py @@ -88,6 +88,15 @@ class LexSchedule(object): knl, include_only_insn_ids=None, ): + """ + :arg kernel: A :class:`LoopKernel` whose instructions will be + described by this :class:`LexSchedule`. + + :arg include_only_insn_ids: A list of :class:`str` instruction ids + specifying which instructions to include in the mapping. If set + to None, all insructions will be included. + + """ # list of LexScheduleStatements self.lex_schedule = [] @@ -183,13 +192,23 @@ class LexSchedule(object): self.pad_lex_pts_with_zeros() def loopy_insn_id_to_lex_sched_id(self): + """Return a dictionary mapping insn_id to int_id, where `insn_id` and + `int_id` refer to the `insn_id` and `int_id` attributes of + :class:`LexScheduleStatement`. + """ return dict([(stmt.insn_id, stmt.int_id) for stmt, _ in self.lex_schedule]) def max_lex_dims(self): return max(len(lex_pt) for _, lex_pt in self.lex_schedule) def pad_lex_pts_with_zeros(self): - # pad lex points with zeros so that all points have same number of dims + """Find the maximum number of lexicographic dimensions represented + in the lexicographic ordering, and if any + :class:`LexScheduleStatement` maps to a point in lexicographic + time with fewer dimensions, add a zero for each of the missing + dimensions. + """ + max_lex_dim = self.max_lex_dims() new_sched = [] for stmt, lex_pt in self.lex_schedule: @@ -200,15 +219,18 @@ class LexSchedule(object): self, sid_to_dom, dom_inames_ordered=None): - """Create isl map representing lex schedule - .. arg:: dom_inames_ordered + .. arg:: sid_to_dom: A :class:`dict` mapping integer ids to domains, + where integer ids are instances of the `int_id` attribute of + :class:`LexScheduleStatement`, and domains are the + :class:`islpy.BasicSet` representing the domain for this + statement. - A list of :class:`str` representing the union of inames used - in all statement instances. `statement_var_name` and - `dom_inames_ordered` are the names of the dims of the space of the - ISL map domain. + .. arg:: dom_inames_ordered: A list of :class:`str` representing + the union of inames used in all statement instances. + `statement_var_name` and `dom_inames_ordered` are the names + of the dims of the space of the ISL map domain. 
""" @@ -267,6 +289,11 @@ class LexSchedule(object): for i in range(self.max_lex_dims())] def get_lex_order_map_for_symbolic_sched(self): + """Return an :class:`islpy.BasicMap` that maps each point in a + lexicographic ordering to every point that is + lexocigraphically greater. + """ + from schedule_checker.lexicographic_order_map import ( create_symbolic_lex_order_map, ) -- GitLab From 470654ad97a34845437f0ee6371bc0e2f80cb7c6 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 27 Aug 2019 11:15:08 -0500 Subject: [PATCH 121/415] added docstring for StatementPairDependencySet --- dependency.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/dependency.py b/dependency.py index 1c9ee5720..1813fd80b 100644 --- a/dependency.py +++ b/dependency.py @@ -7,6 +7,24 @@ class DependencyType: class StatementPairDependencySet(object): + """A set of dependencies between two statements. + + .. attribute:: statement_before + + A :class:`LexScheduleStatement` depended on by statement_after. + + .. attribute:: statement_after + + A :class:`LexScheduleStatement` which depends on statement_before. + + .. attribute:: deps + + A :class:`dict` mapping instances of :class:`DependencyType` to + the Loopy kernel inames involved in that particular + dependency relationship. + + """ + def __init__( self, statement_before, -- GitLab From 1e1204451bf618f1beec80298724efbd915df198 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 2 Sep 2019 18:08:35 -0500 Subject: [PATCH 122/415] minor changes to schedule docstrings --- schedule.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/schedule.py b/schedule.py index 92f92e6b4..bfbd789b6 100644 --- a/schedule.py +++ b/schedule.py @@ -51,7 +51,7 @@ class LexSchedule(object): A :class:`list` of (:class:`LexScheduleStatement`, :class:`list`) tuples, representing the program ordering as a map from statement instances to points in a lexicographic ordering. Points - in lexicographic ordering represented as list of :class:`int`. + in lexicographic ordering are represented as list of :class:`int`. .. attribute:: unused_param_name @@ -89,7 +89,7 @@ class LexSchedule(object): include_only_insn_ids=None, ): """ - :arg kernel: A :class:`LoopKernel` whose instructions will be + :arg knl: A :class:`LoopKernel` whose instructions will be described by this :class:`LexSchedule`. 
:arg include_only_insn_ids: A list of :class:`str` instruction ids -- GitLab From 5274862bafbc2430b2ce1b19129617ccc27a4d20 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 2 Sep 2019 18:13:50 -0500 Subject: [PATCH 123/415] raise error if someone attempts to build a schedule of length greater than 2 --- schedule.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/schedule.py b/schedule.py index bfbd789b6..0237fe829 100644 --- a/schedule.py +++ b/schedule.py @@ -107,6 +107,12 @@ class LexSchedule(object): assert not any( iname == self.unused_param_name for iname in knl.all_inames()) + if (include_only_insn_ids is None and len(knl.schedule) > 2 + ) or len(include_only_insn_ids) > 2: + raise NotImplementedError( + "LexSchedule currently does not produce program orderings " + "with greater than 2 statements.") + from loopy.schedule import (EnterLoop, LeaveLoop, Barrier, RunInstruction) from loopy.kernel.data import ConcurrentTag -- GitLab From b2a258f34d4b8f86ac5bbc74064f5a0d89df7c58 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 2 Sep 2019 19:00:43 -0500 Subject: [PATCH 124/415] docstring for dependency type --- dependency.py | 33 +++++++++++++++++++++++++++++++++ schedule.py | 2 +- 2 files changed, 34 insertions(+), 1 deletion(-) diff --git a/dependency.py b/dependency.py index 1813fd80b..88bb17ef2 100644 --- a/dependency.py +++ b/dependency.py @@ -2,6 +2,38 @@ import islpy as isl class DependencyType: + """Strings specifying a particular type of dependency relationship. + + .. attribute:: SAME + + A :class:`str` specifying the following dependency relationship: + + If ``S = {i, j, ...}`` is a set of inames used in both statements + ``insn0`` and ``insn1``, and ``{i, j, ...}`` represent the values + of the inames in ``insn0``, and ``{i', j', ...}`` represent the + values of the inames in ``insn1``, then the dependency + ``insn0 happens before insn1 iff SAME({i, j})`` specifies that + ``insn0 happens before insn1 iff {i = i' and j = j' and ...}``. + Note that ``SAME({}) = True``. + + .. attribute:: PRIOR + + A :class:`str` specifying the following dependency relationship: + + If ``S = {i, j, k, ...}`` is a set of inames used in both statements + ``insn0`` and ``insn1``, and ``{i, j, k, ...}`` represent the values + of the inames in ``insn0``, and ``{i', j', k', ...}`` represent the + values of the inames in ``insn1``, then the dependency + ``insn0 happens before insn1 iff PRIOR({i, j, k})`` specifies one of + two possibilities, depending on whether the loop nest ordering is + known. If the loop nest ordering is unknown, then + ``insn0 happens before insn1 iff {i < i' and j < j' and k < k' ...}``. + If the loop nest ordering is known, the condition becomes + ``{i, j, k, ...}`` is lexicographically less than ``{i', j', k', ...}``, + i.e., ``i < i' or (i = i' and j < j') or (i = i' and j = j' and k < k') ...``. + + """ + SAME = "same" PRIOR = "prior" @@ -207,6 +239,7 @@ def create_dependency_constraint( from schedule_checker.lexicographic_order_map import ( get_lex_order_constraint ) + # TODO handle case where inames list is empty constraint_set = get_lex_order_constraint( islvars, inames_list_nest_ordered, diff --git a/schedule.py b/schedule.py index 0237fe829..d71fdc603 100644 --- a/schedule.py +++ b/schedule.py @@ -225,7 +225,7 @@ class LexSchedule(object): self, sid_to_dom, dom_inames_ordered=None): - """Create isl map representing lex schedule + """Create an isl map representing lex schedule .. 
arg:: sid_to_dom: A :class:`dict` mapping integer ids to domains, where integer ids are instances of the `int_id` attribute of -- GitLab From f133e25e5da23620d922d6d2ae37e8312a4117aa Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 2 Sep 2019 19:19:55 -0500 Subject: [PATCH 125/415] docstring for create_elementwise_comparison_conjunction_set --- dependency.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/dependency.py b/dependency.py index 88bb17ef2..8b7e6e92a 100644 --- a/dependency.py +++ b/dependency.py @@ -77,6 +77,26 @@ class StatementPairDependencySet(object): def create_elementwise_comparison_conjunction_set( names0, names1, islvars, op="eq"): + """Create a set constrained by the conjunction of conditions comparing + `names0` to `names1`. + + .. arg names0: A list of :class:`str` representing variable names. + + .. arg names1: A list of :class:`str` representing variable names. + + .. arg islvars: A dictionary from variable names to :class:`PwAff` + instances that represent each of the variables + (islvars may be produced by `islpy.make_zero_and_vars`). The key + '0' is also included and represents a :class:`PwAff` zero constant. + + .. arg op: A :class:`str` describing the operator to use when creating + the set constraints. Options: `eq` for `=`, `lt` for `<` + + .. return: A set involving `islvars` constrained by the constraints + `{names0[0] <op> names1[0] and names0[1] <op> names1[1] and ...}`. + + """ + # initialize set with constraint that is always true conj_set = islvars[0].eq_set(islvars[0]) -- GitLab From a804ff8bcc2417627c1ccb9717b36461c93825fe Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 2 Sep 2019 19:24:24 -0500 Subject: [PATCH 126/415] adding 'return' description to docstring for create_symbolic_isl_map --- schedule.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/schedule.py b/schedule.py index d71fdc603..5e1c10f1b 100644 --- a/schedule.py +++ b/schedule.py @@ -225,19 +225,25 @@ class LexSchedule(object): self, sid_to_dom, dom_inames_ordered=None): - """Create an isl map representing lex schedule + """Create an isl map representing lex schedule as a mapping + from each statement instance to all statement instances + occuring later. - .. arg:: sid_to_dom: A :class:`dict` mapping integer ids to domains, + .. arg sid_to_dom: A :class:`dict` mapping integer ids to domains, where integer ids are instances of the `int_id` attribute of :class:`LexScheduleStatement`, and domains are the :class:`islpy.BasicSet` representing the domain for this statement. - .. arg:: dom_inames_ordered: A list of :class:`str` representing + .. arg dom_inames_ordered: A list of :class:`str` representing the union of inames used in all statement instances. `statement_var_name` and `dom_inames_ordered` are the names of the dims of the space of the ISL map domain. + .. return: An :class:`islpy.Map` representing the lex schedule as + a mapping from each statement instance to all statement instances + occuring later.
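To make the returned map concrete, here is a minimal hand-written sketch (invented names and bounds, not produced by the code in this patch) of a schedule mapping statement instances [s, i] to lexicographic time [l0, l1]:

    import islpy as isl

    # Statement 0 runs at lex time (i, 0) and statement 1 at (i, 1), so for
    # every value of i, statement 0 precedes statement 1.
    sched_map = isl.Map(
        "[p] -> { [s, i] -> [l0, l1] : 0 <= i < p and l0 = i "
        "and ((s = 0 and l1 = 0) or (s = 1 and l1 = 1)) }")
    print(sched_map)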
+ """ from schedule_checker.sched_check_utils import ( -- GitLab From feb1cf6924a3eb92bf1ca323bd6c68c4c9367063 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 2 Sep 2019 19:57:41 -0500 Subject: [PATCH 127/415] added docstring for create_dependency_constraint --- dependency.py | 50 +++++++++++++++++++++++++++++++++++++++++++++++++- schedule.py | 4 ++-- 2 files changed, 51 insertions(+), 3 deletions(-) diff --git a/dependency.py b/dependency.py index 8b7e6e92a..f15cdd8d0 100644 --- a/dependency.py +++ b/dependency.py @@ -97,7 +97,6 @@ def create_elementwise_comparison_conjunction_set( """ - # initialize set with constraint that is always true conj_set = islvars[0].eq_set(islvars[0]) for n0, n1 in zip(names0, names1): @@ -131,6 +130,55 @@ def create_dependency_constraint( statement_var_pose=0, all_dom_inames_ordered=None, ): + """Create a statement dependency constraint represented as a map from + each statement instance to statement instances that must occur later, + i.e., ``{[s=0, i, j] -> [s'=1, i', j'] : condition on {i, j, i', j'}}`` + indicates that statement ``0`` comes before statment ``1`` when the + specified condition on inames ``i,j,i',j'`` is met. ``i'`` and ``j'`` + are the values of inames ``i`` and ``j`` in second statement instance. + + .. arg statement_dep_set: A :class:`StatementPairDependencySet` describing + the dependency relationship between the two statements. + + .. arg dom_before_constraint_set: A :class:`islpy.BasicSet` specifying the + domain for the 'before' statement in the relationship. + + .. arg dom_after_constraint_set: A :class:`islpy.BasicSet` specifying the + domain for the 'after' statement in the relationship. + + .. arg loop_priorities: A list of tuples from the ``loop_priority`` + attribute of :class:`loopy.LoopKernel` specifying the loop nest + ordering rules. + + .. arg insn_id_to_int: A :class:`dict` mapping insn_id to int_id, where + 'insn_id' and 'int_id' refer to the 'insn_id' and 'int_id' attributes + of :class:`LexScheduleStatement`. + + .. arg unused_param_name: A :class:`str` that specifies the name of a + dummy isl parameter assigned to variables in domain elements of the + isl map that represent inames unused in a particular statement + instance. The domain space of the generated isl map will have a + dimension for every iname used in any statement instance found in + the program ordering. An element in the domain of this map may + represent a statement instance that does not lie within iname x, but + will still need to assign a value to the x domain variable. In this + case, the parameter unused_param_name is is assigned to x. + + .. arg statement_var_name: A :class:`str` specifying the name of the + isl variable used to represent the unique :class:`int` statement id. + + .. arg statement_var_pose: A :class:`int` specifying which position in the + statement instance tuples holds the dimension representing the + statement id. Defaults to ``0``. + + .. arg all_dom_inames_ordered: A :class:`list` of :class:`str` specifying + an order for the dimensions representing inames. + + .. return: An :class:`islpy.Map` mapping each statement instance to all + statement instances that must occur later according to the constraints. 
+ + """ + from schedule_checker.sched_check_utils import ( make_islvars_with_var_primes, append_apostrophes, diff --git a/schedule.py b/schedule.py index 5e1c10f1b..c182550ee 100644 --- a/schedule.py +++ b/schedule.py @@ -107,8 +107,8 @@ class LexSchedule(object): assert not any( iname == self.unused_param_name for iname in knl.all_inames()) - if (include_only_insn_ids is None and len(knl.schedule) > 2 - ) or len(include_only_insn_ids) > 2: + if ((include_only_insn_ids is None and len(knl.schedule) > 2) + or len(include_only_insn_ids) > 2): raise NotImplementedError( "LexSchedule currently does not produce program orderings " "with greater than 2 statements.") -- GitLab From 1015927ddcee0bf52584e90ae8885772333dabea Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 2 Sep 2019 19:59:03 -0500 Subject: [PATCH 128/415] fixing docstring formatting --- schedule.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/schedule.py b/schedule.py index c182550ee..a2df9acb3 100644 --- a/schedule.py +++ b/schedule.py @@ -198,8 +198,8 @@ class LexSchedule(object): self.pad_lex_pts_with_zeros() def loopy_insn_id_to_lex_sched_id(self): - """Return a dictionary mapping insn_id to int_id, where `insn_id` and - `int_id` refer to the `insn_id` and `int_id` attributes of + """Return a dictionary mapping insn_id to int_id, where ``insn_id`` and + ``int_id`` refer to the ``insn_id`` and ``int_id`` attributes of :class:`LexScheduleStatement`. """ return dict([(stmt.insn_id, stmt.int_id) for stmt, _ in self.lex_schedule]) @@ -230,14 +230,14 @@ class LexSchedule(object): occuring later. .. arg sid_to_dom: A :class:`dict` mapping integer ids to domains, - where integer ids are instances of the `int_id` attribute of + where integer ids are instances of the ``int_id`` attribute of :class:`LexScheduleStatement`, and domains are the :class:`islpy.BasicSet` representing the domain for this statement. .. arg dom_inames_ordered: A list of :class:`str` representing the union of inames used in all statement instances. - `statement_var_name` and `dom_inames_ordered` are the names + ``statement_var_name`` and ``dom_inames_ordered`` are the names of the dims of the space of the ISL map domain. .. return: An :class:`islpy.Map` representing the lex schedule as -- GitLab From 7bedbbb4179d5452f16840c1c078501c0ebe6dd1 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 2 Sep 2019 20:21:52 -0500 Subject: [PATCH 129/415] added docstring to create_dependencies_from_legacy_kernel --- dependency.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/dependency.py b/dependency.py index f15cdd8d0..74b8a0978 100644 --- a/dependency.py +++ b/dependency.py @@ -368,6 +368,23 @@ def create_dependency_constraint( def create_dependencies_from_legacy_knl(knl): + """Return a list of :class:`StatementPairDependySet` instances created + for a :class:`loopy.LoopKernel` containing legacy depencencies. Create + the new dependencies according to the following rules. (1) If + a dependency exists between ``insn0`` and ``insn1``, create the dependnecy + ``SAME(SNC)`` where ``SNC`` is the set of non-concurrent inames used + by both ``insn0 and ``insn1``, and ``SAME`` is the relationship specified + by the ``SAME`` attribute of :class:`DependencyType`. 
(2) For each subset + of non-concurrent inames used by any instruction, find the set of all + instructions using those inames, create a directed graph with these + instructions as nodes and edges representing a 'happens before' + relationship specfied by each dependency, find the sources and sinks within + this graph, and connect each sink to each source (sink happens before + source) with a ``PRIOR(SNC)`` dependency, where ``PRIOR`` is the + relationship specified by the ``PRIOR`` attribute of + :class:`DependencyType`. + + """ # Introduce SAME dep for set of shared, non-concurrent inames from schedule_checker.sched_check_utils import ( -- GitLab From 547e2cace6f0b82ee317725587e5eb4bf34ad396 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 2 Sep 2019 20:29:09 -0500 Subject: [PATCH 130/415] created docstring for get_dependency_sources_and_sinks --- dependency.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/dependency.py b/dependency.py index 74b8a0978..8a118bdc5 100644 --- a/dependency.py +++ b/dependency.py @@ -457,6 +457,18 @@ def create_dependencies_from_legacy_knl(knl): def get_dependency_sources_and_sinks(knl, sched_item_ids): + """Implicitly create a directed graph with the schedule items specified + by ``sched_item_ids`` as nodes, and with edges representing a + 'happens before' relationship specfied by each legacy dependency between + two instructions. Return the sources and sinks within this graph. + + .. arg sched_item_ids: A :class:`list` of :class:`str` representing + loopy instruction ids. + + .. return: Two instances of :class:`set` of :class:`str` instruction ids + representing the sources and sinks in the dependency graph. + + """ sources = set() dependees = set() # all dependees (within sched_item_ids) for item_id in sched_item_ids: -- GitLab From 9acd446bcc412d9b2cad58841c1f6f99cb7fe78d Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 3 Sep 2019 00:00:47 -0500 Subject: [PATCH 131/415] simplified and cleaned up lex map creation example --- example_lex_map_creation.py | 116 +++++------------------------------- 1 file changed, 14 insertions(+), 102 deletions(-) diff --git a/example_lex_map_creation.py b/example_lex_map_creation.py index 79730d036..dde4e001e 100644 --- a/example_lex_map_creation.py +++ b/example_lex_map_creation.py @@ -5,32 +5,15 @@ from schedule_checker.lexicographic_order_map import ( from schedule_checker.sched_check_utils import ( create_explicit_map_from_tuples, get_isl_space, + prettier_map_string as pmap, ) -# *Symbolic* lexicographic mapping- map each tuple to all tuples occuring later - -#in_names = ["i", "j"] -#out_names = append_apostrophes(in_names) -n_dims = 2 # len(in_names) -lex_map_symbolic = create_symbolic_lex_order_map( - n_dims) -print("lex_map (symbolic):") -print(lex_map_symbolic) - - -# *Explicit* lexicographic mapping- map each tuple to all tuples occuring later - -""" -dim_bounds = [(0,2), (0,2)] # max vals for each dim (e.g., 0 <= i0 < max0 ...) 
-explicit_lex_map_pairs = lex_order_map_tuple_pairs_from_explicit_bounds(dim_bounds) -# for pair in explicit_lex_map_pairs: -# print(pair[0], pair[1]) -lex_map_explicit = create_explicit_map_from_tuples(explicit_lex_map_pairs, - lex_map_symbolic.space) -print("lex_map (explicit):") -print(lex_map_explicit) -""" +# Lexicographic order map- map each tuple to all tuples occuring later +n_dims = 2 +lex_order_map = create_symbolic_lex_order_map(n_dims) +print("lexicographic order map:") +print(pmap(lex_order_map)) # Example *explicit* schedule (map statement instances to lex time) @@ -38,12 +21,8 @@ param_names_sched = [] in_names_sched = ["s"] out_names_sched = ["i", "j"] sched_space = get_isl_space(param_names_sched, in_names_sched, out_names_sched) -example_sched = create_explicit_map_from_tuples( +sched_explicit = create_explicit_map_from_tuples( [ - #((0,), (2, 0, 0)), - #((1,), (2, 0, 1)), - #((2,), (2, 1, 0)), - #((3,), (2, 1, 1)), ((0,), (0, 0)), ((1,), (0, 1)), ((2,), (1, 0)), @@ -51,81 +30,14 @@ example_sched = create_explicit_map_from_tuples( ], sched_space, ) -print("example sched:") -print(example_sched) +print("example explicit sched:") +print(pmap(sched_explicit)) -# statement ordering: +# Statement instance ordering: # map each statement instance to all statement instances that occur later # S -> L -> S^-1 -""" -statement_instance_ordering_explicit = get_statement_ordering_map( - example_sched, lex_map_explicit) -print("statement instance ordering explicit:") -print(statement_instance_ordering_explicit) -""" - -statement_instance_ordering_symbolic = get_statement_ordering_map( - example_sched, lex_map_symbolic) -print("statement instance ordering symbolic:") -print(statement_instance_ordering_symbolic) - - -# example constraint test: -print("---------------------------------------------------------------------------") -""" -param_names_sched = ["ps", "p0", "p1"] -in_names_sched = ["s"] -out_names_sched = ["i", "j"] -sched_space = isl.Space.alloc(isl.DEFAULT_CONTEXT, 3, 1, 2) -sched_space = set_space_names( - sched_space, - param_names=param_names_sched, - in_names=in_names_sched, - out_names=out_names_sched) -example_sched = create_explicit_map_from_tuples( - [ - #((0,0), (0, 0)), - #((1,0), (0, 1)), - #((2,1), (1, 0)), - #((3,1), (1, 1)), - ((0,), (0, 0)), - ((1,), (0, 1)), - ((2,), (1, 0)), - ((3,), (1, 1)), - ], - sched_space, - ) -print("example sched:") -print(example_sched) -""" - -""" -param_names_sched = ["ps", "p0", "p1"] -in_names_sched = ["s","i","j"] -out_names_sched = ["l0","l1"] -sched_space = get_isl_space(param_names_sched, in_names_sched, out_names_sched) -example_sched = create_explicit_map_from_tuples( - [ - ((0,0,0), (0, 0)), - ((0,1,0), (0, 0)), - ((1,0,0), (0, 1)), - ((1,1,0), (0, 1)), - ((0,0,1), (1, 0)), - ((0,1,1), (1, 0)), - ((1,0,1), (1, 1)), - ((1,1,1), (1, 1)), - ], - sched_space, - ) -print("example sched:") -print(example_sched) - -print("lex map explicit:") -print(lex_map_explicit) - -statement_instance_ordering_explicit = get_statement_ordering_map( - example_sched, lex_map_explicit) -print("statement instance ordering explicit:") -print(statement_instance_ordering_explicit) -""" +sio = get_statement_ordering_map( + sched_explicit, lex_order_map) +print("Statement instance ordering:") +print(pmap(sio)) -- GitLab From 2edc3893d6651e9beb4febd7f1189590e703b8f4 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 3 Sep 2019 00:53:48 -0500 Subject: [PATCH 132/415] removed old/unused code/approach from dependency checking example and added code for 
ensuring spaces are aligned without enforcing iname order at map construcction --- example_dependency_checking.py | 157 +++++++++++++++++++-------------- 1 file changed, 93 insertions(+), 64 deletions(-) diff --git a/example_dependency_checking.py b/example_dependency_checking.py index dec2d5abb..0f04aee95 100644 --- a/example_dependency_checking.py +++ b/example_dependency_checking.py @@ -5,11 +5,11 @@ from schedule_checker.dependency import ( # noqa create_dependency_constraint, ) from schedule_checker.lexicographic_order_map import ( - lex_order_map_tuple_pairs_from_explicit_bounds, + create_symbolic_lex_order_map, get_statement_ordering_map, ) from schedule_checker.sched_check_utils import ( - prettier_map_string, + prettier_map_string as pmap, append_apostrophes, create_explicit_map_from_tuples, get_isl_space, @@ -30,18 +30,18 @@ knl = lp.tag_inames(knl, {"i": "l.0"}) print("Kernel:") print(knl) -all_necessary_inames_ordered = ['i', 'j'] -#all_necessary_inames_ordered = sorted(list(knl.all_inames())) +inames = ['i', 'j'] statement_var = 's' +unused_param_name = 'unused' # example sched: -print("---------------------------------------------------------------------------") +print("-"*80) # i is parallel, suppose we want to enforce the following: # for a given i, statement 0 happens before statement 1 -params_sched = ['p0', 'p1'] -in_names_sched = [statement_var]+all_necessary_inames_ordered +params_sched = ['p0', 'p1', unused_param_name] +in_names_sched = [statement_var]+inames out_names_sched = ['l0', 'l1'] sched_space = get_isl_space(params_sched, in_names_sched, out_names_sched) @@ -59,7 +59,7 @@ example_sched_valid = create_explicit_map_from_tuples( sched_space, ) print("example sched (valid):") -print(prettier_map_string(example_sched_valid)) +print(pmap(example_sched_valid)) example_sched_invalid = create_explicit_map_from_tuples( [ @@ -75,70 +75,54 @@ example_sched_invalid = create_explicit_map_from_tuples( sched_space, ) print("example sched (invalid):") -print(prettier_map_string(example_sched_invalid)) - -# *Explicit* lexicographic mapping- map each tuple to all tuples occuring later -print("---------------------------------------------------------------------------") -lex_dim_bounds = [(0, 2), (0, 2)] # max vals for each dim (e.g., 0 <= i0 < max0 ...) 
-lex_params = [] -lex_in_names = out_names_sched -lex_out_names = append_apostrophes(out_names_sched) - -explicit_lex_map_pairs = lex_order_map_tuple_pairs_from_explicit_bounds( - lex_dim_bounds) -# for pair in explicit_lex_map_pairs: -# print(pair[0], pair[1]) -lex_space_explicit = get_isl_space(lex_params, lex_in_names, lex_out_names) -lex_map_explicit = create_explicit_map_from_tuples(explicit_lex_map_pairs, - lex_space_explicit) -print("lex_map (explicit):") -print(prettier_map_string(lex_map_explicit)) +print(pmap(example_sched_invalid)) + +# Lexicographic order map- map each tuple to all tuples occuring later +print("-"*80) +n_dims = 2 +lex_order_map = create_symbolic_lex_order_map(n_dims) +print("lexicographic order map:") +print(pmap(lex_order_map)) # Statement instance ordering (valid sched) -print("----------------------------------------------------------------------") -SIO_explicit_valid = get_statement_ordering_map( - example_sched_valid, lex_map_explicit) -print("statement instance ordering explicit (valid_sched):") -print(prettier_map_string(SIO_explicit_valid)) +print("-"*80) +SIO_valid = get_statement_ordering_map( + example_sched_valid, lex_order_map) +print("statement instance ordering (valid_sched):") +print(pmap(SIO_valid)) + # Statement instance ordering (invalid sched) -print("----------------------------------------------------------------------") -SIO_explicit_invalid = get_statement_ordering_map( - example_sched_invalid, lex_map_explicit) -print("statement instance ordering explicit (invalid_sched):") -print(prettier_map_string(SIO_explicit_invalid)) +print("-"*80) +SIO_invalid = get_statement_ordering_map( + example_sched_invalid, lex_order_map) +print("statement instance ordering (invalid_sched):") +print(pmap(SIO_invalid)) # Dependencies and constraints: -print("----------------------------------------------------------------------") +print("-"*80) -# i is parallel, suppose we want to enforce the following: -# for a given i, statement 0 happens before statement 1 -# i dependency is none, j dependency is `prior` +# make some dependencies manually: -statement_var = 's' -unused_param_name = 'unused' - -domains = {} -for iname in all_necessary_inames_ordered: - domains[iname] = knl.get_inames_domain(iname) - -# make some dependencies manually for now: s0 = LexScheduleStatement(insn_id="0", within_inames={"i", "j"}) s1 = LexScheduleStatement(insn_id="1", within_inames={"i", "j"}) insnid_to_int_sid = {"0": 0, "1": 1} -statement_pair_dep_set = StatementPairDependencySet(s0, s1, {dt.SAME: ["i", "j"]}) +statement_pair_dep_set = StatementPairDependencySet( + s0, s1, {dt.SAME: ["i", "j"]}) +# SAME({i,j}) means: +# insn0{i,j} happens before insn1{i',j'} iff i = i' and j = j' + +print("Statement pair dependency set:") print(statement_pair_dep_set) -combined_doms = knl.get_inames_domain( - statement_pair_dep_set.statement_before.within_inames | # noqa - statement_pair_dep_set.statement_after.within_inames - ) + dom_before = knl.get_inames_domain( statement_pair_dep_set.statement_before.within_inames ) dom_after = knl.get_inames_domain( statement_pair_dep_set.statement_after.within_inames ) -loop_priority = None # TODO + +loop_priority = None constraint_map = create_dependency_constraint( statement_pair_dep_set, dom_before, @@ -147,20 +131,65 @@ constraint_map = create_dependency_constraint( insnid_to_int_sid, unused_param_name, statement_var, - all_dom_inames_ordered=all_necessary_inames_ordered, + #all_dom_inames_ordered=inames, # not necessary since algin spaces below ) 
-print("constraint map space:") +print("constraint map (before aligning space):") +print(pmap(constraint_map)) + +assert SIO_valid.space == SIO_invalid.space + +# align constraint map spaces to match sio so we can compare them + +print("constraint map space (before aligning):") print(constraint_map.space) + +# align params +aligned_constraint_map = constraint_map.align_params(SIO_valid.space) + +# align in_ dims +import islpy as isl +from schedule_checker.sched_check_utils import ( + reorder_dims_by_name, +) +SIO_valid_in_names = SIO_valid.space.get_var_names(isl.dim_type.in_) +aligned_constraint_map = reorder_dims_by_name( + aligned_constraint_map, + isl.dim_type.in_, + SIO_valid_in_names, + add_missing=False, + new_names_are_permutation_only=True, + ) + +# align out dims +aligned_constraint_map = reorder_dims_by_name( + aligned_constraint_map, + isl.dim_type.out, + append_apostrophes(SIO_valid_in_names), + # TODO SIO out names are only pretending to have apostrophes; confusing + add_missing=False, + new_names_are_permutation_only=True, + ) + +assert aligned_constraint_map.space == SIO_valid.space +assert ( + aligned_constraint_map.space.get_var_names(isl.dim_type.in_) + == SIO_valid.space.get_var_names(isl.dim_type.in_)) +assert ( + aligned_constraint_map.space.get_var_names(isl.dim_type.out) + == append_apostrophes(SIO_valid.space.get_var_names(isl.dim_type.out))) +assert ( + aligned_constraint_map.space.get_var_names(isl.dim_type.param) + == SIO_valid.space.get_var_names(isl.dim_type.param)) + +print("constraint map space (after aligning):") +print(aligned_constraint_map.space) +print("constraint map (after aligning space):") +print(pmap(aligned_constraint_map)) print("SIO space:") -print(SIO_explicit_valid.space) -#assert constraint_map.space == SIO_explicit_valid.space -print("constraint map:") -print(prettier_map_string(constraint_map)) +print(SIO_valid.space) print("is valid sched valid?") -print(constraint_map.is_subset(SIO_explicit_valid)) -#print(SIO_explicit_valid.is_subset(constraint_map)) +print(aligned_constraint_map.is_subset(SIO_valid)) print("is invalid sched valid?") -print(constraint_map.is_subset(SIO_explicit_invalid)) -#print(SIO_explicit_invalid.is_subset(constraint_map)) +print(aligned_constraint_map.is_subset(SIO_invalid)) -- GitLab From f99c1783bc1fa2bf46b5a9aa03dcb0fe5da9d39f Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 3 Sep 2019 00:55:00 -0500 Subject: [PATCH 133/415] removed now-unused function lex_order_map_tuple_pairs_from_explicit_bounds() --- lexicographic_order_map.py | 23 ----------------------- 1 file changed, 23 deletions(-) diff --git a/lexicographic_order_map.py b/lexicographic_order_map.py index 730ffc081..36345539b 100644 --- a/lexicographic_order_map.py +++ b/lexicographic_order_map.py @@ -1,29 +1,6 @@ import islpy as isl -def lex_order_map_tuple_pairs_from_explicit_bounds(dim_bounds): - - # Given list of integer dimension bound pairs - # [(lower0, upper0), (lower1, upper1) ... 
], - # create a list of tuple pairs [(x0, x1, ...), (y0, y1, ...)] - # representing a relation that maps from each point - # to every point that comes after that point in a lexicographic ordering - - # lower bounds are inclusive, upper bounds are exclusive - - import itertools - # all lex tuples in order: - lex_tuples = list( - itertools.product(*[range(l, u) for l, u in dim_bounds])) - # goes up to u-1 because u is a non-inclusive upper bound - - map_pairs = [] - for i, l_before in enumerate(lex_tuples): - for l_after in lex_tuples[i+1:]: - map_pairs.append((l_before, l_after)) - return map_pairs - - def get_statement_ordering_map(sched_map, lex_map): # statement ordering: # map each statement instance to all statement instances that occur later -- GitLab From a21c8c3d265b3b2007c41160855c7910a3f33016 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 3 Sep 2019 01:03:24 -0500 Subject: [PATCH 134/415] fixed typo in schedule.create_symbolic_isl_map() docstring --- schedule.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/schedule.py b/schedule.py index a2df9acb3..30b09d8e3 100644 --- a/schedule.py +++ b/schedule.py @@ -240,9 +240,9 @@ class LexSchedule(object): ``statement_var_name`` and ``dom_inames_ordered`` are the names of the dims of the space of the ISL map domain. - .. return: An :class:`islpy.Map` representing the lex schedule as - a mapping from each statement instance to all statement instances - occuring later. + .. return: An :class:`islpy.Map` representing a schedule + as a mapping from each statement instance to a point in + a lexicographic ordering. """ -- GitLab From 99ccd567ff9c507d68caa809bcd1c2b012036fca Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 3 Sep 2019 01:41:00 -0500 Subject: [PATCH 135/415] added docstrings for get_statement_ordering_map() and get_lex_order_constraint() --- lexicographic_order_map.py | 69 +++++++++++++++++++++++++++++++------- 1 file changed, 57 insertions(+), 12 deletions(-) diff --git a/lexicographic_order_map.py b/lexicographic_order_map.py index 36345539b..20cb7c723 100644 --- a/lexicographic_order_map.py +++ b/lexicographic_order_map.py @@ -2,24 +2,68 @@ import islpy as isl def get_statement_ordering_map(sched_map, lex_map): - # statement ordering: - # map each statement instance to all statement instances that occur later - # S -> L -> S^-1 + """Return a mapping that maps each statement instance to + all statement instances occuring later. + + .. arg sched_map: An :class:`islpy.Map` representing a schedule + as a mapping from each statement instance to a point in + a lexicographic ordering. + + .. arg lex_map: An :class:`islpy.Map` representing a lexicographic + ordering as a mapping from each point in lexicographic time + to every point that occurs later in lexicographic time. E.g.:: + + {[i0, i1, i2, ...] -> [i0', i1', i2', ...] : + i0 < i0' or (i0 = i0' and i1 < i1') + or (i0 = i0' and i1 = i1' and i2 < i2') ...} + + .. return: An :class:`islpy.Map` representing the lex schedule as + a mapping from each statement instance to all statement instances + occuring later. I.e., we compose S -> L -> S^-1, where S + is the schedule map and L is the lexicographic ordering map. 
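A tiny self-contained sketch of this composition, using toy values chosen only for illustration (two statements, no inames, statement 0 at lex time 0 and statement 1 at lex time 1):

    import islpy as isl

    sched = isl.Map(
        "{ [s] -> [l0] : (s = 0 and l0 = 0) or (s = 1 and l0 = 1) }")
    lex_order = isl.Map("{ [l0] -> [l0p] : l0 < l0p }")

    # statement instance ordering: S -> L -> S^-1
    sio = sched.apply_range(lex_order).apply_range(sched.reverse())
    print(sio)  # relates statement 0 to statement 1, i.e. 0 happens before 1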
+ + """ + # TODO apostrophes aren't really there for range, this is confusing return sched_map.apply_range(lex_map).apply_range(sched_map.reverse()) -def get_lex_order_constraint(islvars, in_names, out_names): - # create constraint enforcing lex ordering, e.g., in the 3-dim case: - # i0 < o0 or ((i0 = o0) and (i1 < o1)) - # or ((i0 = o0) and (i1 = o1) and (i2 < o2)) - lex_order_constraint = islvars[in_names[0]].lt_set(islvars[out_names[0]]) - for i in range(1, len(in_names)): - lex_order_constraint_conj = islvars[in_names[i]].lt_set( - islvars[out_names[i]]) +def get_lex_order_constraint(islvars, before_names, after_names): + """Return a constraint represented as an :class:`islpy.Set` + defining a 'happens before' relationship in a lexicographic + ordering. + + .. arg islvars: A dictionary from variable names to :class:`PwAff` + instances that represent each of the variables + (islvars may be produced by `islpy.make_zero_and_vars`). The key + '0' is also include and represents a :class:`PwAff` zero constant. + This dictionary defines the space to be used for the set. + + .. arg before_names: A list of :class:`str` variable names representing + the lexicographic space dimensions for the point in lexicographic + time that occurs before. (see example below) + + .. arg after_names: A list of :class:`str` variable names representing + the lexicographic space dimensions for the point in lexicographic + time that occurs after. (see example below) + + .. return: An :class:`islpy.Set` representing a constraint that enforces a + lexicographic ordering. E.g., if ``before_names = [i, j, k]`` and + ``after_names = [i', j', k']``, return the set + + {[i0, i1, i2, i0', i1', i2'] : + i0 < i0' or (i0 = i0' and i1 < i1') + or (i0 = i0' and i1 = i1' and i2 < i2')} + + """ + + lex_order_constraint = islvars[before_names[0]].lt_set(islvars[after_names[0]]) + for i in range(1, len(before_names)): + lex_order_constraint_conj = islvars[before_names[i]].lt_set( + islvars[after_names[i]]) for j in range(i): lex_order_constraint_conj = lex_order_constraint_conj & \ - islvars[in_names[j]].eq_set(islvars[out_names[j]]) + islvars[before_names[j]].eq_set(islvars[after_names[j]]) lex_order_constraint = lex_order_constraint | lex_order_constraint_conj return lex_order_constraint @@ -29,6 +73,7 @@ def create_symbolic_lex_order_map( in_names=None, out_names=None, ): + if in_names is None: in_names = ["i%s" % (i) for i in range(n_dims)] if out_names is None: -- GitLab From 1a7d0f5372b266bc1283c1e47f2c2170f9ea8ddd Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 3 Sep 2019 01:52:29 -0500 Subject: [PATCH 136/415] added docstring for create_symbolic_lex_order_map(), fixed a few typos --- lexicographic_order_map.py | 59 ++++++++++++++++++++++++++------------ schedule.py | 2 +- 2 files changed, 41 insertions(+), 20 deletions(-) diff --git a/lexicographic_order_map.py b/lexicographic_order_map.py index 20cb7c723..687f18e84 100644 --- a/lexicographic_order_map.py +++ b/lexicographic_order_map.py @@ -40,16 +40,16 @@ def get_lex_order_constraint(islvars, before_names, after_names): This dictionary defines the space to be used for the set. .. arg before_names: A list of :class:`str` variable names representing - the lexicographic space dimensions for the point in lexicographic + the lexicographic space dimensions for a point in lexicographic time that occurs before. (see example below) .. 
arg after_names: A list of :class:`str` variable names representing - the lexicographic space dimensions for the point in lexicographic + the lexicographic space dimensions for a point in lexicographic time that occurs after. (see example below) .. return: An :class:`islpy.Set` representing a constraint that enforces a - lexicographic ordering. E.g., if ``before_names = [i, j, k]`` and - ``after_names = [i', j', k']``, return the set + lexicographic ordering. E.g., if ``before_names = [i0, i1, i2]`` and + ``after_names = [i0', i1', i2']``, return the set:: {[i0, i1, i2, i0', i1', i2'] : i0 < i0' or (i0 = i0' and i1 < i1') @@ -70,33 +70,54 @@ def get_lex_order_constraint(islvars, before_names, after_names): def create_symbolic_lex_order_map( n_dims, - in_names=None, - out_names=None, + before_names=None, + after_names=None, ): + """Return a mapping that maps each point in a lexicographic + ordering to every point that occurs later in lexicographic + time. - if in_names is None: - in_names = ["i%s" % (i) for i in range(n_dims)] - if out_names is None: + .. arg n_dims: An :class:`int` representing the number of dimensions + in the lexicographic ordering. + + .. arg before_names: A list of :class:`str` variable names representing + the lexicographic space dimensions for a point in lexicographic + time that occurs before. (see example below) + + .. arg after_names: A list of :class:`str` variable names representing + the lexicographic space dimensions for a point in lexicographic + time that occurs after. (see example below) + + .. return: An :class:`islpy.Map` representing a lexicographic + ordering as a mapping from each point in lexicographic time + to every point that occurs later in lexicographic time. + E.g., if ``before_names = [i0, i1, i2]`` and + ``after_names = [i0', i1', i2']``, return the map:: + + {[i0, i1, i2] -> [i0', i1', i2'] : + i0 < i0' or (i0 = i0' and i1 < i1') + or (i0 = i0' and i1 = i1' and i2 < i2')} + + """ + + if before_names is None: + before_names = ["i%s" % (i) for i in range(n_dims)] + if after_names is None: from schedule_checker.sched_check_utils import append_apostrophes - out_names = append_apostrophes(in_names) + after_names = append_apostrophes(before_names) - assert len(in_names) == len(out_names) == n_dims + assert len(before_names) == len(after_names) == n_dims dim_type = isl.dim_type islvars = isl.make_zero_and_vars( - in_names+out_names, + before_names+after_names, []) - # create constraint enforcing lex ordering, e.g., in the 3-dim case: - # i0 < o0 or ((i0 = o0) and (i1 < o1)) - # or ((i0 = o0) and (i1 = o1) and (i2 < o2)) - lex_order_constraint = get_lex_order_constraint(islvars, in_names, out_names) + lex_order_constraint = get_lex_order_constraint(islvars, before_names, after_names) - #lex_set = lex_set_outer_bounds & lex_order_constraint - #lex_map = isl.Map.from_domain(lex_set) lex_map = isl.Map.from_domain(lex_order_constraint) lex_map = lex_map.move_dims( dim_type.out, 0, dim_type.in_, - len(in_names), len(out_names)) + len(before_names), len(after_names)) return lex_map diff --git a/schedule.py b/schedule.py index 30b09d8e3..7595261fd 100644 --- a/schedule.py +++ b/schedule.py @@ -311,7 +311,7 @@ class LexSchedule(object): ) n_dims = self.max_lex_dims() return create_symbolic_lex_order_map( - n_dims, in_names=self.get_lex_var_names()) + n_dims, before_names=self.get_lex_var_names()) def __bool__(self): return bool(self.lex_schedule) -- GitLab From 529f7c8a51deeae6d0430babf9a09d73d328fb94 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 3 Sep 
2019 01:55:49 -0500 Subject: [PATCH 137/415] renamed create_symbolic_lex_order_map()->create_lex_order_map() --- example_dependency_checking.py | 4 ++-- example_lex_map_creation.py | 4 ++-- lexicographic_order_map.py | 2 +- schedule.py | 4 ++-- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/example_dependency_checking.py b/example_dependency_checking.py index 0f04aee95..52c554607 100644 --- a/example_dependency_checking.py +++ b/example_dependency_checking.py @@ -5,7 +5,7 @@ from schedule_checker.dependency import ( # noqa create_dependency_constraint, ) from schedule_checker.lexicographic_order_map import ( - create_symbolic_lex_order_map, + create_lex_order_map, get_statement_ordering_map, ) from schedule_checker.sched_check_utils import ( @@ -80,7 +80,7 @@ print(pmap(example_sched_invalid)) # Lexicographic order map- map each tuple to all tuples occuring later print("-"*80) n_dims = 2 -lex_order_map = create_symbolic_lex_order_map(n_dims) +lex_order_map = create_lex_order_map(n_dims) print("lexicographic order map:") print(pmap(lex_order_map)) diff --git a/example_lex_map_creation.py b/example_lex_map_creation.py index dde4e001e..83ff538d3 100644 --- a/example_lex_map_creation.py +++ b/example_lex_map_creation.py @@ -1,6 +1,6 @@ from schedule_checker.lexicographic_order_map import ( get_statement_ordering_map, - create_symbolic_lex_order_map, + create_lex_order_map, ) from schedule_checker.sched_check_utils import ( create_explicit_map_from_tuples, @@ -11,7 +11,7 @@ from schedule_checker.sched_check_utils import ( # Lexicographic order map- map each tuple to all tuples occuring later n_dims = 2 -lex_order_map = create_symbolic_lex_order_map(n_dims) +lex_order_map = create_lex_order_map(n_dims) print("lexicographic order map:") print(pmap(lex_order_map)) diff --git a/lexicographic_order_map.py b/lexicographic_order_map.py index 687f18e84..356fb8731 100644 --- a/lexicographic_order_map.py +++ b/lexicographic_order_map.py @@ -68,7 +68,7 @@ def get_lex_order_constraint(islvars, before_names, after_names): return lex_order_constraint -def create_symbolic_lex_order_map( +def create_lex_order_map( n_dims, before_names=None, after_names=None, diff --git a/schedule.py b/schedule.py index 7595261fd..34af2cfbb 100644 --- a/schedule.py +++ b/schedule.py @@ -307,10 +307,10 @@ class LexSchedule(object): """ from schedule_checker.lexicographic_order_map import ( - create_symbolic_lex_order_map, + create_lex_order_map, ) n_dims = self.max_lex_dims() - return create_symbolic_lex_order_map( + return create_lex_order_map( n_dims, before_names=self.get_lex_var_names()) def __bool__(self): -- GitLab From cb639d9accd0d3ba12cb297ee4ed20e9a38b8594 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 3 Sep 2019 02:06:06 -0500 Subject: [PATCH 138/415] removed more unused functions; added TODOs for remaining docstrings --- sched_check_utils.py | 96 +++++++++++++++++++------------------------- 1 file changed, 41 insertions(+), 55 deletions(-) diff --git a/sched_check_utils.py b/sched_check_utils.py index 6c6e87332..a0a9ccc9f 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -28,6 +28,7 @@ def add_dims_to_isl_set(isl_set, dim_type, names, new_pose_start): def reorder_dims_by_name( isl_set, dim_type, desired_dims_ordered, add_missing=False, new_names_are_permutation_only=False): + # TODO add docstring assert set(isl_set.get_var_names(dim_type)).issubset(desired_dims_ordered) assert dim_type != isl.dim_type.param @@ -70,6 +71,7 @@ def reorder_dims_by_name( def 
create_new_set_with_primes(old_set): + # TODO add docstring new_set = old_set.copy() for i in range(old_set.n_dim()): new_set = new_set.set_dim_name(isl.dim_type.out, i, old_set.get_dim_name( @@ -78,6 +80,7 @@ def create_new_set_with_primes(old_set): def make_islvars_with_var_primes(var_names, param_names): + # TODO add docstring return isl.make_zero_and_vars( var_names+append_apostrophes(var_names), param_names) @@ -96,24 +99,6 @@ def _union_of_isl_sets_or_maps(set_list): return union -def _union_inames_domains(knl): - all_inames = list(knl.all_inames()) - domain_union = knl.get_inames_domain(all_inames[0]) - for iname in all_inames[1:]: - domain_union = domain_union.union(knl.get_inames_domain(iname)) - return domain_union - - -def all_iname_domains_equal(knl): - all_inames = list(knl.all_inames()) - - first = knl.get_inames_domain(all_inames[0]) - for iname in all_inames[1:]: - if knl.get_inames_domain(iname) != first: - return False - return True - - def list_var_names_in_isl_sets( isl_sets, set_dim=isl.dim_type.set): @@ -123,39 +108,13 @@ def list_var_names_in_isl_sets( return list(inames) -def create_explicit_map_from_tuples(tuple_pairs, space): - - dim_type = isl.dim_type - individual_maps = [] - - for tup_in, tup_out in tuple_pairs: - constraints = [] - for i, val_in in enumerate(tup_in): - constraints.append( - isl.Constraint.equality_alloc(space) - .set_coefficient_val(dim_type.in_, i, 1) - .set_constant_val(-1*val_in)) - for i, val_out in enumerate(tup_out): - constraints.append( - isl.Constraint.equality_alloc(space) - .set_coefficient_val(dim_type.out, i, 1) - .set_constant_val(-1*val_out)) - individual_maps.append( - isl.Map.universe(space).add_constraints(constraints)) - - union_map = individual_maps[0] - for m in individual_maps[1:]: - union_map = union_map.union(m) - - return union_map - - def create_symbolic_isl_map_from_tuples( tuple_pairs_with_domains, # list of ((tup_in, tup_out), dom_to_intersect) space, unused_param_name, statement_var_name, ): + # TODO add docstring # TODO clarify this with comments @@ -243,8 +202,9 @@ def create_symbolic_isl_map_from_tuples( return _union_of_isl_sets_or_maps(all_maps) -def set_space_names( +def set_all_space_names( space, param_names=None, in_names=None, out_names=None): + # TODO add docstring new_space = space.copy() dim_type = isl.dim_type if param_names: @@ -269,9 +229,10 @@ def set_space_names( def get_isl_space(param_names, in_names, out_names): + # TODO add docstring space = isl.Space.alloc( isl.DEFAULT_CONTEXT, len(param_names), len(in_names), len(out_names)) - return set_space_names( + return set_all_space_names( space, param_names=param_names, in_names=in_names, out_names=out_names) @@ -303,6 +264,7 @@ def _get_insn_id_from_sched_item(knl, sched_item): # loop over schedule more than once def get_all_nonconcurrent_insn_iname_subsets( knl, exclude_empty=False, non_conc_inames=None): + # TODO add docstring if non_conc_inames is None: _, non_conc_inames = get_concurrent_inames(knl) @@ -318,7 +280,6 @@ def get_all_nonconcurrent_insn_iname_subsets( def get_sched_item_ids_within_inames(knl, inames): - sched_item_ids = set() for insn in knl.instructions: if inames.issubset(insn.within_inames): @@ -326,13 +287,6 @@ def get_sched_item_ids_within_inames(knl, inames): return sched_item_ids -def get_inames_in_sched_order(scheduled_knl): - # returns non-concurrent inames in order found in sched - from loopy.schedule import EnterLoop - return [sched_item.iname for sched_item in scheduled_knl.schedule - if isinstance(sched_item, 
EnterLoop)] - - # TODO use yield to clean this up # TODO use topological sort from loopy, then find longest path in dag def _generate_orderings_starting_w_prefix( @@ -376,6 +330,7 @@ def _generate_orderings_starting_w_prefix( def get_orderings_of_length_n( allowed_after_dict, required_length, return_first_found=False): + # TODO add docstring # get all orderings that are *explicitly* allowed by allowed_after_dict # i.e., if we know a->b and c->b, we don't know enough to return a->c->b @@ -392,3 +347,34 @@ def get_orderings_of_length_n( return_first_found=return_first_found, ) return orderings + + +# only used for example purposes: + + +def create_explicit_map_from_tuples(tuple_pairs, space): + # TODO add docstring + + dim_type = isl.dim_type + individual_maps = [] + + for tup_in, tup_out in tuple_pairs: + constraints = [] + for i, val_in in enumerate(tup_in): + constraints.append( + isl.Constraint.equality_alloc(space) + .set_coefficient_val(dim_type.in_, i, 1) + .set_constant_val(-1*val_in)) + for i, val_out in enumerate(tup_out): + constraints.append( + isl.Constraint.equality_alloc(space) + .set_coefficient_val(dim_type.out, i, 1) + .set_constant_val(-1*val_out)) + individual_maps.append( + isl.Map.universe(space).add_constraints(constraints)) + + union_map = individual_maps[0] + for m in individual_maps[1:]: + union_map = union_map.union(m) + + return union_map -- GitLab From a566ba4f2f046919e175643e39cdf65438843a89 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 16 Sep 2019 21:24:59 -0500 Subject: [PATCH 139/415] docstring for add_dims_to_isl_set() --- sched_check_utils.py | 28 +++++++++++++++++++++++++++- 1 file changed, 27 insertions(+), 1 deletion(-) diff --git a/sched_check_utils.py b/sched_check_utils.py index a0a9ccc9f..1604d7f0a 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -28,7 +28,33 @@ def add_dims_to_isl_set(isl_set, dim_type, names, new_pose_start): def reorder_dims_by_name( isl_set, dim_type, desired_dims_ordered, add_missing=False, new_names_are_permutation_only=False): - # TODO add docstring + """Return an isl_set with the dimensions in the specified order. + + .. arg isl_set: A :class:`islpy.Set` whose dimensions are + to be reordered. + + .. arg dim_type: A :class:`islpy.dim_type` specifying the + dimension to be reordered. + + .. arg desired_dims_ordered: A :class:`list` of :class:`string` elements + representing the desired dimensions order by dimension name. + + .. arg add_missing: A :class:`bool` specifying whether to insert + dimensions (by name) found in `desired_dims_ordered` that are not + present in `isl_set`. + + .. arg new_names_are_permutation_only: A :class:`bool` indicating that + `desired_dims_ordered` contains the same names as the specified + dimensions in `isl_set`, and does not, e.g., contain additional dimension names + not found in `isl_set`. If set to True, and these two sets of names + do not match, an error is produced. + + .. return: An :class:`islpy.Set` matching `isl_set` with the + dimension order matching `desired_dims_ordered`, optionally + including additional dimensions present in `desred_dims_ordered` + that are not present in `isl_set`. 
+ + """ assert set(isl_set.get_var_names(dim_type)).issubset(desired_dims_ordered) assert dim_type != isl.dim_type.param -- GitLab From 15e6e86b0e150ea3faab070611293685fe20111a Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 16 Sep 2019 21:29:01 -0500 Subject: [PATCH 140/415] renamed create_new_set_with_primes()->create_new_isl_set_with_primes() and added docstring --- dependency.py | 4 ++-- sched_check_utils.py | 18 ++++++++++++------ 2 files changed, 14 insertions(+), 8 deletions(-) diff --git a/dependency.py b/dependency.py index 8a118bdc5..50fefa1cb 100644 --- a/dependency.py +++ b/dependency.py @@ -184,7 +184,7 @@ def create_dependency_constraint( append_apostrophes, add_dims_to_isl_set, reorder_dims_by_name, - create_new_set_with_primes, + create_new_isl_set_with_primes, ) # This function uses the dependency given to create the following constraint: # Statement [s,i,j] comes before statement [s',i',j'] iff @@ -344,7 +344,7 @@ def create_dependency_constraint( domain_to_intersect = add_dims_to_isl_set( dom_before_constraint_set, isl.dim_type.out, [statement_var_name], statement_var_pose) - range_constraint_set = create_new_set_with_primes(dom_after_constraint_set) + range_constraint_set = create_new_isl_set_with_primes(dom_after_constraint_set) range_to_intersect = add_dims_to_isl_set( range_constraint_set, isl.dim_type.out, [statement_var_name_prime], statement_var_pose) diff --git a/sched_check_utils.py b/sched_check_utils.py index 1604d7f0a..b0a35e9de 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -96,12 +96,18 @@ def reorder_dims_by_name( return new_set -def create_new_set_with_primes(old_set): - # TODO add docstring - new_set = old_set.copy() - for i in range(old_set.n_dim()): - new_set = new_set.set_dim_name(isl.dim_type.out, i, old_set.get_dim_name( - isl.dim_type.out, i)+"'") +def create_new_isl_set_with_primes(old_isl_set): + """Return an isl_set with apostrophes appended to + dim_type.set dimension names. + + .. arg old_isl_set: A :class:`islpy.Set`. + + """ + + new_set = old_isl_set.copy() + for i in range(old_isl_set.n_dim()): + new_set = new_set.set_dim_name(isl.dim_type.set, i, old_isl_set.get_dim_name( + isl.dim_type.set, i)+"'") return new_set -- GitLab From 16de2a29dd8305782b86c9aa33473514834d4ca7 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 16 Sep 2019 21:36:27 -0500 Subject: [PATCH 141/415] docstring for make_islvars_with_var_primes() --- sched_check_utils.py | 25 +++++++++++++++++++++++-- 1 file changed, 23 insertions(+), 2 deletions(-) diff --git a/sched_check_utils.py b/sched_check_utils.py index b0a35e9de..2300f4a45 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -36,7 +36,7 @@ def reorder_dims_by_name( .. arg dim_type: A :class:`islpy.dim_type` specifying the dimension to be reordered. - .. arg desired_dims_ordered: A :class:`list` of :class:`string` elements + .. arg desired_dims_ordered: A :class:`list` of :class:`str` elements representing the desired dimensions order by dimension name. .. arg add_missing: A :class:`bool` specifying whether to insert @@ -102,6 +102,9 @@ def create_new_isl_set_with_primes(old_isl_set): .. arg old_isl_set: A :class:`islpy.Set`. + .. return: A :class:`islpy.Set` matching `old_isl_set` with + apostrophes appended to dim_type.set dimension names. 
+ """ new_set = old_isl_set.copy() @@ -112,7 +115,25 @@ def create_new_isl_set_with_primes(old_isl_set): def make_islvars_with_var_primes(var_names, param_names): - # TODO add docstring + """Return a dictionary from variable and parameter names + to :class:`PwAff` instances that represent each of + the variables and parameters, including + both the variables in `var_names` and a copy of each + variable with an apostrophe appended. + + .. arg var_names: A :class:`list` of :class:`str` elements + representing variable names. + + .. arg param_names: A :class:`list` of :class:`str` elements + representing parameter names. + + .. return: A dictionary from variable names to :class:`PwAff` + instances that represent each of the variables + (islvars may be produced by `islpy.make_zero_and_vars`). The key + '0' is also include and represents a :class:`PwAff` zero constant. + + """ + return isl.make_zero_and_vars( var_names+append_apostrophes(var_names), param_names) -- GitLab From b28a0093bc1fa050b9af23a214c74469c94084cd Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 16 Sep 2019 22:21:27 -0500 Subject: [PATCH 142/415] docstring for create_symbolic_isl_map_from_tuples(); other minor improvements to documentation --- sched_check_utils.py | 52 +++++++++++++++++++++++++++++++++++--------- 1 file changed, 42 insertions(+), 10 deletions(-) diff --git a/sched_check_utils.py b/sched_check_utils.py index 2300f4a45..3d7a20281 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -33,8 +33,8 @@ def reorder_dims_by_name( .. arg isl_set: A :class:`islpy.Set` whose dimensions are to be reordered. - .. arg dim_type: A :class:`islpy.dim_type` specifying the - dimension to be reordered. + .. arg dim_type: A :class:`islpy.dim_type`, i.e., a :class:`int`, + specifying the dimension to be reordered. .. arg desired_dims_ordered: A :class:`list` of :class:`str` elements representing the desired dimensions order by dimension name. @@ -162,18 +162,50 @@ def list_var_names_in_isl_sets( def create_symbolic_isl_map_from_tuples( - tuple_pairs_with_domains, # list of ((tup_in, tup_out), dom_to_intersect) + tuple_pairs_with_domains, space, unused_param_name, statement_var_name, ): - # TODO add docstring + """Return a :class:`islpy.Map` constructed using the provided space, + mapping input->output tuples provided in `tuple_pairs_with_domains`, + with each set of tuple variables constrained by the domains provided. + + .. arg tuple_pairs_with_domains: A :class:`list` with each element being + a tuple of the form `((tup_in, tup_out), domain)`. + `tup_in` and `tup_out` are tuples containing elements of type + :class:`int` and :class:`str` representing values for the + input and output dimensions in `space`, and `domain` is a + :class:`islpy.Set` constraining variable bounds. + + .. arg space: A :class:`islpy.Space` to be used to create the map. + + .. arg unused_param_name: A :class:`str` that specifies the name of a + dummy isl parameter assigned to variables in domain elements of the + isl map that represent inames unused in a particular statement + instance. An element in the domain of this map may + represent a statement instance that does not lie within iname x, but + will still need to assign a value to the x domain variable. In this + case, the parameter unused_param_name is is assigned to x. This + situation is detected when a name present in `in_` dimension of + the space is not present in a particular domain. + + .. 
arg statement_var_name: A :class:`str` specifying the name of the + isl variable used to represent the unique :class:`int` statement id. + + .. return: A :class:`islpy.Map` constructed using the provided space + as follows. For each `((tup_in, tup_out), domain)` in + `tuple_pairs_with_domains`, map + `(tup_in)->(tup_out) : domain`, where `tup_in` and `tup_out` are + numeric or symbolic values assigned to the input and output + dimension variables in `space`, and `domain` specifies constraints + on these values. Any space `in_` dimension variable not + constrained by `domain` is assigned `unused_param_name`. - # TODO clarify this with comments + """ - # given a list of pairs of ((input), (output)) tuples, create an isl map - # and intersect each pair with corresponding domain_to_intersect - #TODO allow None for domains + # TODO clarify this with more comments + # TODO allow None for domains dim_type = isl.dim_type @@ -206,7 +238,7 @@ def create_symbolic_isl_map_from_tuples( # TODO we probably shouldn't rely on dom # here for determing where to set inames equal to dummy vars, # should instead determine before in LexSchedule and pass info in - dom_var_names = dom.get_var_names(dim_type.out) + dom_var_names = dom.get_var_names(dim_type.set) if not set( [var for var in tup_out if not isinstance(var, int)] ).issubset(set(dom_var_names)): @@ -242,7 +274,7 @@ def create_symbolic_isl_map_from_tuples( # map_from_set, we have a problem I think? # (assertion checks this in add_missing... dom_with_all_inames = reorder_dims_by_name( - dom, isl.dim_type.out, + dom, isl.dim_type.set, space_in_names, add_missing=True, new_names_are_permutation_only=False, -- GitLab From c832fe83937ef1e8c5bdce24078619a26bf35be3 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 16 Sep 2019 22:28:14 -0500 Subject: [PATCH 143/415] renamed set_all_space_names()->set_all_isl_space_names() and added docstring --- sched_check_utils.py | 22 ++++++++++++++-------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/sched_check_utils.py b/sched_check_utils.py index 3d7a20281..33f3c7725 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -287,28 +287,34 @@ def create_symbolic_isl_map_from_tuples( return _union_of_isl_sets_or_maps(all_maps) -def set_all_space_names( - space, param_names=None, in_names=None, out_names=None): - # TODO add docstring - new_space = space.copy() +def set_all_isl_space_names( + isl_space, param_names=None, in_names=None, out_names=None): + """Return a copy of `isl_space` with the specified dimension names. + If no names are provided, use `p0, p1, ...` for parameters, + `i0, i1, ...`, for in_ dimensions, and `o0, o1, ...` for out + dimensions. 
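For illustration, a rough usage sketch of the renamed helper (assuming the package is importable as schedule_checker; the dimension counts and names below are hypothetical):

import islpy as isl
from schedule_checker.sched_check_utils import set_all_isl_space_names

raw_space = isl.Space.alloc(isl.DEFAULT_CONTEXT, 1, 2, 2)
# with no names given, fall back to p0 | i0, i1 | o0, o1
space_default = set_all_isl_space_names(raw_space)
# or name every dimension explicitly
space_named = set_all_isl_space_names(
    raw_space, param_names=["n"], in_names=["statement", "i"],
    out_names=["l0", "l1"])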
+ + """ + + new_space = isl_space.copy() dim_type = isl.dim_type if param_names: for i, p in enumerate(param_names): new_space = new_space.set_dim_name(dim_type.param, i, p) else: - for i in range(len(space.get_var_names(dim_type.param))): + for i in range(len(isl_space.get_var_names(dim_type.param))): new_space = new_space.set_dim_name(dim_type.param, i, "p%d" % (i)) if in_names: for i, p in enumerate(in_names): new_space = new_space.set_dim_name(dim_type.in_, i, p) else: - for i in range(len(space.get_var_names(dim_type.in_))): + for i in range(len(isl_space.get_var_names(dim_type.in_))): new_space = new_space.set_dim_name(dim_type.in_, i, "i%d" % (i)) if out_names: for i, p in enumerate(out_names): new_space = new_space.set_dim_name(dim_type.out, i, p) else: - for i in range(len(space.get_var_names(dim_type.out))): + for i in range(len(isl_space.get_var_names(dim_type.out))): new_space = new_space.set_dim_name(dim_type.out, i, "o%d" % (i)) return new_space @@ -317,7 +323,7 @@ def get_isl_space(param_names, in_names, out_names): # TODO add docstring space = isl.Space.alloc( isl.DEFAULT_CONTEXT, len(param_names), len(in_names), len(out_names)) - return set_all_space_names( + return set_all_isl_space_names( space, param_names=param_names, in_names=in_names, out_names=out_names) -- GitLab From 87e6ea4dd84725a3629bd63ae092b0b921769372 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 16 Sep 2019 22:30:21 -0500 Subject: [PATCH 144/415] docstring for get_isl_space() --- sched_check_utils.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/sched_check_utils.py b/sched_check_utils.py index 33f3c7725..3d1583a67 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -33,7 +33,7 @@ def reorder_dims_by_name( .. arg isl_set: A :class:`islpy.Set` whose dimensions are to be reordered. - .. arg dim_type: A :class:`islpy.dim_type`, i.e., a :class:`int`, + .. arg dim_type: A :class:`islpy.dim_type`, i.e., an :class:`int`, specifying the dimension to be reordered. .. arg desired_dims_ordered: A :class:`list` of :class:`str` elements @@ -167,7 +167,7 @@ def create_symbolic_isl_map_from_tuples( unused_param_name, statement_var_name, ): - """Return a :class:`islpy.Map` constructed using the provided space, + """Return an :class:`islpy.Map` constructed using the provided space, mapping input->output tuples provided in `tuple_pairs_with_domains`, with each set of tuple variables constrained by the domains provided. @@ -320,7 +320,9 @@ def set_all_isl_space_names( def get_isl_space(param_names, in_names, out_names): - # TODO add docstring + """Return an :class:`islpy.Space` with the specified dimension names. 
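As a brief illustration (hypothetical dimension names), the wrapper documented above would be called as:

from schedule_checker.sched_check_utils import get_isl_space

# params [nx], in_ dims [statement, i], out dims [l0, l1]
space = get_isl_space(["nx"], ["statement", "i"], ["l0", "l1"])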
+ """ + space = isl.Space.alloc( isl.DEFAULT_CONTEXT, len(param_names), len(in_names), len(out_names)) return set_all_isl_space_names( -- GitLab From 2961533a8c4b4d2c279ff8d2cf55aa2101c49608 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 16 Sep 2019 22:40:30 -0500 Subject: [PATCH 145/415] docstring for get_all_nonconcurrent_insn_iname_subsets() --- sched_check_utils.py | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/sched_check_utils.py b/sched_check_utils.py index 3d1583a67..15cd89380 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -357,7 +357,21 @@ def _get_insn_id_from_sched_item(knl, sched_item): # loop over schedule more than once def get_all_nonconcurrent_insn_iname_subsets( knl, exclude_empty=False, non_conc_inames=None): - # TODO add docstring + """Return a :class:`set` of every unique subset of non-concurrent + inames used in an instruction in a :class:`loopy.LoopKernel`. + + .. arg knl: A :class:`loopy.LoopKernel`. + + .. arg exclude_empty: A :class:`bool` specifying whether to + exclude the empty set. + + .. arg non_conc_inames: A :class:`set` of non-concurrent inames + which may be provided if already known. + + .. return: A :class:`set` of every unique subset of non-concurrent + inames used in every instruction in a :class:`loopy.LoopKernel`. + + """ if non_conc_inames is None: _, non_conc_inames = get_concurrent_inames(knl) -- GitLab From 1727e2093ff2ebdaa4d45539405e59862b1b2aec Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 16 Sep 2019 22:50:31 -0500 Subject: [PATCH 146/415] docstring for get_orderings_of_length_n() --- sched_check_utils.py | 24 ++++++++++++++++++------ 1 file changed, 18 insertions(+), 6 deletions(-) diff --git a/sched_check_utils.py b/sched_check_utils.py index 15cd89380..4f59d4a56 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -369,7 +369,7 @@ def get_all_nonconcurrent_insn_iname_subsets( which may be provided if already known. .. return: A :class:`set` of every unique subset of non-concurrent - inames used in every instruction in a :class:`loopy.LoopKernel`. + inames used in any instruction in a :class:`loopy.LoopKernel`. """ @@ -437,13 +437,25 @@ def _generate_orderings_starting_w_prefix( def get_orderings_of_length_n( allowed_after_dict, required_length, return_first_found=False): - # TODO add docstring - # get all orderings that are *explicitly* allowed by allowed_after_dict - # i.e., if we know a->b and c->b, we don't know enough to return a->c->b + """Return all orderings found in tree represented by `allowed_after_dict`. - # note: if the set for a dict key is empty, nothing allowed to come after + .. arg allowed_after_dict: A :class:`dict` mapping each :class:`string` + names to a :class:`set` of names that are allowed to come after + that name. - # alowed_after_dict = {str: set(str)} + .. arg required_length: A :class:`int` representing the length required + for all orderings. Orderings not matching the required length will + not be returned. + + .. arg return_first_found: A :class:`bool` specifying whether to return + the first valid ordering found. + + .. return: A :class:`set` of all orderings that are *explicitly* allowed + by the tree represented by `allowed_after_dict`. I.e., if we know + a->b and c->b, we don't know enough to return a->c->b. Note that + if the set for a dict key is empty, nothing is allowed to come after. 
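For illustration, a small worked example of the "explicitly allowed" semantics described above (hypothetical iname names; the return container is assumed here to be a set of tuples):

from schedule_checker.sched_check_utils import get_orderings_of_length_n

# i may be followed by j or k, j may be followed by k, nothing follows k
allowed_after = {"i": {"j", "k"}, "j": {"k"}, "k": set()}
orderings = get_orderings_of_length_n(allowed_after, required_length=3)
# only the chain i -> j -> k is explicitly allowed at length 3;
# i -> k -> j is not, since nothing is allowed to follow k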
+ + """ orderings = set() _generate_orderings_starting_w_prefix( -- GitLab From 711e20fa220e5750286fc2b15218dba0dfb156fb Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 16 Sep 2019 22:55:42 -0500 Subject: [PATCH 147/415] docstring for create_explicit_map_from_tuples() --- sched_check_utils.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/sched_check_utils.py b/sched_check_utils.py index 4f59d4a56..575923753 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -472,7 +472,13 @@ def get_orderings_of_length_n( def create_explicit_map_from_tuples(tuple_pairs, space): - # TODO add docstring + """Return a :class:`islpy.Map` in :class:`islpy.Space` space + mapping tup_in->tup_out for each `(tup_in, tup_out)` pair + in `tuple_pairs`, where `tup_in` and `tup_out` are + tuples of :class:`int` values to be assigned to the + corresponding dimension variables in `space`. + + """ dim_type = isl.dim_type individual_maps = [] -- GitLab From f8c3cfd13dd47f4ba616b091b16682825cde9fd9 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 17 Sep 2019 12:15:00 -0500 Subject: [PATCH 148/415] switching example kernel --- example_pairwise_schedule_validity.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/example_pairwise_schedule_validity.py b/example_pairwise_schedule_validity.py index 4a62c791d..0c5b1f61f 100644 --- a/example_pairwise_schedule_validity.py +++ b/example_pairwise_schedule_validity.py @@ -5,8 +5,8 @@ from schedule_checker import check_schedule_validity # Choose kernel ---------------------------------------------------------- -#knl_choice = "example" -knl_choice = "unused_inames" +knl_choice = "example" +#knl_choice = "unused_inames" #knl_choice = "matmul" #knl_choice = "scan" #knl_choice = "dependent_domain" -- GitLab From c8224a278da80ddbb18b1b2dfbf770373d1acd12 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 12 Nov 2019 16:46:31 -0600 Subject: [PATCH 149/415] WIP: adding wave equation example --- dependency.py | 124 ++++++++++++++ example_wave_equation.py | 338 +++++++++++++++++++++++++++++++++++++++ 2 files changed, 462 insertions(+) create mode 100644 example_wave_equation.py diff --git a/dependency.py b/dependency.py index 50fefa1cb..d30f1a80e 100644 --- a/dependency.py +++ b/dependency.py @@ -367,6 +367,130 @@ def create_dependency_constraint( return map_with_loop_domain_constraints +def _create_5pt_stencil_dependency_constraint( + dom_before_constraint_set, + dom_after_constraint_set, + sid_before, + sid_after, + space_iname, + time_iname, + unused_param_name, + statement_var_name, + statement_var_pose=0, + all_dom_inames_ordered=None, + ): + + from schedule_checker.sched_check_utils import ( + make_islvars_with_var_primes, + append_apostrophes, + add_dims_to_isl_set, + reorder_dims_by_name, + create_new_isl_set_with_primes, + ) + # This function uses the dependency given to create the following constraint: + # Statement [s,i,j] comes before statement [s',i',j'] iff + + from schedule_checker.sched_check_utils import ( + list_var_names_in_isl_sets, + ) + if all_dom_inames_ordered is None: + all_dom_inames_ordered = list_var_names_in_isl_sets( + [dom_before_constraint_set, dom_after_constraint_set]) + + # create some (ordered) isl vars to use, e.g., {s, i, j, s', i', j'} + islvars = make_islvars_with_var_primes( + [statement_var_name]+all_dom_inames_ordered, + [unused_param_name]) + statement_var_name_prime = statement_var_name+"'" + + # get (ordered) list of unused before/after inames + inames_before_unused = [] + for iname in 
all_dom_inames_ordered: + if iname not in dom_before_constraint_set.get_var_names(isl.dim_type.out): + inames_before_unused.append(iname) + inames_after_unused = [] + for iname in all_dom_inames_ordered: + if iname not in dom_after_constraint_set.get_var_names(isl.dim_type.out): + inames_after_unused.append(iname + "'") + + # initialize constraints to False + # this will disappear as soon as we add a constraint + #all_constraints_set = islvars[0].eq_set(islvars[0] + 1) + + space_iname_prime = space_iname + "'" + time_iname_prime = time_iname + "'" + one = islvars[0] + 1 + two = islvars[0] + 2 + # global: + """ + constraint_set = ( + islvars[time_iname_prime].gt_set(islvars[time_iname]) & + ( + (islvars[space_iname_prime]-two).lt_set(islvars[space_iname]) & + islvars[space_iname].lt_set(islvars[space_iname_prime]+two) + ) + | + islvars[time_iname_prime].gt_set(islvars[time_iname] + one) & + islvars[space_iname].eq_set(islvars[space_iname_prime]) + ) + """ + # local dep: + constraint_set = ( + islvars[time_iname_prime].eq_set(islvars[time_iname] + one) & + ( + (islvars[space_iname_prime]-two).lt_set(islvars[space_iname]) & + islvars[space_iname].lt_set(islvars[space_iname_prime]+two) + ) + | + islvars[time_iname_prime].eq_set(islvars[time_iname] + two) & + islvars[space_iname].eq_set(islvars[space_iname_prime]) + ) + + + # set unused vars == unused dummy param + for iname in inames_before_unused+inames_after_unused: + constraint_set = constraint_set & islvars[iname].eq_set( + islvars[unused_param_name]) + + # set statement_var_name == statement # + constraint_set = constraint_set & islvars[statement_var_name].eq_set( + islvars[0]+sid_before) + constraint_set = constraint_set & islvars[statement_var_name_prime].eq_set( + islvars[0]+sid_after) + + # convert constraint set to map + all_constraints_map = _convert_constraint_set_to_map( + constraint_set, len(all_dom_inames_ordered) + 1) # +1 for statement var + + # now apply domain sets to constraint variables + + # add statement variable to doms to enable intersection + domain_to_intersect = add_dims_to_isl_set( + dom_before_constraint_set, isl.dim_type.out, + [statement_var_name], statement_var_pose) + range_constraint_set = create_new_isl_set_with_primes(dom_after_constraint_set) + range_to_intersect = add_dims_to_isl_set( + range_constraint_set, isl.dim_type.out, + [statement_var_name_prime], statement_var_pose) + + # insert inames missing from doms to enable intersection + domain_to_intersect = reorder_dims_by_name( + domain_to_intersect, isl.dim_type.out, + [statement_var_name] + all_dom_inames_ordered, + add_missing=True) + range_to_intersect = reorder_dims_by_name( + range_to_intersect, + isl.dim_type.out, + append_apostrophes([statement_var_name] + all_dom_inames_ordered), + add_missing=True) + + # intersect doms + map_with_loop_domain_constraints = all_constraints_map.intersect_domain( + domain_to_intersect).intersect_range(range_to_intersect) + + return map_with_loop_domain_constraints + + def create_dependencies_from_legacy_knl(knl): """Return a list of :class:`StatementPairDependySet` instances created for a :class:`loopy.LoopKernel` containing legacy depencencies. 
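For illustration only, a sketch of how this helper might be driven from a small hypothetical legacy kernel (the kernel and instruction ids below are made up):

import loopy as lp
from schedule_checker.dependency import create_dependencies_from_legacy_knl

knl = lp.make_kernel(
    "{[i]: 0<=i<n}",
    """
    a[i] = 3.14          {id=insn_a}
    b[i] = a[i] + 1      {id=insn_b, dep=insn_a}
    """,
    lang_version=(2018, 2))
# legacy dependencies are read off the preprocessed kernel
knl = lp.preprocess_kernel(knl)
statement_pair_dep_sets = create_dependencies_from_legacy_knl(knl)
for dep_set in statement_pair_dep_sets:
    print(dep_set)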
Create diff --git a/example_wave_equation.py b/example_wave_equation.py new file mode 100644 index 000000000..1fe656544 --- /dev/null +++ b/example_wave_equation.py @@ -0,0 +1,338 @@ +import loopy as lp +import numpy as np +from loopy.kernel_stat_collector import KernelStatCollector +from loopy.kernel_stat_collector import KernelStatOptions as kso # noqa +from schedule_checker import check_schedule_validity +from schedule_checker.sched_check_utils import ( + prettier_map_string, +) + +# Make kernel ---------------------------------------------------------- + +# u[x,t+1] = 2*u[x,t] - u[x,t-1] + c*(dt/dx)**2*(u[x+1,t] - 2*u[x,t] + u[x-1,t]) +knl = lp.make_kernel( + "{[x,t]: 0<=x lex time):") + #print(sched_map_symbolic.space) + #print("-"*80) + +# get map representing lexicographic ordering +lex_order_map_symbolic = sched.get_lex_order_map_for_symbolic_sched() +""" +if verbose: + print("lex order map symbolic:") + print(prettier_map_string(lex_order_map_symbolic)) + print("space (lex time -> lex time):") + print(lex_order_map_symbolic.space) + print("-"*80) +""" + +# create statement instance ordering, +# maps each statement instance to all statement instances occuring later +sio = get_statement_ordering_map( + sched_map_symbolic, lex_order_map_symbolic) + +if verbose: + print("statement instance ordering:") + print(prettier_map_string(sio)) + print("SIO space (statement instances -> statement instances):") + print(sio.space) + print("-"*80) + +# create a map representing constraints from the dependency, +# maps statement instance to all statement instances that must occur later +""" +constraint_map = create_dependency_constraint( + statement_pair_dep_set, + inames_domain_before, + inames_domain_after, + knl.loop_priority, + lp_insn_id_to_lex_sched_id, + sched.unused_param_name, + sched.statement_var_name, + ) +""" + +# align constraint map spaces to match sio so we can compare them +if verbose: + print("constraint map space (before aligning):") + print(constraint_map.space) + +# align params +aligned_constraint_map = constraint_map.align_params(sio.space) + +# align in_ dims +import islpy as isl +from schedule_checker.sched_check_utils import ( + reorder_dims_by_name, + append_apostrophes, +) +sio_in_names = sio.space.get_var_names(isl.dim_type.in_) +aligned_constraint_map = reorder_dims_by_name( + aligned_constraint_map, + isl.dim_type.in_, + sio_in_names, + add_missing=False, + new_names_are_permutation_only=True, + ) + +# align out dims +aligned_constraint_map = reorder_dims_by_name( + aligned_constraint_map, + isl.dim_type.out, + append_apostrophes(sio_in_names), + # TODO sio out names are only pretending to have apostrophes; confusing + add_missing=False, + new_names_are_permutation_only=True, + ) + +if verbose: + print("constraint map space (after aligning):") + print(aligned_constraint_map.space) + print("constraint map:") + print(prettier_map_string(aligned_constraint_map)) + +assert aligned_constraint_map.space == sio.space +assert ( + aligned_constraint_map.space.get_var_names(isl.dim_type.in_) + == sio.space.get_var_names(isl.dim_type.in_)) +assert ( + aligned_constraint_map.space.get_var_names(isl.dim_type.out) + == append_apostrophes(sio.space.get_var_names(isl.dim_type.out))) +assert ( + aligned_constraint_map.space.get_var_names(isl.dim_type.param) + == sio.space.get_var_names(isl.dim_type.param)) + +if not aligned_constraint_map.is_subset(sio): + + sched_is_valid = False + + if verbose: + print("================ constraint check failure =================") + 
print("constraint map not subset of SIO") + print("dependency:") + print(statement_pair_dep_set) + print("statement instance ordering:") + print(prettier_map_string(sio)) + print("constraint_map.gist(sio):") + print(aligned_constraint_map.gist(sio)) + print("sio.gist(constraint_map)") + print(sio.gist(aligned_constraint_map)) + print("loop priority known:") + print(preprocessed_knl.loop_priority) + """ + from schedule_checker.sched_check_utils import ( + get_concurrent_inames, + ) + conc_inames, non_conc_inames = get_concurrent_inames(scheduled_knl) + print("concurrent inames:", conc_inames) + print("sequential inames:", non_conc_inames) + print("constraint map space (stmt instances -> stmt instances):") + print(aligned_constraint_map.space) + print("SIO space (statement instances -> statement instances):") + print(sio.space) + print("constraint map:") + print(prettier_map_string(aligned_constraint_map)) + print("statement instance ordering:") + print(prettier_map_string(sio)) + print("{insn id -> sched sid int} dict:") + print(lp_insn_id_to_lex_sched_id) + """ + print("===========================================================") + + +print("is sched valid? constraint map subset of SIO?") +print(sched_is_valid) + + + + +""" +knl = lp.split_iname(knl, "i", bsize, outer_tag="g.0", inner_tag="l.1") +knl = lp.split_iname(knl, "j", bsize, outer_tag="g.1", inner_tag="l.0") +knl = lp.split_iname(knl, "k", bsize) +knl = lp.add_prefetch(knl, "a", ["k_inner", "i_inner"], default_tag="l.auto") +knl = lp.add_prefetch(knl, "b", ["j_inner", "k_inner"], default_tag="l.auto") +knl = lp.prioritize_loops(knl, "k_outer,k_inner") +""" + +''' +# (U_n^{k+1}-U_n^k)/dt = C*(U_{n+1}^k-U_n^k)/dx +# U_n^{k+1} = U_n^k + dt/dx*C*(U_{n+1}^k-U_n^k) +knl = lp.make_kernel( + "{[i,k]: 0<=i Date: Wed, 13 Nov 2019 14:28:00 -0600 Subject: [PATCH 150/415] hacked together some schedule checking for map_domain wave equation example --- example_wave_equation.py | 433 ++++++++++++++++++++++++++++++--------- 1 file changed, 338 insertions(+), 95 deletions(-) diff --git a/example_wave_equation.py b/example_wave_equation.py index 1fe656544..b401e52fa 100644 --- a/example_wave_equation.py +++ b/example_wave_equation.py @@ -1,91 +1,100 @@ import loopy as lp +from loopy import generate_code_v2 +from loopy import get_one_scheduled_kernel +from loopy.kernel import KernelState +from loopy import preprocess_kernel import numpy as np -from loopy.kernel_stat_collector import KernelStatCollector -from loopy.kernel_stat_collector import KernelStatOptions as kso # noqa +import islpy as isl +#from loopy.kernel_stat_collector import KernelStatCollector +#from loopy.kernel_stat_collector import KernelStatOptions as kso # noqa from schedule_checker import check_schedule_validity from schedule_checker.sched_check_utils import ( prettier_map_string, + reorder_dims_by_name, + append_apostrophes, +) +from schedule_checker.dependency import ( + create_dependencies_from_legacy_knl, + create_dependency_constraint, +) +from dependency import _create_5pt_stencil_dependency_constraint +from schedule_checker.schedule import LexSchedule +from schedule_checker.lexicographic_order_map import ( + get_statement_ordering_map, ) # Make kernel ---------------------------------------------------------- # u[x,t+1] = 2*u[x,t] - u[x,t-1] + c*(dt/dx)**2*(u[x+1,t] - 2*u[x,t] + u[x-1,t]) +# mine, works: +# "{[x,t]: 1<=x {[ix, it]: 1<=ix {[ix, it] -> [tx, tt, tparity, itt, itx]: " + "16*(tx - tt + tparity) + itx - itt = ix - it and " + "16*(tx + tt) + itt + itx = ix + it and 
" + "0<=tparity<2 and 0 <= itx - itt < 16 and 0 <= itt+itx < 16}") +m2 = isl.BasicMap( + "[nx,nt,unused] -> {[statement, ix, it] -> [statement'=statement, tx, tt, tparity, itt, itx]: " + "16*(tx - tt + tparity) + itx - itt = ix - it and " + "16*(tx + tt) + itt + itx = ix + it and " + "0<=tparity<2 and 0 <= itx - itt < 16 and 0 <= itt+itx < 16}") +m2_prime = isl.BasicMap( + "[nx,nt,unused] -> {[statement, ix, it] -> [statement'=statement, tx', tt', tparity', itt', itx']: " + "16*(tx' - tt' + tparity') + itx' - itt' = ix - it and " + "16*(tx' + tt') + itt' + itx' = ix + it and " + "0<=tparity'<2 and 0 <= itx' - itt' < 16 and 0 <= itt'+itx' < 16}") + +print("maping:") +print(prettier_map_string(m2)) + +# new kernel +knl = lp.map_domain(ref_knl, m) +knl = lp.prioritize_loops(knl, "tt,tparity,tx,itt,itx") +print("code after mapping:") +print(generate_code_v2(knl).device_code()) + +#print("constraint_map before apply_range:") +#print(prettier_map_string(constraint_map)) +mapped_constraint_map = constraint_map.apply_range(m2_prime) +#print("constraint_map after apply_range:") +#print(prettier_map_string(mapped_constraint_map)) +mapped_constraint_map = mapped_constraint_map.apply_domain(m2) +#print("constraint_map after apply_domain:") +#print(prettier_map_string(mapped_constraint_map)) +#1/0 + +statement_inames_mapped = set(["itx","itt","tt","tparity","tx"]) +sid_before = 0 +sid_after = 0 + +if knl.state < KernelState.PREPROCESSED: + preprocessed_knl = preprocess_kernel(knl) +else: + preprocessed_knl = knl +inames_domain_before_mapped = preprocessed_knl.get_inames_domain(statement_inames_mapped) +inames_domain_after_mapped = preprocessed_knl.get_inames_domain(statement_inames_mapped) +print("(mapped) inames_domain_before:", inames_domain_before_mapped) +print("(mapped) inames_domain_after:", inames_domain_after_mapped) + +# ============================================= + +verbose = False +verbose = True + +# get a schedule to check +if preprocessed_knl.schedule is None: + scheduled_knl = get_one_scheduled_kernel(preprocessed_knl) +else: + scheduled_knl = preprocessed_knl + +# {{{ verbose + +if verbose: + # Print kernel info ------------------------------------------------------ + print("="*80) + print("Kernel:") + print(scheduled_knl) + #print(generate_code_v2(scheduled_knl).device_code()) + print("="*80) + print("Iname tags: %s" % (scheduled_knl.iname_to_tags)) + print("="*80) + print("Loopy schedule:") + for sched_item in scheduled_knl.schedule: + print(sched_item) + #print("scheduled iname order:") + #print(sched_iname_order) + + print("="*80) + print("inames_domain_before_mapped:", inames_domain_before_mapped) + print("inames_domain_after_mapped:", inames_domain_after_mapped) + +# }}} + +# Create a mapping of {statement instance: lex point} +# including only instructions involved in this dependency +sched = LexSchedule(scheduled_knl, include_only_insn_ids=[ + str(sid_before), + str(sid_after) + ]) +# Get an isl map representing the LexSchedule; +# this requires the iname domains + +assert len(sched) in [1, 2] +if len(sched) == 1: + assert inames_domain_before_mapped == inames_domain_after_mapped + +# get a mapping from lex schedule id to relevant inames domain +sid_to_dom = { + sid_before: inames_domain_before_mapped, + sid_after: inames_domain_after_mapped, + } + +sched_map_symbolic = sched.create_symbolic_isl_map(sid_to_dom) + +# {{{ verbose + +if verbose: + print("sid_to_dom:\n", sid_to_dom) + print("LexSchedule after creating symbolic isl map:") + print(sched) + print("LexSched:") + 
print(prettier_map_string(sched_map_symbolic)) + #print("space (statement instances -> lex time):") + #print(sched_map_symbolic.space) + #print("-"*80) + +# }}} + +# get map representing lexicographic ordering +lex_order_map_symbolic = sched.get_lex_order_map_for_symbolic_sched() + +# {{{ verbose """ -knl = lp.split_iname(knl, "i", bsize, outer_tag="g.0", inner_tag="l.1") -knl = lp.split_iname(knl, "j", bsize, outer_tag="g.1", inner_tag="l.0") -knl = lp.split_iname(knl, "k", bsize) -knl = lp.add_prefetch(knl, "a", ["k_inner", "i_inner"], default_tag="l.auto") -knl = lp.add_prefetch(knl, "b", ["j_inner", "k_inner"], default_tag="l.auto") -knl = lp.prioritize_loops(knl, "k_outer,k_inner") +if verbose: + print("lex order map symbolic:") + print(prettier_map_string(lex_order_map_symbolic)) + print("space (lex time -> lex time):") + print(lex_order_map_symbolic.space) + print("-"*80) """ +# }}} + +# create statement instance ordering, +# maps each statement instance to all statement instances occuring later +sio = get_statement_ordering_map( + sched_map_symbolic, lex_order_map_symbolic) + +# {{{ verbose + +if verbose: + print("statement instance ordering:") + print(prettier_map_string(sio)) + print("SIO space (statement instances -> statement instances):") + print(sio.space) + print("-"*80) + +if verbose: + print("constraint map space (before aligning):") + print(constraint_map.space) + +# }}} + +# align constraint map spaces to match sio so we can compare them +# align params +aligned_constraint_map = mapped_constraint_map.align_params(sio.space) +#print(prettier_map_string(aligned_constraint_map)) + +# align in_ dims +sio_in_names = sio.space.get_var_names(isl.dim_type.in_) +aligned_constraint_map = reorder_dims_by_name( + aligned_constraint_map, + isl.dim_type.in_, + sio_in_names, + add_missing=False, + new_names_are_permutation_only=True, + ) + +#print(".....") +#print(aligned_constraint_map.space) +#print("...") +#print(set(aligned_constraint_map.get_var_names(isl.dim_type.out))) +#ppp = append_apostrophes(sio_in_names) +#print(ppp) +#print(set(aligned_constraint_map.get_var_names(isl.dim_type.out)).issubset(ppp)) +# align out dims +aligned_constraint_map = reorder_dims_by_name( + aligned_constraint_map, + isl.dim_type.out, + #append_apostrophes(sio_in_names), + sio_in_names, # TODO WHY no apostrophes? + # TODO sio out names are only pretending to have apostrophes; confusing + add_missing=False, + new_names_are_permutation_only=True, +) + +# {{{ verbose + +if verbose: + print("constraint map space (after aligning):") + print(aligned_constraint_map.space) + print("constraint map:") + print(prettier_map_string(aligned_constraint_map)) + +# }}} + +assert aligned_constraint_map.space == sio.space +assert ( + aligned_constraint_map.space.get_var_names(isl.dim_type.in_) + == sio.space.get_var_names(isl.dim_type.in_)) +assert ( + aligned_constraint_map.space.get_var_names(isl.dim_type.out) + == append_apostrophes(sio.space.get_var_names(isl.dim_type.out)) + ) or ( # TODO why no appostrophes? 
+ aligned_constraint_map.space.get_var_names(isl.dim_type.out) + == sio.space.get_var_names(isl.dim_type.out) + ) +assert ( + aligned_constraint_map.space.get_var_names(isl.dim_type.param) + == sio.space.get_var_names(isl.dim_type.param)) + +sched_is_valid = aligned_constraint_map.is_subset(sio) + +if not sched_is_valid: + + # {{{ verbose + + if verbose: + print("================ constraint check failure =================") + print("constraint map not subset of SIO") + print("dependency:") + print(prettier_map_string(constraint_map)) + print("statement instance ordering:") + print(prettier_map_string(sio)) + print("constraint_map.gist(sio):") + print(aligned_constraint_map.gist(sio)) + print("sio.gist(constraint_map)") + print(sio.gist(aligned_constraint_map)) + print("loop priority known:") + print(preprocessed_knl.loop_priority) + """ + from schedule_checker.sched_check_utils import ( + get_concurrent_inames, + ) + conc_inames, non_conc_inames = get_concurrent_inames(scheduled_knl) + print("concurrent inames:", conc_inames) + print("sequential inames:", non_conc_inames) + print("constraint map space (stmt instances -> stmt instances):") + print(aligned_constraint_map.space) + print("SIO space (statement instances -> statement instances):") + print(sio.space) + print("constraint map:") + print(prettier_map_string(aligned_constraint_map)) + print("statement instance ordering:") + print(prettier_map_string(sio)) + print("{insn id -> sched sid int} dict:") + print(lp_insn_id_to_lex_sched_id) + """ + print("===========================================================") + + # }}} + +print("is sched valid? constraint map subset of SIO?") +print(sched_is_valid) + + + + + ''' # (U_n^{k+1}-U_n^k)/dt = C*(U_{n+1}^k-U_n^k)/dx # U_n^{k+1} = U_n^k + dt/dx*C*(U_{n+1}^k-U_n^k) -knl = lp.make_kernel( - "{[i,k]: 0<=i Date: Wed, 13 Nov 2019 19:36:24 -0600 Subject: [PATCH 151/415] update fixed mapping, enforce consistent domain/range variable ordering --- example_wave_equation.py | 58 +++++++++++++++++++++++++++++++--------- 1 file changed, 46 insertions(+), 12 deletions(-) diff --git a/example_wave_equation.py b/example_wave_equation.py index b401e52fa..f167cd865 100644 --- a/example_wave_equation.py +++ b/example_wave_equation.py @@ -43,6 +43,16 @@ knl = lp.make_kernel( assumptions="nx,nt >= 3", lang_version=(2018, 2), ) +''' +ref = lp.make_kernel( + "[nx,nt] -> {[ix, it]: 1<=ix {[ix, it] -> [tx, tt, tparity, itt, itx]: " "16*(tx - tt + tparity) + itx - itt = ix - it and " @@ -294,6 +308,26 @@ m2_prime = isl.BasicMap( "16*(tx' - tt' + tparity') + itx' - itt' = ix - it and " "16*(tx' + tt') + itt' + itx' = ix + it and " "0<=tparity'<2 and 0 <= itx' - itt' < 16 and 0 <= itt'+itx' < 16}") +""" + +# new +m = isl.BasicMap( + "[nx,nt] -> {[ix, it] -> [tx, tt, tparity, itt, itx]: " + "16*(tx - tt) + itx - itt = ix - it and " + "16*(tx + tt + tparity) + itt + itx = ix + it and " + "0<=tparity<2 and 0 <= itx - itt < 16 and 0 <= itt+itx < 16}") +m2 = isl.BasicMap( + "[nx,nt,unused] -> {[statement, ix, it] -> [statement'=statement, tx, tt, tparity, itt, itx]: " + "16*(tx - tt) + itx - itt = ix - it and " + "16*(tx + tt + tparity) + itt + itx = ix + it and " + "0<=tparity<2 and 0 <= itx - itt < 16 and 0 <= itt+itx < 16}") +m2_prime = isl.BasicMap( + "[nx,nt,unused] -> {[statement, ix, it] -> [statement'=statement, tx', tt', tparity', itt', itx']: " + "16*(tx' - tt') + itx' - itt' = ix - it and " + "16*(tx' + tt' + tparity') + itt' + itx' = ix + it and " + "0<=tparity'<2 and 0 <= itx' - itt' < 16 and 0 <= itt'+itx' < 
16}") + +# TODO note order must match statement_iname_premap_order print("maping:") print(prettier_map_string(m2)) @@ -301,18 +335,18 @@ print(prettier_map_string(m2)) # new kernel knl = lp.map_domain(ref_knl, m) knl = lp.prioritize_loops(knl, "tt,tparity,tx,itt,itx") -print("code after mapping:") -print(generate_code_v2(knl).device_code()) - -#print("constraint_map before apply_range:") -#print(prettier_map_string(constraint_map)) -mapped_constraint_map = constraint_map.apply_range(m2_prime) -#print("constraint_map after apply_range:") -#print(prettier_map_string(mapped_constraint_map)) +#print("code after mapping:") +#print(generate_code_v2(knl).device_code()) + +print("constraint_map before apply_range:") +print(prettier_map_string(constraint_map)) +#mapped_constraint_map = constraint_map.apply_range(m2_prime) +mapped_constraint_map = constraint_map.apply_range(m2) +print("constraint_map after apply_range:") +print(prettier_map_string(mapped_constraint_map)) mapped_constraint_map = mapped_constraint_map.apply_domain(m2) -#print("constraint_map after apply_domain:") -#print(prettier_map_string(mapped_constraint_map)) -#1/0 +print("constraint_map after apply_domain:") +print(prettier_map_string(mapped_constraint_map)) statement_inames_mapped = set(["itx","itt","tt","tparity","tx"]) sid_before = 0 -- GitLab From ed4f308be528a56e44e31ed5c89dc6dc9d9a0e4b Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 25 Nov 2019 10:37:35 -0600 Subject: [PATCH 152/415] initial stab at arbitrary dependency constraint construction --- dependency.py | 173 +++++++++++++++++++++++++++++++++++++++ example_wave_equation.py | 38 ++++++++- sched_check_utils.py | 24 ++++-- 3 files changed, 226 insertions(+), 9 deletions(-) diff --git a/dependency.py b/dependency.py index d30f1a80e..197815951 100644 --- a/dependency.py +++ b/dependency.py @@ -441,6 +441,10 @@ def _create_5pt_stencil_dependency_constraint( (islvars[space_iname_prime]-two).lt_set(islvars[space_iname]) & islvars[space_iname].lt_set(islvars[space_iname_prime]+two) ) + #( + #(islvars[space_iname]-two).lt_set(islvars[space_iname_prime]) & + # islvars[space_iname_prime].lt_set(islvars[space_iname]+two) + #) | islvars[time_iname_prime].eq_set(islvars[time_iname] + two) & islvars[space_iname].eq_set(islvars[space_iname_prime]) @@ -491,6 +495,175 @@ def _create_5pt_stencil_dependency_constraint( return map_with_loop_domain_constraints +def create_arbitrary_dependency_constraint( + constraint_str, + dom_before_constraint_set, + dom_after_constraint_set, + sid_before, + sid_after, + unused_param_name, + statement_var_name, + statement_var_pose=0, + all_dom_inames_ordered=None, + ): + + from schedule_checker.sched_check_utils import ( + make_islvars_with_var_primes, + #append_apostrophes, + append_marker_to_strings, + add_dims_to_isl_set, + reorder_dims_by_name, + create_new_isl_set_with_primes, + ) + # This function uses the constraint given to create the following map: + # Statement [s,i,j] comes before statement [s',i',j'] iff + + from schedule_checker.sched_check_utils import ( + list_var_names_in_isl_sets, + ) + if all_dom_inames_ordered is None: + all_dom_inames_ordered = list_var_names_in_isl_sets( + [dom_before_constraint_set, dom_after_constraint_set]) + + # create some (ordered) isl vars to use, e.g., {s, i, j, s', i', j'} + islvars = make_islvars_with_var_primes( + [statement_var_name]+all_dom_inames_ordered, + [unused_param_name], + marker="p") # TODO figure out before/after notation + #statement_var_name_prime = statement_var_name+"'" + 
statement_var_name_prime = statement_var_name+"p" # TODO figure out before/after notation + + # get (ordered) list of unused before/after inames + inames_before_unused = [] + for iname in all_dom_inames_ordered: + if iname not in dom_before_constraint_set.get_var_names(isl.dim_type.out): + inames_before_unused.append(iname) + inames_after_unused = [] + for iname in all_dom_inames_ordered: + if iname not in dom_after_constraint_set.get_var_names(isl.dim_type.out): + #inames_after_unused.append(iname + "'") + inames_after_unused.append(iname + "p") # TODO figure out before/after notation + + # initialize constraints to False + # this will disappear as soon as we add a constraint + all_constraints_set = islvars[0].eq_set(islvars[0] + 1) + space = all_constraints_set.space + from pymbolic import parse + from loopy.symbolic import aff_from_expr + + or_constraint_strs = constraint_str.split("or") + def _quant(s): + return "(" + s + ")" + def _diff(s0, s1): + return _quant(s0) + "-" + _quant(s1) + + for or_constraint_str in or_constraint_strs: + and_constraint_strs = or_constraint_str.split("and") + #conj_constraint = islvars[0].eq_set(islvars[0]) # init to true + conj_constraint = isl.BasicSet.universe(space) + for cons_str in and_constraint_strs: + if "<=" in cons_str: + lhs, rhs = cons_str.split("<=") + conj_constraint = conj_constraint.add_constraint( + isl.Constraint.inequality_from_aff( + aff_from_expr(space, parse(_diff(rhs,lhs))))) + # TODO something more robust than this string meddling^ + elif ">=" in cons_str: + lhs, rhs = cons_str.split(">=") + conj_constraint = conj_constraint.add_constraint( + isl.Constraint.inequality_from_aff( + aff_from_expr(space, parse(_diff(lhs,rhs))))) + elif "<" in cons_str: + lhs, rhs = cons_str.split("<") + conj_constraint = conj_constraint.add_constraint( + isl.Constraint.inequality_from_aff( + aff_from_expr(space, parse(_diff(rhs, lhs) + "- 1")))) + elif ">" in cons_str: + lhs, rhs = cons_str.split(">") + conj_constraint = conj_constraint.add_constraint( + isl.Constraint.inequality_from_aff( + aff_from_expr(space, parse(_diff(lhs, rhs) + "- 1")))) + elif "=" in cons_str: + lhs, rhs = cons_str.split("=") + conj_constraint = conj_constraint.add_constraint( + isl.Constraint.equality_from_aff( + aff_from_expr(space, parse(_diff(lhs, rhs))))) + else: + 1/0 + all_constraints_set = all_constraints_set | conj_constraint + + #TODO deleteme + """ + space_iname = "ix" + time_iname = "it" + + space_iname_prime = space_iname + "'" + time_iname_prime = time_iname + "'" + one = islvars[0] + 1 + two = islvars[0] + 2 + # local dep: + constraint_set = ( + islvars[time_iname_prime].eq_set(islvars[time_iname] + one) & + ( + (islvars[space_iname_prime]-two).lt_set(islvars[space_iname]) & + islvars[space_iname].lt_set(islvars[space_iname_prime]+two) + ) + #( + #(islvars[space_iname]-two).lt_set(islvars[space_iname_prime]) & + # islvars[space_iname_prime].lt_set(islvars[space_iname]+two) + #) + | + islvars[time_iname_prime].eq_set(islvars[time_iname] + two) & + islvars[space_iname].eq_set(islvars[space_iname_prime]) + ) + """ + + # set unused vars == unused dummy param + for iname in inames_before_unused+inames_after_unused: + all_constraints_set = all_constraints_set & islvars[iname].eq_set( + islvars[unused_param_name]) + + # set statement_var_name == statement # + all_constraints_set = all_constraints_set & islvars[statement_var_name].eq_set( + islvars[0]+sid_before) + all_constraints_set = all_constraints_set & islvars[statement_var_name_prime].eq_set( + 
islvars[0]+sid_after) + + # convert constraint set to map + all_constraints_map = _convert_constraint_set_to_map( + all_constraints_set, len(all_dom_inames_ordered) + 1) # +1 for statement var + + # now apply domain sets to constraint variables + + # add statement variable to doms to enable intersection + domain_to_intersect = add_dims_to_isl_set( + dom_before_constraint_set, isl.dim_type.out, + [statement_var_name], statement_var_pose) + range_constraint_set = create_new_isl_set_with_primes( + dom_after_constraint_set, + marker="p") # TODO figure out before/after notation + range_to_intersect = add_dims_to_isl_set( + range_constraint_set, isl.dim_type.out, + [statement_var_name_prime], statement_var_pose) + + # insert inames missing from doms to enable intersection + domain_to_intersect = reorder_dims_by_name( + domain_to_intersect, isl.dim_type.out, + [statement_var_name] + all_dom_inames_ordered, + add_missing=True) + range_to_intersect = reorder_dims_by_name( + range_to_intersect, + isl.dim_type.out, + append_marker_to_strings([statement_var_name] + all_dom_inames_ordered, "p"), # TODO figure out before/after notation + add_missing=True) + + # intersect doms + map_with_loop_domain_constraints = all_constraints_map.intersect_domain( + domain_to_intersect).intersect_range(range_to_intersect) + + return map_with_loop_domain_constraints + + def create_dependencies_from_legacy_knl(knl): """Return a list of :class:`StatementPairDependySet` instances created for a :class:`loopy.LoopKernel` containing legacy depencencies. Create diff --git a/example_wave_equation.py b/example_wave_equation.py index f167cd865..2be546a78 100644 --- a/example_wave_equation.py +++ b/example_wave_equation.py @@ -16,6 +16,7 @@ from schedule_checker.sched_check_utils import ( from schedule_checker.dependency import ( create_dependencies_from_legacy_knl, create_dependency_constraint, + create_arbitrary_dependency_constraint, ) from dependency import _create_5pt_stencil_dependency_constraint from schedule_checker.schedule import LexSchedule @@ -75,6 +76,7 @@ inames_domain_after = preprocessed_knl.get_inames_domain(statement_inames_premap print("(unmapped) inames_domain_before:", inames_domain_before) print("(unmapped) inames_domain_after:", inames_domain_after) +""" constraint_map = _create_5pt_stencil_dependency_constraint( inames_domain_before, inames_domain_after, @@ -90,6 +92,37 @@ constraint_map = _create_5pt_stencil_dependency_constraint( ) print("constraint_map before mapping:") print(prettier_map_string(constraint_map)) +1/0 +""" +""" + islvars[time_iname_prime].eq_set(islvars[time_iname] + one) & + ( + (islvars[space_iname_prime]-two).lt_set(islvars[space_iname]) & + islvars[space_iname].lt_set(islvars[space_iname_prime]+two) + ) + | + islvars[time_iname_prime].eq_set(islvars[time_iname] + two) & + islvars[space_iname].eq_set(islvars[space_iname_prime]) + ) +""" +# TODO testing new dep map +constraint_map = create_arbitrary_dependency_constraint( + "itp = it + 1 and ixp - 2 < ix and ix < ixp + 2 or itp = it + 2 and ix = ixp", + inames_domain_before, + inames_domain_after, + sid_before = sid_before, + sid_after = sid_after, + unused_param_name = "unused", + statement_var_name = "statement", + statement_var_pose=0, + #all_dom_inames_ordered=None, + all_dom_inames_ordered=statement_inames_premap_order, + ) +print("constraint_map before mapping:") +print(prettier_map_string(constraint_map)) +# TODO (left off here) +# TODO decide on before/after notation and make consistent +1/0 verbose = False verbose = True @@ 
-335,8 +368,9 @@ print(prettier_map_string(m2)) # new kernel knl = lp.map_domain(ref_knl, m) knl = lp.prioritize_loops(knl, "tt,tparity,tx,itt,itx") -#print("code after mapping:") -#print(generate_code_v2(knl).device_code()) +print("code after mapping:") +print(generate_code_v2(knl).device_code()) +1/0 print("constraint_map before apply_range:") print(prettier_map_string(constraint_map)) diff --git a/sched_check_utils.py b/sched_check_utils.py index 575923753..a91aef3a3 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -96,7 +96,7 @@ def reorder_dims_by_name( return new_set -def create_new_isl_set_with_primes(old_isl_set): +def create_new_isl_set_with_primes(old_isl_set, marker="'"): """Return an isl_set with apostrophes appended to dim_type.set dimension names. @@ -110,11 +110,11 @@ def create_new_isl_set_with_primes(old_isl_set): new_set = old_isl_set.copy() for i in range(old_isl_set.n_dim()): new_set = new_set.set_dim_name(isl.dim_type.set, i, old_isl_set.get_dim_name( - isl.dim_type.set, i)+"'") + isl.dim_type.set, i)+marker) return new_set -def make_islvars_with_var_primes(var_names, param_names): +def make_islvars_with_var_primes(var_names, param_names, marker="'"): """Return a dictionary from variable and parameter names to :class:`PwAff` instances that represent each of the variables and parameters, including @@ -134,15 +134,25 @@ def make_islvars_with_var_primes(var_names, param_names): """ + def append_marker(l, mark): + new_l = [] + for s in l: + new_l.append(s+mark) + return new_l + return isl.make_zero_and_vars( - var_names+append_apostrophes(var_names), param_names) + var_names+append_marker(var_names, marker), param_names) -def append_apostrophes(strings): +def append_marker_to_strings(strings, marker="'"): if not isinstance(strings, list): - raise ValueError("append_apostrophes did not receive a list") + raise ValueError("append_marker_to_strings did not receive a list") else: - return [s+"'" for s in strings] + return [s+marker for s in strings] + + +def append_apostrophes(strings): + return append_marker_to_strings(strings, marker="'") def _union_of_isl_sets_or_maps(set_list): -- GitLab From d253a097f1d32ebac8dcb2634c827a87875f10cc Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 2 Dec 2019 17:15:01 -0600 Subject: [PATCH 153/415] separate dependency specification (legacy kernels) from schedule checking --- __init__.py | 62 +++++++++++++++++--------- example_pairwise_schedule_validity.py | 8 +++- sched_check_utils.py | 2 +- schedule.py | 14 +++--- test/test_invalid_scheds.py | 22 +++++++--- test/test_valid_scheds.py | 63 +++++++++++++++------------ 6 files changed, 108 insertions(+), 63 deletions(-) diff --git a/__init__.py b/__init__.py index f0e953548..76c14acb5 100644 --- a/__init__.py +++ b/__init__.py @@ -1,24 +1,8 @@ -# TODO create a set of broken kernels to test against -# (small kernels to test a specific case) -# TODO work on granularity of encapsulation, encapsulate some of this in -# separate functions -def check_schedule_validity( - knl, - verbose=False, - _use_scheduled_kernel_to_obtain_loop_priority=False): - +def get_statement_pair_dependency_sets_from_legacy_knl(knl): from schedule_checker.dependency import ( create_dependencies_from_legacy_knl, - create_dependency_constraint, - ) - from schedule_checker.schedule import LexSchedule - from schedule_checker.lexicographic_order_map import ( - get_statement_ordering_map, - ) - from schedule_checker.sched_check_utils import ( - prettier_map_string, ) # Preprocess if not already preprocessed @@ 
-49,6 +33,38 @@ def check_schedule_validity( dep_set.statement_after.within_inames) ]) + return deps_and_domains + + +# TODO create a set of broken kernels to test against +# (small kernels to test a specific case) +# TODO work on granularity of encapsulation, encapsulate some of this in +# separate functions +def check_schedule_validity( + knl, + deps_and_domains, + verbose=False, + _use_scheduled_kernel_to_obtain_loop_priority=False): + + from schedule_checker.dependency import ( + create_dependency_constraint, + ) + from schedule_checker.schedule import LexSchedule + from schedule_checker.lexicographic_order_map import ( + get_statement_ordering_map, + ) + from schedule_checker.sched_check_utils import ( + prettier_map_string, + ) + + # Preprocess if not already preprocessed + from loopy.kernel import KernelState + if knl.state < KernelState.PREPROCESSED: + from loopy import preprocess_kernel + preprocessed_knl = preprocess_kernel(knl) + else: + preprocessed_knl = knl + if verbose: print("="*80) print("StatementDependencies w/domains:") @@ -100,10 +116,12 @@ def check_schedule_validity( # Create a mapping of {statement instance: lex point} # including only instructions involved in this dependency - sched = LexSchedule(scheduled_knl, include_only_insn_ids=[ - s_before.insn_id, - s_after.insn_id - ]) + sched = LexSchedule(scheduled_knl, scheduled_knl.schedule, + include_only_insn_ids=[ + s_before.insn_id, + s_after.insn_id + ], + prohibited_var_names=scheduled_knl.all_inames()) #print("-"*80) #print("LexSchedule before processing:") @@ -176,6 +194,8 @@ def check_schedule_validity( sched.unused_param_name, sched.statement_var_name, ) + # TODO specify lp_insn_id_to_lex_sched_id independently of schedule creation + # so that dependency constraint creation can happen before schedule is created # align constraint map spaces to match sio so we can compare them if verbose: diff --git a/example_pairwise_schedule_validity.py b/example_pairwise_schedule_validity.py index 0c5b1f61f..581001111 100644 --- a/example_pairwise_schedule_validity.py +++ b/example_pairwise_schedule_validity.py @@ -1,6 +1,9 @@ import loopy as lp import numpy as np -from schedule_checker import check_schedule_validity +from schedule_checker import ( + get_statement_pair_dependency_sets_from_legacy_knl, + check_schedule_validity, +) # Choose kernel ---------------------------------------------------------- @@ -275,7 +278,8 @@ if knl_choice == "loop_carried_deps": ) -sched_is_valid = check_schedule_validity(knl, verbose=True) +deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(knl) +sched_is_valid = check_schedule_validity(knl, deps_and_domains, verbose=True) print("is sched valid? 
constraint map subset of SIO?") print(sched_is_valid) diff --git a/sched_check_utils.py b/sched_check_utils.py index a91aef3a3..c4658efc1 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -354,7 +354,7 @@ def get_concurrent_inames(knl): return conc_inames, all_inames-conc_inames -def _get_insn_id_from_sched_item(knl, sched_item): +def _get_insn_id_from_sched_item(sched_item): # TODO could use loopy's sched_item_to_insn_id() from loopy.schedule import Barrier if isinstance(sched_item, Barrier): diff --git a/schedule.py b/schedule.py index 34af2cfbb..5243ad03b 100644 --- a/schedule.py +++ b/schedule.py @@ -86,7 +86,9 @@ class LexSchedule(object): def __init__( self, knl, + sched_items_ordered, include_only_insn_ids=None, + prohibited_var_names=[], ): """ :arg knl: A :class:`LoopKernel` whose instructions will be @@ -103,11 +105,11 @@ class LexSchedule(object): # make sure we don't have an iname name conflict assert not any( - iname == self.statement_var_name for iname in knl.all_inames()) + iname == self.statement_var_name for iname in prohibited_var_names) assert not any( - iname == self.unused_param_name for iname in knl.all_inames()) + iname == self.unused_param_name for iname in prohibited_var_names) - if ((include_only_insn_ids is None and len(knl.schedule) > 2) + if ((include_only_insn_ids is None and len(sched_items_ordered) > 2) or len(include_only_insn_ids) > 2): raise NotImplementedError( "LexSchedule currently does not produce program orderings " @@ -116,12 +118,12 @@ class LexSchedule(object): from loopy.schedule import (EnterLoop, LeaveLoop, Barrier, RunInstruction) from loopy.kernel.data import ConcurrentTag - # go through knl.schedule and generate self.lex_schedule + # go through sched_items_ordered and generate self.lex_schedule # keep track of the next point in our lexicographic ordering # initially this as a 1-d point with value 0 next_insn_lex_pt = [0] - for sched_item in knl.schedule: + for sched_item in sched_items_ordered: if isinstance(sched_item, EnterLoop): iname = sched_item.iname if knl.iname_tags_of_type(iname, ConcurrentTag): @@ -165,7 +167,7 @@ class LexSchedule(object): from schedule_checker.sched_check_utils import ( _get_insn_id_from_sched_item, ) - lp_insn_id = _get_insn_id_from_sched_item(knl, sched_item) + lp_insn_id = _get_insn_id_from_sched_item(sched_item) if lp_insn_id is None: # TODO make sure it's okay to ignore barriers without id # (because they'll never be part of a dependency?) 
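For illustration, a condensed end-to-end sketch of the two-step flow this commit introduces (small hypothetical kernel; call signature as in the diffs above):

import loopy as lp
from schedule_checker import (
    get_statement_pair_dependency_sets_from_legacy_knl,
    check_schedule_validity,
)

knl = lp.make_kernel(
    "{[i,j]: 0<=i,j<n}",
    """
    t[i,j] = 3.14          {id=set_t}
    u[i,j] = t[i,j] + 1    {id=set_u, dep=set_t}
    """,
    lang_version=(2018, 2))
knl = lp.prioritize_loops(knl, "i,j")

# step 1: extract statement-pair dependencies from the legacy kernel
deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(knl)
# step 2: check that a generated schedule respects them
sched_is_valid = check_schedule_validity(knl, deps_and_domains, verbose=False)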
diff --git a/test/test_invalid_scheds.py b/test/test_invalid_scheds.py index db85e10c4..323b79b8c 100644 --- a/test/test_invalid_scheds.py +++ b/test/test_invalid_scheds.py @@ -28,7 +28,11 @@ from pyopencl.tools import ( # noqa as pytest_generate_tests) import loopy as lp import numpy as np -from schedule_checker import check_schedule_validity +from schedule_checker import ( + get_statement_pair_dependency_sets_from_legacy_knl, + check_schedule_validity, +) + def test_invalid_prioritiy_detection(): @@ -54,19 +58,26 @@ def test_invalid_prioritiy_detection(): knl0 = lp.prioritize_loops(ref_knl, "h,i") knl0 = lp.prioritize_loops(ref_knl, "i,j") knl0 = lp.prioritize_loops(knl0, "j,k") - assert check_schedule_validity(knl0) + + deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(knl0) + sched_is_valid = check_schedule_validity(knl0, deps_and_domains) + assert sched_is_valid # no error: knl1 = lp.prioritize_loops(ref_knl, "h,i,k") knl1 = lp.prioritize_loops(knl1, "h,j,k") - assert check_schedule_validity(knl1) + + deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(knl1) + sched_is_valid = check_schedule_validity(knl1, deps_and_domains) + assert sched_is_valid # error (cycle): knl2 = lp.prioritize_loops(ref_knl, "h,i,j") knl2 = lp.prioritize_loops(knl2, "j,k") knl2 = lp.prioritize_loops(knl2, "k,i") try: - check_schedule_validity(knl2) + deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(knl2) + sched_is_valid = check_schedule_validity(knl2, deps_and_domains) # should raise error assert False except ValueError as e: @@ -76,7 +87,8 @@ def test_invalid_prioritiy_detection(): knl3 = lp.prioritize_loops(ref_knl, "h,i,j,k") knl3 = lp.prioritize_loops(knl3, "h,j,i,k") try: - check_schedule_validity(knl3) + deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(knl3) + sched_is_valid = check_schedule_validity(knl3, deps_and_domains) # should raise error assert False except ValueError as e: diff --git a/test/test_valid_scheds.py b/test/test_valid_scheds.py index 6603c7a93..b0c178b6f 100644 --- a/test/test_valid_scheds.py +++ b/test/test_valid_scheds.py @@ -28,7 +28,10 @@ from pyopencl.tools import ( # noqa as pytest_generate_tests) import loopy as lp import numpy as np -from schedule_checker import check_schedule_validity +from schedule_checker import ( + get_statement_pair_dependency_sets_from_legacy_knl, + check_schedule_validity, +) def test_loop_prioritization(): @@ -62,7 +65,10 @@ def test_loop_prioritization(): {"b": np.float32, "d": np.float32, "f": np.float32}) knl = lp.prioritize_loops(knl, "i,k") knl = lp.prioritize_loops(knl, "i,j") - assert check_schedule_validity(knl) + + deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(knl) + sched_is_valid = check_schedule_validity(knl, deps_and_domains) + assert sched_is_valid def test_matmul(): @@ -83,7 +89,10 @@ def test_matmul(): knl = lp.add_prefetch(knl, "a", ["k_inner", "i_inner"], default_tag="l.auto") knl = lp.add_prefetch(knl, "b", ["j_inner", "k_inner"], default_tag="l.auto") knl = lp.prioritize_loops(knl, "k_outer,k_inner") - assert check_schedule_validity(knl) + + deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(knl) + sched_is_valid = check_schedule_validity(knl, deps_and_domains) + assert sched_is_valid def test_scan(): @@ -115,7 +124,10 @@ def test_dependent_domain(): lang_version=(2018, 2), ) knl = lp.realize_reduction(knl, force_scan=True) - assert check_schedule_validity(knl) + + deps_and_domains = 
get_statement_pair_dependency_sets_from_legacy_knl(knl) + sched_is_valid = check_schedule_validity(knl, deps_and_domains) + assert sched_is_valid def test_stroud_bernstein(): @@ -152,27 +164,10 @@ def test_stroud_bernstein(): knl = lp.split_iname(knl, "el_outer", 2, outer_tag="g.0", inner_tag="ilp", slabs=(0, 1)) knl = lp.tag_inames(knl, dict(i2="l.1", alpha1="unr", alpha2="unr")) - assert check_schedule_validity(knl) - -def test_ilp(): - knl = lp.make_kernel( - "{[i,j,ilp_iname]: 0 <= i,j < n and 0 <= ilp_iname < 4}", - """ - for i - for j - for ilp_iname - tmp[i,j,ilp_iname] = 3.14 - end - end - end - """, - name="ilp_kernel", - assumptions="n>=1 and n mod 4 = 0", - ) - knl = lp.tag_inames(knl, {"j": "l.0", "ilp_iname": "ilp"}) - #knl = lp.prioritize_loops(knl, "i_outer_outer,i_outer_inner,i_inner,a") - assert check_schedule_validity(knl) + deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(knl) + sched_is_valid = check_schedule_validity(knl, deps_and_domains) + assert sched_is_valid def test_barrier(): @@ -194,7 +189,10 @@ def test_barrier(): knl = lp.split_iname(knl, "i", 2, outer_tag="g.0", inner_tag="l.0") knl = lp.split_iname(knl, "ii", 2, outer_tag="g.0", inner_tag="l.0") - assert check_schedule_validity(knl) + + deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(knl) + sched_is_valid = check_schedule_validity(knl, deps_and_domains) + assert sched_is_valid def test_nop(): @@ -214,7 +212,10 @@ def test_nop(): "...", seq_dependencies=True) knl = lp.fix_parameters(knl, dim=3) - assert check_schedule_validity(knl) + + deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(knl) + sched_is_valid = check_schedule_validity(knl, deps_and_domains) + assert sched_is_valid def test_multi_domain(): @@ -244,7 +245,10 @@ def test_multi_domain(): knl = lp.prioritize_loops(knl, "x,xx,i") knl = lp.prioritize_loops(knl, "i,j") knl = lp.prioritize_loops(knl, "j,k") - assert check_schedule_validity(knl) + + deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(knl) + sched_is_valid = check_schedule_validity(knl, deps_and_domains) + assert sched_is_valid def test_loop_carried_deps(): @@ -263,7 +267,10 @@ def test_loop_carried_deps(): assumptions="n >= 1", lang_version=(2018, 2) ) - assert check_schedule_validity(knl) + + deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(knl) + sched_is_valid = check_schedule_validity(knl, deps_and_domains) + assert sched_is_valid if __name__ == "__main__": -- GitLab From 6d59a6288abbec5c0cfd4884cb6412b67ce5a143 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 2 Dec 2019 17:34:56 -0600 Subject: [PATCH 154/415] no longer passing fully scheduled kernel to LexSchedule.__init__; instead pass schedule items (a subset of which may be tested within scheduling step) --- __init__.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/__init__.py b/__init__.py index 76c14acb5..e840a35be 100644 --- a/__init__.py +++ b/__init__.py @@ -6,6 +6,7 @@ def get_statement_pair_dependency_sets_from_legacy_knl(knl): ) # Preprocess if not already preprocessed + # note that kernels must always be preprocessed before scheduling from loopy.kernel import KernelState if knl.state < KernelState.PREPROCESSED: from loopy import preprocess_kernel @@ -58,6 +59,7 @@ def check_schedule_validity( ) # Preprocess if not already preprocessed + # note that kernels must always be preprocessed before scheduling from loopy.kernel import KernelState if knl.state < KernelState.PREPROCESSED: from loopy import 
preprocess_kernel @@ -116,7 +118,8 @@ def check_schedule_validity( # Create a mapping of {statement instance: lex point} # including only instructions involved in this dependency - sched = LexSchedule(scheduled_knl, scheduled_knl.schedule, + all_schedule_items = scheduled_knl.schedule + sched = LexSchedule(preprocessed_knl, all_schedule_items, include_only_insn_ids=[ s_before.insn_id, s_after.insn_id -- GitLab From 77e92c6ad84985221d723d2e9a64200f77ba2705 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 2 Dec 2019 18:16:48 -0600 Subject: [PATCH 155/415] don't use any scheduled kernel in schedule checking; instead, pass in list of schedule items; also pass in prohibited var names instead of getting them from scheduled_knl.all_inames() --- __init__.py | 26 +++--- example_pairwise_schedule_validity.py | 29 ++++++- test/test_invalid_scheds.py | 58 +++++++++++-- test/test_valid_scheds.py | 118 +++++++++++++++++--------- 4 files changed, 169 insertions(+), 62 deletions(-) diff --git a/__init__.py b/__init__.py index e840a35be..d92794c62 100644 --- a/__init__.py +++ b/__init__.py @@ -44,6 +44,8 @@ def get_statement_pair_dependency_sets_from_legacy_knl(knl): def check_schedule_validity( knl, deps_and_domains, + schedule_items, + prohibited_var_names=set(), verbose=False, _use_scheduled_kernel_to_obtain_loop_priority=False): @@ -67,6 +69,9 @@ def check_schedule_validity( else: preprocessed_knl = knl + if not prohibited_var_names: + prohibited_var_names = preprocessed_knl.all_inames() + if verbose: print("="*80) print("StatementDependencies w/domains:") @@ -75,25 +80,19 @@ def check_schedule_validity( print(dom_before) print(dom_after) - # get a schedule to check - if preprocessed_knl.schedule is None: - from loopy import get_one_scheduled_kernel - scheduled_knl = get_one_scheduled_kernel(preprocessed_knl) - else: - scheduled_knl = preprocessed_knl - if verbose: # Print kernel info ------------------------------------------------------ print("="*80) - print("Kernel:") - print(scheduled_knl) + #print("Kernel:") + #print(scheduled_knl) #from loopy import generate_code_v2 #print(generate_code_v2(scheduled_knl).device_code()) print("="*80) - print("Iname tags: %s" % (scheduled_knl.iname_to_tags)) + #print("Iname tags: %s" % (scheduled_knl.iname_to_tags)) print("="*80) print("Loopy schedule:") - for sched_item in scheduled_knl.schedule: + #for sched_item in scheduled_knl.schedule: + for sched_item in schedule_items: print(sched_item) #print("scheduled iname order:") #print(sched_iname_order) @@ -118,13 +117,12 @@ def check_schedule_validity( # Create a mapping of {statement instance: lex point} # including only instructions involved in this dependency - all_schedule_items = scheduled_knl.schedule - sched = LexSchedule(preprocessed_knl, all_schedule_items, + sched = LexSchedule(preprocessed_knl, schedule_items, include_only_insn_ids=[ s_before.insn_id, s_after.insn_id ], - prohibited_var_names=scheduled_knl.all_inames()) + prohibited_var_names=prohibited_var_names) #print("-"*80) #print("LexSchedule before processing:") diff --git a/example_pairwise_schedule_validity.py b/example_pairwise_schedule_validity.py index 581001111..0b83a1780 100644 --- a/example_pairwise_schedule_validity.py +++ b/example_pairwise_schedule_validity.py @@ -4,6 +4,11 @@ from schedule_checker import ( get_statement_pair_dependency_sets_from_legacy_knl, check_schedule_validity, ) +from loopy.kernel import KernelState +from loopy import ( + preprocess_kernel, + get_one_scheduled_kernel, +) # Choose kernel 
---------------------------------------------------------- @@ -277,9 +282,31 @@ if knl_choice == "loop_carried_deps": lang_version=(2018, 2) ) +unprocessed_knl = knl.copy() + +deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) + +# get a schedule to check +if knl.state < KernelState.PREPROCESSED: + knl = preprocess_kernel(knl) +knl = get_one_scheduled_kernel(knl) +print("kernel schedueld") +schedule_items = knl.schedule +print("checking validity") +sched_is_valid = check_schedule_validity( + unprocessed_knl, deps_and_domains, schedule_items, verbose=True) + +""" deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(knl) -sched_is_valid = check_schedule_validity(knl, deps_and_domains, verbose=True) + +# get a schedule to check +from loopy import get_one_scheduled_kernel +scheduled_knl = get_one_scheduled_kernel(knl) +schedule_items = scheduled_knl.schedule + +sched_is_valid = check_schedule_validity(knl, deps_and_domains, schedule_items, verbose=True) +""" print("is sched valid? constraint map subset of SIO?") print(sched_is_valid) diff --git a/test/test_invalid_scheds.py b/test/test_invalid_scheds.py index 323b79b8c..5f43909cf 100644 --- a/test/test_invalid_scheds.py +++ b/test/test_invalid_scheds.py @@ -32,7 +32,11 @@ from schedule_checker import ( get_statement_pair_dependency_sets_from_legacy_knl, check_schedule_validity, ) - +from loopy.kernel import KernelState +from loopy import ( + preprocess_kernel, + get_one_scheduled_kernel, +) def test_invalid_prioritiy_detection(): @@ -59,16 +63,34 @@ def test_invalid_prioritiy_detection(): knl0 = lp.prioritize_loops(ref_knl, "i,j") knl0 = lp.prioritize_loops(knl0, "j,k") - deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(knl0) - sched_is_valid = check_schedule_validity(knl0, deps_and_domains) + unprocessed_knl = knl0.copy() + + deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) + + # get a schedule to check + if knl0.state < KernelState.PREPROCESSED: + knl0 = preprocess_kernel(knl0) + knl0 = get_one_scheduled_kernel(knl0) + schedule_items = knl0.schedule + + sched_is_valid = check_schedule_validity(unprocessed_knl, deps_and_domains, schedule_items) assert sched_is_valid # no error: knl1 = lp.prioritize_loops(ref_knl, "h,i,k") knl1 = lp.prioritize_loops(knl1, "h,j,k") - deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(knl1) - sched_is_valid = check_schedule_validity(knl1, deps_and_domains) + unprocessed_knl = knl1.copy() + + deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) + + # get a schedule to check + if knl1.state < KernelState.PREPROCESSED: + knl1 = preprocess_kernel(knl1) + knl1 = get_one_scheduled_kernel(knl1) + schedule_items = knl1.schedule + + sched_is_valid = check_schedule_validity(unprocessed_knl, deps_and_domains, schedule_items) assert sched_is_valid # error (cycle): @@ -76,8 +98,17 @@ def test_invalid_prioritiy_detection(): knl2 = lp.prioritize_loops(knl2, "j,k") knl2 = lp.prioritize_loops(knl2, "k,i") try: - deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(knl2) - sched_is_valid = check_schedule_validity(knl2, deps_and_domains) + unprocessed_knl = knl2.copy() + + deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) + + # get a schedule to check + if knl2.state < KernelState.PREPROCESSED: + knl2 = preprocess_kernel(knl2) + knl2 = get_one_scheduled_kernel(knl2) + schedule_items = knl2.schedule + + 
sched_is_valid = check_schedule_validity(unprocessed_knl, deps_and_domains, schedule_items) # should raise error assert False except ValueError as e: @@ -87,8 +118,17 @@ def test_invalid_prioritiy_detection(): knl3 = lp.prioritize_loops(ref_knl, "h,i,j,k") knl3 = lp.prioritize_loops(knl3, "h,j,i,k") try: - deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(knl3) - sched_is_valid = check_schedule_validity(knl3, deps_and_domains) + unprocessed_knl = knl3.copy() + + deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) + + # get a schedule to check + if knl3.state < KernelState.PREPROCESSED: + knl3 = preprocess_kernel(knl3) + knl3 = get_one_scheduled_kernel(knl3) + schedule_items = knl3.schedule + + sched_is_valid = check_schedule_validity(unprocessed_knl, deps_and_domains, schedule_items) # should raise error assert False except ValueError as e: diff --git a/test/test_valid_scheds.py b/test/test_valid_scheds.py index b0c178b6f..0ed14502e 100644 --- a/test/test_valid_scheds.py +++ b/test/test_valid_scheds.py @@ -32,7 +32,11 @@ from schedule_checker import ( get_statement_pair_dependency_sets_from_legacy_knl, check_schedule_validity, ) - +from loopy.kernel import KernelState +from loopy import ( + preprocess_kernel, + get_one_scheduled_kernel, +) def test_loop_prioritization(): knl = lp.make_kernel( @@ -66,8 +70,17 @@ def test_loop_prioritization(): knl = lp.prioritize_loops(knl, "i,k") knl = lp.prioritize_loops(knl, "i,j") - deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(knl) - sched_is_valid = check_schedule_validity(knl, deps_and_domains) + unprocessed_knl = knl.copy() + + deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) + + # get a schedule to check + if knl.state < KernelState.PREPROCESSED: + knl = preprocess_kernel(knl) + knl = get_one_scheduled_kernel(knl) + schedule_items = knl.schedule + + sched_is_valid = check_schedule_validity(unprocessed_knl, deps_and_domains, schedule_items) assert sched_is_valid @@ -90,8 +103,17 @@ def test_matmul(): knl = lp.add_prefetch(knl, "b", ["j_inner", "k_inner"], default_tag="l.auto") knl = lp.prioritize_loops(knl, "k_outer,k_inner") - deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(knl) - sched_is_valid = check_schedule_validity(knl, deps_and_domains) + unprocessed_knl = knl.copy() + + deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) + + # get a schedule to check + if knl.state < KernelState.PREPROCESSED: + knl = preprocess_kernel(knl) + knl = get_one_scheduled_kernel(knl) + schedule_items = knl.schedule + + sched_is_valid = check_schedule_validity(unprocessed_knl, deps_and_domains, schedule_items) assert sched_is_valid @@ -125,8 +147,17 @@ def test_dependent_domain(): ) knl = lp.realize_reduction(knl, force_scan=True) - deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(knl) - sched_is_valid = check_schedule_validity(knl, deps_and_domains) + unprocessed_knl = knl.copy() + + deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) + + # get a schedule to check + if knl.state < KernelState.PREPROCESSED: + knl = preprocess_kernel(knl) + knl = get_one_scheduled_kernel(knl) + schedule_items = knl.schedule + + sched_is_valid = check_schedule_validity(unprocessed_knl, deps_and_domains, schedule_items) assert sched_is_valid @@ -165,33 +196,17 @@ def test_stroud_bernstein(): inner_tag="ilp", slabs=(0, 1)) knl = lp.tag_inames(knl, 
dict(i2="l.1", alpha1="unr", alpha2="unr")) - deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(knl) - sched_is_valid = check_schedule_validity(knl, deps_and_domains) - assert sched_is_valid + unprocessed_knl = knl.copy() + deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) -def test_barrier(): - np.random.seed(17) - cnst = np.random.randn(16) - knl = lp.make_kernel( - "{[i, ii]: 0<=i, ii Date: Mon, 2 Dec 2019 19:23:51 -0600 Subject: [PATCH 156/415] updated todo --- __init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/__init__.py b/__init__.py index d92794c62..5c7fde8df 100644 --- a/__init__.py +++ b/__init__.py @@ -195,8 +195,8 @@ def check_schedule_validity( sched.unused_param_name, sched.statement_var_name, ) - # TODO specify lp_insn_id_to_lex_sched_id independently of schedule creation - # so that dependency constraint creation can happen before schedule is created + # TODO figure out how to keep a consistent lp_insn_id_to_lex_sched_id map + # when dependency creation is separate from schedule checking # align constraint map spaces to match sio so we can compare them if verbose: -- GitLab From 9967e37e08cb7e8a46da9742021d6f1eb4d40a7f Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 10 Dec 2019 05:40:12 -0600 Subject: [PATCH 157/415] changed schedule into two separate maps, one for the 'before' instruction and one for the 'after' instruction, so that unused inames don't have to be part of the map; changed dependency creation from legacy kernels to match; now in_dims and out_dims of statement-instance-ordering and dependencies do not have to match (may contain different inames) --- __init__.py | 55 ++++++----- dependency.py | 34 ++++--- lexicographic_order_map.py | 12 ++- sched_check_utils.py | 8 +- schedule.py | 188 +++++++++++++++++++++++++------------ 5 files changed, 197 insertions(+), 100 deletions(-) diff --git a/__init__.py b/__init__.py index 5c7fde8df..e0ae56a90 100644 --- a/__init__.py +++ b/__init__.py @@ -80,7 +80,6 @@ def check_schedule_validity( print(dom_before) print(dom_after) - if verbose: # Print kernel info ------------------------------------------------------ print("="*80) #print("Kernel:") @@ -91,18 +90,15 @@ def check_schedule_validity( #print("Iname tags: %s" % (scheduled_knl.iname_to_tags)) print("="*80) print("Loopy schedule:") - #for sched_item in scheduled_knl.schedule: for sched_item in schedule_items: print(sched_item) #print("scheduled iname order:") #print(sched_iname_order) - # For each dependency, create+test schedule containing pair of insns------ - - if verbose: print("="*80) print("Looping through dep pairs...") + # For each dependency, create+test schedule containing pair of insns------ sched_is_valid = True for statement_pair_dep_set, dom_before, dom_after in deps_and_domains: if verbose: @@ -117,12 +113,13 @@ def check_schedule_validity( # Create a mapping of {statement instance: lex point} # including only instructions involved in this dependency - sched = LexSchedule(preprocessed_knl, schedule_items, - include_only_insn_ids=[ - s_before.insn_id, - s_after.insn_id - ], - prohibited_var_names=prohibited_var_names) + sched = LexSchedule( + preprocessed_knl, + schedule_items, + s_before.insn_id, + s_after.insn_id, + prohibited_var_names=prohibited_var_names, + ) #print("-"*80) #print("LexSchedule before processing:") @@ -139,26 +136,30 @@ def check_schedule_validity( # Get an isl map representing the LexSchedule; # this requires the iname domains - assert 
len(sched) in [1, 2] - if len(sched) == 1: - assert dom_before == dom_after - # get a mapping from lex schedule id to relevant inames domain - sid_to_dom = { + # TODO if sid_to_dom_before/after always contain single pair, + # maybe don't use dict + sid_to_dom_before = { lp_insn_id_to_lex_sched_id[s_before.insn_id]: dom_before, + } + sid_to_dom_after = { lp_insn_id_to_lex_sched_id[s_after.insn_id]: dom_after, } - sched_map_symbolic = sched.create_symbolic_isl_map(sid_to_dom) + sched_map_symbolic_before, sched_map_symbolic_after = \ + sched.create_symbolic_isl_map( + sid_to_dom_before, + sid_to_dom_after, + ) if verbose: - print("sid_to_dom:\n", sid_to_dom) + print("sid_to_dom_before:\n", sid_to_dom_before) + print("sid_to_dom_after:\n", sid_to_dom_after) print("LexSchedule after creating symbolic isl map:") print(sched) print("LexSched:") - print(prettier_map_string(sched_map_symbolic)) - #print("space (statement instances -> lex time):") - #print(sched_map_symbolic.space) + print(prettier_map_string(sched_map_symbolic_before)) + print(prettier_map_string(sched_map_symbolic_after)) #print("-"*80) # get map representing lexicographic ordering @@ -172,10 +173,14 @@ def check_schedule_validity( print("-"*80) """ + # TODO which direction does this composition go? # create statement instance ordering, # maps each statement instance to all statement instances occuring later sio = get_statement_ordering_map( - sched_map_symbolic, lex_order_map_symbolic) + sched_map_symbolic_before, + sched_map_symbolic_after, + lex_order_map_symbolic, + ) if verbose: print("statement instance ordering:") @@ -222,11 +227,11 @@ def check_schedule_validity( ) # align out dims + sio_out_names = sio.space.get_var_names(isl.dim_type.out) aligned_constraint_map = reorder_dims_by_name( aligned_constraint_map, isl.dim_type.out, - append_apostrophes(sio_in_names), - # TODO sio out names are only pretending to have apostrophes; confusing + sio_out_names, add_missing=False, new_names_are_permutation_only=True, ) @@ -243,7 +248,7 @@ def check_schedule_validity( == sio.space.get_var_names(isl.dim_type.in_)) assert ( aligned_constraint_map.space.get_var_names(isl.dim_type.out) - == append_apostrophes(sio.space.get_var_names(isl.dim_type.out))) + == sio.space.get_var_names(isl.dim_type.out)) assert ( aligned_constraint_map.space.get_var_names(isl.dim_type.param) == sio.space.get_var_names(isl.dim_type.param)) diff --git a/dependency.py b/dependency.py index 197815951..56e6bcd73 100644 --- a/dependency.py +++ b/dependency.py @@ -128,7 +128,8 @@ def create_dependency_constraint( unused_param_name, statement_var_name, statement_var_pose=0, - all_dom_inames_ordered=None, + dom_inames_ordered_before=None, + dom_inames_ordered_after=None, ): """Create a statement dependency constraint represented as a map from each statement instance to statement instances that must occur later, @@ -192,23 +193,29 @@ def create_dependency_constraint( from schedule_checker.sched_check_utils import ( list_var_names_in_isl_sets, ) - if all_dom_inames_ordered is None: - all_dom_inames_ordered = list_var_names_in_isl_sets( - [dom_before_constraint_set, dom_after_constraint_set]) + if dom_inames_ordered_before is None: + dom_inames_ordered_before = list_var_names_in_isl_sets( + [dom_before_constraint_set]) + if dom_inames_ordered_after is None: + dom_inames_ordered_after = list_var_names_in_isl_sets( + [dom_after_constraint_set]) # create some (ordered) isl vars to use, e.g., {s, i, j, s', i', j'} islvars = make_islvars_with_var_primes( - 
[statement_var_name]+all_dom_inames_ordered, - [unused_param_name]) + var_names_in=[statement_var_name]+dom_inames_ordered_before, + param_names=[unused_param_name], + var_names_out=[statement_var_name]+dom_inames_ordered_after, + ) statement_var_name_prime = statement_var_name+"'" # get (ordered) list of unused before/after inames + # TODO are there ever unused inames now that we're separating the in/out spaces? inames_before_unused = [] - for iname in all_dom_inames_ordered: + for iname in dom_inames_ordered_before: if iname not in dom_before_constraint_set.get_var_names(isl.dim_type.out): inames_before_unused.append(iname) inames_after_unused = [] - for iname in all_dom_inames_ordered: + for iname in dom_inames_ordered_after: if iname not in dom_after_constraint_set.get_var_names(isl.dim_type.out): inames_after_unused.append(iname + "'") @@ -336,7 +343,10 @@ def create_dependency_constraint( # convert constraint set to map all_constraints_map = _convert_constraint_set_to_map( - all_constraints_set, len(all_dom_inames_ordered) + 1) # +1 for statement var + all_constraints_set, + mv_count=len(dom_inames_ordered_after)+1, # +1 for statement var + src_position=len(dom_inames_ordered_before)+1, # +1 for statement var + ) # now apply domain sets to constraint variables @@ -352,12 +362,12 @@ def create_dependency_constraint( # insert inames missing from doms to enable intersection domain_to_intersect = reorder_dims_by_name( domain_to_intersect, isl.dim_type.out, - [statement_var_name] + all_dom_inames_ordered, + [statement_var_name] + dom_inames_ordered_before, add_missing=True) range_to_intersect = reorder_dims_by_name( range_to_intersect, isl.dim_type.out, - append_apostrophes([statement_var_name] + all_dom_inames_ordered), + append_apostrophes([statement_var_name] + dom_inames_ordered_after), add_missing=True) # intersect doms @@ -507,6 +517,8 @@ def create_arbitrary_dependency_constraint( all_dom_inames_ordered=None, ): + # TODO update after allowing different inames for before/after + from schedule_checker.sched_check_utils import ( make_islvars_with_var_primes, #append_apostrophes, diff --git a/lexicographic_order_map.py b/lexicographic_order_map.py index 356fb8731..ccfb9d6f9 100644 --- a/lexicographic_order_map.py +++ b/lexicographic_order_map.py @@ -1,7 +1,8 @@ import islpy as isl -def get_statement_ordering_map(sched_map, lex_map): +def get_statement_ordering_map( + sched_map_before, sched_map_after, lex_map, out_marker="'"): """Return a mapping that maps each statement instance to all statement instances occuring later. 
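A minimal standalone sketch of the composition introduced in this file (the two schedule maps, their bounds, and the 3-d lexicographic ordering below are invented purely for illustration; in the checker they come from LexSchedule.create_symbolic_isl_map and create_lex_order_map):

    import islpy as isl

    # "before" statement instances -> lex points
    sched_before = isl.Map("{ [s, i] -> [0, i, 0] : s = 0 and 0 <= i < 4 }")
    # "after" statement instances -> lex points
    sched_after = isl.Map("{ [s, i] -> [0, i, 1] : s = 1 and 0 <= i < 4 }")
    # "strictly earlier" ordering on 3-d lex points
    lex = isl.Map(
        "{ [a0, a1, a2] -> [b0, b1, b2] : a0 < b0"
        " or (a0 = b0 and a1 < b1)"
        " or (a0 = b0 and a1 = b1 and a2 < b2) }")

    # statement instance ordering: each "before" instance is mapped to the
    # "after" instances scheduled at a lexicographically later point
    sio = sched_before.apply_range(lex).apply_range(sched_after.reverse())
    print(sio)
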
@@ -24,8 +25,13 @@ def get_statement_ordering_map(sched_map, lex_map): """ - # TODO apostrophes aren't really there for range, this is confusing - return sched_map.apply_range(lex_map).apply_range(sched_map.reverse()) + # TODO determine which order is correct + sio = sched_map_before.apply_range(lex_map).apply_range(sched_map_after.reverse()) + # append marker to out names + for i in range(sio.dim(isl.dim_type.out)): + sio = sio.set_dim_name(isl.dim_type.out, i, sio.get_dim_name( + isl.dim_type.out, i)+out_marker) + return sio def get_lex_order_constraint(islvars, before_names, after_names): diff --git a/sched_check_utils.py b/sched_check_utils.py index c4658efc1..fa4e3e3eb 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -114,7 +114,8 @@ def create_new_isl_set_with_primes(old_isl_set, marker="'"): return new_set -def make_islvars_with_var_primes(var_names, param_names, marker="'"): +def make_islvars_with_var_primes( + var_names_in, param_names, marker="'", var_names_out=None): """Return a dictionary from variable and parameter names to :class:`PwAff` instances that represent each of the variables and parameters, including @@ -140,8 +141,11 @@ def make_islvars_with_var_primes(var_names, param_names, marker="'"): new_l.append(s+mark) return new_l + if var_names_out is None: + var_names_out = var_names_in[:] + return isl.make_zero_and_vars( - var_names+append_marker(var_names, marker), param_names) + var_names_in+append_marker(var_names_out, marker), param_names) def append_marker_to_strings(strings, marker="'"): diff --git a/schedule.py b/schedule.py index 5243ad03b..80002455e 100644 --- a/schedule.py +++ b/schedule.py @@ -87,7 +87,8 @@ class LexSchedule(object): self, knl, sched_items_ordered, - include_only_insn_ids=None, + before_insn_id, + after_insn_id, prohibited_var_names=[], ): """ @@ -99,9 +100,11 @@ class LexSchedule(object): to None, all insructions will be included. 
""" + # TODO update docs now that we have two schedules - # list of LexScheduleStatements - self.lex_schedule = [] + # LexScheduleStatements + self.lex_sched_stmt_before = None + self.lex_sched_stmt_after = None # make sure we don't have an iname name conflict assert not any( @@ -109,12 +112,6 @@ class LexSchedule(object): assert not any( iname == self.unused_param_name for iname in prohibited_var_names) - if ((include_only_insn_ids is None and len(sched_items_ordered) > 2) - or len(include_only_insn_ids) > 2): - raise NotImplementedError( - "LexSchedule currently does not produce program orderings " - "with greater than 2 statements.") - from loopy.schedule import (EnterLoop, LeaveLoop, Barrier, RunInstruction) from loopy.kernel.data import ConcurrentTag @@ -123,6 +120,7 @@ class LexSchedule(object): # keep track of the next point in our lexicographic ordering # initially this as a 1-d point with value 0 next_insn_lex_pt = [0] + next_sid = 0 for sched_item in sched_items_ordered: if isinstance(sched_item, EnterLoop): iname = sched_item.iname @@ -139,7 +137,7 @@ class LexSchedule(object): # don't increment lex dim val enumerating items in current block, # otherwise, this loop is next item in current code block, so # increment lex dim val enumerating items in current code block - if self.lex_schedule: # if the schedule is not empty + if self.lex_sched_stmt_before or self.lex_sched_stmt_after: # if either statement has been set # this lex value will correspond to everything inside this loop # we will add new lex dimensions to enuerate items inside loop next_insn_lex_pt[-1] = next_insn_lex_pt[-1]+1 @@ -163,6 +161,7 @@ class LexSchedule(object): next_insn_lex_pt[-1] = next_insn_lex_pt[-1]+1 # if we didn't add any statements while in this loop, we might # sometimes be able to skip increment, but it's not hurting anything + # TODO might not need this increment period? 
elif isinstance(sched_item, (RunInstruction, Barrier)): from schedule_checker.sched_check_utils import ( _get_insn_id_from_sched_item, @@ -178,22 +177,58 @@ class LexSchedule(object): # if include_only_insn_ids list was passed, # only process insns found in list, # otherwise process all instructions - if (include_only_insn_ids is None - or lp_insn_id in include_only_insn_ids): + if lp_insn_id == before_insn_id and lp_insn_id == after_insn_id: + # add before sched item + self.lex_sched_stmt_before = ( + LexScheduleStatement( + insn_id=lp_insn_id, + int_id=next_sid, # int representing insn + ), + next_insn_lex_pt[:] + ) + # add after sched item + self.lex_sched_stmt_after = ( + LexScheduleStatement( + insn_id=lp_insn_id, + int_id=next_sid, # int representing insn + ), + next_insn_lex_pt[:] + ) - # add sched item - self.lex_schedule.append(( + # increment lex dim val enumerating items in current code block + next_insn_lex_pt[-1] = next_insn_lex_pt[-1] + 1 + next_sid += 1 + elif lp_insn_id == before_insn_id: + # add before sched item + self.lex_sched_stmt_before = ( LexScheduleStatement( insn_id=lp_insn_id, - int_id=len(self.lex_schedule), # int representing insn + int_id=next_sid, # int representing insn ), next_insn_lex_pt[:] - )) + ) # increment lex dim val enumerating items in current code block next_insn_lex_pt[-1] = next_insn_lex_pt[-1] + 1 + next_sid += 1 + elif lp_insn_id == after_insn_id: + # add after sched item + self.lex_sched_stmt_after = ( + LexScheduleStatement( + insn_id=lp_insn_id, + int_id=next_sid, # int representing insn + ), + next_insn_lex_pt[:] + ) + + # increment lex dim val enumerating items in current code block + next_insn_lex_pt[-1] = next_insn_lex_pt[-1] + 1 + next_sid += 1 else: pass + # to save time, stop when we've created both statements + if self.lex_sched_stmt_before and self.lex_sched_stmt_after: + break # at this point, lex_schedule may contain lex points missing dimensions, # the values in these missing dims should be zero, so add them @@ -204,10 +239,13 @@ class LexSchedule(object): ``int_id`` refer to the ``insn_id`` and ``int_id`` attributes of :class:`LexScheduleStatement`. 
""" - return dict([(stmt.insn_id, stmt.int_id) for stmt, _ in self.lex_schedule]) + return { + self.lex_sched_stmt_before[0].insn_id: self.lex_sched_stmt_before[0].int_id, + self.lex_sched_stmt_after[0].insn_id: self.lex_sched_stmt_after[0].int_id, + } def max_lex_dims(self): - return max(len(lex_pt) for _, lex_pt in self.lex_schedule) + return max([len(self.lex_sched_stmt_before[1]), len(self.lex_sched_stmt_after[1])]) def pad_lex_pts_with_zeros(self): """Find the maximum number of lexicographic dimensions represented @@ -218,15 +256,24 @@ class LexSchedule(object): """ max_lex_dim = self.max_lex_dims() - new_sched = [] - for stmt, lex_pt in self.lex_schedule: - new_sched.append((stmt, lex_pt + [0]*(max_lex_dim-len(lex_pt)))) - self.lex_schedule = new_sched + self.lex_sched_stmt_before = ( + self.lex_sched_stmt_before[0], + self.lex_sched_stmt_before[1][:] + [0]*( + max_lex_dim-len(self.lex_sched_stmt_before[1])) + ) + self.lex_sched_stmt_after = ( + self.lex_sched_stmt_after[0], + self.lex_sched_stmt_after[1][:] + [0]*( + max_lex_dim-len(self.lex_sched_stmt_after[1])) + ) def create_symbolic_isl_map( self, - sid_to_dom, - dom_inames_ordered=None): + sid_to_dom_before, + sid_to_dom_after, + dom_inames_ordered_before=None, + dom_inames_ordered_after=None, + ): """Create an isl map representing lex schedule as a mapping from each statement instance to all statement instances occuring later. @@ -253,31 +300,47 @@ class LexSchedule(object): add_dims_to_isl_set ) - assert len(sid_to_dom) == len(self.lex_schedule) + # TODO if sid_to_dom_before/after always contain single pair, + # maybe don't use dict + assert len(sid_to_dom_before) == 1 + assert len(sid_to_dom_after) == 1 from schedule_checker.sched_check_utils import ( list_var_names_in_isl_sets, ) - if dom_inames_ordered is None: - dom_inames_ordered = list_var_names_in_isl_sets(sid_to_dom.values()) + if dom_inames_ordered_before is None: + dom_inames_ordered_before = list_var_names_in_isl_sets( + sid_to_dom_before.values()) + if dom_inames_ordered_after is None: + dom_inames_ordered_after = list_var_names_in_isl_sets( + sid_to_dom_after.values()) # create an isl space # {('statement', used in >=1 statement domain>) -> # (lexicographic ordering dims)} + from schedule_checker.sched_check_utils import get_isl_space params_sched = [self.unused_param_name] - in_names_sched = [self.statement_var_name] + dom_inames_ordered[:] out_names_sched = self.get_lex_var_names() - from schedule_checker.sched_check_utils import get_isl_space - sched_space = get_isl_space(params_sched, in_names_sched, out_names_sched) + + in_names_sched_before = [self.statement_var_name] + dom_inames_ordered_before[:] + sched_space_before = get_isl_space( + params_sched, in_names_sched_before, out_names_sched) + in_names_sched_after = [self.statement_var_name] + dom_inames_ordered_after[:] + sched_space_after = get_isl_space( + params_sched, in_names_sched_after, out_names_sched) # Insert 'statement' dim into domain so that its space allows for # intersection with sched map later - doms_to_intersect = [] - for stmt, _ in self.lex_schedule: - doms_to_intersect.append( - add_dims_to_isl_set( - sid_to_dom[stmt.int_id], isl.dim_type.set, - [self.statement_var_name], 0)) + doms_to_intersect_before = [ + add_dims_to_isl_set( + sid_to_dom_before[self.lex_sched_stmt_before[0].int_id], isl.dim_type.set, + [self.statement_var_name], 0), + ] + doms_to_intersect_after = [ + add_dims_to_isl_set( + sid_to_dom_after[self.lex_sched_stmt_after[0].int_id], isl.dim_type.set, + 
[self.statement_var_name], 0), + ] # The isl map representing the schedule maps # statement instances -> lex time @@ -290,13 +353,22 @@ class LexSchedule(object): # Add all inames from combined domains to map domain tuples. # create isl map - return create_symbolic_isl_map_from_tuples( - zip( - [((stmt.int_id,) + tuple(dom_inames_ordered), lex_pt) - for stmt, lex_pt in self.lex_schedule], - doms_to_intersect - ), - sched_space, self.unused_param_name, self.statement_var_name) + return ( + create_symbolic_isl_map_from_tuples( + zip( + [((self.lex_sched_stmt_before[0].int_id,) + tuple(dom_inames_ordered_before), + self.lex_sched_stmt_before[1])], + doms_to_intersect_before + ), + sched_space_before, self.unused_param_name, self.statement_var_name), + create_symbolic_isl_map_from_tuples( + zip( + [((self.lex_sched_stmt_after[0].int_id,) + tuple(dom_inames_ordered_after), + self.lex_sched_stmt_after[1])], + doms_to_intersect_after + ), + sched_space_after, self.unused_param_name, self.statement_var_name) + ) def get_lex_var_names(self): return [self.lex_var_prefix+str(i) @@ -315,27 +387,25 @@ class LexSchedule(object): return create_lex_order_map( n_dims, before_names=self.get_lex_var_names()) - def __bool__(self): - return bool(self.lex_schedule) - def __nonzero__(self): return self.__bool__() def __eq__(self, other): - return self.lex_schedule == other.lex_schedule - - def __iter__(self): - return iter(self.lex_schedule) - - def __len__(self): - return len(self.lex_schedule) + return (self.lex_sched_stmt_before == other.lex_sched_stmt_before and + self.lex_sched_stmt_after == other.lex_sched_stmt_after) def __str__(self): - sched_str = "{\n" - for stmt, lex_pt in self.lex_schedule: - domain_elem = "[%s=%s,]" % ( - self.statement_var_name, - stmt.int_id) - sched_str += "%s -> %s;\n" % (domain_elem, lex_pt) + sched_str = "Before: {\n" + domain_elem = "[%s=%s,]" % ( + self.statement_var_name, + self.lex_sched_stmt_before[0].int_id) + sched_str += "%s -> %s;\n" % (domain_elem, self.lex_sched_stmt_before[1]) + sched_str += "}\n" + + sched_str += "After: {\n" + domain_elem += "[%s=%s,]" % ( + self.statement_var_name, + self.lex_sched_stmt_after[0].int_id) + sched_str += "%s -> %s;\n" % (domain_elem, self.lex_sched_stmt_after[1]) sched_str += "}" return sched_str -- GitLab From 77151322f97bf2cea7b39a552b464a04e4a24299 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Fri, 13 Dec 2019 11:53:17 -0600 Subject: [PATCH 158/415] added function to get all dependency maps (from legacy kernel) --- __init__.py | 51 +++++++++++++++++++++++++++ example_pairwise_schedule_validity.py | 12 +++++++ 2 files changed, 63 insertions(+) diff --git a/__init__.py b/__init__.py index e0ae56a90..4828c9410 100644 --- a/__init__.py +++ b/__init__.py @@ -291,3 +291,54 @@ def check_schedule_validity( print("===========================================================") return sched_is_valid + + +def get_dependency_maps( + deps_and_domains, + schedule_items, + loop_priority, + ): + + from schedule_checker.dependency import ( + create_dependency_constraint, + ) + from schedule_checker.sched_check_utils import ( + prettier_map_string, + ) + + # create map from loopy insn ids to ints + lp_insn_id_to_lex_sched_id = {} # TODO + next_sid = 0 + from loopy.schedule import Barrier, RunInstruction + for sched_item in schedule_items: + if isinstance(sched_item, (RunInstruction, Barrier)): + from schedule_checker.sched_check_utils import ( + _get_insn_id_from_sched_item, + ) + lp_insn_id = _get_insn_id_from_sched_item(sched_item) + 
lp_insn_id_to_lex_sched_id[lp_insn_id] = next_sid + next_sid += 1 + + all_constraint_maps = [] + for statement_pair_dep_set, dom_before, dom_after in deps_and_domains: + + # create a map representing constraints from the dependency, + # maps statement instance to all statement instances that must occur later + all_constraint_maps.append( + create_dependency_constraint( + statement_pair_dep_set, + dom_before, + dom_after, + loop_priority, + lp_insn_id_to_lex_sched_id, + "unused", # TODO shouldn't be necessary + "statement", + ) + ) + + for constraint_map in all_constraint_maps: + print("") + print(prettier_map_string(constraint_map)) + print("") + + return all_constraint_maps diff --git a/example_pairwise_schedule_validity.py b/example_pairwise_schedule_validity.py index 0b83a1780..2ccdefffb 100644 --- a/example_pairwise_schedule_validity.py +++ b/example_pairwise_schedule_validity.py @@ -3,6 +3,7 @@ import numpy as np from schedule_checker import ( get_statement_pair_dependency_sets_from_legacy_knl, check_schedule_validity, + get_dependency_maps, ) from loopy.kernel import KernelState from loopy import ( @@ -310,3 +311,14 @@ sched_is_valid = check_schedule_validity(knl, deps_and_domains, schedule_items, print("is sched valid? constraint map subset of SIO?") print(sched_is_valid) + + +print("="*80) +print("testing dep sort") +print("="*80) + +dep_maps = get_dependency_maps( + deps_and_domains, + schedule_items, + knl.loop_priority, + ) -- GitLab From 1a21bf155be460b290e65b0c741fb914ce7c8aef Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 16 Dec 2019 09:22:44 -0600 Subject: [PATCH 159/415] create a SAME dep and compare to dep map to determine whether we will need an edge in our dependency graph --- __init__.py | 60 +++++++++++++++++++++++---- example_pairwise_schedule_validity.py | 3 +- 2 files changed, 53 insertions(+), 10 deletions(-) diff --git a/__init__.py b/__init__.py index 4828c9410..39655a529 100644 --- a/__init__.py +++ b/__init__.py @@ -297,10 +297,13 @@ def get_dependency_maps( deps_and_domains, schedule_items, loop_priority, + knl, # TODO avoid passing this in ): from schedule_checker.dependency import ( create_dependency_constraint, + StatementPairDependencySet, + DependencyType as dt, ) from schedule_checker.sched_check_utils import ( prettier_map_string, @@ -319,26 +322,65 @@ def get_dependency_maps( lp_insn_id_to_lex_sched_id[lp_insn_id] = next_sid next_sid += 1 - all_constraint_maps = [] + from schedule_checker.sched_check_utils import ( + get_concurrent_inames, + ) + conc_inames, non_conc_inames = get_concurrent_inames(knl) + + deps_domains_and_constraint_maps = [] # TODO refactor this (maybe make a new data structure) for statement_pair_dep_set, dom_before, dom_after in deps_and_domains: + dep_constraint_map = create_dependency_constraint( + statement_pair_dep_set, + dom_before, + dom_after, + loop_priority, + lp_insn_id_to_lex_sched_id, + "unused", # TODO shouldn't be necessary + "statement", + ) + + # create "same" dep for these two insns + s_before = statement_pair_dep_set.statement_before + s_after = statement_pair_dep_set.statement_after + shared_nc_inames = s_before.within_inames & s_after.within_inames & non_conc_inames + same_dep_set = StatementPairDependencySet( + s_before, + s_after, + {dt.SAME: shared_nc_inames} + ) + same_dep_constraint_map = create_dependency_constraint( + same_dep_set, + dom_before, + dom_after, + loop_priority, + lp_insn_id_to_lex_sched_id, + "unused", # TODO shouldn't be necessary + "statement", + ) + + # see whether we should create an 
edge in our statement dep graph + # TODO is this the right test? + same_is_subset = same_dep_constraint_map.is_subset(dep_constraint_map) + # create a map representing constraints from the dependency, # maps statement instance to all statement instances that must occur later - all_constraint_maps.append( - create_dependency_constraint( + deps_domains_and_constraint_maps.append( + ( statement_pair_dep_set, dom_before, dom_after, - loop_priority, - lp_insn_id_to_lex_sched_id, - "unused", # TODO shouldn't be necessary - "statement", + dep_constraint_map, + same_is_subset, ) ) - for constraint_map in all_constraint_maps: + for spds, _, _, constraint_map, same_is_subset in deps_domains_and_constraint_maps: print("") + print("dep: %s" % (spds)) + print("map: ") print(prettier_map_string(constraint_map)) + print(same_is_subset) print("") - return all_constraint_maps + return deps_domains_and_constraint_maps diff --git a/example_pairwise_schedule_validity.py b/example_pairwise_schedule_validity.py index 2ccdefffb..dfc79bdc8 100644 --- a/example_pairwise_schedule_validity.py +++ b/example_pairwise_schedule_validity.py @@ -317,8 +317,9 @@ print("="*80) print("testing dep sort") print("="*80) -dep_maps = get_dependency_maps( +dep_domains_and_maps = get_dependency_maps( deps_and_domains, schedule_items, knl.loop_priority, + knl, ) -- GitLab From 8d8a9eb42d45b36fbb8583e9460df4a4191d6027 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 16 Dec 2019 09:38:37 -0600 Subject: [PATCH 160/415] create graph representing ordering of statements based on dependencies --- example_pairwise_schedule_validity.py | 18 ++++++++++++++++++ sched_check_utils.py | 7 +++++++ 2 files changed, 25 insertions(+) diff --git a/example_pairwise_schedule_validity.py b/example_pairwise_schedule_validity.py index dfc79bdc8..453708f1c 100644 --- a/example_pairwise_schedule_validity.py +++ b/example_pairwise_schedule_validity.py @@ -5,6 +5,9 @@ from schedule_checker import ( check_schedule_validity, get_dependency_maps, ) +from schedule_checker.sched_check_utils import ( + create_graph_from_pairs, +) from loopy.kernel import KernelState from loopy import ( preprocess_kernel, @@ -323,3 +326,18 @@ dep_domains_and_maps = get_dependency_maps( knl.loop_priority, knl, ) + +dep_graph_pairs = [ + ( + statement_pair_dep_set.statement_before.insn_id, + statement_pair_dep_set.statement_after.insn_id + ) + for statement_pair_dep_set, _, _, _, same_is_subset in dep_domains_and_maps + if same_is_subset + ] + +dep_graph = create_graph_from_pairs(dep_graph_pairs) + +print("dep_graph:") +for k, v in dep_graph.items(): + print("%s: %s" % (k, v)) diff --git a/sched_check_utils.py b/sched_check_utils.py index fa4e3e3eb..df8c07797 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -482,6 +482,13 @@ def get_orderings_of_length_n( return orderings +def create_graph_from_pairs(before_after_pairs): + # create key for every before + graph = dict([(before, set()) for before, _ in before_after_pairs]) + for before, after in before_after_pairs: + graph[before] = graph[before] | set([after, ]) + return graph + # only used for example purposes: -- GitLab From 3d5f85f769976d7f39f304d439f5b2afe6e2159b Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Thu, 19 Dec 2019 17:59:22 -0600 Subject: [PATCH 161/415] change dep graph edge criterion to S&C not empty --- __init__.py | 22 ++++++++++++---------- example_pairwise_schedule_validity.py | 4 ++-- 2 files changed, 14 insertions(+), 12 deletions(-) diff --git a/__init__.py b/__init__.py index 39655a529..308827778 
100644 --- a/__init__.py +++ b/__init__.py @@ -360,8 +360,17 @@ def get_dependency_maps( ) # see whether we should create an edge in our statement dep graph - # TODO is this the right test? - same_is_subset = same_dep_constraint_map.is_subset(dep_constraint_map) + intersect_dep_and_same = same_dep_constraint_map & dep_constraint_map + intersect_not_empty = not bool(intersect_dep_and_same.is_empty()) + + """ + print("") + print("dep: %s" % (statement_pair_dep_set)) + print("map: ") + print(prettier_map_string(dep_constraint_map)) + print(intersect_not_empty) + print(intersect_dep_and_same) + """ # create a map representing constraints from the dependency, # maps statement instance to all statement instances that must occur later @@ -371,16 +380,9 @@ def get_dependency_maps( dom_before, dom_after, dep_constraint_map, - same_is_subset, + intersect_not_empty, ) ) - - for spds, _, _, constraint_map, same_is_subset in deps_domains_and_constraint_maps: - print("") - print("dep: %s" % (spds)) - print("map: ") - print(prettier_map_string(constraint_map)) - print(same_is_subset) print("") return deps_domains_and_constraint_maps diff --git a/example_pairwise_schedule_validity.py b/example_pairwise_schedule_validity.py index 453708f1c..bf7f0b232 100644 --- a/example_pairwise_schedule_validity.py +++ b/example_pairwise_schedule_validity.py @@ -332,8 +332,8 @@ dep_graph_pairs = [ statement_pair_dep_set.statement_before.insn_id, statement_pair_dep_set.statement_after.insn_id ) - for statement_pair_dep_set, _, _, _, same_is_subset in dep_domains_and_maps - if same_is_subset + for statement_pair_dep_set, _, _, _, add_edge in dep_domains_and_maps + if add_edge ] dep_graph = create_graph_from_pairs(dep_graph_pairs) -- GitLab From d439535322d4773a3f1691aa1f3fc7160a176175 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sat, 4 Jan 2020 20:23:21 -0600 Subject: [PATCH 162/415] fixing flake8 issues --- example_pairwise_schedule_validity.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/example_pairwise_schedule_validity.py b/example_pairwise_schedule_validity.py index bf7f0b232..697e7f68c 100644 --- a/example_pairwise_schedule_validity.py +++ b/example_pairwise_schedule_validity.py @@ -288,7 +288,8 @@ if knl_choice == "loop_carried_deps": unprocessed_knl = knl.copy() -deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) +deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl( + unprocessed_knl) # get a schedule to check if knl.state < KernelState.PREPROCESSED: @@ -309,7 +310,8 @@ from loopy import get_one_scheduled_kernel scheduled_knl = get_one_scheduled_kernel(knl) schedule_items = scheduled_knl.schedule -sched_is_valid = check_schedule_validity(knl, deps_and_domains, schedule_items, verbose=True) +sched_is_valid = check_schedule_validity( + knl, deps_and_domains, schedule_items, verbose=True) """ print("is sched valid? 
constraint map subset of SIO?") @@ -329,11 +331,11 @@ dep_domains_and_maps = get_dependency_maps( dep_graph_pairs = [ ( - statement_pair_dep_set.statement_before.insn_id, - statement_pair_dep_set.statement_after.insn_id + statement_pair_dep_set.statement_before.insn_id, + statement_pair_dep_set.statement_after.insn_id ) for statement_pair_dep_set, _, _, _, add_edge in dep_domains_and_maps - if add_edge + if add_edge ] dep_graph = create_graph_from_pairs(dep_graph_pairs) -- GitLab From 211f272a2e2dc5f9907b7a4bfaddd45475f4509a Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 6 Jan 2020 19:39:29 -0600 Subject: [PATCH 163/415] in get_dependency_maps(), allow for schedule item ids to be passed as strings instead of shedule items --- __init__.py | 9 +++++++-- example_pairwise_schedule_validity.py | 27 ++++++++++++++++++++------- sched_check_utils.py | 1 + 3 files changed, 28 insertions(+), 9 deletions(-) diff --git a/__init__.py b/__init__.py index 308827778..be235f11d 100644 --- a/__init__.py +++ b/__init__.py @@ -295,7 +295,7 @@ def check_schedule_validity( def get_dependency_maps( deps_and_domains, - schedule_items, + schedule_items, # TODO always pass these as strings since we only need the name? loop_priority, knl, # TODO avoid passing this in ): @@ -321,6 +321,10 @@ def get_dependency_maps( lp_insn_id = _get_insn_id_from_sched_item(sched_item) lp_insn_id_to_lex_sched_id[lp_insn_id] = next_sid next_sid += 1 + elif isinstance(sched_item, str): + # a string was passed, assume it's the insn_id + lp_insn_id_to_lex_sched_id[sched_item] = next_sid + next_sid += 1 from schedule_checker.sched_check_utils import ( get_concurrent_inames, @@ -336,7 +340,7 @@ def get_dependency_maps( dom_after, loop_priority, lp_insn_id_to_lex_sched_id, - "unused", # TODO shouldn't be necessary + "unused", # TODO shouldn't be necessary anymore "statement", ) @@ -374,6 +378,7 @@ def get_dependency_maps( # create a map representing constraints from the dependency, # maps statement instance to all statement instances that must occur later + # TODO instead of tuple, store all this in a class deps_domains_and_constraint_maps.append( ( statement_pair_dep_set, diff --git a/example_pairwise_schedule_validity.py b/example_pairwise_schedule_validity.py index 697e7f68c..d47d5b54f 100644 --- a/example_pairwise_schedule_validity.py +++ b/example_pairwise_schedule_validity.py @@ -288,7 +288,7 @@ if knl_choice == "loop_carried_deps": unprocessed_knl = knl.copy() -deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl( +legacy_deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl( unprocessed_knl) # get a schedule to check @@ -300,10 +300,10 @@ schedule_items = knl.schedule print("checking validity") sched_is_valid = check_schedule_validity( - unprocessed_knl, deps_and_domains, schedule_items, verbose=True) + unprocessed_knl, legacy_deps_and_domains, schedule_items, verbose=True) """ -deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(knl) +legacy_deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(knl) # get a schedule to check from loopy import get_one_scheduled_kernel @@ -311,7 +311,7 @@ scheduled_knl = get_one_scheduled_kernel(knl) schedule_items = scheduled_knl.schedule sched_is_valid = check_schedule_validity( - knl, deps_and_domains, schedule_items, verbose=True) + knl, legacy_deps_and_domains, schedule_items, verbose=True) """ print("is sched valid? 
constraint map subset of SIO?") @@ -322,22 +322,35 @@ print("="*80) print("testing dep sort") print("="*80) -dep_domains_and_maps = get_dependency_maps( - deps_and_domains, +# create maps representing legacy deps +# (includes bool representing result of test for dep graph edge) +legacy_dep_domains_and_maps = get_dependency_maps( + legacy_deps_and_domains, schedule_items, knl.loop_priority, knl, ) +# tuples in legacy_dep_domains_and_maps look like this: +# ( +# statement_pair_dep_set, +# dom_before, +# dom_after, +# dep_constraint_map, +# intersect_not_empty, +# ) + +# get dep graph edges dep_graph_pairs = [ ( statement_pair_dep_set.statement_before.insn_id, statement_pair_dep_set.statement_after.insn_id ) - for statement_pair_dep_set, _, _, _, add_edge in dep_domains_and_maps + for statement_pair_dep_set, _, _, _, add_edge in legacy_dep_domains_and_maps if add_edge ] +# create dep graph from edges dep_graph = create_graph_from_pairs(dep_graph_pairs) print("dep_graph:") diff --git a/sched_check_utils.py b/sched_check_utils.py index df8c07797..ee3cbb532 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -489,6 +489,7 @@ def create_graph_from_pairs(before_after_pairs): graph[before] = graph[before] | set([after, ]) return graph + # only used for example purposes: -- GitLab From d920778491f489c5b998a2e966ee47f2440473e3 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Wed, 8 Jan 2020 18:40:07 -0600 Subject: [PATCH 164/415] moved get_dependency_maps to dependency.py --- __init__.py | 100 -------------------------- dependency.py | 87 ++++++++++++++++++++++ example_pairwise_schedule_validity.py | 4 +- 3 files changed, 90 insertions(+), 101 deletions(-) diff --git a/__init__.py b/__init__.py index be235f11d..e0ae56a90 100644 --- a/__init__.py +++ b/__init__.py @@ -291,103 +291,3 @@ def check_schedule_validity( print("===========================================================") return sched_is_valid - - -def get_dependency_maps( - deps_and_domains, - schedule_items, # TODO always pass these as strings since we only need the name? 
- loop_priority, - knl, # TODO avoid passing this in - ): - - from schedule_checker.dependency import ( - create_dependency_constraint, - StatementPairDependencySet, - DependencyType as dt, - ) - from schedule_checker.sched_check_utils import ( - prettier_map_string, - ) - - # create map from loopy insn ids to ints - lp_insn_id_to_lex_sched_id = {} # TODO - next_sid = 0 - from loopy.schedule import Barrier, RunInstruction - for sched_item in schedule_items: - if isinstance(sched_item, (RunInstruction, Barrier)): - from schedule_checker.sched_check_utils import ( - _get_insn_id_from_sched_item, - ) - lp_insn_id = _get_insn_id_from_sched_item(sched_item) - lp_insn_id_to_lex_sched_id[lp_insn_id] = next_sid - next_sid += 1 - elif isinstance(sched_item, str): - # a string was passed, assume it's the insn_id - lp_insn_id_to_lex_sched_id[sched_item] = next_sid - next_sid += 1 - - from schedule_checker.sched_check_utils import ( - get_concurrent_inames, - ) - conc_inames, non_conc_inames = get_concurrent_inames(knl) - - deps_domains_and_constraint_maps = [] # TODO refactor this (maybe make a new data structure) - for statement_pair_dep_set, dom_before, dom_after in deps_and_domains: - - dep_constraint_map = create_dependency_constraint( - statement_pair_dep_set, - dom_before, - dom_after, - loop_priority, - lp_insn_id_to_lex_sched_id, - "unused", # TODO shouldn't be necessary anymore - "statement", - ) - - # create "same" dep for these two insns - s_before = statement_pair_dep_set.statement_before - s_after = statement_pair_dep_set.statement_after - shared_nc_inames = s_before.within_inames & s_after.within_inames & non_conc_inames - same_dep_set = StatementPairDependencySet( - s_before, - s_after, - {dt.SAME: shared_nc_inames} - ) - same_dep_constraint_map = create_dependency_constraint( - same_dep_set, - dom_before, - dom_after, - loop_priority, - lp_insn_id_to_lex_sched_id, - "unused", # TODO shouldn't be necessary - "statement", - ) - - # see whether we should create an edge in our statement dep graph - intersect_dep_and_same = same_dep_constraint_map & dep_constraint_map - intersect_not_empty = not bool(intersect_dep_and_same.is_empty()) - - """ - print("") - print("dep: %s" % (statement_pair_dep_set)) - print("map: ") - print(prettier_map_string(dep_constraint_map)) - print(intersect_not_empty) - print(intersect_dep_and_same) - """ - - # create a map representing constraints from the dependency, - # maps statement instance to all statement instances that must occur later - # TODO instead of tuple, store all this in a class - deps_domains_and_constraint_maps.append( - ( - statement_pair_dep_set, - dom_before, - dom_after, - dep_constraint_map, - intersect_not_empty, - ) - ) - print("") - - return deps_domains_and_constraint_maps diff --git a/dependency.py b/dependency.py index 56e6bcd73..9e4af1f96 100644 --- a/dependency.py +++ b/dependency.py @@ -793,3 +793,90 @@ def get_dependency_sources_and_sinks(knl, sched_item_ids): sinks = sched_item_ids - dependees return sources, sinks + + +def get_dependency_maps( + deps_and_domains, + schedule_items, # TODO always pass these as strings since we only need the name? 
+ loop_priority, + knl, # TODO avoid passing this in + ): + + from schedule_checker.sched_check_utils import ( + prettier_map_string, + ) + dt = DependencyType + + # create map from loopy insn ids to ints + lp_insn_id_to_lex_sched_id = {} # TODO + next_sid = 0 + from loopy.schedule import Barrier, RunInstruction + for sched_item in schedule_items: + if isinstance(sched_item, (RunInstruction, Barrier)): + from schedule_checker.sched_check_utils import ( + _get_insn_id_from_sched_item, + ) + lp_insn_id = _get_insn_id_from_sched_item(sched_item) + lp_insn_id_to_lex_sched_id[lp_insn_id] = next_sid + next_sid += 1 + elif isinstance(sched_item, str): + # a string was passed, assume it's the insn_id + lp_insn_id_to_lex_sched_id[sched_item] = next_sid + next_sid += 1 + + from schedule_checker.sched_check_utils import ( + get_concurrent_inames, + ) + conc_inames, non_conc_inames = get_concurrent_inames(knl) + + deps_domains_and_constraint_maps = [] # TODO refactor this (maybe make a new data structure) + for statement_pair_dep_set, dom_before, dom_after in deps_and_domains: + + dep_constraint_map = create_dependency_constraint( + statement_pair_dep_set, + dom_before, + dom_after, + loop_priority, + lp_insn_id_to_lex_sched_id, + "unused", # TODO shouldn't be necessary anymore + "statement", + ) + + # create "same" dep for these two insns + s_before = statement_pair_dep_set.statement_before + s_after = statement_pair_dep_set.statement_after + shared_nc_inames = s_before.within_inames & s_after.within_inames & non_conc_inames + same_dep_set = StatementPairDependencySet( + s_before, + s_after, + {dt.SAME: shared_nc_inames} + ) + same_dep_constraint_map = create_dependency_constraint( + same_dep_set, + dom_before, + dom_after, + loop_priority, + lp_insn_id_to_lex_sched_id, + "unused", # TODO shouldn't be necessary + "statement", + ) + + # see whether we should create an edge in our statement dep graph + intersect_dep_and_same = same_dep_constraint_map & dep_constraint_map + intersect_not_empty = not bool(intersect_dep_and_same.is_empty()) + + # create a map representing constraints from the dependency, + # maps statement instance to all statement instances that must occur later + # TODO instead of tuple, store all this in a class + deps_domains_and_constraint_maps.append( + ( + statement_pair_dep_set, + dom_before, + dom_after, + dep_constraint_map, + intersect_not_empty, + ) + ) + print("") + + return deps_domains_and_constraint_maps diff --git a/example_pairwise_schedule_validity.py b/example_pairwise_schedule_validity.py index d47d5b54f..b8c3663e6 100644 --- a/example_pairwise_schedule_validity.py +++ b/example_pairwise_schedule_validity.py @@ -3,11 +3,13 @@ import numpy as np from schedule_checker import ( get_statement_pair_dependency_sets_from_legacy_knl, check_schedule_validity, - get_dependency_maps, ) from schedule_checker.sched_check_utils import ( create_graph_from_pairs, ) +from schedule_checker.dependency import ( + get_dependency_maps, +) from loopy.kernel import KernelState from loopy import ( preprocess_kernel, -- GitLab From 9b2628e561acc64d246c12a7cab42bcecda9aa94 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Wed, 8 Jan 2020 19:05:11 -0600 Subject: [PATCH 165/415] encapsulate dep info previously held in tuple --- dependency.py | 38 ++++++++++++++++++++------- example_pairwise_schedule_validity.py | 19 +++----------- 2 files changed, 32 insertions(+), 25 deletions(-) diff --git a/dependency.py b/dependency.py index 9e4af1f96..c00c9cad7 100644 --- a/dependency.py +++ b/dependency.py @@ 
-795,6 +795,24 @@ def get_dependency_sources_and_sinks(knl, sched_item_ids): return sources, sinks +class DependencyInfo(object): + # TODO rename + # TODO use Record? + def __init__( + self, + statement_pair_dep_set, + dom_before, + dom_after, + dep_constraint_map, + is_edge_in_dep_graph, # { dep & SAME } != empty + ): + self.statement_pair_dep_set = statement_pair_dep_set + self.dom_before = dom_before + self.dom_after = dom_after + self.dep_constraint_map = dep_constraint_map + self.is_edge_in_dep_graph = is_edge_in_dep_graph + + def get_dependency_maps( deps_and_domains, schedule_items, # TODO always pass these as strings since we only need the name? @@ -829,7 +847,7 @@ def get_dependency_maps( ) conc_inames, non_conc_inames = get_concurrent_inames(knl) - deps_domains_and_constraint_maps = [] # TODO refactor this (maybe make a new data structure) + dep_info_list = [] for statement_pair_dep_set, dom_before, dom_after in deps_and_domains: dep_constraint_map = create_dependency_constraint( @@ -868,15 +886,15 @@ def get_dependency_maps( # create a map representing constraints from the dependency, # maps statement instance to all statement instances that must occur later # TODO instead of tuple, store all this in a class - deps_domains_and_constraint_maps.append( - ( - statement_pair_dep_set, - dom_before, - dom_after, - dep_constraint_map, - intersect_not_empty, + dep_info_list.append( + DependencyInfo( + statement_pair_dep_set, + dom_before, + dom_after, + dep_constraint_map, + intersect_not_empty, + ) ) - ) print("") - return deps_domains_and_constraint_maps + return dep_info_list diff --git a/example_pairwise_schedule_validity.py b/example_pairwise_schedule_validity.py index b8c3663e6..542f6ee6f 100644 --- a/example_pairwise_schedule_validity.py +++ b/example_pairwise_schedule_validity.py @@ -326,31 +326,20 @@ print("="*80) # create maps representing legacy deps # (includes bool representing result of test for dep graph edge) -legacy_dep_domains_and_maps = get_dependency_maps( +legacy_dep_info_list = get_dependency_maps( legacy_deps_and_domains, schedule_items, knl.loop_priority, knl, ) -# tuples in legacy_dep_domains_and_maps look like this: -# ( -# statement_pair_dep_set, -# dom_before, -# dom_after, -# dep_constraint_map, -# intersect_not_empty, -# ) - # get dep graph edges dep_graph_pairs = [ ( - statement_pair_dep_set.statement_before.insn_id, - statement_pair_dep_set.statement_after.insn_id + dep.statement_pair_dep_set.statement_before.insn_id, + dep.statement_pair_dep_set.statement_after.insn_id ) - for statement_pair_dep_set, _, _, _, add_edge in legacy_dep_domains_and_maps - if add_edge - ] + for dep in legacy_dep_info_list if dep.is_edge_in_dep_graph] # create dep graph from edges dep_graph = create_graph_from_pairs(dep_graph_pairs) -- GitLab From 6f55ce11e9491b32c6401ba1d0d1c6799ca29566 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 13 Jan 2020 05:40:44 -0600 Subject: [PATCH 166/415] sid_to_dom dicts now only contain one item, so no longer use dict --- __init__.py | 19 ++++--------------- example_wave_equation.py | 8 ++++++-- schedule.py | 19 ++++++++----------- 3 files changed, 18 insertions(+), 28 deletions(-) diff --git a/__init__.py b/__init__.py index e0ae56a90..6381c9cf7 100644 --- a/__init__.py +++ b/__init__.py @@ -136,25 +136,15 @@ def check_schedule_validity( # Get an isl map representing the LexSchedule; # this requires the iname domains - # get a mapping from lex schedule id to relevant inames domain - # TODO if sid_to_dom_before/after always contain single 
pair, - # maybe don't use dict - sid_to_dom_before = { - lp_insn_id_to_lex_sched_id[s_before.insn_id]: dom_before, - } - sid_to_dom_after = { - lp_insn_id_to_lex_sched_id[s_after.insn_id]: dom_after, - } - sched_map_symbolic_before, sched_map_symbolic_after = \ sched.create_symbolic_isl_map( - sid_to_dom_before, - sid_to_dom_after, + dom_before, + dom_after, ) if verbose: - print("sid_to_dom_before:\n", sid_to_dom_before) - print("sid_to_dom_after:\n", sid_to_dom_after) + print("dom_before:\n", dom_before) + print("dom_after:\n", dom_after) print("LexSchedule after creating symbolic isl map:") print(sched) print("LexSched:") @@ -215,7 +205,6 @@ def check_schedule_validity( import islpy as isl from schedule_checker.sched_check_utils import ( reorder_dims_by_name, - append_apostrophes, ) sio_in_names = sio.space.get_var_names(isl.dim_type.in_) aligned_constraint_map = reorder_dims_by_name( diff --git a/example_wave_equation.py b/example_wave_equation.py index 2be546a78..5860641b0 100644 --- a/example_wave_equation.py +++ b/example_wave_equation.py @@ -176,7 +176,9 @@ sid_to_dom = { sid_after: inames_domain_after, } -sched_map_symbolic = sched.create_symbolic_isl_map(sid_to_dom) +#sched_map_symbolic = sched.create_symbolic_isl_map(sid_to_dom) +sched_map_symbolic = sched.create_symbolic_isl_map( + inames_domain_before, inames_domain_after) # {{{ verbose @@ -449,7 +451,9 @@ sid_to_dom = { sid_after: inames_domain_after_mapped, } -sched_map_symbolic = sched.create_symbolic_isl_map(sid_to_dom) +#sched_map_symbolic = sched.create_symbolic_isl_map(sid_to_dom) +sched_map_symbolic = sched.create_symbolic_isl_map( + inames_domain_before_mapped, inames_domain_after_mapped) # {{{ verbose diff --git a/schedule.py b/schedule.py index 80002455e..661225805 100644 --- a/schedule.py +++ b/schedule.py @@ -269,8 +269,8 @@ class LexSchedule(object): def create_symbolic_isl_map( self, - sid_to_dom_before, - sid_to_dom_after, + dom_before, + dom_after, dom_inames_ordered_before=None, dom_inames_ordered_after=None, ): @@ -300,20 +300,15 @@ class LexSchedule(object): add_dims_to_isl_set ) - # TODO if sid_to_dom_before/after always contain single pair, - # maybe don't use dict - assert len(sid_to_dom_before) == 1 - assert len(sid_to_dom_after) == 1 - from schedule_checker.sched_check_utils import ( list_var_names_in_isl_sets, ) if dom_inames_ordered_before is None: dom_inames_ordered_before = list_var_names_in_isl_sets( - sid_to_dom_before.values()) + [dom_before]) if dom_inames_ordered_after is None: dom_inames_ordered_after = list_var_names_in_isl_sets( - sid_to_dom_after.values()) + [dom_after]) # create an isl space # {('statement', used in >=1 statement domain>) -> @@ -333,12 +328,14 @@ class LexSchedule(object): # intersection with sched map later doms_to_intersect_before = [ add_dims_to_isl_set( - sid_to_dom_before[self.lex_sched_stmt_before[0].int_id], isl.dim_type.set, + #sid_to_dom_before[self.lex_sched_stmt_before[0].int_id], isl.dim_type.set, + dom_before, isl.dim_type.set, [self.statement_var_name], 0), ] doms_to_intersect_after = [ add_dims_to_isl_set( - sid_to_dom_after[self.lex_sched_stmt_after[0].int_id], isl.dim_type.set, + #sid_to_dom_after[self.lex_sched_stmt_after[0].int_id], isl.dim_type.set, + dom_after, isl.dim_type.set, [self.statement_var_name], 0), ] -- GitLab From c0e6d5793836553827f34a673465738ab6e2823d Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 13 Jan 2020 06:06:03 -0600 Subject: [PATCH 167/415] add LexScheduleStatementInstance to use instead of a two-tuple holding a 
LexScheduleStatement and a point in lex space --- __init__.py | 1 - schedule.py | 125 +++++++++++++++++++++++++++++----------------------- 2 files changed, 69 insertions(+), 57 deletions(-) diff --git a/__init__.py b/__init__.py index 6381c9cf7..e3e9bd293 100644 --- a/__init__.py +++ b/__init__.py @@ -163,7 +163,6 @@ def check_schedule_validity( print("-"*80) """ - # TODO which direction does this composition go? # create statement instance ordering, # maps each statement instance to all statement instances occuring later sio = get_statement_ordering_map( diff --git a/schedule.py b/schedule.py index 661225805..38b6d66cc 100644 --- a/schedule.py +++ b/schedule.py @@ -2,7 +2,7 @@ import islpy as isl class LexScheduleStatement(object): - """A representation of a Loopy statement instance. + """A representation of a Loopy statement. .. attribute:: insn_id @@ -42,6 +42,23 @@ class LexScheduleStatement(object): self.insn_id, int_id, within_inames) +class LexScheduleStatementInstance(object): + """A representation of a Loopy statement instance. + + """ + + def __init__( + self, + stmt, # a LexScheduleStatement + lex_pt, # [string/int, ] + ): + self.stmt = stmt + self.lex_pt = lex_pt + + def __str__(self): + return "{%s, %s}" % (self.stmt, self.lex_pt) + + class LexSchedule(object): """A program ordering represented as a mapping from statement instances to points in a lexicographic ordering. @@ -103,8 +120,8 @@ class LexSchedule(object): # TODO update docs now that we have two schedules # LexScheduleStatements - self.lex_sched_stmt_before = None - self.lex_sched_stmt_after = None + self.stmt_instance_before = None + self.stmt_instance_after = None # make sure we don't have an iname name conflict assert not any( @@ -137,7 +154,7 @@ class LexSchedule(object): # don't increment lex dim val enumerating items in current block, # otherwise, this loop is next item in current code block, so # increment lex dim val enumerating items in current code block - if self.lex_sched_stmt_before or self.lex_sched_stmt_after: # if either statement has been set + if self.stmt_instance_before or self.stmt_instance_after: # if either statement has been set # this lex value will correspond to everything inside this loop # we will add new lex dimensions to enuerate items inside loop next_insn_lex_pt[-1] = next_insn_lex_pt[-1]+1 @@ -179,47 +196,43 @@ class LexSchedule(object): # otherwise process all instructions if lp_insn_id == before_insn_id and lp_insn_id == after_insn_id: # add before sched item - self.lex_sched_stmt_before = ( - LexScheduleStatement( - insn_id=lp_insn_id, - int_id=next_sid, # int representing insn - ), - next_insn_lex_pt[:] - ) + self.stmt_instance_before = LexScheduleStatementInstance( + LexScheduleStatement( + insn_id=lp_insn_id, + int_id=next_sid, # int representing insn + ), + next_insn_lex_pt[:]) # add after sched item - self.lex_sched_stmt_after = ( - LexScheduleStatement( - insn_id=lp_insn_id, - int_id=next_sid, # int representing insn - ), - next_insn_lex_pt[:] - ) + self.stmt_instance_after = LexScheduleStatementInstance( + LexScheduleStatement( + insn_id=lp_insn_id, + int_id=next_sid, # int representing insn + ), + next_insn_lex_pt[:]) # increment lex dim val enumerating items in current code block next_insn_lex_pt[-1] = next_insn_lex_pt[-1] + 1 next_sid += 1 elif lp_insn_id == before_insn_id: # add before sched item - self.lex_sched_stmt_before = ( - LexScheduleStatement( - insn_id=lp_insn_id, - int_id=next_sid, # int representing insn - ), - next_insn_lex_pt[:] - ) + 
self.stmt_instance_before = LexScheduleStatementInstance( + LexScheduleStatement( + insn_id=lp_insn_id, + int_id=next_sid, # int representing insn + ), + next_insn_lex_pt[:]) # increment lex dim val enumerating items in current code block next_insn_lex_pt[-1] = next_insn_lex_pt[-1] + 1 next_sid += 1 elif lp_insn_id == after_insn_id: # add after sched item - self.lex_sched_stmt_after = ( - LexScheduleStatement( - insn_id=lp_insn_id, - int_id=next_sid, # int representing insn - ), - next_insn_lex_pt[:] - ) + self.stmt_instance_after = LexScheduleStatementInstance( + LexScheduleStatement( + insn_id=lp_insn_id, + int_id=next_sid, # int representing insn + ), + next_insn_lex_pt[:]) # increment lex dim val enumerating items in current code block next_insn_lex_pt[-1] = next_insn_lex_pt[-1] + 1 @@ -227,7 +240,7 @@ class LexSchedule(object): else: pass # to save time, stop when we've created both statements - if self.lex_sched_stmt_before and self.lex_sched_stmt_after: + if self.stmt_instance_before and self.stmt_instance_after: break # at this point, lex_schedule may contain lex points missing dimensions, @@ -240,12 +253,12 @@ class LexSchedule(object): :class:`LexScheduleStatement`. """ return { - self.lex_sched_stmt_before[0].insn_id: self.lex_sched_stmt_before[0].int_id, - self.lex_sched_stmt_after[0].insn_id: self.lex_sched_stmt_after[0].int_id, + self.stmt_instance_before.stmt.insn_id: self.stmt_instance_before.stmt.int_id, + self.stmt_instance_after.stmt.insn_id: self.stmt_instance_after.stmt.int_id, } def max_lex_dims(self): - return max([len(self.lex_sched_stmt_before[1]), len(self.lex_sched_stmt_after[1])]) + return max([len(self.stmt_instance_before.lex_pt), len(self.stmt_instance_after.lex_pt)]) def pad_lex_pts_with_zeros(self): """Find the maximum number of lexicographic dimensions represented @@ -256,15 +269,15 @@ class LexSchedule(object): """ max_lex_dim = self.max_lex_dims() - self.lex_sched_stmt_before = ( - self.lex_sched_stmt_before[0], - self.lex_sched_stmt_before[1][:] + [0]*( - max_lex_dim-len(self.lex_sched_stmt_before[1])) + self.stmt_instance_before = LexScheduleStatementInstance( + self.stmt_instance_before.stmt, + self.stmt_instance_before.lex_pt[:] + [0]*( + max_lex_dim-len(self.stmt_instance_before.lex_pt)) ) - self.lex_sched_stmt_after = ( - self.lex_sched_stmt_after[0], - self.lex_sched_stmt_after[1][:] + [0]*( - max_lex_dim-len(self.lex_sched_stmt_after[1])) + self.stmt_instance_after = LexScheduleStatementInstance( + self.stmt_instance_after.stmt, + self.stmt_instance_after.lex_pt[:] + [0]*( + max_lex_dim-len(self.stmt_instance_after.lex_pt)) ) def create_symbolic_isl_map( @@ -328,13 +341,13 @@ class LexSchedule(object): # intersection with sched map later doms_to_intersect_before = [ add_dims_to_isl_set( - #sid_to_dom_before[self.lex_sched_stmt_before[0].int_id], isl.dim_type.set, + #sid_to_dom_before[self.stmt_instance_before.stmt.int_id], isl.dim_type.set, dom_before, isl.dim_type.set, [self.statement_var_name], 0), ] doms_to_intersect_after = [ add_dims_to_isl_set( - #sid_to_dom_after[self.lex_sched_stmt_after[0].int_id], isl.dim_type.set, + #sid_to_dom_after[self.stmt_instance_after.stmt.int_id], isl.dim_type.set, dom_after, isl.dim_type.set, [self.statement_var_name], 0), ] @@ -353,15 +366,15 @@ class LexSchedule(object): return ( create_symbolic_isl_map_from_tuples( zip( - [((self.lex_sched_stmt_before[0].int_id,) + tuple(dom_inames_ordered_before), - self.lex_sched_stmt_before[1])], + [((self.stmt_instance_before.stmt.int_id,) + 
tuple(dom_inames_ordered_before), + self.stmt_instance_before.lex_pt)], doms_to_intersect_before ), sched_space_before, self.unused_param_name, self.statement_var_name), create_symbolic_isl_map_from_tuples( zip( - [((self.lex_sched_stmt_after[0].int_id,) + tuple(dom_inames_ordered_after), - self.lex_sched_stmt_after[1])], + [((self.stmt_instance_after.stmt.int_id,) + tuple(dom_inames_ordered_after), + self.stmt_instance_after.lex_pt)], doms_to_intersect_after ), sched_space_after, self.unused_param_name, self.statement_var_name) @@ -388,21 +401,21 @@ class LexSchedule(object): return self.__bool__() def __eq__(self, other): - return (self.lex_sched_stmt_before == other.lex_sched_stmt_before and - self.lex_sched_stmt_after == other.lex_sched_stmt_after) + return (self.stmt_instance_before == other.stmt_instance_before and + self.stmt_instance_after == other.stmt_instance_after) def __str__(self): sched_str = "Before: {\n" domain_elem = "[%s=%s,]" % ( self.statement_var_name, - self.lex_sched_stmt_before[0].int_id) - sched_str += "%s -> %s;\n" % (domain_elem, self.lex_sched_stmt_before[1]) + self.stmt_instance_before.stmt.int_id) + sched_str += "%s -> %s;\n" % (domain_elem, self.stmt_instance_before.lex_pt) sched_str += "}\n" sched_str += "After: {\n" domain_elem += "[%s=%s,]" % ( self.statement_var_name, - self.lex_sched_stmt_after[0].int_id) - sched_str += "%s -> %s;\n" % (domain_elem, self.lex_sched_stmt_after[1]) + self.stmt_instance_after.stmt.int_id) + sched_str += "%s -> %s;\n" % (domain_elem, self.stmt_instance_after.lex_pt) sched_str += "}" return sched_str -- GitLab From 58dba1f52486c68a548f102a40c798d8888ab108 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 13 Jan 2020 11:47:59 -0600 Subject: [PATCH 168/415] switch notation so that primes are used to denote statement *before* (dependee) rather than statement after (depender) --- dependency.py | 121 ++++++++++++++++--------------------- lexicographic_order_map.py | 17 +++--- sched_check_utils.py | 4 +- 3 files changed, 63 insertions(+), 79 deletions(-) diff --git a/dependency.py b/dependency.py index c00c9cad7..c72219222 100644 --- a/dependency.py +++ b/dependency.py @@ -1,6 +1,9 @@ import islpy as isl +# TODO update all documentation/comments after apostrophe switched to +# *before* statement/inames + class DependencyType: """Strings specifying a particular type of dependency relationship. @@ -209,15 +212,18 @@ def create_dependency_constraint( statement_var_name_prime = statement_var_name+"'" # get (ordered) list of unused before/after inames - # TODO are there ever unused inames now that we're separating the in/out spaces? inames_before_unused = [] for iname in dom_inames_ordered_before: if iname not in dom_before_constraint_set.get_var_names(isl.dim_type.out): - inames_before_unused.append(iname) + inames_before_unused.append(iname + "'") inames_after_unused = [] for iname in dom_inames_ordered_after: if iname not in dom_after_constraint_set.get_var_names(isl.dim_type.out): - inames_after_unused.append(iname + "'") + inames_after_unused.append(iname) + + # TODO are there ever unused inames now that we're separating the in/out spaces? 
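As a rough standalone sketch of the primed-before convention this patch introduces (not taken from the patch itself; bare islpy only, with "sp"/"ip" standing in for the primed s'/i', since the marker is normally attached by the helpers in sched_check_utils.py), a SAME(i) constraint between dependee statement 0 and depender statement 1 could be built along these lines:

import islpy as isl

v = isl.make_zero_and_vars(["sp", "ip", "s", "i"], ["n"])

# i' = i (SAME on iname i), plus statement ids and 0 <= i', i < n bounds
same_constraint = (
    v["ip"].eq_set(v["i"])
    & v["sp"].eq_set(v[0])          # dependee statement id (0), primed var
    & v["s"].eq_set(v[0] + 1)       # depender statement id (1)
    & (v[0] - 1).lt_set(v["ip"]) & v["ip"].lt_set(v["n"])
    & (v[0] - 1).lt_set(v["i"]) & v["i"].lt_set(v["n"]))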
+ if inames_before_unused or inames_after_unused: + assert False # initialize constraints to False # this will disappear as soon as we add a constraint @@ -237,7 +243,7 @@ def create_dependency_constraint( if dep_type == dt.SAME: constraint_set = create_elementwise_comparison_conjunction_set( - inames_list, inames_prime, islvars, op="eq") + inames_prime, inames_list, islvars, op="eq") elif dep_type == dt.PRIOR: priority_known = False @@ -317,14 +323,15 @@ def create_dependency_constraint( # TODO handle case where inames list is empty constraint_set = get_lex_order_constraint( islvars, - inames_list_nest_ordered, inames_list_nest_ordered_prime, + inames_list_nest_ordered, ) else: # priority not known # PRIOR requires upper left quadrant happen before: constraint_set = create_elementwise_comparison_conjunction_set( - inames_list, inames_prime, islvars, op="lt") + inames_prime, inames_list, islvars, op="lt") + # TODO remove, this shouldn't happen anymore # set unused vars == unused dummy param for iname in inames_before_unused+inames_after_unused: constraint_set = constraint_set & islvars[iname].eq_set( @@ -333,9 +340,9 @@ def create_dependency_constraint( # set statement_var_name == statement # s_before_int = insn_id_to_int[statement_dep_set.statement_before.insn_id] s_after_int = insn_id_to_int[statement_dep_set.statement_after.insn_id] - constraint_set = constraint_set & islvars[statement_var_name].eq_set( - islvars[0]+s_before_int) constraint_set = constraint_set & islvars[statement_var_name_prime].eq_set( + islvars[0]+s_before_int) + constraint_set = constraint_set & islvars[statement_var_name].eq_set( islvars[0]+s_after_int) # union this constraint_set with all_constraints_set @@ -351,23 +358,23 @@ def create_dependency_constraint( # now apply domain sets to constraint variables # add statement variable to doms to enable intersection - domain_to_intersect = add_dims_to_isl_set( - dom_before_constraint_set, isl.dim_type.out, - [statement_var_name], statement_var_pose) - range_constraint_set = create_new_isl_set_with_primes(dom_after_constraint_set) range_to_intersect = add_dims_to_isl_set( - range_constraint_set, isl.dim_type.out, + dom_after_constraint_set, isl.dim_type.out, + [statement_var_name], statement_var_pose) + domain_constraint_set = create_new_isl_set_with_primes(dom_before_constraint_set) + domain_to_intersect = add_dims_to_isl_set( + domain_constraint_set, isl.dim_type.out, [statement_var_name_prime], statement_var_pose) # insert inames missing from doms to enable intersection domain_to_intersect = reorder_dims_by_name( domain_to_intersect, isl.dim_type.out, - [statement_var_name] + dom_inames_ordered_before, + append_apostrophes([statement_var_name] + dom_inames_ordered_before), add_missing=True) range_to_intersect = reorder_dims_by_name( range_to_intersect, isl.dim_type.out, - append_apostrophes([statement_var_name] + dom_inames_ordered_after), + [statement_var_name] + dom_inames_ordered_after, add_missing=True) # intersect doms @@ -417,11 +424,11 @@ def _create_5pt_stencil_dependency_constraint( inames_before_unused = [] for iname in all_dom_inames_ordered: if iname not in dom_before_constraint_set.get_var_names(isl.dim_type.out): - inames_before_unused.append(iname) + inames_before_unused.append(iname + "'") inames_after_unused = [] for iname in all_dom_inames_ordered: if iname not in dom_after_constraint_set.get_var_names(isl.dim_type.out): - inames_after_unused.append(iname + "'") + inames_after_unused.append(iname) # initialize constraints to False # this will 
disappear as soon as we add a constraint @@ -446,30 +453,29 @@ def _create_5pt_stencil_dependency_constraint( """ # local dep: constraint_set = ( - islvars[time_iname_prime].eq_set(islvars[time_iname] + one) & + islvars[time_iname].eq_set(islvars[time_iname_prime] + one) & ( - (islvars[space_iname_prime]-two).lt_set(islvars[space_iname]) & - islvars[space_iname].lt_set(islvars[space_iname_prime]+two) + (islvars[space_iname]-two).lt_set(islvars[space_iname_prime]) & + islvars[space_iname_prime].lt_set(islvars[space_iname]+two) ) #( - #(islvars[space_iname]-two).lt_set(islvars[space_iname_prime]) & - # islvars[space_iname_prime].lt_set(islvars[space_iname]+two) + #(islvars[space_iname_prime]-two).lt_set(islvars[space_iname]) & + # islvars[space_iname].lt_set(islvars[space_iname_prime]+two) #) | - islvars[time_iname_prime].eq_set(islvars[time_iname] + two) & - islvars[space_iname].eq_set(islvars[space_iname_prime]) + islvars[time_iname].eq_set(islvars[time_iname_prime] + two) & + islvars[space_iname_prime].eq_set(islvars[space_iname]) ) - # set unused vars == unused dummy param for iname in inames_before_unused+inames_after_unused: constraint_set = constraint_set & islvars[iname].eq_set( islvars[unused_param_name]) # set statement_var_name == statement # - constraint_set = constraint_set & islvars[statement_var_name].eq_set( - islvars[0]+sid_before) constraint_set = constraint_set & islvars[statement_var_name_prime].eq_set( + islvars[0]+sid_before) + constraint_set = constraint_set & islvars[statement_var_name].eq_set( islvars[0]+sid_after) # convert constraint set to map @@ -479,23 +485,23 @@ def _create_5pt_stencil_dependency_constraint( # now apply domain sets to constraint variables # add statement variable to doms to enable intersection - domain_to_intersect = add_dims_to_isl_set( - dom_before_constraint_set, isl.dim_type.out, - [statement_var_name], statement_var_pose) - range_constraint_set = create_new_isl_set_with_primes(dom_after_constraint_set) range_to_intersect = add_dims_to_isl_set( - range_constraint_set, isl.dim_type.out, + dom_after_constraint_set, isl.dim_type.out, + [statement_var_name], statement_var_pose) + domain_constraint_set = create_new_isl_set_with_primes(dom_before_constraint_set) + domain_to_intersect = add_dims_to_isl_set( + domain_constraint_set, isl.dim_type.out, [statement_var_name_prime], statement_var_pose) # insert inames missing from doms to enable intersection domain_to_intersect = reorder_dims_by_name( domain_to_intersect, isl.dim_type.out, - [statement_var_name] + all_dom_inames_ordered, + append_apostrophes([statement_var_name] + all_dom_inames_ordered), add_missing=True) range_to_intersect = reorder_dims_by_name( range_to_intersect, isl.dim_type.out, - append_apostrophes([statement_var_name] + all_dom_inames_ordered), + [statement_var_name] + all_dom_inames_ordered, add_missing=True) # intersect doms @@ -518,6 +524,7 @@ def create_arbitrary_dependency_constraint( ): # TODO update after allowing different inames for before/after + # TODO test after switching primes to before vars from schedule_checker.sched_check_utils import ( make_islvars_with_var_primes, @@ -549,12 +556,12 @@ def create_arbitrary_dependency_constraint( inames_before_unused = [] for iname in all_dom_inames_ordered: if iname not in dom_before_constraint_set.get_var_names(isl.dim_type.out): - inames_before_unused.append(iname) + inames_before_unused.append(iname + "p") inames_after_unused = [] for iname in all_dom_inames_ordered: if iname not in 
dom_after_constraint_set.get_var_names(isl.dim_type.out): #inames_after_unused.append(iname + "'") - inames_after_unused.append(iname + "p") # TODO figure out before/after notation + inames_after_unused.append(iname) # TODO figure out before/after notation # initialize constraints to False # this will disappear as soon as we add a constraint @@ -604,41 +611,15 @@ def create_arbitrary_dependency_constraint( 1/0 all_constraints_set = all_constraints_set | conj_constraint - #TODO deleteme - """ - space_iname = "ix" - time_iname = "it" - - space_iname_prime = space_iname + "'" - time_iname_prime = time_iname + "'" - one = islvars[0] + 1 - two = islvars[0] + 2 - # local dep: - constraint_set = ( - islvars[time_iname_prime].eq_set(islvars[time_iname] + one) & - ( - (islvars[space_iname_prime]-two).lt_set(islvars[space_iname]) & - islvars[space_iname].lt_set(islvars[space_iname_prime]+two) - ) - #( - #(islvars[space_iname]-two).lt_set(islvars[space_iname_prime]) & - # islvars[space_iname_prime].lt_set(islvars[space_iname]+two) - #) - | - islvars[time_iname_prime].eq_set(islvars[time_iname] + two) & - islvars[space_iname].eq_set(islvars[space_iname_prime]) - ) - """ - # set unused vars == unused dummy param for iname in inames_before_unused+inames_after_unused: all_constraints_set = all_constraints_set & islvars[iname].eq_set( islvars[unused_param_name]) # set statement_var_name == statement # - all_constraints_set = all_constraints_set & islvars[statement_var_name].eq_set( - islvars[0]+sid_before) all_constraints_set = all_constraints_set & islvars[statement_var_name_prime].eq_set( + islvars[0]+sid_before) + all_constraints_set = all_constraints_set & islvars[statement_var_name].eq_set( islvars[0]+sid_after) # convert constraint set to map @@ -648,14 +629,14 @@ def create_arbitrary_dependency_constraint( # now apply domain sets to constraint variables # add statement variable to doms to enable intersection - domain_to_intersect = add_dims_to_isl_set( - dom_before_constraint_set, isl.dim_type.out, + range_to_intersect = add_dims_to_isl_set( + dom_after_constraint_set, isl.dim_type.out, [statement_var_name], statement_var_pose) - range_constraint_set = create_new_isl_set_with_primes( - dom_after_constraint_set, + domain_constraint_set = create_new_isl_set_with_primes( + dom_before_constraint_set, marker="p") # TODO figure out before/after notation - range_to_intersect = add_dims_to_isl_set( - range_constraint_set, isl.dim_type.out, + domain_to_intersect = add_dims_to_isl_set( + domain_constraint_set, isl.dim_type.out, [statement_var_name_prime], statement_var_pose) # insert inames missing from doms to enable intersection diff --git a/lexicographic_order_map.py b/lexicographic_order_map.py index ccfb9d6f9..52afadaa0 100644 --- a/lexicographic_order_map.py +++ b/lexicographic_order_map.py @@ -2,7 +2,7 @@ import islpy as isl def get_statement_ordering_map( - sched_map_before, sched_map_after, lex_map, out_marker="'"): + sched_map_before, sched_map_after, lex_map, before_marker="'"): """Return a mapping that maps each statement instance to all statement instances occuring later. 
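Roughly, and using made-up maps rather than anything generated by this package, what get_statement_ordering_map now computes can be sketched in bare islpy as follows; the before marker is appended to the input dims only after the composition (see the hunk below):

import islpy as isl

# statement instance -> point in lexicographic time, for dependee and depender
sched_before = isl.Map(
    "{ [s, i] -> [t0, t1] : s = 0 and t0 = 0 and t1 = i and 0 <= i < 8 }")
sched_after = isl.Map(
    "{ [s, i] -> [t0, t1] : s = 1 and t0 = 1 and t1 = i and 0 <= i < 8 }")

# 'happens before' in a two-dimensional lexicographic ordering
lex = isl.Map("{ [t0, t1] -> [u0, u1] : t0 < u0 or (t0 = u0 and t1 < u1) }")

# B -> L -> A^-1: dependee instance -> depender instances occurring later
sio = sched_before.apply_range(lex).apply_range(sched_after.reverse())

# append before_marker to the input dims (mirroring the change in the next hunk)
for i in range(sio.dim(isl.dim_type.in_)):
    sio = sio.set_dim_name(
        isl.dim_type.in_, i, sio.get_dim_name(isl.dim_type.in_, i) + "'")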
@@ -25,12 +25,11 @@ def get_statement_ordering_map( """ - # TODO determine which order is correct sio = sched_map_before.apply_range(lex_map).apply_range(sched_map_after.reverse()) - # append marker to out names - for i in range(sio.dim(isl.dim_type.out)): - sio = sio.set_dim_name(isl.dim_type.out, i, sio.get_dim_name( - isl.dim_type.out, i)+out_marker) + # append marker to in names + for i in range(sio.dim(isl.dim_type.in_)): + sio = sio.set_dim_name(isl.dim_type.in_, i, sio.get_dim_name( + isl.dim_type.in_, i)+before_marker) return sio @@ -109,8 +108,10 @@ def create_lex_order_map( if before_names is None: before_names = ["i%s" % (i) for i in range(n_dims)] if after_names is None: - from schedule_checker.sched_check_utils import append_apostrophes - after_names = append_apostrophes(before_names) + from schedule_checker.sched_check_utils import ( + append_marker_to_strings, + ) + after_names = append_marker_to_strings(before_names, marker="_") assert len(before_names) == len(after_names) == n_dims dim_type = isl.dim_type diff --git a/sched_check_utils.py b/sched_check_utils.py index ee3cbb532..9d64ac221 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -2,6 +2,8 @@ import islpy as isl # TODO remove assertions once satisified they are unnecessary +# TODO update all documentation/comments after apostrophe switched to +# *before* statement/inames def prettier_map_string(isl_map): @@ -145,7 +147,7 @@ def make_islvars_with_var_primes( var_names_out = var_names_in[:] return isl.make_zero_and_vars( - var_names_in+append_marker(var_names_out, marker), param_names) + append_marker(var_names_in, marker) + var_names_out, param_names) def append_marker_to_strings(strings, marker="'"): -- GitLab From 4c3b68b4eca4d63506b3a914966b6a211e378b5d Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Wed, 15 Jan 2020 19:16:05 -0600 Subject: [PATCH 169/415] store iname domains for before/after insn inside the StatementPairDependencySet instead of separately --- __init__.py | 33 +++++----------- dependency.py | 71 ++++++++++++++++++---------------- example_dependency_checking.py | 14 ++----- example_wave_equation.py | 2 - 4 files changed, 52 insertions(+), 68 deletions(-) diff --git a/__init__.py b/__init__.py index e3e9bd293..c965be217 100644 --- a/__init__.py +++ b/__init__.py @@ -21,20 +21,7 @@ def get_statement_pair_dependency_sets_from_legacy_knl(knl): # For each set of insns within a given iname subset, find sources and sinks, # then make PRIOR dep from all sinks to all sources at previous iterations. 
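A hypothetical usage sketch of the new calling convention (the kernel knl is assumed to already be in scope, and the import paths are guesses based on the other imports in this series):

from schedule_checker.schedule import LexScheduleStatement
from schedule_checker.dependency import (
    DependencyType as dt,
    StatementPairDependencySet,
    create_dependency_constraint,
)

s_before = LexScheduleStatement(insn_id="insn_a", within_inames=frozenset(["i"]))
s_after = LexScheduleStatement(insn_id="insn_b", within_inames=frozenset(["i"]))

# the iname domains now travel with the dependency set
dep_set = StatementPairDependencySet(
    s_before, s_after, {dt.SAME: frozenset(["i"])},
    knl.get_inames_domain(s_before.within_inames),
    knl.get_inames_domain(s_after.within_inames),
)

# and are no longer passed to create_dependency_constraint separately
constraint_map = create_dependency_constraint(
    dep_set, knl.loop_priority, {"insn_a": 0, "insn_b": 1},
    "unused", "statement")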
- statement_pair_dep_sets = create_dependencies_from_legacy_knl(preprocessed_knl) - - # get separate domains for before.within_inames and after.within_inames - deps_and_domains = [] - for dep_set in statement_pair_dep_sets: - deps_and_domains.append([ - dep_set, - preprocessed_knl.get_inames_domain( - dep_set.statement_before.within_inames), - preprocessed_knl.get_inames_domain( - dep_set.statement_after.within_inames) - ]) - - return deps_and_domains + return create_dependencies_from_legacy_knl(preprocessed_knl) # TODO create a set of broken kernels to test against @@ -75,10 +62,10 @@ def check_schedule_validity( if verbose: print("="*80) print("StatementDependencies w/domains:") - for dep_set, dom_before, dom_after in deps_and_domains: + for dep_set in deps_and_domains: print(dep_set) - print(dom_before) - print(dom_after) + print(dep_set.dom_before) + print(dep_set.dom_after) # Print kernel info ------------------------------------------------------ print("="*80) @@ -100,7 +87,12 @@ def check_schedule_validity( # For each dependency, create+test schedule containing pair of insns------ sched_is_valid = True - for statement_pair_dep_set, dom_before, dom_after in deps_and_domains: + for statement_pair_dep_set in deps_and_domains: + s_before = statement_pair_dep_set.statement_before + s_after = statement_pair_dep_set.statement_after + dom_before = statement_pair_dep_set.dom_before + dom_after = statement_pair_dep_set.dom_after + if verbose: print("="*80) print("statement dep set:") @@ -108,9 +100,6 @@ def check_schedule_validity( print("dom_before:", dom_before) print("dom_after:", dom_after) - s_before = statement_pair_dep_set.statement_before - s_after = statement_pair_dep_set.statement_after - # Create a mapping of {statement instance: lex point} # including only instructions involved in this dependency sched = LexSchedule( @@ -182,8 +171,6 @@ def check_schedule_validity( # maps statement instance to all statement instances that must occur later constraint_map = create_dependency_constraint( statement_pair_dep_set, - dom_before, - dom_after, knl.loop_priority, lp_insn_id_to_lex_sched_id, sched.unused_param_name, diff --git a/dependency.py b/dependency.py index c72219222..3f2318ed1 100644 --- a/dependency.py +++ b/dependency.py @@ -65,10 +65,14 @@ class StatementPairDependencySet(object): statement_before, statement_after, deps, # {dep_type: iname_set} + dom_before=None, + dom_after=None, ): self.statement_before = statement_before self.statement_after = statement_after self.deps = deps + self.dom_before = dom_before + self.dom_after = dom_after def __str__(self): result = "%s --before->\n%s iff\n " % ( @@ -124,8 +128,6 @@ def _convert_constraint_set_to_map(constraint_set, mv_count, src_position=None): def create_dependency_constraint( statement_dep_set, - dom_before_constraint_set, - dom_after_constraint_set, loop_priorities, insn_id_to_int, unused_param_name, @@ -198,10 +200,10 @@ def create_dependency_constraint( ) if dom_inames_ordered_before is None: dom_inames_ordered_before = list_var_names_in_isl_sets( - [dom_before_constraint_set]) + [statement_dep_set.dom_before]) if dom_inames_ordered_after is None: dom_inames_ordered_after = list_var_names_in_isl_sets( - [dom_after_constraint_set]) + [statement_dep_set.dom_after]) # create some (ordered) isl vars to use, e.g., {s, i, j, s', i', j'} islvars = make_islvars_with_var_primes( @@ -214,11 +216,11 @@ def create_dependency_constraint( # get (ordered) list of unused before/after inames inames_before_unused = [] for iname in 
dom_inames_ordered_before: - if iname not in dom_before_constraint_set.get_var_names(isl.dim_type.out): + if iname not in statement_dep_set.dom_before.get_var_names(isl.dim_type.out): inames_before_unused.append(iname + "'") inames_after_unused = [] for iname in dom_inames_ordered_after: - if iname not in dom_after_constraint_set.get_var_names(isl.dim_type.out): + if iname not in statement_dep_set.dom_after.get_var_names(isl.dim_type.out): inames_after_unused.append(iname) # TODO are there ever unused inames now that we're separating the in/out spaces? @@ -359,9 +361,9 @@ def create_dependency_constraint( # add statement variable to doms to enable intersection range_to_intersect = add_dims_to_isl_set( - dom_after_constraint_set, isl.dim_type.out, + statement_dep_set.dom_after, isl.dim_type.out, [statement_var_name], statement_var_pose) - domain_constraint_set = create_new_isl_set_with_primes(dom_before_constraint_set) + domain_constraint_set = create_new_isl_set_with_primes(statement_dep_set.dom_before) domain_to_intersect = add_dims_to_isl_set( domain_constraint_set, isl.dim_type.out, [statement_var_name_prime], statement_var_pose) @@ -688,23 +690,24 @@ def create_dependencies_from_legacy_knl(knl): statement_dep_sets = [] for insn_after in knl.instructions: for insn_before_id in insn_after.depends_on: - dep_dict = {} insn_before = knl.id_to_insn[insn_before_id] insn_before_inames = insn_before.within_inames insn_after_inames = insn_after.within_inames shared_inames = insn_before_inames & insn_after_inames shared_non_conc_inames = shared_inames & non_conc_inames - dep_dict[dt.SAME] = shared_non_conc_inames - - s_before = LexScheduleStatement( - insn_id=insn_before.id, - within_inames=insn_before_inames) - s_after = LexScheduleStatement( - insn_id=insn_after.id, - within_inames=insn_after_inames) statement_dep_sets.append( - StatementPairDependencySet(s_before, s_after, dep_dict)) + StatementPairDependencySet( + LexScheduleStatement( + insn_id=insn_before.id, + within_inames=insn_before_inames), + LexScheduleStatement( + insn_id=insn_after.id, + within_inames=insn_after_inames), + {dt.SAME: shared_non_conc_inames}, + knl.get_inames_domain(insn_before_inames), + knl.get_inames_domain(insn_after_inames), + )) # loop-carried deps ------------------------------------------ @@ -732,16 +735,18 @@ def create_dependencies_from_legacy_knl(knl): shared_inames = sink_insn_inames & source_insn_inames shared_non_conc_inames = shared_inames & non_conc_inames - dep_dict[dt.PRIOR] = shared_non_conc_inames - - s_before = LexScheduleStatement( - insn_id=sink_id, - within_inames=sink_insn_inames) - s_after = LexScheduleStatement( - insn_id=source_id, - within_inames=source_insn_inames) statement_dep_sets.append( - StatementPairDependencySet(s_before, s_after, dep_dict)) + StatementPairDependencySet( + LexScheduleStatement( + insn_id=sink_id, + within_inames=sink_insn_inames), + LexScheduleStatement( + insn_id=source_id, + within_inames=source_insn_inames), + {dt.PRIOR: shared_non_conc_inames}, + knl.get_inames_domain(sink_insn_inames), + knl.get_inames_domain(source_insn_inames), + )) return statement_dep_sets @@ -829,12 +834,10 @@ def get_dependency_maps( conc_inames, non_conc_inames = get_concurrent_inames(knl) dep_info_list = [] - for statement_pair_dep_set, dom_before, dom_after in deps_and_domains: + for statement_pair_dep_set in deps_and_domains: dep_constraint_map = create_dependency_constraint( statement_pair_dep_set, - dom_before, - dom_after, loop_priority, lp_insn_id_to_lex_sched_id, "unused", # 
TODO shouldn't be necessary anymore @@ -844,16 +847,18 @@ def get_dependency_maps( # create "same" dep for these two insns s_before = statement_pair_dep_set.statement_before s_after = statement_pair_dep_set.statement_after + dom_before = statement_pair_dep_set.dom_before + dom_after = statement_pair_dep_set.dom_after shared_nc_inames = s_before.within_inames & s_after.within_inames & non_conc_inames same_dep_set = StatementPairDependencySet( s_before, s_after, - {dt.SAME: shared_nc_inames} + {dt.SAME: shared_nc_inames}, + dom_before, + dom_after, ) same_dep_constraint_map = create_dependency_constraint( same_dep_set, - dom_before, - dom_after, loop_priority, lp_insn_id_to_lex_sched_id, "unused", # TODO shouldn't be necessary diff --git a/example_dependency_checking.py b/example_dependency_checking.py index 52c554607..54ab553db 100644 --- a/example_dependency_checking.py +++ b/example_dependency_checking.py @@ -107,26 +107,20 @@ s0 = LexScheduleStatement(insn_id="0", within_inames={"i", "j"}) s1 = LexScheduleStatement(insn_id="1", within_inames={"i", "j"}) insnid_to_int_sid = {"0": 0, "1": 1} +dom_before = knl.get_inames_domain(s0.within_inames) +dom_after = knl.get_inames_domain(s1.within_inames) + statement_pair_dep_set = StatementPairDependencySet( - s0, s1, {dt.SAME: ["i", "j"]}) + s0, s1, {dt.SAME: ["i", "j"]}, dom_before, dom_after) # SAME({i,j}) means: # insn0{i,j} happens before insn1{i',j'} iff i = i' and j = j' print("Statement pair dependency set:") print(statement_pair_dep_set) -dom_before = knl.get_inames_domain( - statement_pair_dep_set.statement_before.within_inames - ) -dom_after = knl.get_inames_domain( - statement_pair_dep_set.statement_after.within_inames - ) - loop_priority = None constraint_map = create_dependency_constraint( statement_pair_dep_set, - dom_before, - dom_after, loop_priority, insnid_to_int_sid, unused_param_name, diff --git a/example_wave_equation.py b/example_wave_equation.py index 5860641b0..af6e52f2f 100644 --- a/example_wave_equation.py +++ b/example_wave_equation.py @@ -14,8 +14,6 @@ from schedule_checker.sched_check_utils import ( append_apostrophes, ) from schedule_checker.dependency import ( - create_dependencies_from_legacy_knl, - create_dependency_constraint, create_arbitrary_dependency_constraint, ) from dependency import _create_5pt_stencil_dependency_constraint -- GitLab From 1e1b6e9238577bf0ab59f5a55c7d27f7263ceae6 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Wed, 15 Jan 2020 21:06:50 -0600 Subject: [PATCH 170/415] added append_marker_to_isl_map_var_names() --- sched_check_utils.py | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/sched_check_utils.py b/sched_check_utils.py index 9d64ac221..32317cccb 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -108,6 +108,7 @@ def create_new_isl_set_with_primes(old_isl_set, marker="'"): apostrophes appended to dim_type.set dimension names. """ + # TODO this is a special case of append_marker_to_isl_map_var_names new_set = old_isl_set.copy() for i in range(old_isl_set.n_dim()): @@ -116,6 +117,27 @@ def create_new_isl_set_with_primes(old_isl_set, marker="'"): return new_set +def append_marker_to_isl_map_var_names(old_isl_map, dim_type, marker="'"): + """Return an isl_map with marker appended to + dim_type dimension names. + + .. arg old_isl_map: A :class:`islpy.Map`. + + .. arg dim_type: A :class:`islpy.dim_type`, i.e., an :class:`int`, + specifying the dimension to be marked. + + .. 
return: A :class:`islpy.Map` matching `old_isl_map` with + apostrophes appended to dim_type dimension names. + + """ + + new_map = old_isl_map.copy() + for i in range(len(old_isl_map.get_var_names(dim_type))): + new_map = new_map.set_dim_name(dim_type, i, old_isl_map.get_dim_name( + dim_type, i)+marker) + return new_map + + def make_islvars_with_var_primes( var_names_in, param_names, marker="'", var_names_out=None): """Return a dictionary from variable and parameter names -- GitLab From 85b04a4ff2f45d0c7516f6a6766ae7cfad97cf2e Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Wed, 15 Jan 2020 21:09:13 -0600 Subject: [PATCH 171/415] updated example_wave_equation.py to be consistent with recent changes to schedule checking functions (schedules containing before+after map instead of single mapping, LexSchedule requiring list of sched items, markers being applied to dependee instead of depender) --- example_wave_equation.py | 91 ++++++++++++++++++++-------------------- 1 file changed, 45 insertions(+), 46 deletions(-) diff --git a/example_wave_equation.py b/example_wave_equation.py index af6e52f2f..6afa3044b 100644 --- a/example_wave_equation.py +++ b/example_wave_equation.py @@ -12,6 +12,7 @@ from schedule_checker.sched_check_utils import ( prettier_map_string, reorder_dims_by_name, append_apostrophes, + append_marker_to_isl_map_var_names, ) from schedule_checker.dependency import ( create_arbitrary_dependency_constraint, @@ -105,7 +106,8 @@ print(prettier_map_string(constraint_map)) """ # TODO testing new dep map constraint_map = create_arbitrary_dependency_constraint( - "itp = it + 1 and ixp - 2 < ix and ix < ixp + 2 or itp = it + 2 and ix = ixp", + #"itp = it + 1 and ixp - 2 < ix and ix < ixp + 2 or itp = it + 2 and ix = ixp", + "it = itp + 1 and ix - 2 < ixp and ixp < ix + 2 or it = itp + 2 and ixp = ix", # primes moved to 'before' statement inames_domain_before, inames_domain_after, sid_before = sid_before, @@ -120,7 +122,7 @@ print("constraint_map before mapping:") print(prettier_map_string(constraint_map)) # TODO (left off here) # TODO decide on before/after notation and make consistent -1/0 +#1/0 verbose = False verbose = True @@ -156,18 +158,11 @@ if verbose: # Create a mapping of {statement instance: lex point} # including only instructions involved in this dependency -sched = LexSchedule(scheduled_knl, include_only_insn_ids=[ - str(sid_before), - str(sid_after) - ]) +sched = LexSchedule(scheduled_knl, scheduled_knl.schedule, str(sid_before), str(sid_after)) # Get an isl map representing the LexSchedule; # this requires the iname domains -assert len(sched) in [1, 2] -if len(sched) == 1: - assert inames_domain_before == inames_domain_after - # get a mapping from lex schedule id to relevant inames domain sid_to_dom = { sid_before: inames_domain_before, @@ -175,7 +170,7 @@ sid_to_dom = { } #sched_map_symbolic = sched.create_symbolic_isl_map(sid_to_dom) -sched_map_symbolic = sched.create_symbolic_isl_map( +sched_map_symbolic_before, sched_map_symbolic_after = sched.create_symbolic_isl_maps( inames_domain_before, inames_domain_after) # {{{ verbose @@ -185,7 +180,8 @@ if verbose: print("LexSchedule after creating symbolic isl map:") print(sched) print("LexSched:") - print(prettier_map_string(sched_map_symbolic)) + print(prettier_map_string(sched_map_symbolic_before)) + print(prettier_map_string(sched_map_symbolic_after)) #print("space (statement instances -> lex time):") #print(sched_map_symbolic.space) #print("-"*80) @@ -211,7 +207,10 @@ if verbose: # create statement instance ordering, # 
maps each statement instance to all statement instances occuring later sio = get_statement_ordering_map( - sched_map_symbolic, lex_order_map_symbolic) + sched_map_symbolic_before, + sched_map_symbolic_after, + lex_order_map_symbolic, + before_marker="p") # {{{ verbose @@ -244,11 +243,11 @@ aligned_constraint_map = reorder_dims_by_name( ) # align out dims +sio_out_names = sio.space.get_var_names(isl.dim_type.out) aligned_constraint_map = reorder_dims_by_name( aligned_constraint_map, isl.dim_type.out, - append_apostrophes(sio_in_names), - # TODO sio out names are only pretending to have apostrophes; confusing + sio_out_names, add_missing=False, new_names_are_permutation_only=True, ) @@ -269,7 +268,7 @@ assert ( == sio.space.get_var_names(isl.dim_type.in_)) assert ( aligned_constraint_map.space.get_var_names(isl.dim_type.out) - == append_apostrophes(sio.space.get_var_names(isl.dim_type.out))) + == sio.space.get_var_names(isl.dim_type.out)) assert ( aligned_constraint_map.space.get_var_names(isl.dim_type.param) == sio.space.get_var_names(isl.dim_type.param)) @@ -354,11 +353,16 @@ m2 = isl.BasicMap( "16*(tx - tt) + itx - itt = ix - it and " "16*(tx + tt + tparity) + itt + itx = ix + it and " "0<=tparity<2 and 0 <= itx - itt < 16 and 0 <= itt+itx < 16}") +#m2_primes_after = isl.BasicMap( +# "[nx,nt,unused] -> {[statement, ix, it] -> [statement'=statement, tx', tt', tparity', itt', itx']: " +# "16*(tx' - tt') + itx' - itt' = ix - it and " +# "16*(tx' + tt' + tparity') + itt' + itx' = ix + it and " +# "0<=tparity'<2 and 0 <= itx' - itt' < 16 and 0 <= itt'+itx' < 16}") m2_prime = isl.BasicMap( - "[nx,nt,unused] -> {[statement, ix, it] -> [statement'=statement, tx', tt', tparity', itt', itx']: " - "16*(tx' - tt') + itx' - itt' = ix - it and " - "16*(tx' + tt' + tparity') + itt' + itx' = ix + it and " - "0<=tparity'<2 and 0 <= itx' - itt' < 16 and 0 <= itt'+itx' < 16}") + "[nx,nt,unused] -> {[statement', ix', it'] -> [statement=statement', tx, tt, tparity, itt, itx]: " + "16*(tx - tt) + itx - itt = ix' - it' and " + "16*(tx + tt + tparity) + itt + itx = ix' + it' and " + "0<=tparity<2 and 0 <= itx - itt < 16 and 0 <= itt+itx < 16}") # TODO note order must match statement_iname_premap_order @@ -370,7 +374,7 @@ knl = lp.map_domain(ref_knl, m) knl = lp.prioritize_loops(knl, "tt,tparity,tx,itt,itx") print("code after mapping:") print(generate_code_v2(knl).device_code()) -1/0 +#1/0 print("constraint_map before apply_range:") print(prettier_map_string(constraint_map)) @@ -378,7 +382,12 @@ print(prettier_map_string(constraint_map)) mapped_constraint_map = constraint_map.apply_range(m2) print("constraint_map after apply_range:") print(prettier_map_string(mapped_constraint_map)) -mapped_constraint_map = mapped_constraint_map.apply_domain(m2) +#mapped_constraint_map = mapped_constraint_map.apply_domain(m2) +mapped_constraint_map = mapped_constraint_map.apply_domain(m2_prime) +# put primes on *before* names +mapped_constraint_map = append_marker_to_isl_map_var_names( + mapped_constraint_map, isl.dim_type.in_, marker="'") + print("constraint_map after apply_domain:") print(prettier_map_string(mapped_constraint_map)) @@ -431,18 +440,16 @@ if verbose: # Create a mapping of {statement instance: lex point} # including only instructions involved in this dependency -sched = LexSchedule(scheduled_knl, include_only_insn_ids=[ +sched = LexSchedule( + scheduled_knl, + scheduled_knl.schedule, str(sid_before), str(sid_after) - ]) + ) # Get an isl map representing the LexSchedule; # this requires the iname domains -assert 
len(sched) in [1, 2] -if len(sched) == 1: - assert inames_domain_before_mapped == inames_domain_after_mapped - # get a mapping from lex schedule id to relevant inames domain sid_to_dom = { sid_before: inames_domain_before_mapped, @@ -450,7 +457,7 @@ sid_to_dom = { } #sched_map_symbolic = sched.create_symbolic_isl_map(sid_to_dom) -sched_map_symbolic = sched.create_symbolic_isl_map( +sched_map_symbolic_before, sched_map_symbolic_after = sched.create_symbolic_isl_maps( inames_domain_before_mapped, inames_domain_after_mapped) # {{{ verbose @@ -460,7 +467,8 @@ if verbose: print("LexSchedule after creating symbolic isl map:") print(sched) print("LexSched:") - print(prettier_map_string(sched_map_symbolic)) + print(prettier_map_string(sched_map_symbolic_before)) + print(prettier_map_string(sched_map_symbolic_after)) #print("space (statement instances -> lex time):") #print(sched_map_symbolic.space) #print("-"*80) @@ -486,7 +494,10 @@ if verbose: # create statement instance ordering, # maps each statement instance to all statement instances occuring later sio = get_statement_ordering_map( - sched_map_symbolic, lex_order_map_symbolic) + sched_map_symbolic_before, + sched_map_symbolic_after, + lex_order_map_symbolic, + before_marker="'") # {{{ verbose @@ -518,20 +529,12 @@ aligned_constraint_map = reorder_dims_by_name( new_names_are_permutation_only=True, ) -#print(".....") -#print(aligned_constraint_map.space) -#print("...") -#print(set(aligned_constraint_map.get_var_names(isl.dim_type.out))) -#ppp = append_apostrophes(sio_in_names) -#print(ppp) -#print(set(aligned_constraint_map.get_var_names(isl.dim_type.out)).issubset(ppp)) # align out dims +sio_out_names = sio.space.get_var_names(isl.dim_type.out) aligned_constraint_map = reorder_dims_by_name( aligned_constraint_map, isl.dim_type.out, - #append_apostrophes(sio_in_names), - sio_in_names, # TODO WHY no apostrophes? - # TODO sio out names are only pretending to have apostrophes; confusing + sio_out_names, add_missing=False, new_names_are_permutation_only=True, ) @@ -552,11 +555,7 @@ assert ( == sio.space.get_var_names(isl.dim_type.in_)) assert ( aligned_constraint_map.space.get_var_names(isl.dim_type.out) - == append_apostrophes(sio.space.get_var_names(isl.dim_type.out)) - ) or ( # TODO why no appostrophes? 
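For reference, a small made-up example of the marker helper this file now relies on (its behavior is inferred from the patch that introduced it in sched_check_utils.py):

import islpy as isl
from schedule_checker.sched_check_utils import append_marker_to_isl_map_var_names

m = isl.Map("{ [s, i] -> [t, j] : t = s and j = i + 1 }")
m_marked = append_marker_to_isl_map_var_names(m, isl.dim_type.in_, marker="'")
# the input dims of m_marked are now named s' and i'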
- aligned_constraint_map.space.get_var_names(isl.dim_type.out) - == sio.space.get_var_names(isl.dim_type.out) - ) + == sio.space.get_var_names(isl.dim_type.out)) assert ( aligned_constraint_map.space.get_var_names(isl.dim_type.param) == sio.space.get_var_names(isl.dim_type.param)) -- GitLab From f3cce6e351f877ad29e196336cc0fece2d62fbc6 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Wed, 15 Jan 2020 21:49:38 -0600 Subject: [PATCH 172/415] updated variable names, function names/signatures, and documentation based on the following recent changes: 1) schedule represented as two maps instead of one (one for depender and one for dependee), 2) dependee inames get apostrophe marker instead of depender, 3) iname domains for before/after insns stored inside StatementPairDependencySet instead of separately, 4) created LexScheduleStatementInstance to hold both LexScheduleStatement and the point(s) in lexicographic time in which it occurs --- __init__.py | 27 ++++++++--- dependency.py | 89 +++++++++++++++++++++--------------- lexicographic_order_map.py | 47 +++++++++++-------- sched_check_utils.py | 28 ++++++------ schedule.py | 94 +++++++++++++++++++++++++------------- 5 files changed, 174 insertions(+), 111 deletions(-) diff --git a/__init__.py b/__init__.py index c965be217..5b8062070 100644 --- a/__init__.py +++ b/__init__.py @@ -1,6 +1,24 @@ def get_statement_pair_dependency_sets_from_legacy_knl(knl): + """Return a list of :class:`StatementPairDependySet` instances created + for a :class:`loopy.LoopKernel` containing legacy depencencies. Create + the new dependencies according to the following rules. (1) If + a dependency exists between ``insn0`` and ``insn1``, create the dependnecy + ``SAME(SNC)`` where ``SNC`` is the set of non-concurrent inames used + by both ``insn0`` and ``insn1``, and ``SAME`` is the relationship specified + by the ``SAME`` attribute of :class:`DependencyType`. (2) For each subset + of non-concurrent inames used by any instruction, find the set of all + instructions using those inames, create a directed graph with these + instructions as nodes and edges representing a 'happens before' + relationship specfied by each dependency, find the sources and sinks within + this graph, and connect each sink to each source (sink happens before + source) with a ``PRIOR(SNC)`` dependency, where ``PRIOR`` is the + relationship specified by the ``PRIOR`` attribute of + :class:`DependencyType`. + + """ + from schedule_checker.dependency import ( create_dependencies_from_legacy_knl, ) @@ -14,12 +32,7 @@ def get_statement_pair_dependency_sets_from_legacy_knl(knl): else: preprocessed_knl = knl - # Create StatementPairDependencySet(s) from kernel dependencies ----------------- - - # Introduce SAME dep for set of shared, non-concurrent inames. - - # For each set of insns within a given iname subset, find sources and sinks, - # then make PRIOR dep from all sinks to all sources at previous iterations. 
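Putting the entry points from this series together, a hedged end-to-end sketch (knl and schedule_items are assumed to already be in scope; the calls mirror example_pairwise_schedule_validity.py):

from schedule_checker import get_statement_pair_dependency_sets_from_legacy_knl
from schedule_checker.dependency import get_dependency_maps
from schedule_checker.sched_check_utils import create_graph_from_pairs

# derive SAME/PRIOR statement-pair dependency sets from the legacy kernel deps
dep_sets = get_statement_pair_dependency_sets_from_legacy_knl(knl)

# attach constraint maps and the {dep & SAME} edge test to each pair
dep_info_list = get_dependency_maps(
    dep_sets, schedule_items, knl.loop_priority, knl)

# build the statement dependency graph from the pairs that passed the test
dep_graph = create_graph_from_pairs([
    (d.statement_pair_dep_set.statement_before.insn_id,
     d.statement_pair_dep_set.statement_after.insn_id)
    for d in dep_info_list if d.is_edge_in_dep_graph])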
+ # Create StatementPairDependencySet(s) from kernel dependencies return create_dependencies_from_legacy_knl(preprocessed_knl) @@ -126,7 +139,7 @@ def check_schedule_validity( # this requires the iname domains sched_map_symbolic_before, sched_map_symbolic_after = \ - sched.create_symbolic_isl_map( + sched.create_symbolic_isl_maps( dom_before, dom_after, ) diff --git a/dependency.py b/dependency.py index 3f2318ed1..39ce21d28 100644 --- a/dependency.py +++ b/dependency.py @@ -12,11 +12,11 @@ class DependencyType: A :class:`str` specifying the following dependency relationship: If ``S = {i, j, ...}`` is a set of inames used in both statements - ``insn0`` and ``insn1``, and ``{i, j, ...}`` represent the values - of the inames in ``insn0``, and ``{i', j', ...}`` represent the + ``insn0`` and ``insn1``, and ``{i', j', ...}`` represent the values + of the inames in ``insn0``, and ``{i, j, ...}`` represent the values of the inames in ``insn1``, then the dependency ``insn0 happens before insn1 iff SAME({i, j})`` specifies that - ``insn0 happens before insn1 iff {i = i' and j = j' and ...}``. + ``insn0 happens before insn1 iff {i' = i and j' = j and ...}``. Note that ``SAME({}) = True``. .. attribute:: PRIOR @@ -24,16 +24,16 @@ class DependencyType: A :class:`str` specifying the following dependency relationship: If ``S = {i, j, k, ...}`` is a set of inames used in both statements - ``insn0`` and ``insn1``, and ``{i, j, k, ...}`` represent the values - of the inames in ``insn0``, and ``{i', j', k', ...}`` represent the + ``insn0`` and ``insn1``, and ``{i', j', k', ...}`` represent the values + of the inames in ``insn0``, and ``{i, j, k, ...}`` represent the values of the inames in ``insn1``, then the dependency ``insn0 happens before insn1 iff PRIOR({i, j, k})`` specifies one of two possibilities, depending on whether the loop nest ordering is known. If the loop nest ordering is unknown, then - ``insn0 happens before insn1 iff {i < i' and j < j' and k < k' ...}``. + ``insn0 happens before insn1 iff {i' < i and j' < j and k' < k ...}``. If the loop nest ordering is known, the condition becomes - ``{i, j, k, ...}`` is lexicographically less than ``{i', j', k', ...}``, - i.e., ``i < i' or (i = i' and j < j') or (i = i' and j = j' and k < k') ...``. + ``{i', j', k', ...}`` is lexicographically less than ``{i, j, k, ...}``, + i.e., ``i' < i or (i' = i and j' < j) or (i' = i and j' = j and k' < k) ...``. """ @@ -58,6 +58,16 @@ class StatementPairDependencySet(object): the Loopy kernel inames involved in that particular dependency relationship. + .. attribute:: dom_before + + A :class:`islpy.BasicSet` representing the domain for the + dependee statement. + + .. attribute:: dom_after + + A :class:`islpy.BasicSet` representing the domain for the + dependee statement. + """ def __init__( @@ -138,20 +148,14 @@ def create_dependency_constraint( ): """Create a statement dependency constraint represented as a map from each statement instance to statement instances that must occur later, - i.e., ``{[s=0, i, j] -> [s'=1, i', j'] : condition on {i, j, i', j'}}`` + i.e., ``{[s'=0, i', j'] -> [s=1, i, j] : condition on {i', j', i, j}}`` indicates that statement ``0`` comes before statment ``1`` when the - specified condition on inames ``i,j,i',j'`` is met. ``i'`` and ``j'`` - are the values of inames ``i`` and ``j`` in second statement instance. + specified condition on inames ``i',j',i,j`` is met. ``i'`` and ``j'`` + are the values of inames ``i`` and ``j`` in first statement instance. .. 
arg statement_dep_set: A :class:`StatementPairDependencySet` describing the dependency relationship between the two statements. - .. arg dom_before_constraint_set: A :class:`islpy.BasicSet` specifying the - domain for the 'before' statement in the relationship. - - .. arg dom_after_constraint_set: A :class:`islpy.BasicSet` specifying the - domain for the 'after' statement in the relationship. - .. arg loop_priorities: A list of tuples from the ``loop_priority`` attribute of :class:`loopy.LoopKernel` specifying the loop nest ordering rules. @@ -177,8 +181,11 @@ def create_dependency_constraint( statement instance tuples holds the dimension representing the statement id. Defaults to ``0``. - .. arg all_dom_inames_ordered: A :class:`list` of :class:`str` specifying - an order for the dimensions representing inames. + .. arg all_dom_inames_ordered_before: A :class:`list` of :class:`str` + specifying an order for the dimensions representing dependee inames. + + .. arg all_dom_inames_ordered_after: A :class:`list` of :class:`str` + specifying an order for the dimensions representing depender inames. .. return: An :class:`islpy.Map` mapping each statement instance to all statement instances that must occur later according to the constraints. @@ -186,7 +193,7 @@ def create_dependency_constraint( """ from schedule_checker.sched_check_utils import ( - make_islvars_with_var_primes, + make_islvars_with_marker, append_apostrophes, add_dims_to_isl_set, reorder_dims_by_name, @@ -206,11 +213,12 @@ def create_dependency_constraint( [statement_dep_set.dom_after]) # create some (ordered) isl vars to use, e.g., {s, i, j, s', i', j'} - islvars = make_islvars_with_var_primes( - var_names_in=[statement_var_name]+dom_inames_ordered_before, - param_names=[unused_param_name], - var_names_out=[statement_var_name]+dom_inames_ordered_after, - ) + islvars = make_islvars_with_marker( + var_names_needing_marker=[statement_var_name]+dom_inames_ordered_before, + other_var_names=[statement_var_name]+dom_inames_ordered_after, + param_names=[unused_param_name], + marker="'", + ) statement_var_name_prime = statement_var_name+"'" # get (ordered) list of unused before/after inames @@ -386,6 +394,7 @@ def create_dependency_constraint( return map_with_loop_domain_constraints +# TODO no longer used, remove def _create_5pt_stencil_dependency_constraint( dom_before_constraint_set, dom_after_constraint_set, @@ -400,7 +409,7 @@ def _create_5pt_stencil_dependency_constraint( ): from schedule_checker.sched_check_utils import ( - make_islvars_with_var_primes, + make_islvars_with_marker, append_apostrophes, add_dims_to_isl_set, reorder_dims_by_name, @@ -417,9 +426,12 @@ def _create_5pt_stencil_dependency_constraint( [dom_before_constraint_set, dom_after_constraint_set]) # create some (ordered) isl vars to use, e.g., {s, i, j, s', i', j'} - islvars = make_islvars_with_var_primes( - [statement_var_name]+all_dom_inames_ordered, - [unused_param_name]) + islvars = make_islvars_with_marker( + var_names_needing_marker=[statement_var_name]+all_dom_inames_ordered, + other_var_names=[statement_var_name]+all_dom_inames_ordered, + param_names=[unused_param_name], + marker="'", + ) statement_var_name_prime = statement_var_name+"'" # get (ordered) list of unused before/after inames @@ -525,11 +537,10 @@ def create_arbitrary_dependency_constraint( all_dom_inames_ordered=None, ): - # TODO update after allowing different inames for before/after # TODO test after switching primes to before vars from schedule_checker.sched_check_utils import ( - 
make_islvars_with_var_primes, + make_islvars_with_marker, #append_apostrophes, append_marker_to_strings, add_dims_to_isl_set, @@ -547,10 +558,12 @@ def create_arbitrary_dependency_constraint( [dom_before_constraint_set, dom_after_constraint_set]) # create some (ordered) isl vars to use, e.g., {s, i, j, s', i', j'} - islvars = make_islvars_with_var_primes( - [statement_var_name]+all_dom_inames_ordered, - [unused_param_name], - marker="p") # TODO figure out before/after notation + islvars = make_islvars_with_marker( + var_names_needing_marker=[statement_var_name]+all_dom_inames_ordered, + other_var_names=[statement_var_name]+all_dom_inames_ordered, + param_names=[unused_param_name], + marker="p", + ) # TODO figure out before/after notation #statement_var_name_prime = statement_var_name+"'" statement_var_name_prime = statement_var_name+"p" # TODO figure out before/after notation @@ -644,12 +657,12 @@ def create_arbitrary_dependency_constraint( # insert inames missing from doms to enable intersection domain_to_intersect = reorder_dims_by_name( domain_to_intersect, isl.dim_type.out, - [statement_var_name] + all_dom_inames_ordered, + append_marker_to_strings([statement_var_name] + all_dom_inames_ordered, "p"), # TODO figure out before/after notation add_missing=True) range_to_intersect = reorder_dims_by_name( range_to_intersect, isl.dim_type.out, - append_marker_to_strings([statement_var_name] + all_dom_inames_ordered, "p"), # TODO figure out before/after notation + [statement_var_name] + all_dom_inames_ordered, add_missing=True) # intersect doms @@ -665,7 +678,7 @@ def create_dependencies_from_legacy_knl(knl): the new dependencies according to the following rules. (1) If a dependency exists between ``insn0`` and ``insn1``, create the dependnecy ``SAME(SNC)`` where ``SNC`` is the set of non-concurrent inames used - by both ``insn0 and ``insn1``, and ``SAME`` is the relationship specified + by both ``insn0`` and ``insn1``, and ``SAME`` is the relationship specified by the ``SAME`` attribute of :class:`DependencyType`. (2) For each subset of non-concurrent inames used by any instruction, find the set of all instructions using those inames, create a directed graph with these diff --git a/lexicographic_order_map.py b/lexicographic_order_map.py index 52afadaa0..7abe6b0c5 100644 --- a/lexicographic_order_map.py +++ b/lexicographic_order_map.py @@ -6,26 +6,32 @@ def get_statement_ordering_map( """Return a mapping that maps each statement instance to all statement instances occuring later. - .. arg sched_map: An :class:`islpy.Map` representing a schedule - as a mapping from each statement instance to a point in - a lexicographic ordering. + .. arg sched_map_before: An :class:`islpy.Map` representing instruction + instance order for the dependee as a mapping from each statement + instance to a point in the lexicographic ordering. + + .. arg sched_map_after: An :class:`islpy.Map` representing instruction + instance order for the depender as a mapping from each statement + instance to a point in the lexicographic ordering. .. arg lex_map: An :class:`islpy.Map` representing a lexicographic ordering as a mapping from each point in lexicographic time to every point that occurs later in lexicographic time. E.g.:: - {[i0, i1, i2, ...] -> [i0', i1', i2', ...] : - i0 < i0' or (i0 = i0' and i1 < i1') - or (i0 = i0' and i1 = i1' and i2 < i2') ...} + {[i0', i1', i2', ...] -> [i0, i1, i2, ...] : + i0' < i0 or (i0' = i0 and i1' < i1) + or (i0' = i0 and i1' = i1 and i2' < i2) ...} .. 
return: An :class:`islpy.Map` representing the lex schedule as a mapping from each statement instance to all statement instances - occuring later. I.e., we compose S -> L -> S^-1, where S - is the schedule map and L is the lexicographic ordering map. + occuring later. I.e., we compose B -> L -> A^-1, where B + is sched_map_before, A is sched_map_after, and L is the + lexicographic ordering map. """ - sio = sched_map_before.apply_range(lex_map).apply_range(sched_map_after.reverse()) + sio = sched_map_before.apply_range( + lex_map).apply_range(sched_map_after.reverse()) # append marker to in names for i in range(sio.dim(isl.dim_type.in_)): sio = sio.set_dim_name(isl.dim_type.in_, i, sio.get_dim_name( @@ -53,12 +59,12 @@ def get_lex_order_constraint(islvars, before_names, after_names): time that occurs after. (see example below) .. return: An :class:`islpy.Set` representing a constraint that enforces a - lexicographic ordering. E.g., if ``before_names = [i0, i1, i2]`` and - ``after_names = [i0', i1', i2']``, return the set:: + lexicographic ordering. E.g., if ``before_names = [i0', i1', i2']`` and + ``after_names = [i0, i1, i2]``, return the set:: - {[i0, i1, i2, i0', i1', i2'] : - i0 < i0' or (i0 = i0' and i1 < i1') - or (i0 = i0' and i1 = i1' and i2 < i2')} + {[i0', i1', i2', i0, i1, i2] : + i0' < i0 or (i0' = i0 and i1' < i1) + or (i0' = i0 and i1' = i1 and i2' < i2)} """ @@ -96,12 +102,12 @@ def create_lex_order_map( .. return: An :class:`islpy.Map` representing a lexicographic ordering as a mapping from each point in lexicographic time to every point that occurs later in lexicographic time. - E.g., if ``before_names = [i0, i1, i2]`` and - ``after_names = [i0', i1', i2']``, return the map:: + E.g., if ``before_names = [i0', i1', i2']`` and + ``after_names = [i0, i1, i2]``, return the map:: - {[i0, i1, i2] -> [i0', i1', i2'] : - i0 < i0' or (i0 = i0' and i1 < i1') - or (i0 = i0' and i1 = i1' and i2 < i2')} + {[i0', i1', i2'] -> [i0, i1, i2] : + i0' < i0 or (i0' = i0 and i1' < i1) + or (i0' = i0 and i1' = i1 and i2' < i2)} """ @@ -120,7 +126,8 @@ def create_lex_order_map( before_names+after_names, []) - lex_order_constraint = get_lex_order_constraint(islvars, before_names, after_names) + lex_order_constraint = get_lex_order_constraint( + islvars, before_names, after_names) lex_map = isl.Map.from_domain(lex_order_constraint) lex_map = lex_map.move_dims( diff --git a/sched_check_utils.py b/sched_check_utils.py index 32317cccb..f9183d2c2 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -108,12 +108,13 @@ def create_new_isl_set_with_primes(old_isl_set, marker="'"): apostrophes appended to dim_type.set dimension names. 
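    E.g., a minimal sketch (the set and dim names here are illustrative
    assumptions, not taken from any kernel in this patch series)::

        import islpy as isl
        s = isl.BasicSet("{[i, j]: 0 <= i, j < 8}")
        s_marked = create_new_isl_set_with_primes(s)
        # s_marked now has the form { [i', j'] : 0 <= i', j' < 8 }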
""" - # TODO this is a special case of append_marker_to_isl_map_var_names + # TODO this is just a special case of append_marker_to_isl_map_var_names new_set = old_isl_set.copy() for i in range(old_isl_set.n_dim()): - new_set = new_set.set_dim_name(isl.dim_type.set, i, old_isl_set.get_dim_name( - isl.dim_type.set, i)+marker) + new_set = new_set.set_dim_name( + isl.dim_type.set, i, old_isl_set.get_dim_name( + isl.dim_type.set, i)+marker) return new_set @@ -138,16 +139,18 @@ def append_marker_to_isl_map_var_names(old_isl_map, dim_type, marker="'"): return new_map -def make_islvars_with_var_primes( - var_names_in, param_names, marker="'", var_names_out=None): +def make_islvars_with_marker( + var_names_needing_marker, other_var_names, param_names, marker="'"): """Return a dictionary from variable and parameter names to :class:`PwAff` instances that represent each of - the variables and parameters, including - both the variables in `var_names` and a copy of each - variable with an apostrophe appended. + the variables and parameters, appending marker to + var_names_needing_marker. - .. arg var_names: A :class:`list` of :class:`str` elements - representing variable names. + .. arg var_names_needing_marker: A :class:`list` of :class:`str` + elements representing variable names to have markers appended. + + .. arg other_var_names: A :class:`list` of :class:`str` + elements representing variable names to be included as-is. .. arg param_names: A :class:`list` of :class:`str` elements representing parameter names. @@ -165,11 +168,8 @@ def make_islvars_with_var_primes( new_l.append(s+mark) return new_l - if var_names_out is None: - var_names_out = var_names_in[:] - return isl.make_zero_and_vars( - append_marker(var_names_in, marker) + var_names_out, param_names) + append_marker(var_names_needing_marker, marker) + other_var_names, param_names) def append_marker_to_strings(strings, marker="'"): diff --git a/schedule.py b/schedule.py index 38b6d66cc..f221c1595 100644 --- a/schedule.py +++ b/schedule.py @@ -45,6 +45,15 @@ class LexScheduleStatement(object): class LexScheduleStatementInstance(object): """A representation of a Loopy statement instance. + .. attribute:: stmt + + A :class:`LexScheduleStatement`. + + .. attribute:: lex_pt + + A list of :class:`int` or as :class:`str` Loopy inames representing + a point or set of points in a lexicographic ordering. + """ def __init__( @@ -63,12 +72,21 @@ class LexSchedule(object): """A program ordering represented as a mapping from statement instances to points in a lexicographic ordering. - .. attribute:: lex_schedule + .. attribute:: stmt_instance_before + + A :class:`LexScheduleStatementInstance` describing the dependee + statement's order relative to the depender statment by mapping + a statement to a point or set of points in a lexicographic + ordering. Points in lexicographic ordering are represented as + a list of :class:`int` or as :class:`str` Loopy inames. + + .. attribute:: stmt_instance_after - A :class:`list` of (:class:`LexScheduleStatement`, :class:`list`) - tuples, representing the program ordering as a map from - statement instances to points in a lexicographic ordering. Points - in lexicographic ordering are represented as list of :class:`int`. + A :class:`LexScheduleStatementInstance` describing the depender + statement's order relative to the dependee statment by mapping + a statement to a point or set of points in a lexicographic + ordering. 
Points in lexicographic ordering are represented as + a list of :class:`int` or as :class:`str` Loopy inames. .. attribute:: unused_param_name @@ -109,15 +127,23 @@ class LexSchedule(object): prohibited_var_names=[], ): """ - :arg knl: A :class:`LoopKernel` whose instructions will be + :arg knl: A :class:`LoopKernel` whose schedule items will be described by this :class:`LexSchedule`. - :arg include_only_insn_ids: A list of :class:`str` instruction ids - specifying which instructions to include in the mapping. If set - to None, all insructions will be included. + :arg sched_items_ordered: A list of :class:`ScheduleItem` whose + order will be described by this :class:`LexSchedule`. + + :arg before_insn_id: A :class:`str` instruction id specifying + the dependee in this pair of instructions. + + :arg after_insn_id: A :class:`str` instruction id specifying + the depender in this pair of instructions. + + :arg prohibited_var_names: A list of :class:`str` variable names + that may not be used as the statement variable name (e.g., + because they are already being used as inames). """ - # TODO update docs now that we have two schedules # LexScheduleStatements self.stmt_instance_before = None @@ -280,31 +306,39 @@ class LexSchedule(object): max_lex_dim-len(self.stmt_instance_after.lex_pt)) ) - def create_symbolic_isl_map( + def create_symbolic_isl_maps( self, dom_before, dom_after, dom_inames_ordered_before=None, dom_inames_ordered_after=None, ): - """Create an isl map representing lex schedule as a mapping - from each statement instance to all statement instances - occuring later. + """Create two isl maps representing lex schedule as two mappings + from statement instances to lexicographic time, one for + the dependee and one for the depender. + + .. arg dom_before: A :class:`islpy.BasicSet` representing the + domain for the dependee statement. + + .. arg dom_after: A :class:`islpy.BasicSet` representing the + domain for the dependee statement. - .. arg sid_to_dom: A :class:`dict` mapping integer ids to domains, - where integer ids are instances of the ``int_id`` attribute of - :class:`LexScheduleStatement`, and domains are the - :class:`islpy.BasicSet` representing the domain for this - statement. + .. arg dom_inames_ordered_before: A list of :class:`str` + representing the union of inames used in instances of the + dependee statement. ``statement_var_name`` and + ``dom_inames_ordered_before`` are the names of the dims of + the space of the ISL map domain for the dependee. - .. arg dom_inames_ordered: A list of :class:`str` representing - the union of inames used in all statement instances. - ``statement_var_name`` and ``dom_inames_ordered`` are the names - of the dims of the space of the ISL map domain. + .. arg dom_inames_ordered_after: A list of :class:`str` + representing the union of inames used in instances of the + depender statement. ``statement_var_name`` and + ``dom_inames_ordered_after`` are the names of the dims of + the space of the ISL map domain for the depender. - .. return: An :class:`islpy.Map` representing a schedule - as a mapping from each statement instance to a point in - a lexicographic ordering. + .. return: A two-tuple containing two :class:`islpy.Map`s + representing the schedule as two mappings + from statement instances to lexicographic time, one for + the dependee and one for the depender. 
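        E.g., for a dependee nested in loop ``i`` and a depender nested in
        loop ``j``, the returned pair might look roughly like the following
        (a sketch only; the iname, parameter, and statement-id values are
        illustrative assumptions)::

            # map for the dependee (primed names, statement id 0)
            { [statement' = 0, i'] -> [0, i', 0] : 0 <= i' < n }
            # map for the depender (statement id 1)
            { [statement = 1, j] -> [1, j, 0] : 0 <= j < n }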
""" @@ -352,15 +386,11 @@ class LexSchedule(object): [self.statement_var_name], 0), ] - # The isl map representing the schedule maps + # Each isl map representing the schedule maps # statement instances -> lex time - # The 'in_' dim vars need to match for all sched items in the map, - # Instructions that use fewer inames will still need to - # have the unused inames in their 'in_' dim vars, so we'll - # include them and set them equal to a dummy variable. # Right now, statement tuples consist of single int. - # Add all inames from combined domains to map domain tuples. + # Add all inames from domains to map domain tuples. # create isl map return ( -- GitLab From 94315283fa9f3fec865837b0620906c6b3a6b0f5 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sat, 25 Jan 2020 16:24:45 -0600 Subject: [PATCH 173/415] add hashing functions to classes to avoid PersistentDict error --- dependency.py | 19 ++++++++++++++++++- schedule.py | 12 ++++++++++++ 2 files changed, 30 insertions(+), 1 deletion(-) diff --git a/dependency.py b/dependency.py index 39ce21d28..e3a6f2388 100644 --- a/dependency.py +++ b/dependency.py @@ -84,6 +84,23 @@ class StatementPairDependencySet(object): self.dom_before = dom_before self.dom_after = dom_after + def __lt__(self, other): + return self.__hash__() < other.__hash__() + + def __hash__(self): + return hash(repr(self)) + + def update_persistent_hash(self, key_hash, key_builder): + """Custom hash computation function for use with + :class:`pytools.persistent_dict.PersistentDict`. + """ + + key_builder.rec(key_hash, self.statement_before) + key_builder.rec(key_hash, self.statement_after) + key_builder.rec(key_hash, self.deps) + key_builder.rec(key_hash, self.dom_before) + key_builder.rec(key_hash, self.dom_after) + def __str__(self): result = "%s --before->\n%s iff\n " % ( self.statement_before, self.statement_after) @@ -761,7 +778,7 @@ def create_dependencies_from_legacy_knl(knl): knl.get_inames_domain(source_insn_inames), )) - return statement_dep_sets + return set(statement_dep_sets) def get_dependency_sources_and_sinks(knl, sched_item_ids): diff --git a/schedule.py b/schedule.py index f221c1595..a1876d43a 100644 --- a/schedule.py +++ b/schedule.py @@ -29,6 +29,18 @@ class LexScheduleStatement(object): self.int_id = int_id self.within_inames = within_inames + def __hash__(self): + return hash(repr(self)) + + def update_persistent_hash(self, key_hash, key_builder): + """Custom hash computation function for use with + :class:`pytools.persistent_dict.PersistentDict`. 
+ """ + + key_builder.rec(key_hash, self.insn_id) + key_builder.rec(key_hash, self.int_id) + key_builder.rec(key_hash, self.within_inames) + def __str__(self): if self.int_id: int_id = ":%d" % (self.int_id) -- GitLab From a59918f40682d4f99c3a7372051306feef1aeda1 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sat, 25 Jan 2020 16:25:46 -0600 Subject: [PATCH 174/415] add legacy deps using lp.add_dependencies_v2 since these are no longer automatically generated when scheduling --- test/test_invalid_scheds.py | 14 ++++++++++---- test/test_valid_scheds.py | 8 ++++++++ 2 files changed, 18 insertions(+), 4 deletions(-) diff --git a/test/test_invalid_scheds.py b/test/test_invalid_scheds.py index 5f43909cf..a822ea9df 100644 --- a/test/test_invalid_scheds.py +++ b/test/test_invalid_scheds.py @@ -66,6 +66,7 @@ def test_invalid_prioritiy_detection(): unprocessed_knl = knl0.copy() deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) + knl0 = lp.add_dependencies_v2(knl0, deps_and_domains) # get a schedule to check if knl0.state < KernelState.PREPROCESSED: @@ -83,6 +84,7 @@ def test_invalid_prioritiy_detection(): unprocessed_knl = knl1.copy() deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) + knl1 = lp.add_dependencies_v2(knl1, deps_and_domains) # get a schedule to check if knl1.state < KernelState.PREPROCESSED: @@ -96,8 +98,9 @@ def test_invalid_prioritiy_detection(): # error (cycle): knl2 = lp.prioritize_loops(ref_knl, "h,i,j") knl2 = lp.prioritize_loops(knl2, "j,k") - knl2 = lp.prioritize_loops(knl2, "k,i") try: + knl2 = lp.prioritize_loops(knl2, "k,i") + """ unprocessed_knl = knl2.copy() deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) @@ -109,15 +112,17 @@ def test_invalid_prioritiy_detection(): schedule_items = knl2.schedule sched_is_valid = check_schedule_validity(unprocessed_knl, deps_and_domains, schedule_items) + """ # should raise error assert False except ValueError as e: - assert "invalid priorities" in str(e) + assert "cycle detected" in str(e) # error (inconsistent priorities): knl3 = lp.prioritize_loops(ref_knl, "h,i,j,k") - knl3 = lp.prioritize_loops(knl3, "h,j,i,k") try: + knl3 = lp.prioritize_loops(knl3, "h,j,i,k") + """ unprocessed_knl = knl3.copy() deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) @@ -129,10 +134,11 @@ def test_invalid_prioritiy_detection(): schedule_items = knl3.schedule sched_is_valid = check_schedule_validity(unprocessed_knl, deps_and_domains, schedule_items) + """ # should raise error assert False except ValueError as e: - assert "invalid priorities" in str(e) + assert "cycle detected" in str(e) if __name__ == "__main__": diff --git a/test/test_valid_scheds.py b/test/test_valid_scheds.py index 0ed14502e..d6cbde674 100644 --- a/test/test_valid_scheds.py +++ b/test/test_valid_scheds.py @@ -73,6 +73,8 @@ def test_loop_prioritization(): unprocessed_knl = knl.copy() deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) + knl = lp.add_dependencies_v2(knl, deps_and_domains) + # TODO why is this failing to find valid schedule? 
# get a schedule to check if knl.state < KernelState.PREPROCESSED: @@ -106,6 +108,7 @@ def test_matmul(): unprocessed_knl = knl.copy() deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) + knl = lp.add_dependencies_v2(knl, deps_and_domains) # get a schedule to check if knl.state < KernelState.PREPROCESSED: @@ -150,6 +153,7 @@ def test_dependent_domain(): unprocessed_knl = knl.copy() deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) + knl = lp.add_dependencies_v2(knl, deps_and_domains) # get a schedule to check if knl.state < KernelState.PREPROCESSED: @@ -199,6 +203,7 @@ def test_stroud_bernstein(): unprocessed_knl = knl.copy() deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) + knl = lp.add_dependencies_v2(knl, deps_and_domains) # get a schedule to check if knl.state < KernelState.PREPROCESSED: @@ -231,6 +236,7 @@ def test_nop(): unprocessed_knl = knl.copy() deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) + knl = lp.add_dependencies_v2(knl, deps_and_domains) # get a schedule to check if knl.state < KernelState.PREPROCESSED: @@ -273,6 +279,7 @@ def test_multi_domain(): unprocessed_knl = knl.copy() deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) + knl = lp.add_dependencies_v2(knl, deps_and_domains) # get a schedule to check if knl.state < KernelState.PREPROCESSED: @@ -304,6 +311,7 @@ def test_loop_carried_deps(): unprocessed_knl = knl.copy() deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) + knl = lp.add_dependencies_v2(knl, deps_and_domains) # get a schedule to check if knl.state < KernelState.PREPROCESSED: -- GitLab From 04e24f95b9e0edab81f8d947eb9df89d2a5025f3 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Fri, 14 Feb 2020 16:50:46 -0600 Subject: [PATCH 175/415] remove unused inames from test kernels --- test/test_valid_scheds.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/test/test_valid_scheds.py b/test/test_valid_scheds.py index d6cbde674..3cb100e09 100644 --- a/test/test_valid_scheds.py +++ b/test/test_valid_scheds.py @@ -41,9 +41,9 @@ from loopy import ( def test_loop_prioritization(): knl = lp.make_kernel( [ - "{[i,ii]: 0<=i Date: Fri, 14 Feb 2020 18:31:15 -0600 Subject: [PATCH 176/415] removed TODO that was fixed in loopy.schedule --- test/test_valid_scheds.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/test_valid_scheds.py b/test/test_valid_scheds.py index 3cb100e09..e98a52ada 100644 --- a/test/test_valid_scheds.py +++ b/test/test_valid_scheds.py @@ -38,6 +38,7 @@ from loopy import ( get_one_scheduled_kernel, ) + def test_loop_prioritization(): knl = lp.make_kernel( [ @@ -74,7 +75,6 @@ def test_loop_prioritization(): deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) knl = lp.add_dependencies_v2(knl, deps_and_domains) - # TODO why is this failing to find valid schedule? 
# get a schedule to check if knl.state < KernelState.PREPROCESSED: -- GitLab From a49a087a2eb123a35058fc92a5e4b791530499b9 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Thu, 20 Feb 2020 19:17:32 -0600 Subject: [PATCH 177/415] adding __eq__() funcs to StatementPairDependencySet and LexScheduleStatement to avoid sets with duplicate deps --- dependency.py | 10 ++++++++++ schedule.py | 7 +++++++ 2 files changed, 17 insertions(+) diff --git a/dependency.py b/dependency.py index e3a6f2388..2fa13a0f0 100644 --- a/dependency.py +++ b/dependency.py @@ -84,6 +84,15 @@ class StatementPairDependencySet(object): self.dom_before = dom_before self.dom_after = dom_after + def __eq__(self, other): + return ( + self.statement_before == other.statement_before and + self.statement_after == other.statement_after and + self.deps == other.deps and + self.dom_before == other.dom_before and + self.dom_after == other.dom_after + ) + def __lt__(self, other): return self.__hash__() < other.__hash__() @@ -835,6 +844,7 @@ def get_dependency_maps( loop_priority, knl, # TODO avoid passing this in ): + # TODO document from schedule_checker.sched_check_utils import ( prettier_map_string, diff --git a/schedule.py b/schedule.py index a1876d43a..bbdea1806 100644 --- a/schedule.py +++ b/schedule.py @@ -29,6 +29,13 @@ class LexScheduleStatement(object): self.int_id = int_id self.within_inames = within_inames + def __eq__(self, other): + return ( + self.insn_id == other.insn_id and + self.int_id == other.int_id and + self.within_inames == other.within_inames + ) + def __hash__(self): return hash(repr(self)) -- GitLab From ee92f518e7634d8c2c90568614247122ee454388 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 25 Feb 2020 09:50:08 -0600 Subject: [PATCH 178/415] fix flake8 issues --- dependency.py | 68 +++++++++++++++++++++----------------------- sched_check_utils.py | 9 +++--- schedule.py | 46 +++++++++++++++++++----------- 3 files changed, 67 insertions(+), 56 deletions(-) diff --git a/dependency.py b/dependency.py index 2fa13a0f0..a780a036d 100644 --- a/dependency.py +++ b/dependency.py @@ -1,9 +1,6 @@ import islpy as isl -# TODO update all documentation/comments after apostrophe switched to -# *before* statement/inames - class DependencyType: """Strings specifying a particular type of dependency relationship. 
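    E.g., a SAME dependency on iname ``i`` between a dependee (primed names)
    and a depender that also share iname ``j`` corresponds roughly to a
    constraint of this shape (an illustrative sketch; the statement ids and
    dim names are assumptions)::

        { [statement' = 0, i', j'] -> [statement = 1, i, j] : i = i' and j = j' }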
@@ -86,11 +83,11 @@ class StatementPairDependencySet(object): def __eq__(self, other): return ( - self.statement_before == other.statement_before and - self.statement_after == other.statement_after and - self.deps == other.deps and - self.dom_before == other.dom_before and - self.dom_after == other.dom_after + self.statement_before == other.statement_before + and self.statement_after == other.statement_after + and self.deps == other.deps + and self.dom_before == other.dom_before + and self.dom_after == other.dom_after ) def __lt__(self, other): @@ -397,7 +394,8 @@ def create_dependency_constraint( range_to_intersect = add_dims_to_isl_set( statement_dep_set.dom_after, isl.dim_type.out, [statement_var_name], statement_var_pose) - domain_constraint_set = create_new_isl_set_with_primes(statement_dep_set.dom_before) + domain_constraint_set = create_new_isl_set_with_primes( + statement_dep_set.dom_before) domain_to_intersect = add_dims_to_isl_set( domain_constraint_set, isl.dim_type.out, [statement_var_name_prime], statement_var_pose) @@ -493,18 +491,12 @@ def _create_5pt_stencil_dependency_constraint( """ # local dep: constraint_set = ( - islvars[time_iname].eq_set(islvars[time_iname_prime] + one) & - ( + islvars[time_iname].eq_set(islvars[time_iname_prime] + one) & ( (islvars[space_iname]-two).lt_set(islvars[space_iname_prime]) & - islvars[space_iname_prime].lt_set(islvars[space_iname]+two) - ) - #( - #(islvars[space_iname_prime]-two).lt_set(islvars[space_iname]) & - # islvars[space_iname].lt_set(islvars[space_iname_prime]+two) - #) + islvars[space_iname_prime].lt_set(islvars[space_iname]+two)) | - islvars[time_iname].eq_set(islvars[time_iname_prime] + two) & - islvars[space_iname_prime].eq_set(islvars[space_iname]) + (islvars[time_iname].eq_set(islvars[time_iname_prime] + two) + & islvars[space_iname_prime].eq_set(islvars[space_iname])) ) # set unused vars == unused dummy param @@ -589,9 +581,10 @@ def create_arbitrary_dependency_constraint( other_var_names=[statement_var_name]+all_dom_inames_ordered, param_names=[unused_param_name], marker="p", - ) # TODO figure out before/after notation + ) # TODO figure out before/after notation #statement_var_name_prime = statement_var_name+"'" - statement_var_name_prime = statement_var_name+"p" # TODO figure out before/after notation + statement_var_name_prime = statement_var_name+"p" + # TODO figure out before/after notation # get (ordered) list of unused before/after inames inames_before_unused = [] @@ -602,7 +595,8 @@ def create_arbitrary_dependency_constraint( for iname in all_dom_inames_ordered: if iname not in dom_after_constraint_set.get_var_names(isl.dim_type.out): #inames_after_unused.append(iname + "'") - inames_after_unused.append(iname) # TODO figure out before/after notation + inames_after_unused.append(iname) + # TODO figure out before/after notation # initialize constraints to False # this will disappear as soon as we add a constraint @@ -612,8 +606,10 @@ def create_arbitrary_dependency_constraint( from loopy.symbolic import aff_from_expr or_constraint_strs = constraint_str.split("or") + def _quant(s): return "(" + s + ")" + def _diff(s0, s1): return _quant(s0) + "-" + _quant(s1) @@ -626,13 +622,13 @@ def create_arbitrary_dependency_constraint( lhs, rhs = cons_str.split("<=") conj_constraint = conj_constraint.add_constraint( isl.Constraint.inequality_from_aff( - aff_from_expr(space, parse(_diff(rhs,lhs))))) + aff_from_expr(space, parse(_diff(rhs, lhs))))) # TODO something more robust than this string meddling^ elif ">=" in cons_str: lhs, 
rhs = cons_str.split(">=") conj_constraint = conj_constraint.add_constraint( isl.Constraint.inequality_from_aff( - aff_from_expr(space, parse(_diff(lhs,rhs))))) + aff_from_expr(space, parse(_diff(lhs, rhs))))) elif "<" in cons_str: lhs, rhs = cons_str.split("<") conj_constraint = conj_constraint.add_constraint( @@ -658,10 +654,14 @@ def create_arbitrary_dependency_constraint( islvars[unused_param_name]) # set statement_var_name == statement # - all_constraints_set = all_constraints_set & islvars[statement_var_name_prime].eq_set( - islvars[0]+sid_before) - all_constraints_set = all_constraints_set & islvars[statement_var_name].eq_set( - islvars[0]+sid_after) + all_constraints_set = ( + all_constraints_set & islvars[statement_var_name_prime].eq_set( + islvars[0]+sid_before) + ) + all_constraints_set = ( + all_constraints_set & islvars[statement_var_name].eq_set( + islvars[0]+sid_after) + ) # convert constraint set to map all_constraints_map = _convert_constraint_set_to_map( @@ -683,7 +683,8 @@ def create_arbitrary_dependency_constraint( # insert inames missing from doms to enable intersection domain_to_intersect = reorder_dims_by_name( domain_to_intersect, isl.dim_type.out, - append_marker_to_strings([statement_var_name] + all_dom_inames_ordered, "p"), # TODO figure out before/after notation + append_marker_to_strings( # TODO figure out before/after notation + [statement_var_name] + all_dom_inames_ordered, "p"), add_missing=True) range_to_intersect = reorder_dims_by_name( range_to_intersect, @@ -768,7 +769,6 @@ def create_dependencies_from_legacy_knl(knl): # in future, consider inserting single no-op source and sink for source_id in sources: for sink_id in sinks: - dep_dict = {} sink_insn_inames = knl.id_to_insn[sink_id].within_inames source_insn_inames = knl.id_to_insn[source_id].within_inames shared_inames = sink_insn_inames & source_insn_inames @@ -840,15 +840,12 @@ class DependencyInfo(object): def get_dependency_maps( deps_and_domains, - schedule_items, # TODO always pass these as strings since we only need the name? + schedule_items, # TODO always pass as strings since we only need the name? loop_priority, knl, # TODO avoid passing this in ): # TODO document - from schedule_checker.sched_check_utils import ( - prettier_map_string, - ) dt = DependencyType # create map from loopy insn ids to ints @@ -889,7 +886,8 @@ def get_dependency_maps( s_after = statement_pair_dep_set.statement_after dom_before = statement_pair_dep_set.dom_before dom_after = statement_pair_dep_set.dom_after - shared_nc_inames = s_before.within_inames & s_after.within_inames & non_conc_inames + shared_nc_inames = ( + s_before.within_inames & s_after.within_inames & non_conc_inames) same_dep_set = StatementPairDependencySet( s_before, s_after, diff --git a/sched_check_utils.py b/sched_check_utils.py index f9183d2c2..6fefa1483 100644 --- a/sched_check_utils.py +++ b/sched_check_utils.py @@ -47,9 +47,9 @@ def reorder_dims_by_name( .. arg new_names_are_permutation_only: A :class:`bool` indicating that `desired_dims_ordered` contains the same names as the specified - dimensions in `isl_set`, and does not, e.g., contain additional dimension names - not found in `isl_set`. If set to True, and these two sets of names - do not match, an error is produced. + dimensions in `isl_set`, and does not, e.g., contain additional + dimension names not found in `isl_set`. If set to True, and these + two sets of names do not match, an error is produced. .. 
return: An :class:`islpy.Set` matching `isl_set` with the dimension order matching `desired_dims_ordered`, optionally @@ -169,7 +169,8 @@ def make_islvars_with_marker( return new_l return isl.make_zero_and_vars( - append_marker(var_names_needing_marker, marker) + other_var_names, param_names) + append_marker(var_names_needing_marker, marker) + + other_var_names, param_names) def append_marker_to_strings(strings, marker="'"): diff --git a/schedule.py b/schedule.py index bbdea1806..4c99f45ce 100644 --- a/schedule.py +++ b/schedule.py @@ -31,9 +31,9 @@ class LexScheduleStatement(object): def __eq__(self, other): return ( - self.insn_id == other.insn_id and - self.int_id == other.int_id and - self.within_inames == other.within_inames + self.insn_id == other.insn_id + and self.int_id == other.int_id + and self.within_inames == other.within_inames ) def __hash__(self): @@ -78,7 +78,7 @@ class LexScheduleStatementInstance(object): def __init__( self, stmt, # a LexScheduleStatement - lex_pt, # [string/int, ] + lex_pt, # [string/int, ] ): self.stmt = stmt self.lex_pt = lex_pt @@ -195,11 +195,13 @@ class LexSchedule(object): "with ConcurrentTag(s) in schedule for kernel %s. " "Ignoring this loop." % (iname, knl.name)) continue + # if the schedule is empty, this is the first schedule item, so # don't increment lex dim val enumerating items in current block, # otherwise, this loop is next item in current code block, so # increment lex dim val enumerating items in current code block - if self.stmt_instance_before or self.stmt_instance_after: # if either statement has been set + if self.stmt_instance_before or self.stmt_instance_after: + # (if either statement has been set) # this lex value will correspond to everything inside this loop # we will add new lex dimensions to enuerate items inside loop next_insn_lex_pt[-1] = next_insn_lex_pt[-1]+1 @@ -298,12 +300,16 @@ class LexSchedule(object): :class:`LexScheduleStatement`. 
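        E.g., the returned dict might look like the following (the insn ids
        are illustrative assumptions, not taken from a real kernel)::

            {"insn_a": 0, "insn_b": 1}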
""" return { - self.stmt_instance_before.stmt.insn_id: self.stmt_instance_before.stmt.int_id, - self.stmt_instance_after.stmt.insn_id: self.stmt_instance_after.stmt.int_id, + self.stmt_instance_before.stmt.insn_id: + self.stmt_instance_before.stmt.int_id, + self.stmt_instance_after.stmt.insn_id: + self.stmt_instance_after.stmt.int_id, } def max_lex_dims(self): - return max([len(self.stmt_instance_before.lex_pt), len(self.stmt_instance_after.lex_pt)]) + return max([ + len(self.stmt_instance_before.lex_pt), + len(self.stmt_instance_after.lex_pt)]) def pad_lex_pts_with_zeros(self): """Find the maximum number of lexicographic dimensions represented @@ -383,10 +389,12 @@ class LexSchedule(object): params_sched = [self.unused_param_name] out_names_sched = self.get_lex_var_names() - in_names_sched_before = [self.statement_var_name] + dom_inames_ordered_before[:] + in_names_sched_before = [ + self.statement_var_name] + dom_inames_ordered_before[:] sched_space_before = get_isl_space( params_sched, in_names_sched_before, out_names_sched) - in_names_sched_after = [self.statement_var_name] + dom_inames_ordered_after[:] + in_names_sched_after = [ + self.statement_var_name] + dom_inames_ordered_after[:] sched_space_after = get_isl_space( params_sched, in_names_sched_after, out_names_sched) @@ -394,13 +402,11 @@ class LexSchedule(object): # intersection with sched map later doms_to_intersect_before = [ add_dims_to_isl_set( - #sid_to_dom_before[self.stmt_instance_before.stmt.int_id], isl.dim_type.set, dom_before, isl.dim_type.set, [self.statement_var_name], 0), ] doms_to_intersect_after = [ add_dims_to_isl_set( - #sid_to_dom_after[self.stmt_instance_after.stmt.int_id], isl.dim_type.set, dom_after, isl.dim_type.set, [self.statement_var_name], 0), ] @@ -415,14 +421,19 @@ class LexSchedule(object): return ( create_symbolic_isl_map_from_tuples( zip( - [((self.stmt_instance_before.stmt.int_id,) + tuple(dom_inames_ordered_before), - self.stmt_instance_before.lex_pt)], + [( + (self.stmt_instance_before.stmt.int_id,) + + tuple(dom_inames_ordered_before), + self.stmt_instance_before.lex_pt + )], doms_to_intersect_before ), sched_space_before, self.unused_param_name, self.statement_var_name), create_symbolic_isl_map_from_tuples( zip( - [((self.stmt_instance_after.stmt.int_id,) + tuple(dom_inames_ordered_after), + [( + (self.stmt_instance_after.stmt.int_id,) + + tuple(dom_inames_ordered_after), self.stmt_instance_after.lex_pt)], doms_to_intersect_after ), @@ -450,8 +461,9 @@ class LexSchedule(object): return self.__bool__() def __eq__(self, other): - return (self.stmt_instance_before == other.stmt_instance_before and - self.stmt_instance_after == other.stmt_instance_after) + return ( + self.stmt_instance_before == other.stmt_instance_before + and self.stmt_instance_after == other.stmt_instance_after) def __str__(self): sched_str = "Before: {\n" -- GitLab From 2188f2836a41ddcc548c5468389559136f71ef03 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 25 Feb 2020 09:59:02 -0600 Subject: [PATCH 179/415] ask if loopy hasattr add_dependencies_v2 before using it (so that future MR isn't dependent on loopy/iname-sets-in-loop-priorities branch --- test/test_invalid_scheds.py | 6 ++++-- test/test_valid_scheds.py | 22 +++++++++++++++------- 2 files changed, 19 insertions(+), 9 deletions(-) diff --git a/test/test_invalid_scheds.py b/test/test_invalid_scheds.py index a822ea9df..2eb31b0fa 100644 --- a/test/test_invalid_scheds.py +++ b/test/test_invalid_scheds.py @@ -66,7 +66,8 @@ def test_invalid_prioritiy_detection(): 
unprocessed_knl = knl0.copy() deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) - knl0 = lp.add_dependencies_v2(knl0, deps_and_domains) + if hasattr(lp, "add_dependencies_v2"): + knl0 = lp.add_dependencies_v2(knl0, deps_and_domains) # get a schedule to check if knl0.state < KernelState.PREPROCESSED: @@ -84,7 +85,8 @@ def test_invalid_prioritiy_detection(): unprocessed_knl = knl1.copy() deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) - knl1 = lp.add_dependencies_v2(knl1, deps_and_domains) + if hasattr(lp, "add_dependencies_v2"): + knl1 = lp.add_dependencies_v2(knl1, deps_and_domains) # get a schedule to check if knl1.state < KernelState.PREPROCESSED: diff --git a/test/test_valid_scheds.py b/test/test_valid_scheds.py index e98a52ada..d897d72bf 100644 --- a/test/test_valid_scheds.py +++ b/test/test_valid_scheds.py @@ -74,7 +74,9 @@ def test_loop_prioritization(): unprocessed_knl = knl.copy() deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) - knl = lp.add_dependencies_v2(knl, deps_and_domains) + if hasattr(lp, "add_dependencies_v2"): + knl = lp.add_dependencies_v2(knl, deps_and_domains) + # get a schedule to check if knl.state < KernelState.PREPROCESSED: @@ -108,7 +110,8 @@ def test_matmul(): unprocessed_knl = knl.copy() deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) - knl = lp.add_dependencies_v2(knl, deps_and_domains) + if hasattr(lp, "add_dependencies_v2"): + knl = lp.add_dependencies_v2(knl, deps_and_domains) # get a schedule to check if knl.state < KernelState.PREPROCESSED: @@ -155,7 +158,8 @@ def test_dependent_domain(): unprocessed_knl = knl.copy() deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) - knl = lp.add_dependencies_v2(knl, deps_and_domains) + if hasattr(lp, "add_dependencies_v2"): + knl = lp.add_dependencies_v2(knl, deps_and_domains) # get a schedule to check if knl.state < KernelState.PREPROCESSED: @@ -205,7 +209,8 @@ def test_stroud_bernstein(): unprocessed_knl = knl.copy() deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) - knl = lp.add_dependencies_v2(knl, deps_and_domains) + if hasattr(lp, "add_dependencies_v2"): + knl = lp.add_dependencies_v2(knl, deps_and_domains) # get a schedule to check if knl.state < KernelState.PREPROCESSED: @@ -238,7 +243,8 @@ def test_nop(): unprocessed_knl = knl.copy() deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) - knl = lp.add_dependencies_v2(knl, deps_and_domains) + if hasattr(lp, "add_dependencies_v2"): + knl = lp.add_dependencies_v2(knl, deps_and_domains) # get a schedule to check if knl.state < KernelState.PREPROCESSED: @@ -281,7 +287,8 @@ def test_multi_domain(): unprocessed_knl = knl.copy() deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) - knl = lp.add_dependencies_v2(knl, deps_and_domains) + if hasattr(lp, "add_dependencies_v2"): + knl = lp.add_dependencies_v2(knl, deps_and_domains) # get a schedule to check if knl.state < KernelState.PREPROCESSED: @@ -313,7 +320,8 @@ def test_loop_carried_deps(): unprocessed_knl = knl.copy() deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) - knl = lp.add_dependencies_v2(knl, deps_and_domains) + if hasattr(lp, "add_dependencies_v2"): + knl = lp.add_dependencies_v2(knl, deps_and_domains) # get a schedule to check if knl.state < KernelState.PREPROCESSED: -- GitLab 
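A minimal sketch of the guarded pattern the test changes above introduce,
assuming a kernel ``knl`` and this package importable as ``schedule_checker``
(the helper name ``attach_legacy_deps`` is invented for illustration)::

    import loopy as lp
    from schedule_checker import (
        get_statement_pair_dependency_sets_from_legacy_knl,
    )

    def attach_legacy_deps(knl):
        # compute statement-pair dependency sets from the legacy kernel deps
        deps_and_domains = (
            get_statement_pair_dependency_sets_from_legacy_knl(knl))
        # add_dependencies_v2 only exists on the loopy branch
        # loopy/iname-sets-in-loop-priorities; skip it elsewhere
        if hasattr(lp, "add_dependencies_v2"):
            knl = lp.add_dependencies_v2(knl, deps_and_domains)
        return knl, deps_and_domains
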
From 0eb9fe5d758ef3ce62fb0baafd0fcb021912959e Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 25 Feb 2020 10:04:49 -0600 Subject: [PATCH 180/415] fixing flake8 issues --- test/test_invalid_scheds.py | 25 +++++++++++++-------- test/test_valid_scheds.py | 43 ++++++++++++++++++++++++------------- 2 files changed, 44 insertions(+), 24 deletions(-) diff --git a/test/test_invalid_scheds.py b/test/test_invalid_scheds.py index 2eb31b0fa..0b06fb9c2 100644 --- a/test/test_invalid_scheds.py +++ b/test/test_invalid_scheds.py @@ -27,7 +27,6 @@ from pyopencl.tools import ( # noqa pytest_generate_tests_for_pyopencl as pytest_generate_tests) import loopy as lp -import numpy as np from schedule_checker import ( get_statement_pair_dependency_sets_from_legacy_knl, check_schedule_validity, @@ -65,7 +64,8 @@ def test_invalid_prioritiy_detection(): unprocessed_knl = knl0.copy() - deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) + deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl( + unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): knl0 = lp.add_dependencies_v2(knl0, deps_and_domains) @@ -75,7 +75,8 @@ def test_invalid_prioritiy_detection(): knl0 = get_one_scheduled_kernel(knl0) schedule_items = knl0.schedule - sched_is_valid = check_schedule_validity(unprocessed_knl, deps_and_domains, schedule_items) + sched_is_valid = check_schedule_validity( + unprocessed_knl, deps_and_domains, schedule_items) assert sched_is_valid # no error: @@ -84,7 +85,8 @@ def test_invalid_prioritiy_detection(): unprocessed_knl = knl1.copy() - deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) + deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl( + unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): knl1 = lp.add_dependencies_v2(knl1, deps_and_domains) @@ -94,7 +96,8 @@ def test_invalid_prioritiy_detection(): knl1 = get_one_scheduled_kernel(knl1) schedule_items = knl1.schedule - sched_is_valid = check_schedule_validity(unprocessed_knl, deps_and_domains, schedule_items) + sched_is_valid = check_schedule_validity( + unprocessed_knl, deps_and_domains, schedule_items) assert sched_is_valid # error (cycle): @@ -105,7 +108,8 @@ def test_invalid_prioritiy_detection(): """ unprocessed_knl = knl2.copy() - deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) + deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl( + unprocessed_knl) # get a schedule to check if knl2.state < KernelState.PREPROCESSED: @@ -113,7 +117,8 @@ def test_invalid_prioritiy_detection(): knl2 = get_one_scheduled_kernel(knl2) schedule_items = knl2.schedule - sched_is_valid = check_schedule_validity(unprocessed_knl, deps_and_domains, schedule_items) + sched_is_valid = check_schedule_validity( + unprocessed_knl, deps_and_domains, schedule_items) """ # should raise error assert False @@ -127,7 +132,8 @@ def test_invalid_prioritiy_detection(): """ unprocessed_knl = knl3.copy() - deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) + deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl( + unprocessed_knl) # get a schedule to check if knl3.state < KernelState.PREPROCESSED: @@ -135,7 +141,8 @@ def test_invalid_prioritiy_detection(): knl3 = get_one_scheduled_kernel(knl3) schedule_items = knl3.schedule - sched_is_valid = check_schedule_validity(unprocessed_knl, deps_and_domains, schedule_items) + sched_is_valid = check_schedule_validity( + unprocessed_knl, 
deps_and_domains, schedule_items) """ # should raise error assert False diff --git a/test/test_valid_scheds.py b/test/test_valid_scheds.py index d897d72bf..f12211dce 100644 --- a/test/test_valid_scheds.py +++ b/test/test_valid_scheds.py @@ -73,18 +73,19 @@ def test_loop_prioritization(): unprocessed_knl = knl.copy() - deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) + deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl( + unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): knl = lp.add_dependencies_v2(knl, deps_and_domains) - # get a schedule to check if knl.state < KernelState.PREPROCESSED: knl = preprocess_kernel(knl) knl = get_one_scheduled_kernel(knl) schedule_items = knl.schedule - sched_is_valid = check_schedule_validity(unprocessed_knl, deps_and_domains, schedule_items) + sched_is_valid = check_schedule_validity( + unprocessed_knl, deps_and_domains, schedule_items) assert sched_is_valid @@ -109,7 +110,8 @@ def test_matmul(): unprocessed_knl = knl.copy() - deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) + deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl( + unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): knl = lp.add_dependencies_v2(knl, deps_and_domains) @@ -119,7 +121,8 @@ def test_matmul(): knl = get_one_scheduled_kernel(knl) schedule_items = knl.schedule - sched_is_valid = check_schedule_validity(unprocessed_knl, deps_and_domains, schedule_items) + sched_is_valid = check_schedule_validity( + unprocessed_knl, deps_and_domains, schedule_items) assert sched_is_valid @@ -157,7 +160,8 @@ def test_dependent_domain(): unprocessed_knl = knl.copy() - deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) + deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl( + unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): knl = lp.add_dependencies_v2(knl, deps_and_domains) @@ -167,7 +171,8 @@ def test_dependent_domain(): knl = get_one_scheduled_kernel(knl) schedule_items = knl.schedule - sched_is_valid = check_schedule_validity(unprocessed_knl, deps_and_domains, schedule_items) + sched_is_valid = check_schedule_validity( + unprocessed_knl, deps_and_domains, schedule_items) assert sched_is_valid @@ -208,7 +213,8 @@ def test_stroud_bernstein(): unprocessed_knl = knl.copy() - deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) + deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl( + unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): knl = lp.add_dependencies_v2(knl, deps_and_domains) @@ -218,7 +224,8 @@ def test_stroud_bernstein(): knl = get_one_scheduled_kernel(knl) schedule_items = knl.schedule - sched_is_valid = check_schedule_validity(unprocessed_knl, deps_and_domains, schedule_items) + sched_is_valid = check_schedule_validity( + unprocessed_knl, deps_and_domains, schedule_items) assert sched_is_valid @@ -242,7 +249,8 @@ def test_nop(): unprocessed_knl = knl.copy() - deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) + deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl( + unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): knl = lp.add_dependencies_v2(knl, deps_and_domains) @@ -252,7 +260,8 @@ def test_nop(): knl = get_one_scheduled_kernel(knl) schedule_items = knl.schedule - sched_is_valid = check_schedule_validity(unprocessed_knl, deps_and_domains, schedule_items) + sched_is_valid = 
check_schedule_validity( + unprocessed_knl, deps_and_domains, schedule_items) assert sched_is_valid @@ -286,7 +295,8 @@ def test_multi_domain(): unprocessed_knl = knl.copy() - deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) + deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl( + unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): knl = lp.add_dependencies_v2(knl, deps_and_domains) @@ -296,7 +306,8 @@ def test_multi_domain(): knl = get_one_scheduled_kernel(knl) schedule_items = knl.schedule - sched_is_valid = check_schedule_validity(unprocessed_knl, deps_and_domains, schedule_items) + sched_is_valid = check_schedule_validity( + unprocessed_knl, deps_and_domains, schedule_items) assert sched_is_valid @@ -319,7 +330,8 @@ def test_loop_carried_deps(): unprocessed_knl = knl.copy() - deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(unprocessed_knl) + deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl( + unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): knl = lp.add_dependencies_v2(knl, deps_and_domains) @@ -329,7 +341,8 @@ def test_loop_carried_deps(): knl = get_one_scheduled_kernel(knl) schedule_items = knl.schedule - sched_is_valid = check_schedule_validity(unprocessed_knl, deps_and_domains, schedule_items) + sched_is_valid = check_schedule_validity( + unprocessed_knl, deps_and_domains, schedule_items) assert sched_is_valid -- GitLab From eee9dcb1ffc6c1ef91267a8fe30c0200bf0a8821 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 25 Feb 2020 10:17:20 -0600 Subject: [PATCH 181/415] check whether loopy hasattr constrain_loop_nesting and alter test accordingly; makes tests pass whether or not we have branch loopy/iname-sets-in-loop-priorities --- test/test_invalid_scheds.py | 66 +++++++++++++++++++++---------------- 1 file changed, 38 insertions(+), 28 deletions(-) diff --git a/test/test_invalid_scheds.py b/test/test_invalid_scheds.py index 0b06fb9c2..05073502a 100644 --- a/test/test_invalid_scheds.py +++ b/test/test_invalid_scheds.py @@ -104,50 +104,60 @@ def test_invalid_prioritiy_detection(): knl2 = lp.prioritize_loops(ref_knl, "h,i,j") knl2 = lp.prioritize_loops(knl2, "j,k") try: - knl2 = lp.prioritize_loops(knl2, "k,i") - """ - unprocessed_knl = knl2.copy() + if hasattr(lp, "constrain_loop_nesting"): + knl2 = lp.constrain_loop_nesting(knl2, "k,i") + else: + knl2 = lp.prioritize_loops(knl2, "k,i") - deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl( - unprocessed_knl) + unprocessed_knl = knl2.copy() - # get a schedule to check - if knl2.state < KernelState.PREPROCESSED: - knl2 = preprocess_kernel(knl2) - knl2 = get_one_scheduled_kernel(knl2) - schedule_items = knl2.schedule + deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl( + unprocessed_knl) - sched_is_valid = check_schedule_validity( - unprocessed_knl, deps_and_domains, schedule_items) - """ + # get a schedule to check + if knl2.state < KernelState.PREPROCESSED: + knl2 = preprocess_kernel(knl2) + knl2 = get_one_scheduled_kernel(knl2) + schedule_items = knl2.schedule + + sched_is_valid = check_schedule_validity( + unprocessed_knl, deps_and_domains, schedule_items) # should raise error assert False except ValueError as e: - assert "cycle detected" in str(e) + if hasattr(lp, "constrain_loop_nesting"): + assert "cycle detected" in str(e) + else: + assert "invalid priorities" in str(e) # error (inconsistent priorities): knl3 = lp.prioritize_loops(ref_knl, "h,i,j,k") try: - knl3 = 
lp.prioritize_loops(knl3, "h,j,i,k") - """ - unprocessed_knl = knl3.copy() + if hasattr(lp, "constrain_loop_nesting"): + knl3 = lp.constrain_loop_nesting(knl3, "h,j,i,k") + else: + knl3 = lp.prioritize_loops(knl3, "h,j,i,k") - deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl( - unprocessed_knl) + unprocessed_knl = knl3.copy() - # get a schedule to check - if knl3.state < KernelState.PREPROCESSED: - knl3 = preprocess_kernel(knl3) - knl3 = get_one_scheduled_kernel(knl3) - schedule_items = knl3.schedule + deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl( + unprocessed_knl) - sched_is_valid = check_schedule_validity( - unprocessed_knl, deps_and_domains, schedule_items) - """ + # get a schedule to check + if knl3.state < KernelState.PREPROCESSED: + knl3 = preprocess_kernel(knl3) + knl3 = get_one_scheduled_kernel(knl3) + schedule_items = knl3.schedule + + sched_is_valid = check_schedule_validity( + unprocessed_knl, deps_and_domains, schedule_items) # should raise error assert False except ValueError as e: - assert "cycle detected" in str(e) + if hasattr(lp, "constrain_loop_nesting"): + assert "cycle detected" in str(e) + else: + assert "invalid priorities" in str(e) if __name__ == "__main__": -- GitLab From 6876844e6e7fc6602e30a123b08a2e7f998ace4b Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 25 Feb 2020 11:27:38 -0600 Subject: [PATCH 182/415] preparing to move schedule_checker files into loopy while preserving git history --- __init__.py => schedule_checker/__init__.py | 0 dependency.py => schedule_checker/dependency.py | 0 .../example_dependency_checking.py | 0 .../example_lex_map_creation.py | 0 .../example_pairwise_schedule_validity.py | 0 .../example_wave_equation.py | 0 .../lexicographic_order_map.py | 0 sched_check_utils.py => schedule_checker/sched_check_utils.py | 0 schedule.py => schedule_checker/schedule.py | 0 {test => schedule_checker/test}/test_invalid_scheds.py | 0 {test => schedule_checker/test}/test_valid_scheds.py | 0 version.py => schedule_checker/version.py | 0 12 files changed, 0 insertions(+), 0 deletions(-) rename __init__.py => schedule_checker/__init__.py (100%) rename dependency.py => schedule_checker/dependency.py (100%) rename example_dependency_checking.py => schedule_checker/example_dependency_checking.py (100%) rename example_lex_map_creation.py => schedule_checker/example_lex_map_creation.py (100%) rename example_pairwise_schedule_validity.py => schedule_checker/example_pairwise_schedule_validity.py (100%) rename example_wave_equation.py => schedule_checker/example_wave_equation.py (100%) rename lexicographic_order_map.py => schedule_checker/lexicographic_order_map.py (100%) rename sched_check_utils.py => schedule_checker/sched_check_utils.py (100%) rename schedule.py => schedule_checker/schedule.py (100%) rename {test => schedule_checker/test}/test_invalid_scheds.py (100%) rename {test => schedule_checker/test}/test_valid_scheds.py (100%) rename version.py => schedule_checker/version.py (100%) diff --git a/__init__.py b/schedule_checker/__init__.py similarity index 100% rename from __init__.py rename to schedule_checker/__init__.py diff --git a/dependency.py b/schedule_checker/dependency.py similarity index 100% rename from dependency.py rename to schedule_checker/dependency.py diff --git a/example_dependency_checking.py b/schedule_checker/example_dependency_checking.py similarity index 100% rename from example_dependency_checking.py rename to schedule_checker/example_dependency_checking.py diff --git 
a/example_lex_map_creation.py b/schedule_checker/example_lex_map_creation.py similarity index 100% rename from example_lex_map_creation.py rename to schedule_checker/example_lex_map_creation.py diff --git a/example_pairwise_schedule_validity.py b/schedule_checker/example_pairwise_schedule_validity.py similarity index 100% rename from example_pairwise_schedule_validity.py rename to schedule_checker/example_pairwise_schedule_validity.py diff --git a/example_wave_equation.py b/schedule_checker/example_wave_equation.py similarity index 100% rename from example_wave_equation.py rename to schedule_checker/example_wave_equation.py diff --git a/lexicographic_order_map.py b/schedule_checker/lexicographic_order_map.py similarity index 100% rename from lexicographic_order_map.py rename to schedule_checker/lexicographic_order_map.py diff --git a/sched_check_utils.py b/schedule_checker/sched_check_utils.py similarity index 100% rename from sched_check_utils.py rename to schedule_checker/sched_check_utils.py diff --git a/schedule.py b/schedule_checker/schedule.py similarity index 100% rename from schedule.py rename to schedule_checker/schedule.py diff --git a/test/test_invalid_scheds.py b/schedule_checker/test/test_invalid_scheds.py similarity index 100% rename from test/test_invalid_scheds.py rename to schedule_checker/test/test_invalid_scheds.py diff --git a/test/test_valid_scheds.py b/schedule_checker/test/test_valid_scheds.py similarity index 100% rename from test/test_valid_scheds.py rename to schedule_checker/test/test_valid_scheds.py diff --git a/version.py b/schedule_checker/version.py similarity index 100% rename from version.py rename to schedule_checker/version.py -- GitLab From 91125cdfff9746d97dd12074349da55000426e4b Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 25 Feb 2020 11:33:08 -0600 Subject: [PATCH 183/415] moving schedule_checker into schedule --- {schedule_checker => loopy/schedule/schedule_checker}/__init__.py | 0 .../schedule/schedule_checker}/dependency.py | 0 .../schedule/schedule_checker}/example_dependency_checking.py | 0 .../schedule/schedule_checker}/example_lex_map_creation.py | 0 .../schedule_checker}/example_pairwise_schedule_validity.py | 0 .../schedule/schedule_checker}/example_wave_equation.py | 0 .../schedule/schedule_checker}/lexicographic_order_map.py | 0 .../schedule/schedule_checker}/sched_check_utils.py | 0 {schedule_checker => loopy/schedule/schedule_checker}/schedule.py | 0 .../schedule/schedule_checker}/test/test_invalid_scheds.py | 0 .../schedule/schedule_checker}/test/test_valid_scheds.py | 0 {schedule_checker => loopy/schedule/schedule_checker}/version.py | 0 12 files changed, 0 insertions(+), 0 deletions(-) rename {schedule_checker => loopy/schedule/schedule_checker}/__init__.py (100%) rename {schedule_checker => loopy/schedule/schedule_checker}/dependency.py (100%) rename {schedule_checker => loopy/schedule/schedule_checker}/example_dependency_checking.py (100%) rename {schedule_checker => loopy/schedule/schedule_checker}/example_lex_map_creation.py (100%) rename {schedule_checker => loopy/schedule/schedule_checker}/example_pairwise_schedule_validity.py (100%) rename {schedule_checker => loopy/schedule/schedule_checker}/example_wave_equation.py (100%) rename {schedule_checker => loopy/schedule/schedule_checker}/lexicographic_order_map.py (100%) rename {schedule_checker => loopy/schedule/schedule_checker}/sched_check_utils.py (100%) rename {schedule_checker => loopy/schedule/schedule_checker}/schedule.py (100%) rename {schedule_checker => 
loopy/schedule/schedule_checker}/test/test_invalid_scheds.py (100%) rename {schedule_checker => loopy/schedule/schedule_checker}/test/test_valid_scheds.py (100%) rename {schedule_checker => loopy/schedule/schedule_checker}/version.py (100%) diff --git a/schedule_checker/__init__.py b/loopy/schedule/schedule_checker/__init__.py similarity index 100% rename from schedule_checker/__init__.py rename to loopy/schedule/schedule_checker/__init__.py diff --git a/schedule_checker/dependency.py b/loopy/schedule/schedule_checker/dependency.py similarity index 100% rename from schedule_checker/dependency.py rename to loopy/schedule/schedule_checker/dependency.py diff --git a/schedule_checker/example_dependency_checking.py b/loopy/schedule/schedule_checker/example_dependency_checking.py similarity index 100% rename from schedule_checker/example_dependency_checking.py rename to loopy/schedule/schedule_checker/example_dependency_checking.py diff --git a/schedule_checker/example_lex_map_creation.py b/loopy/schedule/schedule_checker/example_lex_map_creation.py similarity index 100% rename from schedule_checker/example_lex_map_creation.py rename to loopy/schedule/schedule_checker/example_lex_map_creation.py diff --git a/schedule_checker/example_pairwise_schedule_validity.py b/loopy/schedule/schedule_checker/example_pairwise_schedule_validity.py similarity index 100% rename from schedule_checker/example_pairwise_schedule_validity.py rename to loopy/schedule/schedule_checker/example_pairwise_schedule_validity.py diff --git a/schedule_checker/example_wave_equation.py b/loopy/schedule/schedule_checker/example_wave_equation.py similarity index 100% rename from schedule_checker/example_wave_equation.py rename to loopy/schedule/schedule_checker/example_wave_equation.py diff --git a/schedule_checker/lexicographic_order_map.py b/loopy/schedule/schedule_checker/lexicographic_order_map.py similarity index 100% rename from schedule_checker/lexicographic_order_map.py rename to loopy/schedule/schedule_checker/lexicographic_order_map.py diff --git a/schedule_checker/sched_check_utils.py b/loopy/schedule/schedule_checker/sched_check_utils.py similarity index 100% rename from schedule_checker/sched_check_utils.py rename to loopy/schedule/schedule_checker/sched_check_utils.py diff --git a/schedule_checker/schedule.py b/loopy/schedule/schedule_checker/schedule.py similarity index 100% rename from schedule_checker/schedule.py rename to loopy/schedule/schedule_checker/schedule.py diff --git a/schedule_checker/test/test_invalid_scheds.py b/loopy/schedule/schedule_checker/test/test_invalid_scheds.py similarity index 100% rename from schedule_checker/test/test_invalid_scheds.py rename to loopy/schedule/schedule_checker/test/test_invalid_scheds.py diff --git a/schedule_checker/test/test_valid_scheds.py b/loopy/schedule/schedule_checker/test/test_valid_scheds.py similarity index 100% rename from schedule_checker/test/test_valid_scheds.py rename to loopy/schedule/schedule_checker/test/test_valid_scheds.py diff --git a/schedule_checker/version.py b/loopy/schedule/schedule_checker/version.py similarity index 100% rename from schedule_checker/version.py rename to loopy/schedule/schedule_checker/version.py -- GitLab From 1e12ebe9c1aee34b5b65018af3e64dd8502a1f0b Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 2 Mar 2020 03:06:57 -0600 Subject: [PATCH 184/415] renamed directory schedule_checker -> linearization_checker --- .../{schedule_checker => linearization_checker}/__init__.py | 0 .../{schedule_checker => 
linearization_checker}/dependency.py | 0 .../example_dependency_checking.py | 0 .../example_lex_map_creation.py | 0 .../example_pairwise_schedule_validity.py | 0 .../example_wave_equation.py | 0 .../lexicographic_order_map.py | 0 .../sched_check_utils.py | 0 .../{schedule_checker => linearization_checker}/schedule.py | 0 .../test/test_invalid_scheds.py | 0 .../test/test_valid_scheds.py | 0 .../{schedule_checker => linearization_checker}/version.py | 0 12 files changed, 0 insertions(+), 0 deletions(-) rename loopy/schedule/{schedule_checker => linearization_checker}/__init__.py (100%) rename loopy/schedule/{schedule_checker => linearization_checker}/dependency.py (100%) rename loopy/schedule/{schedule_checker => linearization_checker}/example_dependency_checking.py (100%) rename loopy/schedule/{schedule_checker => linearization_checker}/example_lex_map_creation.py (100%) rename loopy/schedule/{schedule_checker => linearization_checker}/example_pairwise_schedule_validity.py (100%) rename loopy/schedule/{schedule_checker => linearization_checker}/example_wave_equation.py (100%) rename loopy/schedule/{schedule_checker => linearization_checker}/lexicographic_order_map.py (100%) rename loopy/schedule/{schedule_checker => linearization_checker}/sched_check_utils.py (100%) rename loopy/schedule/{schedule_checker => linearization_checker}/schedule.py (100%) rename loopy/schedule/{schedule_checker => linearization_checker}/test/test_invalid_scheds.py (100%) rename loopy/schedule/{schedule_checker => linearization_checker}/test/test_valid_scheds.py (100%) rename loopy/schedule/{schedule_checker => linearization_checker}/version.py (100%) diff --git a/loopy/schedule/schedule_checker/__init__.py b/loopy/schedule/linearization_checker/__init__.py similarity index 100% rename from loopy/schedule/schedule_checker/__init__.py rename to loopy/schedule/linearization_checker/__init__.py diff --git a/loopy/schedule/schedule_checker/dependency.py b/loopy/schedule/linearization_checker/dependency.py similarity index 100% rename from loopy/schedule/schedule_checker/dependency.py rename to loopy/schedule/linearization_checker/dependency.py diff --git a/loopy/schedule/schedule_checker/example_dependency_checking.py b/loopy/schedule/linearization_checker/example_dependency_checking.py similarity index 100% rename from loopy/schedule/schedule_checker/example_dependency_checking.py rename to loopy/schedule/linearization_checker/example_dependency_checking.py diff --git a/loopy/schedule/schedule_checker/example_lex_map_creation.py b/loopy/schedule/linearization_checker/example_lex_map_creation.py similarity index 100% rename from loopy/schedule/schedule_checker/example_lex_map_creation.py rename to loopy/schedule/linearization_checker/example_lex_map_creation.py diff --git a/loopy/schedule/schedule_checker/example_pairwise_schedule_validity.py b/loopy/schedule/linearization_checker/example_pairwise_schedule_validity.py similarity index 100% rename from loopy/schedule/schedule_checker/example_pairwise_schedule_validity.py rename to loopy/schedule/linearization_checker/example_pairwise_schedule_validity.py diff --git a/loopy/schedule/schedule_checker/example_wave_equation.py b/loopy/schedule/linearization_checker/example_wave_equation.py similarity index 100% rename from loopy/schedule/schedule_checker/example_wave_equation.py rename to loopy/schedule/linearization_checker/example_wave_equation.py diff --git a/loopy/schedule/schedule_checker/lexicographic_order_map.py 
b/loopy/schedule/linearization_checker/lexicographic_order_map.py similarity index 100% rename from loopy/schedule/schedule_checker/lexicographic_order_map.py rename to loopy/schedule/linearization_checker/lexicographic_order_map.py diff --git a/loopy/schedule/schedule_checker/sched_check_utils.py b/loopy/schedule/linearization_checker/sched_check_utils.py similarity index 100% rename from loopy/schedule/schedule_checker/sched_check_utils.py rename to loopy/schedule/linearization_checker/sched_check_utils.py diff --git a/loopy/schedule/schedule_checker/schedule.py b/loopy/schedule/linearization_checker/schedule.py similarity index 100% rename from loopy/schedule/schedule_checker/schedule.py rename to loopy/schedule/linearization_checker/schedule.py diff --git a/loopy/schedule/schedule_checker/test/test_invalid_scheds.py b/loopy/schedule/linearization_checker/test/test_invalid_scheds.py similarity index 100% rename from loopy/schedule/schedule_checker/test/test_invalid_scheds.py rename to loopy/schedule/linearization_checker/test/test_invalid_scheds.py diff --git a/loopy/schedule/schedule_checker/test/test_valid_scheds.py b/loopy/schedule/linearization_checker/test/test_valid_scheds.py similarity index 100% rename from loopy/schedule/schedule_checker/test/test_valid_scheds.py rename to loopy/schedule/linearization_checker/test/test_valid_scheds.py diff --git a/loopy/schedule/schedule_checker/version.py b/loopy/schedule/linearization_checker/version.py similarity index 100% rename from loopy/schedule/schedule_checker/version.py rename to loopy/schedule/linearization_checker/version.py -- GitLab From c25eeb1ee8b33f9192dfe600d02a2fc537015799 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 2 Mar 2020 03:10:36 -0600 Subject: [PATCH 185/415] rename schedule_checker->linearization_checker within files --- .../linearization_checker/__init__.py | 14 +++++------ .../linearization_checker/dependency.py | 24 +++++++++---------- .../example_dependency_checking.py | 10 ++++---- .../example_lex_map_creation.py | 4 ++-- .../example_pairwise_schedule_validity.py | 6 ++--- .../example_wave_equation.py | 14 +++++------ .../lexicographic_order_map.py | 2 +- .../linearization_checker/schedule.py | 10 ++++---- .../test/test_invalid_scheds.py | 2 +- .../test/test_valid_scheds.py | 2 +- 10 files changed, 44 insertions(+), 44 deletions(-) diff --git a/loopy/schedule/linearization_checker/__init__.py b/loopy/schedule/linearization_checker/__init__.py index 5b8062070..1042b1b83 100644 --- a/loopy/schedule/linearization_checker/__init__.py +++ b/loopy/schedule/linearization_checker/__init__.py @@ -19,7 +19,7 @@ def get_statement_pair_dependency_sets_from_legacy_knl(knl): """ - from schedule_checker.dependency import ( + from linearization_checker.dependency import ( create_dependencies_from_legacy_knl, ) @@ -49,14 +49,14 @@ def check_schedule_validity( verbose=False, _use_scheduled_kernel_to_obtain_loop_priority=False): - from schedule_checker.dependency import ( + from linearization_checker.dependency import ( create_dependency_constraint, ) - from schedule_checker.schedule import LexSchedule - from schedule_checker.lexicographic_order_map import ( + from linearization_checker.schedule import LexSchedule + from linearization_checker.lexicographic_order_map import ( get_statement_ordering_map, ) - from schedule_checker.sched_check_utils import ( + from linearization_checker.sched_check_utils import ( prettier_map_string, ) @@ -202,7 +202,7 @@ def check_schedule_validity( # align in_ dims import islpy as isl - from 
schedule_checker.sched_check_utils import ( + from linearization_checker.sched_check_utils import ( reorder_dims_by_name, ) sio_in_names = sio.space.get_var_names(isl.dim_type.in_) @@ -259,7 +259,7 @@ def check_schedule_validity( print("loop priority known:") print(preprocessed_knl.loop_priority) """ - from schedule_checker.sched_check_utils import ( + from linearization_checker.sched_check_utils import ( get_concurrent_inames, ) conc_inames, non_conc_inames = get_concurrent_inames(scheduled_knl) diff --git a/loopy/schedule/linearization_checker/dependency.py b/loopy/schedule/linearization_checker/dependency.py index a780a036d..418650d4f 100644 --- a/loopy/schedule/linearization_checker/dependency.py +++ b/loopy/schedule/linearization_checker/dependency.py @@ -215,7 +215,7 @@ def create_dependency_constraint( """ - from schedule_checker.sched_check_utils import ( + from linearization_checker.sched_check_utils import ( make_islvars_with_marker, append_apostrophes, add_dims_to_isl_set, @@ -225,7 +225,7 @@ def create_dependency_constraint( # This function uses the dependency given to create the following constraint: # Statement [s,i,j] comes before statement [s',i',j'] iff - from schedule_checker.sched_check_utils import ( + from linearization_checker.sched_check_utils import ( list_var_names_in_isl_sets, ) if dom_inames_ordered_before is None: @@ -312,7 +312,7 @@ def create_dependency_constraint( p_tuple[p_tuple.index(iname)+1:]]) nested_inside[iname] = comes_after_iname - from schedule_checker.sched_check_utils import ( + from linearization_checker.sched_check_utils import ( get_orderings_of_length_n) # get all orderings that are explicitly allowed by priorities orders = get_orderings_of_length_n( @@ -350,7 +350,7 @@ def create_dependency_constraint( # TODO could this happen? 
assert False - from schedule_checker.lexicographic_order_map import ( + from linearization_checker.lexicographic_order_map import ( get_lex_order_constraint ) # TODO handle case where inames list is empty @@ -432,7 +432,7 @@ def _create_5pt_stencil_dependency_constraint( all_dom_inames_ordered=None, ): - from schedule_checker.sched_check_utils import ( + from linearization_checker.sched_check_utils import ( make_islvars_with_marker, append_apostrophes, add_dims_to_isl_set, @@ -442,7 +442,7 @@ def _create_5pt_stencil_dependency_constraint( # This function uses the dependency given to create the following constraint: # Statement [s,i,j] comes before statement [s',i',j'] iff - from schedule_checker.sched_check_utils import ( + from linearization_checker.sched_check_utils import ( list_var_names_in_isl_sets, ) if all_dom_inames_ordered is None: @@ -557,7 +557,7 @@ def create_arbitrary_dependency_constraint( # TODO test after switching primes to before vars - from schedule_checker.sched_check_utils import ( + from linearization_checker.sched_check_utils import ( make_islvars_with_marker, #append_apostrophes, append_marker_to_strings, @@ -568,7 +568,7 @@ def create_arbitrary_dependency_constraint( # This function uses the constraint given to create the following map: # Statement [s,i,j] comes before statement [s',i',j'] iff - from schedule_checker.sched_check_utils import ( + from linearization_checker.sched_check_utils import ( list_var_names_in_isl_sets, ) if all_dom_inames_ordered is None: @@ -719,12 +719,12 @@ def create_dependencies_from_legacy_knl(knl): """ # Introduce SAME dep for set of shared, non-concurrent inames - from schedule_checker.sched_check_utils import ( + from linearization_checker.sched_check_utils import ( get_concurrent_inames, get_all_nonconcurrent_insn_iname_subsets, get_sched_item_ids_within_inames, ) - from schedule_checker.schedule import LexScheduleStatement + from linearization_checker.schedule import LexScheduleStatement dt = DependencyType conc_inames, non_conc_inames = get_concurrent_inames(knl) statement_dep_sets = [] @@ -854,7 +854,7 @@ def get_dependency_maps( from loopy.schedule import Barrier, RunInstruction for sched_item in schedule_items: if isinstance(sched_item, (RunInstruction, Barrier)): - from schedule_checker.sched_check_utils import ( + from linearization_checker.sched_check_utils import ( _get_insn_id_from_sched_item, ) lp_insn_id = _get_insn_id_from_sched_item(sched_item) @@ -865,7 +865,7 @@ def get_dependency_maps( lp_insn_id_to_lex_sched_id[sched_item] = next_sid next_sid += 1 - from schedule_checker.sched_check_utils import ( + from linearization_checker.sched_check_utils import ( get_concurrent_inames, ) conc_inames, non_conc_inames = get_concurrent_inames(knl) diff --git a/loopy/schedule/linearization_checker/example_dependency_checking.py b/loopy/schedule/linearization_checker/example_dependency_checking.py index 54ab553db..0551eb665 100644 --- a/loopy/schedule/linearization_checker/example_dependency_checking.py +++ b/loopy/schedule/linearization_checker/example_dependency_checking.py @@ -1,20 +1,20 @@ import loopy as lp -from schedule_checker.dependency import ( # noqa +from linearization_checker.dependency import ( # noqa StatementPairDependencySet, DependencyType as dt, create_dependency_constraint, ) -from schedule_checker.lexicographic_order_map import ( +from linearization_checker.lexicographic_order_map import ( create_lex_order_map, get_statement_ordering_map, ) -from schedule_checker.sched_check_utils import ( +from 
linearization_checker.sched_check_utils import ( prettier_map_string as pmap, append_apostrophes, create_explicit_map_from_tuples, get_isl_space, ) -from schedule_checker.schedule import LexScheduleStatement +from linearization_checker.schedule import LexScheduleStatement # make example kernel @@ -142,7 +142,7 @@ aligned_constraint_map = constraint_map.align_params(SIO_valid.space) # align in_ dims import islpy as isl -from schedule_checker.sched_check_utils import ( +from linearization_checker.sched_check_utils import ( reorder_dims_by_name, ) SIO_valid_in_names = SIO_valid.space.get_var_names(isl.dim_type.in_) diff --git a/loopy/schedule/linearization_checker/example_lex_map_creation.py b/loopy/schedule/linearization_checker/example_lex_map_creation.py index 83ff538d3..2a5dd352a 100644 --- a/loopy/schedule/linearization_checker/example_lex_map_creation.py +++ b/loopy/schedule/linearization_checker/example_lex_map_creation.py @@ -1,8 +1,8 @@ -from schedule_checker.lexicographic_order_map import ( +from linearization_checker.lexicographic_order_map import ( get_statement_ordering_map, create_lex_order_map, ) -from schedule_checker.sched_check_utils import ( +from linearization_checker.sched_check_utils import ( create_explicit_map_from_tuples, get_isl_space, prettier_map_string as pmap, diff --git a/loopy/schedule/linearization_checker/example_pairwise_schedule_validity.py b/loopy/schedule/linearization_checker/example_pairwise_schedule_validity.py index 542f6ee6f..85e85f07a 100644 --- a/loopy/schedule/linearization_checker/example_pairwise_schedule_validity.py +++ b/loopy/schedule/linearization_checker/example_pairwise_schedule_validity.py @@ -1,13 +1,13 @@ import loopy as lp import numpy as np -from schedule_checker import ( +from linearization_checker import ( get_statement_pair_dependency_sets_from_legacy_knl, check_schedule_validity, ) -from schedule_checker.sched_check_utils import ( +from linearization_checker.sched_check_utils import ( create_graph_from_pairs, ) -from schedule_checker.dependency import ( +from linearization_checker.dependency import ( get_dependency_maps, ) from loopy.kernel import KernelState diff --git a/loopy/schedule/linearization_checker/example_wave_equation.py b/loopy/schedule/linearization_checker/example_wave_equation.py index 6afa3044b..8f639caf3 100644 --- a/loopy/schedule/linearization_checker/example_wave_equation.py +++ b/loopy/schedule/linearization_checker/example_wave_equation.py @@ -7,19 +7,19 @@ import numpy as np import islpy as isl #from loopy.kernel_stat_collector import KernelStatCollector #from loopy.kernel_stat_collector import KernelStatOptions as kso # noqa -from schedule_checker import check_schedule_validity -from schedule_checker.sched_check_utils import ( +from linearization_checker import check_schedule_validity +from linearization_checker.sched_check_utils import ( prettier_map_string, reorder_dims_by_name, append_apostrophes, append_marker_to_isl_map_var_names, ) -from schedule_checker.dependency import ( +from linearization_checker.dependency import ( create_arbitrary_dependency_constraint, ) from dependency import _create_5pt_stencil_dependency_constraint -from schedule_checker.schedule import LexSchedule -from schedule_checker.lexicographic_order_map import ( +from linearization_checker.schedule import LexSchedule +from linearization_checker.lexicographic_order_map import ( get_statement_ordering_map, ) @@ -293,7 +293,7 @@ if not sched_is_valid: print("loop priority known:") print(preprocessed_knl.loop_priority) """ - from 
schedule_checker.sched_check_utils import ( + from linearization_checker.sched_check_utils import ( get_concurrent_inames, ) conc_inames, non_conc_inames = get_concurrent_inames(scheduled_knl) @@ -580,7 +580,7 @@ if not sched_is_valid: print("loop priority known:") print(preprocessed_knl.loop_priority) """ - from schedule_checker.sched_check_utils import ( + from linearization_checker.sched_check_utils import ( get_concurrent_inames, ) conc_inames, non_conc_inames = get_concurrent_inames(scheduled_knl) diff --git a/loopy/schedule/linearization_checker/lexicographic_order_map.py b/loopy/schedule/linearization_checker/lexicographic_order_map.py index 7abe6b0c5..870f96d7d 100644 --- a/loopy/schedule/linearization_checker/lexicographic_order_map.py +++ b/loopy/schedule/linearization_checker/lexicographic_order_map.py @@ -114,7 +114,7 @@ def create_lex_order_map( if before_names is None: before_names = ["i%s" % (i) for i in range(n_dims)] if after_names is None: - from schedule_checker.sched_check_utils import ( + from linearization_checker.sched_check_utils import ( append_marker_to_strings, ) after_names = append_marker_to_strings(before_names, marker="_") diff --git a/loopy/schedule/linearization_checker/schedule.py b/loopy/schedule/linearization_checker/schedule.py index 4c99f45ce..053180d60 100644 --- a/loopy/schedule/linearization_checker/schedule.py +++ b/loopy/schedule/linearization_checker/schedule.py @@ -227,7 +227,7 @@ class LexSchedule(object): # sometimes be able to skip increment, but it's not hurting anything # TODO might not need this increment period? elif isinstance(sched_item, (RunInstruction, Barrier)): - from schedule_checker.sched_check_utils import ( + from linearization_checker.sched_check_utils import ( _get_insn_id_from_sched_item, ) lp_insn_id = _get_insn_id_from_sched_item(sched_item) @@ -367,12 +367,12 @@ class LexSchedule(object): """ - from schedule_checker.sched_check_utils import ( + from linearization_checker.sched_check_utils import ( create_symbolic_isl_map_from_tuples, add_dims_to_isl_set ) - from schedule_checker.sched_check_utils import ( + from linearization_checker.sched_check_utils import ( list_var_names_in_isl_sets, ) if dom_inames_ordered_before is None: @@ -385,7 +385,7 @@ class LexSchedule(object): # create an isl space # {('statement', used in >=1 statement domain>) -> # (lexicographic ordering dims)} - from schedule_checker.sched_check_utils import get_isl_space + from linearization_checker.sched_check_utils import get_isl_space params_sched = [self.unused_param_name] out_names_sched = self.get_lex_var_names() @@ -450,7 +450,7 @@ class LexSchedule(object): lexocigraphically greater. 
""" - from schedule_checker.lexicographic_order_map import ( + from linearization_checker.lexicographic_order_map import ( create_lex_order_map, ) n_dims = self.max_lex_dims() diff --git a/loopy/schedule/linearization_checker/test/test_invalid_scheds.py b/loopy/schedule/linearization_checker/test/test_invalid_scheds.py index 05073502a..8b55d4829 100644 --- a/loopy/schedule/linearization_checker/test/test_invalid_scheds.py +++ b/loopy/schedule/linearization_checker/test/test_invalid_scheds.py @@ -27,7 +27,7 @@ from pyopencl.tools import ( # noqa pytest_generate_tests_for_pyopencl as pytest_generate_tests) import loopy as lp -from schedule_checker import ( +from linearization_checker import ( get_statement_pair_dependency_sets_from_legacy_knl, check_schedule_validity, ) diff --git a/loopy/schedule/linearization_checker/test/test_valid_scheds.py b/loopy/schedule/linearization_checker/test/test_valid_scheds.py index f12211dce..7bc445079 100644 --- a/loopy/schedule/linearization_checker/test/test_valid_scheds.py +++ b/loopy/schedule/linearization_checker/test/test_valid_scheds.py @@ -28,7 +28,7 @@ from pyopencl.tools import ( # noqa as pytest_generate_tests) import loopy as lp import numpy as np -from schedule_checker import ( +from linearization_checker import ( get_statement_pair_dependency_sets_from_legacy_knl, check_schedule_validity, ) -- GitLab From a1df1d1d713fae0078a37898f4822d3a927017d4 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 2 Mar 2020 03:26:04 -0600 Subject: [PATCH 186/415] linearization_checker isn't a stand-alone module anymore; instead of importing get_statement_pair_dependency_sets_from_legacy_knl() and check_schedule_validity() from linearization_checker, get them from loopy --- loopy/__init__.py | 5 +++ .../example_pairwise_schedule_validity.py | 12 +++---- .../example_wave_equation.py | 5 ++- .../test/test_invalid_scheds.py | 20 +++++------- .../test/test_valid_scheds.py | 32 ++++++++----------- 5 files changed, 33 insertions(+), 41 deletions(-) diff --git a/loopy/__init__.py b/loopy/__init__.py index b60de6e2d..3200a4899 100644 --- a/loopy/__init__.py +++ b/loopy/__init__.py @@ -124,6 +124,9 @@ from loopy.transform.add_barrier import add_barrier from loopy.type_inference import infer_unknown_types from loopy.preprocess import preprocess_kernel, realize_reduction from loopy.schedule import generate_loop_schedules, get_one_scheduled_kernel +from loopy.schedule.linearization_checker import ( + get_statement_pair_dependency_sets_from_legacy_knl, + check_schedule_validity) from loopy.statistics import (ToCountMap, CountGranularity, stringify_stats_mapping, Op, MemAccess, get_op_poly, get_op_map, get_lmem_access_poly, get_DRAM_access_poly, get_gmem_access_poly, get_mem_access_map, @@ -249,6 +252,8 @@ __all__ = [ "preprocess_kernel", "realize_reduction", "generate_loop_schedules", "get_one_scheduled_kernel", + "get_statement_pair_dependency_sets_from_legacy_knl", + "check_schedule_validity", "GeneratedProgram", "CodeGenerationResult", "PreambleInfo", "generate_code", "generate_code_v2", "generate_body", diff --git a/loopy/schedule/linearization_checker/example_pairwise_schedule_validity.py b/loopy/schedule/linearization_checker/example_pairwise_schedule_validity.py index 85e85f07a..d2e133271 100644 --- a/loopy/schedule/linearization_checker/example_pairwise_schedule_validity.py +++ b/loopy/schedule/linearization_checker/example_pairwise_schedule_validity.py @@ -1,9 +1,5 @@ import loopy as lp import numpy as np -from linearization_checker import ( - 
get_statement_pair_dependency_sets_from_legacy_knl, - check_schedule_validity, -) from linearization_checker.sched_check_utils import ( create_graph_from_pairs, ) @@ -290,7 +286,7 @@ if knl_choice == "loop_carried_deps": unprocessed_knl = knl.copy() -legacy_deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl( +legacy_deps_and_domains = lp.get_statement_pair_dependency_sets_from_legacy_knl( unprocessed_knl) # get a schedule to check @@ -301,18 +297,18 @@ print("kernel schedueld") schedule_items = knl.schedule print("checking validity") -sched_is_valid = check_schedule_validity( +sched_is_valid = lp.check_schedule_validity( unprocessed_knl, legacy_deps_and_domains, schedule_items, verbose=True) """ -legacy_deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl(knl) +legacy_deps_and_domains = lp.get_statement_pair_dependency_sets_from_legacy_knl(knl) # get a schedule to check from loopy import get_one_scheduled_kernel scheduled_knl = get_one_scheduled_kernel(knl) schedule_items = scheduled_knl.schedule -sched_is_valid = check_schedule_validity( +sched_is_valid = lp.check_schedule_validity( knl, legacy_deps_and_domains, schedule_items, verbose=True) """ diff --git a/loopy/schedule/linearization_checker/example_wave_equation.py b/loopy/schedule/linearization_checker/example_wave_equation.py index 8f639caf3..b5a496c00 100644 --- a/loopy/schedule/linearization_checker/example_wave_equation.py +++ b/loopy/schedule/linearization_checker/example_wave_equation.py @@ -7,7 +7,6 @@ import numpy as np import islpy as isl #from loopy.kernel_stat_collector import KernelStatCollector #from loopy.kernel_stat_collector import KernelStatOptions as kso # noqa -from linearization_checker import check_schedule_validity from linearization_checker.sched_check_utils import ( prettier_map_string, reorder_dims_by_name, @@ -636,14 +635,14 @@ print("time:", time_measured) """ """ -sched_is_valid = check_schedule_validity(knl, verbose=True) +sched_is_valid = lp.check_schedule_validity(knl, verbose=True) print("is sched valid? constraint map subset of SIO?") print(sched_is_valid) """ """ -sched_is_valid = check_schedule_validity(knl, verbose=True) +sched_is_valid = lp.check_schedule_validity(knl, verbose=True) print("is sched valid? 
constraint map subset of SIO?") print(sched_is_valid) diff --git a/loopy/schedule/linearization_checker/test/test_invalid_scheds.py b/loopy/schedule/linearization_checker/test/test_invalid_scheds.py index 8b55d4829..41051b867 100644 --- a/loopy/schedule/linearization_checker/test/test_invalid_scheds.py +++ b/loopy/schedule/linearization_checker/test/test_invalid_scheds.py @@ -27,10 +27,6 @@ from pyopencl.tools import ( # noqa pytest_generate_tests_for_pyopencl as pytest_generate_tests) import loopy as lp -from linearization_checker import ( - get_statement_pair_dependency_sets_from_legacy_knl, - check_schedule_validity, -) from loopy.kernel import KernelState from loopy import ( preprocess_kernel, @@ -64,7 +60,7 @@ def test_invalid_prioritiy_detection(): unprocessed_knl = knl0.copy() - deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl( + deps_and_domains = lp.get_statement_pair_dependency_sets_from_legacy_knl( unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): knl0 = lp.add_dependencies_v2(knl0, deps_and_domains) @@ -75,7 +71,7 @@ def test_invalid_prioritiy_detection(): knl0 = get_one_scheduled_kernel(knl0) schedule_items = knl0.schedule - sched_is_valid = check_schedule_validity( + sched_is_valid = lp.check_schedule_validity( unprocessed_knl, deps_and_domains, schedule_items) assert sched_is_valid @@ -85,7 +81,7 @@ def test_invalid_prioritiy_detection(): unprocessed_knl = knl1.copy() - deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl( + deps_and_domains = lp.get_statement_pair_dependency_sets_from_legacy_knl( unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): knl1 = lp.add_dependencies_v2(knl1, deps_and_domains) @@ -96,7 +92,7 @@ def test_invalid_prioritiy_detection(): knl1 = get_one_scheduled_kernel(knl1) schedule_items = knl1.schedule - sched_is_valid = check_schedule_validity( + sched_is_valid = lp.check_schedule_validity( unprocessed_knl, deps_and_domains, schedule_items) assert sched_is_valid @@ -111,7 +107,7 @@ def test_invalid_prioritiy_detection(): unprocessed_knl = knl2.copy() - deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl( + deps_and_domains = lp.get_statement_pair_dependency_sets_from_legacy_knl( unprocessed_knl) # get a schedule to check @@ -120,7 +116,7 @@ def test_invalid_prioritiy_detection(): knl2 = get_one_scheduled_kernel(knl2) schedule_items = knl2.schedule - sched_is_valid = check_schedule_validity( + sched_is_valid = lp.check_schedule_validity( unprocessed_knl, deps_and_domains, schedule_items) # should raise error assert False @@ -140,7 +136,7 @@ def test_invalid_prioritiy_detection(): unprocessed_knl = knl3.copy() - deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl( + deps_and_domains = lp.get_statement_pair_dependency_sets_from_legacy_knl( unprocessed_knl) # get a schedule to check @@ -149,7 +145,7 @@ def test_invalid_prioritiy_detection(): knl3 = get_one_scheduled_kernel(knl3) schedule_items = knl3.schedule - sched_is_valid = check_schedule_validity( + sched_is_valid = lp.check_schedule_validity( unprocessed_knl, deps_and_domains, schedule_items) # should raise error assert False diff --git a/loopy/schedule/linearization_checker/test/test_valid_scheds.py b/loopy/schedule/linearization_checker/test/test_valid_scheds.py index 7bc445079..56bfe1902 100644 --- a/loopy/schedule/linearization_checker/test/test_valid_scheds.py +++ b/loopy/schedule/linearization_checker/test/test_valid_scheds.py @@ -28,10 +28,6 @@ from pyopencl.tools import ( # noqa as pytest_generate_tests) 
import loopy as lp import numpy as np -from linearization_checker import ( - get_statement_pair_dependency_sets_from_legacy_knl, - check_schedule_validity, -) from loopy.kernel import KernelState from loopy import ( preprocess_kernel, @@ -73,7 +69,7 @@ def test_loop_prioritization(): unprocessed_knl = knl.copy() - deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl( + deps_and_domains = lp.get_statement_pair_dependency_sets_from_legacy_knl( unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): knl = lp.add_dependencies_v2(knl, deps_and_domains) @@ -84,7 +80,7 @@ def test_loop_prioritization(): knl = get_one_scheduled_kernel(knl) schedule_items = knl.schedule - sched_is_valid = check_schedule_validity( + sched_is_valid = lp.check_schedule_validity( unprocessed_knl, deps_and_domains, schedule_items) assert sched_is_valid @@ -110,7 +106,7 @@ def test_matmul(): unprocessed_knl = knl.copy() - deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl( + deps_and_domains = lp.get_statement_pair_dependency_sets_from_legacy_knl( unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): knl = lp.add_dependencies_v2(knl, deps_and_domains) @@ -121,7 +117,7 @@ def test_matmul(): knl = get_one_scheduled_kernel(knl) schedule_items = knl.schedule - sched_is_valid = check_schedule_validity( + sched_is_valid = lp.check_schedule_validity( unprocessed_knl, deps_and_domains, schedule_items) assert sched_is_valid @@ -160,7 +156,7 @@ def test_dependent_domain(): unprocessed_knl = knl.copy() - deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl( + deps_and_domains = lp.get_statement_pair_dependency_sets_from_legacy_knl( unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): knl = lp.add_dependencies_v2(knl, deps_and_domains) @@ -171,7 +167,7 @@ def test_dependent_domain(): knl = get_one_scheduled_kernel(knl) schedule_items = knl.schedule - sched_is_valid = check_schedule_validity( + sched_is_valid = lp.check_schedule_validity( unprocessed_knl, deps_and_domains, schedule_items) assert sched_is_valid @@ -213,7 +209,7 @@ def test_stroud_bernstein(): unprocessed_knl = knl.copy() - deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl( + deps_and_domains = lp.get_statement_pair_dependency_sets_from_legacy_knl( unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): knl = lp.add_dependencies_v2(knl, deps_and_domains) @@ -224,7 +220,7 @@ def test_stroud_bernstein(): knl = get_one_scheduled_kernel(knl) schedule_items = knl.schedule - sched_is_valid = check_schedule_validity( + sched_is_valid = lp.check_schedule_validity( unprocessed_knl, deps_and_domains, schedule_items) assert sched_is_valid @@ -249,7 +245,7 @@ def test_nop(): unprocessed_knl = knl.copy() - deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl( + deps_and_domains = lp.get_statement_pair_dependency_sets_from_legacy_knl( unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): knl = lp.add_dependencies_v2(knl, deps_and_domains) @@ -260,7 +256,7 @@ def test_nop(): knl = get_one_scheduled_kernel(knl) schedule_items = knl.schedule - sched_is_valid = check_schedule_validity( + sched_is_valid = lp.check_schedule_validity( unprocessed_knl, deps_and_domains, schedule_items) assert sched_is_valid @@ -295,7 +291,7 @@ def test_multi_domain(): unprocessed_knl = knl.copy() - deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl( + deps_and_domains = lp.get_statement_pair_dependency_sets_from_legacy_knl( unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): knl = 
lp.add_dependencies_v2(knl, deps_and_domains) @@ -306,7 +302,7 @@ def test_multi_domain(): knl = get_one_scheduled_kernel(knl) schedule_items = knl.schedule - sched_is_valid = check_schedule_validity( + sched_is_valid = lp.check_schedule_validity( unprocessed_knl, deps_and_domains, schedule_items) assert sched_is_valid @@ -330,7 +326,7 @@ def test_loop_carried_deps(): unprocessed_knl = knl.copy() - deps_and_domains = get_statement_pair_dependency_sets_from_legacy_knl( + deps_and_domains = lp.get_statement_pair_dependency_sets_from_legacy_knl( unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): knl = lp.add_dependencies_v2(knl, deps_and_domains) @@ -341,7 +337,7 @@ def test_loop_carried_deps(): knl = get_one_scheduled_kernel(knl) schedule_items = knl.schedule - sched_is_valid = check_schedule_validity( + sched_is_valid = lp.check_schedule_validity( unprocessed_knl, deps_and_domains, schedule_items) assert sched_is_valid -- GitLab From d59c9eff4dc4b590c5c804ba05abb827973ecb9b Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 2 Mar 2020 03:35:36 -0600 Subject: [PATCH 187/415] change linearization_checker import path now that it's inside loopy --- .../linearization_checker/__init__.py | 14 +++++------ .../linearization_checker/dependency.py | 24 +++++++++---------- .../example_dependency_checking.py | 10 ++++---- .../example_lex_map_creation.py | 4 ++-- .../example_pairwise_schedule_validity.py | 4 ++-- .../example_wave_equation.py | 12 +++++----- .../lexicographic_order_map.py | 2 +- .../linearization_checker/schedule.py | 10 ++++---- 8 files changed, 40 insertions(+), 40 deletions(-) diff --git a/loopy/schedule/linearization_checker/__init__.py b/loopy/schedule/linearization_checker/__init__.py index 1042b1b83..3b29af170 100644 --- a/loopy/schedule/linearization_checker/__init__.py +++ b/loopy/schedule/linearization_checker/__init__.py @@ -19,7 +19,7 @@ def get_statement_pair_dependency_sets_from_legacy_knl(knl): """ - from linearization_checker.dependency import ( + from loopy.schedule.linearization_checker.dependency import ( create_dependencies_from_legacy_knl, ) @@ -49,14 +49,14 @@ def check_schedule_validity( verbose=False, _use_scheduled_kernel_to_obtain_loop_priority=False): - from linearization_checker.dependency import ( + from loopy.schedule.linearization_checker.dependency import ( create_dependency_constraint, ) - from linearization_checker.schedule import LexSchedule - from linearization_checker.lexicographic_order_map import ( + from loopy.schedule.linearization_checker.schedule import LexSchedule + from loopy.schedule.linearization_checker.lexicographic_order_map import ( get_statement_ordering_map, ) - from linearization_checker.sched_check_utils import ( + from loopy.schedule.linearization_checker.sched_check_utils import ( prettier_map_string, ) @@ -202,7 +202,7 @@ def check_schedule_validity( # align in_ dims import islpy as isl - from linearization_checker.sched_check_utils import ( + from loopy.schedule.linearization_checker.sched_check_utils import ( reorder_dims_by_name, ) sio_in_names = sio.space.get_var_names(isl.dim_type.in_) @@ -259,7 +259,7 @@ def check_schedule_validity( print("loop priority known:") print(preprocessed_knl.loop_priority) """ - from linearization_checker.sched_check_utils import ( + from loopy.schedule.linearization_checker.sched_check_utils import ( get_concurrent_inames, ) conc_inames, non_conc_inames = get_concurrent_inames(scheduled_knl) diff --git a/loopy/schedule/linearization_checker/dependency.py 
b/loopy/schedule/linearization_checker/dependency.py index 418650d4f..d17c7d299 100644 --- a/loopy/schedule/linearization_checker/dependency.py +++ b/loopy/schedule/linearization_checker/dependency.py @@ -215,7 +215,7 @@ def create_dependency_constraint( """ - from linearization_checker.sched_check_utils import ( + from loopy.schedule.linearization_checker.sched_check_utils import ( make_islvars_with_marker, append_apostrophes, add_dims_to_isl_set, @@ -225,7 +225,7 @@ def create_dependency_constraint( # This function uses the dependency given to create the following constraint: # Statement [s,i,j] comes before statement [s',i',j'] iff - from linearization_checker.sched_check_utils import ( + from loopy.schedule.linearization_checker.sched_check_utils import ( list_var_names_in_isl_sets, ) if dom_inames_ordered_before is None: @@ -312,7 +312,7 @@ def create_dependency_constraint( p_tuple[p_tuple.index(iname)+1:]]) nested_inside[iname] = comes_after_iname - from linearization_checker.sched_check_utils import ( + from loopy.schedule.linearization_checker.sched_check_utils import ( get_orderings_of_length_n) # get all orderings that are explicitly allowed by priorities orders = get_orderings_of_length_n( @@ -350,7 +350,7 @@ def create_dependency_constraint( # TODO could this happen? assert False - from linearization_checker.lexicographic_order_map import ( + from loopy.schedule.linearization_checker.lexicographic_order_map import ( get_lex_order_constraint ) # TODO handle case where inames list is empty @@ -432,7 +432,7 @@ def _create_5pt_stencil_dependency_constraint( all_dom_inames_ordered=None, ): - from linearization_checker.sched_check_utils import ( + from loopy.schedule.linearization_checker.sched_check_utils import ( make_islvars_with_marker, append_apostrophes, add_dims_to_isl_set, @@ -442,7 +442,7 @@ def _create_5pt_stencil_dependency_constraint( # This function uses the dependency given to create the following constraint: # Statement [s,i,j] comes before statement [s',i',j'] iff - from linearization_checker.sched_check_utils import ( + from loopy.schedule.linearization_checker.sched_check_utils import ( list_var_names_in_isl_sets, ) if all_dom_inames_ordered is None: @@ -557,7 +557,7 @@ def create_arbitrary_dependency_constraint( # TODO test after switching primes to before vars - from linearization_checker.sched_check_utils import ( + from loopy.schedule.linearization_checker.sched_check_utils import ( make_islvars_with_marker, #append_apostrophes, append_marker_to_strings, @@ -568,7 +568,7 @@ def create_arbitrary_dependency_constraint( # This function uses the constraint given to create the following map: # Statement [s,i,j] comes before statement [s',i',j'] iff - from linearization_checker.sched_check_utils import ( + from loopy.schedule.linearization_checker.sched_check_utils import ( list_var_names_in_isl_sets, ) if all_dom_inames_ordered is None: @@ -719,12 +719,12 @@ def create_dependencies_from_legacy_knl(knl): """ # Introduce SAME dep for set of shared, non-concurrent inames - from linearization_checker.sched_check_utils import ( + from loopy.schedule.linearization_checker.sched_check_utils import ( get_concurrent_inames, get_all_nonconcurrent_insn_iname_subsets, get_sched_item_ids_within_inames, ) - from linearization_checker.schedule import LexScheduleStatement + from loopy.schedule.linearization_checker.schedule import LexScheduleStatement dt = DependencyType conc_inames, non_conc_inames = get_concurrent_inames(knl) statement_dep_sets = [] @@ -854,7 +854,7 @@ def 
get_dependency_maps( from loopy.schedule import Barrier, RunInstruction for sched_item in schedule_items: if isinstance(sched_item, (RunInstruction, Barrier)): - from linearization_checker.sched_check_utils import ( + from loopy.schedule.linearization_checker.sched_check_utils import ( _get_insn_id_from_sched_item, ) lp_insn_id = _get_insn_id_from_sched_item(sched_item) @@ -865,7 +865,7 @@ def get_dependency_maps( lp_insn_id_to_lex_sched_id[sched_item] = next_sid next_sid += 1 - from linearization_checker.sched_check_utils import ( + from loopy.schedule.linearization_checker.sched_check_utils import ( get_concurrent_inames, ) conc_inames, non_conc_inames = get_concurrent_inames(knl) diff --git a/loopy/schedule/linearization_checker/example_dependency_checking.py b/loopy/schedule/linearization_checker/example_dependency_checking.py index 0551eb665..1efd3e6ac 100644 --- a/loopy/schedule/linearization_checker/example_dependency_checking.py +++ b/loopy/schedule/linearization_checker/example_dependency_checking.py @@ -1,20 +1,20 @@ import loopy as lp -from linearization_checker.dependency import ( # noqa +from loopy.schedule.linearization_checker.dependency import ( # noqa StatementPairDependencySet, DependencyType as dt, create_dependency_constraint, ) -from linearization_checker.lexicographic_order_map import ( +from loopy.schedule.linearization_checker.lexicographic_order_map import ( create_lex_order_map, get_statement_ordering_map, ) -from linearization_checker.sched_check_utils import ( +from loopy.schedule.linearization_checker.sched_check_utils import ( prettier_map_string as pmap, append_apostrophes, create_explicit_map_from_tuples, get_isl_space, ) -from linearization_checker.schedule import LexScheduleStatement +from loopy.schedule.linearization_checker.schedule import LexScheduleStatement # make example kernel @@ -142,7 +142,7 @@ aligned_constraint_map = constraint_map.align_params(SIO_valid.space) # align in_ dims import islpy as isl -from linearization_checker.sched_check_utils import ( +from loopy.schedule.linearization_checker.sched_check_utils import ( reorder_dims_by_name, ) SIO_valid_in_names = SIO_valid.space.get_var_names(isl.dim_type.in_) diff --git a/loopy/schedule/linearization_checker/example_lex_map_creation.py b/loopy/schedule/linearization_checker/example_lex_map_creation.py index 2a5dd352a..bb56ca4b0 100644 --- a/loopy/schedule/linearization_checker/example_lex_map_creation.py +++ b/loopy/schedule/linearization_checker/example_lex_map_creation.py @@ -1,8 +1,8 @@ -from linearization_checker.lexicographic_order_map import ( +from loopy.schedule.linearization_checker.lexicographic_order_map import ( get_statement_ordering_map, create_lex_order_map, ) -from linearization_checker.sched_check_utils import ( +from loopy.schedule.linearization_checker.sched_check_utils import ( create_explicit_map_from_tuples, get_isl_space, prettier_map_string as pmap, diff --git a/loopy/schedule/linearization_checker/example_pairwise_schedule_validity.py b/loopy/schedule/linearization_checker/example_pairwise_schedule_validity.py index d2e133271..5aca8934e 100644 --- a/loopy/schedule/linearization_checker/example_pairwise_schedule_validity.py +++ b/loopy/schedule/linearization_checker/example_pairwise_schedule_validity.py @@ -1,9 +1,9 @@ import loopy as lp import numpy as np -from linearization_checker.sched_check_utils import ( +from loopy.schedule.linearization_checker.sched_check_utils import ( create_graph_from_pairs, ) -from linearization_checker.dependency import ( +from 
loopy.schedule.linearization_checker.dependency import ( get_dependency_maps, ) from loopy.kernel import KernelState diff --git a/loopy/schedule/linearization_checker/example_wave_equation.py b/loopy/schedule/linearization_checker/example_wave_equation.py index b5a496c00..fdffd6a6d 100644 --- a/loopy/schedule/linearization_checker/example_wave_equation.py +++ b/loopy/schedule/linearization_checker/example_wave_equation.py @@ -7,18 +7,18 @@ import numpy as np import islpy as isl #from loopy.kernel_stat_collector import KernelStatCollector #from loopy.kernel_stat_collector import KernelStatOptions as kso # noqa -from linearization_checker.sched_check_utils import ( +from loopy.schedule.linearization_checker.sched_check_utils import ( prettier_map_string, reorder_dims_by_name, append_apostrophes, append_marker_to_isl_map_var_names, ) -from linearization_checker.dependency import ( +from loopy.schedule.linearization_checker.dependency import ( create_arbitrary_dependency_constraint, ) from dependency import _create_5pt_stencil_dependency_constraint -from linearization_checker.schedule import LexSchedule -from linearization_checker.lexicographic_order_map import ( +from loopy.schedule.linearization_checker.schedule import LexSchedule +from loopy.schedule.linearization_checker.lexicographic_order_map import ( get_statement_ordering_map, ) @@ -292,7 +292,7 @@ if not sched_is_valid: print("loop priority known:") print(preprocessed_knl.loop_priority) """ - from linearization_checker.sched_check_utils import ( + from loopy.schedule.linearization_checker.sched_check_utils import ( get_concurrent_inames, ) conc_inames, non_conc_inames = get_concurrent_inames(scheduled_knl) @@ -579,7 +579,7 @@ if not sched_is_valid: print("loop priority known:") print(preprocessed_knl.loop_priority) """ - from linearization_checker.sched_check_utils import ( + from loopy.schedule.linearization_checker.sched_check_utils import ( get_concurrent_inames, ) conc_inames, non_conc_inames = get_concurrent_inames(scheduled_knl) diff --git a/loopy/schedule/linearization_checker/lexicographic_order_map.py b/loopy/schedule/linearization_checker/lexicographic_order_map.py index 870f96d7d..fe23ef4ed 100644 --- a/loopy/schedule/linearization_checker/lexicographic_order_map.py +++ b/loopy/schedule/linearization_checker/lexicographic_order_map.py @@ -114,7 +114,7 @@ def create_lex_order_map( if before_names is None: before_names = ["i%s" % (i) for i in range(n_dims)] if after_names is None: - from linearization_checker.sched_check_utils import ( + from loopy.schedule.linearization_checker.sched_check_utils import ( append_marker_to_strings, ) after_names = append_marker_to_strings(before_names, marker="_") diff --git a/loopy/schedule/linearization_checker/schedule.py b/loopy/schedule/linearization_checker/schedule.py index 053180d60..3ea9b884b 100644 --- a/loopy/schedule/linearization_checker/schedule.py +++ b/loopy/schedule/linearization_checker/schedule.py @@ -227,7 +227,7 @@ class LexSchedule(object): # sometimes be able to skip increment, but it's not hurting anything # TODO might not need this increment period? 
elif isinstance(sched_item, (RunInstruction, Barrier)): - from linearization_checker.sched_check_utils import ( + from loopy.schedule.linearization_checker.sched_check_utils import ( _get_insn_id_from_sched_item, ) lp_insn_id = _get_insn_id_from_sched_item(sched_item) @@ -367,12 +367,12 @@ class LexSchedule(object): """ - from linearization_checker.sched_check_utils import ( + from loopy.schedule.linearization_checker.sched_check_utils import ( create_symbolic_isl_map_from_tuples, add_dims_to_isl_set ) - from linearization_checker.sched_check_utils import ( + from loopy.schedule.linearization_checker.sched_check_utils import ( list_var_names_in_isl_sets, ) if dom_inames_ordered_before is None: @@ -385,7 +385,7 @@ class LexSchedule(object): # create an isl space # {('statement', used in >=1 statement domain>) -> # (lexicographic ordering dims)} - from linearization_checker.sched_check_utils import get_isl_space + from loopy.schedule.linearization_checker.sched_check_utils import get_isl_space params_sched = [self.unused_param_name] out_names_sched = self.get_lex_var_names() @@ -450,7 +450,7 @@ class LexSchedule(object): lexocigraphically greater. """ - from linearization_checker.lexicographic_order_map import ( + from loopy.schedule.linearization_checker.lexicographic_order_map import ( create_lex_order_map, ) n_dims = self.max_lex_dims() -- GitLab From 2947bab1b8a836bdc2d79da11b038c2e6e4762f8 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 2 Mar 2020 03:37:20 -0600 Subject: [PATCH 188/415] moved examples into example dir (will be moved elsewhere eventually) --- .../{ => examples}/example_dependency_checking.py | 0 .../{ => examples}/example_lex_map_creation.py | 0 .../{ => examples}/example_pairwise_schedule_validity.py | 0 .../linearization_checker/{ => examples}/example_wave_equation.py | 0 4 files changed, 0 insertions(+), 0 deletions(-) rename loopy/schedule/linearization_checker/{ => examples}/example_dependency_checking.py (100%) rename loopy/schedule/linearization_checker/{ => examples}/example_lex_map_creation.py (100%) rename loopy/schedule/linearization_checker/{ => examples}/example_pairwise_schedule_validity.py (100%) rename loopy/schedule/linearization_checker/{ => examples}/example_wave_equation.py (100%) diff --git a/loopy/schedule/linearization_checker/example_dependency_checking.py b/loopy/schedule/linearization_checker/examples/example_dependency_checking.py similarity index 100% rename from loopy/schedule/linearization_checker/example_dependency_checking.py rename to loopy/schedule/linearization_checker/examples/example_dependency_checking.py diff --git a/loopy/schedule/linearization_checker/example_lex_map_creation.py b/loopy/schedule/linearization_checker/examples/example_lex_map_creation.py similarity index 100% rename from loopy/schedule/linearization_checker/example_lex_map_creation.py rename to loopy/schedule/linearization_checker/examples/example_lex_map_creation.py diff --git a/loopy/schedule/linearization_checker/example_pairwise_schedule_validity.py b/loopy/schedule/linearization_checker/examples/example_pairwise_schedule_validity.py similarity index 100% rename from loopy/schedule/linearization_checker/example_pairwise_schedule_validity.py rename to loopy/schedule/linearization_checker/examples/example_pairwise_schedule_validity.py diff --git a/loopy/schedule/linearization_checker/example_wave_equation.py b/loopy/schedule/linearization_checker/examples/example_wave_equation.py similarity index 100% rename from 
loopy/schedule/linearization_checker/example_wave_equation.py rename to loopy/schedule/linearization_checker/examples/example_wave_equation.py -- GitLab From ed82aa7abfac257d5ec5d673ef4bfe8346575f73 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 2 Mar 2020 03:53:46 -0600 Subject: [PATCH 189/415] move linearization_checker tests into loopy/test dir --- .../test/test_invalid_scheds.py | 164 ------------------ .../test_linearization_checker.py | 162 +++++++++++++++-- 2 files changed, 149 insertions(+), 177 deletions(-) delete mode 100644 loopy/schedule/linearization_checker/test/test_invalid_scheds.py rename loopy/schedule/linearization_checker/test/test_valid_scheds.py => test/test_linearization_checker.py (69%) diff --git a/loopy/schedule/linearization_checker/test/test_invalid_scheds.py b/loopy/schedule/linearization_checker/test/test_invalid_scheds.py deleted file mode 100644 index 41051b867..000000000 --- a/loopy/schedule/linearization_checker/test/test_invalid_scheds.py +++ /dev/null @@ -1,164 +0,0 @@ -from __future__ import division, print_function - -__copyright__ = "Copyright (C) 2018 James Stevens" - -__license__ = """ -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
-""" - -import sys -from pyopencl.tools import ( # noqa - pytest_generate_tests_for_pyopencl - as pytest_generate_tests) -import loopy as lp -from loopy.kernel import KernelState -from loopy import ( - preprocess_kernel, - get_one_scheduled_kernel, -) - - -def test_invalid_prioritiy_detection(): - ref_knl = lp.make_kernel( - [ - "{[h]: 0<=h acc = 0 - for h,i,j,k - acc = acc + h + i + j + k - end - """, - name="priorities", - assumptions="ni,nj,nk,nh >= 1", - lang_version=(2018, 2) - ) - - # no error: - knl0 = lp.prioritize_loops(ref_knl, "h,i") - knl0 = lp.prioritize_loops(ref_knl, "i,j") - knl0 = lp.prioritize_loops(knl0, "j,k") - - unprocessed_knl = knl0.copy() - - deps_and_domains = lp.get_statement_pair_dependency_sets_from_legacy_knl( - unprocessed_knl) - if hasattr(lp, "add_dependencies_v2"): - knl0 = lp.add_dependencies_v2(knl0, deps_and_domains) - - # get a schedule to check - if knl0.state < KernelState.PREPROCESSED: - knl0 = preprocess_kernel(knl0) - knl0 = get_one_scheduled_kernel(knl0) - schedule_items = knl0.schedule - - sched_is_valid = lp.check_schedule_validity( - unprocessed_knl, deps_and_domains, schedule_items) - assert sched_is_valid - - # no error: - knl1 = lp.prioritize_loops(ref_knl, "h,i,k") - knl1 = lp.prioritize_loops(knl1, "h,j,k") - - unprocessed_knl = knl1.copy() - - deps_and_domains = lp.get_statement_pair_dependency_sets_from_legacy_knl( - unprocessed_knl) - if hasattr(lp, "add_dependencies_v2"): - knl1 = lp.add_dependencies_v2(knl1, deps_and_domains) - - # get a schedule to check - if knl1.state < KernelState.PREPROCESSED: - knl1 = preprocess_kernel(knl1) - knl1 = get_one_scheduled_kernel(knl1) - schedule_items = knl1.schedule - - sched_is_valid = lp.check_schedule_validity( - unprocessed_knl, deps_and_domains, schedule_items) - assert sched_is_valid - - # error (cycle): - knl2 = lp.prioritize_loops(ref_knl, "h,i,j") - knl2 = lp.prioritize_loops(knl2, "j,k") - try: - if hasattr(lp, "constrain_loop_nesting"): - knl2 = lp.constrain_loop_nesting(knl2, "k,i") - else: - knl2 = lp.prioritize_loops(knl2, "k,i") - - unprocessed_knl = knl2.copy() - - deps_and_domains = lp.get_statement_pair_dependency_sets_from_legacy_knl( - unprocessed_knl) - - # get a schedule to check - if knl2.state < KernelState.PREPROCESSED: - knl2 = preprocess_kernel(knl2) - knl2 = get_one_scheduled_kernel(knl2) - schedule_items = knl2.schedule - - sched_is_valid = lp.check_schedule_validity( - unprocessed_knl, deps_and_domains, schedule_items) - # should raise error - assert False - except ValueError as e: - if hasattr(lp, "constrain_loop_nesting"): - assert "cycle detected" in str(e) - else: - assert "invalid priorities" in str(e) - - # error (inconsistent priorities): - knl3 = lp.prioritize_loops(ref_knl, "h,i,j,k") - try: - if hasattr(lp, "constrain_loop_nesting"): - knl3 = lp.constrain_loop_nesting(knl3, "h,j,i,k") - else: - knl3 = lp.prioritize_loops(knl3, "h,j,i,k") - - unprocessed_knl = knl3.copy() - - deps_and_domains = lp.get_statement_pair_dependency_sets_from_legacy_knl( - unprocessed_knl) - - # get a schedule to check - if knl3.state < KernelState.PREPROCESSED: - knl3 = preprocess_kernel(knl3) - knl3 = get_one_scheduled_kernel(knl3) - schedule_items = knl3.schedule - - sched_is_valid = lp.check_schedule_validity( - unprocessed_knl, deps_and_domains, schedule_items) - # should raise error - assert False - except ValueError as e: - if hasattr(lp, "constrain_loop_nesting"): - assert "cycle detected" in str(e) - else: - assert "invalid priorities" in str(e) - - -if __name__ == 
"__main__": - if len(sys.argv) > 1: - exec(sys.argv[1]) - else: - from pytest import main - main([__file__]) diff --git a/loopy/schedule/linearization_checker/test/test_valid_scheds.py b/test/test_linearization_checker.py similarity index 69% rename from loopy/schedule/linearization_checker/test/test_valid_scheds.py rename to test/test_linearization_checker.py index 56bfe1902..c2a914668 100644 --- a/loopy/schedule/linearization_checker/test/test_valid_scheds.py +++ b/test/test_linearization_checker.py @@ -1,6 +1,6 @@ from __future__ import division, print_function -__copyright__ = "Copyright (C) 2018 James Stevens" +__copyright__ = "Copyright (C) 2019 James Stevens" __license__ = """ Permission is hereby granted, free of charge, to any person obtaining a copy @@ -22,20 +22,32 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ +import six # noqa: F401 import sys -from pyopencl.tools import ( # noqa - pytest_generate_tests_for_pyopencl - as pytest_generate_tests) -import loopy as lp import numpy as np +import loopy as lp +from pyopencl.tools import ( # noqa + pytest_generate_tests_for_pyopencl + as pytest_generate_tests) +from loopy.version import LOOPY_USE_LANGUAGE_VERSION_2018_2 # noqa +import logging from loopy.kernel import KernelState from loopy import ( preprocess_kernel, get_one_scheduled_kernel, ) +logger = logging.getLogger(__name__) + +try: + import faulthandler +except ImportError: + pass +else: + faulthandler.enable() + -def test_loop_prioritization(): +def test_linearization_checker_with_loop_prioritization(): knl = lp.make_kernel( [ "{[i]: 0<=i {[i]: 0<=i acc = 0 + for h,i,j,k + acc = acc + h + i + j + k + end + """, + name="priorities", + assumptions="ni,nj,nk,nh >= 1", + lang_version=(2018, 2) + ) + + # no error: + knl0 = lp.prioritize_loops(ref_knl, "h,i") + knl0 = lp.prioritize_loops(ref_knl, "i,j") + knl0 = lp.prioritize_loops(knl0, "j,k") + + unprocessed_knl = knl0.copy() + + deps_and_domains = lp.get_statement_pair_dependency_sets_from_legacy_knl( + unprocessed_knl) + if hasattr(lp, "add_dependencies_v2"): + knl0 = lp.add_dependencies_v2(knl0, deps_and_domains) + + # get a schedule to check + if knl0.state < KernelState.PREPROCESSED: + knl0 = preprocess_kernel(knl0) + knl0 = get_one_scheduled_kernel(knl0) + schedule_items = knl0.schedule + + sched_is_valid = lp.check_schedule_validity( + unprocessed_knl, deps_and_domains, schedule_items) + assert sched_is_valid + + # no error: + knl1 = lp.prioritize_loops(ref_knl, "h,i,k") + knl1 = lp.prioritize_loops(knl1, "h,j,k") + + unprocessed_knl = knl1.copy() + + deps_and_domains = lp.get_statement_pair_dependency_sets_from_legacy_knl( + unprocessed_knl) + if hasattr(lp, "add_dependencies_v2"): + knl1 = lp.add_dependencies_v2(knl1, deps_and_domains) + + # get a schedule to check + if knl1.state < KernelState.PREPROCESSED: + knl1 = preprocess_kernel(knl1) + knl1 = get_one_scheduled_kernel(knl1) + schedule_items = knl1.schedule + + sched_is_valid = lp.check_schedule_validity( + unprocessed_knl, deps_and_domains, schedule_items) + assert sched_is_valid + + # error (cycle): + knl2 = lp.prioritize_loops(ref_knl, "h,i,j") + knl2 = lp.prioritize_loops(knl2, "j,k") + try: + if hasattr(lp, "constrain_loop_nesting"): + knl2 = lp.constrain_loop_nesting(knl2, "k,i") + else: + knl2 = lp.prioritize_loops(knl2, "k,i") + + unprocessed_knl = knl2.copy() + + deps_and_domains = lp.get_statement_pair_dependency_sets_from_legacy_knl( + unprocessed_knl) + + # get a schedule to check + if knl2.state < 
KernelState.PREPROCESSED: + knl2 = preprocess_kernel(knl2) + knl2 = get_one_scheduled_kernel(knl2) + schedule_items = knl2.schedule + + sched_is_valid = lp.check_schedule_validity( + unprocessed_knl, deps_and_domains, schedule_items) + # should raise error + assert False + except ValueError as e: + if hasattr(lp, "constrain_loop_nesting"): + assert "cycle detected" in str(e) + else: + assert "invalid priorities" in str(e) + + # error (inconsistent priorities): + knl3 = lp.prioritize_loops(ref_knl, "h,i,j,k") + try: + if hasattr(lp, "constrain_loop_nesting"): + knl3 = lp.constrain_loop_nesting(knl3, "h,j,i,k") + else: + knl3 = lp.prioritize_loops(knl3, "h,j,i,k") + + unprocessed_knl = knl3.copy() + + deps_and_domains = lp.get_statement_pair_dependency_sets_from_legacy_knl( + unprocessed_knl) + + # get a schedule to check + if knl3.state < KernelState.PREPROCESSED: + knl3 = preprocess_kernel(knl3) + knl3 = get_one_scheduled_kernel(knl3) + schedule_items = knl3.schedule + + sched_is_valid = lp.check_schedule_validity( + unprocessed_knl, deps_and_domains, schedule_items) + # should raise error + assert False + except ValueError as e: + if hasattr(lp, "constrain_loop_nesting"): + assert "cycle detected" in str(e) + else: + assert "invalid priorities" in str(e) + + if __name__ == "__main__": if len(sys.argv) > 1: exec(sys.argv[1]) else: from pytest import main main([__file__]) + +# vim: foldmethod=marker -- GitLab From e1213c897becddc994cc81f24d5ccc1ab4e121f8 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 2 Mar 2020 04:24:32 -0600 Subject: [PATCH 190/415] renoved two examples --- .../examples/example_dependency_checking.py | 189 ------------------ .../examples/example_lex_map_creation.py | 43 ---- 2 files changed, 232 deletions(-) delete mode 100644 loopy/schedule/linearization_checker/examples/example_dependency_checking.py delete mode 100644 loopy/schedule/linearization_checker/examples/example_lex_map_creation.py diff --git a/loopy/schedule/linearization_checker/examples/example_dependency_checking.py b/loopy/schedule/linearization_checker/examples/example_dependency_checking.py deleted file mode 100644 index 1efd3e6ac..000000000 --- a/loopy/schedule/linearization_checker/examples/example_dependency_checking.py +++ /dev/null @@ -1,189 +0,0 @@ -import loopy as lp -from loopy.schedule.linearization_checker.dependency import ( # noqa - StatementPairDependencySet, - DependencyType as dt, - create_dependency_constraint, -) -from loopy.schedule.linearization_checker.lexicographic_order_map import ( - create_lex_order_map, - get_statement_ordering_map, -) -from loopy.schedule.linearization_checker.sched_check_utils import ( - prettier_map_string as pmap, - append_apostrophes, - create_explicit_map_from_tuples, - get_isl_space, -) -from loopy.schedule.linearization_checker.schedule import LexScheduleStatement - - -# make example kernel -knl = lp.make_kernel( - "{[i,j]: 0<=i,j<2}", - [ - "a[i,j] = b[i,j] {id=0}", - "a[i,j] = a[i,j] + 1 {id=1,dep=0}", - ], - name="example", - ) -knl = lp.tag_inames(knl, {"i": "l.0"}) -print("Kernel:") -print(knl) - -inames = ['i', 'j'] -statement_var = 's' -unused_param_name = 'unused' - -# example sched: -print("-"*80) - -# i is parallel, suppose we want to enforce the following: -# for a given i, statement 0 happens before statement 1 - -params_sched = ['p0', 'p1', unused_param_name] -in_names_sched = [statement_var]+inames -out_names_sched = ['l0', 'l1'] -sched_space = get_isl_space(params_sched, in_names_sched, out_names_sched) - -example_sched_valid = 
create_explicit_map_from_tuples( - [ - ((0, 0, 0), (0, 0)), - ((0, 1, 0), (0, 0)), - ((1, 0, 0), (0, 1)), - ((1, 1, 0), (0, 1)), - ((0, 0, 1), (1, 0)), - ((0, 1, 1), (1, 0)), - ((1, 0, 1), (1, 1)), - ((1, 1, 1), (1, 1)), - ], - sched_space, - ) -print("example sched (valid):") -print(pmap(example_sched_valid)) - -example_sched_invalid = create_explicit_map_from_tuples( - [ - ((0, 0, 0), (0, 0)), - ((0, 1, 0), (1, 1)), # these two are out of order, violation - ((1, 0, 0), (0, 1)), - ((1, 1, 0), (0, 1)), - ((0, 0, 1), (1, 0)), - ((0, 1, 1), (1, 0)), - ((1, 0, 1), (1, 1)), - ((1, 1, 1), (0, 0)), # these two are out of order, violation - ], - sched_space, - ) -print("example sched (invalid):") -print(pmap(example_sched_invalid)) - -# Lexicographic order map- map each tuple to all tuples occuring later -print("-"*80) -n_dims = 2 -lex_order_map = create_lex_order_map(n_dims) -print("lexicographic order map:") -print(pmap(lex_order_map)) - -# Statement instance ordering (valid sched) -print("-"*80) -SIO_valid = get_statement_ordering_map( - example_sched_valid, lex_order_map) -print("statement instance ordering (valid_sched):") -print(pmap(SIO_valid)) - -# Statement instance ordering (invalid sched) -print("-"*80) -SIO_invalid = get_statement_ordering_map( - example_sched_invalid, lex_order_map) -print("statement instance ordering (invalid_sched):") -print(pmap(SIO_invalid)) - -# Dependencies and constraints: -print("-"*80) - -# make some dependencies manually: - -s0 = LexScheduleStatement(insn_id="0", within_inames={"i", "j"}) -s1 = LexScheduleStatement(insn_id="1", within_inames={"i", "j"}) -insnid_to_int_sid = {"0": 0, "1": 1} - -dom_before = knl.get_inames_domain(s0.within_inames) -dom_after = knl.get_inames_domain(s1.within_inames) - -statement_pair_dep_set = StatementPairDependencySet( - s0, s1, {dt.SAME: ["i", "j"]}, dom_before, dom_after) -# SAME({i,j}) means: -# insn0{i,j} happens before insn1{i',j'} iff i = i' and j = j' - -print("Statement pair dependency set:") -print(statement_pair_dep_set) - -loop_priority = None -constraint_map = create_dependency_constraint( - statement_pair_dep_set, - loop_priority, - insnid_to_int_sid, - unused_param_name, - statement_var, - #all_dom_inames_ordered=inames, # not necessary since algin spaces below - ) -print("constraint map (before aligning space):") -print(pmap(constraint_map)) - -assert SIO_valid.space == SIO_invalid.space - -# align constraint map spaces to match sio so we can compare them - -print("constraint map space (before aligning):") -print(constraint_map.space) - -# align params -aligned_constraint_map = constraint_map.align_params(SIO_valid.space) - -# align in_ dims -import islpy as isl -from loopy.schedule.linearization_checker.sched_check_utils import ( - reorder_dims_by_name, -) -SIO_valid_in_names = SIO_valid.space.get_var_names(isl.dim_type.in_) -aligned_constraint_map = reorder_dims_by_name( - aligned_constraint_map, - isl.dim_type.in_, - SIO_valid_in_names, - add_missing=False, - new_names_are_permutation_only=True, - ) - -# align out dims -aligned_constraint_map = reorder_dims_by_name( - aligned_constraint_map, - isl.dim_type.out, - append_apostrophes(SIO_valid_in_names), - # TODO SIO out names are only pretending to have apostrophes; confusing - add_missing=False, - new_names_are_permutation_only=True, - ) - -assert aligned_constraint_map.space == SIO_valid.space -assert ( - aligned_constraint_map.space.get_var_names(isl.dim_type.in_) - == SIO_valid.space.get_var_names(isl.dim_type.in_)) -assert ( - 
aligned_constraint_map.space.get_var_names(isl.dim_type.out) - == append_apostrophes(SIO_valid.space.get_var_names(isl.dim_type.out))) -assert ( - aligned_constraint_map.space.get_var_names(isl.dim_type.param) - == SIO_valid.space.get_var_names(isl.dim_type.param)) - -print("constraint map space (after aligning):") -print(aligned_constraint_map.space) -print("constraint map (after aligning space):") -print(pmap(aligned_constraint_map)) -print("SIO space:") -print(SIO_valid.space) - -print("is valid sched valid?") -print(aligned_constraint_map.is_subset(SIO_valid)) - -print("is invalid sched valid?") -print(aligned_constraint_map.is_subset(SIO_invalid)) diff --git a/loopy/schedule/linearization_checker/examples/example_lex_map_creation.py b/loopy/schedule/linearization_checker/examples/example_lex_map_creation.py deleted file mode 100644 index bb56ca4b0..000000000 --- a/loopy/schedule/linearization_checker/examples/example_lex_map_creation.py +++ /dev/null @@ -1,43 +0,0 @@ -from loopy.schedule.linearization_checker.lexicographic_order_map import ( - get_statement_ordering_map, - create_lex_order_map, -) -from loopy.schedule.linearization_checker.sched_check_utils import ( - create_explicit_map_from_tuples, - get_isl_space, - prettier_map_string as pmap, -) - -# Lexicographic order map- map each tuple to all tuples occuring later - -n_dims = 2 -lex_order_map = create_lex_order_map(n_dims) -print("lexicographic order map:") -print(pmap(lex_order_map)) - -# Example *explicit* schedule (map statement instances to lex time) - -param_names_sched = [] -in_names_sched = ["s"] -out_names_sched = ["i", "j"] -sched_space = get_isl_space(param_names_sched, in_names_sched, out_names_sched) -sched_explicit = create_explicit_map_from_tuples( - [ - ((0,), (0, 0)), - ((1,), (0, 1)), - ((2,), (1, 0)), - ((3,), (1, 1)), - ], - sched_space, - ) -print("example explicit sched:") -print(pmap(sched_explicit)) - -# Statement instance ordering: -# map each statement instance to all statement instances that occur later -# S -> L -> S^-1 - -sio = get_statement_ordering_map( - sched_explicit, lex_order_map) -print("Statement instance ordering:") -print(pmap(sio)) -- GitLab From 375461c3e300ba3151abe4e225ad86718acd2fee Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 2 Mar 2020 04:24:48 -0600 Subject: [PATCH 191/415] fixed flake8 issues --- .../linearization_checker/dependency.py | 11 ++-- .../examples/example_wave_equation.py | 66 ++++++++++--------- .../linearization_checker/schedule.py | 4 +- 3 files changed, 43 insertions(+), 38 deletions(-) diff --git a/loopy/schedule/linearization_checker/dependency.py b/loopy/schedule/linearization_checker/dependency.py index d17c7d299..71b7a7ec6 100644 --- a/loopy/schedule/linearization_checker/dependency.py +++ b/loopy/schedule/linearization_checker/dependency.py @@ -221,13 +221,11 @@ def create_dependency_constraint( add_dims_to_isl_set, reorder_dims_by_name, create_new_isl_set_with_primes, + list_var_names_in_isl_sets, ) # This function uses the dependency given to create the following constraint: # Statement [s,i,j] comes before statement [s',i',j'] iff - from loopy.schedule.linearization_checker.sched_check_utils import ( - list_var_names_in_isl_sets, - ) if dom_inames_ordered_before is None: dom_inames_ordered_before = list_var_names_in_isl_sets( [statement_dep_set.dom_before]) @@ -350,11 +348,10 @@ def create_dependency_constraint( # TODO could this happen? 
assert False - from loopy.schedule.linearization_checker.lexicographic_order_map import ( - get_lex_order_constraint - ) + from loopy.schedule.linearization_checker import ( + lexicographic_order_map as lom) # TODO handle case where inames list is empty - constraint_set = get_lex_order_constraint( + constraint_set = lom.get_lex_order_constraint( islvars, inames_list_nest_ordered_prime, inames_list_nest_ordered, diff --git a/loopy/schedule/linearization_checker/examples/example_wave_equation.py b/loopy/schedule/linearization_checker/examples/example_wave_equation.py index fdffd6a6d..b170f9b93 100644 --- a/loopy/schedule/linearization_checker/examples/example_wave_equation.py +++ b/loopy/schedule/linearization_checker/examples/example_wave_equation.py @@ -10,13 +10,11 @@ import islpy as isl from loopy.schedule.linearization_checker.sched_check_utils import ( prettier_map_string, reorder_dims_by_name, - append_apostrophes, append_marker_to_isl_map_var_names, ) from loopy.schedule.linearization_checker.dependency import ( create_arbitrary_dependency_constraint, ) -from dependency import _create_5pt_stencil_dependency_constraint from loopy.schedule.linearization_checker.schedule import LexSchedule from loopy.schedule.linearization_checker.lexicographic_order_map import ( get_statement_ordering_map, @@ -28,7 +26,8 @@ from loopy.schedule.linearization_checker.lexicographic_order_map import ( # mine, works: # "{[x,t]: 1<=x {[ix, it]: 1<=ix {[statement, ix, it] -> [statement'=statement, tx, tt, tparity, itt, itx]: " + "[nx,nt,unused] -> {[statement, ix, it] -> " + "[statement'=statement, tx, tt, tparity, itt, itx]: " "16*(tx - tt + tparity) + itx - itt = ix - it and " "16*(tx + tt) + itt + itx = ix + it and " "0<=tparity<2 and 0 <= itx - itt < 16 and 0 <= itt+itx < 16}") m2_prime = isl.BasicMap( - "[nx,nt,unused] -> {[statement, ix, it] -> [statement'=statement, tx', tt', tparity', itt', itx']: " + "[nx,nt,unused] -> {[statement, ix, it] -> " + "[statement'=statement, tx', tt', tparity', itt', itx']: " "16*(tx' - tt' + tparity') + itx' - itt' = ix - it and " "16*(tx' + tt') + itt' + itx' = ix + it and " "0<=tparity'<2 and 0 <= itx' - itt' < 16 and 0 <= itt'+itx' < 16}") @@ -348,17 +353,20 @@ m = isl.BasicMap( "16*(tx + tt + tparity) + itt + itx = ix + it and " "0<=tparity<2 and 0 <= itx - itt < 16 and 0 <= itt+itx < 16}") m2 = isl.BasicMap( - "[nx,nt,unused] -> {[statement, ix, it] -> [statement'=statement, tx, tt, tparity, itt, itx]: " + "[nx,nt,unused] -> {[statement, ix, it] -> " + "[statement'=statement, tx, tt, tparity, itt, itx]: " "16*(tx - tt) + itx - itt = ix - it and " "16*(tx + tt + tparity) + itt + itx = ix + it and " "0<=tparity<2 and 0 <= itx - itt < 16 and 0 <= itt+itx < 16}") #m2_primes_after = isl.BasicMap( -# "[nx,nt,unused] -> {[statement, ix, it] -> [statement'=statement, tx', tt', tparity', itt', itx']: " +# "[nx,nt,unused] -> {[statement, ix, it] -> " +# "[statement'=statement, tx', tt', tparity', itt', itx']: " # "16*(tx' - tt') + itx' - itt' = ix - it and " # "16*(tx' + tt' + tparity') + itt' + itx' = ix + it and " # "0<=tparity'<2 and 0 <= itx' - itt' < 16 and 0 <= itt'+itx' < 16}") m2_prime = isl.BasicMap( - "[nx,nt,unused] -> {[statement', ix', it'] -> [statement=statement', tx, tt, tparity, itt, itx]: " + "[nx,nt,unused] -> {[statement', ix', it'] -> " + "[statement=statement', tx, tt, tparity, itt, itx]: " "16*(tx - tt) + itx - itt = ix' - it' and " "16*(tx + tt + tparity) + itt + itx = ix' + it' and " "0<=tparity<2 and 0 <= itx - itt < 16 and 0 <= itt+itx < 16}") @@ 
-390,7 +398,7 @@ mapped_constraint_map = append_marker_to_isl_map_var_names( print("constraint_map after apply_domain:") print(prettier_map_string(mapped_constraint_map)) -statement_inames_mapped = set(["itx","itt","tt","tparity","tx"]) +statement_inames_mapped = set(["itx", "itt", "tt", "tparity", "tx"]) sid_before = 0 sid_after = 0 @@ -398,8 +406,10 @@ if knl.state < KernelState.PREPROCESSED: preprocessed_knl = preprocess_kernel(knl) else: preprocessed_knl = knl -inames_domain_before_mapped = preprocessed_knl.get_inames_domain(statement_inames_mapped) -inames_domain_after_mapped = preprocessed_knl.get_inames_domain(statement_inames_mapped) +inames_domain_before_mapped = preprocessed_knl.get_inames_domain( + statement_inames_mapped) +inames_domain_after_mapped = preprocessed_knl.get_inames_domain( + statement_inames_mapped) print("(mapped) inames_domain_before:", inames_domain_before_mapped) print("(mapped) inames_domain_after:", inames_domain_after_mapped) @@ -603,10 +613,6 @@ if not sched_is_valid: print("is sched valid? constraint map subset of SIO?") print(sched_is_valid) - - - - ''' # (U_n^{k+1}-U_n^k)/dt = C*(U_{n+1}^k-U_n^k)/dx # U_n^{k+1} = U_n^k + dt/dx*C*(U_{n+1}^k-U_n^k) diff --git a/loopy/schedule/linearization_checker/schedule.py b/loopy/schedule/linearization_checker/schedule.py index 3ea9b884b..d181065fb 100644 --- a/loopy/schedule/linearization_checker/schedule.py +++ b/loopy/schedule/linearization_checker/schedule.py @@ -385,7 +385,9 @@ class LexSchedule(object): # create an isl space # {('statement', used in >=1 statement domain>) -> # (lexicographic ordering dims)} - from loopy.schedule.linearization_checker.sched_check_utils import get_isl_space + from loopy.schedule.linearization_checker.sched_check_utils import ( + get_isl_space + ) params_sched = [self.unused_param_name] out_names_sched = self.get_lex_var_names() -- GitLab From 0ee0b6b6b21d032b817b2971858ffa1870df0932 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 2 Mar 2020 04:26:50 -0600 Subject: [PATCH 192/415] renamed examples->experimental_scripts --- .../example_pairwise_schedule_validity.py | 0 .../{examples => experimental_scripts}/example_wave_equation.py | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename loopy/schedule/linearization_checker/{examples => experimental_scripts}/example_pairwise_schedule_validity.py (100%) rename loopy/schedule/linearization_checker/{examples => experimental_scripts}/example_wave_equation.py (100%) diff --git a/loopy/schedule/linearization_checker/examples/example_pairwise_schedule_validity.py b/loopy/schedule/linearization_checker/experimental_scripts/example_pairwise_schedule_validity.py similarity index 100% rename from loopy/schedule/linearization_checker/examples/example_pairwise_schedule_validity.py rename to loopy/schedule/linearization_checker/experimental_scripts/example_pairwise_schedule_validity.py diff --git a/loopy/schedule/linearization_checker/examples/example_wave_equation.py b/loopy/schedule/linearization_checker/experimental_scripts/example_wave_equation.py similarity index 100% rename from loopy/schedule/linearization_checker/examples/example_wave_equation.py rename to loopy/schedule/linearization_checker/experimental_scripts/example_wave_equation.py -- GitLab From cc6fefb2757433cf6f32f847419be2baf065a888 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 3 Mar 2020 01:59:43 -0600 Subject: [PATCH 193/415] moved TODO --- loopy/schedule/linearization_checker/__init__.py | 4 +--- test/test_linearization_checker.py | 2 ++ 2 files changed, 3 
insertions(+), 3 deletions(-) diff --git a/loopy/schedule/linearization_checker/__init__.py b/loopy/schedule/linearization_checker/__init__.py index 3b29af170..4c9963e0b 100644 --- a/loopy/schedule/linearization_checker/__init__.py +++ b/loopy/schedule/linearization_checker/__init__.py @@ -24,7 +24,7 @@ def get_statement_pair_dependency_sets_from_legacy_knl(knl): ) # Preprocess if not already preprocessed - # note that kernels must always be preprocessed before scheduling + # note: kernels must always be preprocessed before scheduling from loopy.kernel import KernelState if knl.state < KernelState.PREPROCESSED: from loopy import preprocess_kernel @@ -37,8 +37,6 @@ def get_statement_pair_dependency_sets_from_legacy_knl(knl): return create_dependencies_from_legacy_knl(preprocessed_knl) -# TODO create a set of broken kernels to test against -# (small kernels to test a specific case) # TODO work on granularity of encapsulation, encapsulate some of this in # separate functions def check_schedule_validity( diff --git a/test/test_linearization_checker.py b/test/test_linearization_checker.py index c2a914668..a99b6f6df 100644 --- a/test/test_linearization_checker.py +++ b/test/test_linearization_checker.py @@ -475,6 +475,8 @@ def test_linearization_checker_and_invalid_prioritiy_detection(): else: assert "invalid priorities" in str(e) +# TODO create more kernels with invalid schedules to test linearization checker + if __name__ == "__main__": if len(sys.argv) > 1: -- GitLab From fbe38b4c75ff3554aa61bc6a403ec438691d907b Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 3 Mar 2020 02:00:12 -0600 Subject: [PATCH 194/415] fixed typo in docstring for StatementPairDependencySet --- loopy/schedule/linearization_checker/dependency.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/loopy/schedule/linearization_checker/dependency.py b/loopy/schedule/linearization_checker/dependency.py index 71b7a7ec6..91e0fceb8 100644 --- a/loopy/schedule/linearization_checker/dependency.py +++ b/loopy/schedule/linearization_checker/dependency.py @@ -63,7 +63,7 @@ class StatementPairDependencySet(object): .. attribute:: dom_after A :class:`islpy.BasicSet` representing the domain for the - dependee statement. + depender statement. 
""" -- GitLab From 1af9deb960a91d230816410cc0a95fd3b2062107 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 3 Mar 2020 02:04:36 -0600 Subject: [PATCH 195/415] no need to check kernel state before preprocessing since this happens inside preprocess --- .../linearization_checker/__init__.py | 26 ++++++------------- .../example_pairwise_schedule_validity.py | 3 +-- .../example_wave_equation.py | 10 ++----- 3 files changed, 11 insertions(+), 28 deletions(-) diff --git a/loopy/schedule/linearization_checker/__init__.py b/loopy/schedule/linearization_checker/__init__.py index 4c9963e0b..31add6c0a 100644 --- a/loopy/schedule/linearization_checker/__init__.py +++ b/loopy/schedule/linearization_checker/__init__.py @@ -19,21 +19,15 @@ def get_statement_pair_dependency_sets_from_legacy_knl(knl): """ - from loopy.schedule.linearization_checker.dependency import ( - create_dependencies_from_legacy_knl, - ) - # Preprocess if not already preprocessed # note: kernels must always be preprocessed before scheduling - from loopy.kernel import KernelState - if knl.state < KernelState.PREPROCESSED: - from loopy import preprocess_kernel - preprocessed_knl = preprocess_kernel(knl) - else: - preprocessed_knl = knl + from loopy import preprocess_kernel + preprocessed_knl = preprocess_kernel(knl) # Create StatementPairDependencySet(s) from kernel dependencies - + from loopy.schedule.linearization_checker.dependency import ( + create_dependencies_from_legacy_knl, + ) return create_dependencies_from_legacy_knl(preprocessed_knl) @@ -59,13 +53,9 @@ def check_schedule_validity( ) # Preprocess if not already preprocessed - # note that kernels must always be preprocessed before scheduling - from loopy.kernel import KernelState - if knl.state < KernelState.PREPROCESSED: - from loopy import preprocess_kernel - preprocessed_knl = preprocess_kernel(knl) - else: - preprocessed_knl = knl + # note: kernels must always be preprocessed before scheduling + from loopy import preprocess_kernel + preprocessed_knl = preprocess_kernel(knl) if not prohibited_var_names: prohibited_var_names = preprocessed_knl.all_inames() diff --git a/loopy/schedule/linearization_checker/experimental_scripts/example_pairwise_schedule_validity.py b/loopy/schedule/linearization_checker/experimental_scripts/example_pairwise_schedule_validity.py index 5aca8934e..98dcbfc6c 100644 --- a/loopy/schedule/linearization_checker/experimental_scripts/example_pairwise_schedule_validity.py +++ b/loopy/schedule/linearization_checker/experimental_scripts/example_pairwise_schedule_validity.py @@ -290,8 +290,7 @@ legacy_deps_and_domains = lp.get_statement_pair_dependency_sets_from_legacy_knl( unprocessed_knl) # get a schedule to check -if knl.state < KernelState.PREPROCESSED: - knl = preprocess_kernel(knl) +knl = preprocess_kernel(knl) knl = get_one_scheduled_kernel(knl) print("kernel schedueld") schedule_items = knl.schedule diff --git a/loopy/schedule/linearization_checker/experimental_scripts/example_wave_equation.py b/loopy/schedule/linearization_checker/experimental_scripts/example_wave_equation.py index b170f9b93..8d539ac46 100644 --- a/loopy/schedule/linearization_checker/experimental_scripts/example_wave_equation.py +++ b/loopy/schedule/linearization_checker/experimental_scripts/example_wave_equation.py @@ -65,10 +65,7 @@ statement_inames_premap_order = ["ix", "it"] sid_before = 0 sid_after = 0 -if knl.state < KernelState.PREPROCESSED: - preprocessed_knl = preprocess_kernel(knl) -else: - preprocessed_knl = knl +preprocessed_knl = preprocess_kernel(knl) 
inames_domain_before = preprocessed_knl.get_inames_domain(statement_inames_premap) inames_domain_after = preprocessed_knl.get_inames_domain(statement_inames_premap) print("(unmapped) inames_domain_before:", inames_domain_before) @@ -402,10 +399,7 @@ statement_inames_mapped = set(["itx", "itt", "tt", "tparity", "tx"]) sid_before = 0 sid_after = 0 -if knl.state < KernelState.PREPROCESSED: - preprocessed_knl = preprocess_kernel(knl) -else: - preprocessed_knl = knl +preprocessed_knl = preprocess_kernel(knl) inames_domain_before_mapped = preprocessed_knl.get_inames_domain( statement_inames_mapped) inames_domain_after_mapped = preprocessed_knl.get_inames_domain( -- GitLab From e92573dd90d10f95c5f9e4b8f0954f0a02491556 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 3 Mar 2020 02:24:57 -0600 Subject: [PATCH 196/415] rename get_statement_pair_dependency_sets_from_legacy_knl()->statement_pair_dep_sets_from_legacy_knl() for brevity; for clarity, change all previous confusing names for lists of StatementPairDependencySets to --- loopy/__init__.py | 4 +- .../linearization_checker/__init__.py | 8 +-- .../linearization_checker/dependency.py | 4 +- .../example_pairwise_schedule_validity.py | 11 ++-- test/test_linearization_checker.py | 62 +++++++++---------- 5 files changed, 44 insertions(+), 45 deletions(-) diff --git a/loopy/__init__.py b/loopy/__init__.py index 3200a4899..cbbb634cf 100644 --- a/loopy/__init__.py +++ b/loopy/__init__.py @@ -125,7 +125,7 @@ from loopy.type_inference import infer_unknown_types from loopy.preprocess import preprocess_kernel, realize_reduction from loopy.schedule import generate_loop_schedules, get_one_scheduled_kernel from loopy.schedule.linearization_checker import ( - get_statement_pair_dependency_sets_from_legacy_knl, + statement_pair_dep_sets_from_legacy_knl, check_schedule_validity) from loopy.statistics import (ToCountMap, CountGranularity, stringify_stats_mapping, Op, MemAccess, get_op_poly, get_op_map, get_lmem_access_poly, @@ -252,7 +252,7 @@ __all__ = [ "preprocess_kernel", "realize_reduction", "generate_loop_schedules", "get_one_scheduled_kernel", - "get_statement_pair_dependency_sets_from_legacy_knl", + "statement_pair_dep_sets_from_legacy_knl", "check_schedule_validity", "GeneratedProgram", "CodeGenerationResult", "PreambleInfo", diff --git a/loopy/schedule/linearization_checker/__init__.py b/loopy/schedule/linearization_checker/__init__.py index 31add6c0a..ba44c4ff9 100644 --- a/loopy/schedule/linearization_checker/__init__.py +++ b/loopy/schedule/linearization_checker/__init__.py @@ -1,6 +1,6 @@ -def get_statement_pair_dependency_sets_from_legacy_knl(knl): +def statement_pair_dep_sets_from_legacy_knl(knl): """Return a list of :class:`StatementPairDependySet` instances created for a :class:`loopy.LoopKernel` containing legacy depencencies. Create the new dependencies according to the following rules. 
(1) If @@ -35,7 +35,7 @@ def get_statement_pair_dependency_sets_from_legacy_knl(knl): # separate functions def check_schedule_validity( knl, - deps_and_domains, + statement_pair_dep_sets, schedule_items, prohibited_var_names=set(), verbose=False, @@ -63,7 +63,7 @@ def check_schedule_validity( if verbose: print("="*80) print("StatementDependencies w/domains:") - for dep_set in deps_and_domains: + for dep_set in statement_pair_dep_sets: print(dep_set) print(dep_set.dom_before) print(dep_set.dom_after) @@ -88,7 +88,7 @@ def check_schedule_validity( # For each dependency, create+test schedule containing pair of insns------ sched_is_valid = True - for statement_pair_dep_set in deps_and_domains: + for statement_pair_dep_set in statement_pair_dep_sets: s_before = statement_pair_dep_set.statement_before s_after = statement_pair_dep_set.statement_after dom_before = statement_pair_dep_set.dom_before diff --git a/loopy/schedule/linearization_checker/dependency.py b/loopy/schedule/linearization_checker/dependency.py index 91e0fceb8..2fb48ec2f 100644 --- a/loopy/schedule/linearization_checker/dependency.py +++ b/loopy/schedule/linearization_checker/dependency.py @@ -836,7 +836,7 @@ class DependencyInfo(object): def get_dependency_maps( - deps_and_domains, + statement_pair_dep_sets, schedule_items, # TODO always pass as strings since we only need the name? loop_priority, knl, # TODO avoid passing this in @@ -868,7 +868,7 @@ def get_dependency_maps( conc_inames, non_conc_inames = get_concurrent_inames(knl) dep_info_list = [] - for statement_pair_dep_set in deps_and_domains: + for statement_pair_dep_set in statement_pair_dep_sets: dep_constraint_map = create_dependency_constraint( statement_pair_dep_set, diff --git a/loopy/schedule/linearization_checker/experimental_scripts/example_pairwise_schedule_validity.py b/loopy/schedule/linearization_checker/experimental_scripts/example_pairwise_schedule_validity.py index 98dcbfc6c..8d3ba1469 100644 --- a/loopy/schedule/linearization_checker/experimental_scripts/example_pairwise_schedule_validity.py +++ b/loopy/schedule/linearization_checker/experimental_scripts/example_pairwise_schedule_validity.py @@ -6,7 +6,6 @@ from loopy.schedule.linearization_checker.sched_check_utils import ( from loopy.schedule.linearization_checker.dependency import ( get_dependency_maps, ) -from loopy.kernel import KernelState from loopy import ( preprocess_kernel, get_one_scheduled_kernel, @@ -286,7 +285,7 @@ if knl_choice == "loop_carried_deps": unprocessed_knl = knl.copy() -legacy_deps_and_domains = lp.get_statement_pair_dependency_sets_from_legacy_knl( +legacy_statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( unprocessed_knl) # get a schedule to check @@ -297,10 +296,10 @@ schedule_items = knl.schedule print("checking validity") sched_is_valid = lp.check_schedule_validity( - unprocessed_knl, legacy_deps_and_domains, schedule_items, verbose=True) + unprocessed_knl, legacy_statement_pair_dep_sets, schedule_items, verbose=True) """ -legacy_deps_and_domains = lp.get_statement_pair_dependency_sets_from_legacy_knl(knl) +legacy_statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl(knl) # get a schedule to check from loopy import get_one_scheduled_kernel @@ -308,7 +307,7 @@ scheduled_knl = get_one_scheduled_kernel(knl) schedule_items = scheduled_knl.schedule sched_is_valid = lp.check_schedule_validity( - knl, legacy_deps_and_domains, schedule_items, verbose=True) + knl, legacy_statement_pair_dep_sets, schedule_items, verbose=True) """ print("is sched valid? 
constraint map subset of SIO?") @@ -322,7 +321,7 @@ print("="*80) # create maps representing legacy deps # (includes bool representing result of test for dep graph edge) legacy_dep_info_list = get_dependency_maps( - legacy_deps_and_domains, + legacy_statement_pair_dep_sets, schedule_items, knl.loop_priority, knl, diff --git a/test/test_linearization_checker.py b/test/test_linearization_checker.py index a99b6f6df..5634bc989 100644 --- a/test/test_linearization_checker.py +++ b/test/test_linearization_checker.py @@ -81,10 +81,10 @@ def test_linearization_checker_with_loop_prioritization(): unprocessed_knl = knl.copy() - deps_and_domains = lp.get_statement_pair_dependency_sets_from_legacy_knl( + statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): - knl = lp.add_dependencies_v2(knl, deps_and_domains) + knl = lp.add_dependencies_v2(knl, statement_pair_dep_sets) # get a schedule to check if knl.state < KernelState.PREPROCESSED: @@ -93,7 +93,7 @@ def test_linearization_checker_with_loop_prioritization(): schedule_items = knl.schedule sched_is_valid = lp.check_schedule_validity( - unprocessed_knl, deps_and_domains, schedule_items) + unprocessed_knl, statement_pair_dep_sets, schedule_items) assert sched_is_valid @@ -118,10 +118,10 @@ def test_linearization_checker_with_matmul(): unprocessed_knl = knl.copy() - deps_and_domains = lp.get_statement_pair_dependency_sets_from_legacy_knl( + statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): - knl = lp.add_dependencies_v2(knl, deps_and_domains) + knl = lp.add_dependencies_v2(knl, statement_pair_dep_sets) # get a schedule to check if knl.state < KernelState.PREPROCESSED: @@ -130,7 +130,7 @@ def test_linearization_checker_with_matmul(): schedule_items = knl.schedule sched_is_valid = lp.check_schedule_validity( - unprocessed_knl, deps_and_domains, schedule_items) + unprocessed_knl, statement_pair_dep_sets, schedule_items) assert sched_is_valid @@ -168,10 +168,10 @@ def test_linearization_checker_with_dependent_domain(): unprocessed_knl = knl.copy() - deps_and_domains = lp.get_statement_pair_dependency_sets_from_legacy_knl( + statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): - knl = lp.add_dependencies_v2(knl, deps_and_domains) + knl = lp.add_dependencies_v2(knl, statement_pair_dep_sets) # get a schedule to check if knl.state < KernelState.PREPROCESSED: @@ -180,7 +180,7 @@ def test_linearization_checker_with_dependent_domain(): schedule_items = knl.schedule sched_is_valid = lp.check_schedule_validity( - unprocessed_knl, deps_and_domains, schedule_items) + unprocessed_knl, statement_pair_dep_sets, schedule_items) assert sched_is_valid @@ -221,10 +221,10 @@ def test_linearization_checker_with_stroud_bernstein(): unprocessed_knl = knl.copy() - deps_and_domains = lp.get_statement_pair_dependency_sets_from_legacy_knl( + statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): - knl = lp.add_dependencies_v2(knl, deps_and_domains) + knl = lp.add_dependencies_v2(knl, statement_pair_dep_sets) # get a schedule to check if knl.state < KernelState.PREPROCESSED: @@ -233,7 +233,7 @@ def test_linearization_checker_with_stroud_bernstein(): schedule_items = knl.schedule sched_is_valid = lp.check_schedule_validity( - unprocessed_knl, deps_and_domains, schedule_items) + unprocessed_knl, 
statement_pair_dep_sets, schedule_items) assert sched_is_valid @@ -257,10 +257,10 @@ def test_linearization_checker_with_nop(): unprocessed_knl = knl.copy() - deps_and_domains = lp.get_statement_pair_dependency_sets_from_legacy_knl( + statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): - knl = lp.add_dependencies_v2(knl, deps_and_domains) + knl = lp.add_dependencies_v2(knl, statement_pair_dep_sets) # get a schedule to check if knl.state < KernelState.PREPROCESSED: @@ -269,7 +269,7 @@ def test_linearization_checker_with_nop(): schedule_items = knl.schedule sched_is_valid = lp.check_schedule_validity( - unprocessed_knl, deps_and_domains, schedule_items) + unprocessed_knl, statement_pair_dep_sets, schedule_items) assert sched_is_valid @@ -303,10 +303,10 @@ def test_linearization_checker_with_multi_domain(): unprocessed_knl = knl.copy() - deps_and_domains = lp.get_statement_pair_dependency_sets_from_legacy_knl( + statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): - knl = lp.add_dependencies_v2(knl, deps_and_domains) + knl = lp.add_dependencies_v2(knl, statement_pair_dep_sets) # get a schedule to check if knl.state < KernelState.PREPROCESSED: @@ -315,7 +315,7 @@ def test_linearization_checker_with_multi_domain(): schedule_items = knl.schedule sched_is_valid = lp.check_schedule_validity( - unprocessed_knl, deps_and_domains, schedule_items) + unprocessed_knl, statement_pair_dep_sets, schedule_items) assert sched_is_valid @@ -338,10 +338,10 @@ def test_linearization_checker_with_loop_carried_deps(): unprocessed_knl = knl.copy() - deps_and_domains = lp.get_statement_pair_dependency_sets_from_legacy_knl( + statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): - knl = lp.add_dependencies_v2(knl, deps_and_domains) + knl = lp.add_dependencies_v2(knl, statement_pair_dep_sets) # get a schedule to check if knl.state < KernelState.PREPROCESSED: @@ -350,7 +350,7 @@ def test_linearization_checker_with_loop_carried_deps(): schedule_items = knl.schedule sched_is_valid = lp.check_schedule_validity( - unprocessed_knl, deps_and_domains, schedule_items) + unprocessed_knl, statement_pair_dep_sets, schedule_items) assert sched_is_valid @@ -380,10 +380,10 @@ def test_linearization_checker_and_invalid_prioritiy_detection(): unprocessed_knl = knl0.copy() - deps_and_domains = lp.get_statement_pair_dependency_sets_from_legacy_knl( + statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): - knl0 = lp.add_dependencies_v2(knl0, deps_and_domains) + knl0 = lp.add_dependencies_v2(knl0, statement_pair_dep_sets) # get a schedule to check if knl0.state < KernelState.PREPROCESSED: @@ -392,7 +392,7 @@ def test_linearization_checker_and_invalid_prioritiy_detection(): schedule_items = knl0.schedule sched_is_valid = lp.check_schedule_validity( - unprocessed_knl, deps_and_domains, schedule_items) + unprocessed_knl, statement_pair_dep_sets, schedule_items) assert sched_is_valid # no error: @@ -401,10 +401,10 @@ def test_linearization_checker_and_invalid_prioritiy_detection(): unprocessed_knl = knl1.copy() - deps_and_domains = lp.get_statement_pair_dependency_sets_from_legacy_knl( + statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): - knl1 = lp.add_dependencies_v2(knl1, 
deps_and_domains) + knl1 = lp.add_dependencies_v2(knl1, statement_pair_dep_sets) # get a schedule to check if knl1.state < KernelState.PREPROCESSED: @@ -413,7 +413,7 @@ def test_linearization_checker_and_invalid_prioritiy_detection(): schedule_items = knl1.schedule sched_is_valid = lp.check_schedule_validity( - unprocessed_knl, deps_and_domains, schedule_items) + unprocessed_knl, statement_pair_dep_sets, schedule_items) assert sched_is_valid # error (cycle): @@ -427,7 +427,7 @@ def test_linearization_checker_and_invalid_prioritiy_detection(): unprocessed_knl = knl2.copy() - deps_and_domains = lp.get_statement_pair_dependency_sets_from_legacy_knl( + statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( unprocessed_knl) # get a schedule to check @@ -437,7 +437,7 @@ def test_linearization_checker_and_invalid_prioritiy_detection(): schedule_items = knl2.schedule sched_is_valid = lp.check_schedule_validity( - unprocessed_knl, deps_and_domains, schedule_items) + unprocessed_knl, statement_pair_dep_sets, schedule_items) # should raise error assert False except ValueError as e: @@ -456,7 +456,7 @@ def test_linearization_checker_and_invalid_prioritiy_detection(): unprocessed_knl = knl3.copy() - deps_and_domains = lp.get_statement_pair_dependency_sets_from_legacy_knl( + statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( unprocessed_knl) # get a schedule to check @@ -466,7 +466,7 @@ def test_linearization_checker_and_invalid_prioritiy_detection(): schedule_items = knl3.schedule sched_is_valid = lp.check_schedule_validity( - unprocessed_knl, deps_and_domains, schedule_items) + unprocessed_knl, statement_pair_dep_sets, schedule_items) # should raise error assert False except ValueError as e: -- GitLab From ba1ee4f958ead1cbb27a2ab009cc2b12b118e33c Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 3 Mar 2020 02:26:27 -0600 Subject: [PATCH 197/415] renove unused import --- .../experimental_scripts/example_wave_equation.py | 1 - 1 file changed, 1 deletion(-) diff --git a/loopy/schedule/linearization_checker/experimental_scripts/example_wave_equation.py b/loopy/schedule/linearization_checker/experimental_scripts/example_wave_equation.py index 8d539ac46..08a18809d 100644 --- a/loopy/schedule/linearization_checker/experimental_scripts/example_wave_equation.py +++ b/loopy/schedule/linearization_checker/experimental_scripts/example_wave_equation.py @@ -1,7 +1,6 @@ import loopy as lp from loopy import generate_code_v2 from loopy import get_one_scheduled_kernel -from loopy.kernel import KernelState from loopy import preprocess_kernel import numpy as np import islpy as isl -- GitLab From 1313188fe855f67ed23fc1d33eef6c22e637174a Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 3 Mar 2020 02:39:51 -0600 Subject: [PATCH 198/415] clean up verbose=True print statements in check_schedule_validity() --- .../linearization_checker/__init__.py | 41 ++++++------------- 1 file changed, 13 insertions(+), 28 deletions(-) diff --git a/loopy/schedule/linearization_checker/__init__.py b/loopy/schedule/linearization_checker/__init__.py index ba44c4ff9..a1b9a9669 100644 --- a/loopy/schedule/linearization_checker/__init__.py +++ b/loopy/schedule/linearization_checker/__init__.py @@ -62,7 +62,9 @@ def check_schedule_validity( if verbose: print("="*80) - print("StatementDependencies w/domains:") + print("Kernel: %s" % (knl.name)) + print("="*80) + print("Dependencies w/domains:") for dep_set in statement_pair_dep_sets: print(dep_set) print(dep_set.dom_before) @@ -70,19 +72,9 @@ def 
check_schedule_validity( # Print kernel info ------------------------------------------------------ print("="*80) - #print("Kernel:") - #print(scheduled_knl) - #from loopy import generate_code_v2 - #print(generate_code_v2(scheduled_knl).device_code()) - print("="*80) - #print("Iname tags: %s" % (scheduled_knl.iname_to_tags)) - print("="*80) - print("Loopy schedule:") + print("Schedule items:") for sched_item in schedule_items: print(sched_item) - #print("scheduled iname order:") - #print(sched_iname_order) - print("="*80) print("Looping through dep pairs...") @@ -96,13 +88,13 @@ def check_schedule_validity( if verbose: print("="*80) - print("statement dep set:") + print("Dependency set:") print(statement_pair_dep_set) print("dom_before:", dom_before) print("dom_after:", dom_after) - # Create a mapping of {statement instance: lex point} - # including only instructions involved in this dependency + # Create LexSchedule: mapping of {statement instance: lex point} + # include only instructions involved in this dependency sched = LexSchedule( preprocessed_knl, schedule_items, @@ -111,21 +103,18 @@ def check_schedule_validity( prohibited_var_names=prohibited_var_names, ) - #print("-"*80) - #print("LexSchedule before processing:") - #print(sched) - lp_insn_id_to_lex_sched_id = sched.loopy_insn_id_to_lex_sched_id() + if verbose: print("-"*80) - print("LexSchedule with inames added:") + print("LexSchedule:") print(sched) print("dict{lp insn id : sched sid int}:") print(lp_insn_id_to_lex_sched_id) - # Get an isl map representing the LexSchedule; + # Get two isl maps representing the LexSchedule, + # one for each schedule item involved in the dependency; # this requires the iname domains - sched_map_symbolic_before, sched_map_symbolic_after = \ sched.create_symbolic_isl_maps( dom_before, @@ -133,14 +122,10 @@ def check_schedule_validity( ) if verbose: - print("dom_before:\n", dom_before) - print("dom_after:\n", dom_after) - print("LexSchedule after creating symbolic isl map:") - print(sched) - print("LexSched:") + print("-"*80) + print("ISL maps representing schedules for {before, after} statement:") print(prettier_map_string(sched_map_symbolic_before)) print(prettier_map_string(sched_map_symbolic_after)) - #print("-"*80) # get map representing lexicographic ordering lex_order_map_symbolic = sched.get_lex_order_map_for_symbolic_sched() -- GitLab From 71b64b47a5f4de4f2c3e8e497829e202effe0d0b Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 3 Mar 2020 03:01:02 -0600 Subject: [PATCH 199/415] get rid of unnecessary use of term that was previously used to distinguish from representations that no longer exist --- .../linearization_checker/__init__.py | 22 ++++---- .../example_wave_equation.py | 50 +++++++++---------- .../linearization_checker/schedule.py | 4 +- 3 files changed, 37 insertions(+), 39 deletions(-) diff --git a/loopy/schedule/linearization_checker/__init__.py b/loopy/schedule/linearization_checker/__init__.py index a1b9a9669..010122067 100644 --- a/loopy/schedule/linearization_checker/__init__.py +++ b/loopy/schedule/linearization_checker/__init__.py @@ -115,8 +115,8 @@ def check_schedule_validity( # Get two isl maps representing the LexSchedule, # one for each schedule item involved in the dependency; # this requires the iname domains - sched_map_symbolic_before, sched_map_symbolic_after = \ - sched.create_symbolic_isl_maps( + isl_sched_map_before, isl_sched_map_after = \ + sched.create_isl_maps( dom_before, dom_after, ) @@ -124,26 +124,26 @@ def check_schedule_validity( if verbose: 
print("-"*80) print("ISL maps representing schedules for {before, after} statement:") - print(prettier_map_string(sched_map_symbolic_before)) - print(prettier_map_string(sched_map_symbolic_after)) + print(prettier_map_string(isl_sched_map_before)) + print(prettier_map_string(isl_sched_map_after)) # get map representing lexicographic ordering - lex_order_map_symbolic = sched.get_lex_order_map_for_symbolic_sched() + sched_lex_order_map = sched.get_lex_order_map_for_sched_space() """ if verbose: - print("lex order map symbolic:") - print(prettier_map_string(lex_order_map_symbolic)) + print("sched lex order map:") + print(prettier_map_string(sched_lex_order_map)) print("space (lex time -> lex time):") - print(lex_order_map_symbolic.space) + print(sched_lex_order_map.space) print("-"*80) """ # create statement instance ordering, # maps each statement instance to all statement instances occuring later sio = get_statement_ordering_map( - sched_map_symbolic_before, - sched_map_symbolic_after, - lex_order_map_symbolic, + isl_sched_map_before, + isl_sched_map_after, + sched_lex_order_map, ) if verbose: diff --git a/loopy/schedule/linearization_checker/experimental_scripts/example_wave_equation.py b/loopy/schedule/linearization_checker/experimental_scripts/example_wave_equation.py index 08a18809d..06b84cbf8 100644 --- a/loopy/schedule/linearization_checker/experimental_scripts/example_wave_equation.py +++ b/loopy/schedule/linearization_checker/experimental_scripts/example_wave_equation.py @@ -167,36 +167,35 @@ sid_to_dom = { sid_after: inames_domain_after, } -#sched_map_symbolic = sched.create_symbolic_isl_map(sid_to_dom) -sched_map_symbolic_before, sched_map_symbolic_after = sched.create_symbolic_isl_maps( +isl_sched_map_before, isl_sched_map_after = sched.create_isl_maps( inames_domain_before, inames_domain_after) # {{{ verbose if verbose: print("sid_to_dom:\n", sid_to_dom) - print("LexSchedule after creating symbolic isl map:") + print("LexSchedule after creating isl map:") print(sched) print("LexSched:") - print(prettier_map_string(sched_map_symbolic_before)) - print(prettier_map_string(sched_map_symbolic_after)) + print(prettier_map_string(isl_sched_map_before)) + print(prettier_map_string(isl_sched_map_after)) #print("space (statement instances -> lex time):") - #print(sched_map_symbolic.space) + #print(isl_sched_map.space) #print("-"*80) # }}} # get map representing lexicographic ordering -lex_order_map_symbolic = sched.get_lex_order_map_for_symbolic_sched() +sched_lex_order_map = sched.get_lex_order_map_for_sched_space() # {{{ verbose """ if verbose: - print("lex order map symbolic:") - print(prettier_map_string(lex_order_map_symbolic)) + print("sched lex order map:") + print(prettier_map_string(sched_lex_order_map)) print("space (lex time -> lex time):") - print(lex_order_map_symbolic.space) + print(sched_lex_order_map.space) print("-"*80) """ @@ -205,9 +204,9 @@ if verbose: # create statement instance ordering, # maps each statement instance to all statement instances occuring later sio = get_statement_ordering_map( - sched_map_symbolic_before, - sched_map_symbolic_after, - lex_order_map_symbolic, + isl_sched_map_before, + isl_sched_map_after, + sched_lex_order_map, before_marker="p") # {{{ verbose @@ -458,36 +457,35 @@ sid_to_dom = { sid_after: inames_domain_after_mapped, } -#sched_map_symbolic = sched.create_symbolic_isl_map(sid_to_dom) -sched_map_symbolic_before, sched_map_symbolic_after = sched.create_symbolic_isl_maps( +isl_sched_map_before, isl_sched_map_after = sched.create_isl_maps( 
inames_domain_before_mapped, inames_domain_after_mapped) # {{{ verbose if verbose: print("sid_to_dom:\n", sid_to_dom) - print("LexSchedule after creating symbolic isl map:") + print("LexSchedule after creating isl map:") print(sched) print("LexSched:") - print(prettier_map_string(sched_map_symbolic_before)) - print(prettier_map_string(sched_map_symbolic_after)) + print(prettier_map_string(isl_sched_map_before)) + print(prettier_map_string(isl_sched_map_after)) #print("space (statement instances -> lex time):") - #print(sched_map_symbolic.space) + #print(isl_sched_map.space) #print("-"*80) # }}} # get map representing lexicographic ordering -lex_order_map_symbolic = sched.get_lex_order_map_for_symbolic_sched() +sched_lex_order_map = sched.get_lex_order_map_for_sched_space() # {{{ verbose """ if verbose: - print("lex order map symbolic:") - print(prettier_map_string(lex_order_map_symbolic)) + print("sched lex order map:") + print(prettier_map_string(sched_lex_order_map)) print("space (lex time -> lex time):") - print(lex_order_map_symbolic.space) + print(sched_lex_order_map.space) print("-"*80) """ @@ -496,9 +494,9 @@ if verbose: # create statement instance ordering, # maps each statement instance to all statement instances occuring later sio = get_statement_ordering_map( - sched_map_symbolic_before, - sched_map_symbolic_after, - lex_order_map_symbolic, + isl_sched_map_before, + isl_sched_map_after, + sched_lex_order_map, before_marker="'") # {{{ verbose diff --git a/loopy/schedule/linearization_checker/schedule.py b/loopy/schedule/linearization_checker/schedule.py index d181065fb..4106d85a1 100644 --- a/loopy/schedule/linearization_checker/schedule.py +++ b/loopy/schedule/linearization_checker/schedule.py @@ -331,7 +331,7 @@ class LexSchedule(object): max_lex_dim-len(self.stmt_instance_after.lex_pt)) ) - def create_symbolic_isl_maps( + def create_isl_maps( self, dom_before, dom_after, @@ -446,7 +446,7 @@ class LexSchedule(object): return [self.lex_var_prefix+str(i) for i in range(self.max_lex_dims())] - def get_lex_order_map_for_symbolic_sched(self): + def get_lex_order_map_for_sched_space(self): """Return an :class:`islpy.BasicMap` that maps each point in a lexicographic ordering to every point that is lexocigraphically greater. 
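For reference, the relation that get_lex_order_map_for_sched_space describes in the docstring above can be written out by hand for the two-dimensional case. The sketch below is plain islpy and is only an illustration of the relation, not the checker's own construction: it assumes the lex dimension names l0, l1 used in the examples above, and the underscored output names l0_, l1_ are stand-ins for the primed names the checker attaches.

    import islpy as isl

    # Hand-written 2-d lexicographic order map (illustration only):
    # (l0, l1) precedes (l0_, l1_) iff l0 < l0_, or l0 = l0_ and l1 < l1_.
    lex_order_2d = isl.Map(
        "{ [l0, l1] -> [l0_, l1_] : l0_ > l0 or (l0_ = l0 and l1_ > l1) }")

    # For example, lex point (0, 5) precedes (1, 0):
    print(isl.Map("{ [0, 5] -> [1, 0] }").is_subset(lex_order_2d))  # True

Composing the before-statement's schedule map with this relation and with the inverse of the after-statement's schedule map is how get_statement_ordering_map builds the statement instance ordering that is then compared against the dependency constraint map.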
-- GitLab From 04dde9e92ad89d8e8d05bf846381fd5f1834d3e8 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 3 Mar 2020 03:07:27 -0600 Subject: [PATCH 200/415] more cleanup of verbose=True print statements in check_schedule_validity() --- .../linearization_checker/__init__.py | 51 ++++++------------- 1 file changed, 16 insertions(+), 35 deletions(-) diff --git a/loopy/schedule/linearization_checker/__init__.py b/loopy/schedule/linearization_checker/__init__.py index 010122067..9ecc7ae51 100644 --- a/loopy/schedule/linearization_checker/__init__.py +++ b/loopy/schedule/linearization_checker/__init__.py @@ -129,14 +129,6 @@ def check_schedule_validity( # get map representing lexicographic ordering sched_lex_order_map = sched.get_lex_order_map_for_sched_space() - """ - if verbose: - print("sched lex order map:") - print(prettier_map_string(sched_lex_order_map)) - print("space (lex time -> lex time):") - print(sched_lex_order_map.space) - print("-"*80) - """ # create statement instance ordering, # maps each statement instance to all statement instances occuring later @@ -147,11 +139,12 @@ def check_schedule_validity( ) if verbose: - print("statement instance ordering:") + print("-"*80) + print("Statement instance ordering:") print(prettier_map_string(sio)) + print("-"*80) print("SIO space (statement instances -> statement instances):") print(sio.space) - print("-"*80) # create a map representing constraints from the dependency, # maps statement instance to all statement instances that must occur later @@ -167,8 +160,11 @@ def check_schedule_validity( # align constraint map spaces to match sio so we can compare them if verbose: - print("constraint map space (before aligning):") + print("-"*80) + print("Constraint map space (before aligning with SIO):") print(constraint_map.space) + print("Constraint map:") + print(prettier_map_string(constraint_map)) # align params aligned_constraint_map = constraint_map.align_params(sio.space) @@ -198,9 +194,10 @@ def check_schedule_validity( ) if verbose: - print("constraint map space (after aligning):") + print("-"*80) + print("Constraint map space (after aligning with SIO):") print(aligned_constraint_map.space) - print("constraint map:") + print("Constraint map:") print(prettier_map_string(aligned_constraint_map)) assert aligned_constraint_map.space == sio.space @@ -220,35 +217,19 @@ def check_schedule_validity( if verbose: print("================ constraint check failure =================") - print("constraint map not subset of SIO") - print("dependency:") + print("Constraint map not subset of SIO") + print("Dependencies:") print(statement_pair_dep_set) - print("statement instance ordering:") + print("Statement instance ordering:") print(prettier_map_string(sio)) print("constraint_map.gist(sio):") - print(aligned_constraint_map.gist(sio)) + print(prettier_map_string(aligned_constraint_map.gist(sio))) print("sio.gist(constraint_map)") - print(sio.gist(aligned_constraint_map)) - print("loop priority known:") + print(prettier_map_string(sio.gist(aligned_constraint_map))) + print("Loop priority known:") print(preprocessed_knl.loop_priority) - """ - from loopy.schedule.linearization_checker.sched_check_utils import ( - get_concurrent_inames, - ) - conc_inames, non_conc_inames = get_concurrent_inames(scheduled_knl) - print("concurrent inames:", conc_inames) - print("sequential inames:", non_conc_inames) - print("constraint map space (stmt instances -> stmt instances):") - print(aligned_constraint_map.space) - print("SIO space (statement instances -> statement 
instances):") - print(sio.space) - print("constraint map:") - print(prettier_map_string(aligned_constraint_map)) - print("statement instance ordering:") - print(prettier_map_string(sio)) print("{insn id -> sched sid int} dict:") print(lp_insn_id_to_lex_sched_id) - """ print("===========================================================") return sched_is_valid -- GitLab From 75ce7465c8371cd6fc5d0e9536c651cd2dfa2f27 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 3 Mar 2020 07:03:14 -0600 Subject: [PATCH 201/415] removed unnecessary DependencyInfo class; renamed get_dependency_maps()->filter_deps_by_intersection_with_SAME() and simplified by removing unnecessary work --- .../linearization_checker/dependency.py | 76 +++++-------------- .../example_pairwise_schedule_validity.py | 23 ++++-- 2 files changed, 35 insertions(+), 64 deletions(-) diff --git a/loopy/schedule/linearization_checker/dependency.py b/loopy/schedule/linearization_checker/dependency.py index 2fb48ec2f..72bee1ce6 100644 --- a/loopy/schedule/linearization_checker/dependency.py +++ b/loopy/schedule/linearization_checker/dependency.py @@ -817,59 +817,36 @@ def get_dependency_sources_and_sinks(knl, sched_item_ids): return sources, sinks -class DependencyInfo(object): - # TODO rename - # TODO use Record? - def __init__( - self, - statement_pair_dep_set, - dom_before, - dom_after, - dep_constraint_map, - is_edge_in_dep_graph, # { dep & SAME } != empty - ): - self.statement_pair_dep_set = statement_pair_dep_set - self.dom_before = dom_before - self.dom_after = dom_after - self.dep_constraint_map = dep_constraint_map - self.is_edge_in_dep_graph = is_edge_in_dep_graph - - -def get_dependency_maps( +def filter_deps_by_intersection_with_SAME( statement_pair_dep_sets, - schedule_items, # TODO always pass as strings since we only need the name? 
+ schedule_items, loop_priority, - knl, # TODO avoid passing this in + non_conc_inames, ): # TODO document + from loopy.schedule import Barrier, RunInstruction + from loopy.schedule.linearization_checker.sched_check_utils import ( + _get_insn_id_from_sched_item, + ) dt = DependencyType # create map from loopy insn ids to ints - lp_insn_id_to_lex_sched_id = {} # TODO + # (need this to keep consistent statement numbering between separate maps) + lp_insn_id_to_lex_sched_id = {} next_sid = 0 - from loopy.schedule import Barrier, RunInstruction for sched_item in schedule_items: if isinstance(sched_item, (RunInstruction, Barrier)): - from loopy.schedule.linearization_checker.sched_check_utils import ( - _get_insn_id_from_sched_item, - ) lp_insn_id = _get_insn_id_from_sched_item(sched_item) lp_insn_id_to_lex_sched_id[lp_insn_id] = next_sid next_sid += 1 - elif isinstance(sched_item, str): - # a string was passed, assume it's the insn_id - lp_insn_id_to_lex_sched_id[sched_item] = next_sid - next_sid += 1 - - from loopy.schedule.linearization_checker.sched_check_utils import ( - get_concurrent_inames, - ) - conc_inames, non_conc_inames = get_concurrent_inames(knl) - dep_info_list = [] + # determine which dep relations have a non-empty intersection with + # the SAME relation + deps_filtered = [] for statement_pair_dep_set in statement_pair_dep_sets: + # create isl map representing dep relation dep_constraint_map = create_dependency_constraint( statement_pair_dep_set, loop_priority, @@ -878,19 +855,17 @@ def get_dependency_maps( "statement", ) - # create "same" dep for these two insns + # create isl map representing "SAME" dep for these two insns s_before = statement_pair_dep_set.statement_before s_after = statement_pair_dep_set.statement_after - dom_before = statement_pair_dep_set.dom_before - dom_after = statement_pair_dep_set.dom_after shared_nc_inames = ( s_before.within_inames & s_after.within_inames & non_conc_inames) same_dep_set = StatementPairDependencySet( s_before, s_after, {dt.SAME: shared_nc_inames}, - dom_before, - dom_after, + statement_pair_dep_set.dom_before, + statement_pair_dep_set.dom_after, ) same_dep_constraint_map = create_dependency_constraint( same_dep_set, @@ -900,22 +875,11 @@ def get_dependency_maps( "statement", ) - # see whether we should create an edge in our statement dep graph + # see whether the intersection of dep map and SAME dep map exists intersect_dep_and_same = same_dep_constraint_map & dep_constraint_map intersect_not_empty = not bool(intersect_dep_and_same.is_empty()) - # create a map representing constraints from the dependency, - # maps statement instance to all statement instances that must occur later - # TODO instead of tuple, store all this in a class - dep_info_list.append( - DependencyInfo( - statement_pair_dep_set, - dom_before, - dom_after, - dep_constraint_map, - intersect_not_empty, - ) - ) - print("") + if intersect_not_empty: + deps_filtered.append(statement_pair_dep_set) - return dep_info_list + return deps_filtered diff --git a/loopy/schedule/linearization_checker/experimental_scripts/example_pairwise_schedule_validity.py b/loopy/schedule/linearization_checker/experimental_scripts/example_pairwise_schedule_validity.py index 8d3ba1469..78d34c83b 100644 --- a/loopy/schedule/linearization_checker/experimental_scripts/example_pairwise_schedule_validity.py +++ b/loopy/schedule/linearization_checker/experimental_scripts/example_pairwise_schedule_validity.py @@ -4,7 +4,7 @@ from loopy.schedule.linearization_checker.sched_check_utils import ( 
create_graph_from_pairs, ) from loopy.schedule.linearization_checker.dependency import ( - get_dependency_maps, + filter_deps_by_intersection_with_SAME, ) from loopy import ( preprocess_kernel, @@ -318,22 +318,29 @@ print("="*80) print("testing dep sort") print("="*80) -# create maps representing legacy deps -# (includes bool representing result of test for dep graph edge) -legacy_dep_info_list = get_dependency_maps( +# create dependency graph + +# for which deps does the intersection with the SAME dependency relation exist? +# create a graph including these deps as edges (from after->before) + +from loopy.schedule.linearization_checker.sched_check_utils import ( + get_concurrent_inames, +) +_, non_conc_inames = get_concurrent_inames(knl) +legacy_deps_filtered_by_same = filter_deps_by_intersection_with_SAME( legacy_statement_pair_dep_sets, schedule_items, knl.loop_priority, - knl, + non_conc_inames, ) # get dep graph edges dep_graph_pairs = [ ( - dep.statement_pair_dep_set.statement_before.insn_id, - dep.statement_pair_dep_set.statement_after.insn_id + dep.statement_after.insn_id, + dep.statement_before.insn_id ) - for dep in legacy_dep_info_list if dep.is_edge_in_dep_graph] + for dep in legacy_deps_filtered_by_same] # create dep graph from edges dep_graph = create_graph_from_pairs(dep_graph_pairs) -- GitLab From f68784600690fa8fb4cc6d20f176f9fd7a4bee6a Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 3 Mar 2020 07:20:28 -0600 Subject: [PATCH 202/415] pass list of insn_id strings to filter_deps_by_intersection_with_SAME() instead of full insn list --- .../linearization_checker/dependency.py | 17 ++++------------- .../example_pairwise_schedule_validity.py | 2 +- 2 files changed, 5 insertions(+), 14 deletions(-) diff --git a/loopy/schedule/linearization_checker/dependency.py b/loopy/schedule/linearization_checker/dependency.py index 72bee1ce6..3c6dfa9b9 100644 --- a/loopy/schedule/linearization_checker/dependency.py +++ b/loopy/schedule/linearization_checker/dependency.py @@ -819,27 +819,18 @@ def get_dependency_sources_and_sinks(knl, sched_item_ids): def filter_deps_by_intersection_with_SAME( statement_pair_dep_sets, - schedule_items, + insn_ids, loop_priority, non_conc_inames, ): # TODO document - from loopy.schedule import Barrier, RunInstruction - from loopy.schedule.linearization_checker.sched_check_utils import ( - _get_insn_id_from_sched_item, - ) dt = DependencyType # create map from loopy insn ids to ints - # (need this to keep consistent statement numbering between separate maps) - lp_insn_id_to_lex_sched_id = {} - next_sid = 0 - for sched_item in schedule_items: - if isinstance(sched_item, (RunInstruction, Barrier)): - lp_insn_id = _get_insn_id_from_sched_item(sched_item) - lp_insn_id_to_lex_sched_id[lp_insn_id] = next_sid - next_sid += 1 + # (used for consistent statement numbering between dep and SAME maps) + lp_insn_id_to_lex_sched_id = dict( + [(insn_id, sid) for sid, insn_id in enumerate(insn_ids)]) # determine which dep relations have a non-empty intersection with # the SAME relation diff --git a/loopy/schedule/linearization_checker/experimental_scripts/example_pairwise_schedule_validity.py b/loopy/schedule/linearization_checker/experimental_scripts/example_pairwise_schedule_validity.py index 78d34c83b..81fd3b5d6 100644 --- a/loopy/schedule/linearization_checker/experimental_scripts/example_pairwise_schedule_validity.py +++ b/loopy/schedule/linearization_checker/experimental_scripts/example_pairwise_schedule_validity.py @@ -329,7 +329,7 @@ from 
loopy.schedule.linearization_checker.sched_check_utils import ( _, non_conc_inames = get_concurrent_inames(knl) legacy_deps_filtered_by_same = filter_deps_by_intersection_with_SAME( legacy_statement_pair_dep_sets, - schedule_items, + [insn.id for insn in knl.instructions], knl.loop_priority, non_conc_inames, ) -- GitLab From 951a3bc8d9bedf767487e1153fcaee5c299c6b37 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 3 Mar 2020 08:08:46 -0600 Subject: [PATCH 203/415] removed mechanisms for dealing with isl maps containing inames in their space that are not used in the constraint inequalities, this doesn't happen anymore since we're using a separate map for each insn --- .../linearization_checker/__init__.py | 1 - .../linearization_checker/dependency.py | 72 +------------------ .../example_pairwise_schedule_validity.py | 2 +- .../example_wave_equation.py | 4 +- .../sched_check_utils.py | 23 +----- .../linearization_checker/schedule.py | 22 +----- 6 files changed, 10 insertions(+), 114 deletions(-) diff --git a/loopy/schedule/linearization_checker/__init__.py b/loopy/schedule/linearization_checker/__init__.py index 9ecc7ae51..23d479366 100644 --- a/loopy/schedule/linearization_checker/__init__.py +++ b/loopy/schedule/linearization_checker/__init__.py @@ -152,7 +152,6 @@ def check_schedule_validity( statement_pair_dep_set, knl.loop_priority, lp_insn_id_to_lex_sched_id, - sched.unused_param_name, sched.statement_var_name, ) # TODO figure out how to keep a consistent lp_insn_id_to_lex_sched_id map diff --git a/loopy/schedule/linearization_checker/dependency.py b/loopy/schedule/linearization_checker/dependency.py index 3c6dfa9b9..32c9ad4a5 100644 --- a/loopy/schedule/linearization_checker/dependency.py +++ b/loopy/schedule/linearization_checker/dependency.py @@ -163,7 +163,6 @@ def create_dependency_constraint( statement_dep_set, loop_priorities, insn_id_to_int, - unused_param_name, statement_var_name, statement_var_pose=0, dom_inames_ordered_before=None, @@ -187,16 +186,6 @@ def create_dependency_constraint( 'insn_id' and 'int_id' refer to the 'insn_id' and 'int_id' attributes of :class:`LexScheduleStatement`. - .. arg unused_param_name: A :class:`str` that specifies the name of a - dummy isl parameter assigned to variables in domain elements of the - isl map that represent inames unused in a particular statement - instance. The domain space of the generated isl map will have a - dimension for every iname used in any statement instance found in - the program ordering. An element in the domain of this map may - represent a statement instance that does not lie within iname x, but - will still need to assign a value to the x domain variable. In this - case, the parameter unused_param_name is is assigned to x. - .. arg statement_var_name: A :class:`str` specifying the name of the isl variable used to represent the unique :class:`int` statement id. 
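
For reference, the criterion implemented by filter_deps_by_intersection_with_SAME() above reduces to intersecting two isl maps and testing the result for emptiness. A minimal islpy sketch of that test follows; the map contents and statement ids are invented for illustration and are not taken from the patches.

import islpy as isl

# hypothetical dep relation: statement 0 at iteration i must come before
# statement 1 at some strictly later iteration ip
dep_map = isl.Map("{ [0, i] -> [1, ip] : 0 <= i < 8 and i < ip < 8 }")
# corresponding SAME relation: statement 0 and statement 1 at the same iteration
same_map = isl.Map("{ [0, i] -> [1, ip] : 0 <= i < 8 and ip = i }")

# keep the dependency only if the intersection is non-empty
keep_dep = not (dep_map & same_map).is_empty()

Here the intersection is empty (i < ip contradicts ip = i), so keep_dep is False and this particular dependency would be left out of the filtered list.
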
@@ -237,25 +226,10 @@ def create_dependency_constraint( islvars = make_islvars_with_marker( var_names_needing_marker=[statement_var_name]+dom_inames_ordered_before, other_var_names=[statement_var_name]+dom_inames_ordered_after, - param_names=[unused_param_name], marker="'", ) statement_var_name_prime = statement_var_name+"'" - # get (ordered) list of unused before/after inames - inames_before_unused = [] - for iname in dom_inames_ordered_before: - if iname not in statement_dep_set.dom_before.get_var_names(isl.dim_type.out): - inames_before_unused.append(iname + "'") - inames_after_unused = [] - for iname in dom_inames_ordered_after: - if iname not in statement_dep_set.dom_after.get_var_names(isl.dim_type.out): - inames_after_unused.append(iname) - - # TODO are there ever unused inames now that we're separating the in/out spaces? - if inames_before_unused or inames_after_unused: - assert False - # initialize constraints to False # this will disappear as soon as we add a constraint all_constraints_set = islvars[0].eq_set(islvars[0] + 1) @@ -361,12 +335,6 @@ def create_dependency_constraint( constraint_set = create_elementwise_comparison_conjunction_set( inames_prime, inames_list, islvars, op="lt") - # TODO remove, this shouldn't happen anymore - # set unused vars == unused dummy param - for iname in inames_before_unused+inames_after_unused: - constraint_set = constraint_set & islvars[iname].eq_set( - islvars[unused_param_name]) - # set statement_var_name == statement # s_before_int = insn_id_to_int[statement_dep_set.statement_before.insn_id] s_after_int = insn_id_to_int[statement_dep_set.statement_after.insn_id] @@ -423,11 +391,11 @@ def _create_5pt_stencil_dependency_constraint( sid_after, space_iname, time_iname, - unused_param_name, statement_var_name, statement_var_pose=0, all_dom_inames_ordered=None, ): + """ WIP: NO NEED TO REVIEW YET """ from loopy.schedule.linearization_checker.sched_check_utils import ( make_islvars_with_marker, @@ -450,21 +418,10 @@ def _create_5pt_stencil_dependency_constraint( islvars = make_islvars_with_marker( var_names_needing_marker=[statement_var_name]+all_dom_inames_ordered, other_var_names=[statement_var_name]+all_dom_inames_ordered, - param_names=[unused_param_name], marker="'", ) statement_var_name_prime = statement_var_name+"'" - # get (ordered) list of unused before/after inames - inames_before_unused = [] - for iname in all_dom_inames_ordered: - if iname not in dom_before_constraint_set.get_var_names(isl.dim_type.out): - inames_before_unused.append(iname + "'") - inames_after_unused = [] - for iname in all_dom_inames_ordered: - if iname not in dom_after_constraint_set.get_var_names(isl.dim_type.out): - inames_after_unused.append(iname) - # initialize constraints to False # this will disappear as soon as we add a constraint #all_constraints_set = islvars[0].eq_set(islvars[0] + 1) @@ -496,11 +453,6 @@ def _create_5pt_stencil_dependency_constraint( & islvars[space_iname_prime].eq_set(islvars[space_iname])) ) - # set unused vars == unused dummy param - for iname in inames_before_unused+inames_after_unused: - constraint_set = constraint_set & islvars[iname].eq_set( - islvars[unused_param_name]) - # set statement_var_name == statement # constraint_set = constraint_set & islvars[statement_var_name_prime].eq_set( islvars[0]+sid_before) @@ -546,11 +498,11 @@ def create_arbitrary_dependency_constraint( dom_after_constraint_set, sid_before, sid_after, - unused_param_name, statement_var_name, statement_var_pose=0, all_dom_inames_ordered=None, ): + """ WIP: NO 
NEED TO REVIEW YET """ # TODO test after switching primes to before vars @@ -576,25 +528,12 @@ def create_arbitrary_dependency_constraint( islvars = make_islvars_with_marker( var_names_needing_marker=[statement_var_name]+all_dom_inames_ordered, other_var_names=[statement_var_name]+all_dom_inames_ordered, - param_names=[unused_param_name], marker="p", ) # TODO figure out before/after notation #statement_var_name_prime = statement_var_name+"'" statement_var_name_prime = statement_var_name+"p" # TODO figure out before/after notation - # get (ordered) list of unused before/after inames - inames_before_unused = [] - for iname in all_dom_inames_ordered: - if iname not in dom_before_constraint_set.get_var_names(isl.dim_type.out): - inames_before_unused.append(iname + "p") - inames_after_unused = [] - for iname in all_dom_inames_ordered: - if iname not in dom_after_constraint_set.get_var_names(isl.dim_type.out): - #inames_after_unused.append(iname + "'") - inames_after_unused.append(iname) - # TODO figure out before/after notation - # initialize constraints to False # this will disappear as soon as we add a constraint all_constraints_set = islvars[0].eq_set(islvars[0] + 1) @@ -645,11 +584,6 @@ def create_arbitrary_dependency_constraint( 1/0 all_constraints_set = all_constraints_set | conj_constraint - # set unused vars == unused dummy param - for iname in inames_before_unused+inames_after_unused: - all_constraints_set = all_constraints_set & islvars[iname].eq_set( - islvars[unused_param_name]) - # set statement_var_name == statement # all_constraints_set = ( all_constraints_set & islvars[statement_var_name_prime].eq_set( @@ -842,7 +776,6 @@ def filter_deps_by_intersection_with_SAME( statement_pair_dep_set, loop_priority, lp_insn_id_to_lex_sched_id, - "unused", # TODO shouldn't be necessary anymore "statement", ) @@ -862,7 +795,6 @@ def filter_deps_by_intersection_with_SAME( same_dep_set, loop_priority, lp_insn_id_to_lex_sched_id, - "unused", # TODO shouldn't be necessary "statement", ) diff --git a/loopy/schedule/linearization_checker/experimental_scripts/example_pairwise_schedule_validity.py b/loopy/schedule/linearization_checker/experimental_scripts/example_pairwise_schedule_validity.py index 81fd3b5d6..0919c07ce 100644 --- a/loopy/schedule/linearization_checker/experimental_scripts/example_pairwise_schedule_validity.py +++ b/loopy/schedule/linearization_checker/experimental_scripts/example_pairwise_schedule_validity.py @@ -1,3 +1,4 @@ +""" WIP: NO NEED TO REVIEW YET """ import loopy as lp import numpy as np from loopy.schedule.linearization_checker.sched_check_utils import ( @@ -13,7 +14,6 @@ from loopy import ( # Choose kernel ---------------------------------------------------------- - knl_choice = "example" #knl_choice = "unused_inames" #knl_choice = "matmul" diff --git a/loopy/schedule/linearization_checker/experimental_scripts/example_wave_equation.py b/loopy/schedule/linearization_checker/experimental_scripts/example_wave_equation.py index 06b84cbf8..18bd017e8 100644 --- a/loopy/schedule/linearization_checker/experimental_scripts/example_wave_equation.py +++ b/loopy/schedule/linearization_checker/experimental_scripts/example_wave_equation.py @@ -1,3 +1,4 @@ +""" WIP: NO NEED TO REVIEW YET """ import loopy as lp from loopy import generate_code_v2 from loopy import get_one_scheduled_kernel @@ -78,7 +79,6 @@ constraint_map = _create_5pt_stencil_dependency_constraint( sid_after = sid_after, space_iname = "ix", time_iname = "it", - unused_param_name = "unused", statement_var_name = 
"statement", statement_var_pose=0, #all_dom_inames_ordered=None, @@ -109,7 +109,6 @@ constraint_map = create_arbitrary_dependency_constraint( inames_domain_after, sid_before=sid_before, sid_after=sid_after, - unused_param_name="unused", statement_var_name="statement", statement_var_pose=0, #all_dom_inames_ordered=None, @@ -342,6 +341,7 @@ m2_prime = isl.BasicMap( """ # new +# TODO remove "unused" m = isl.BasicMap( "[nx,nt] -> {[ix, it] -> [tx, tt, tparity, itt, itx]: " "16*(tx - tt) + itx - itt = ix - it and " diff --git a/loopy/schedule/linearization_checker/sched_check_utils.py b/loopy/schedule/linearization_checker/sched_check_utils.py index 6fefa1483..05ab873e9 100644 --- a/loopy/schedule/linearization_checker/sched_check_utils.py +++ b/loopy/schedule/linearization_checker/sched_check_utils.py @@ -140,7 +140,7 @@ def append_marker_to_isl_map_var_names(old_isl_map, dim_type, marker="'"): def make_islvars_with_marker( - var_names_needing_marker, other_var_names, param_names, marker="'"): + var_names_needing_marker, other_var_names, param_names=[], marker="'"): """Return a dictionary from variable and parameter names to :class:`PwAff` instances that represent each of the variables and parameters, appending marker to @@ -203,7 +203,6 @@ def list_var_names_in_isl_sets( def create_symbolic_isl_map_from_tuples( tuple_pairs_with_domains, space, - unused_param_name, statement_var_name, ): """Return an :class:`islpy.Map` constructed using the provided space, @@ -219,16 +218,6 @@ def create_symbolic_isl_map_from_tuples( .. arg space: A :class:`islpy.Space` to be used to create the map. - .. arg unused_param_name: A :class:`str` that specifies the name of a - dummy isl parameter assigned to variables in domain elements of the - isl map that represent inames unused in a particular statement - instance. An element in the domain of this map may - represent a statement instance that does not lie within iname x, but - will still need to assign a value to the x domain variable. In this - case, the parameter unused_param_name is is assigned to x. This - situation is detected when a name present in `in_` dimension of - the space is not present in a particular domain. - .. arg statement_var_name: A :class:`str` specifying the name of the isl variable used to represent the unique :class:`int` statement id. @@ -238,8 +227,7 @@ def create_symbolic_isl_map_from_tuples( `(tup_in)->(tup_out) : domain`, where `tup_in` and `tup_out` are numeric or symbolic values assigned to the input and output dimension variables in `space`, and `domain` specifies constraints - on these values. Any space `in_` dimension variable not - constrained by `domain` is assigned `unused_param_name`. + on these values. 
""" @@ -282,13 +270,6 @@ def create_symbolic_isl_map_from_tuples( [var for var in tup_out if not isinstance(var, int)] ).issubset(set(dom_var_names)): assert False - unused_inames = set(space_in_names) \ - - set(dom_var_names) - set([statement_var_name]) - # TODO find another way to determine which inames should be unused and - # make an assertion to double check this - for unused_iname in unused_inames: - constraint = constraint & islvars[unused_iname].eq_set( - islvars[unused_param_name]) # set values for 'out' dimension using tuple vals assert len(tup_out) == len(space_out_names) diff --git a/loopy/schedule/linearization_checker/schedule.py b/loopy/schedule/linearization_checker/schedule.py index 4106d85a1..9a3773f9c 100644 --- a/loopy/schedule/linearization_checker/schedule.py +++ b/loopy/schedule/linearization_checker/schedule.py @@ -107,19 +107,6 @@ class LexSchedule(object): ordering. Points in lexicographic ordering are represented as a list of :class:`int` or as :class:`str` Loopy inames. - .. attribute:: unused_param_name - - A :class:`str` that specifies the name of a dummy isl parameter - assigned to variables in domain elements of the isl map that - represent inames unused in a particular statement instance. - The domain space of the generated isl map will have a dimension - for every iname used in any statement instance found in the - program ordering. An element in the domain of this map may - represent a statement instance that does not lie within - iname x, but will still need to assign a value to the x domain - variable. In this case, the parameter unused_param_name is - is assigned to x. - .. attribute:: statement_var_name A :class:`str` specifying the name of the isl variable used @@ -133,7 +120,6 @@ class LexSchedule(object): """ - unused_param_name = "unused" statement_var_name = "statement" lex_var_prefix = "l" @@ -171,8 +157,6 @@ class LexSchedule(object): # make sure we don't have an iname name conflict assert not any( iname == self.statement_var_name for iname in prohibited_var_names) - assert not any( - iname == self.unused_param_name for iname in prohibited_var_names) from loopy.schedule import (EnterLoop, LeaveLoop, Barrier, RunInstruction) from loopy.kernel.data import ConcurrentTag @@ -388,7 +372,7 @@ class LexSchedule(object): from loopy.schedule.linearization_checker.sched_check_utils import ( get_isl_space ) - params_sched = [self.unused_param_name] + params_sched = [] out_names_sched = self.get_lex_var_names() in_names_sched_before = [ @@ -430,7 +414,7 @@ class LexSchedule(object): )], doms_to_intersect_before ), - sched_space_before, self.unused_param_name, self.statement_var_name), + sched_space_before, self.statement_var_name), create_symbolic_isl_map_from_tuples( zip( [( @@ -439,7 +423,7 @@ class LexSchedule(object): self.stmt_instance_after.lex_pt)], doms_to_intersect_after ), - sched_space_after, self.unused_param_name, self.statement_var_name) + sched_space_after, self.statement_var_name) ) def get_lex_var_names(self): -- GitLab From 4ae164cd4482e97702305ac575c18101319e9f72 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 3 Mar 2020 08:57:09 -0600 Subject: [PATCH 204/415] don't use to refer to two different variables in nested loops in create_dependency_constraint(); use inside/outside inames instead --- loopy/schedule/linearization_checker/dependency.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/loopy/schedule/linearization_checker/dependency.py b/loopy/schedule/linearization_checker/dependency.py index 
32c9ad4a5..80b1399cd 100644 --- a/loopy/schedule/linearization_checker/dependency.py +++ b/loopy/schedule/linearization_checker/dependency.py @@ -275,14 +275,14 @@ def create_dependency_constraint( # create a mapping from each iname to inames that must be # nested inside that iname nested_inside = {} - for iname in inames_list: - comes_after_iname = set() + for outside_iname in inames_list: + nested_inside_inames = set() for p_tuple in relevant_priorities: - if iname in p_tuple: - comes_after_iname.update([ - iname for iname in - p_tuple[p_tuple.index(iname)+1:]]) - nested_inside[iname] = comes_after_iname + if outside_iname in p_tuple: + nested_inside_inames.update([ + inside_iname for inside_iname in + p_tuple[p_tuple.index(outside_iname)+1:]]) + nested_inside[outside_iname] = nested_inside_inames from loopy.schedule.linearization_checker.sched_check_utils import ( get_orderings_of_length_n) -- GitLab From a93effd9710147813a9e7b44fc121af97e76dabd Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 3 Mar 2020 09:27:45 -0600 Subject: [PATCH 205/415] fixing pylint errors *on the correct branch this time* --- .../linearization_checker/schedule.py | 3 -- test/test_linearization_checker.py | 32 ++++++++++++------- 2 files changed, 21 insertions(+), 14 deletions(-) diff --git a/loopy/schedule/linearization_checker/schedule.py b/loopy/schedule/linearization_checker/schedule.py index 9a3773f9c..457f188a5 100644 --- a/loopy/schedule/linearization_checker/schedule.py +++ b/loopy/schedule/linearization_checker/schedule.py @@ -443,9 +443,6 @@ class LexSchedule(object): return create_lex_order_map( n_dims, before_names=self.get_lex_var_names()) - def __nonzero__(self): - return self.__bool__() - def __eq__(self, other): return ( self.stmt_instance_before == other.stmt_instance_before diff --git a/test/test_linearization_checker.py b/test/test_linearization_checker.py index 5634bc989..8da744e15 100644 --- a/test/test_linearization_checker.py +++ b/test/test_linearization_checker.py @@ -84,7 +84,8 @@ def test_linearization_checker_with_loop_prioritization(): statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): - knl = lp.add_dependencies_v2(knl, statement_pair_dep_sets) + knl = lp.add_dependencies_v2( + knl, statement_pair_dep_sets) # pylint:disable=no-member # get a schedule to check if knl.state < KernelState.PREPROCESSED: @@ -121,7 +122,8 @@ def test_linearization_checker_with_matmul(): statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): - knl = lp.add_dependencies_v2(knl, statement_pair_dep_sets) + knl = lp.add_dependencies_v2( + knl, statement_pair_dep_sets) # pylint:disable=no-member # get a schedule to check if knl.state < KernelState.PREPROCESSED: @@ -171,7 +173,8 @@ def test_linearization_checker_with_dependent_domain(): statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): - knl = lp.add_dependencies_v2(knl, statement_pair_dep_sets) + knl = lp.add_dependencies_v2( + knl, statement_pair_dep_sets) # pylint:disable=no-member # get a schedule to check if knl.state < KernelState.PREPROCESSED: @@ -224,7 +227,8 @@ def test_linearization_checker_with_stroud_bernstein(): statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): - knl = lp.add_dependencies_v2(knl, statement_pair_dep_sets) + knl = lp.add_dependencies_v2( + 
knl, statement_pair_dep_sets) # pylint:disable=no-member # get a schedule to check if knl.state < KernelState.PREPROCESSED: @@ -260,7 +264,8 @@ def test_linearization_checker_with_nop(): statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): - knl = lp.add_dependencies_v2(knl, statement_pair_dep_sets) + knl = lp.add_dependencies_v2( + knl, statement_pair_dep_sets) # pylint:disable=no-member # get a schedule to check if knl.state < KernelState.PREPROCESSED: @@ -306,7 +311,8 @@ def test_linearization_checker_with_multi_domain(): statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): - knl = lp.add_dependencies_v2(knl, statement_pair_dep_sets) + knl = lp.add_dependencies_v2( + knl, statement_pair_dep_sets) # pylint:disable=no-member # get a schedule to check if knl.state < KernelState.PREPROCESSED: @@ -341,7 +347,8 @@ def test_linearization_checker_with_loop_carried_deps(): statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): - knl = lp.add_dependencies_v2(knl, statement_pair_dep_sets) + knl = lp.add_dependencies_v2( + knl, statement_pair_dep_sets) # pylint:disable=no-member # get a schedule to check if knl.state < KernelState.PREPROCESSED: @@ -383,7 +390,8 @@ def test_linearization_checker_and_invalid_prioritiy_detection(): statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): - knl0 = lp.add_dependencies_v2(knl0, statement_pair_dep_sets) + knl0 = lp.add_dependencies_v2( + knl0, statement_pair_dep_sets) # pylint:disable=no-member # get a schedule to check if knl0.state < KernelState.PREPROCESSED: @@ -404,7 +412,8 @@ def test_linearization_checker_and_invalid_prioritiy_detection(): statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): - knl1 = lp.add_dependencies_v2(knl1, statement_pair_dep_sets) + knl1 = lp.add_dependencies_v2( + knl1, statement_pair_dep_sets) # pylint:disable=no-member # get a schedule to check if knl1.state < KernelState.PREPROCESSED: @@ -421,7 +430,7 @@ def test_linearization_checker_and_invalid_prioritiy_detection(): knl2 = lp.prioritize_loops(knl2, "j,k") try: if hasattr(lp, "constrain_loop_nesting"): - knl2 = lp.constrain_loop_nesting(knl2, "k,i") + knl2 = lp.constrain_loop_nesting(knl2, "k,i") # pylint:disable=no-member else: knl2 = lp.prioritize_loops(knl2, "k,i") @@ -450,7 +459,8 @@ def test_linearization_checker_and_invalid_prioritiy_detection(): knl3 = lp.prioritize_loops(ref_knl, "h,i,j,k") try: if hasattr(lp, "constrain_loop_nesting"): - knl3 = lp.constrain_loop_nesting(knl3, "h,j,i,k") + knl3 = lp.constrain_loop_nesting( + knl3, "h,j,i,k") # pylint:disable=no-member else: knl3 = lp.prioritize_loops(knl3, "h,j,i,k") -- GitLab From 2afc1133ffee3b707b81e6769df13e417c262f2a Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 3 Mar 2020 09:41:32 -0600 Subject: [PATCH 206/415] putting pylint:disable on correct lines --- test/test_linearization_checker.py | 40 +++++++++++++++--------------- 1 file changed, 20 insertions(+), 20 deletions(-) diff --git a/test/test_linearization_checker.py b/test/test_linearization_checker.py index 8da744e15..ebbf04b2a 100644 --- a/test/test_linearization_checker.py +++ b/test/test_linearization_checker.py @@ -84,8 +84,8 @@ def test_linearization_checker_with_loop_prioritization(): 
statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): - knl = lp.add_dependencies_v2( - knl, statement_pair_dep_sets) # pylint:disable=no-member + knl = lp.add_dependencies_v2( # pylint:disable=no-member + knl, statement_pair_dep_sets) # get a schedule to check if knl.state < KernelState.PREPROCESSED: @@ -122,8 +122,8 @@ def test_linearization_checker_with_matmul(): statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): - knl = lp.add_dependencies_v2( - knl, statement_pair_dep_sets) # pylint:disable=no-member + knl = lp.add_dependencies_v2( # pylint:disable=no-member + knl, statement_pair_dep_sets) # get a schedule to check if knl.state < KernelState.PREPROCESSED: @@ -173,8 +173,8 @@ def test_linearization_checker_with_dependent_domain(): statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): - knl = lp.add_dependencies_v2( - knl, statement_pair_dep_sets) # pylint:disable=no-member + knl = lp.add_dependencies_v2( # pylint:disable=no-member + knl, statement_pair_dep_sets) # get a schedule to check if knl.state < KernelState.PREPROCESSED: @@ -227,8 +227,8 @@ def test_linearization_checker_with_stroud_bernstein(): statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): - knl = lp.add_dependencies_v2( - knl, statement_pair_dep_sets) # pylint:disable=no-member + knl = lp.add_dependencies_v2( # pylint:disable=no-member + knl, statement_pair_dep_sets) # get a schedule to check if knl.state < KernelState.PREPROCESSED: @@ -264,8 +264,8 @@ def test_linearization_checker_with_nop(): statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): - knl = lp.add_dependencies_v2( - knl, statement_pair_dep_sets) # pylint:disable=no-member + knl = lp.add_dependencies_v2( # pylint:disable=no-member + knl, statement_pair_dep_sets) # get a schedule to check if knl.state < KernelState.PREPROCESSED: @@ -311,8 +311,8 @@ def test_linearization_checker_with_multi_domain(): statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): - knl = lp.add_dependencies_v2( - knl, statement_pair_dep_sets) # pylint:disable=no-member + knl = lp.add_dependencies_v2( # pylint:disable=no-member + knl, statement_pair_dep_sets) # get a schedule to check if knl.state < KernelState.PREPROCESSED: @@ -347,8 +347,8 @@ def test_linearization_checker_with_loop_carried_deps(): statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): - knl = lp.add_dependencies_v2( - knl, statement_pair_dep_sets) # pylint:disable=no-member + knl = lp.add_dependencies_v2( # pylint:disable=no-member + knl, statement_pair_dep_sets) # get a schedule to check if knl.state < KernelState.PREPROCESSED: @@ -390,8 +390,8 @@ def test_linearization_checker_and_invalid_prioritiy_detection(): statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): - knl0 = lp.add_dependencies_v2( - knl0, statement_pair_dep_sets) # pylint:disable=no-member + knl0 = lp.add_dependencies_v2( # pylint:disable=no-member + knl0, statement_pair_dep_sets) # get a schedule to check if knl0.state < KernelState.PREPROCESSED: @@ -412,8 +412,8 @@ def 
test_linearization_checker_and_invalid_prioritiy_detection(): statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): - knl1 = lp.add_dependencies_v2( - knl1, statement_pair_dep_sets) # pylint:disable=no-member + knl1 = lp.add_dependencies_v2( # pylint:disable=no-member + knl1, statement_pair_dep_sets) # get a schedule to check if knl1.state < KernelState.PREPROCESSED: @@ -459,8 +459,8 @@ def test_linearization_checker_and_invalid_prioritiy_detection(): knl3 = lp.prioritize_loops(ref_knl, "h,i,j,k") try: if hasattr(lp, "constrain_loop_nesting"): - knl3 = lp.constrain_loop_nesting( - knl3, "h,j,i,k") # pylint:disable=no-member + knl3 = lp.constrain_loop_nesting( # pylint:disable=no-member + knl3, "h,j,i,k") else: knl3 = lp.prioritize_loops(knl3, "h,j,i,k") -- GitLab From ebfc15fe4778c65bffa498626d060871188b1e49 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 3 Mar 2020 11:10:37 -0600 Subject: [PATCH 207/415] move code that aligns constraint_map with SIO into separate function; enhance comment explaining need for alignment --- .../linearization_checker/__init__.py | 37 +++++-------------- .../sched_check_utils.py | 28 ++++++++++++++ 2 files changed, 38 insertions(+), 27 deletions(-) diff --git a/loopy/schedule/linearization_checker/__init__.py b/loopy/schedule/linearization_checker/__init__.py index 23d479366..86125882a 100644 --- a/loopy/schedule/linearization_checker/__init__.py +++ b/loopy/schedule/linearization_checker/__init__.py @@ -157,7 +157,14 @@ def check_schedule_validity( # TODO figure out how to keep a consistent lp_insn_id_to_lex_sched_id map # when dependency creation is separate from schedule checking - # align constraint map spaces to match sio so we can compare them + # reorder variables/params in constraint map space to match SIO so we can + # check to see whether the constraint map is a subset of the SIO + # (spaces must be aligned so that the variables in the constraint map + # correspond to the same variables in the SIO) + from loopy.schedule.linearization_checker.sched_check_utils import ( + align_isl_maps_by_var_names, + ) + if verbose: print("-"*80) print("Constraint map space (before aligning with SIO):") @@ -165,32 +172,7 @@ def check_schedule_validity( print("Constraint map:") print(prettier_map_string(constraint_map)) - # align params - aligned_constraint_map = constraint_map.align_params(sio.space) - - # align in_ dims - import islpy as isl - from loopy.schedule.linearization_checker.sched_check_utils import ( - reorder_dims_by_name, - ) - sio_in_names = sio.space.get_var_names(isl.dim_type.in_) - aligned_constraint_map = reorder_dims_by_name( - aligned_constraint_map, - isl.dim_type.in_, - sio_in_names, - add_missing=False, - new_names_are_permutation_only=True, - ) - - # align out dims - sio_out_names = sio.space.get_var_names(isl.dim_type.out) - aligned_constraint_map = reorder_dims_by_name( - aligned_constraint_map, - isl.dim_type.out, - sio_out_names, - add_missing=False, - new_names_are_permutation_only=True, - ) + aligned_constraint_map = align_isl_maps_by_var_names(constraint_map, sio) if verbose: print("-"*80) @@ -199,6 +181,7 @@ def check_schedule_validity( print("Constraint map:") print(prettier_map_string(aligned_constraint_map)) + import islpy as isl assert aligned_constraint_map.space == sio.space assert ( aligned_constraint_map.space.get_var_names(isl.dim_type.in_) diff --git a/loopy/schedule/linearization_checker/sched_check_utils.py 
b/loopy/schedule/linearization_checker/sched_check_utils.py index 05ab873e9..63b4584e7 100644 --- a/loopy/schedule/linearization_checker/sched_check_utils.py +++ b/loopy/schedule/linearization_checker/sched_check_utils.py @@ -98,6 +98,34 @@ def reorder_dims_by_name( return new_set +def align_isl_maps_by_var_names(input_map, target_map): + + # align params + aligned_input_map = input_map.align_params(target_map.space) + + # align in_ dims + target_map_in_names = target_map.space.get_var_names(isl.dim_type.in_) + aligned_input_map = reorder_dims_by_name( + aligned_input_map, + isl.dim_type.in_, + target_map_in_names, + add_missing=False, + new_names_are_permutation_only=True, + ) + + # align out dims + target_map_out_names = target_map.space.get_var_names(isl.dim_type.out) + aligned_input_map = reorder_dims_by_name( + aligned_input_map, + isl.dim_type.out, + target_map_out_names, + add_missing=False, + new_names_are_permutation_only=True, + ) + + return aligned_input_map + + def create_new_isl_set_with_primes(old_isl_set, marker="'"): """Return an isl_set with apostrophes appended to dim_type.set dimension names. -- GitLab From 86c4f68a681a234b5a108385380eb85de6e1512d Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 3 Mar 2020 12:00:47 -0600 Subject: [PATCH 208/415] rename directory linearization_checker->checker --- loopy/schedule/{linearization_checker => checker}/__init__.py | 0 loopy/schedule/{linearization_checker => checker}/dependency.py | 0 .../experimental_scripts/example_pairwise_schedule_validity.py | 0 .../experimental_scripts/example_wave_equation.py | 0 .../{linearization_checker => checker}/lexicographic_order_map.py | 0 .../{linearization_checker => checker}/sched_check_utils.py | 0 loopy/schedule/{linearization_checker => checker}/schedule.py | 0 loopy/schedule/{linearization_checker => checker}/version.py | 0 8 files changed, 0 insertions(+), 0 deletions(-) rename loopy/schedule/{linearization_checker => checker}/__init__.py (100%) rename loopy/schedule/{linearization_checker => checker}/dependency.py (100%) rename loopy/schedule/{linearization_checker => checker}/experimental_scripts/example_pairwise_schedule_validity.py (100%) rename loopy/schedule/{linearization_checker => checker}/experimental_scripts/example_wave_equation.py (100%) rename loopy/schedule/{linearization_checker => checker}/lexicographic_order_map.py (100%) rename loopy/schedule/{linearization_checker => checker}/sched_check_utils.py (100%) rename loopy/schedule/{linearization_checker => checker}/schedule.py (100%) rename loopy/schedule/{linearization_checker => checker}/version.py (100%) diff --git a/loopy/schedule/linearization_checker/__init__.py b/loopy/schedule/checker/__init__.py similarity index 100% rename from loopy/schedule/linearization_checker/__init__.py rename to loopy/schedule/checker/__init__.py diff --git a/loopy/schedule/linearization_checker/dependency.py b/loopy/schedule/checker/dependency.py similarity index 100% rename from loopy/schedule/linearization_checker/dependency.py rename to loopy/schedule/checker/dependency.py diff --git a/loopy/schedule/linearization_checker/experimental_scripts/example_pairwise_schedule_validity.py b/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py similarity index 100% rename from loopy/schedule/linearization_checker/experimental_scripts/example_pairwise_schedule_validity.py rename to loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py diff --git 
a/loopy/schedule/linearization_checker/experimental_scripts/example_wave_equation.py b/loopy/schedule/checker/experimental_scripts/example_wave_equation.py similarity index 100% rename from loopy/schedule/linearization_checker/experimental_scripts/example_wave_equation.py rename to loopy/schedule/checker/experimental_scripts/example_wave_equation.py diff --git a/loopy/schedule/linearization_checker/lexicographic_order_map.py b/loopy/schedule/checker/lexicographic_order_map.py similarity index 100% rename from loopy/schedule/linearization_checker/lexicographic_order_map.py rename to loopy/schedule/checker/lexicographic_order_map.py diff --git a/loopy/schedule/linearization_checker/sched_check_utils.py b/loopy/schedule/checker/sched_check_utils.py similarity index 100% rename from loopy/schedule/linearization_checker/sched_check_utils.py rename to loopy/schedule/checker/sched_check_utils.py diff --git a/loopy/schedule/linearization_checker/schedule.py b/loopy/schedule/checker/schedule.py similarity index 100% rename from loopy/schedule/linearization_checker/schedule.py rename to loopy/schedule/checker/schedule.py diff --git a/loopy/schedule/linearization_checker/version.py b/loopy/schedule/checker/version.py similarity index 100% rename from loopy/schedule/linearization_checker/version.py rename to loopy/schedule/checker/version.py -- GitLab From c3920fedc4ac1f80735d7c5d7531751993ebbfdf Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 3 Mar 2020 12:08:01 -0600 Subject: [PATCH 209/415] update module paths from loopy.schedule.linearization_checker->loopy.schedule.checker --- loopy/__init__.py | 2 +- loopy/schedule/checker/__init__.py | 12 ++++++------ loopy/schedule/checker/dependency.py | 18 +++++++++--------- .../example_pairwise_schedule_validity.py | 6 +++--- .../example_wave_equation.py | 12 ++++++------ .../checker/lexicographic_order_map.py | 2 +- loopy/schedule/checker/schedule.py | 10 +++++----- 7 files changed, 31 insertions(+), 31 deletions(-) diff --git a/loopy/__init__.py b/loopy/__init__.py index cbbb634cf..6cbcbdbf6 100644 --- a/loopy/__init__.py +++ b/loopy/__init__.py @@ -124,7 +124,7 @@ from loopy.transform.add_barrier import add_barrier from loopy.type_inference import infer_unknown_types from loopy.preprocess import preprocess_kernel, realize_reduction from loopy.schedule import generate_loop_schedules, get_one_scheduled_kernel -from loopy.schedule.linearization_checker import ( +from loopy.schedule.checker import ( statement_pair_dep_sets_from_legacy_knl, check_schedule_validity) from loopy.statistics import (ToCountMap, CountGranularity, stringify_stats_mapping, diff --git a/loopy/schedule/checker/__init__.py b/loopy/schedule/checker/__init__.py index 86125882a..4dfbb8f94 100644 --- a/loopy/schedule/checker/__init__.py +++ b/loopy/schedule/checker/__init__.py @@ -25,7 +25,7 @@ def statement_pair_dep_sets_from_legacy_knl(knl): preprocessed_knl = preprocess_kernel(knl) # Create StatementPairDependencySet(s) from kernel dependencies - from loopy.schedule.linearization_checker.dependency import ( + from loopy.schedule.checker.dependency import ( create_dependencies_from_legacy_knl, ) return create_dependencies_from_legacy_knl(preprocessed_knl) @@ -41,14 +41,14 @@ def check_schedule_validity( verbose=False, _use_scheduled_kernel_to_obtain_loop_priority=False): - from loopy.schedule.linearization_checker.dependency import ( + from loopy.schedule.checker.dependency import ( create_dependency_constraint, ) - from loopy.schedule.linearization_checker.schedule import LexSchedule 
- from loopy.schedule.linearization_checker.lexicographic_order_map import ( + from loopy.schedule.checker.schedule import LexSchedule + from loopy.schedule.checker.lexicographic_order_map import ( get_statement_ordering_map, ) - from loopy.schedule.linearization_checker.sched_check_utils import ( + from loopy.schedule.checker.sched_check_utils import ( prettier_map_string, ) @@ -161,7 +161,7 @@ def check_schedule_validity( # check to see whether the constraint map is a subset of the SIO # (spaces must be aligned so that the variables in the constraint map # correspond to the same variables in the SIO) - from loopy.schedule.linearization_checker.sched_check_utils import ( + from loopy.schedule.checker.sched_check_utils import ( align_isl_maps_by_var_names, ) diff --git a/loopy/schedule/checker/dependency.py b/loopy/schedule/checker/dependency.py index 80b1399cd..89fb0a45a 100644 --- a/loopy/schedule/checker/dependency.py +++ b/loopy/schedule/checker/dependency.py @@ -204,7 +204,7 @@ def create_dependency_constraint( """ - from loopy.schedule.linearization_checker.sched_check_utils import ( + from loopy.schedule.checker.sched_check_utils import ( make_islvars_with_marker, append_apostrophes, add_dims_to_isl_set, @@ -284,7 +284,7 @@ def create_dependency_constraint( p_tuple[p_tuple.index(outside_iname)+1:]]) nested_inside[outside_iname] = nested_inside_inames - from loopy.schedule.linearization_checker.sched_check_utils import ( + from loopy.schedule.checker.sched_check_utils import ( get_orderings_of_length_n) # get all orderings that are explicitly allowed by priorities orders = get_orderings_of_length_n( @@ -322,7 +322,7 @@ def create_dependency_constraint( # TODO could this happen? assert False - from loopy.schedule.linearization_checker import ( + from loopy.schedule.checker import ( lexicographic_order_map as lom) # TODO handle case where inames list is empty constraint_set = lom.get_lex_order_constraint( @@ -397,7 +397,7 @@ def _create_5pt_stencil_dependency_constraint( ): """ WIP: NO NEED TO REVIEW YET """ - from loopy.schedule.linearization_checker.sched_check_utils import ( + from loopy.schedule.checker.sched_check_utils import ( make_islvars_with_marker, append_apostrophes, add_dims_to_isl_set, @@ -407,7 +407,7 @@ def _create_5pt_stencil_dependency_constraint( # This function uses the dependency given to create the following constraint: # Statement [s,i,j] comes before statement [s',i',j'] iff - from loopy.schedule.linearization_checker.sched_check_utils import ( + from loopy.schedule.checker.sched_check_utils import ( list_var_names_in_isl_sets, ) if all_dom_inames_ordered is None: @@ -506,7 +506,7 @@ def create_arbitrary_dependency_constraint( # TODO test after switching primes to before vars - from loopy.schedule.linearization_checker.sched_check_utils import ( + from loopy.schedule.checker.sched_check_utils import ( make_islvars_with_marker, #append_apostrophes, append_marker_to_strings, @@ -517,7 +517,7 @@ def create_arbitrary_dependency_constraint( # This function uses the constraint given to create the following map: # Statement [s,i,j] comes before statement [s',i',j'] iff - from loopy.schedule.linearization_checker.sched_check_utils import ( + from loopy.schedule.checker.sched_check_utils import ( list_var_names_in_isl_sets, ) if all_dom_inames_ordered is None: @@ -650,12 +650,12 @@ def create_dependencies_from_legacy_knl(knl): """ # Introduce SAME dep for set of shared, non-concurrent inames - from loopy.schedule.linearization_checker.sched_check_utils import ( + from 
loopy.schedule.checker.sched_check_utils import ( get_concurrent_inames, get_all_nonconcurrent_insn_iname_subsets, get_sched_item_ids_within_inames, ) - from loopy.schedule.linearization_checker.schedule import LexScheduleStatement + from loopy.schedule.checker.schedule import LexScheduleStatement dt = DependencyType conc_inames, non_conc_inames = get_concurrent_inames(knl) statement_dep_sets = [] diff --git a/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py b/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py index 0919c07ce..d9c34dda4 100644 --- a/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py +++ b/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py @@ -1,10 +1,10 @@ """ WIP: NO NEED TO REVIEW YET """ import loopy as lp import numpy as np -from loopy.schedule.linearization_checker.sched_check_utils import ( +from loopy.schedule.checker.sched_check_utils import ( create_graph_from_pairs, ) -from loopy.schedule.linearization_checker.dependency import ( +from loopy.schedule.checker.dependency import ( filter_deps_by_intersection_with_SAME, ) from loopy import ( @@ -323,7 +323,7 @@ print("="*80) # for which deps does the intersection with the SAME dependency relation exist? # create a graph including these deps as edges (from after->before) -from loopy.schedule.linearization_checker.sched_check_utils import ( +from loopy.schedule.checker.sched_check_utils import ( get_concurrent_inames, ) _, non_conc_inames = get_concurrent_inames(knl) diff --git a/loopy/schedule/checker/experimental_scripts/example_wave_equation.py b/loopy/schedule/checker/experimental_scripts/example_wave_equation.py index 18bd017e8..6f602cf61 100644 --- a/loopy/schedule/checker/experimental_scripts/example_wave_equation.py +++ b/loopy/schedule/checker/experimental_scripts/example_wave_equation.py @@ -7,16 +7,16 @@ import numpy as np import islpy as isl #from loopy.kernel_stat_collector import KernelStatCollector #from loopy.kernel_stat_collector import KernelStatOptions as kso # noqa -from loopy.schedule.linearization_checker.sched_check_utils import ( +from loopy.schedule.checker.sched_check_utils import ( prettier_map_string, reorder_dims_by_name, append_marker_to_isl_map_var_names, ) -from loopy.schedule.linearization_checker.dependency import ( +from loopy.schedule.checker.dependency import ( create_arbitrary_dependency_constraint, ) -from loopy.schedule.linearization_checker.schedule import LexSchedule -from loopy.schedule.linearization_checker.lexicographic_order_map import ( +from loopy.schedule.checker.schedule import LexSchedule +from loopy.schedule.checker.lexicographic_order_map import ( get_statement_ordering_map, ) @@ -289,7 +289,7 @@ if not sched_is_valid: print("loop priority known:") print(preprocessed_knl.loop_priority) """ - from loopy.schedule.linearization_checker.sched_check_utils import ( + from loopy.schedule.checker.sched_check_utils import ( get_concurrent_inames, ) conc_inames, non_conc_inames = get_concurrent_inames(scheduled_knl) @@ -580,7 +580,7 @@ if not sched_is_valid: print("loop priority known:") print(preprocessed_knl.loop_priority) """ - from loopy.schedule.linearization_checker.sched_check_utils import ( + from loopy.schedule.checker.sched_check_utils import ( get_concurrent_inames, ) conc_inames, non_conc_inames = get_concurrent_inames(scheduled_knl) diff --git a/loopy/schedule/checker/lexicographic_order_map.py 
b/loopy/schedule/checker/lexicographic_order_map.py index fe23ef4ed..9007a8a73 100644 --- a/loopy/schedule/checker/lexicographic_order_map.py +++ b/loopy/schedule/checker/lexicographic_order_map.py @@ -114,7 +114,7 @@ def create_lex_order_map( if before_names is None: before_names = ["i%s" % (i) for i in range(n_dims)] if after_names is None: - from loopy.schedule.linearization_checker.sched_check_utils import ( + from loopy.schedule.checker.sched_check_utils import ( append_marker_to_strings, ) after_names = append_marker_to_strings(before_names, marker="_") diff --git a/loopy/schedule/checker/schedule.py b/loopy/schedule/checker/schedule.py index 457f188a5..6871a031c 100644 --- a/loopy/schedule/checker/schedule.py +++ b/loopy/schedule/checker/schedule.py @@ -211,7 +211,7 @@ class LexSchedule(object): # sometimes be able to skip increment, but it's not hurting anything # TODO might not need this increment period? elif isinstance(sched_item, (RunInstruction, Barrier)): - from loopy.schedule.linearization_checker.sched_check_utils import ( + from loopy.schedule.checker.sched_check_utils import ( _get_insn_id_from_sched_item, ) lp_insn_id = _get_insn_id_from_sched_item(sched_item) @@ -351,12 +351,12 @@ class LexSchedule(object): """ - from loopy.schedule.linearization_checker.sched_check_utils import ( + from loopy.schedule.checker.sched_check_utils import ( create_symbolic_isl_map_from_tuples, add_dims_to_isl_set ) - from loopy.schedule.linearization_checker.sched_check_utils import ( + from loopy.schedule.checker.sched_check_utils import ( list_var_names_in_isl_sets, ) if dom_inames_ordered_before is None: @@ -369,7 +369,7 @@ class LexSchedule(object): # create an isl space # {('statement', used in >=1 statement domain>) -> # (lexicographic ordering dims)} - from loopy.schedule.linearization_checker.sched_check_utils import ( + from loopy.schedule.checker.sched_check_utils import ( get_isl_space ) params_sched = [] @@ -436,7 +436,7 @@ class LexSchedule(object): lexocigraphically greater. 
""" - from loopy.schedule.linearization_checker.lexicographic_order_map import ( + from loopy.schedule.checker.lexicographic_order_map import ( create_lex_order_map, ) n_dims = self.max_lex_dims() -- GitLab From 19519d3b73212835c1810574f076d84c756a5155 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Thu, 5 Mar 2020 12:24:30 -0600 Subject: [PATCH 210/415] changed get_one_scheduled_kernel()->get_one_linearized_kernel() --- .../example_pairwise_schedule_validity.py | 8 ++++---- .../checker/experimental_scripts/example_wave_equation.py | 6 +++--- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py b/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py index d9c34dda4..5bb6bdc60 100644 --- a/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py +++ b/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py @@ -9,7 +9,7 @@ from loopy.schedule.checker.dependency import ( ) from loopy import ( preprocess_kernel, - get_one_scheduled_kernel, + get_one_linearized_kernel, ) # Choose kernel ---------------------------------------------------------- @@ -290,7 +290,7 @@ legacy_statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( # get a schedule to check knl = preprocess_kernel(knl) -knl = get_one_scheduled_kernel(knl) +knl = get_one_linearized_kernel(knl) print("kernel schedueld") schedule_items = knl.schedule print("checking validity") @@ -302,8 +302,8 @@ sched_is_valid = lp.check_schedule_validity( legacy_statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl(knl) # get a schedule to check -from loopy import get_one_scheduled_kernel -scheduled_knl = get_one_scheduled_kernel(knl) +from loopy import get_one_linearized_kernel +scheduled_knl = get_one_linearized_kernel(knl) schedule_items = scheduled_knl.schedule sched_is_valid = lp.check_schedule_validity( diff --git a/loopy/schedule/checker/experimental_scripts/example_wave_equation.py b/loopy/schedule/checker/experimental_scripts/example_wave_equation.py index 6f602cf61..352739f11 100644 --- a/loopy/schedule/checker/experimental_scripts/example_wave_equation.py +++ b/loopy/schedule/checker/experimental_scripts/example_wave_equation.py @@ -1,7 +1,7 @@ """ WIP: NO NEED TO REVIEW YET """ import loopy as lp from loopy import generate_code_v2 -from loopy import get_one_scheduled_kernel +from loopy import get_one_linearized_kernel from loopy import preprocess_kernel import numpy as np import islpy as isl @@ -125,7 +125,7 @@ verbose = True # get a schedule to check if preprocessed_knl.schedule is None: - scheduled_knl = get_one_scheduled_kernel(preprocessed_knl) + scheduled_knl = get_one_linearized_kernel(preprocessed_knl) else: scheduled_knl = preprocessed_knl @@ -412,7 +412,7 @@ verbose = True # get a schedule to check if preprocessed_knl.schedule is None: - scheduled_knl = get_one_scheduled_kernel(preprocessed_knl) + scheduled_knl = get_one_linearized_kernel(preprocessed_knl) else: scheduled_knl = preprocessed_knl -- GitLab From 2ffbbccaed04b8a17fc676f55f4cfdcaef8265f7 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Thu, 5 Mar 2020 12:30:28 -0600 Subject: [PATCH 211/415] change get_one_scheduled_kernel()->get_one_linearized_kernel() and knl.schedule->knl.linearization --- test/test_linearization_checker.py | 46 +++++++++++++++--------------- 1 file changed, 23 insertions(+), 23 deletions(-) diff --git a/test/test_linearization_checker.py 
b/test/test_linearization_checker.py index ebbf04b2a..9a8b5e43e 100644 --- a/test/test_linearization_checker.py +++ b/test/test_linearization_checker.py @@ -34,7 +34,7 @@ import logging from loopy.kernel import KernelState from loopy import ( preprocess_kernel, - get_one_scheduled_kernel, + get_one_linearized_kernel, ) logger = logging.getLogger(__name__) @@ -90,8 +90,8 @@ def test_linearization_checker_with_loop_prioritization(): # get a schedule to check if knl.state < KernelState.PREPROCESSED: knl = preprocess_kernel(knl) - knl = get_one_scheduled_kernel(knl) - schedule_items = knl.schedule + knl = get_one_linearized_kernel(knl) + schedule_items = knl.linearization sched_is_valid = lp.check_schedule_validity( unprocessed_knl, statement_pair_dep_sets, schedule_items) @@ -128,8 +128,8 @@ def test_linearization_checker_with_matmul(): # get a schedule to check if knl.state < KernelState.PREPROCESSED: knl = preprocess_kernel(knl) - knl = get_one_scheduled_kernel(knl) - schedule_items = knl.schedule + knl = get_one_linearized_kernel(knl) + schedule_items = knl.linearization sched_is_valid = lp.check_schedule_validity( unprocessed_knl, statement_pair_dep_sets, schedule_items) @@ -179,8 +179,8 @@ def test_linearization_checker_with_dependent_domain(): # get a schedule to check if knl.state < KernelState.PREPROCESSED: knl = preprocess_kernel(knl) - knl = get_one_scheduled_kernel(knl) - schedule_items = knl.schedule + knl = get_one_linearized_kernel(knl) + schedule_items = knl.linearization sched_is_valid = lp.check_schedule_validity( unprocessed_knl, statement_pair_dep_sets, schedule_items) @@ -233,8 +233,8 @@ def test_linearization_checker_with_stroud_bernstein(): # get a schedule to check if knl.state < KernelState.PREPROCESSED: knl = preprocess_kernel(knl) - knl = get_one_scheduled_kernel(knl) - schedule_items = knl.schedule + knl = get_one_linearized_kernel(knl) + schedule_items = knl.linearization sched_is_valid = lp.check_schedule_validity( unprocessed_knl, statement_pair_dep_sets, schedule_items) @@ -270,8 +270,8 @@ def test_linearization_checker_with_nop(): # get a schedule to check if knl.state < KernelState.PREPROCESSED: knl = preprocess_kernel(knl) - knl = get_one_scheduled_kernel(knl) - schedule_items = knl.schedule + knl = get_one_linearized_kernel(knl) + schedule_items = knl.linearization sched_is_valid = lp.check_schedule_validity( unprocessed_knl, statement_pair_dep_sets, schedule_items) @@ -317,8 +317,8 @@ def test_linearization_checker_with_multi_domain(): # get a schedule to check if knl.state < KernelState.PREPROCESSED: knl = preprocess_kernel(knl) - knl = get_one_scheduled_kernel(knl) - schedule_items = knl.schedule + knl = get_one_linearized_kernel(knl) + schedule_items = knl.linearization sched_is_valid = lp.check_schedule_validity( unprocessed_knl, statement_pair_dep_sets, schedule_items) @@ -353,8 +353,8 @@ def test_linearization_checker_with_loop_carried_deps(): # get a schedule to check if knl.state < KernelState.PREPROCESSED: knl = preprocess_kernel(knl) - knl = get_one_scheduled_kernel(knl) - schedule_items = knl.schedule + knl = get_one_linearized_kernel(knl) + schedule_items = knl.linearization sched_is_valid = lp.check_schedule_validity( unprocessed_knl, statement_pair_dep_sets, schedule_items) @@ -396,8 +396,8 @@ def test_linearization_checker_and_invalid_prioritiy_detection(): # get a schedule to check if knl0.state < KernelState.PREPROCESSED: knl0 = preprocess_kernel(knl0) - knl0 = get_one_scheduled_kernel(knl0) - schedule_items = knl0.schedule + knl0 = 
get_one_linearized_kernel(knl0) + schedule_items = knl0.linearization sched_is_valid = lp.check_schedule_validity( unprocessed_knl, statement_pair_dep_sets, schedule_items) @@ -418,8 +418,8 @@ def test_linearization_checker_and_invalid_prioritiy_detection(): # get a schedule to check if knl1.state < KernelState.PREPROCESSED: knl1 = preprocess_kernel(knl1) - knl1 = get_one_scheduled_kernel(knl1) - schedule_items = knl1.schedule + knl1 = get_one_linearized_kernel(knl1) + schedule_items = knl1.linearization sched_is_valid = lp.check_schedule_validity( unprocessed_knl, statement_pair_dep_sets, schedule_items) @@ -442,8 +442,8 @@ def test_linearization_checker_and_invalid_prioritiy_detection(): # get a schedule to check if knl2.state < KernelState.PREPROCESSED: knl2 = preprocess_kernel(knl2) - knl2 = get_one_scheduled_kernel(knl2) - schedule_items = knl2.schedule + knl2 = get_one_linearized_kernel(knl2) + schedule_items = knl2.linearization sched_is_valid = lp.check_schedule_validity( unprocessed_knl, statement_pair_dep_sets, schedule_items) @@ -472,8 +472,8 @@ def test_linearization_checker_and_invalid_prioritiy_detection(): # get a schedule to check if knl3.state < KernelState.PREPROCESSED: knl3 = preprocess_kernel(knl3) - knl3 = get_one_scheduled_kernel(knl3) - schedule_items = knl3.schedule + knl3 = get_one_linearized_kernel(knl3) + schedule_items = knl3.linearization sched_is_valid = lp.check_schedule_validity( unprocessed_knl, statement_pair_dep_sets, schedule_items) -- GitLab From 36c3a38939727a29deb43aeae92425717c38d67b Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Thu, 5 Mar 2020 12:34:53 -0600 Subject: [PATCH 212/415] change terminology schedule->linearization --- test/test_linearization_checker.py | 66 +++++++++++++++--------------- 1 file changed, 33 insertions(+), 33 deletions(-) diff --git a/test/test_linearization_checker.py b/test/test_linearization_checker.py index 9a8b5e43e..e8b069511 100644 --- a/test/test_linearization_checker.py +++ b/test/test_linearization_checker.py @@ -87,14 +87,14 @@ def test_linearization_checker_with_loop_prioritization(): knl = lp.add_dependencies_v2( # pylint:disable=no-member knl, statement_pair_dep_sets) - # get a schedule to check + # get a linearization to check if knl.state < KernelState.PREPROCESSED: knl = preprocess_kernel(knl) knl = get_one_linearized_kernel(knl) - schedule_items = knl.linearization + linearization_items = knl.linearization sched_is_valid = lp.check_schedule_validity( - unprocessed_knl, statement_pair_dep_sets, schedule_items) + unprocessed_knl, statement_pair_dep_sets, linearization_items) assert sched_is_valid @@ -125,14 +125,14 @@ def test_linearization_checker_with_matmul(): knl = lp.add_dependencies_v2( # pylint:disable=no-member knl, statement_pair_dep_sets) - # get a schedule to check + # get a linearization to check if knl.state < KernelState.PREPROCESSED: knl = preprocess_kernel(knl) knl = get_one_linearized_kernel(knl) - schedule_items = knl.linearization + linearization_items = knl.linearization sched_is_valid = lp.check_schedule_validity( - unprocessed_knl, statement_pair_dep_sets, schedule_items) + unprocessed_knl, statement_pair_dep_sets, linearization_items) assert sched_is_valid @@ -176,14 +176,14 @@ def test_linearization_checker_with_dependent_domain(): knl = lp.add_dependencies_v2( # pylint:disable=no-member knl, statement_pair_dep_sets) - # get a schedule to check + # get a linearization to check if knl.state < KernelState.PREPROCESSED: knl = preprocess_kernel(knl) knl = get_one_linearized_kernel(knl) - 
schedule_items = knl.linearization + linearization_items = knl.linearization sched_is_valid = lp.check_schedule_validity( - unprocessed_knl, statement_pair_dep_sets, schedule_items) + unprocessed_knl, statement_pair_dep_sets, linearization_items) assert sched_is_valid @@ -230,14 +230,14 @@ def test_linearization_checker_with_stroud_bernstein(): knl = lp.add_dependencies_v2( # pylint:disable=no-member knl, statement_pair_dep_sets) - # get a schedule to check + # get a linearization to check if knl.state < KernelState.PREPROCESSED: knl = preprocess_kernel(knl) knl = get_one_linearized_kernel(knl) - schedule_items = knl.linearization + linearization_items = knl.linearization sched_is_valid = lp.check_schedule_validity( - unprocessed_knl, statement_pair_dep_sets, schedule_items) + unprocessed_knl, statement_pair_dep_sets, linearization_items) assert sched_is_valid @@ -267,14 +267,14 @@ def test_linearization_checker_with_nop(): knl = lp.add_dependencies_v2( # pylint:disable=no-member knl, statement_pair_dep_sets) - # get a schedule to check + # get a linearization to check if knl.state < KernelState.PREPROCESSED: knl = preprocess_kernel(knl) knl = get_one_linearized_kernel(knl) - schedule_items = knl.linearization + linearization_items = knl.linearization sched_is_valid = lp.check_schedule_validity( - unprocessed_knl, statement_pair_dep_sets, schedule_items) + unprocessed_knl, statement_pair_dep_sets, linearization_items) assert sched_is_valid @@ -314,14 +314,14 @@ def test_linearization_checker_with_multi_domain(): knl = lp.add_dependencies_v2( # pylint:disable=no-member knl, statement_pair_dep_sets) - # get a schedule to check + # get a linearization to check if knl.state < KernelState.PREPROCESSED: knl = preprocess_kernel(knl) knl = get_one_linearized_kernel(knl) - schedule_items = knl.linearization + linearization_items = knl.linearization sched_is_valid = lp.check_schedule_validity( - unprocessed_knl, statement_pair_dep_sets, schedule_items) + unprocessed_knl, statement_pair_dep_sets, linearization_items) assert sched_is_valid @@ -350,14 +350,14 @@ def test_linearization_checker_with_loop_carried_deps(): knl = lp.add_dependencies_v2( # pylint:disable=no-member knl, statement_pair_dep_sets) - # get a schedule to check + # get a linearization to check if knl.state < KernelState.PREPROCESSED: knl = preprocess_kernel(knl) knl = get_one_linearized_kernel(knl) - schedule_items = knl.linearization + linearization_items = knl.linearization sched_is_valid = lp.check_schedule_validity( - unprocessed_knl, statement_pair_dep_sets, schedule_items) + unprocessed_knl, statement_pair_dep_sets, linearization_items) assert sched_is_valid @@ -393,14 +393,14 @@ def test_linearization_checker_and_invalid_prioritiy_detection(): knl0 = lp.add_dependencies_v2( # pylint:disable=no-member knl0, statement_pair_dep_sets) - # get a schedule to check + # get a linearization to check if knl0.state < KernelState.PREPROCESSED: knl0 = preprocess_kernel(knl0) knl0 = get_one_linearized_kernel(knl0) - schedule_items = knl0.linearization + linearization_items = knl0.linearization sched_is_valid = lp.check_schedule_validity( - unprocessed_knl, statement_pair_dep_sets, schedule_items) + unprocessed_knl, statement_pair_dep_sets, linearization_items) assert sched_is_valid # no error: @@ -415,14 +415,14 @@ def test_linearization_checker_and_invalid_prioritiy_detection(): knl1 = lp.add_dependencies_v2( # pylint:disable=no-member knl1, statement_pair_dep_sets) - # get a schedule to check + # get a linearization to check if 
knl1.state < KernelState.PREPROCESSED: knl1 = preprocess_kernel(knl1) knl1 = get_one_linearized_kernel(knl1) - schedule_items = knl1.linearization + linearization_items = knl1.linearization sched_is_valid = lp.check_schedule_validity( - unprocessed_knl, statement_pair_dep_sets, schedule_items) + unprocessed_knl, statement_pair_dep_sets, linearization_items) assert sched_is_valid # error (cycle): @@ -439,14 +439,14 @@ def test_linearization_checker_and_invalid_prioritiy_detection(): statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( unprocessed_knl) - # get a schedule to check + # get a linearization to check if knl2.state < KernelState.PREPROCESSED: knl2 = preprocess_kernel(knl2) knl2 = get_one_linearized_kernel(knl2) - schedule_items = knl2.linearization + linearization_items = knl2.linearization sched_is_valid = lp.check_schedule_validity( - unprocessed_knl, statement_pair_dep_sets, schedule_items) + unprocessed_knl, statement_pair_dep_sets, linearization_items) # should raise error assert False except ValueError as e: @@ -469,14 +469,14 @@ def test_linearization_checker_and_invalid_prioritiy_detection(): statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( unprocessed_knl) - # get a schedule to check + # get a linearization to check if knl3.state < KernelState.PREPROCESSED: knl3 = preprocess_kernel(knl3) knl3 = get_one_linearized_kernel(knl3) - schedule_items = knl3.linearization + linearization_items = knl3.linearization sched_is_valid = lp.check_schedule_validity( - unprocessed_knl, statement_pair_dep_sets, schedule_items) + unprocessed_knl, statement_pair_dep_sets, linearization_items) # should raise error assert False except ValueError as e: -- GitLab From 2e6f1c77d49e9d101a9b106177413382b279d0f7 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Thu, 5 Mar 2020 13:14:58 -0600 Subject: [PATCH 213/415] change terminology schedule->linearization in linearization_checker --- loopy/__init__.py | 4 +- loopy/schedule/checker/__init__.py | 24 +++--- loopy/schedule/checker/dependency.py | 28 ++++--- .../example_pairwise_schedule_validity.py | 22 ++--- .../example_wave_equation.py | 82 +++++++++---------- loopy/schedule/checker/sched_check_utils.py | 18 ++-- loopy/schedule/checker/schedule.py | 26 +++--- test/test_linearization_checker.py | 44 +++++----- 8 files changed, 123 insertions(+), 125 deletions(-) diff --git a/loopy/__init__.py b/loopy/__init__.py index 8b2c907db..a0ce3c271 100644 --- a/loopy/__init__.py +++ b/loopy/__init__.py @@ -127,7 +127,7 @@ from loopy.schedule import ( generate_loop_schedules, get_one_scheduled_kernel, get_one_linearized_kernel) from loopy.schedule.checker import ( statement_pair_dep_sets_from_legacy_knl, - check_schedule_validity) + check_linearization_validity) from loopy.statistics import (ToCountMap, CountGranularity, stringify_stats_mapping, Op, MemAccess, get_op_poly, get_op_map, get_lmem_access_poly, get_DRAM_access_poly, get_gmem_access_poly, get_mem_access_map, @@ -255,7 +255,7 @@ __all__ = [ "generate_loop_schedules", "get_one_scheduled_kernel", "get_one_linearized_kernel", "statement_pair_dep_sets_from_legacy_knl", - "check_schedule_validity", + "check_linearization_validity", "GeneratedProgram", "CodeGenerationResult", "PreambleInfo", "generate_code", "generate_code_v2", "generate_body", diff --git a/loopy/schedule/checker/__init__.py b/loopy/schedule/checker/__init__.py index 4dfbb8f94..8c67423fe 100644 --- a/loopy/schedule/checker/__init__.py +++ b/loopy/schedule/checker/__init__.py @@ -33,13 +33,13 @@ def 
statement_pair_dep_sets_from_legacy_knl(knl): # TODO work on granularity of encapsulation, encapsulate some of this in # separate functions -def check_schedule_validity( +def check_linearization_validity( knl, statement_pair_dep_sets, - schedule_items, + linearization_items, prohibited_var_names=set(), verbose=False, - _use_scheduled_kernel_to_obtain_loop_priority=False): + _use_linearized_kernel_to_obtain_loop_priority=False): # TODO unused arg? from loopy.schedule.checker.dependency import ( create_dependency_constraint, @@ -73,13 +73,13 @@ def check_schedule_validity( # Print kernel info ------------------------------------------------------ print("="*80) print("Schedule items:") - for sched_item in schedule_items: - print(sched_item) + for linearization_item in linearization_items: + print(linearization_item) print("="*80) print("Looping through dep pairs...") - # For each dependency, create+test schedule containing pair of insns------ - sched_is_valid = True + # For each dependency, create+test linearization containing pair of insns------ + linearization_is_valid = True for statement_pair_dep_set in statement_pair_dep_sets: s_before = statement_pair_dep_set.statement_before s_after = statement_pair_dep_set.statement_after @@ -97,7 +97,7 @@ def check_schedule_validity( # include only instructions involved in this dependency sched = LexSchedule( preprocessed_knl, - schedule_items, + linearization_items, s_before.insn_id, s_after.insn_id, prohibited_var_names=prohibited_var_names, @@ -113,7 +113,7 @@ def check_schedule_validity( print(lp_insn_id_to_lex_sched_id) # Get two isl maps representing the LexSchedule, - # one for each schedule item involved in the dependency; + # one for each linearization item involved in the dependency; # this requires the iname domains isl_sched_map_before, isl_sched_map_after = \ sched.create_isl_maps( @@ -155,7 +155,7 @@ def check_schedule_validity( sched.statement_var_name, ) # TODO figure out how to keep a consistent lp_insn_id_to_lex_sched_id map - # when dependency creation is separate from schedule checking + # when dependency creation is separate from linearization checking # reorder variables/params in constraint map space to match SIO so we can # check to see whether the constraint map is a subset of the SIO @@ -195,7 +195,7 @@ def check_schedule_validity( if not aligned_constraint_map.is_subset(sio): - sched_is_valid = False + linearization_is_valid = False if verbose: print("================ constraint check failure =================") @@ -214,4 +214,4 @@ def check_schedule_validity( print(lp_insn_id_to_lex_sched_id) print("===========================================================") - return sched_is_valid + return linearization_is_valid diff --git a/loopy/schedule/checker/dependency.py b/loopy/schedule/checker/dependency.py index 89fb0a45a..c9f998d35 100644 --- a/loopy/schedule/checker/dependency.py +++ b/loopy/schedule/checker/dependency.py @@ -653,7 +653,7 @@ def create_dependencies_from_legacy_knl(knl): from loopy.schedule.checker.sched_check_utils import ( get_concurrent_inames, get_all_nonconcurrent_insn_iname_subsets, - get_sched_item_ids_within_inames, + get_linearization_item_ids_within_inames, ) from loopy.schedule.checker.schedule import LexScheduleStatement dt = DependencyType @@ -690,10 +690,12 @@ def create_dependencies_from_legacy_knl(knl): # Then make PRIOR dep from all sinks to all sources at previous iterations for iname_subset in non_conc_iname_subsets: # find items within this iname set - sched_item_ids = 
get_sched_item_ids_within_inames(knl, iname_subset) + linearization_item_ids = get_linearization_item_ids_within_inames( + knl, iname_subset) # find sources and sinks - sources, sinks = get_dependency_sources_and_sinks(knl, sched_item_ids) + sources, sinks = get_dependency_sources_and_sinks( + knl, linearization_item_ids) # create prior deps @@ -721,13 +723,13 @@ def create_dependencies_from_legacy_knl(knl): return set(statement_dep_sets) -def get_dependency_sources_and_sinks(knl, sched_item_ids): - """Implicitly create a directed graph with the schedule items specified - by ``sched_item_ids`` as nodes, and with edges representing a +def get_dependency_sources_and_sinks(knl, linearization_item_ids): + """Implicitly create a directed graph with the linearization items specified + by ``linearization_item_ids`` as nodes, and with edges representing a 'happens before' relationship specfied by each legacy dependency between two instructions. Return the sources and sinks within this graph. - .. arg sched_item_ids: A :class:`list` of :class:`str` representing + .. arg linearization_item_ids: A :class:`list` of :class:`str` representing loopy instruction ids. .. return: Two instances of :class:`set` of :class:`str` instruction ids @@ -735,18 +737,18 @@ def get_dependency_sources_and_sinks(knl, sched_item_ids): """ sources = set() - dependees = set() # all dependees (within sched_item_ids) - for item_id in sched_item_ids: - # find the deps within sched_item_ids - deps = knl.id_to_insn[item_id].depends_on & sched_item_ids + dependees = set() # all dependees (within linearization_item_ids) + for item_id in linearization_item_ids: + # find the deps within linearization_item_ids + deps = knl.id_to_insn[item_id].depends_on & linearization_item_ids if deps: # add deps to dependees dependees.update(deps) - else: # has no deps (within sched_item_ids), this is a source + else: # has no deps (within linearization_item_ids), this is a source sources.add(item_id) # sinks don't point to anyone - sinks = sched_item_ids - dependees + sinks = linearization_item_ids - dependees return sources, sinks diff --git a/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py b/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py index 5bb6bdc60..cfbf8a022 100644 --- a/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py +++ b/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py @@ -288,30 +288,30 @@ unprocessed_knl = knl.copy() legacy_statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( unprocessed_knl) -# get a schedule to check +# get a linearization to check knl = preprocess_kernel(knl) knl = get_one_linearized_kernel(knl) print("kernel schedueld") -schedule_items = knl.schedule +linearization_items = knl.linearization print("checking validity") -sched_is_valid = lp.check_schedule_validity( - unprocessed_knl, legacy_statement_pair_dep_sets, schedule_items, verbose=True) +linearization_is_valid = lp.check_linearization_validity( + unprocessed_knl, legacy_statement_pair_dep_sets, linearization_items, verbose=True) """ legacy_statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl(knl) -# get a schedule to check +# get a linearization to check from loopy import get_one_linearized_kernel -scheduled_knl = get_one_linearized_kernel(knl) -schedule_items = scheduled_knl.schedule +linearized_knl = get_one_linearized_kernel(knl) +linearization_items = linearized_knl.linearization 
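Taken together, the renamed entry points used in this script give the following end-to-end check. Again an illustrative sketch rather than patch code: the two-instruction kernel is a stand-in assumed for the example, while the call signatures are the ones defined above (statement_pair_dep_sets_from_legacy_knl, get_one_linearized_kernel, check_linearization_validity).

import loopy as lp
from loopy import preprocess_kernel, get_one_linearized_kernel

# stand-in kernel with one legacy dependency between two instructions
knl = lp.make_kernel(
    "{[i,j]: 0 <= i,j < n}",
    """
    a[i,j] = i + j       {id=insn_a}
    b[i,j] = 2*a[i,j]    {id=insn_b, dep=insn_a}
    """,
    lang_version=(2018, 2))

unprocessed_knl = knl.copy()

# build pairwise dependency sets from the legacy dependencies
deps = lp.statement_pair_dep_sets_from_legacy_knl(unprocessed_knl)

# obtain one linearization and check it against those dependencies
knl = preprocess_kernel(knl)
knl = get_one_linearized_kernel(knl)
linearization_items = knl.linearization

linearization_is_valid = lp.check_linearization_validity(
    unprocessed_knl, deps, linearization_items)
assert linearization_is_valid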
-sched_is_valid = lp.check_schedule_validity( - knl, legacy_statement_pair_dep_sets, schedule_items, verbose=True) +linearization_is_valid = lp.check_linearization_validity( + knl, legacy_statement_pair_dep_sets, linearization_items, verbose=True) """ -print("is sched valid? constraint map subset of SIO?") -print(sched_is_valid) +print("is linearization valid? constraint map subset of SIO?") +print(linearization_is_valid) print("="*80) diff --git a/loopy/schedule/checker/experimental_scripts/example_wave_equation.py b/loopy/schedule/checker/experimental_scripts/example_wave_equation.py index 352739f11..f92a5828c 100644 --- a/loopy/schedule/checker/experimental_scripts/example_wave_equation.py +++ b/loopy/schedule/checker/experimental_scripts/example_wave_equation.py @@ -123,11 +123,11 @@ print(prettier_map_string(constraint_map)) verbose = False verbose = True -# get a schedule to check -if preprocessed_knl.schedule is None: - scheduled_knl = get_one_linearized_kernel(preprocessed_knl) +# get a linearization to check +if preprocessed_knl.linearization is None: + linearized_knl = get_one_linearized_kernel(preprocessed_knl) else: - scheduled_knl = preprocessed_knl + linearized_knl = preprocessed_knl # {{{ verbose @@ -135,16 +135,14 @@ if verbose: # Print kernel info ------------------------------------------------------ print("="*80) print("Kernel:") - print(scheduled_knl) - #print(generate_code_v2(scheduled_knl).device_code()) + print(linearized_knl) + #print(generate_code_v2(linearized_knl).device_code()) print("="*80) - print("Iname tags: %s" % (scheduled_knl.iname_to_tags)) + print("Iname tags: %s" % (linearized_knl.iname_to_tags)) print("="*80) - print("Loopy schedule:") - for sched_item in scheduled_knl.schedule: - print(sched_item) - #print("scheduled iname order:") - #print(sched_iname_order) + print("Loopy linearization:") + for linearization_item in linearized_knl.linearization: + print(linearization_item) print("="*80) print("inames_domain_before:", inames_domain_before) @@ -155,7 +153,7 @@ if verbose: # Create a mapping of {statement instance: lex point} # including only instructions involved in this dependency sched = LexSchedule( - scheduled_knl, scheduled_knl.schedule, str(sid_before), str(sid_after)) + linearized_knl, linearized_knl.linearization, str(sid_before), str(sid_after)) # Get an isl map representing the LexSchedule; # this requires the iname domains @@ -269,9 +267,9 @@ assert ( aligned_constraint_map.space.get_var_names(isl.dim_type.param) == sio.space.get_var_names(isl.dim_type.param)) -sched_is_valid = aligned_constraint_map.is_subset(sio) +linearization_is_valid = aligned_constraint_map.is_subset(sio) -if not sched_is_valid: +if not linearization_is_valid: # {{{ verbose @@ -292,7 +290,7 @@ if not sched_is_valid: from loopy.schedule.checker.sched_check_utils import ( get_concurrent_inames, ) - conc_inames, non_conc_inames = get_concurrent_inames(scheduled_knl) + conc_inames, non_conc_inames = get_concurrent_inames(linearized_knl) print("concurrent inames:", conc_inames) print("sequential inames:", non_conc_inames) print("constraint map space (stmt instances -> stmt instances):") @@ -310,8 +308,8 @@ if not sched_is_valid: # }}} -print("is sched valid? constraint map subset of SIO?") -print(sched_is_valid) +print("is linearization valid? 
constraint map subset of SIO?") +print(linearization_is_valid) # ====================================================================== @@ -410,11 +408,11 @@ print("(mapped) inames_domain_after:", inames_domain_after_mapped) verbose = False verbose = True -# get a schedule to check -if preprocessed_knl.schedule is None: - scheduled_knl = get_one_linearized_kernel(preprocessed_knl) +# get a linearization to check +if preprocessed_knl.linearization is None: + linearized_knl = get_one_linearized_kernel(preprocessed_knl) else: - scheduled_knl = preprocessed_knl + linearized_knl = preprocessed_knl # {{{ verbose @@ -422,16 +420,14 @@ if verbose: # Print kernel info ------------------------------------------------------ print("="*80) print("Kernel:") - print(scheduled_knl) - #print(generate_code_v2(scheduled_knl).device_code()) + print(linearized_knl) + #print(generate_code_v2(linearized_knl).device_code()) print("="*80) - print("Iname tags: %s" % (scheduled_knl.iname_to_tags)) + print("Iname tags: %s" % (linearized_knl.iname_to_tags)) print("="*80) - print("Loopy schedule:") - for sched_item in scheduled_knl.schedule: - print(sched_item) - #print("scheduled iname order:") - #print(sched_iname_order) + print("Loopy linearization:") + for linearization_item in linearized_knl.linearization: + print(linearization_item) print("="*80) print("inames_domain_before_mapped:", inames_domain_before_mapped) @@ -442,8 +438,8 @@ if verbose: # Create a mapping of {statement instance: lex point} # including only instructions involved in this dependency sched = LexSchedule( - scheduled_knl, - scheduled_knl.schedule, + linearized_knl, + linearized_knl.linearization, str(sid_before), str(sid_after) ) @@ -560,9 +556,9 @@ assert ( aligned_constraint_map.space.get_var_names(isl.dim_type.param) == sio.space.get_var_names(isl.dim_type.param)) -sched_is_valid = aligned_constraint_map.is_subset(sio) +linearization_is_valid = aligned_constraint_map.is_subset(sio) -if not sched_is_valid: +if not linearization_is_valid: # {{{ verbose @@ -583,7 +579,7 @@ if not sched_is_valid: from loopy.schedule.checker.sched_check_utils import ( get_concurrent_inames, ) - conc_inames, non_conc_inames = get_concurrent_inames(scheduled_knl) + conc_inames, non_conc_inames = get_concurrent_inames(linearized_knl) print("concurrent inames:", conc_inames) print("sequential inames:", non_conc_inames) print("constraint map space (stmt instances -> stmt instances):") @@ -601,8 +597,8 @@ if not sched_is_valid: # }}} -print("is sched valid? constraint map subset of SIO?") -print(sched_is_valid) +print("is linearization valid? constraint map subset of SIO?") +print(linearization_is_valid) ''' # (U_n^{k+1}-U_n^k)/dt = C*(U_{n+1}^k-U_n^k)/dx @@ -632,15 +628,15 @@ print("time:", time_measured) """ """ -sched_is_valid = lp.check_schedule_validity(knl, verbose=True) +linearization_is_valid = lp.check_linearization_validity(knl, verbose=True) -print("is sched valid? constraint map subset of SIO?") -print(sched_is_valid) +print("is linearization valid? constraint map subset of SIO?") +print(linearization_is_valid) """ """ -sched_is_valid = lp.check_schedule_validity(knl, verbose=True) +linearization_is_valid = lp.check_linearization_validity(knl, verbose=True) -print("is sched valid? constraint map subset of SIO?") -print(sched_is_valid) +print("is linearization valid? 
constraint map subset of SIO?") +print(linearization_is_valid) """ diff --git a/loopy/schedule/checker/sched_check_utils.py b/loopy/schedule/checker/sched_check_utils.py index 63b4584e7..eb60ffada 100644 --- a/loopy/schedule/checker/sched_check_utils.py +++ b/loopy/schedule/checker/sched_check_utils.py @@ -392,17 +392,17 @@ def get_concurrent_inames(knl): return conc_inames, all_inames-conc_inames -def _get_insn_id_from_sched_item(sched_item): +def _get_insn_id_from_linearization_item(linearization_item): # TODO could use loopy's sched_item_to_insn_id() from loopy.schedule import Barrier - if isinstance(sched_item, Barrier): - return sched_item.originating_insn_id + if isinstance(linearization_item, Barrier): + return linearization_item.originating_insn_id else: - return sched_item.insn_id + return linearization_item.insn_id # TODO for better performance, could combine these funcs so we don't -# loop over schedule more than once +# loop over linearization more than once def get_all_nonconcurrent_insn_iname_subsets( knl, exclude_empty=False, non_conc_inames=None): """Return a :class:`set` of every unique subset of non-concurrent @@ -434,12 +434,12 @@ def get_all_nonconcurrent_insn_iname_subsets( return iname_subsets -def get_sched_item_ids_within_inames(knl, inames): - sched_item_ids = set() +def get_linearization_item_ids_within_inames(knl, inames): + linearization_item_ids = set() for insn in knl.instructions: if inames.issubset(insn.within_inames): - sched_item_ids.add(insn.id) - return sched_item_ids + linearization_item_ids.add(insn.id) + return linearization_item_ids # TODO use yield to clean this up diff --git a/loopy/schedule/checker/schedule.py b/loopy/schedule/checker/schedule.py index 6871a031c..f694cb623 100644 --- a/loopy/schedule/checker/schedule.py +++ b/loopy/schedule/checker/schedule.py @@ -126,7 +126,7 @@ class LexSchedule(object): def __init__( self, knl, - sched_items_ordered, + linearization_items_ordered, before_insn_id, after_insn_id, prohibited_var_names=[], @@ -135,7 +135,7 @@ class LexSchedule(object): :arg knl: A :class:`LoopKernel` whose schedule items will be described by this :class:`LexSchedule`. - :arg sched_items_ordered: A list of :class:`ScheduleItem` whose + :arg linearization_items_ordered: A list of :class:`ScheduleItem` whose order will be described by this :class:`LexSchedule`. :arg before_insn_id: A :class:`str` instruction id specifying @@ -161,22 +161,22 @@ class LexSchedule(object): from loopy.schedule import (EnterLoop, LeaveLoop, Barrier, RunInstruction) from loopy.kernel.data import ConcurrentTag - # go through sched_items_ordered and generate self.lex_schedule + # go through linearization_items_ordered and generate self.lex_schedule # keep track of the next point in our lexicographic ordering # initially this as a 1-d point with value 0 next_insn_lex_pt = [0] next_sid = 0 - for sched_item in sched_items_ordered: - if isinstance(sched_item, EnterLoop): - iname = sched_item.iname + for linearization_item in linearization_items_ordered: + if isinstance(linearization_item, EnterLoop): + iname = linearization_item.iname if knl.iname_tags_of_type(iname, ConcurrentTag): # In the future, this should be unnecessary because there # won't be any inames with ConcurrentTags in the loopy sched from warnings import warn warn( "LexSchedule.__init__: Encountered EnterLoop for iname %s " - "with ConcurrentTag(s) in schedule for kernel %s. " + "with ConcurrentTag(s) in linearization for kernel %s. " "Ignoring this loop." 
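The walk that LexSchedule.__init__ performs over these linearization items follows the pattern sketched below. This is a standalone illustration written for this note, not patch code: EnterLoop and LeaveLoop adjust the loop nesting that the lexicographic point tracks, while RunInstruction and Barrier are mapped back to an instruction id the same way _get_insn_id_from_linearization_item does above.

from loopy.schedule import Barrier, EnterLoop, LeaveLoop, RunInstruction

def print_linearization_outline(linearization_items):
    # indent by loop depth to show the nesting the lex ordering encodes
    depth = 0
    for item in linearization_items:
        if isinstance(item, EnterLoop):
            print("  "*depth + "enter loop: " + item.iname)
            depth += 1
        elif isinstance(item, LeaveLoop):
            depth -= 1
            print("  "*depth + "leave loop: " + item.iname)
        elif isinstance(item, (RunInstruction, Barrier)):
            # Barriers carry originating_insn_id (possibly None), as in
            # _get_insn_id_from_linearization_item
            insn_id = (item.originating_insn_id
                       if isinstance(item, Barrier) else item.insn_id)
            print("  "*depth + "run: " + str(insn_id))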
% (iname, knl.name)) continue @@ -195,8 +195,8 @@ class LexSchedule(object): # add a second lex dim to enumerate code blocks within the new loop next_insn_lex_pt.append(iname) next_insn_lex_pt.append(0) - elif isinstance(sched_item, LeaveLoop): - if knl.iname_tags_of_type(sched_item.iname, ConcurrentTag): + elif isinstance(linearization_item, LeaveLoop): + if knl.iname_tags_of_type(linearization_item.iname, ConcurrentTag): # In the future, this should be unnecessary because there # won't be any inames with ConcurrentTags in the loopy sched continue @@ -210,16 +210,16 @@ class LexSchedule(object): # if we didn't add any statements while in this loop, we might # sometimes be able to skip increment, but it's not hurting anything # TODO might not need this increment period? - elif isinstance(sched_item, (RunInstruction, Barrier)): + elif isinstance(linearization_item, (RunInstruction, Barrier)): from loopy.schedule.checker.sched_check_utils import ( - _get_insn_id_from_sched_item, + _get_insn_id_from_linearization_item, ) - lp_insn_id = _get_insn_id_from_sched_item(sched_item) + lp_insn_id = _get_insn_id_from_linearization_item(linearization_item) if lp_insn_id is None: # TODO make sure it's okay to ignore barriers without id # (because they'll never be part of a dependency?) # matmul example has barrier that fails this assertion... - # assert sched_item.originating_insn_id is not None + # assert linearization_item.originating_insn_id is not None continue # if include_only_insn_ids list was passed, diff --git a/test/test_linearization_checker.py b/test/test_linearization_checker.py index e8b069511..3b68aa350 100644 --- a/test/test_linearization_checker.py +++ b/test/test_linearization_checker.py @@ -93,9 +93,9 @@ def test_linearization_checker_with_loop_prioritization(): knl = get_one_linearized_kernel(knl) linearization_items = knl.linearization - sched_is_valid = lp.check_schedule_validity( + linearization_is_valid = lp.check_linearization_validity( unprocessed_knl, statement_pair_dep_sets, linearization_items) - assert sched_is_valid + assert linearization_is_valid def test_linearization_checker_with_matmul(): @@ -131,9 +131,9 @@ def test_linearization_checker_with_matmul(): knl = get_one_linearized_kernel(knl) linearization_items = knl.linearization - sched_is_valid = lp.check_schedule_validity( + linearization_is_valid = lp.check_linearization_validity( unprocessed_knl, statement_pair_dep_sets, linearization_items) - assert sched_is_valid + assert linearization_is_valid def test_linearization_checker_with_scan(): @@ -165,7 +165,7 @@ def test_linearization_checker_with_dependent_domain(): lang_version=(2018, 2), ) # TODO current check for unused inames is incorrectly - # causing scheduling to fail when realize_reduction is used + # causing linearizing to fail when realize_reduction is used #knl = lp.realize_reduction(knl, force_scan=True) unprocessed_knl = knl.copy() @@ -182,9 +182,9 @@ def test_linearization_checker_with_dependent_domain(): knl = get_one_linearized_kernel(knl) linearization_items = knl.linearization - sched_is_valid = lp.check_schedule_validity( + linearization_is_valid = lp.check_linearization_validity( unprocessed_knl, statement_pair_dep_sets, linearization_items) - assert sched_is_valid + assert linearization_is_valid def test_linearization_checker_with_stroud_bernstein(): @@ -236,9 +236,9 @@ def test_linearization_checker_with_stroud_bernstein(): knl = get_one_linearized_kernel(knl) linearization_items = knl.linearization - sched_is_valid = lp.check_schedule_validity( 
+ linearization_is_valid = lp.check_linearization_validity( unprocessed_knl, statement_pair_dep_sets, linearization_items) - assert sched_is_valid + assert linearization_is_valid def test_linearization_checker_with_nop(): @@ -273,9 +273,9 @@ def test_linearization_checker_with_nop(): knl = get_one_linearized_kernel(knl) linearization_items = knl.linearization - sched_is_valid = lp.check_schedule_validity( + linearization_is_valid = lp.check_linearization_validity( unprocessed_knl, statement_pair_dep_sets, linearization_items) - assert sched_is_valid + assert linearization_is_valid def test_linearization_checker_with_multi_domain(): @@ -320,9 +320,9 @@ def test_linearization_checker_with_multi_domain(): knl = get_one_linearized_kernel(knl) linearization_items = knl.linearization - sched_is_valid = lp.check_schedule_validity( + linearization_is_valid = lp.check_linearization_validity( unprocessed_knl, statement_pair_dep_sets, linearization_items) - assert sched_is_valid + assert linearization_is_valid def test_linearization_checker_with_loop_carried_deps(): @@ -356,9 +356,9 @@ def test_linearization_checker_with_loop_carried_deps(): knl = get_one_linearized_kernel(knl) linearization_items = knl.linearization - sched_is_valid = lp.check_schedule_validity( + linearization_is_valid = lp.check_linearization_validity( unprocessed_knl, statement_pair_dep_sets, linearization_items) - assert sched_is_valid + assert linearization_is_valid def test_linearization_checker_and_invalid_prioritiy_detection(): @@ -399,9 +399,9 @@ def test_linearization_checker_and_invalid_prioritiy_detection(): knl0 = get_one_linearized_kernel(knl0) linearization_items = knl0.linearization - sched_is_valid = lp.check_schedule_validity( + linearization_is_valid = lp.check_linearization_validity( unprocessed_knl, statement_pair_dep_sets, linearization_items) - assert sched_is_valid + assert linearization_is_valid # no error: knl1 = lp.prioritize_loops(ref_knl, "h,i,k") @@ -421,9 +421,9 @@ def test_linearization_checker_and_invalid_prioritiy_detection(): knl1 = get_one_linearized_kernel(knl1) linearization_items = knl1.linearization - sched_is_valid = lp.check_schedule_validity( + linearization_is_valid = lp.check_linearization_validity( unprocessed_knl, statement_pair_dep_sets, linearization_items) - assert sched_is_valid + assert linearization_is_valid # error (cycle): knl2 = lp.prioritize_loops(ref_knl, "h,i,j") @@ -445,7 +445,7 @@ def test_linearization_checker_and_invalid_prioritiy_detection(): knl2 = get_one_linearized_kernel(knl2) linearization_items = knl2.linearization - sched_is_valid = lp.check_schedule_validity( + linearization_is_valid = lp.check_linearization_validity( unprocessed_knl, statement_pair_dep_sets, linearization_items) # should raise error assert False @@ -475,7 +475,7 @@ def test_linearization_checker_and_invalid_prioritiy_detection(): knl3 = get_one_linearized_kernel(knl3) linearization_items = knl3.linearization - sched_is_valid = lp.check_schedule_validity( + linearization_is_valid = lp.check_linearization_validity( unprocessed_knl, statement_pair_dep_sets, linearization_items) # should raise error assert False @@ -485,7 +485,7 @@ def test_linearization_checker_and_invalid_prioritiy_detection(): else: assert "invalid priorities" in str(e) -# TODO create more kernels with invalid schedules to test linearization checker +# TODO create more kernels with invalid linearizations to test linearization checker if __name__ == "__main__": -- GitLab From cefcbb25514168e89baced22eef77d7b84feba00 Mon Sep 17 
00:00:00 2001 From: jdsteve2 Date: Thu, 5 Mar 2020 13:20:32 -0600 Subject: [PATCH 214/415] fixing flake8 issue --- .../experimental_scripts/example_pairwise_schedule_validity.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py b/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py index cfbf8a022..3fc7abc01 100644 --- a/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py +++ b/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py @@ -296,7 +296,8 @@ linearization_items = knl.linearization print("checking validity") linearization_is_valid = lp.check_linearization_validity( - unprocessed_knl, legacy_statement_pair_dep_sets, linearization_items, verbose=True) + unprocessed_knl, legacy_statement_pair_dep_sets, linearization_items, + verbose=True) """ legacy_statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl(knl) -- GitLab From 464b55db0acaf270d4478f875dfe6cb66bfde768 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Thu, 19 Mar 2020 07:15:57 -0500 Subject: [PATCH 215/415] add copyright and license stuff --- loopy/schedule/checker/__init__.py | 21 ++++++++++++++++++ loopy/schedule/checker/dependency.py | 22 +++++++++++++++++++ .../checker/lexicographic_order_map.py | 22 +++++++++++++++++++ loopy/schedule/checker/sched_check_utils.py | 22 +++++++++++++++++++ loopy/schedule/checker/schedule.py | 22 +++++++++++++++++++ 5 files changed, 109 insertions(+) diff --git a/loopy/schedule/checker/__init__.py b/loopy/schedule/checker/__init__.py index 8c67423fe..73b829e67 100644 --- a/loopy/schedule/checker/__init__.py +++ b/loopy/schedule/checker/__init__.py @@ -1,3 +1,24 @@ +__copyright__ = "Copyright (C) 2019 James Stevens" + +__license__ = """ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
+""" def statement_pair_dep_sets_from_legacy_knl(knl): diff --git a/loopy/schedule/checker/dependency.py b/loopy/schedule/checker/dependency.py index c9f998d35..e0436e24b 100644 --- a/loopy/schedule/checker/dependency.py +++ b/loopy/schedule/checker/dependency.py @@ -1,3 +1,25 @@ +__copyright__ = "Copyright (C) 2019 James Stevens" + +__license__ = """ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. +""" + import islpy as isl diff --git a/loopy/schedule/checker/lexicographic_order_map.py b/loopy/schedule/checker/lexicographic_order_map.py index 9007a8a73..5526599c4 100644 --- a/loopy/schedule/checker/lexicographic_order_map.py +++ b/loopy/schedule/checker/lexicographic_order_map.py @@ -1,3 +1,25 @@ +__copyright__ = "Copyright (C) 2019 James Stevens" + +__license__ = """ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
+""" + import islpy as isl diff --git a/loopy/schedule/checker/sched_check_utils.py b/loopy/schedule/checker/sched_check_utils.py index eb60ffada..5d8bd30cf 100644 --- a/loopy/schedule/checker/sched_check_utils.py +++ b/loopy/schedule/checker/sched_check_utils.py @@ -1,3 +1,25 @@ +__copyright__ = "Copyright (C) 2019 James Stevens" + +__license__ = """ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. +""" + import islpy as isl diff --git a/loopy/schedule/checker/schedule.py b/loopy/schedule/checker/schedule.py index f694cb623..ff34ef4d6 100644 --- a/loopy/schedule/checker/schedule.py +++ b/loopy/schedule/checker/schedule.py @@ -1,3 +1,25 @@ +__copyright__ = "Copyright (C) 2019 James Stevens" + +__license__ = """ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
+""" + import islpy as isl -- GitLab From 5c99a20fde31e70d7b2bdf3d0217776b3b317e39 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Thu, 19 Mar 2020 07:16:53 -0500 Subject: [PATCH 216/415] add more copyright and license stuff --- .../example_pairwise_schedule_validity.py | 22 +++++++++++++++++++ .../example_wave_equation.py | 22 +++++++++++++++++++ 2 files changed, 44 insertions(+) diff --git a/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py b/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py index 3fc7abc01..ceb4a0ce9 100644 --- a/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py +++ b/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py @@ -1,3 +1,25 @@ +__copyright__ = "Copyright (C) 2019 James Stevens" + +__license__ = """ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. +""" + """ WIP: NO NEED TO REVIEW YET """ import loopy as lp import numpy as np diff --git a/loopy/schedule/checker/experimental_scripts/example_wave_equation.py b/loopy/schedule/checker/experimental_scripts/example_wave_equation.py index f92a5828c..3b5e25c9b 100644 --- a/loopy/schedule/checker/experimental_scripts/example_wave_equation.py +++ b/loopy/schedule/checker/experimental_scripts/example_wave_equation.py @@ -1,3 +1,25 @@ +__copyright__ = "Copyright (C) 2019 James Stevens" + +__license__ = """ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
+""" + """ WIP: NO NEED TO REVIEW YET """ import loopy as lp from loopy import generate_code_v2 -- GitLab From e25bb64b92e5604127d5f75a532d0ca81b87eb58 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Thu, 19 Mar 2020 07:18:53 -0500 Subject: [PATCH 217/415] fix typo 'StatementPairDependySet' --- loopy/schedule/checker/__init__.py | 2 +- loopy/schedule/checker/dependency.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/loopy/schedule/checker/__init__.py b/loopy/schedule/checker/__init__.py index 73b829e67..bf83b97fa 100644 --- a/loopy/schedule/checker/__init__.py +++ b/loopy/schedule/checker/__init__.py @@ -22,7 +22,7 @@ THE SOFTWARE. def statement_pair_dep_sets_from_legacy_knl(knl): - """Return a list of :class:`StatementPairDependySet` instances created + """Return a list of :class:`StatementPairDependencySet` instances created for a :class:`loopy.LoopKernel` containing legacy depencencies. Create the new dependencies according to the following rules. (1) If a dependency exists between ``insn0`` and ``insn1``, create the dependnecy diff --git a/loopy/schedule/checker/dependency.py b/loopy/schedule/checker/dependency.py index e0436e24b..5d2f32832 100644 --- a/loopy/schedule/checker/dependency.py +++ b/loopy/schedule/checker/dependency.py @@ -653,7 +653,7 @@ def create_arbitrary_dependency_constraint( def create_dependencies_from_legacy_knl(knl): - """Return a list of :class:`StatementPairDependySet` instances created + """Return a list of :class:`StatementPairDependencySet` instances created for a :class:`loopy.LoopKernel` containing legacy depencencies. Create the new dependencies according to the following rules. (1) If a dependency exists between ``insn0`` and ``insn1``, create the dependnecy -- GitLab From 12b081a12ceda0510836bd231af26aa5c4f93a1b Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Thu, 19 Mar 2020 07:41:11 -0500 Subject: [PATCH 218/415] add paths to correctly link class names in docstrings --- loopy/schedule/checker/__init__.py | 24 ++++++++++--------- loopy/schedule/checker/dependency.py | 12 ++++++---- .../checker/lexicographic_order_map.py | 4 ++-- loopy/schedule/checker/sched_check_utils.py | 6 ++--- loopy/schedule/checker/schedule.py | 2 +- 5 files changed, 26 insertions(+), 22 deletions(-) diff --git a/loopy/schedule/checker/__init__.py b/loopy/schedule/checker/__init__.py index bf83b97fa..7fda654dd 100644 --- a/loopy/schedule/checker/__init__.py +++ b/loopy/schedule/checker/__init__.py @@ -22,21 +22,23 @@ THE SOFTWARE. def statement_pair_dep_sets_from_legacy_knl(knl): - """Return a list of :class:`StatementPairDependencySet` instances created - for a :class:`loopy.LoopKernel` containing legacy depencencies. Create - the new dependencies according to the following rules. (1) If - a dependency exists between ``insn0`` and ``insn1``, create the dependnecy - ``SAME(SNC)`` where ``SNC`` is the set of non-concurrent inames used - by both ``insn0`` and ``insn1``, and ``SAME`` is the relationship specified - by the ``SAME`` attribute of :class:`DependencyType`. (2) For each subset - of non-concurrent inames used by any instruction, find the set of all - instructions using those inames, create a directed graph with these - instructions as nodes and edges representing a 'happens before' + """Return a list of + :class:`loopy.schedule.checker.dependency.StatementPairDependencySet` + instances created for a :class:`loopy.LoopKernel` containing legacy + depencencies. Create the new dependencies according to the following rules. 
+ (1) If a dependency exists between ``insn0`` and ``insn1``, create the + dependnecy ``SAME(SNC)`` where ``SNC`` is the set of non-concurrent inames + used by both ``insn0`` and ``insn1``, and ``SAME`` is the relationship + specified by the ``SAME`` attribute of + :class:`loopy.schedule.checker.dependency.DependencyType`. + (2) For each subset of non-concurrent inames used by any instruction, find + the set of all instructions using those inames, create a directed graph + with these instructions as nodes and edges representing a 'happens before' relationship specfied by each dependency, find the sources and sinks within this graph, and connect each sink to each source (sink happens before source) with a ``PRIOR(SNC)`` dependency, where ``PRIOR`` is the relationship specified by the ``PRIOR`` attribute of - :class:`DependencyType`. + :class:`loopy.schedule.checker.dependency.DependencyType`. """ diff --git a/loopy/schedule/checker/dependency.py b/loopy/schedule/checker/dependency.py index 5d2f32832..89f3bbd09 100644 --- a/loopy/schedule/checker/dependency.py +++ b/loopy/schedule/checker/dependency.py @@ -65,11 +65,13 @@ class StatementPairDependencySet(object): .. attribute:: statement_before - A :class:`LexScheduleStatement` depended on by statement_after. + A :class:`loopy.schedule.checker.schedule.LexScheduleStatement` depended + on by statement_after. .. attribute:: statement_after - A :class:`LexScheduleStatement` which depends on statement_before. + A :class:`loopy.schedule.checker.schedule.LexScheduleStatement` which + cdepends on statement_before. .. attribute:: deps @@ -146,10 +148,10 @@ def create_elementwise_comparison_conjunction_set( .. arg names1: A list of :class:`str` representing variable names. - .. arg islvars: A dictionary from variable names to :class:`PwAff` + .. arg islvars: A dictionary from variable names to :class:`islpy.PwAff` instances that represent each of the variables (islvars may be produced by `islpy.make_zero_and_vars`). The key - '0' is also include and represents a :class:`PwAff` zero constant. + '0' is also include and represents a :class:`islpy.PwAff` zero constant. .. arg op: A :class:`str` describing the operator to use when creating the set constraints. Options: `eq` for `=`, `lt` for `<` @@ -206,7 +208,7 @@ def create_dependency_constraint( .. arg insn_id_to_int: A :class:`dict` mapping insn_id to int_id, where 'insn_id' and 'int_id' refer to the 'insn_id' and 'int_id' attributes - of :class:`LexScheduleStatement`. + of :class:`loopy.schedule.checker.schedule.LexScheduleStatement`. .. arg statement_var_name: A :class:`str` specifying the name of the isl variable used to represent the unique :class:`int` statement id. diff --git a/loopy/schedule/checker/lexicographic_order_map.py b/loopy/schedule/checker/lexicographic_order_map.py index 5526599c4..ec0de6fc0 100644 --- a/loopy/schedule/checker/lexicographic_order_map.py +++ b/loopy/schedule/checker/lexicographic_order_map.py @@ -66,10 +66,10 @@ def get_lex_order_constraint(islvars, before_names, after_names): defining a 'happens before' relationship in a lexicographic ordering. - .. arg islvars: A dictionary from variable names to :class:`PwAff` + .. arg islvars: A dictionary from variable names to :class:`islpy.PwAff` instances that represent each of the variables (islvars may be produced by `islpy.make_zero_and_vars`). The key - '0' is also include and represents a :class:`PwAff` zero constant. + '0' is also include and represents a :class:`islpy.PwAff` zero constant. 
This dictionary defines the space to be used for the set. .. arg before_names: A list of :class:`str` variable names representing diff --git a/loopy/schedule/checker/sched_check_utils.py b/loopy/schedule/checker/sched_check_utils.py index 5d8bd30cf..5cf50e6a0 100644 --- a/loopy/schedule/checker/sched_check_utils.py +++ b/loopy/schedule/checker/sched_check_utils.py @@ -192,7 +192,7 @@ def append_marker_to_isl_map_var_names(old_isl_map, dim_type, marker="'"): def make_islvars_with_marker( var_names_needing_marker, other_var_names, param_names=[], marker="'"): """Return a dictionary from variable and parameter names - to :class:`PwAff` instances that represent each of + to :class:`islpy.PwAff` instances that represent each of the variables and parameters, appending marker to var_names_needing_marker. @@ -205,10 +205,10 @@ def make_islvars_with_marker( .. arg param_names: A :class:`list` of :class:`str` elements representing parameter names. - .. return: A dictionary from variable names to :class:`PwAff` + .. return: A dictionary from variable names to :class:`islpy.PwAff` instances that represent each of the variables (islvars may be produced by `islpy.make_zero_and_vars`). The key - '0' is also include and represents a :class:`PwAff` zero constant. + '0' is also include and represents a :class:`islpy.PwAff` zero constant. """ diff --git a/loopy/schedule/checker/schedule.py b/loopy/schedule/checker/schedule.py index ff34ef4d6..0d982519b 100644 --- a/loopy/schedule/checker/schedule.py +++ b/loopy/schedule/checker/schedule.py @@ -154,7 +154,7 @@ class LexSchedule(object): prohibited_var_names=[], ): """ - :arg knl: A :class:`LoopKernel` whose schedule items will be + :arg knl: A :class:`loopy.LoopKernel` whose schedule items will be described by this :class:`LexSchedule`. :arg linearization_items_ordered: A list of :class:`ScheduleItem` whose -- GitLab From ae4092f5449f4fbc8508dde17ed0da23a87c9d83 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Thu, 19 Mar 2020 07:46:18 -0500 Subject: [PATCH 219/415] format docstring for statement_pair_dep_sets_from_legacy_knl() --- loopy/schedule/checker/__init__.py | 28 +++++++++++++++++++--------- 1 file changed, 19 insertions(+), 9 deletions(-) diff --git a/loopy/schedule/checker/__init__.py b/loopy/schedule/checker/__init__.py index 7fda654dd..0b05f7ef9 100644 --- a/loopy/schedule/checker/__init__.py +++ b/loopy/schedule/checker/__init__.py @@ -25,20 +25,30 @@ def statement_pair_dep_sets_from_legacy_knl(knl): """Return a list of :class:`loopy.schedule.checker.dependency.StatementPairDependencySet` instances created for a :class:`loopy.LoopKernel` containing legacy - depencencies. Create the new dependencies according to the following rules. + depencencies. + + Create the new dependencies according to the following rules: + (1) If a dependency exists between ``insn0`` and ``insn1``, create the dependnecy ``SAME(SNC)`` where ``SNC`` is the set of non-concurrent inames used by both ``insn0`` and ``insn1``, and ``SAME`` is the relationship specified by the ``SAME`` attribute of :class:`loopy.schedule.checker.dependency.DependencyType`. 
- (2) For each subset of non-concurrent inames used by any instruction, find - the set of all instructions using those inames, create a directed graph - with these instructions as nodes and edges representing a 'happens before' - relationship specfied by each dependency, find the sources and sinks within - this graph, and connect each sink to each source (sink happens before - source) with a ``PRIOR(SNC)`` dependency, where ``PRIOR`` is the - relationship specified by the ``PRIOR`` attribute of - :class:`loopy.schedule.checker.dependency.DependencyType`. + + (2) For each subset of non-concurrent inames used by any instruction, + + (a), find the set of all instructions using those inames, + + (b), create a directed graph with these instructions as nodes and + edges representing a 'happens before' relationship specfied by + each dependency, + + (c), find the sources and sinks within this graph, and + + (d), connect each sink to each source (sink happens before source) + with a ``PRIOR(SNC)`` dependency, where ``PRIOR`` is the + relationship specified by the ``PRIOR`` attribute of + :class:`loopy.schedule.checker.dependency.DependencyType`. """ -- GitLab From 9a8bde00ee1ba4cb27ca27067b782a29efa31366 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Thu, 19 Mar 2020 07:51:05 -0500 Subject: [PATCH 220/415] format docstring for create_dependencies_from_legacy_knl() --- loopy/schedule/checker/__init__.py | 1 + loopy/schedule/checker/dependency.py | 39 ++++++++++++++++++---------- 2 files changed, 26 insertions(+), 14 deletions(-) diff --git a/loopy/schedule/checker/__init__.py b/loopy/schedule/checker/__init__.py index 0b05f7ef9..ba7fd4a57 100644 --- a/loopy/schedule/checker/__init__.py +++ b/loopy/schedule/checker/__init__.py @@ -73,6 +73,7 @@ def check_linearization_validity( prohibited_var_names=set(), verbose=False, _use_linearized_kernel_to_obtain_loop_priority=False): # TODO unused arg? + # TODO document from loopy.schedule.checker.dependency import ( create_dependency_constraint, diff --git a/loopy/schedule/checker/dependency.py b/loopy/schedule/checker/dependency.py index 89f3bbd09..e78610c57 100644 --- a/loopy/schedule/checker/dependency.py +++ b/loopy/schedule/checker/dependency.py @@ -656,22 +656,33 @@ def create_arbitrary_dependency_constraint( def create_dependencies_from_legacy_knl(knl): """Return a list of :class:`StatementPairDependencySet` instances created - for a :class:`loopy.LoopKernel` containing legacy depencencies. Create - the new dependencies according to the following rules. (1) If - a dependency exists between ``insn0`` and ``insn1``, create the dependnecy - ``SAME(SNC)`` where ``SNC`` is the set of non-concurrent inames used - by both ``insn0`` and ``insn1``, and ``SAME`` is the relationship specified - by the ``SAME`` attribute of :class:`DependencyType`. (2) For each subset - of non-concurrent inames used by any instruction, find the set of all - instructions using those inames, create a directed graph with these - instructions as nodes and edges representing a 'happens before' - relationship specfied by each dependency, find the sources and sinks within - this graph, and connect each sink to each source (sink happens before - source) with a ``PRIOR(SNC)`` dependency, where ``PRIOR`` is the - relationship specified by the ``PRIOR`` attribute of - :class:`DependencyType`. + for a :class:`loopy.LoopKernel` containing legacy depencencies. 
+ + Create the new dependencies according to the following rules: + + (1) If a dependency exists between ``insn0`` and ``insn1``, create the + dependnecy ``SAME(SNC)`` where ``SNC`` is the set of non-concurrent inames + used by both ``insn0`` and ``insn1``, and ``SAME`` is the relationship + specified by the ``SAME`` attribute of + :class:`loopy.schedule.checker.dependency.DependencyType`. + + (2) For each subset of non-concurrent inames used by any instruction, + + (a), find the set of all instructions using those inames, + + (b), create a directed graph with these instructions as nodes and + edges representing a 'happens before' relationship specfied by + each dependency, + + (c), find the sources and sinks within this graph, and + + (d), connect each sink to each source (sink happens before source) + with a ``PRIOR(SNC)`` dependency, where ``PRIOR`` is the + relationship specified by the ``PRIOR`` attribute of + :class:`loopy.schedule.checker.dependency.DependencyType`. """ + # Introduce SAME dep for set of shared, non-concurrent inames from loopy.schedule.checker.sched_check_utils import ( -- GitLab From 2dc738863e20ffda2896a7aef5e65f5dd0afde40 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Thu, 19 Mar 2020 07:55:28 -0500 Subject: [PATCH 221/415] add TODO about removing unnecessary wrapper function --- loopy/schedule/checker/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/loopy/schedule/checker/__init__.py b/loopy/schedule/checker/__init__.py index ba7fd4a57..e8ad05d82 100644 --- a/loopy/schedule/checker/__init__.py +++ b/loopy/schedule/checker/__init__.py @@ -51,6 +51,7 @@ def statement_pair_dep_sets_from_legacy_knl(knl): :class:`loopy.schedule.checker.dependency.DependencyType`. """ + # TODO maybe just eliminate this function since it doesn't do much # Preprocess if not already preprocessed # note: kernels must always be preprocessed before scheduling -- GitLab From 1d528672a5e0706f8b43c13b53a6097e3e576483 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Thu, 19 Mar 2020 07:55:42 -0500 Subject: [PATCH 222/415] remove version.py --- loopy/schedule/checker/version.py | 1 - 1 file changed, 1 deletion(-) delete mode 100644 loopy/schedule/checker/version.py diff --git a/loopy/schedule/checker/version.py b/loopy/schedule/checker/version.py deleted file mode 100644 index b6a75f587..000000000 --- a/loopy/schedule/checker/version.py +++ /dev/null @@ -1 +0,0 @@ -VERSION_TEXT = "0.1" -- GitLab From f513bb0a1842b7a74b845b5034c8cd6a186ed850 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Thu, 19 Mar 2020 08:13:09 -0500 Subject: [PATCH 223/415] renamed sched_check_utils.py->utils.py --- loopy/schedule/checker/__init__.py | 4 ++-- loopy/schedule/checker/dependency.py | 14 +++++++------- .../example_pairwise_schedule_validity.py | 4 ++-- .../experimental_scripts/example_wave_equation.py | 6 +++--- loopy/schedule/checker/lexicographic_order_map.py | 2 +- loopy/schedule/checker/schedule.py | 8 ++++---- .../checker/{sched_check_utils.py => utils.py} | 0 7 files changed, 19 insertions(+), 19 deletions(-) rename loopy/schedule/checker/{sched_check_utils.py => utils.py} (100%) diff --git a/loopy/schedule/checker/__init__.py b/loopy/schedule/checker/__init__.py index e8ad05d82..868de4466 100644 --- a/loopy/schedule/checker/__init__.py +++ b/loopy/schedule/checker/__init__.py @@ -83,7 +83,7 @@ def check_linearization_validity( from loopy.schedule.checker.lexicographic_order_map import ( get_statement_ordering_map, ) - from loopy.schedule.checker.sched_check_utils import ( + from 
loopy.schedule.checker.utils import ( prettier_map_string, ) @@ -196,7 +196,7 @@ def check_linearization_validity( # check to see whether the constraint map is a subset of the SIO # (spaces must be aligned so that the variables in the constraint map # correspond to the same variables in the SIO) - from loopy.schedule.checker.sched_check_utils import ( + from loopy.schedule.checker.utils import ( align_isl_maps_by_var_names, ) diff --git a/loopy/schedule/checker/dependency.py b/loopy/schedule/checker/dependency.py index e78610c57..3ba1bf152 100644 --- a/loopy/schedule/checker/dependency.py +++ b/loopy/schedule/checker/dependency.py @@ -228,7 +228,7 @@ def create_dependency_constraint( """ - from loopy.schedule.checker.sched_check_utils import ( + from loopy.schedule.checker.utils import ( make_islvars_with_marker, append_apostrophes, add_dims_to_isl_set, @@ -308,7 +308,7 @@ def create_dependency_constraint( p_tuple[p_tuple.index(outside_iname)+1:]]) nested_inside[outside_iname] = nested_inside_inames - from loopy.schedule.checker.sched_check_utils import ( + from loopy.schedule.checker.utils import ( get_orderings_of_length_n) # get all orderings that are explicitly allowed by priorities orders = get_orderings_of_length_n( @@ -421,7 +421,7 @@ def _create_5pt_stencil_dependency_constraint( ): """ WIP: NO NEED TO REVIEW YET """ - from loopy.schedule.checker.sched_check_utils import ( + from loopy.schedule.checker.utils import ( make_islvars_with_marker, append_apostrophes, add_dims_to_isl_set, @@ -431,7 +431,7 @@ def _create_5pt_stencil_dependency_constraint( # This function uses the dependency given to create the following constraint: # Statement [s,i,j] comes before statement [s',i',j'] iff - from loopy.schedule.checker.sched_check_utils import ( + from loopy.schedule.checker.utils import ( list_var_names_in_isl_sets, ) if all_dom_inames_ordered is None: @@ -530,7 +530,7 @@ def create_arbitrary_dependency_constraint( # TODO test after switching primes to before vars - from loopy.schedule.checker.sched_check_utils import ( + from loopy.schedule.checker.utils import ( make_islvars_with_marker, #append_apostrophes, append_marker_to_strings, @@ -541,7 +541,7 @@ def create_arbitrary_dependency_constraint( # This function uses the constraint given to create the following map: # Statement [s,i,j] comes before statement [s',i',j'] iff - from loopy.schedule.checker.sched_check_utils import ( + from loopy.schedule.checker.utils import ( list_var_names_in_isl_sets, ) if all_dom_inames_ordered is None: @@ -685,7 +685,7 @@ def create_dependencies_from_legacy_knl(knl): # Introduce SAME dep for set of shared, non-concurrent inames - from loopy.schedule.checker.sched_check_utils import ( + from loopy.schedule.checker.utils import ( get_concurrent_inames, get_all_nonconcurrent_insn_iname_subsets, get_linearization_item_ids_within_inames, diff --git a/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py b/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py index ceb4a0ce9..025205afd 100644 --- a/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py +++ b/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py @@ -23,7 +23,7 @@ THE SOFTWARE. 
""" WIP: NO NEED TO REVIEW YET """ import loopy as lp import numpy as np -from loopy.schedule.checker.sched_check_utils import ( +from loopy.schedule.checker.utils import ( create_graph_from_pairs, ) from loopy.schedule.checker.dependency import ( @@ -346,7 +346,7 @@ print("="*80) # for which deps does the intersection with the SAME dependency relation exist? # create a graph including these deps as edges (from after->before) -from loopy.schedule.checker.sched_check_utils import ( +from loopy.schedule.checker.utils import ( get_concurrent_inames, ) _, non_conc_inames = get_concurrent_inames(knl) diff --git a/loopy/schedule/checker/experimental_scripts/example_wave_equation.py b/loopy/schedule/checker/experimental_scripts/example_wave_equation.py index 3b5e25c9b..2a1c84ae6 100644 --- a/loopy/schedule/checker/experimental_scripts/example_wave_equation.py +++ b/loopy/schedule/checker/experimental_scripts/example_wave_equation.py @@ -29,7 +29,7 @@ import numpy as np import islpy as isl #from loopy.kernel_stat_collector import KernelStatCollector #from loopy.kernel_stat_collector import KernelStatOptions as kso # noqa -from loopy.schedule.checker.sched_check_utils import ( +from loopy.schedule.checker.utils import ( prettier_map_string, reorder_dims_by_name, append_marker_to_isl_map_var_names, @@ -309,7 +309,7 @@ if not linearization_is_valid: print("loop priority known:") print(preprocessed_knl.loop_priority) """ - from loopy.schedule.checker.sched_check_utils import ( + from loopy.schedule.checker.utils import ( get_concurrent_inames, ) conc_inames, non_conc_inames = get_concurrent_inames(linearized_knl) @@ -598,7 +598,7 @@ if not linearization_is_valid: print("loop priority known:") print(preprocessed_knl.loop_priority) """ - from loopy.schedule.checker.sched_check_utils import ( + from loopy.schedule.checker.utils import ( get_concurrent_inames, ) conc_inames, non_conc_inames = get_concurrent_inames(linearized_knl) diff --git a/loopy/schedule/checker/lexicographic_order_map.py b/loopy/schedule/checker/lexicographic_order_map.py index ec0de6fc0..04478cf43 100644 --- a/loopy/schedule/checker/lexicographic_order_map.py +++ b/loopy/schedule/checker/lexicographic_order_map.py @@ -136,7 +136,7 @@ def create_lex_order_map( if before_names is None: before_names = ["i%s" % (i) for i in range(n_dims)] if after_names is None: - from loopy.schedule.checker.sched_check_utils import ( + from loopy.schedule.checker.utils import ( append_marker_to_strings, ) after_names = append_marker_to_strings(before_names, marker="_") diff --git a/loopy/schedule/checker/schedule.py b/loopy/schedule/checker/schedule.py index 0d982519b..65fa7a684 100644 --- a/loopy/schedule/checker/schedule.py +++ b/loopy/schedule/checker/schedule.py @@ -233,7 +233,7 @@ class LexSchedule(object): # sometimes be able to skip increment, but it's not hurting anything # TODO might not need this increment period? 
elif isinstance(linearization_item, (RunInstruction, Barrier)): - from loopy.schedule.checker.sched_check_utils import ( + from loopy.schedule.checker.utils import ( _get_insn_id_from_linearization_item, ) lp_insn_id = _get_insn_id_from_linearization_item(linearization_item) @@ -373,12 +373,12 @@ class LexSchedule(object): """ - from loopy.schedule.checker.sched_check_utils import ( + from loopy.schedule.checker.utils import ( create_symbolic_isl_map_from_tuples, add_dims_to_isl_set ) - from loopy.schedule.checker.sched_check_utils import ( + from loopy.schedule.checker.utils import ( list_var_names_in_isl_sets, ) if dom_inames_ordered_before is None: @@ -391,7 +391,7 @@ class LexSchedule(object): # create an isl space # {('statement', used in >=1 statement domain>) -> # (lexicographic ordering dims)} - from loopy.schedule.checker.sched_check_utils import ( + from loopy.schedule.checker.utils import ( get_isl_space ) params_sched = [] diff --git a/loopy/schedule/checker/sched_check_utils.py b/loopy/schedule/checker/utils.py similarity index 100% rename from loopy/schedule/checker/sched_check_utils.py rename to loopy/schedule/checker/utils.py -- GitLab From c45ae8ba35b3e7c08960b5ec15b7752f9ddc0eac Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Thu, 19 Mar 2020 08:19:44 -0500 Subject: [PATCH 224/415] fix docstring arg/returns syntax --- loopy/schedule/checker/dependency.py | 30 +++++------ .../checker/lexicographic_order_map.py | 24 ++++----- loopy/schedule/checker/schedule.py | 10 ++-- loopy/schedule/checker/utils.py | 54 +++++++++---------- 4 files changed, 59 insertions(+), 59 deletions(-) diff --git a/loopy/schedule/checker/dependency.py b/loopy/schedule/checker/dependency.py index 3ba1bf152..348d08ae7 100644 --- a/loopy/schedule/checker/dependency.py +++ b/loopy/schedule/checker/dependency.py @@ -144,19 +144,19 @@ def create_elementwise_comparison_conjunction_set( """Create a set constrained by the conjunction of conditions comparing `names0` to `names1`. - .. arg names0: A list of :class:`str` representing variable names. + :arg names0: A list of :class:`str` representing variable names. - .. arg names1: A list of :class:`str` representing variable names. + :arg names1: A list of :class:`str` representing variable names. - .. arg islvars: A dictionary from variable names to :class:`islpy.PwAff` + :arg islvars: A dictionary from variable names to :class:`islpy.PwAff` instances that represent each of the variables (islvars may be produced by `islpy.make_zero_and_vars`). The key '0' is also include and represents a :class:`islpy.PwAff` zero constant. - .. arg op: A :class:`str` describing the operator to use when creating + :arg op: A :class:`str` describing the operator to use when creating the set constraints. Options: `eq` for `=`, `lt` for `<` - .. return: A set involving `islvars` cosntrained by the constraints + :returns: A set involving `islvars` cosntrained by the constraints `{names0[0] names1[0] and names0[1] names1[1] and ...}`. """ @@ -199,31 +199,31 @@ def create_dependency_constraint( specified condition on inames ``i',j',i,j`` is met. ``i'`` and ``j'`` are the values of inames ``i`` and ``j`` in first statement instance. - .. arg statement_dep_set: A :class:`StatementPairDependencySet` describing + :arg statement_dep_set: A :class:`StatementPairDependencySet` describing the dependency relationship between the two statements. - .. 
arg loop_priorities: A list of tuples from the ``loop_priority`` + :arg loop_priorities: A list of tuples from the ``loop_priority`` attribute of :class:`loopy.LoopKernel` specifying the loop nest ordering rules. - .. arg insn_id_to_int: A :class:`dict` mapping insn_id to int_id, where + :arg insn_id_to_int: A :class:`dict` mapping insn_id to int_id, where 'insn_id' and 'int_id' refer to the 'insn_id' and 'int_id' attributes of :class:`loopy.schedule.checker.schedule.LexScheduleStatement`. - .. arg statement_var_name: A :class:`str` specifying the name of the + :arg statement_var_name: A :class:`str` specifying the name of the isl variable used to represent the unique :class:`int` statement id. - .. arg statement_var_pose: A :class:`int` specifying which position in the + :arg statement_var_pose: A :class:`int` specifying which position in the statement instance tuples holds the dimension representing the statement id. Defaults to ``0``. - .. arg all_dom_inames_ordered_before: A :class:`list` of :class:`str` + :arg all_dom_inames_ordered_before: A :class:`list` of :class:`str` specifying an order for the dimensions representing dependee inames. - .. arg all_dom_inames_ordered_after: A :class:`list` of :class:`str` + :arg all_dom_inames_ordered_after: A :class:`list` of :class:`str` specifying an order for the dimensions representing depender inames. - .. return: An :class:`islpy.Map` mapping each statement instance to all + :returns: An :class:`islpy.Map` mapping each statement instance to all statement instances that must occur later according to the constraints. """ @@ -764,10 +764,10 @@ def get_dependency_sources_and_sinks(knl, linearization_item_ids): 'happens before' relationship specfied by each legacy dependency between two instructions. Return the sources and sinks within this graph. - .. arg linearization_item_ids: A :class:`list` of :class:`str` representing + :arg linearization_item_ids: A :class:`list` of :class:`str` representing loopy instruction ids. - .. return: Two instances of :class:`set` of :class:`str` instruction ids + :returns: Two instances of :class:`set` of :class:`str` instruction ids representing the sources and sinks in the dependency graph. """ diff --git a/loopy/schedule/checker/lexicographic_order_map.py b/loopy/schedule/checker/lexicographic_order_map.py index 04478cf43..ea679b3b4 100644 --- a/loopy/schedule/checker/lexicographic_order_map.py +++ b/loopy/schedule/checker/lexicographic_order_map.py @@ -28,15 +28,15 @@ def get_statement_ordering_map( """Return a mapping that maps each statement instance to all statement instances occuring later. - .. arg sched_map_before: An :class:`islpy.Map` representing instruction + :arg sched_map_before: An :class:`islpy.Map` representing instruction instance order for the dependee as a mapping from each statement instance to a point in the lexicographic ordering. - .. arg sched_map_after: An :class:`islpy.Map` representing instruction + :arg sched_map_after: An :class:`islpy.Map` representing instruction instance order for the depender as a mapping from each statement instance to a point in the lexicographic ordering. - .. arg lex_map: An :class:`islpy.Map` representing a lexicographic + :arg lex_map: An :class:`islpy.Map` representing a lexicographic ordering as a mapping from each point in lexicographic time to every point that occurs later in lexicographic time. E.g.:: @@ -44,7 +44,7 @@ def get_statement_ordering_map( i0' < i0 or (i0' = i0 and i1' < i1) or (i0' = i0 and i1' = i1 and i2' < i2) ...} - .. 
return: An :class:`islpy.Map` representing the lex schedule as + :returns: An :class:`islpy.Map` representing the lex schedule as a mapping from each statement instance to all statement instances occuring later. I.e., we compose B -> L -> A^-1, where B is sched_map_before, A is sched_map_after, and L is the @@ -66,21 +66,21 @@ def get_lex_order_constraint(islvars, before_names, after_names): defining a 'happens before' relationship in a lexicographic ordering. - .. arg islvars: A dictionary from variable names to :class:`islpy.PwAff` + :arg islvars: A dictionary from variable names to :class:`islpy.PwAff` instances that represent each of the variables (islvars may be produced by `islpy.make_zero_and_vars`). The key '0' is also include and represents a :class:`islpy.PwAff` zero constant. This dictionary defines the space to be used for the set. - .. arg before_names: A list of :class:`str` variable names representing + :arg before_names: A list of :class:`str` variable names representing the lexicographic space dimensions for a point in lexicographic time that occurs before. (see example below) - .. arg after_names: A list of :class:`str` variable names representing + :arg after_names: A list of :class:`str` variable names representing the lexicographic space dimensions for a point in lexicographic time that occurs after. (see example below) - .. return: An :class:`islpy.Set` representing a constraint that enforces a + :returns: An :class:`islpy.Set` representing a constraint that enforces a lexicographic ordering. E.g., if ``before_names = [i0', i1', i2']`` and ``after_names = [i0, i1, i2]``, return the set:: @@ -110,18 +110,18 @@ def create_lex_order_map( ordering to every point that occurs later in lexicographic time. - .. arg n_dims: An :class:`int` representing the number of dimensions + :arg n_dims: An :class:`int` representing the number of dimensions in the lexicographic ordering. - .. arg before_names: A list of :class:`str` variable names representing + :arg before_names: A list of :class:`str` variable names representing the lexicographic space dimensions for a point in lexicographic time that occurs before. (see example below) - .. arg after_names: A list of :class:`str` variable names representing + :arg after_names: A list of :class:`str` variable names representing the lexicographic space dimensions for a point in lexicographic time that occurs after. (see example below) - .. return: An :class:`islpy.Map` representing a lexicographic + :returns: An :class:`islpy.Map` representing a lexicographic ordering as a mapping from each point in lexicographic time to every point that occurs later in lexicographic time. E.g., if ``before_names = [i0', i1', i2']`` and diff --git a/loopy/schedule/checker/schedule.py b/loopy/schedule/checker/schedule.py index 65fa7a684..323019eda 100644 --- a/loopy/schedule/checker/schedule.py +++ b/loopy/schedule/checker/schedule.py @@ -348,25 +348,25 @@ class LexSchedule(object): from statement instances to lexicographic time, one for the dependee and one for the depender. - .. arg dom_before: A :class:`islpy.BasicSet` representing the + :arg dom_before: A :class:`islpy.BasicSet` representing the domain for the dependee statement. - .. arg dom_after: A :class:`islpy.BasicSet` representing the + :arg dom_after: A :class:`islpy.BasicSet` representing the domain for the dependee statement. - .. 
arg dom_inames_ordered_before: A list of :class:`str` + :arg dom_inames_ordered_before: A list of :class:`str` representing the union of inames used in instances of the dependee statement. ``statement_var_name`` and ``dom_inames_ordered_before`` are the names of the dims of the space of the ISL map domain for the dependee. - .. arg dom_inames_ordered_after: A list of :class:`str` + :arg dom_inames_ordered_after: A list of :class:`str` representing the union of inames used in instances of the depender statement. ``statement_var_name`` and ``dom_inames_ordered_after`` are the names of the dims of the space of the ISL map domain for the depender. - .. return: A two-tuple containing two :class:`islpy.Map`s + :returns: A two-tuple containing two :class:`islpy.Map`s representing the schedule as two mappings from statement instances to lexicographic time, one for the dependee and one for the depender. diff --git a/loopy/schedule/checker/utils.py b/loopy/schedule/checker/utils.py index 5cf50e6a0..048da1c72 100644 --- a/loopy/schedule/checker/utils.py +++ b/loopy/schedule/checker/utils.py @@ -54,26 +54,26 @@ def reorder_dims_by_name( add_missing=False, new_names_are_permutation_only=False): """Return an isl_set with the dimensions in the specified order. - .. arg isl_set: A :class:`islpy.Set` whose dimensions are + :arg isl_set: A :class:`islpy.Set` whose dimensions are to be reordered. - .. arg dim_type: A :class:`islpy.dim_type`, i.e., an :class:`int`, + :arg dim_type: A :class:`islpy.dim_type`, i.e., an :class:`int`, specifying the dimension to be reordered. - .. arg desired_dims_ordered: A :class:`list` of :class:`str` elements + :arg desired_dims_ordered: A :class:`list` of :class:`str` elements representing the desired dimensions order by dimension name. - .. arg add_missing: A :class:`bool` specifying whether to insert + :arg add_missing: A :class:`bool` specifying whether to insert dimensions (by name) found in `desired_dims_ordered` that are not present in `isl_set`. - .. arg new_names_are_permutation_only: A :class:`bool` indicating that + :arg new_names_are_permutation_only: A :class:`bool` indicating that `desired_dims_ordered` contains the same names as the specified dimensions in `isl_set`, and does not, e.g., contain additional dimension names not found in `isl_set`. If set to True, and these two sets of names do not match, an error is produced. - .. return: An :class:`islpy.Set` matching `isl_set` with the + :returns: An :class:`islpy.Set` matching `isl_set` with the dimension order matching `desired_dims_ordered`, optionally including additional dimensions present in `desred_dims_ordered` that are not present in `isl_set`. @@ -152,9 +152,9 @@ def create_new_isl_set_with_primes(old_isl_set, marker="'"): """Return an isl_set with apostrophes appended to dim_type.set dimension names. - .. arg old_isl_set: A :class:`islpy.Set`. + :arg old_isl_set: A :class:`islpy.Set`. - .. return: A :class:`islpy.Set` matching `old_isl_set` with + :returns: A :class:`islpy.Set` matching `old_isl_set` with apostrophes appended to dim_type.set dimension names. """ @@ -172,12 +172,12 @@ def append_marker_to_isl_map_var_names(old_isl_map, dim_type, marker="'"): """Return an isl_map with marker appended to dim_type dimension names. - .. arg old_isl_map: A :class:`islpy.Map`. + :arg old_isl_map: A :class:`islpy.Map`. - .. arg dim_type: A :class:`islpy.dim_type`, i.e., an :class:`int`, + :arg dim_type: A :class:`islpy.dim_type`, i.e., an :class:`int`, specifying the dimension to be marked. - .. 
return: A :class:`islpy.Map` matching `old_isl_map` with + :returns: A :class:`islpy.Map` matching `old_isl_map` with apostrophes appended to dim_type dimension names. """ @@ -196,16 +196,16 @@ def make_islvars_with_marker( the variables and parameters, appending marker to var_names_needing_marker. - .. arg var_names_needing_marker: A :class:`list` of :class:`str` + :arg var_names_needing_marker: A :class:`list` of :class:`str` elements representing variable names to have markers appended. - .. arg other_var_names: A :class:`list` of :class:`str` + :arg other_var_names: A :class:`list` of :class:`str` elements representing variable names to be included as-is. - .. arg param_names: A :class:`list` of :class:`str` elements + :arg param_names: A :class:`list` of :class:`str` elements representing parameter names. - .. return: A dictionary from variable names to :class:`islpy.PwAff` + :returns: A dictionary from variable names to :class:`islpy.PwAff` instances that represent each of the variables (islvars may be produced by `islpy.make_zero_and_vars`). The key '0' is also include and represents a :class:`islpy.PwAff` zero constant. @@ -259,19 +259,19 @@ def create_symbolic_isl_map_from_tuples( mapping input->output tuples provided in `tuple_pairs_with_domains`, with each set of tuple variables constrained by the domains provided. - .. arg tuple_pairs_with_domains: A :class:`list` with each element being + :arg tuple_pairs_with_domains: A :class:`list` with each element being a tuple of the form `((tup_in, tup_out), domain)`. `tup_in` and `tup_out` are tuples containing elements of type :class:`int` and :class:`str` representing values for the input and output dimensions in `space`, and `domain` is a :class:`islpy.Set` constraining variable bounds. - .. arg space: A :class:`islpy.Space` to be used to create the map. + :arg space: A :class:`islpy.Space` to be used to create the map. - .. arg statement_var_name: A :class:`str` specifying the name of the + :arg statement_var_name: A :class:`str` specifying the name of the isl variable used to represent the unique :class:`int` statement id. - .. return: A :class:`islpy.Map` constructed using the provided space + :returns: A :class:`islpy.Map` constructed using the provided space as follows. For each `((tup_in, tup_out), domain)` in `tuple_pairs_with_domains`, map `(tup_in)->(tup_out) : domain`, where `tup_in` and `tup_out` are @@ -430,15 +430,15 @@ def get_all_nonconcurrent_insn_iname_subsets( """Return a :class:`set` of every unique subset of non-concurrent inames used in an instruction in a :class:`loopy.LoopKernel`. - .. arg knl: A :class:`loopy.LoopKernel`. + :arg knl: A :class:`loopy.LoopKernel`. - .. arg exclude_empty: A :class:`bool` specifying whether to + :arg exclude_empty: A :class:`bool` specifying whether to exclude the empty set. - .. arg non_conc_inames: A :class:`set` of non-concurrent inames + :arg non_conc_inames: A :class:`set` of non-concurrent inames which may be provided if already known. - .. return: A :class:`set` of every unique subset of non-concurrent + :returns: A :class:`set` of every unique subset of non-concurrent inames used in any instruction in a :class:`loopy.LoopKernel`. """ @@ -509,18 +509,18 @@ def get_orderings_of_length_n( allowed_after_dict, required_length, return_first_found=False): """Return all orderings found in tree represented by `allowed_after_dict`. - .. 
arg allowed_after_dict: A :class:`dict` mapping each :class:`string` + :arg allowed_after_dict: A :class:`dict` mapping each :class:`string` names to a :class:`set` of names that are allowed to come after that name. - .. arg required_length: A :class:`int` representing the length required + :arg required_length: A :class:`int` representing the length required for all orderings. Orderings not matching the required length will not be returned. - .. arg return_first_found: A :class:`bool` specifying whether to return + :arg return_first_found: A :class:`bool` specifying whether to return the first valid ordering found. - .. return: A :class:`set` of all orderings that are *explicitly* allowed + :returns: A :class:`set` of all orderings that are *explicitly* allowed by the tree represented by `allowed_after_dict`. I.e., if we know a->b and c->b, we don't know enough to return a->c->b. Note that if the set for a dict key is empty, nothing is allowed to come after. -- GitLab From 0cca56e3a82ad8a2e2be2ae7f31f74ecc7fefbf9 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Thu, 19 Mar 2020 08:32:03 -0500 Subject: [PATCH 225/415] indent literal text in docstrings --- .../checker/lexicographic_order_map.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/loopy/schedule/checker/lexicographic_order_map.py b/loopy/schedule/checker/lexicographic_order_map.py index ea679b3b4..2e063e7d7 100644 --- a/loopy/schedule/checker/lexicographic_order_map.py +++ b/loopy/schedule/checker/lexicographic_order_map.py @@ -40,9 +40,9 @@ def get_statement_ordering_map( ordering as a mapping from each point in lexicographic time to every point that occurs later in lexicographic time. E.g.:: - {[i0', i1', i2', ...] -> [i0, i1, i2, ...] : - i0' < i0 or (i0' = i0 and i1' < i1) - or (i0' = i0 and i1' = i1 and i2' < i2) ...} + {[i0', i1', i2', ...] -> [i0, i1, i2, ...] : + i0' < i0 or (i0' = i0 and i1' < i1) + or (i0' = i0 and i1' = i1 and i2' < i2) ...} :returns: An :class:`islpy.Map` representing the lex schedule as a mapping from each statement instance to all statement instances @@ -84,9 +84,9 @@ def get_lex_order_constraint(islvars, before_names, after_names): lexicographic ordering. E.g., if ``before_names = [i0', i1', i2']`` and ``after_names = [i0, i1, i2]``, return the set:: - {[i0', i1', i2', i0, i1, i2] : - i0' < i0 or (i0' = i0 and i1' < i1) - or (i0' = i0 and i1' = i1 and i2' < i2)} + {[i0', i1', i2', i0, i1, i2] : + i0' < i0 or (i0' = i0 and i1' < i1) + or (i0' = i0 and i1' = i1 and i2' < i2)} """ @@ -127,9 +127,9 @@ def create_lex_order_map( E.g., if ``before_names = [i0', i1', i2']`` and ``after_names = [i0, i1, i2]``, return the map:: - {[i0', i1', i2'] -> [i0, i1, i2] : - i0' < i0 or (i0' = i0 and i1' < i1) - or (i0' = i0 and i1' = i1 and i2' < i2)} + {[i0', i1', i2'] -> [i0, i1, i2] : + i0' < i0 or (i0' = i0 and i1' < i1) + or (i0' = i0 and i1' = i1 and i2' < i2)} """ -- GitLab From f92f27cf58df4d9dad6b93a6dd6d33f8410e9aab Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Thu, 19 Mar 2020 08:58:33 -0500 Subject: [PATCH 226/415] change Loopy->:mod: in docstrings --- loopy/schedule/checker/dependency.py | 2 +- loopy/schedule/checker/schedule.py | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/loopy/schedule/checker/dependency.py b/loopy/schedule/checker/dependency.py index 348d08ae7..c193752be 100644 --- a/loopy/schedule/checker/dependency.py +++ b/loopy/schedule/checker/dependency.py @@ -76,7 +76,7 @@ class StatementPairDependencySet(object): .. 
attribute:: deps A :class:`dict` mapping instances of :class:`DependencyType` to - the Loopy kernel inames involved in that particular + the :mod:`loopy` kernel inames involved in that particular dependency relationship. .. attribute:: dom_before diff --git a/loopy/schedule/checker/schedule.py b/loopy/schedule/checker/schedule.py index 323019eda..6e0d58bf6 100644 --- a/loopy/schedule/checker/schedule.py +++ b/loopy/schedule/checker/schedule.py @@ -24,7 +24,7 @@ import islpy as isl class LexScheduleStatement(object): - """A representation of a Loopy statement. + """A representation of a :mod:`loopy` statement. .. attribute:: insn_id @@ -84,7 +84,7 @@ class LexScheduleStatement(object): class LexScheduleStatementInstance(object): - """A representation of a Loopy statement instance. + """A representation of a :mod:`loopy` statement instance. .. attribute:: stmt @@ -92,7 +92,7 @@ class LexScheduleStatementInstance(object): .. attribute:: lex_pt - A list of :class:`int` or as :class:`str` Loopy inames representing + A list of :class:`int` or as :class:`str` :mod:`loopy` inames representing a point or set of points in a lexicographic ordering. """ @@ -119,7 +119,7 @@ class LexSchedule(object): statement's order relative to the depender statment by mapping a statement to a point or set of points in a lexicographic ordering. Points in lexicographic ordering are represented as - a list of :class:`int` or as :class:`str` Loopy inames. + a list of :class:`int` or as :class:`str` :mod:`loopy` inames. .. attribute:: stmt_instance_after @@ -127,7 +127,7 @@ class LexSchedule(object): statement's order relative to the dependee statment by mapping a statement to a point or set of points in a lexicographic ordering. Points in lexicographic ordering are represented as - a list of :class:`int` or as :class:`str` Loopy inames. + a list of :class:`int` or as :class:`str` :mod:`loopy` inames. .. attribute:: statement_var_name -- GitLab From 2bee49cf539d9ecaea4c6e2856df4dc4f16a5a31 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sat, 11 Apr 2020 17:29:17 -0500 Subject: [PATCH 227/415] remove unused argument in check_linearization_validity: _use_linearized_kernel_to_obtain_loop_priority --- loopy/schedule/checker/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/loopy/schedule/checker/__init__.py b/loopy/schedule/checker/__init__.py index 868de4466..6a455319f 100644 --- a/loopy/schedule/checker/__init__.py +++ b/loopy/schedule/checker/__init__.py @@ -73,7 +73,7 @@ def check_linearization_validity( linearization_items, prohibited_var_names=set(), verbose=False, - _use_linearized_kernel_to_obtain_loop_priority=False): # TODO unused arg? 
+ ): # TODO document from loopy.schedule.checker.dependency import ( -- GitLab From 1fdd11816c8bf26326e3b81a5bf2c2696e4ee4d2 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sat, 11 Apr 2020 17:41:22 -0500 Subject: [PATCH 228/415] add todo about name conflicts --- loopy/schedule/checker/schedule.py | 1 + 1 file changed, 1 insertion(+) diff --git a/loopy/schedule/checker/schedule.py b/loopy/schedule/checker/schedule.py index 6e0d58bf6..43cb7eaf0 100644 --- a/loopy/schedule/checker/schedule.py +++ b/loopy/schedule/checker/schedule.py @@ -177,6 +177,7 @@ class LexSchedule(object): self.stmt_instance_after = None # make sure we don't have an iname name conflict + # TODO use loopy's existing tool for ensuring unique var names assert not any( iname == self.statement_var_name for iname in prohibited_var_names) -- GitLab From cbd0ed5b17f2fdb487683aff2bf9f00c07975ceb Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sat, 11 Apr 2020 18:33:46 -0500 Subject: [PATCH 229/415] eliminiate need to pass knl to LexSchedule constructor by instead passing in loops_to_ignore (currently includes any loops with parallel tags, which shouldn't be present in linearization after updates) --- loopy/schedule/checker/__init__.py | 26 ++++++++++++++++++- .../example_wave_equation.py | 17 +++++++++--- loopy/schedule/checker/schedule.py | 19 +++----------- 3 files changed, 42 insertions(+), 20 deletions(-) diff --git a/loopy/schedule/checker/__init__.py b/loopy/schedule/checker/__init__.py index 6a455319f..3d7aacafc 100644 --- a/loopy/schedule/checker/__init__.py +++ b/loopy/schedule/checker/__init__.py @@ -65,6 +65,17 @@ def statement_pair_dep_sets_from_legacy_knl(knl): return create_dependencies_from_legacy_knl(preprocessed_knl) +def _get_concurrent_loop_inames(linearization_items, knl): + from loopy.kernel.data import ConcurrentTag + from loopy.schedule import EnterLoop + conc_loop_inames = set() + for linearization_item in linearization_items: + if (isinstance(linearization_item, EnterLoop) and + knl.iname_tags_of_type(linearization_item.iname, ConcurrentTag)): + conc_loop_inames.add(linearization_item.iname) + return conc_loop_inames + + # TODO work on granularity of encapsulation, encapsulate some of this in # separate functions def check_linearization_validity( @@ -95,6 +106,19 @@ def check_linearization_validity( if not prohibited_var_names: prohibited_var_names = preprocessed_knl.all_inames() + # Get EnterLoop inames tagged as concurrent so LexSchedule can ignore + # TODO: In the future, this shouldn't be necessary because there + # won't be any inames with ConcurrentTags in the loopy sched. + # Test exercising this: test_linearization_checker_with_stroud_bernstein() + conc_loop_inames = _get_concurrent_loop_inames( + linearization_items, preprocessed_knl) + if conc_loop_inames: + from warnings import warn + warn( + "check_linearization_validity encountered EnterLoop for inames %s " + "with ConcurrentTag(s) in linearization for kernel %s. " + "Ignoring these loops." 
% (conc_loop_inames, knl.name)) + if verbose: print("="*80) print("Kernel: %s" % (knl.name)) @@ -131,11 +155,11 @@ def check_linearization_validity( # Create LexSchedule: mapping of {statement instance: lex point} # include only instructions involved in this dependency sched = LexSchedule( - preprocessed_knl, linearization_items, s_before.insn_id, s_after.insn_id, prohibited_var_names=prohibited_var_names, + loops_to_ignore=conc_loop_inames, ) lp_insn_id_to_lex_sched_id = sched.loopy_insn_id_to_lex_sched_id() diff --git a/loopy/schedule/checker/experimental_scripts/example_wave_equation.py b/loopy/schedule/checker/experimental_scripts/example_wave_equation.py index 2a1c84ae6..0eacfb0dc 100644 --- a/loopy/schedule/checker/experimental_scripts/example_wave_equation.py +++ b/loopy/schedule/checker/experimental_scripts/example_wave_equation.py @@ -29,6 +29,7 @@ import numpy as np import islpy as isl #from loopy.kernel_stat_collector import KernelStatCollector #from loopy.kernel_stat_collector import KernelStatOptions as kso # noqa +from loopy.schedule.checker import _get_concurrent_loop_inames from loopy.schedule.checker.utils import ( prettier_map_string, reorder_dims_by_name, @@ -172,10 +173,17 @@ if verbose: # }}} +conc_loop_inames = _get_concurrent_loop_inames( + linearized_knl.linearization, preprocessed_knl) + # Create a mapping of {statement instance: lex point} # including only instructions involved in this dependency sched = LexSchedule( - linearized_knl, linearized_knl.linearization, str(sid_before), str(sid_after)) + linearized_knl.linearization, + str(sid_before), + str(sid_after), + loops_to_ignore=conc_loop_inames, + ) # Get an isl map representing the LexSchedule; # this requires the iname domains @@ -457,13 +465,16 @@ if verbose: # }}} + +conc_loop_inames = _get_concurrent_loop_inames( + linearized_knl.linearization, preprocessed_knl) # Create a mapping of {statement instance: lex point} # including only instructions involved in this dependency sched = LexSchedule( - linearized_knl, linearized_knl.linearization, str(sid_before), - str(sid_after) + str(sid_after), + loops_to_ignore=conc_loop_inames, ) # Get an isl map representing the LexSchedule; diff --git a/loopy/schedule/checker/schedule.py b/loopy/schedule/checker/schedule.py index 43cb7eaf0..9aeae254c 100644 --- a/loopy/schedule/checker/schedule.py +++ b/loopy/schedule/checker/schedule.py @@ -147,16 +147,13 @@ class LexSchedule(object): def __init__( self, - knl, linearization_items_ordered, before_insn_id, after_insn_id, prohibited_var_names=[], + loops_to_ignore=set(), ): """ - :arg knl: A :class:`loopy.LoopKernel` whose schedule items will be - described by this :class:`LexSchedule`. - :arg linearization_items_ordered: A list of :class:`ScheduleItem` whose order will be described by this :class:`LexSchedule`. 
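
(To make the revised calling convention concrete, a minimal usage sketch follows, assuming a kernel on this branch that has already been preprocessed and linearized; the helper name, the kernel variable, and the instruction ids "insn_a"/"insn_b" are placeholders mirroring ``_get_concurrent_loop_inames`` above, not part of the patch itself:)

    from loopy.schedule import EnterLoop
    from loopy.kernel.data import ConcurrentTag

    def gather_concurrent_loop_inames(linearization_items, knl):
        # Collect EnterLoop inames carrying a ConcurrentTag so that
        # LexSchedule can skip them when building lexicographic points.
        conc_inames = set()
        for item in linearization_items:
            if (isinstance(item, EnterLoop)
                    and knl.iname_tags_of_type(item.iname, ConcurrentTag)):
                conc_inames.add(item.iname)
        return conc_inames

    # Hypothetical call; "insn_a"/"insn_b" stand in for real instruction ids.
    # sched = LexSchedule(
    #     knl.linearization, "insn_a", "insn_b",
    #     loops_to_ignore=gather_concurrent_loop_inames(knl.linearization, knl))
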
@@ -182,7 +179,6 @@ class LexSchedule(object): iname == self.statement_var_name for iname in prohibited_var_names) from loopy.schedule import (EnterLoop, LeaveLoop, Barrier, RunInstruction) - from loopy.kernel.data import ConcurrentTag # go through linearization_items_ordered and generate self.lex_schedule @@ -193,14 +189,7 @@ class LexSchedule(object): for linearization_item in linearization_items_ordered: if isinstance(linearization_item, EnterLoop): iname = linearization_item.iname - if knl.iname_tags_of_type(iname, ConcurrentTag): - # In the future, this should be unnecessary because there - # won't be any inames with ConcurrentTags in the loopy sched - from warnings import warn - warn( - "LexSchedule.__init__: Encountered EnterLoop for iname %s " - "with ConcurrentTag(s) in linearization for kernel %s. " - "Ignoring this loop." % (iname, knl.name)) + if iname in loops_to_ignore: continue # if the schedule is empty, this is the first schedule item, so @@ -219,9 +208,7 @@ class LexSchedule(object): next_insn_lex_pt.append(iname) next_insn_lex_pt.append(0) elif isinstance(linearization_item, LeaveLoop): - if knl.iname_tags_of_type(linearization_item.iname, ConcurrentTag): - # In the future, this should be unnecessary because there - # won't be any inames with ConcurrentTags in the loopy sched + if linearization_item.iname in loops_to_ignore: continue # upon leaving a loop, # pop lex dimension for enumerating code blocks within this loop, and -- GitLab From 45b29f7a3c97760ccba104e6e189522b1587b552 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 20 Apr 2020 20:34:04 -0500 Subject: [PATCH 230/415] adding lexicographic_order_map.py (creates isl maps defining lex orderings and statement instance orderings) --- .../checker/lexicographic_order_map.py | 159 ++++++++++++++++++ 1 file changed, 159 insertions(+) create mode 100644 loopy/schedule/checker/lexicographic_order_map.py diff --git a/loopy/schedule/checker/lexicographic_order_map.py b/loopy/schedule/checker/lexicographic_order_map.py new file mode 100644 index 000000000..2e063e7d7 --- /dev/null +++ b/loopy/schedule/checker/lexicographic_order_map.py @@ -0,0 +1,159 @@ +__copyright__ = "Copyright (C) 2019 James Stevens" + +__license__ = """ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. +""" + +import islpy as isl + + +def get_statement_ordering_map( + sched_map_before, sched_map_after, lex_map, before_marker="'"): + """Return a mapping that maps each statement instance to + all statement instances occuring later. 
+ + :arg sched_map_before: An :class:`islpy.Map` representing instruction + instance order for the dependee as a mapping from each statement + instance to a point in the lexicographic ordering. + + :arg sched_map_after: An :class:`islpy.Map` representing instruction + instance order for the depender as a mapping from each statement + instance to a point in the lexicographic ordering. + + :arg lex_map: An :class:`islpy.Map` representing a lexicographic + ordering as a mapping from each point in lexicographic time + to every point that occurs later in lexicographic time. E.g.:: + + {[i0', i1', i2', ...] -> [i0, i1, i2, ...] : + i0' < i0 or (i0' = i0 and i1' < i1) + or (i0' = i0 and i1' = i1 and i2' < i2) ...} + + :returns: An :class:`islpy.Map` representing the lex schedule as + a mapping from each statement instance to all statement instances + occuring later. I.e., we compose B -> L -> A^-1, where B + is sched_map_before, A is sched_map_after, and L is the + lexicographic ordering map. + + """ + + sio = sched_map_before.apply_range( + lex_map).apply_range(sched_map_after.reverse()) + # append marker to in names + for i in range(sio.dim(isl.dim_type.in_)): + sio = sio.set_dim_name(isl.dim_type.in_, i, sio.get_dim_name( + isl.dim_type.in_, i)+before_marker) + return sio + + +def get_lex_order_constraint(islvars, before_names, after_names): + """Return a constraint represented as an :class:`islpy.Set` + defining a 'happens before' relationship in a lexicographic + ordering. + + :arg islvars: A dictionary from variable names to :class:`islpy.PwAff` + instances that represent each of the variables + (islvars may be produced by `islpy.make_zero_and_vars`). The key + '0' is also include and represents a :class:`islpy.PwAff` zero constant. + This dictionary defines the space to be used for the set. + + :arg before_names: A list of :class:`str` variable names representing + the lexicographic space dimensions for a point in lexicographic + time that occurs before. (see example below) + + :arg after_names: A list of :class:`str` variable names representing + the lexicographic space dimensions for a point in lexicographic + time that occurs after. (see example below) + + :returns: An :class:`islpy.Set` representing a constraint that enforces a + lexicographic ordering. E.g., if ``before_names = [i0', i1', i2']`` and + ``after_names = [i0, i1, i2]``, return the set:: + + {[i0', i1', i2', i0, i1, i2] : + i0' < i0 or (i0' = i0 and i1' < i1) + or (i0' = i0 and i1' = i1 and i2' < i2)} + + """ + + lex_order_constraint = islvars[before_names[0]].lt_set(islvars[after_names[0]]) + for i in range(1, len(before_names)): + lex_order_constraint_conj = islvars[before_names[i]].lt_set( + islvars[after_names[i]]) + for j in range(i): + lex_order_constraint_conj = lex_order_constraint_conj & \ + islvars[before_names[j]].eq_set(islvars[after_names[j]]) + lex_order_constraint = lex_order_constraint | lex_order_constraint_conj + return lex_order_constraint + + +def create_lex_order_map( + n_dims, + before_names=None, + after_names=None, + ): + """Return a mapping that maps each point in a lexicographic + ordering to every point that occurs later in lexicographic + time. + + :arg n_dims: An :class:`int` representing the number of dimensions + in the lexicographic ordering. + + :arg before_names: A list of :class:`str` variable names representing + the lexicographic space dimensions for a point in lexicographic + time that occurs before. 
(see example below) + + :arg after_names: A list of :class:`str` variable names representing + the lexicographic space dimensions for a point in lexicographic + time that occurs after. (see example below) + + :returns: An :class:`islpy.Map` representing a lexicographic + ordering as a mapping from each point in lexicographic time + to every point that occurs later in lexicographic time. + E.g., if ``before_names = [i0', i1', i2']`` and + ``after_names = [i0, i1, i2]``, return the map:: + + {[i0', i1', i2'] -> [i0, i1, i2] : + i0' < i0 or (i0' = i0 and i1' < i1) + or (i0' = i0 and i1' = i1 and i2' < i2)} + + """ + + if before_names is None: + before_names = ["i%s" % (i) for i in range(n_dims)] + if after_names is None: + from loopy.schedule.checker.utils import ( + append_marker_to_strings, + ) + after_names = append_marker_to_strings(before_names, marker="_") + + assert len(before_names) == len(after_names) == n_dims + dim_type = isl.dim_type + + islvars = isl.make_zero_and_vars( + before_names+after_names, + []) + + lex_order_constraint = get_lex_order_constraint( + islvars, before_names, after_names) + + lex_map = isl.Map.from_domain(lex_order_constraint) + lex_map = lex_map.move_dims( + dim_type.out, 0, dim_type.in_, + len(before_names), len(after_names)) + + return lex_map -- GitLab From 782dde2330328a0716bda113efc1526257c3fcbe Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 20 Apr 2020 20:35:41 -0500 Subject: [PATCH 231/415] add get_lex_order_map_for_sched_space() to schedule (gets an isl map defining the lexicographic ordering) --- loopy/schedule/checker/schedule.py | 13 +++++++++++++ loopy/schedule/checker/utils.py | 7 +++++++ 2 files changed, 20 insertions(+) diff --git a/loopy/schedule/checker/schedule.py b/loopy/schedule/checker/schedule.py index 0aca588c3..305d1f74f 100644 --- a/loopy/schedule/checker/schedule.py +++ b/loopy/schedule/checker/schedule.py @@ -405,6 +405,19 @@ class LexSchedule(object): return [self.lex_var_prefix+str(i) for i in range(self.max_lex_dims())] + def get_lex_order_map_for_sched_space(self): + """Return an :class:`islpy.BasicMap` that maps each point in a + lexicographic ordering to every point that is + lexocigraphically greater. 
+ """ + + from loopy.schedule.checker.lexicographic_order_map import ( + create_lex_order_map, + ) + n_dims = self.max_lex_dims() + return create_lex_order_map( + n_dims, before_names=self.get_lex_var_names()) + def __str__(self): def stringify_sched_stmt_instance(stmt_inst): diff --git a/loopy/schedule/checker/utils.py b/loopy/schedule/checker/utils.py index cb933de6f..8757406b7 100644 --- a/loopy/schedule/checker/utils.py +++ b/loopy/schedule/checker/utils.py @@ -143,6 +143,13 @@ def align_isl_maps_by_var_names(input_map, target_map): return aligned_input_map +def append_marker_to_strings(strings, marker="'"): + if not isinstance(strings, list): + raise ValueError("append_marker_to_strings did not receive a list") + else: + return [s+marker for s in strings] + + def _union_of_isl_sets_or_maps(set_list): union = set_list[0] for s in set_list[1:]: -- GitLab From 0e664550837299ff697d5f6947fed9d90d2cc095 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 20 Apr 2020 22:13:50 -0500 Subject: [PATCH 232/415] add function append_marker_to_in_dim_names(islmap) --- loopy/schedule/checker/lexicographic_order_map.py | 8 ++++---- loopy/schedule/checker/utils.py | 8 ++++++++ 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/loopy/schedule/checker/lexicographic_order_map.py b/loopy/schedule/checker/lexicographic_order_map.py index 2e063e7d7..61f191247 100644 --- a/loopy/schedule/checker/lexicographic_order_map.py +++ b/loopy/schedule/checker/lexicographic_order_map.py @@ -55,10 +55,10 @@ def get_statement_ordering_map( sio = sched_map_before.apply_range( lex_map).apply_range(sched_map_after.reverse()) # append marker to in names - for i in range(sio.dim(isl.dim_type.in_)): - sio = sio.set_dim_name(isl.dim_type.in_, i, sio.get_dim_name( - isl.dim_type.in_, i)+before_marker) - return sio + from loopy.schedule.checker.utils import ( + append_marker_to_in_dim_names, + ) + return append_marker_to_in_dim_names(sio, before_marker) def get_lex_order_constraint(islvars, before_names, after_names): diff --git a/loopy/schedule/checker/utils.py b/loopy/schedule/checker/utils.py index 8757406b7..96aa007c7 100644 --- a/loopy/schedule/checker/utils.py +++ b/loopy/schedule/checker/utils.py @@ -150,6 +150,14 @@ def append_marker_to_strings(strings, marker="'"): return [s+marker for s in strings] +def append_marker_to_in_dim_names(islmap, marker="'"): + # append marker to in names + for i in range(islmap.dim(isl.dim_type.in_)): + islmap = islmap.set_dim_name(isl.dim_type.in_, i, islmap.get_dim_name( + isl.dim_type.in_, i)+marker) + return islmap + + def _union_of_isl_sets_or_maps(set_list): union = set_list[0] for s in set_list[1:]: -- GitLab From ceb9015a1a18d16f0615c8f3deb9cf35f0cb9ca2 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 20 Apr 2020 22:14:38 -0500 Subject: [PATCH 233/415] test lexicographic order map creation and statement instance order creation --- test/test_linearization_checker.py | 203 +++++++++++++++++++++++++++++ 1 file changed, 203 insertions(+) diff --git a/test/test_linearization_checker.py b/test/test_linearization_checker.py index c112b40ae..5a05bdd8e 100644 --- a/test/test_linearization_checker.py +++ b/test/test_linearization_checker.py @@ -46,6 +46,8 @@ else: faulthandler.enable() +# {{{ test LexSchedule and isl map creation + def test_lexschedule_and_islmap_creation(): import islpy as isl from loopy.schedule.checker import ( @@ -362,6 +364,207 @@ def test_lexschedule_and_islmap_creation(): else: perform_insn_cd_checks_with(1, 0) +# }}} + + +# {{{ test statement instance 
ordering creation + +def test_statement_instance_ordering_creation(): + import islpy as isl + from loopy.schedule.checker import ( + get_schedule_for_statement_pair, + get_isl_maps_for_LexSchedule, + ) + from loopy.schedule.checker.utils import ( + align_isl_maps_by_var_names, + append_marker_to_in_dim_names, + ) + from loopy.schedule.checker.lexicographic_order_map import ( + get_statement_ordering_map, + ) + + # example kernel (add deps to fix loop order) + knl = lp.make_kernel( + [ + "{[i]: 0<=itemp = b[i,k] {id=insn_a} + end + for j + a[i,j] = temp + 1 {id=insn_b,dep=insn_a} + c[i,j] = d[i,j] {id=insn_c,dep=insn_b} + end + end + for t + e[t] = f[t] {id=insn_d, dep=insn_c} + end + """, + name="example", + assumptions="pi,pj,pk,pt >= 1", + lang_version=(2018, 2) + ) + knl = lp.add_and_infer_dtypes( + knl, + {"b": np.float32, "d": np.float32, "f": np.float32}) + knl = lp.prioritize_loops(knl, "i,k") + knl = lp.prioritize_loops(knl, "i,j") + + # get a linearization + knl = preprocess_kernel(knl) + knl = get_one_linearized_kernel(knl) + linearization_items = knl.linearization + + def check_sio_for_insn_pair( + insn_id_before, + insn_id_after, + expected_lex_order_map, + expected_sio, + ): + + lex_sched = get_schedule_for_statement_pair( + knl, + linearization_items, + insn_id_before, + insn_id_after, + ) + + # Get two isl maps representing the LexSchedule + isl_sched_map_before, isl_sched_map_after = \ + get_isl_maps_for_LexSchedule(lex_sched, knl, insn_id_before, insn_id_after) + + # get map representing lexicographic ordering + sched_lex_order_map = lex_sched.get_lex_order_map_for_sched_space() + + assert sched_lex_order_map == expected_lex_order_map + + # create statement instance ordering, + # maps each statement instance to all statement instances occuring later + sio = get_statement_ordering_map( + isl_sched_map_before, + isl_sched_map_after, + sched_lex_order_map, + ) + + print(sio) + print(expected_sio) + + sio_aligned = align_isl_maps_by_var_names(sio, expected_sio) + + print(sio_aligned) + print(expected_sio) + + assert sio_aligned == expected_sio + + expected_lex_order_map = isl.Map( + "{ " + "[l0, l1, l2, l3, l4] -> [l0_, l1_, l2_, l3_, l4_] : l0_ > l0; " + "[l0, l1, l2, l3, l4] -> [l0_= l0, l1_, l2_, l3_, l4_] : l1_ > l1; " + "[l0, l1, l2, l3, l4] -> [l0_= l0, l1_= l1, l2_, l3_, l4_] : l2_ > l2; " + "[l0, l1, l2, l3, l4] -> [l0_= l0, l1_= l1, l2_= l2, l3_, l4_] : l3_ > l3; " + "[l0, l1, l2, l3, l4] -> [l0_= l0, l1_= l1, l2_= l2, l3_= l3, l4_] : l4_ > l4 " + "}" + ) + + # Relationship between insn_a and insn_b --------------------------------------- + + expected_sio = isl.Map( + "[pi, pj, pk] -> { " + "[statement' = 0, i', k'] -> [statement = 1, i, j] : " + "0 <= i' < pi and 0 <= k' < pk and 0 <= j < pj and 0 <= i < pi and i > i'; " + "[statement' = 0, i', k'] -> [statement = 1, i = i', j] : " + "0 <= i' < pi and 0 <= k' < pk and 0 <= j < pj " + "}" + ) + # isl ignores these apostrophes, so explicitly add them + expected_sio = append_marker_to_in_dim_names(expected_sio, "'") + + check_sio_for_insn_pair( + "insn_a", "insn_b", expected_lex_order_map, expected_sio) + + # Relationship between insn_a and insn_c --------------------------------------- + + expected_sio = isl.Map( + "[pi, pj, pk] -> { " + "[statement' = 0, i', k'] -> [statement = 1, i, j] : " + "0 <= i' < pi and 0 <= k' < pk and 0 <= j < pj and 0 <= i < pi and i > i'; " + "[statement' = 0, i', k'] -> [statement = 1, i = i', j] : " + "0 <= i' < pi and 0 <= k' < pk and 0 <= j < pj " + "}" + ) + # isl ignores these 
apostrophes, so explicitly add them + expected_sio = append_marker_to_in_dim_names(expected_sio, "'") + + check_sio_for_insn_pair( + "insn_a", "insn_c", expected_lex_order_map, expected_sio) + + # Relationship between insn_a and insn_d --------------------------------------- + + expected_sio = isl.Map( + "[pt, pi, pk] -> { " + "[statement' = 0, i', k'] -> [statement = 1, t] : " + "0 <= i' < pi and 0 <= k' < pk and 0 <= t < pt " + "}" + ) + # isl ignores these apostrophes, so explicitly add them + expected_sio = append_marker_to_in_dim_names(expected_sio, "'") + + check_sio_for_insn_pair( + "insn_a", "insn_d", expected_lex_order_map, expected_sio) + + # Relationship between insn_b and insn_c --------------------------------------- + + expected_sio = isl.Map( + "[pi, pj] -> { " + "[statement' = 0, i', j'] -> [statement = 1, i, j] : " + "0 <= i' < pi and 0 <= j' < pj and i > i' and 0 <= i < pi and 0 <= j < pj; " + "[statement' = 0, i', j'] -> [statement = 1, i = i', j] : " + "0 <= i' < pi and 0 <= j' < pj and j > j' and 0 <= j < pj; " + "[statement' = 0, i', j'] -> [statement = 1, i = i', j = j'] : " + "0 <= i' < pi and 0 <= j' < pj " + "}" + ) + # isl ignores these apostrophes, so explicitly add them + expected_sio = append_marker_to_in_dim_names(expected_sio, "'") + + check_sio_for_insn_pair( + "insn_b", "insn_c", expected_lex_order_map, expected_sio) + + # Relationship between insn_b and insn_d --------------------------------------- + + expected_sio = isl.Map( + "[pt, pi, pj] -> { " + "[statement' = 0, i', j'] -> [statement = 1, t] : " + "0 <= i' < pi and 0 <= j' < pj and 0 <= t < pt " + "}" + ) + # isl ignores these apostrophes, so explicitly add them + expected_sio = append_marker_to_in_dim_names(expected_sio, "'") + + check_sio_for_insn_pair( + "insn_b", "insn_d", expected_lex_order_map, expected_sio) + + # Relationship between insn_c and insn_d --------------------------------------- + + expected_sio = isl.Map( + "[pt, pi, pj] -> { " + "[statement' = 0, i', j'] -> [statement = 1, t] : " + "0 <= i' < pi and 0 <= j' < pj and 0 <= t < pt " + "}" + ) + # isl ignores these apostrophes, so explicitly add them + expected_sio = append_marker_to_in_dim_names(expected_sio, "'") + + check_sio_for_insn_pair( + "insn_c", "insn_d", expected_lex_order_map, expected_sio) + +# }}} + if __name__ == "__main__": if len(sys.argv) > 1: -- GitLab From 6f109f979f39a4ab2cc7839ea582b1457c538ac6 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 20 Apr 2020 22:28:38 -0500 Subject: [PATCH 234/415] fixing flake8 issues --- test/test_linearization_checker.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/test/test_linearization_checker.py b/test/test_linearization_checker.py index 5a05bdd8e..52145915d 100644 --- a/test/test_linearization_checker.py +++ b/test/test_linearization_checker.py @@ -435,8 +435,8 @@ def test_statement_instance_ordering_creation(): ) # Get two isl maps representing the LexSchedule - isl_sched_map_before, isl_sched_map_after = \ - get_isl_maps_for_LexSchedule(lex_sched, knl, insn_id_before, insn_id_after) + isl_sched_map_before, isl_sched_map_after = get_isl_maps_for_LexSchedule( + lex_sched, knl, insn_id_before, insn_id_after) # get map representing lexicographic ordering sched_lex_order_map = lex_sched.get_lex_order_map_for_sched_space() @@ -463,11 +463,11 @@ def test_statement_instance_ordering_creation(): expected_lex_order_map = isl.Map( "{ " - "[l0, l1, l2, l3, l4] -> [l0_, l1_, l2_, l3_, l4_] : l0_ > l0; " - "[l0, l1, l2, l3, l4] -> [l0_= l0, l1_, 
l2_, l3_, l4_] : l1_ > l1; " - "[l0, l1, l2, l3, l4] -> [l0_= l0, l1_= l1, l2_, l3_, l4_] : l2_ > l2; " - "[l0, l1, l2, l3, l4] -> [l0_= l0, l1_= l1, l2_= l2, l3_, l4_] : l3_ > l3; " - "[l0, l1, l2, l3, l4] -> [l0_= l0, l1_= l1, l2_= l2, l3_= l3, l4_] : l4_ > l4 " + "[l0, l1, l2, l3, l4] -> [l0_, l1_, l2_, l3_, l4_]: l0_ > l0; " + "[l0, l1, l2, l3, l4] -> [l0_= l0, l1_, l2_, l3_, l4_]: l1_ > l1; " + "[l0, l1, l2, l3, l4] -> [l0_= l0, l1_= l1, l2_, l3_, l4_]: l2_ > l2; " + "[l0, l1, l2, l3, l4] -> [l0_= l0, l1_= l1, l2_= l2, l3_, l4_]: l3_ > l3; " + "[l0, l1, l2, l3, l4] -> [l0_= l0, l1_= l1, l2_= l2, l3_= l3, l4_]: l4_ > l4" "}" ) -- GitLab From ae7f906a83159796f0ae21929f7dd8d08d518279 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 21 Apr 2020 03:57:15 -0500 Subject: [PATCH 235/415] replace append_marker_to_in_dim_names() with more generic append_marker_to_isl_map_var_names() that allows dim specification --- .../checker/lexicographic_order_map.py | 5 ++-- loopy/schedule/checker/utils.py | 29 ++++++++++++++----- test/test_linearization_checker.py | 20 ++++++++----- 3 files changed, 37 insertions(+), 17 deletions(-) diff --git a/loopy/schedule/checker/lexicographic_order_map.py b/loopy/schedule/checker/lexicographic_order_map.py index 61f191247..ddc320ed9 100644 --- a/loopy/schedule/checker/lexicographic_order_map.py +++ b/loopy/schedule/checker/lexicographic_order_map.py @@ -56,9 +56,10 @@ def get_statement_ordering_map( lex_map).apply_range(sched_map_after.reverse()) # append marker to in names from loopy.schedule.checker.utils import ( - append_marker_to_in_dim_names, + append_marker_to_isl_map_var_names, ) - return append_marker_to_in_dim_names(sio, before_marker) + return append_marker_to_isl_map_var_names( + sio, isl.dim_type.in_, before_marker) def get_lex_order_constraint(islvars, before_names, after_names): diff --git a/loopy/schedule/checker/utils.py b/loopy/schedule/checker/utils.py index 96aa007c7..46c33ed3b 100644 --- a/loopy/schedule/checker/utils.py +++ b/loopy/schedule/checker/utils.py @@ -143,6 +143,27 @@ def align_isl_maps_by_var_names(input_map, target_map): return aligned_input_map +def append_marker_to_isl_map_var_names(old_isl_map, dim_type, marker="'"): + """Return an isl_map with marker appended to + dim_type dimension names. + + :arg old_isl_map: A :class:`islpy.Map`. + + :arg dim_type: A :class:`islpy.dim_type`, i.e., an :class:`int`, + specifying the dimension to be marked. + + :returns: A :class:`islpy.Map` matching `old_isl_map` with + apostrophes appended to dim_type dimension names. 
+ + """ + + new_map = old_isl_map.copy() + for i in range(len(old_isl_map.get_var_names(dim_type))): + new_map = new_map.set_dim_name(dim_type, i, old_isl_map.get_dim_name( + dim_type, i)+marker) + return new_map + + def append_marker_to_strings(strings, marker="'"): if not isinstance(strings, list): raise ValueError("append_marker_to_strings did not receive a list") @@ -150,14 +171,6 @@ def append_marker_to_strings(strings, marker="'"): return [s+marker for s in strings] -def append_marker_to_in_dim_names(islmap, marker="'"): - # append marker to in names - for i in range(islmap.dim(isl.dim_type.in_)): - islmap = islmap.set_dim_name(isl.dim_type.in_, i, islmap.get_dim_name( - isl.dim_type.in_, i)+marker) - return islmap - - def _union_of_isl_sets_or_maps(set_list): union = set_list[0] for s in set_list[1:]: diff --git a/test/test_linearization_checker.py b/test/test_linearization_checker.py index 52145915d..a15d48d1c 100644 --- a/test/test_linearization_checker.py +++ b/test/test_linearization_checker.py @@ -377,7 +377,7 @@ def test_statement_instance_ordering_creation(): ) from loopy.schedule.checker.utils import ( align_isl_maps_by_var_names, - append_marker_to_in_dim_names, + append_marker_to_isl_map_var_names, ) from loopy.schedule.checker.lexicographic_order_map import ( get_statement_ordering_map, @@ -482,7 +482,8 @@ def test_statement_instance_ordering_creation(): "}" ) # isl ignores these apostrophes, so explicitly add them - expected_sio = append_marker_to_in_dim_names(expected_sio, "'") + expected_sio = append_marker_to_isl_map_var_names( + expected_sio, isl.dim_type.in_, "'") check_sio_for_insn_pair( "insn_a", "insn_b", expected_lex_order_map, expected_sio) @@ -498,7 +499,8 @@ def test_statement_instance_ordering_creation(): "}" ) # isl ignores these apostrophes, so explicitly add them - expected_sio = append_marker_to_in_dim_names(expected_sio, "'") + expected_sio = append_marker_to_isl_map_var_names( + expected_sio, isl.dim_type.in_, "'") check_sio_for_insn_pair( "insn_a", "insn_c", expected_lex_order_map, expected_sio) @@ -512,7 +514,8 @@ def test_statement_instance_ordering_creation(): "}" ) # isl ignores these apostrophes, so explicitly add them - expected_sio = append_marker_to_in_dim_names(expected_sio, "'") + expected_sio = append_marker_to_isl_map_var_names( + expected_sio, isl.dim_type.in_, "'") check_sio_for_insn_pair( "insn_a", "insn_d", expected_lex_order_map, expected_sio) @@ -530,7 +533,8 @@ def test_statement_instance_ordering_creation(): "}" ) # isl ignores these apostrophes, so explicitly add them - expected_sio = append_marker_to_in_dim_names(expected_sio, "'") + expected_sio = append_marker_to_isl_map_var_names( + expected_sio, isl.dim_type.in_, "'") check_sio_for_insn_pair( "insn_b", "insn_c", expected_lex_order_map, expected_sio) @@ -544,7 +548,8 @@ def test_statement_instance_ordering_creation(): "}" ) # isl ignores these apostrophes, so explicitly add them - expected_sio = append_marker_to_in_dim_names(expected_sio, "'") + expected_sio = append_marker_to_isl_map_var_names( + expected_sio, isl.dim_type.in_, "'") check_sio_for_insn_pair( "insn_b", "insn_d", expected_lex_order_map, expected_sio) @@ -558,7 +563,8 @@ def test_statement_instance_ordering_creation(): "}" ) # isl ignores these apostrophes, so explicitly add them - expected_sio = append_marker_to_in_dim_names(expected_sio, "'") + expected_sio = append_marker_to_isl_map_var_names( + expected_sio, isl.dim_type.in_, "'") check_sio_for_insn_pair( "insn_c", "insn_d", expected_lex_order_map, 
expected_sio) -- GitLab From 2556e7590f6724b1a49c8370925dc9701aab6097 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 27 Apr 2020 18:16:23 -0500 Subject: [PATCH 236/415] remove extra args from get_isl_maps_for_LexSchedule() --- test/test_linearization_checker.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/test_linearization_checker.py b/test/test_linearization_checker.py index c6f8d56dc..f51b050ac 100644 --- a/test/test_linearization_checker.py +++ b/test/test_linearization_checker.py @@ -436,7 +436,7 @@ def test_statement_instance_ordering_creation(): # Get two isl maps representing the LexSchedule isl_sched_map_before, isl_sched_map_after = get_isl_maps_for_LexSchedule( - lex_sched, knl, insn_id_before, insn_id_after) + lex_sched, knl) # get map representing lexicographic ordering sched_lex_order_map = lex_sched.get_lex_order_map_for_sched_space() -- GitLab From aac84c58e253945e8029d64a4cd15afa37ac2516 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 27 Apr 2020 18:21:16 -0500 Subject: [PATCH 237/415] remove extra args from get_isl_maps_for_LexSchedule() --- loopy/schedule/checker/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/loopy/schedule/checker/__init__.py b/loopy/schedule/checker/__init__.py index 0d6bd9fc3..ffb9657b3 100644 --- a/loopy/schedule/checker/__init__.py +++ b/loopy/schedule/checker/__init__.py @@ -264,7 +264,7 @@ def check_linearization_validity( # Get two isl maps representing the LexSchedule, # one for each linearization item involved in the dependency; isl_sched_map_before, isl_sched_map_after = get_isl_maps_for_LexSchedule( - sched, preprocessed_knl, s_before.insn_id, s_after.insn_id) + sched, preprocessed_knl) if verbose: print("-"*80) -- GitLab From f38f3027c1b575c6cbce1849b80a37292accbb85 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 12 May 2020 00:47:46 -0500 Subject: [PATCH 238/415] add new reserved prefix to map vars --- test/test_linearization_checker.py | 55 +++++++++++++++++++----------- 1 file changed, 36 insertions(+), 19 deletions(-) diff --git a/test/test_linearization_checker.py b/test/test_linearization_checker.py index 9ce2f981e..1e5457b94 100644 --- a/test/test_linearization_checker.py +++ b/test/test_linearization_checker.py @@ -485,23 +485,40 @@ def test_statement_instance_ordering_creation(): assert sio_aligned == expected_sio - expected_lex_order_map = isl.Map( - "{ " - "[l0, l1, l2, l3, l4] -> [l0_, l1_, l2_, l3_, l4_]: l0_ > l0; " - "[l0, l1, l2, l3, l4] -> [l0_= l0, l1_, l2_, l3_, l4_]: l1_ > l1; " - "[l0, l1, l2, l3, l4] -> [l0_= l0, l1_= l1, l2_, l3_, l4_]: l2_ > l2; " - "[l0, l1, l2, l3, l4] -> [l0_= l0, l1_= l1, l2_= l2, l3_, l4_]: l3_ > l3; " - "[l0, l1, l2, l3, l4] -> [l0_= l0, l1_= l1, l2_= l2, l3_= l3, l4_]: l4_ > l4" - "}" - ) + expected_lex_order_map = isl.Map("{ " + "[_lp_sched_l0, _lp_sched_l1, _lp_sched_l2, _lp_sched_l3, _lp_sched_l4] -> " + "[_lp_sched_l0_, _lp_sched_l1_, _lp_sched_l2_, _lp_sched_l3_, _lp_sched_l4_]" + ":" + "(" + "_lp_sched_l0_ > _lp_sched_l0 " + ") or (" + "_lp_sched_l0_= _lp_sched_l0 and " + "_lp_sched_l1_ > _lp_sched_l1 " + ") or (" + "_lp_sched_l0_= _lp_sched_l0 and " + "_lp_sched_l1_= _lp_sched_l1 and " + "_lp_sched_l2_ > _lp_sched_l2 " + ") or (" + "_lp_sched_l0_= _lp_sched_l0 and " + "_lp_sched_l1_= _lp_sched_l1 and " + "_lp_sched_l2_= _lp_sched_l2 and " + "_lp_sched_l3_ > _lp_sched_l3 " + ") or (" + "_lp_sched_l0_= _lp_sched_l0 and " + "_lp_sched_l1_= _lp_sched_l1 and " + "_lp_sched_l2_= _lp_sched_l2 and " + "_lp_sched_l3_= _lp_sched_l3 
and " + "_lp_sched_l4_ > _lp_sched_l4" + ")" + "}") # Relationship between insn_a and insn_b --------------------------------------- expected_sio = isl.Map( "[pi, pj, pk] -> { " - "[statement' = 0, i', k'] -> [statement = 1, i, j] : " + "[_lp_sched_statement'=0, i', k'] -> [_lp_sched_statement=1, i, j]:" "0 <= i' < pi and 0 <= k' < pk and 0 <= j < pj and 0 <= i < pi and i > i'; " - "[statement' = 0, i', k'] -> [statement = 1, i = i', j] : " + "[_lp_sched_statement'=0, i', k'] -> [_lp_sched_statement=1, i=i', j]:" "0 <= i' < pi and 0 <= k' < pk and 0 <= j < pj " "}" ) @@ -516,9 +533,9 @@ def test_statement_instance_ordering_creation(): expected_sio = isl.Map( "[pi, pj, pk] -> { " - "[statement' = 0, i', k'] -> [statement = 1, i, j] : " + "[_lp_sched_statement'=0, i', k'] -> [_lp_sched_statement=1, i, j]:" "0 <= i' < pi and 0 <= k' < pk and 0 <= j < pj and 0 <= i < pi and i > i'; " - "[statement' = 0, i', k'] -> [statement = 1, i = i', j] : " + "[_lp_sched_statement'=0, i', k'] -> [_lp_sched_statement=1, i=i', j]:" "0 <= i' < pi and 0 <= k' < pk and 0 <= j < pj " "}" ) @@ -533,7 +550,7 @@ def test_statement_instance_ordering_creation(): expected_sio = isl.Map( "[pt, pi, pk] -> { " - "[statement' = 0, i', k'] -> [statement = 1, t] : " + "[_lp_sched_statement'=0, i', k'] -> [_lp_sched_statement=1, t]:" "0 <= i' < pi and 0 <= k' < pk and 0 <= t < pt " "}" ) @@ -548,11 +565,11 @@ def test_statement_instance_ordering_creation(): expected_sio = isl.Map( "[pi, pj] -> { " - "[statement' = 0, i', j'] -> [statement = 1, i, j] : " + "[_lp_sched_statement'=0, i', j'] -> [_lp_sched_statement=1, i, j]:" "0 <= i' < pi and 0 <= j' < pj and i > i' and 0 <= i < pi and 0 <= j < pj; " - "[statement' = 0, i', j'] -> [statement = 1, i = i', j] : " + "[_lp_sched_statement'=0, i', j'] -> [_lp_sched_statement=1, i=i', j]:" "0 <= i' < pi and 0 <= j' < pj and j > j' and 0 <= j < pj; " - "[statement' = 0, i', j'] -> [statement = 1, i = i', j = j'] : " + "[_lp_sched_statement'=0, i', j'] -> [_lp_sched_statement=1, i=i', j=j']:" "0 <= i' < pi and 0 <= j' < pj " "}" ) @@ -567,7 +584,7 @@ def test_statement_instance_ordering_creation(): expected_sio = isl.Map( "[pt, pi, pj] -> { " - "[statement' = 0, i', j'] -> [statement = 1, t] : " + "[_lp_sched_statement'=0, i', j'] -> [_lp_sched_statement=1, t]:" "0 <= i' < pi and 0 <= j' < pj and 0 <= t < pt " "}" ) @@ -582,7 +599,7 @@ def test_statement_instance_ordering_creation(): expected_sio = isl.Map( "[pt, pi, pj] -> { " - "[statement' = 0, i', j'] -> [statement = 1, t] : " + "[_lp_sched_statement'=0, i', j'] -> [_lp_sched_statement=1, t]:" "0 <= i' < pi and 0 <= j' < pj and 0 <= t < pt " "}" ) -- GitLab From c79c2104c73aac887fa742637374c9afef4c59d5 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 12 May 2020 00:57:10 -0500 Subject: [PATCH 239/415] remove prohibited_var_names arg --- loopy/schedule/checker/__init__.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/loopy/schedule/checker/__init__.py b/loopy/schedule/checker/__init__.py index 8181852c4..ed6693e7e 100644 --- a/loopy/schedule/checker/__init__.py +++ b/loopy/schedule/checker/__init__.py @@ -179,7 +179,6 @@ def check_linearization_validity( knl, statement_pair_dep_sets, linearization_items, - prohibited_var_names=set(), verbose=False, ): # TODO document @@ -238,7 +237,6 @@ def check_linearization_validity( linearization_items, s_before.insn_id, s_after.insn_id, - prohibited_var_names=prohibited_var_names, ) lp_insn_id_to_lex_sched_id = sched.loopy_insn_id_to_lex_sched_id() -- GitLab From 
d4506a0ef3d0f8bf3adf3efbe231f4be6d1cbc09 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 12 May 2020 01:08:24 -0500 Subject: [PATCH 240/415] =?UTF-8?q?use=20composition=20symbol=20=E2=97=A6?= =?UTF-8?q?=20in=20docstring=20for=20get=5Fstatement=5Fordering=5Fmap?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- loopy/schedule/checker/lexicographic_order_map.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/loopy/schedule/checker/lexicographic_order_map.py b/loopy/schedule/checker/lexicographic_order_map.py index ddc320ed9..f42e8e610 100644 --- a/loopy/schedule/checker/lexicographic_order_map.py +++ b/loopy/schedule/checker/lexicographic_order_map.py @@ -46,7 +46,7 @@ def get_statement_ordering_map( :returns: An :class:`islpy.Map` representing the lex schedule as a mapping from each statement instance to all statement instances - occuring later. I.e., we compose B -> L -> A^-1, where B + occuring later. I.e., we compose B ◦ L ◦ A^-1, where B is sched_map_before, A is sched_map_after, and L is the lexicographic ordering map. -- GitLab From 1568d79dd0d36a33e77efb6ad94d997e6fa2e217 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 12 May 2020 01:12:18 -0500 Subject: [PATCH 241/415] in docstring for get_statement_ordering_map(), clarify that we are composing relations --- loopy/schedule/checker/lexicographic_order_map.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/loopy/schedule/checker/lexicographic_order_map.py b/loopy/schedule/checker/lexicographic_order_map.py index f42e8e610..ce8808119 100644 --- a/loopy/schedule/checker/lexicographic_order_map.py +++ b/loopy/schedule/checker/lexicographic_order_map.py @@ -46,9 +46,9 @@ def get_statement_ordering_map( :returns: An :class:`islpy.Map` representing the lex schedule as a mapping from each statement instance to all statement instances - occuring later. I.e., we compose B ◦ L ◦ A^-1, where B - is sched_map_before, A is sched_map_after, and L is the - lexicographic ordering map. + occuring later. I.e., we compose relations B, L, and A as + B ◦ L ◦ A^-1, where B is sched_map_before, A is sched_map_after, + and L is the lexicographic ordering map. 
""" -- GitLab From 4cd38b097a054b8b97ac0f200cf1bd0e76662870 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 18 May 2020 23:52:38 -0500 Subject: [PATCH 242/415] rename single char variable to placate flake8 --- loopy/schedule/checker/utils.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/loopy/schedule/checker/utils.py b/loopy/schedule/checker/utils.py index dd3839c91..2fda4442c 100644 --- a/loopy/schedule/checker/utils.py +++ b/loopy/schedule/checker/utils.py @@ -211,11 +211,11 @@ def make_islvars_with_marker( """ - def append_marker(l, mark): - new_l = [] - for s in l: - new_l.append(s+mark) - return new_l + def append_marker(items, mark): + new_items = [] + for item in items: + new_items.append(item+mark) + return new_items return isl.make_zero_and_vars( append_marker(var_names_needing_marker, marker) -- GitLab From a2c007b2f6908d72ccbd1c125347ee1e0f5e1c7a Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 19 May 2020 00:04:56 -0500 Subject: [PATCH 243/415] try a slightlyl different function composition symbol (to address 'Non-ASCII character' syntax error) --- loopy/schedule/checker/lexicographic_order_map.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/loopy/schedule/checker/lexicographic_order_map.py b/loopy/schedule/checker/lexicographic_order_map.py index ce8808119..9807d293f 100644 --- a/loopy/schedule/checker/lexicographic_order_map.py +++ b/loopy/schedule/checker/lexicographic_order_map.py @@ -47,7 +47,7 @@ def get_statement_ordering_map( :returns: An :class:`islpy.Map` representing the lex schedule as a mapping from each statement instance to all statement instances occuring later. I.e., we compose relations B, L, and A as - B ◦ L ◦ A^-1, where B is sched_map_before, A is sched_map_after, + B ∘ L ∘ A^-1, where B is sched_map_before, A is sched_map_after, and L is the lexicographic ordering map. """ -- GitLab From 11f8edd708ada13db5f81aa6b2d87638978155ca Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 19 May 2020 00:11:31 -0500 Subject: [PATCH 244/415] add 'coding: utf-8' at top of file to allow composition character --- loopy/schedule/checker/lexicographic_order_map.py | 1 + 1 file changed, 1 insertion(+) diff --git a/loopy/schedule/checker/lexicographic_order_map.py b/loopy/schedule/checker/lexicographic_order_map.py index 9807d293f..5ce2bb4a5 100644 --- a/loopy/schedule/checker/lexicographic_order_map.py +++ b/loopy/schedule/checker/lexicographic_order_map.py @@ -1,3 +1,4 @@ +# coding: utf-8 __copyright__ = "Copyright (C) 2019 James Stevens" __license__ = """ -- GitLab From db5fefe4c803947855484b96ce3132a3dc0a4a45 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 19 May 2020 01:57:43 -0500 Subject: [PATCH 245/415] improve time complexity of get_lex_order_constraint() --- .../checker/lexicographic_order_map.py | 30 +++++++++++++++---- 1 file changed, 24 insertions(+), 6 deletions(-) diff --git a/loopy/schedule/checker/lexicographic_order_map.py b/loopy/schedule/checker/lexicographic_order_map.py index 5ce2bb4a5..d783bac76 100644 --- a/loopy/schedule/checker/lexicographic_order_map.py +++ b/loopy/schedule/checker/lexicographic_order_map.py @@ -92,14 +92,32 @@ def get_lex_order_constraint(islvars, before_names, after_names): """ + # Initialize constraint with i0' < i0 lex_order_constraint = islvars[before_names[0]].lt_set(islvars[after_names[0]]) + + # Initialize conjunction constraint with True. + # For each dim d, starting with d=1, this conjunction will have d equalities, + # e.g., (i0' = i0 and i1' = i1 and ... 
i(d-1)' = i(d-1)) + equality_constraint_conj = islvars[0].eq_set(islvars[0]) + for i in range(1, len(before_names)): - lex_order_constraint_conj = islvars[before_names[i]].lt_set( - islvars[after_names[i]]) - for j in range(i): - lex_order_constraint_conj = lex_order_constraint_conj & \ - islvars[before_names[j]].eq_set(islvars[after_names[j]]) - lex_order_constraint = lex_order_constraint | lex_order_constraint_conj + + # Add the next equality constraint to equality_constraint_conj + equality_constraint_conj = equality_constraint_conj & \ + islvars[before_names[i-1]].eq_set(islvars[after_names[i-1]]) + + # Create a conjunction constraint by combining a less-than + # constraint for this dim, e.g., (i1' < i1), with the current + # equality constraint conjunction. + # For each dim d, starting with d=1, this conjunction will have d equalities, + # and one inequality, + # e.g., (i0' = i0 and i1' = i1 and ... i(d-1)' = i(d-1) and id' < id) + full_conj_constraint = islvars[before_names[i]].lt_set( + islvars[after_names[i]]) & equality_constraint_conj + + # Union this new constraint with the current lex_order_constraint + lex_order_constraint = lex_order_constraint | full_conj_constraint + return lex_order_constraint -- GitLab From 97e90820c5c232b845bf5063bfe2a71bd3bee01b Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 19 May 2020 02:22:12 -0500 Subject: [PATCH 246/415] have create_lex_order_map() put apostrophes on 'before' vars for consistency with other logic --- .../checker/lexicographic_order_map.py | 6 +-- loopy/schedule/checker/schedule.py | 2 +- test/test_linearization_checker.py | 40 +++++++++++-------- 3 files changed, 27 insertions(+), 21 deletions(-) diff --git a/loopy/schedule/checker/lexicographic_order_map.py b/loopy/schedule/checker/lexicographic_order_map.py index d783bac76..17b6616ca 100644 --- a/loopy/schedule/checker/lexicographic_order_map.py +++ b/loopy/schedule/checker/lexicographic_order_map.py @@ -153,13 +153,13 @@ def create_lex_order_map( """ - if before_names is None: - before_names = ["i%s" % (i) for i in range(n_dims)] if after_names is None: + after_names = ["i%s" % (i) for i in range(n_dims)] + if before_names is None: from loopy.schedule.checker.utils import ( append_marker_to_strings, ) - after_names = append_marker_to_strings(before_names, marker="_") + before_names = append_marker_to_strings(after_names, marker="'") assert len(before_names) == len(after_names) == n_dims dim_type = isl.dim_type diff --git a/loopy/schedule/checker/schedule.py b/loopy/schedule/checker/schedule.py index ea0829199..a87723480 100644 --- a/loopy/schedule/checker/schedule.py +++ b/loopy/schedule/checker/schedule.py @@ -409,7 +409,7 @@ class LexSchedule(object): ) n_dims = self.max_lex_dims() return create_lex_order_map( - n_dims, before_names=self.get_lex_var_names()) + n_dims, after_names=self.get_lex_var_names()) def __str__(self): diff --git a/test/test_linearization_checker.py b/test/test_linearization_checker.py index 1e5457b94..e57df9ac8 100644 --- a/test/test_linearization_checker.py +++ b/test/test_linearization_checker.py @@ -486,32 +486,38 @@ def test_statement_instance_ordering_creation(): assert sio_aligned == expected_sio expected_lex_order_map = isl.Map("{ " - "[_lp_sched_l0, _lp_sched_l1, _lp_sched_l2, _lp_sched_l3, _lp_sched_l4] -> " - "[_lp_sched_l0_, _lp_sched_l1_, _lp_sched_l2_, _lp_sched_l3_, _lp_sched_l4_]" + "[_lp_sched_l0', _lp_sched_l1', _lp_sched_l2', _lp_sched_l3', _lp_sched_l4']" + " -> [_lp_sched_l0, _lp_sched_l1, _lp_sched_l2, _lp_sched_l3, 
_lp_sched_l4]" ":" "(" - "_lp_sched_l0_ > _lp_sched_l0 " + "_lp_sched_l0' < _lp_sched_l0 " ") or (" - "_lp_sched_l0_= _lp_sched_l0 and " - "_lp_sched_l1_ > _lp_sched_l1 " + "_lp_sched_l0'= _lp_sched_l0 and " + "_lp_sched_l1' < _lp_sched_l1 " ") or (" - "_lp_sched_l0_= _lp_sched_l0 and " - "_lp_sched_l1_= _lp_sched_l1 and " - "_lp_sched_l2_ > _lp_sched_l2 " + "_lp_sched_l0'= _lp_sched_l0 and " + "_lp_sched_l1'= _lp_sched_l1 and " + "_lp_sched_l2' < _lp_sched_l2 " ") or (" - "_lp_sched_l0_= _lp_sched_l0 and " - "_lp_sched_l1_= _lp_sched_l1 and " - "_lp_sched_l2_= _lp_sched_l2 and " - "_lp_sched_l3_ > _lp_sched_l3 " + "_lp_sched_l0'= _lp_sched_l0 and " + "_lp_sched_l1'= _lp_sched_l1 and " + "_lp_sched_l2'= _lp_sched_l2 and " + "_lp_sched_l3' < _lp_sched_l3 " ") or (" - "_lp_sched_l0_= _lp_sched_l0 and " - "_lp_sched_l1_= _lp_sched_l1 and " - "_lp_sched_l2_= _lp_sched_l2 and " - "_lp_sched_l3_= _lp_sched_l3 and " - "_lp_sched_l4_ > _lp_sched_l4" + "_lp_sched_l0'= _lp_sched_l0 and " + "_lp_sched_l1'= _lp_sched_l1 and " + "_lp_sched_l2'= _lp_sched_l2 and " + "_lp_sched_l3'= _lp_sched_l3 and " + "_lp_sched_l4' < _lp_sched_l4" ")" "}") + # Isl ignores these apostrophes, but test would still pass since it ignores + # variable names when checking for equality. Even so, explicitly add apostrophes + # for sanity. + expected_lex_order_map = append_marker_to_isl_map_var_names( + expected_lex_order_map, isl.dim_type.in_, "'") + # Relationship between insn_a and insn_b --------------------------------------- expected_sio = isl.Map( -- GitLab From e6b83f65afdd8b0984c2db7741cf6b73fc1b618a Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 25 May 2020 03:00:54 -0500 Subject: [PATCH 247/415] don't pass within_inames to LexScheduleStatement() (removed); in filter_deps_by_intersection_with_SAME(), instead of getting within_inames from LexScheduleStatements, pass in kernel and use insn_id --- loopy/schedule/checker/dependency.py | 21 +++++++------------ .../example_pairwise_schedule_validity.py | 1 + 2 files changed, 9 insertions(+), 13 deletions(-) diff --git a/loopy/schedule/checker/dependency.py b/loopy/schedule/checker/dependency.py index c193752be..d31852de4 100644 --- a/loopy/schedule/checker/dependency.py +++ b/loopy/schedule/checker/dependency.py @@ -704,12 +704,8 @@ def create_dependencies_from_legacy_knl(knl): statement_dep_sets.append( StatementPairDependencySet( - LexScheduleStatement( - insn_id=insn_before.id, - within_inames=insn_before_inames), - LexScheduleStatement( - insn_id=insn_after.id, - within_inames=insn_after_inames), + LexScheduleStatement(insn_id=insn_before.id), + LexScheduleStatement(insn_id=insn_after.id), {dt.SAME: shared_non_conc_inames}, knl.get_inames_domain(insn_before_inames), knl.get_inames_domain(insn_after_inames), @@ -744,12 +740,8 @@ def create_dependencies_from_legacy_knl(knl): statement_dep_sets.append( StatementPairDependencySet( - LexScheduleStatement( - insn_id=sink_id, - within_inames=sink_insn_inames), - LexScheduleStatement( - insn_id=source_id, - within_inames=source_insn_inames), + LexScheduleStatement(insn_id=sink_id), + LexScheduleStatement(insn_id=source_id), {dt.PRIOR: shared_non_conc_inames}, knl.get_inames_domain(sink_insn_inames), knl.get_inames_domain(source_insn_inames), @@ -789,6 +781,7 @@ def get_dependency_sources_and_sinks(knl, linearization_item_ids): def filter_deps_by_intersection_with_SAME( + knl, statement_pair_dep_sets, insn_ids, loop_priority, @@ -820,7 +813,9 @@ def filter_deps_by_intersection_with_SAME( s_before = 
statement_pair_dep_set.statement_before s_after = statement_pair_dep_set.statement_after shared_nc_inames = ( - s_before.within_inames & s_after.within_inames & non_conc_inames) + knl.id_to_insn[s_before.insn_id].within_inames & + knl.id_to_insn[s_after.insn_id].within_inames & + non_conc_inames) same_dep_set = StatementPairDependencySet( s_before, s_after, diff --git a/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py b/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py index 025205afd..e962abff0 100644 --- a/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py +++ b/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py @@ -351,6 +351,7 @@ from loopy.schedule.checker.utils import ( ) _, non_conc_inames = get_concurrent_inames(knl) legacy_deps_filtered_by_same = filter_deps_by_intersection_with_SAME( + knl, legacy_statement_pair_dep_sets, [insn.id for insn in knl.instructions], knl.loop_priority, -- GitLab From 381abd1b68b526c3aba96b265b535e146743f5d2 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 25 May 2020 03:12:24 -0500 Subject: [PATCH 248/415] now that we're passing knl as arg into filter_deps_by_intersection_with_SAME(), don't pass knl.loop_priority --- loopy/schedule/checker/dependency.py | 5 ++--- .../example_pairwise_schedule_validity.py | 1 - 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/loopy/schedule/checker/dependency.py b/loopy/schedule/checker/dependency.py index d31852de4..80c7e7ddc 100644 --- a/loopy/schedule/checker/dependency.py +++ b/loopy/schedule/checker/dependency.py @@ -784,7 +784,6 @@ def filter_deps_by_intersection_with_SAME( knl, statement_pair_dep_sets, insn_ids, - loop_priority, non_conc_inames, ): # TODO document @@ -804,7 +803,7 @@ def filter_deps_by_intersection_with_SAME( # create isl map representing dep relation dep_constraint_map = create_dependency_constraint( statement_pair_dep_set, - loop_priority, + knl.loop_priority, lp_insn_id_to_lex_sched_id, "statement", ) @@ -825,7 +824,7 @@ def filter_deps_by_intersection_with_SAME( ) same_dep_constraint_map = create_dependency_constraint( same_dep_set, - loop_priority, + knl.loop_priority, lp_insn_id_to_lex_sched_id, "statement", ) diff --git a/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py b/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py index e962abff0..57715bc88 100644 --- a/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py +++ b/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py @@ -354,7 +354,6 @@ legacy_deps_filtered_by_same = filter_deps_by_intersection_with_SAME( knl, legacy_statement_pair_dep_sets, [insn.id for insn in knl.instructions], - knl.loop_priority, non_conc_inames, ) -- GitLab From 3b5d4caa5a5f1e272172370f949bcd19a54d9b0a Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 26 May 2020 10:27:36 -0500 Subject: [PATCH 249/415] rename LexScheduleStatement->PairwiseScheduleStatement, get_isl_maps_for_LexSchedule->get_isl_maps_from_PairwiseScheduleBuilder, LexSchedule->PairwiseScheduleBuilder; also rename other variables for consistency --- test/test_linearization_checker.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/test/test_linearization_checker.py b/test/test_linearization_checker.py index df40c1dd5..255d2b0a6 100644 --- a/test/test_linearization_checker.py +++ 
b/test/test_linearization_checker.py @@ -46,9 +46,9 @@ else: faulthandler.enable() -# {{{ test LexSchedule and isl map creation +# {{{ test PairwiseScheduleBuilder and isl map creation -def test_lexschedule_and_islmap_creation(): +def test_pairwise_schedule_and_islmap_creation(): import islpy as isl from loopy.schedule.checker import ( get_schedule_for_statement_pair, @@ -397,7 +397,7 @@ def test_statement_instance_ordering_creation(): import islpy as isl from loopy.schedule.checker import ( get_schedule_for_statement_pair, - get_isl_maps_for_LexSchedule, + get_isl_maps_from_PairwiseScheduleBuilder, ) from loopy.schedule.checker.utils import ( align_isl_maps_by_var_names, @@ -451,19 +451,19 @@ def test_statement_instance_ordering_creation(): expected_sio, ): - lex_sched = get_schedule_for_statement_pair( + sched_builder = get_schedule_for_statement_pair( knl, linearization_items, insn_id_before, insn_id_after, ) - # Get two isl maps representing the LexSchedule - isl_sched_map_before, isl_sched_map_after = get_isl_maps_for_LexSchedule( - lex_sched, knl) + # Get two isl maps from the PairwiseScheduleBuilder + isl_sched_map_before, isl_sched_map_after = \ + get_isl_maps_from_PairwiseScheduleBuilder(sched_builder, knl) # get map representing lexicographic ordering - sched_lex_order_map = lex_sched.get_lex_order_map_for_sched_space() + sched_lex_order_map = sched_builder.get_lex_order_map_for_sched_space() assert sched_lex_order_map == expected_lex_order_map -- GitLab From e717a5ab82cb1f1d8cbe148780759534ca8d2d42 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 26 May 2020 10:35:24 -0500 Subject: [PATCH 250/415] rename LexScheduleStatement->PairwiseScheduleStatement, get_isl_maps_for_LexSchedule->get_isl_maps_from_PairwiseScheduleBuilder, LexSchedule->PairwiseScheduleBuilder; also rename other variables for consistency --- loopy/schedule/checker/__init__.py | 24 ++++++++++--------- loopy/schedule/checker/dependency.py | 16 ++++++------- .../example_wave_equation.py | 14 +++++------ loopy/schedule/checker/schedule.py | 2 +- 4 files changed, 29 insertions(+), 27 deletions(-) diff --git a/loopy/schedule/checker/__init__.py b/loopy/schedule/checker/__init__.py index 1fc310fba..2e10f2616 100644 --- a/loopy/schedule/checker/__init__.py +++ b/loopy/schedule/checker/__init__.py @@ -223,7 +223,8 @@ def check_linearization_validity( for statement_pair_dep_set in statement_pair_dep_sets: s_before = statement_pair_dep_set.statement_before s_after = statement_pair_dep_set.statement_after - # TODO, since we now get the doms inside get_isl_maps_for_LexSchedule(), + # TODO, since we now get the doms inside + # get_isl_maps_from_PairwiseScheduleBuilder(), # reconsider the content of statement_pair_dep_set, which # currently contains doms(do we still want them there?) 
@@ -232,28 +233,29 @@ def check_linearization_validity( print("Dependency set:") print(statement_pair_dep_set) - # Create LexSchedule: mapping of {statement instance: lex point} + # Create PairwiseScheduleBuilder: mapping of {statement instance: lex point} # include only instructions involved in this dependency - sched = get_schedule_for_statement_pair( + sched_builder = get_schedule_for_statement_pair( preprocessed_knl, linearization_items, s_before.insn_id, s_after.insn_id, ) - lp_insn_id_to_lex_sched_id = sched.loopy_insn_id_to_lex_sched_id() + lp_insn_id_to_lex_sched_id = sched_builder.loopy_insn_id_to_lex_sched_id() if verbose: print("-"*80) - print("LexSchedule:") - print(sched) + print("PairwiseScheduleBuilder:") + print(sched_builder) print("dict{lp insn id : sched sid int}:") print(lp_insn_id_to_lex_sched_id) - # Get two isl maps representing the LexSchedule, + # Get two isl maps from the PairwiseScheduleBuilder, # one for each linearization item involved in the dependency; - isl_sched_map_before, isl_sched_map_after = get_isl_maps_for_LexSchedule( - sched, preprocessed_knl) + isl_sched_map_before, isl_sched_map_after = \ + get_isl_maps_from_PairwiseScheduleBuilder( + sched_builder, preprocessed_knl) if verbose: print("-"*80) @@ -262,7 +264,7 @@ def check_linearization_validity( print(prettier_map_string(isl_sched_map_after)) # get map representing lexicographic ordering - sched_lex_order_map = sched.get_lex_order_map_for_sched_space() + sched_lex_order_map = sched_builder.get_lex_order_map_for_sched_space() # create statement instance ordering, # maps each statement instance to all statement instances occuring later @@ -287,7 +289,7 @@ def check_linearization_validity( statement_pair_dep_set, knl.loop_priority, lp_insn_id_to_lex_sched_id, - sched.statement_var_name, + sched_builder.statement_var_name, ) # TODO figure out how to keep a consistent lp_insn_id_to_lex_sched_id map # when dependency creation is separate from linearization checking diff --git a/loopy/schedule/checker/dependency.py b/loopy/schedule/checker/dependency.py index 80c7e7ddc..6bb300582 100644 --- a/loopy/schedule/checker/dependency.py +++ b/loopy/schedule/checker/dependency.py @@ -65,12 +65,12 @@ class StatementPairDependencySet(object): .. attribute:: statement_before - A :class:`loopy.schedule.checker.schedule.LexScheduleStatement` depended + A :class:`loopy.schedule.checker.schedule.PairwiseScheduleStatement` depended on by statement_after. .. attribute:: statement_after - A :class:`loopy.schedule.checker.schedule.LexScheduleStatement` which + A :class:`loopy.schedule.checker.schedule.PairwiseScheduleStatement` which cdepends on statement_before. .. attribute:: deps @@ -208,7 +208,7 @@ def create_dependency_constraint( :arg insn_id_to_int: A :class:`dict` mapping insn_id to int_id, where 'insn_id' and 'int_id' refer to the 'insn_id' and 'int_id' attributes - of :class:`loopy.schedule.checker.schedule.LexScheduleStatement`. + of :class:`loopy.schedule.checker.schedule.PairwiseScheduleStatement`. :arg statement_var_name: A :class:`str` specifying the name of the isl variable used to represent the unique :class:`int` statement id. 
@@ -690,7 +690,7 @@ def create_dependencies_from_legacy_knl(knl): get_all_nonconcurrent_insn_iname_subsets, get_linearization_item_ids_within_inames, ) - from loopy.schedule.checker.schedule import LexScheduleStatement + from loopy.schedule.checker.schedule import PairwiseScheduleStatement dt = DependencyType conc_inames, non_conc_inames = get_concurrent_inames(knl) statement_dep_sets = [] @@ -704,8 +704,8 @@ def create_dependencies_from_legacy_knl(knl): statement_dep_sets.append( StatementPairDependencySet( - LexScheduleStatement(insn_id=insn_before.id), - LexScheduleStatement(insn_id=insn_after.id), + PairwiseScheduleStatement(insn_id=insn_before.id), + PairwiseScheduleStatement(insn_id=insn_after.id), {dt.SAME: shared_non_conc_inames}, knl.get_inames_domain(insn_before_inames), knl.get_inames_domain(insn_after_inames), @@ -740,8 +740,8 @@ def create_dependencies_from_legacy_knl(knl): statement_dep_sets.append( StatementPairDependencySet( - LexScheduleStatement(insn_id=sink_id), - LexScheduleStatement(insn_id=source_id), + PairwiseScheduleStatement(insn_id=sink_id), + PairwiseScheduleStatement(insn_id=source_id), {dt.PRIOR: shared_non_conc_inames}, knl.get_inames_domain(sink_insn_inames), knl.get_inames_domain(source_insn_inames), diff --git a/loopy/schedule/checker/experimental_scripts/example_wave_equation.py b/loopy/schedule/checker/experimental_scripts/example_wave_equation.py index 0eacfb0dc..eaad8961d 100644 --- a/loopy/schedule/checker/experimental_scripts/example_wave_equation.py +++ b/loopy/schedule/checker/experimental_scripts/example_wave_equation.py @@ -38,7 +38,7 @@ from loopy.schedule.checker.utils import ( from loopy.schedule.checker.dependency import ( create_arbitrary_dependency_constraint, ) -from loopy.schedule.checker.schedule import LexSchedule +from loopy.schedule.checker.schedule import PairwiseScheduleBuilder from loopy.schedule.checker.lexicographic_order_map import ( get_statement_ordering_map, ) @@ -178,14 +178,14 @@ conc_loop_inames = _get_concurrent_loop_inames( # Create a mapping of {statement instance: lex point} # including only instructions involved in this dependency -sched = LexSchedule( +sched = PairwiseScheduleBuilder( linearized_knl.linearization, str(sid_before), str(sid_after), loops_to_ignore=conc_loop_inames, ) -# Get an isl map representing the LexSchedule; +# Get an isl map representing the PairwiseScheduleBuilder; # this requires the iname domains # get a mapping from lex schedule id to relevant inames domain @@ -201,7 +201,7 @@ isl_sched_map_before, isl_sched_map_after = sched.create_isl_maps( if verbose: print("sid_to_dom:\n", sid_to_dom) - print("LexSchedule after creating isl map:") + print("PairwiseScheduleBuilder after creating isl map:") print(sched) print("LexSched:") print(prettier_map_string(isl_sched_map_before)) @@ -470,14 +470,14 @@ conc_loop_inames = _get_concurrent_loop_inames( linearized_knl.linearization, preprocessed_knl) # Create a mapping of {statement instance: lex point} # including only instructions involved in this dependency -sched = LexSchedule( +sched = PairwiseScheduleBuilder( linearized_knl.linearization, str(sid_before), str(sid_after), loops_to_ignore=conc_loop_inames, ) -# Get an isl map representing the LexSchedule; +# Get an isl map representing the PairwiseScheduleBuilder; # this requires the iname domains # get a mapping from lex schedule id to relevant inames domain @@ -493,7 +493,7 @@ isl_sched_map_before, isl_sched_map_after = sched.create_isl_maps( if verbose: print("sid_to_dom:\n", sid_to_dom) - 
print("LexSchedule after creating isl map:") + print("PairwiseScheduleBuilder after creating isl map:") print(sched) print("LexSched:") print(prettier_map_string(isl_sched_map_before)) diff --git a/loopy/schedule/checker/schedule.py b/loopy/schedule/checker/schedule.py index 4845580fa..7b84cd1b2 100644 --- a/loopy/schedule/checker/schedule.py +++ b/loopy/schedule/checker/schedule.py @@ -280,7 +280,7 @@ class PairwiseScheduleBuilder(object): def loopy_insn_id_to_lex_sched_id(self): """Return a dictionary mapping insn_id to int_id, where ``insn_id`` and ``int_id`` refer to the ``insn_id`` and ``int_id`` attributes of - :class:`LexScheduleStatement`. + :class:`PairwiseScheduleStatement`. """ return { self.stmt_instance_before.stmt.insn_id: -- GitLab From ba46ade4f5b002e72451d593162cac22cfa10553 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 1 Jun 2020 22:30:23 -0500 Subject: [PATCH 251/415] update identifier prefix for loopy.schedule.checker from _lp_sched_->_lp_linchk_ --- test/test_linearization_checker.py | 57 ++++++++++++++++-------------- 1 file changed, 30 insertions(+), 27 deletions(-) diff --git a/test/test_linearization_checker.py b/test/test_linearization_checker.py index 6841072ff..01e28f24a 100644 --- a/test/test_linearization_checker.py +++ b/test/test_linearization_checker.py @@ -486,29 +486,32 @@ def test_statement_instance_ordering_creation(): assert sio_aligned == expected_sio expected_lex_order_map = isl.Map("{ " - "[_lp_sched_l0', _lp_sched_l1', _lp_sched_l2', _lp_sched_l3', _lp_sched_l4']" - " -> [_lp_sched_l0, _lp_sched_l1, _lp_sched_l2, _lp_sched_l3, _lp_sched_l4]" + "[_lp_linchk_l0', _lp_linchk_l1', _lp_linchk_l2', _lp_linchk_l3', " + "_lp_linchk_l4']" + " -> " + "[_lp_linchk_l0, _lp_linchk_l1, _lp_linchk_l2, _lp_linchk_l3, " + "_lp_linchk_l4]" ":" "(" - "_lp_sched_l0' < _lp_sched_l0 " + "_lp_linchk_l0' < _lp_linchk_l0 " ") or (" - "_lp_sched_l0'= _lp_sched_l0 and " - "_lp_sched_l1' < _lp_sched_l1 " + "_lp_linchk_l0'= _lp_linchk_l0 and " + "_lp_linchk_l1' < _lp_linchk_l1 " ") or (" - "_lp_sched_l0'= _lp_sched_l0 and " - "_lp_sched_l1'= _lp_sched_l1 and " - "_lp_sched_l2' < _lp_sched_l2 " + "_lp_linchk_l0'= _lp_linchk_l0 and " + "_lp_linchk_l1'= _lp_linchk_l1 and " + "_lp_linchk_l2' < _lp_linchk_l2 " ") or (" - "_lp_sched_l0'= _lp_sched_l0 and " - "_lp_sched_l1'= _lp_sched_l1 and " - "_lp_sched_l2'= _lp_sched_l2 and " - "_lp_sched_l3' < _lp_sched_l3 " + "_lp_linchk_l0'= _lp_linchk_l0 and " + "_lp_linchk_l1'= _lp_linchk_l1 and " + "_lp_linchk_l2'= _lp_linchk_l2 and " + "_lp_linchk_l3' < _lp_linchk_l3 " ") or (" - "_lp_sched_l0'= _lp_sched_l0 and " - "_lp_sched_l1'= _lp_sched_l1 and " - "_lp_sched_l2'= _lp_sched_l2 and " - "_lp_sched_l3'= _lp_sched_l3 and " - "_lp_sched_l4' < _lp_sched_l4" + "_lp_linchk_l0'= _lp_linchk_l0 and " + "_lp_linchk_l1'= _lp_linchk_l1 and " + "_lp_linchk_l2'= _lp_linchk_l2 and " + "_lp_linchk_l3'= _lp_linchk_l3 and " + "_lp_linchk_l4' < _lp_linchk_l4" ")" "}") @@ -522,9 +525,9 @@ def test_statement_instance_ordering_creation(): expected_sio = isl.Map( "[pi, pj, pk] -> { " - "[_lp_sched_statement'=0, i', k'] -> [_lp_sched_statement=1, i, j]:" + "[_lp_linchk_statement'=0, i', k'] -> [_lp_linchk_statement=1, i, j]:" "0 <= i' < pi and 0 <= k' < pk and 0 <= j < pj and 0 <= i < pi and i > i'; " - "[_lp_sched_statement'=0, i', k'] -> [_lp_sched_statement=1, i=i', j]:" + "[_lp_linchk_statement'=0, i', k'] -> [_lp_linchk_statement=1, i=i', j]:" "0 <= i' < pi and 0 <= k' < pk and 0 <= j < pj " "}" ) @@ -539,9 +542,9 @@ def 
test_statement_instance_ordering_creation(): expected_sio = isl.Map( "[pi, pj, pk] -> { " - "[_lp_sched_statement'=0, i', k'] -> [_lp_sched_statement=1, i, j]:" + "[_lp_linchk_statement'=0, i', k'] -> [_lp_linchk_statement=1, i, j]:" "0 <= i' < pi and 0 <= k' < pk and 0 <= j < pj and 0 <= i < pi and i > i'; " - "[_lp_sched_statement'=0, i', k'] -> [_lp_sched_statement=1, i=i', j]:" + "[_lp_linchk_statement'=0, i', k'] -> [_lp_linchk_statement=1, i=i', j]:" "0 <= i' < pi and 0 <= k' < pk and 0 <= j < pj " "}" ) @@ -556,7 +559,7 @@ def test_statement_instance_ordering_creation(): expected_sio = isl.Map( "[pt, pi, pk] -> { " - "[_lp_sched_statement'=0, i', k'] -> [_lp_sched_statement=1, t]:" + "[_lp_linchk_statement'=0, i', k'] -> [_lp_linchk_statement=1, t]:" "0 <= i' < pi and 0 <= k' < pk and 0 <= t < pt " "}" ) @@ -571,11 +574,11 @@ def test_statement_instance_ordering_creation(): expected_sio = isl.Map( "[pi, pj] -> { " - "[_lp_sched_statement'=0, i', j'] -> [_lp_sched_statement=1, i, j]:" + "[_lp_linchk_statement'=0, i', j'] -> [_lp_linchk_statement=1, i, j]:" "0 <= i' < pi and 0 <= j' < pj and i > i' and 0 <= i < pi and 0 <= j < pj; " - "[_lp_sched_statement'=0, i', j'] -> [_lp_sched_statement=1, i=i', j]:" + "[_lp_linchk_statement'=0, i', j'] -> [_lp_linchk_statement=1, i=i', j]:" "0 <= i' < pi and 0 <= j' < pj and j > j' and 0 <= j < pj; " - "[_lp_sched_statement'=0, i', j'] -> [_lp_sched_statement=1, i=i', j=j']:" + "[_lp_linchk_statement'=0, i', j'] -> [_lp_linchk_statement=1, i=i', j=j']:" "0 <= i' < pi and 0 <= j' < pj " "}" ) @@ -590,7 +593,7 @@ def test_statement_instance_ordering_creation(): expected_sio = isl.Map( "[pt, pi, pj] -> { " - "[_lp_sched_statement'=0, i', j'] -> [_lp_sched_statement=1, t]:" + "[_lp_linchk_statement'=0, i', j'] -> [_lp_linchk_statement=1, t]:" "0 <= i' < pi and 0 <= j' < pj and 0 <= t < pt " "}" ) @@ -605,7 +608,7 @@ def test_statement_instance_ordering_creation(): expected_sio = isl.Map( "[pt, pi, pj] -> { " - "[_lp_sched_statement'=0, i', j'] -> [_lp_sched_statement=1, t]:" + "[_lp_linchk_statement'=0, i', j'] -> [_lp_linchk_statement=1, t]:" "0 <= i' < pi and 0 <= j' < pj and 0 <= t < pt " "}" ) -- GitLab From d09b3f365839817f9382006c0407a718fc5610b6 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 2 Jun 2020 03:00:06 -0500 Subject: [PATCH 252/415] rename StatementInstanceSet.stmt->StatementInstanceSet.stmt_ref, PairwiseScheduleStatement->StatementRef --- loopy/schedule/checker/dependency.py | 16 ++++++++-------- loopy/schedule/checker/schedule.py | 10 +++++----- 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/loopy/schedule/checker/dependency.py b/loopy/schedule/checker/dependency.py index 6bb300582..106a00d70 100644 --- a/loopy/schedule/checker/dependency.py +++ b/loopy/schedule/checker/dependency.py @@ -65,12 +65,12 @@ class StatementPairDependencySet(object): .. attribute:: statement_before - A :class:`loopy.schedule.checker.schedule.PairwiseScheduleStatement` depended + A :class:`loopy.schedule.checker.schedule.StatementRef` depended on by statement_after. .. attribute:: statement_after - A :class:`loopy.schedule.checker.schedule.PairwiseScheduleStatement` which + A :class:`loopy.schedule.checker.schedule.StatementRef` which cdepends on statement_before. .. 
attribute:: deps @@ -208,7 +208,7 @@ def create_dependency_constraint( :arg insn_id_to_int: A :class:`dict` mapping insn_id to int_id, where 'insn_id' and 'int_id' refer to the 'insn_id' and 'int_id' attributes - of :class:`loopy.schedule.checker.schedule.PairwiseScheduleStatement`. + of :class:`loopy.schedule.checker.schedule.StatementRef`. :arg statement_var_name: A :class:`str` specifying the name of the isl variable used to represent the unique :class:`int` statement id. @@ -690,7 +690,7 @@ def create_dependencies_from_legacy_knl(knl): get_all_nonconcurrent_insn_iname_subsets, get_linearization_item_ids_within_inames, ) - from loopy.schedule.checker.schedule import PairwiseScheduleStatement + from loopy.schedule.checker.schedule import StatementRef dt = DependencyType conc_inames, non_conc_inames = get_concurrent_inames(knl) statement_dep_sets = [] @@ -704,8 +704,8 @@ def create_dependencies_from_legacy_knl(knl): statement_dep_sets.append( StatementPairDependencySet( - PairwiseScheduleStatement(insn_id=insn_before.id), - PairwiseScheduleStatement(insn_id=insn_after.id), + StatementRef(insn_id=insn_before.id), + StatementRef(insn_id=insn_after.id), {dt.SAME: shared_non_conc_inames}, knl.get_inames_domain(insn_before_inames), knl.get_inames_domain(insn_after_inames), @@ -740,8 +740,8 @@ def create_dependencies_from_legacy_knl(knl): statement_dep_sets.append( StatementPairDependencySet( - PairwiseScheduleStatement(insn_id=sink_id), - PairwiseScheduleStatement(insn_id=source_id), + StatementRef(insn_id=sink_id), + StatementRef(insn_id=source_id), {dt.PRIOR: shared_non_conc_inames}, knl.get_inames_domain(sink_insn_inames), knl.get_inames_domain(source_insn_inames), diff --git a/loopy/schedule/checker/schedule.py b/loopy/schedule/checker/schedule.py index 812389de6..89cfe5d94 100644 --- a/loopy/schedule/checker/schedule.py +++ b/loopy/schedule/checker/schedule.py @@ -350,13 +350,13 @@ class PairwiseScheduleBuilder(object): def loopy_insn_id_to_lex_sched_id(self): """Return a dictionary mapping insn_id to int_id, where ``insn_id`` and ``int_id`` refer to the ``insn_id`` and ``int_id`` attributes of - :class:`PairwiseScheduleStatement`. + :class:`StatementRef`. 
""" return { - self.stmt_instance_before.stmt.insn_id: - self.stmt_instance_before.stmt.int_id, - self.stmt_instance_after.stmt.insn_id: - self.stmt_instance_after.stmt.int_id, + self.stmt_instance_before.stmt_ref.insn_id: + self.stmt_instance_before.stmt_ref.int_id, + self.stmt_instance_after.stmt_ref.insn_id: + self.stmt_instance_after.stmt_ref.int_id, } def max_lex_dims(self): -- GitLab From a4c97513effa690b7c3a66f67caf54ed565490ad Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 2 Jun 2020 03:30:13 -0500 Subject: [PATCH 253/415] don't require islvars be passed to get_lex_order_constraint(); islvars default: create islvars from before_names+after_names --- .../checker/lexicographic_order_map.py | 27 ++++++++++--------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/loopy/schedule/checker/lexicographic_order_map.py b/loopy/schedule/checker/lexicographic_order_map.py index 17b6616ca..b547e1d94 100644 --- a/loopy/schedule/checker/lexicographic_order_map.py +++ b/loopy/schedule/checker/lexicographic_order_map.py @@ -63,17 +63,11 @@ def get_statement_ordering_map( sio, isl.dim_type.in_, before_marker) -def get_lex_order_constraint(islvars, before_names, after_names): +def get_lex_order_constraint(before_names, after_names, islvars=None): """Return a constraint represented as an :class:`islpy.Set` defining a 'happens before' relationship in a lexicographic ordering. - :arg islvars: A dictionary from variable names to :class:`islpy.PwAff` - instances that represent each of the variables - (islvars may be produced by `islpy.make_zero_and_vars`). The key - '0' is also include and represents a :class:`islpy.PwAff` zero constant. - This dictionary defines the space to be used for the set. - :arg before_names: A list of :class:`str` variable names representing the lexicographic space dimensions for a point in lexicographic time that occurs before. (see example below) @@ -82,6 +76,14 @@ def get_lex_order_constraint(islvars, before_names, after_names): the lexicographic space dimensions for a point in lexicographic time that occurs after. (see example below) + :arg islvars: A dictionary from variable names to :class:`islpy.PwAff` + instances that represent each of the variables + (islvars may be produced by `islpy.make_zero_and_vars`). The key + '0' is also include and represents a :class:`islpy.PwAff` zero constant. + This dictionary defines the space to be used for the set. If no + value is passed, the dictionary will be made using ``before_names`` + and ``after_names``. + :returns: An :class:`islpy.Set` representing a constraint that enforces a lexicographic ordering. 
E.g., if ``before_names = [i0', i1', i2']`` and ``after_names = [i0, i1, i2]``, return the set:: @@ -92,6 +94,10 @@ def get_lex_order_constraint(islvars, before_names, after_names): """ + # If no islvars passed, make them using the names provided + if islvars is None: + islvars = isl.make_zero_and_vars(before_names+after_names, []) + # Initialize constraint with i0' < i0 lex_order_constraint = islvars[before_names[0]].lt_set(islvars[after_names[0]]) @@ -164,12 +170,7 @@ def create_lex_order_map( assert len(before_names) == len(after_names) == n_dims dim_type = isl.dim_type - islvars = isl.make_zero_and_vars( - before_names+after_names, - []) - - lex_order_constraint = get_lex_order_constraint( - islvars, before_names, after_names) + lex_order_constraint = get_lex_order_constraint(before_names, after_names) lex_map = isl.Map.from_domain(lex_order_constraint) lex_map = lex_map.move_dims( -- GitLab From 007d4bca2ae31182ca06fd7de3c0eeee40f0be20 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 2 Jun 2020 03:32:47 -0500 Subject: [PATCH 254/415] update call to get_lex_order_constraint() after islvars changed to optional kwarg --- loopy/schedule/checker/dependency.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/loopy/schedule/checker/dependency.py b/loopy/schedule/checker/dependency.py index 106a00d70..630380371 100644 --- a/loopy/schedule/checker/dependency.py +++ b/loopy/schedule/checker/dependency.py @@ -350,9 +350,9 @@ def create_dependency_constraint( lexicographic_order_map as lom) # TODO handle case where inames list is empty constraint_set = lom.get_lex_order_constraint( - islvars, inames_list_nest_ordered_prime, inames_list_nest_ordered, + islvars, ) else: # priority not known # PRIOR requires upper left quadrant happen before: -- GitLab From ed8c8fa252fc895c3e7ce254111227d981d1b94c Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 2 Jun 2020 04:16:23 -0500 Subject: [PATCH 255/415] delete stray print statements in test_statement_instance_ordering_creation() --- test/test_linearization_checker.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/test/test_linearization_checker.py b/test/test_linearization_checker.py index 01e28f24a..58884b443 100644 --- a/test/test_linearization_checker.py +++ b/test/test_linearization_checker.py @@ -475,14 +475,8 @@ def test_statement_instance_ordering_creation(): sched_lex_order_map, ) - print(sio) - print(expected_sio) - sio_aligned = align_isl_maps_by_var_names(sio, expected_sio) - print(sio_aligned) - print(expected_sio) - assert sio_aligned == expected_sio expected_lex_order_map = isl.Map("{ " -- GitLab From d345c21fc0b6cc4c6c4de3b403c1565f4f35ec17 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sun, 7 Jun 2020 16:00:32 -0500 Subject: [PATCH 256/415] update basedon func change: get_isl_maps_from_PairwiseScheduleBuilder(sched_builder, knl)->sched_builder.build_maps(knl) --- test/test_linearization_checker.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/test/test_linearization_checker.py b/test/test_linearization_checker.py index 9511da729..15d022144 100644 --- a/test/test_linearization_checker.py +++ b/test/test_linearization_checker.py @@ -390,7 +390,6 @@ def test_statement_instance_ordering_creation(): import islpy as isl from loopy.schedule.checker import ( get_schedule_for_statement_pair, - get_isl_maps_from_PairwiseScheduleBuilder, ) from loopy.schedule.checker.utils import ( align_isl_maps_by_var_names, @@ -452,8 +451,7 @@ def test_statement_instance_ordering_creation(): ) # Get two isl maps 
from the PairwiseScheduleBuilder - isl_sched_map_before, isl_sched_map_after = \ - get_isl_maps_from_PairwiseScheduleBuilder(sched_builder, knl) + isl_sched_map_before, isl_sched_map_after = sched_builder.build_maps(knl) # get map representing lexicographic ordering sched_lex_order_map = sched_builder.get_lex_order_map_for_sched_space() -- GitLab From 0ed7d39c42be3dc3c912cff25343039d209ffe03 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sun, 7 Jun 2020 16:06:52 -0500 Subject: [PATCH 257/415] update after rename sched.create_isl_maps()->sched.build_maps() --- loopy/schedule/checker/__init__.py | 6 ++---- .../checker/experimental_scripts/example_wave_equation.py | 4 ++-- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/loopy/schedule/checker/__init__.py b/loopy/schedule/checker/__init__.py index 27e5746d7..28b8d6b0c 100644 --- a/loopy/schedule/checker/__init__.py +++ b/loopy/schedule/checker/__init__.py @@ -241,7 +241,7 @@ def check_linearization_validity( s_before = statement_pair_dep_set.statement_before s_after = statement_pair_dep_set.statement_after # TODO, since we now get the doms inside - # get_isl_maps_from_PairwiseScheduleBuilder(), + # build_maps() # reconsider the content of statement_pair_dep_set, which # currently contains doms(do we still want them there?) @@ -270,9 +270,7 @@ def check_linearization_validity( # Get two isl maps from the PairwiseScheduleBuilder, # one for each linearization item involved in the dependency; - isl_sched_map_before, isl_sched_map_after = \ - get_isl_maps_from_PairwiseScheduleBuilder( - sched_builder, preprocessed_knl) + isl_sched_map_before, isl_sched_map_after = sched_builder.build_maps(knl) if verbose: print("-"*80) diff --git a/loopy/schedule/checker/experimental_scripts/example_wave_equation.py b/loopy/schedule/checker/experimental_scripts/example_wave_equation.py index eaad8961d..3e1436866 100644 --- a/loopy/schedule/checker/experimental_scripts/example_wave_equation.py +++ b/loopy/schedule/checker/experimental_scripts/example_wave_equation.py @@ -194,7 +194,7 @@ sid_to_dom = { sid_after: inames_domain_after, } -isl_sched_map_before, isl_sched_map_after = sched.create_isl_maps( +isl_sched_map_before, isl_sched_map_after = sched.build_maps( inames_domain_before, inames_domain_after) # {{{ verbose @@ -486,7 +486,7 @@ sid_to_dom = { sid_after: inames_domain_after_mapped, } -isl_sched_map_before, isl_sched_map_after = sched.create_isl_maps( +isl_sched_map_before, isl_sched_map_after = sched.build_maps( inames_domain_before_mapped, inames_domain_after_mapped) # {{{ verbose -- GitLab From 1c84538417e2b28798259581293bad4b136d43b1 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 8 Jun 2020 13:32:58 -0500 Subject: [PATCH 258/415] pass the correct knl (preprocessed_knl) to build_maps() --- loopy/schedule/checker/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/loopy/schedule/checker/__init__.py b/loopy/schedule/checker/__init__.py index 28b8d6b0c..43bbe77d7 100644 --- a/loopy/schedule/checker/__init__.py +++ b/loopy/schedule/checker/__init__.py @@ -270,7 +270,8 @@ def check_linearization_validity( # Get two isl maps from the PairwiseScheduleBuilder, # one for each linearization item involved in the dependency; - isl_sched_map_before, isl_sched_map_after = sched_builder.build_maps(knl) + isl_sched_map_before, isl_sched_map_after = sched_builder.build_maps( + preprocessed_knl) if verbose: print("-"*80) -- GitLab From 7c2309ab23db59413b5fb3dbdf3cb58325087941 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 8 
Jun 2020 14:42:59 -0500 Subject: [PATCH 259/415] rename local vars isl_sched_map_*->sched_map_* --- test/test_linearization_checker.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/test/test_linearization_checker.py b/test/test_linearization_checker.py index 3745564d2..5f7329ba1 100644 --- a/test/test_linearization_checker.py +++ b/test/test_linearization_checker.py @@ -451,7 +451,7 @@ def test_statement_instance_ordering_creation(): ) # Get two isl maps from the PairwiseScheduleBuilder - isl_sched_map_before, isl_sched_map_after = sched_builder.build_maps(knl) + sched_map_before, sched_map_after = sched_builder.build_maps(knl) # get map representing lexicographic ordering sched_lex_order_map = sched_builder.get_lex_order_map_for_sched_space() @@ -461,8 +461,8 @@ def test_statement_instance_ordering_creation(): # create statement instance ordering, # maps each statement instance to all statement instances occuring later sio = get_statement_ordering_map( - isl_sched_map_before, - isl_sched_map_after, + sched_map_before, + sched_map_after, sched_lex_order_map, ) -- GitLab From 0f4269b86ae1d7b1863184b731d007bb8463324f Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 8 Jun 2020 16:50:25 -0500 Subject: [PATCH 260/415] update after renaming of align_isl_maps_by_var_names()->ensure_dim_names_match_and_align() --- test/test_linearization_checker.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/test_linearization_checker.py b/test/test_linearization_checker.py index 84decedca..5640da8b8 100644 --- a/test/test_linearization_checker.py +++ b/test/test_linearization_checker.py @@ -392,7 +392,7 @@ def test_statement_instance_ordering_creation(): get_schedule_for_statement_pair, ) from loopy.schedule.checker.utils import ( - align_isl_maps_by_var_names, + ensure_dim_names_match_and_align, append_marker_to_isl_map_var_names, ) from loopy.schedule.checker.lexicographic_order_map import ( @@ -466,7 +466,7 @@ def test_statement_instance_ordering_creation(): sched_lex_order_map, ) - sio_aligned = align_isl_maps_by_var_names(sio, expected_sio) + sio_aligned = ensure_dim_names_match_and_align(sio, expected_sio) assert sio_aligned == expected_sio -- GitLab From 0b2b1ec8bc94035d50723ba6190e0912e4ecf08c Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 8 Jun 2020 16:53:14 -0500 Subject: [PATCH 261/415] update after renaming of align_isl_maps_by_var_names()->ensure_dim_names_match_and_align() --- loopy/schedule/checker/__init__.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/loopy/schedule/checker/__init__.py b/loopy/schedule/checker/__init__.py index 7df34f571..5c223598b 100644 --- a/loopy/schedule/checker/__init__.py +++ b/loopy/schedule/checker/__init__.py @@ -315,7 +315,7 @@ def check_linearization_validity( # (spaces must be aligned so that the variables in the constraint map # correspond to the same variables in the SIO) from loopy.schedule.checker.utils import ( - align_isl_maps_by_var_names, + ensure_dim_names_match_and_align, ) if verbose: @@ -325,7 +325,8 @@ def check_linearization_validity( print("Constraint map:") print(prettier_map_string(constraint_map)) - aligned_constraint_map = align_isl_maps_by_var_names(constraint_map, sio) + aligned_constraint_map = ensure_dim_names_match_and_align( + constraint_map, sio) if verbose: print("-"*80) -- GitLab From 6c51acb58ed1dfcfbaae1a4f4b9ac0efe833e186 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 8 Jun 2020 17:24:59 -0500 Subject: [PATCH 262/415] update after renaming of 
reorder_dims_by_name()->insert_missing_dims_and_reorder_by_name(); remove params add_missing (now always true) and new_names_are_permutation_only (now always false) --- loopy/schedule/checker/dependency.py | 36 ++++++++++++---------------- 1 file changed, 15 insertions(+), 21 deletions(-) diff --git a/loopy/schedule/checker/dependency.py b/loopy/schedule/checker/dependency.py index 630380371..c0f46f8f2 100644 --- a/loopy/schedule/checker/dependency.py +++ b/loopy/schedule/checker/dependency.py @@ -232,7 +232,7 @@ def create_dependency_constraint( make_islvars_with_marker, append_apostrophes, add_dims_to_isl_set, - reorder_dims_by_name, + insert_missing_dims_and_reorder_by_name, create_new_isl_set_with_primes, list_var_names_in_isl_sets, ) @@ -390,15 +390,13 @@ def create_dependency_constraint( [statement_var_name_prime], statement_var_pose) # insert inames missing from doms to enable intersection - domain_to_intersect = reorder_dims_by_name( + domain_to_intersect = insert_missing_dims_and_reorder_by_name( domain_to_intersect, isl.dim_type.out, - append_apostrophes([statement_var_name] + dom_inames_ordered_before), - add_missing=True) - range_to_intersect = reorder_dims_by_name( + append_apostrophes([statement_var_name] + dom_inames_ordered_before)) + range_to_intersect = insert_missing_dims_and_reorder_by_name( range_to_intersect, isl.dim_type.out, - [statement_var_name] + dom_inames_ordered_after, - add_missing=True) + [statement_var_name] + dom_inames_ordered_after) # intersect doms map_with_loop_domain_constraints = all_constraints_map.intersect_domain( @@ -425,7 +423,7 @@ def _create_5pt_stencil_dependency_constraint( make_islvars_with_marker, append_apostrophes, add_dims_to_isl_set, - reorder_dims_by_name, + insert_missing_dims_and_reorder_by_name, create_new_isl_set_with_primes, ) # This function uses the dependency given to create the following constraint: @@ -499,15 +497,13 @@ def _create_5pt_stencil_dependency_constraint( [statement_var_name_prime], statement_var_pose) # insert inames missing from doms to enable intersection - domain_to_intersect = reorder_dims_by_name( + domain_to_intersect = insert_missing_dims_and_reorder_by_name( domain_to_intersect, isl.dim_type.out, - append_apostrophes([statement_var_name] + all_dom_inames_ordered), - add_missing=True) - range_to_intersect = reorder_dims_by_name( + append_apostrophes([statement_var_name] + all_dom_inames_ordered)) + range_to_intersect = insert_missing_dims_and_reorder_by_name( range_to_intersect, isl.dim_type.out, - [statement_var_name] + all_dom_inames_ordered, - add_missing=True) + [statement_var_name] + all_dom_inames_ordered) # intersect doms map_with_loop_domain_constraints = all_constraints_map.intersect_domain( @@ -535,7 +531,7 @@ def create_arbitrary_dependency_constraint( #append_apostrophes, append_marker_to_strings, add_dims_to_isl_set, - reorder_dims_by_name, + insert_missing_dims_and_reorder_by_name, create_new_isl_set_with_primes, ) # This function uses the constraint given to create the following map: @@ -636,16 +632,14 @@ def create_arbitrary_dependency_constraint( [statement_var_name_prime], statement_var_pose) # insert inames missing from doms to enable intersection - domain_to_intersect = reorder_dims_by_name( + domain_to_intersect = insert_missing_dims_and_reorder_by_name( domain_to_intersect, isl.dim_type.out, append_marker_to_strings( # TODO figure out before/after notation - [statement_var_name] + all_dom_inames_ordered, "p"), - add_missing=True) - range_to_intersect = reorder_dims_by_name( + 
[statement_var_name] + all_dom_inames_ordered, "p")) + range_to_intersect = insert_missing_dims_and_reorder_by_name( range_to_intersect, isl.dim_type.out, - [statement_var_name] + all_dom_inames_ordered, - add_missing=True) + [statement_var_name] + all_dom_inames_ordered) # intersect doms map_with_loop_domain_constraints = all_constraints_map.intersect_domain( -- GitLab From 2ec7346a45de2cf3ea7c0ecf2135da954db96fcb Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 8 Jun 2020 17:43:39 -0500 Subject: [PATCH 263/415] update wave equation example based on several recent updates to schedule creation --- .../example_wave_equation.py | 76 ++++--------------- 1 file changed, 14 insertions(+), 62 deletions(-) diff --git a/loopy/schedule/checker/experimental_scripts/example_wave_equation.py b/loopy/schedule/checker/experimental_scripts/example_wave_equation.py index 3e1436866..4b93cb501 100644 --- a/loopy/schedule/checker/experimental_scripts/example_wave_equation.py +++ b/loopy/schedule/checker/experimental_scripts/example_wave_equation.py @@ -29,11 +29,11 @@ import numpy as np import islpy as isl #from loopy.kernel_stat_collector import KernelStatCollector #from loopy.kernel_stat_collector import KernelStatOptions as kso # noqa -from loopy.schedule.checker import _get_concurrent_loop_inames from loopy.schedule.checker.utils import ( prettier_map_string, - reorder_dims_by_name, + ensure_dim_names_match_and_align, append_marker_to_isl_map_var_names, + get_concurrent_inames, ) from loopy.schedule.checker.dependency import ( create_arbitrary_dependency_constraint, @@ -102,7 +102,7 @@ constraint_map = _create_5pt_stencil_dependency_constraint( sid_after = sid_after, space_iname = "ix", time_iname = "it", - statement_var_name = "statement", + statement_var_name = "_lp_linchk_statement", statement_var_pose=0, #all_dom_inames_ordered=None, all_dom_inames_ordered=statement_inames_premap_order, @@ -132,7 +132,7 @@ constraint_map = create_arbitrary_dependency_constraint( inames_domain_after, sid_before=sid_before, sid_after=sid_after, - statement_var_name="statement", + statement_var_name="_lp_linchk_statement", statement_var_pose=0, #all_dom_inames_ordered=None, all_dom_inames_ordered=statement_inames_premap_order, @@ -173,8 +173,7 @@ if verbose: # }}} -conc_loop_inames = _get_concurrent_loop_inames( - linearized_knl.linearization, preprocessed_knl) +conc_loop_inames, _ = get_concurrent_inames(linearized_knl) # Create a mapping of {statement instance: lex point} # including only instructions involved in this dependency @@ -194,8 +193,7 @@ sid_to_dom = { sid_after: inames_domain_after, } -isl_sched_map_before, isl_sched_map_after = sched.build_maps( - inames_domain_before, inames_domain_after) +isl_sched_map_before, isl_sched_map_after = sched.build_maps(linearized_knl) # {{{ verbose @@ -252,29 +250,7 @@ if verbose: # }}} # align constraint map spaces to match sio so we can compare them -# align params -aligned_constraint_map = constraint_map.align_params(sio.space) -#print(prettier_map_string(aligned_constraint_map)) - -# align in_ dims -sio_in_names = sio.space.get_var_names(isl.dim_type.in_) -aligned_constraint_map = reorder_dims_by_name( - aligned_constraint_map, - isl.dim_type.in_, - sio_in_names, - add_missing=False, - new_names_are_permutation_only=True, - ) - -# align out dims -sio_out_names = sio.space.get_var_names(isl.dim_type.out) -aligned_constraint_map = reorder_dims_by_name( - aligned_constraint_map, - isl.dim_type.out, - sio_out_names, - add_missing=False, - 
new_names_are_permutation_only=True, - ) +aligned_constraint_map = ensure_dim_names_match_and_align(constraint_map, sio) # {{{ verbose @@ -376,8 +352,8 @@ m = isl.BasicMap( "16*(tx + tt + tparity) + itt + itx = ix + it and " "0<=tparity<2 and 0 <= itx - itt < 16 and 0 <= itt+itx < 16}") m2 = isl.BasicMap( - "[nx,nt,unused] -> {[statement, ix, it] -> " - "[statement'=statement, tx, tt, tparity, itt, itx]: " + "[nx,nt,unused] -> {[_lp_linchk_statement, ix, it] -> " + "[_lp_linchk_statement'=_lp_linchk_statement, tx, tt, tparity, itt, itx]: " "16*(tx - tt) + itx - itt = ix - it and " "16*(tx + tt + tparity) + itt + itx = ix + it and " "0<=tparity<2 and 0 <= itx - itt < 16 and 0 <= itt+itx < 16}") @@ -388,8 +364,8 @@ m2 = isl.BasicMap( # "16*(tx' + tt' + tparity') + itt' + itx' = ix + it and " # "0<=tparity'<2 and 0 <= itx' - itt' < 16 and 0 <= itt'+itx' < 16}") m2_prime = isl.BasicMap( - "[nx,nt,unused] -> {[statement', ix', it'] -> " - "[statement=statement', tx, tt, tparity, itt, itx]: " + "[nx,nt,unused] -> {[_lp_linchk_statement', ix', it'] -> " + "[_lp_linchk_statement=_lp_linchk_statement', tx, tt, tparity, itt, itx]: " "16*(tx - tt) + itx - itt = ix' - it' and " "16*(tx + tt + tparity) + itt + itx = ix' + it' and " "0<=tparity<2 and 0 <= itx - itt < 16 and 0 <= itt+itx < 16}") @@ -466,8 +442,7 @@ if verbose: # }}} -conc_loop_inames = _get_concurrent_loop_inames( - linearized_knl.linearization, preprocessed_knl) +conc_loop_inames, _ = get_concurrent_inames(linearized_knl) # Create a mapping of {statement instance: lex point} # including only instructions involved in this dependency sched = PairwiseScheduleBuilder( @@ -486,8 +461,7 @@ sid_to_dom = { sid_after: inames_domain_after_mapped, } -isl_sched_map_before, isl_sched_map_after = sched.build_maps( - inames_domain_before_mapped, inames_domain_after_mapped) +isl_sched_map_before, isl_sched_map_after = sched.build_maps(linearized_knl) # {{{ verbose @@ -544,29 +518,7 @@ if verbose: # }}} # align constraint map spaces to match sio so we can compare them -# align params -aligned_constraint_map = mapped_constraint_map.align_params(sio.space) -#print(prettier_map_string(aligned_constraint_map)) - -# align in_ dims -sio_in_names = sio.space.get_var_names(isl.dim_type.in_) -aligned_constraint_map = reorder_dims_by_name( - aligned_constraint_map, - isl.dim_type.in_, - sio_in_names, - add_missing=False, - new_names_are_permutation_only=True, - ) - -# align out dims -sio_out_names = sio.space.get_var_names(isl.dim_type.out) -aligned_constraint_map = reorder_dims_by_name( - aligned_constraint_map, - isl.dim_type.out, - sio_out_names, - add_missing=False, - new_names_are_permutation_only=True, -) +aligned_constraint_map = ensure_dim_names_match_and_align(constraint_map, sio) # {{{ verbose -- GitLab From 49f8948168e34a3b1f49ec78843c7dab2f88f55e Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 30 Jun 2020 00:19:21 -0500 Subject: [PATCH 264/415] copy in (current state of) relevant dependency code from larger downstream MR --- loopy/__init__.py | 5 + loopy/schedule/checker/__init__.py | 221 +++++++++++ loopy/schedule/checker/dependency.py | 531 +++++++++++++++++++++++++++ loopy/schedule/checker/schedule.py | 12 + loopy/schedule/checker/utils.py | 219 +++++++++++ test/test_linearization_checker.py | 442 ++++++++++++++++++++++ 6 files changed, 1430 insertions(+) create mode 100644 loopy/schedule/checker/dependency.py diff --git a/loopy/__init__.py b/loopy/__init__.py index 807ce8834..47d3ebb4b 100644 --- a/loopy/__init__.py +++ b/loopy/__init__.py @@ 
-125,6 +125,9 @@ from loopy.type_inference import infer_unknown_types from loopy.preprocess import preprocess_kernel, realize_reduction from loopy.schedule import ( generate_loop_schedules, get_one_scheduled_kernel, get_one_linearized_kernel) +from loopy.schedule.checker import ( + statement_pair_dep_sets_from_legacy_knl, + check_linearization_validity) from loopy.statistics import (ToCountMap, CountGranularity, stringify_stats_mapping, Op, MemAccess, get_op_map, get_mem_access_map, get_synchronization_map, gather_access_footprints, @@ -250,6 +253,8 @@ __all__ = [ "preprocess_kernel", "realize_reduction", "generate_loop_schedules", "get_one_scheduled_kernel", "get_one_linearized_kernel", + "statement_pair_dep_sets_from_legacy_knl", + "check_linearization_validity", "GeneratedProgram", "CodeGenerationResult", "PreambleInfo", "generate_code", "generate_code_v2", "generate_body", diff --git a/loopy/schedule/checker/__init__.py b/loopy/schedule/checker/__init__.py index 716a0cb58..5c223598b 100644 --- a/loopy/schedule/checker/__init__.py +++ b/loopy/schedule/checker/__init__.py @@ -148,3 +148,224 @@ def get_schedule_for_statement_pair( # }}} # }}} + + +def statement_pair_dep_sets_from_legacy_knl(knl): + """Return a list of + :class:`loopy.schedule.checker.dependency.StatementPairDependencySet` + instances created for a :class:`loopy.LoopKernel` containing legacy + depencencies. + + Create the new dependencies according to the following rules: + + (1) If a dependency exists between ``insn0`` and ``insn1``, create the + dependnecy ``SAME(SNC)`` where ``SNC`` is the set of non-concurrent inames + used by both ``insn0`` and ``insn1``, and ``SAME`` is the relationship + specified by the ``SAME`` attribute of + :class:`loopy.schedule.checker.dependency.DependencyType`. + + (2) For each subset of non-concurrent inames used by any instruction, + + (a), find the set of all instructions using those inames, + + (b), create a directed graph with these instructions as nodes and + edges representing a 'happens before' relationship specfied by + each dependency, + + (c), find the sources and sinks within this graph, and + + (d), connect each sink to each source (sink happens before source) + with a ``PRIOR(SNC)`` dependency, where ``PRIOR`` is the + relationship specified by the ``PRIOR`` attribute of + :class:`loopy.schedule.checker.dependency.DependencyType`. 
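By way of a small, hedged usage sketch (relying on the ``statement_pair_dep_sets_from_legacy_knl`` export added later in this series), the rules above play out as follows for a toy kernel with a single legacy dependency; the instruction ids ``init`` and ``update`` are illustrative::

    import loopy as lp

    # toy kernel: one legacy dep inside a sequential loop
    knl = lp.make_kernel(
        "{[i]: 0<=i<n}",
        """
        <>acc = 0            {id=init}
        for i
            acc = acc + i    {id=update,dep=init}
        end
        """,
        assumptions="n >= 1",
        lang_version=(2018, 2))

    # rule (1) yields a SAME dep for init->update; rule (2), via the
    # source/sink construction, yields a loop-carried PRIOR dep for
    # update->update over iname i
    dep_sets = lp.statement_pair_dep_sets_from_legacy_knl(knl)
    for dep_set in dep_sets:
        print(dep_set)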
+ + """ + # TODO maybe just eliminate this function since it doesn't do much + + # Preprocess if not already preprocessed + # note: kernels must always be preprocessed before scheduling + from loopy import preprocess_kernel + preprocessed_knl = preprocess_kernel(knl) + + # Create StatementPairDependencySet(s) from kernel dependencies + from loopy.schedule.checker.dependency import ( + create_dependencies_from_legacy_knl, + ) + return create_dependencies_from_legacy_knl(preprocessed_knl) + + +def check_linearization_validity( + knl, + statement_pair_dep_sets, + linearization_items, + verbose=False, + ): + # TODO document + + from loopy.schedule.checker.dependency import ( + create_dependency_constraint, + ) + from loopy.schedule.checker.lexicographic_order_map import ( + get_statement_ordering_map, + ) + from loopy.schedule.checker.utils import ( + prettier_map_string, + ) + + # Preprocess if not already preprocessed + # note: kernels must always be preprocessed before scheduling + from loopy import preprocess_kernel + preprocessed_knl = preprocess_kernel(knl) + + if verbose: + print("="*80) + print("Kernel: %s" % (preprocessed_knl.name)) + print("="*80) + print("Dependencies w/domains:") + for dep_set in statement_pair_dep_sets: + print(dep_set) + print(dep_set.dom_before) + print(dep_set.dom_after) + + # Print kernel info ------------------------------------------------------ + print("="*80) + print("Schedule items:") + for linearization_item in linearization_items: + print(linearization_item) + print("="*80) + print("Looping through dep pairs...") + + # For each dependency, create+test linearization containing pair of insns------ + linearization_is_valid = True + for statement_pair_dep_set in statement_pair_dep_sets: + s_before = statement_pair_dep_set.statement_before + s_after = statement_pair_dep_set.statement_after + # TODO, since we now get the doms inside + # build_maps() + # reconsider the content of statement_pair_dep_set, which + # currently contains doms(do we still want them there?) 
+ + if verbose: + print("="*80) + print("Dependency set:") + print(statement_pair_dep_set) + + # Create PairwiseScheduleBuilder: mapping of {statement instance: lex point} + # include only instructions involved in this dependency + sched_builder = get_schedule_for_statement_pair( + preprocessed_knl, + linearization_items, + s_before.insn_id, + s_after.insn_id, + ) + + lp_insn_id_to_lex_sched_id = sched_builder.loopy_insn_id_to_lex_sched_id() + + if verbose: + print("-"*80) + print("PairwiseScheduleBuilder:") + print(sched_builder) + print("dict{lp insn id : sched sid int}:") + print(lp_insn_id_to_lex_sched_id) + + # Get two isl maps from the PairwiseScheduleBuilder, + # one for each linearization item involved in the dependency; + isl_sched_map_before, isl_sched_map_after = sched_builder.build_maps( + preprocessed_knl) + + if verbose: + print("-"*80) + print("ISL maps representing schedules for {before, after} statement:") + print(prettier_map_string(isl_sched_map_before)) + print(prettier_map_string(isl_sched_map_after)) + + # get map representing lexicographic ordering + sched_lex_order_map = sched_builder.get_lex_order_map_for_sched_space() + + # create statement instance ordering, + # maps each statement instance to all statement instances occuring later + sio = get_statement_ordering_map( + isl_sched_map_before, + isl_sched_map_after, + sched_lex_order_map, + ) + + if verbose: + print("-"*80) + print("Statement instance ordering:") + print(prettier_map_string(sio)) + print("-"*80) + print("SIO space (statement instances -> statement instances):") + print(sio.space) + + # create a map representing constraints from the dependency, + # which maps statement instance to all stmt instances that must occur later + # and is acquired from the non-preprocessed kernel + constraint_map = create_dependency_constraint( + statement_pair_dep_set, + knl.loop_priority, + lp_insn_id_to_lex_sched_id, + sched_builder.statement_var_name, + ) + # TODO figure out how to keep a consistent lp_insn_id_to_lex_sched_id map + # when dependency creation is separate from linearization checking + + # reorder variables/params in constraint map space to match SIO so we can + # check to see whether the constraint map is a subset of the SIO + # (spaces must be aligned so that the variables in the constraint map + # correspond to the same variables in the SIO) + from loopy.schedule.checker.utils import ( + ensure_dim_names_match_and_align, + ) + + if verbose: + print("-"*80) + print("Constraint map space (before aligning with SIO):") + print(constraint_map.space) + print("Constraint map:") + print(prettier_map_string(constraint_map)) + + aligned_constraint_map = ensure_dim_names_match_and_align( + constraint_map, sio) + + if verbose: + print("-"*80) + print("Constraint map space (after aligning with SIO):") + print(aligned_constraint_map.space) + print("Constraint map:") + print(prettier_map_string(aligned_constraint_map)) + + import islpy as isl + assert aligned_constraint_map.space == sio.space + assert ( + aligned_constraint_map.space.get_var_names(isl.dim_type.in_) + == sio.space.get_var_names(isl.dim_type.in_)) + assert ( + aligned_constraint_map.space.get_var_names(isl.dim_type.out) + == sio.space.get_var_names(isl.dim_type.out)) + assert ( + aligned_constraint_map.space.get_var_names(isl.dim_type.param) + == sio.space.get_var_names(isl.dim_type.param)) + + if not aligned_constraint_map.is_subset(sio): + + linearization_is_valid = False + + if verbose: + print("================ constraint check failure 
=================") + print("Constraint map not subset of SIO") + print("Dependencies:") + print(statement_pair_dep_set) + print("Statement instance ordering:") + print(prettier_map_string(sio)) + print("constraint_map.gist(sio):") + print(prettier_map_string(aligned_constraint_map.gist(sio))) + print("sio.gist(constraint_map)") + print(prettier_map_string(sio.gist(aligned_constraint_map))) + print("Loop priority known:") + print(preprocessed_knl.loop_priority) + print("{insn id -> sched sid int} dict:") + print(lp_insn_id_to_lex_sched_id) + print("===========================================================") + + return linearization_is_valid diff --git a/loopy/schedule/checker/dependency.py b/loopy/schedule/checker/dependency.py new file mode 100644 index 000000000..a31a991af --- /dev/null +++ b/loopy/schedule/checker/dependency.py @@ -0,0 +1,531 @@ +__copyright__ = "Copyright (C) 2019 James Stevens" + +__license__ = """ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. +""" + +import islpy as isl + + +class DependencyType: + """Strings specifying a particular type of dependency relationship. + + .. attribute:: SAME + + A :class:`str` specifying the following dependency relationship: + + If ``S = {i, j, ...}`` is a set of inames used in both statements + ``insn0`` and ``insn1``, and ``{i', j', ...}`` represent the values + of the inames in ``insn0``, and ``{i, j, ...}`` represent the + values of the inames in ``insn1``, then the dependency + ``insn0 happens before insn1 iff SAME({i, j})`` specifies that + ``insn0 happens before insn1 iff {i' = i and j' = j and ...}``. + Note that ``SAME({}) = True``. + + .. attribute:: PRIOR + + A :class:`str` specifying the following dependency relationship: + + If ``S = {i, j, k, ...}`` is a set of inames used in both statements + ``insn0`` and ``insn1``, and ``{i', j', k', ...}`` represent the values + of the inames in ``insn0``, and ``{i, j, k, ...}`` represent the + values of the inames in ``insn1``, then the dependency + ``insn0 happens before insn1 iff PRIOR({i, j, k})`` specifies one of + two possibilities, depending on whether the loop nest ordering is + known. If the loop nest ordering is unknown, then + ``insn0 happens before insn1 iff {i' < i and j' < j and k' < k ...}``. + If the loop nest ordering is known, the condition becomes + ``{i', j', k', ...}`` is lexicographically less than ``{i, j, k, ...}``, + i.e., ``i' < i or (i' = i and j' < j) or (i' = i and j' = j and k' < k) ...``. 
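The two ``PRIOR`` conditions can be written down directly with :mod:`islpy`; a minimal self-contained sketch for two inames (variable names chosen only for illustration)::

    import islpy as isl

    islvars = isl.make_zero_and_vars(["i'", "j'", "i", "j"], [])

    # PRIOR({i, j}) when the nesting i -> j is known: lexicographic order
    prior_nest_known = (
        islvars["i'"].lt_set(islvars["i"])
        | (islvars["i'"].eq_set(islvars["i"])
           & islvars["j'"].lt_set(islvars["j"])))

    # PRIOR({i, j}) when the nesting is unknown: strictly earlier in every iname
    prior_nest_unknown = (
        islvars["i'"].lt_set(islvars["i"])
        & islvars["j'"].lt_set(islvars["j"]))

    # the unknown-nesting condition is the stricter of the two
    assert prior_nest_unknown.is_subset(prior_nest_known)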
+ + """ + + SAME = "same" + PRIOR = "prior" + + +class StatementPairDependencySet(object): + """A set of dependencies between two statements. + + .. attribute:: statement_before + + A :class:`loopy.schedule.checker.schedule.StatementRef` depended + on by statement_after. + + .. attribute:: statement_after + + A :class:`loopy.schedule.checker.schedule.StatementRef` which + cdepends on statement_before. + + .. attribute:: deps + + A :class:`dict` mapping instances of :class:`DependencyType` to + the :mod:`loopy` kernel inames involved in that particular + dependency relationship. + + .. attribute:: dom_before + + A :class:`islpy.BasicSet` representing the domain for the + dependee statement. + + .. attribute:: dom_after + + A :class:`islpy.BasicSet` representing the domain for the + depender statement. + + """ + + def __init__( + self, + statement_before, + statement_after, + deps, # {dep_type: iname_set} + dom_before=None, + dom_after=None, + ): + self.statement_before = statement_before + self.statement_after = statement_after + self.deps = deps + self.dom_before = dom_before + self.dom_after = dom_after + + def __eq__(self, other): + return ( + self.statement_before == other.statement_before + and self.statement_after == other.statement_after + and self.deps == other.deps + and self.dom_before == other.dom_before + and self.dom_after == other.dom_after + ) + + def __lt__(self, other): + return self.__hash__() < other.__hash__() + + def __hash__(self): + return hash(repr(self)) + + def update_persistent_hash(self, key_hash, key_builder): + """Custom hash computation function for use with + :class:`pytools.persistent_dict.PersistentDict`. + """ + + key_builder.rec(key_hash, self.statement_before) + key_builder.rec(key_hash, self.statement_after) + key_builder.rec(key_hash, self.deps) + key_builder.rec(key_hash, self.dom_before) + key_builder.rec(key_hash, self.dom_after) + + def __str__(self): + result = "%s --before->\n%s iff\n " % ( + self.statement_before, self.statement_after) + return result + " and\n ".join( + ["(%s : %s)" % (dep_type, inames) + for dep_type, inames in self.deps.items()]) + + +def create_elementwise_comparison_conjunction_set( + names0, names1, islvars, op="eq"): + """Create a set constrained by the conjunction of conditions comparing + `names0` to `names1`. + + :arg names0: A list of :class:`str` representing variable names. + + :arg names1: A list of :class:`str` representing variable names. + + :arg islvars: A dictionary from variable names to :class:`islpy.PwAff` + instances that represent each of the variables + (islvars may be produced by `islpy.make_zero_and_vars`). The key + '0' is also include and represents a :class:`islpy.PwAff` zero constant. + + :arg op: A :class:`str` describing the operator to use when creating + the set constraints. Options: `eq` for `=`, `lt` for `<` + + :returns: A set involving `islvars` cosntrained by the constraints + `{names0[0] names1[0] and names0[1] names1[1] and ...}`. 
+ + """ + + # initialize set with constraint that is always true + conj_set = islvars[0].eq_set(islvars[0]) + for n0, n1 in zip(names0, names1): + if op == "eq": + conj_set = conj_set & islvars[n0].eq_set(islvars[n1]) + elif op == "lt": + conj_set = conj_set & islvars[n0].lt_set(islvars[n1]) + + return conj_set + + +def _convert_constraint_set_to_map(constraint_set, mv_count, src_position=None): + dim_type = isl.dim_type + constraint_map = isl.Map.from_domain(constraint_set) + if src_position: + return constraint_map.move_dims( + dim_type.out, 0, dim_type.in_, src_position, mv_count) + else: + return constraint_map.move_dims( + dim_type.out, 0, dim_type.in_, mv_count, mv_count) + + +def create_dependency_constraint( + statement_dep_set, + loop_priorities, + insn_id_to_int, + statement_var_name, + statement_var_pose=0, + dom_inames_ordered_before=None, + dom_inames_ordered_after=None, + ): + """Create a statement dependency constraint represented as a map from + each statement instance to statement instances that must occur later, + i.e., ``{[s'=0, i', j'] -> [s=1, i, j] : condition on {i', j', i, j}}`` + indicates that statement ``0`` comes before statment ``1`` when the + specified condition on inames ``i',j',i,j`` is met. ``i'`` and ``j'`` + are the values of inames ``i`` and ``j`` in first statement instance. + + :arg statement_dep_set: A :class:`StatementPairDependencySet` describing + the dependency relationship between the two statements. + + :arg loop_priorities: A list of tuples from the ``loop_priority`` + attribute of :class:`loopy.LoopKernel` specifying the loop nest + ordering rules. + + :arg insn_id_to_int: A :class:`dict` mapping insn_id to int_id, where + 'insn_id' and 'int_id' refer to the 'insn_id' and 'int_id' attributes + of :class:`loopy.schedule.checker.schedule.StatementRef`. + + :arg statement_var_name: A :class:`str` specifying the name of the + isl variable used to represent the unique :class:`int` statement id. + + :arg statement_var_pose: A :class:`int` specifying which position in the + statement instance tuples holds the dimension representing the + statement id. Defaults to ``0``. + + :arg all_dom_inames_ordered_before: A :class:`list` of :class:`str` + specifying an order for the dimensions representing dependee inames. + + :arg all_dom_inames_ordered_after: A :class:`list` of :class:`str` + specifying an order for the dimensions representing depender inames. + + :returns: An :class:`islpy.Map` mapping each statement instance to all + statement instances that must occur later according to the constraints. 
+ + """ + + from loopy.schedule.checker.utils import ( + make_islvars_with_marker, + append_apostrophes, + add_dims_to_isl_set, + insert_missing_dims_and_reorder_by_name, + create_new_isl_set_with_primes, + list_var_names_in_isl_sets, + ) + # This function uses the dependency given to create the following constraint: + # Statement [s,i,j] comes before statement [s',i',j'] iff + + if dom_inames_ordered_before is None: + dom_inames_ordered_before = list_var_names_in_isl_sets( + [statement_dep_set.dom_before]) + if dom_inames_ordered_after is None: + dom_inames_ordered_after = list_var_names_in_isl_sets( + [statement_dep_set.dom_after]) + + # create some (ordered) isl vars to use, e.g., {s, i, j, s', i', j'} + islvars = make_islvars_with_marker( + var_names_needing_marker=[statement_var_name]+dom_inames_ordered_before, + other_var_names=[statement_var_name]+dom_inames_ordered_after, + marker="'", + ) + statement_var_name_prime = statement_var_name+"'" + + # initialize constraints to False + # this will disappear as soon as we add a constraint + all_constraints_set = islvars[0].eq_set(islvars[0] + 1) + + # for each (dep_type, inames) pair, create 'happens before' constraint, + # all_constraints_set will be the union of all these constraints + dt = DependencyType + for dep_type, inames in statement_dep_set.deps.items(): + # need to put inames in a list so that order of inames and inames' + # matches when calling create_elementwise_comparison_conj... + if not isinstance(inames, list): + inames_list = list(inames) + else: + inames_list = inames[:] + inames_prime = append_apostrophes(inames_list) # e.g., [j', k'] + + if dep_type == dt.SAME: + constraint_set = create_elementwise_comparison_conjunction_set( + inames_prime, inames_list, islvars, op="eq") + elif dep_type == dt.PRIOR: + + priority_known = False + # if nesting info is provided: + if loop_priorities: + # assumes all loop_priority tuples are consistent + + # with multiple priority tuples, determine whether the combined + # info they contain can give us a single, full proiritization, + # e.g., if prios={(a, b), (b, c), (c, d, e)}, then we know + # a -> b -> c -> d -> e + + # remove irrelevant inames from priority tuples (because we're + # about to perform a costly operation on remaining tuples) + relevant_priorities = set() + for p_tuple in loop_priorities: + new_tuple = [iname for iname in p_tuple if iname in inames_list] + # empty tuples and single tuples don't help us define + # a nesting, so ignore them (if we're dealing with a single + # iname, priorities will be ignored later anyway) + if len(new_tuple) > 1: + relevant_priorities.add(tuple(new_tuple)) + + # create a mapping from each iname to inames that must be + # nested inside that iname + nested_inside = {} + for outside_iname in inames_list: + nested_inside_inames = set() + for p_tuple in relevant_priorities: + if outside_iname in p_tuple: + nested_inside_inames.update([ + inside_iname for inside_iname in + p_tuple[p_tuple.index(outside_iname)+1:]]) + nested_inside[outside_iname] = nested_inside_inames + + from loopy.schedule.checker.utils import ( + get_orderings_of_length_n) + # get all orderings that are explicitly allowed by priorities + orders = get_orderings_of_length_n( + nested_inside, + required_length=len(inames_list), + #return_first_found=True, + return_first_found=False, # slower; allows priorities test below + ) + + if orders: + # test for invalid priorities (includes cycles) + if len(orders) != 1: + raise ValueError( + "create_dependency_constriant encountered 
invalid " + "priorities %s" + % (loop_priorities)) + priority_known = True + priority_tuple = orders.pop() + + # if only one loop, we know the priority + if not priority_known and len(inames_list) == 1: + priority_tuple = tuple(inames_list) + priority_known = True + + if priority_known: + # PRIOR requires statement_before complete previous iterations + # of loops before statement_after completes current iteration + # according to loop nest order + inames_list_nest_ordered = [ + iname for iname in priority_tuple + if iname in inames_list] + inames_list_nest_ordered_prime = append_apostrophes( + inames_list_nest_ordered) + if set(inames_list_nest_ordered) != set(inames_list): + # TODO could this happen? + assert False + + from loopy.schedule.checker import ( + lexicographic_order_map as lom) + # TODO handle case where inames list is empty + constraint_set = lom.get_lex_order_constraint( + inames_list_nest_ordered_prime, + inames_list_nest_ordered, + islvars, + ) + else: # priority not known + # PRIOR requires upper left quadrant happen before: + constraint_set = create_elementwise_comparison_conjunction_set( + inames_prime, inames_list, islvars, op="lt") + + # set statement_var_name == statement # + s_before_int = insn_id_to_int[statement_dep_set.statement_before.insn_id] + s_after_int = insn_id_to_int[statement_dep_set.statement_after.insn_id] + constraint_set = constraint_set & islvars[statement_var_name_prime].eq_set( + islvars[0]+s_before_int) + constraint_set = constraint_set & islvars[statement_var_name].eq_set( + islvars[0]+s_after_int) + + # union this constraint_set with all_constraints_set + all_constraints_set = all_constraints_set | constraint_set + + # convert constraint set to map + all_constraints_map = _convert_constraint_set_to_map( + all_constraints_set, + mv_count=len(dom_inames_ordered_after)+1, # +1 for statement var + src_position=len(dom_inames_ordered_before)+1, # +1 for statement var + ) + + # now apply domain sets to constraint variables + + # add statement variable to doms to enable intersection + range_to_intersect = add_dims_to_isl_set( + statement_dep_set.dom_after, isl.dim_type.out, + [statement_var_name], statement_var_pose) + domain_constraint_set = create_new_isl_set_with_primes( + statement_dep_set.dom_before) + domain_to_intersect = add_dims_to_isl_set( + domain_constraint_set, isl.dim_type.out, + [statement_var_name_prime], statement_var_pose) + + # insert inames missing from doms to enable intersection + domain_to_intersect = insert_missing_dims_and_reorder_by_name( + domain_to_intersect, isl.dim_type.out, + append_apostrophes([statement_var_name] + dom_inames_ordered_before)) + range_to_intersect = insert_missing_dims_and_reorder_by_name( + range_to_intersect, + isl.dim_type.out, + [statement_var_name] + dom_inames_ordered_after) + + # intersect doms + map_with_loop_domain_constraints = all_constraints_map.intersect_domain( + domain_to_intersect).intersect_range(range_to_intersect) + + return map_with_loop_domain_constraints + + +def create_dependencies_from_legacy_knl(knl): + """Return a list of :class:`StatementPairDependencySet` instances created + for a :class:`loopy.LoopKernel` containing legacy depencencies. 
+ + Create the new dependencies according to the following rules: + + (1) If a dependency exists between ``insn0`` and ``insn1``, create the + dependnecy ``SAME(SNC)`` where ``SNC`` is the set of non-concurrent inames + used by both ``insn0`` and ``insn1``, and ``SAME`` is the relationship + specified by the ``SAME`` attribute of + :class:`loopy.schedule.checker.dependency.DependencyType`. + + (2) For each subset of non-concurrent inames used by any instruction, + + (a), find the set of all instructions using those inames, + + (b), create a directed graph with these instructions as nodes and + edges representing a 'happens before' relationship specfied by + each dependency, + + (c), find the sources and sinks within this graph, and + + (d), connect each sink to each source (sink happens before source) + with a ``PRIOR(SNC)`` dependency, where ``PRIOR`` is the + relationship specified by the ``PRIOR`` attribute of + :class:`loopy.schedule.checker.dependency.DependencyType`. + + """ + + # Introduce SAME dep for set of shared, non-concurrent inames + + from loopy.schedule.checker.utils import ( + get_concurrent_inames, + get_all_nonconcurrent_insn_iname_subsets, + get_linearization_item_ids_within_inames, + ) + from loopy.schedule.checker.schedule import StatementRef + dt = DependencyType + conc_inames, non_conc_inames = get_concurrent_inames(knl) + statement_dep_sets = [] + for insn_after in knl.instructions: + for insn_before_id in insn_after.depends_on: + insn_before = knl.id_to_insn[insn_before_id] + insn_before_inames = insn_before.within_inames + insn_after_inames = insn_after.within_inames + shared_inames = insn_before_inames & insn_after_inames + shared_non_conc_inames = shared_inames & non_conc_inames + + statement_dep_sets.append( + StatementPairDependencySet( + StatementRef(insn_id=insn_before.id), + StatementRef(insn_id=insn_after.id), + {dt.SAME: shared_non_conc_inames}, + knl.get_inames_domain(insn_before_inames), + knl.get_inames_domain(insn_after_inames), + )) + + # loop-carried deps ------------------------------------------ + + # Go through insns and get all unique insn.depends_on iname sets + non_conc_iname_subsets = get_all_nonconcurrent_insn_iname_subsets( + knl, exclude_empty=True, non_conc_inames=non_conc_inames) + + # For each set of insns within a given iname set, find sources and sinks. 
+ # Then make PRIOR dep from all sinks to all sources at previous iterations + for iname_subset in non_conc_iname_subsets: + # find items within this iname set + linearization_item_ids = get_linearization_item_ids_within_inames( + knl, iname_subset) + + # find sources and sinks + sources, sinks = get_dependency_sources_and_sinks( + knl, linearization_item_ids) + + # create prior deps + + # in future, consider inserting single no-op source and sink + for source_id in sources: + for sink_id in sinks: + sink_insn_inames = knl.id_to_insn[sink_id].within_inames + source_insn_inames = knl.id_to_insn[source_id].within_inames + shared_inames = sink_insn_inames & source_insn_inames + shared_non_conc_inames = shared_inames & non_conc_inames + + statement_dep_sets.append( + StatementPairDependencySet( + StatementRef(insn_id=sink_id), + StatementRef(insn_id=source_id), + {dt.PRIOR: shared_non_conc_inames}, + knl.get_inames_domain(sink_insn_inames), + knl.get_inames_domain(source_insn_inames), + )) + + return set(statement_dep_sets) + + +def get_dependency_sources_and_sinks(knl, linearization_item_ids): + """Implicitly create a directed graph with the linearization items specified + by ``linearization_item_ids`` as nodes, and with edges representing a + 'happens before' relationship specfied by each legacy dependency between + two instructions. Return the sources and sinks within this graph. + + :arg linearization_item_ids: A :class:`list` of :class:`str` representing + loopy instruction ids. + + :returns: Two instances of :class:`set` of :class:`str` instruction ids + representing the sources and sinks in the dependency graph. + + """ + sources = set() + dependees = set() # all dependees (within linearization_item_ids) + for item_id in linearization_item_ids: + # find the deps within linearization_item_ids + deps = knl.id_to_insn[item_id].depends_on & linearization_item_ids + if deps: + # add deps to dependees + dependees.update(deps) + else: # has no deps (within linearization_item_ids), this is a source + sources.add(item_id) + + # sinks don't point to anyone + sinks = linearization_item_ids - dependees + + return sources, sinks diff --git a/loopy/schedule/checker/schedule.py b/loopy/schedule/checker/schedule.py index f83c19b70..affd05337 100644 --- a/loopy/schedule/checker/schedule.py +++ b/loopy/schedule/checker/schedule.py @@ -278,6 +278,18 @@ class PairwiseScheduleBuilder(object): # be zero, so add them. self.pad_lex_tuples_with_zeros() + def loopy_insn_id_to_lex_sched_id(self): + """Return a dictionary mapping insn_id to int_id, where ``insn_id`` and + ``int_id`` refer to the ``insn_id`` and ``int_id`` attributes of + :class:`StatementRef`. + """ + return { + self.stmt_instance_before.stmt_ref.insn_id: + self.stmt_instance_before.stmt_ref.int_id, + self.stmt_instance_after.stmt_ref.insn_id: + self.stmt_instance_after.stmt_ref.int_id, + } + def max_lex_dims(self): return max([ len(self.stmt_instance_before.lex_points), diff --git a/loopy/schedule/checker/utils.py b/loopy/schedule/checker/utils.py index 3000daf1b..3aae40923 100644 --- a/loopy/schedule/checker/utils.py +++ b/loopy/schedule/checker/utils.py @@ -132,6 +132,26 @@ def ensure_dim_names_match_and_align(obj_map, tgt_map): return aligned_obj_map +def create_new_isl_set_with_primes(old_isl_set, marker="'"): + """Return an isl_set with apostrophes appended to + dim_type.set dimension names. + + :arg old_isl_set: A :class:`islpy.Set`. 
+ + :returns: A :class:`islpy.Set` matching `old_isl_set` with + apostrophes appended to dim_type.set dimension names. + + """ + # TODO this is just a special case of append_marker_to_isl_map_var_names + + new_set = old_isl_set.copy() + for i in range(old_isl_set.n_dim()): + new_set = new_set.set_dim_name( + isl.dim_type.set, i, old_isl_set.get_dim_name( + isl.dim_type.set, i)+marker) + return new_set + + def append_marker_to_isl_map_var_names(old_isl_map, dim_type, marker="'"): """Return an isl_map with marker appended to dim_type dimension names. @@ -153,6 +173,40 @@ def append_marker_to_isl_map_var_names(old_isl_map, dim_type, marker="'"): return new_map +def make_islvars_with_marker( + var_names_needing_marker, other_var_names, param_names=[], marker="'"): + """Return a dictionary from variable and parameter names + to :class:`islpy.PwAff` instances that represent each of + the variables and parameters, appending marker to + var_names_needing_marker. + + :arg var_names_needing_marker: A :class:`list` of :class:`str` + elements representing variable names to have markers appended. + + :arg other_var_names: A :class:`list` of :class:`str` + elements representing variable names to be included as-is. + + :arg param_names: A :class:`list` of :class:`str` elements + representing parameter names. + + :returns: A dictionary from variable names to :class:`islpy.PwAff` + instances that represent each of the variables + (islvars may be produced by `islpy.make_zero_and_vars`). The key + '0' is also include and represents a :class:`islpy.PwAff` zero constant. + + """ + + def append_marker(items, mark): + new_items = [] + for item in items: + new_items.append(item+mark) + return new_items + + return isl.make_zero_and_vars( + append_marker(var_names_needing_marker, marker) + + other_var_names, param_names) + + def append_marker_to_strings(strings, marker="'"): if not isinstance(strings, list): raise ValueError("append_marker_to_strings did not receive a list") @@ -160,6 +214,10 @@ def append_marker_to_strings(strings, marker="'"): return [s+marker for s in strings] +def append_apostrophes(strings): + return append_marker_to_strings(strings, marker="'") + + def _get_union(list_items): union = list_items[0] for s in list_items[1:]: @@ -204,6 +262,7 @@ def create_symbolic_map_from_tuples( on these values. """ + # TODO clarify this with more comments # TODO allow None for domains dim_type = isl.dim_type @@ -330,6 +389,166 @@ def get_insn_id_from_linearization_item(linearization_item): return linearization_item.insn_id +# TODO for better performance, could combine these funcs so we don't +# loop over linearization more than once +def get_all_nonconcurrent_insn_iname_subsets( + knl, exclude_empty=False, non_conc_inames=None): + """Return a :class:`set` of every unique subset of non-concurrent + inames used in an instruction in a :class:`loopy.LoopKernel`. + + :arg knl: A :class:`loopy.LoopKernel`. + + :arg exclude_empty: A :class:`bool` specifying whether to + exclude the empty set. + + :arg non_conc_inames: A :class:`set` of non-concurrent inames + which may be provided if already known. + + :returns: A :class:`set` of every unique subset of non-concurrent + inames used in any instruction in a :class:`loopy.LoopKernel`. 
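A brief usage sketch on a toy kernel with no concurrent iname tags (module path assumed from this series; instruction ids are illustrative)::

    import loopy as lp
    from loopy.schedule.checker.utils import (
        get_all_nonconcurrent_insn_iname_subsets,
        get_concurrent_inames,
    )

    knl = lp.make_kernel(
        "{[i, j]: 0<=i,j<n}",
        """
        for i
            a[i] = 1           {id=insn_a}
            for j
                b[i, j] = 2    {id=insn_b}
            end
        end
        """,
        assumptions="n >= 1",
        lang_version=(2018, 2))

    _, non_conc_inames = get_concurrent_inames(knl)
    subsets = get_all_nonconcurrent_insn_iname_subsets(
        knl, exclude_empty=True, non_conc_inames=non_conc_inames)
    # with nothing tagged concurrent, expect {frozenset({'i'}), frozenset({'i', 'j'})}
    print(subsets)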
+ + """ + + if non_conc_inames is None: + _, non_conc_inames = get_concurrent_inames(knl) + + iname_subsets = set() + for insn in knl.instructions: + iname_subsets.add(insn.within_inames & non_conc_inames) + + if exclude_empty: + iname_subsets.discard(frozenset()) + + return iname_subsets + + +def get_linearization_item_ids_within_inames(knl, inames): + linearization_item_ids = set() + for insn in knl.instructions: + if inames.issubset(insn.within_inames): + linearization_item_ids.add(insn.id) + return linearization_item_ids + + +# TODO use yield to clean this up +# TODO use topological sort from loopy, then find longest path in dag +def _generate_orderings_starting_w_prefix( + allowed_after_dict, orderings, required_length=None, + start_prefix=(), return_first_found=False): + # alowed_after_dict = {str: set(str)} + # start prefix = tuple(str) + # orderings = set + if start_prefix: + next_items = allowed_after_dict[start_prefix[-1]]-set(start_prefix) + else: + next_items = allowed_after_dict.keys() + + if required_length: + if len(start_prefix) == required_length: + orderings.add(start_prefix) + if return_first_found: + return + else: + orderings.add(start_prefix) + if return_first_found: + return + + # return if no more items left + if not next_items: + return + + for next_item in next_items: + new_prefix = start_prefix + (next_item,) + _generate_orderings_starting_w_prefix( + allowed_after_dict, + orderings, + required_length=required_length, + start_prefix=new_prefix, + return_first_found=return_first_found, + ) + if return_first_found and orderings: + return + return + + +def get_orderings_of_length_n( + allowed_after_dict, required_length, return_first_found=False): + """Return all orderings found in tree represented by `allowed_after_dict`. + + :arg allowed_after_dict: A :class:`dict` mapping each :class:`string` + names to a :class:`set` of names that are allowed to come after + that name. + + :arg required_length: A :class:`int` representing the length required + for all orderings. Orderings not matching the required length will + not be returned. + + :arg return_first_found: A :class:`bool` specifying whether to return + the first valid ordering found. + + :returns: A :class:`set` of all orderings that are *explicitly* allowed + by the tree represented by `allowed_after_dict`. I.e., if we know + a->b and c->b, we don't know enough to return a->c->b. Note that + if the set for a dict key is empty, nothing is allowed to come after. + + """ + + orderings = set() + _generate_orderings_starting_w_prefix( + allowed_after_dict, + orderings, + required_length=required_length, + start_prefix=(), + return_first_found=return_first_found, + ) + return orderings + + +def create_graph_from_pairs(before_after_pairs): + # create key for every before + graph = dict([(before, set()) for before, _ in before_after_pairs]) + for before, after in before_after_pairs: + graph[before] = graph[before] | set([after, ]) + return graph + + +# only used for example purposes: + + +def create_explicit_map_from_tuples(tuple_pairs, space): + """Return a :class:`islpy.Map` in :class:`islpy.Space` space + mapping tup_in->tup_out for each `(tup_in, tup_out)` pair + in `tuple_pairs`, where `tup_in` and `tup_out` are + tuples of :class:`int` values to be assigned to the + corresponding dimension variables in `space`. 
+ + """ + + dim_type = isl.dim_type + individual_maps = [] + + for tup_in, tup_out in tuple_pairs: + constraints = [] + for i, val_in in enumerate(tup_in): + constraints.append( + isl.Constraint.equality_alloc(space) + .set_coefficient_val(dim_type.in_, i, 1) + .set_constant_val(-1*val_in)) + for i, val_out in enumerate(tup_out): + constraints.append( + isl.Constraint.equality_alloc(space) + .set_coefficient_val(dim_type.out, i, 1) + .set_constant_val(-1*val_out)) + individual_maps.append( + isl.Map.universe(space).add_constraints(constraints)) + + union_map = individual_maps[0] + for m in individual_maps[1:]: + union_map = union_map.union(m) + + return union_map + + def get_EnterLoop_inames(linearization_items, knl): from loopy.schedule import EnterLoop loop_inames = set() diff --git a/test/test_linearization_checker.py b/test/test_linearization_checker.py index 5640da8b8..2b8282305 100644 --- a/test/test_linearization_checker.py +++ b/test/test_linearization_checker.py @@ -31,6 +31,7 @@ from pyopencl.tools import ( # noqa as pytest_generate_tests) from loopy.version import LOOPY_USE_LANGUAGE_VERSION_2018_2 # noqa import logging +from loopy.kernel import KernelState from loopy import ( preprocess_kernel, get_one_linearized_kernel, @@ -607,6 +608,447 @@ def test_statement_instance_ordering_creation(): # }}} +def test_linearization_checker_with_loop_prioritization(): + knl = lp.make_kernel( + [ + "{[i]: 0<=itemp = b[i,k] {id=insn_a} + end + for j + a[i,j] = temp + 1 {id=insn_b,dep=insn_a} + c[i,j] = d[i,j] {id=insn_c} + end + end + for t + e[t] = f[t] {id=insn_d} + end + """, + name="example", + assumptions="pi,pj,pk,pt >= 1", + lang_version=(2018, 2) + ) + knl = lp.add_and_infer_dtypes( + knl, + {"b": np.float32, "d": np.float32, "f": np.float32}) + knl = lp.prioritize_loops(knl, "i,k") + knl = lp.prioritize_loops(knl, "i,j") + + unprocessed_knl = knl.copy() + + statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( + unprocessed_knl) + if hasattr(lp, "add_dependencies_v2"): + knl = lp.add_dependencies_v2( # pylint:disable=no-member + knl, statement_pair_dep_sets) + + # get a linearization to check + if knl.state < KernelState.PREPROCESSED: + knl = preprocess_kernel(knl) + knl = get_one_linearized_kernel(knl) + linearization_items = knl.linearization + + linearization_is_valid = lp.check_linearization_validity( + unprocessed_knl, statement_pair_dep_sets, linearization_items) + assert linearization_is_valid + + +def test_linearization_checker_with_matmul(): + bsize = 16 + knl = lp.make_kernel( + "{[i,k,j]: 0<=i {[i,j]: 0<=i {[i]: 0<=i xi = qpts[1, i2] + <> s = 1-xi + <> r = xi/s + <> aind = 0 {id=aind_init} + for alpha1 + <> w = s**(deg-alpha1) {id=init_w} + for alpha2 + tmp[el,alpha1,i2] = tmp[el,alpha1,i2] + w * coeffs[aind] \ + {id=write_tmp,dep=init_w:aind_init} + w = w * r * ( deg - alpha1 - alpha2 ) / (1 + alpha2) \ + {id=update_w,dep=init_w:write_tmp} + aind = aind + 1 \ + {id=aind_incr,dep=aind_init:write_tmp:update_w} + end + end + end + """, + [lp.GlobalArg("coeffs", None, shape=None), "..."], + name="stroud_bernstein_orig", assumptions="deg>=0 and nels>=1") + knl = lp.add_and_infer_dtypes(knl, + dict(coeffs=np.float32, qpts=np.int32)) + knl = lp.fix_parameters(knl, nqp1d=7, deg=4) + knl = lp.split_iname(knl, "el", 16, inner_tag="l.0") + knl = lp.split_iname(knl, "el_outer", 2, outer_tag="g.0", + inner_tag="ilp", slabs=(0, 1)) + knl = lp.tag_inames(knl, dict(i2="l.1", alpha1="unr", alpha2="unr")) + + unprocessed_knl = knl.copy() + + statement_pair_dep_sets = 
lp.statement_pair_dep_sets_from_legacy_knl( + unprocessed_knl) + if hasattr(lp, "add_dependencies_v2"): + knl = lp.add_dependencies_v2( # pylint:disable=no-member + knl, statement_pair_dep_sets) + + # get a linearization to check + if knl.state < KernelState.PREPROCESSED: + knl = preprocess_kernel(knl) + knl = get_one_linearized_kernel(knl) + linearization_items = knl.linearization + + linearization_is_valid = lp.check_linearization_validity( + unprocessed_knl, statement_pair_dep_sets, linearization_items) + assert linearization_is_valid + + +def test_linearization_checker_with_nop(): + knl = lp.make_kernel( + [ + "{[b]: b_start<=b c_end = 2 + for c + ... nop + end + end + """, + "...", + seq_dependencies=True) + knl = lp.fix_parameters(knl, dim=3) + + unprocessed_knl = knl.copy() + + statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( + unprocessed_knl) + if hasattr(lp, "add_dependencies_v2"): + knl = lp.add_dependencies_v2( # pylint:disable=no-member + knl, statement_pair_dep_sets) + + # get a linearization to check + if knl.state < KernelState.PREPROCESSED: + knl = preprocess_kernel(knl) + knl = get_one_linearized_kernel(knl) + linearization_items = knl.linearization + + linearization_is_valid = lp.check_linearization_validity( + unprocessed_knl, statement_pair_dep_sets, linearization_items) + assert linearization_is_valid + + +def test_linearization_checker_with_multi_domain(): + knl = lp.make_kernel( + [ + "{[i]: 0<=iacc = 0 {id=insn0} + for j + for k + acc = acc + j + k {id=insn1,dep=insn0} + end + end + end + end + """, + name="nest_multi_dom", + assumptions="ni,nj,nk,nx >= 1", + lang_version=(2018, 2) + ) + knl = lp.prioritize_loops(knl, "x,xx,i") + knl = lp.prioritize_loops(knl, "i,j") + knl = lp.prioritize_loops(knl, "j,k") + + unprocessed_knl = knl.copy() + + statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( + unprocessed_knl) + if hasattr(lp, "add_dependencies_v2"): + knl = lp.add_dependencies_v2( # pylint:disable=no-member + knl, statement_pair_dep_sets) + + # get a linearization to check + if knl.state < KernelState.PREPROCESSED: + knl = preprocess_kernel(knl) + knl = get_one_linearized_kernel(knl) + linearization_items = knl.linearization + + linearization_is_valid = lp.check_linearization_validity( + unprocessed_knl, statement_pair_dep_sets, linearization_items) + assert linearization_is_valid + + +def test_linearization_checker_with_loop_carried_deps(): + knl = lp.make_kernel( + "{[i]: 0<=iacc0 = 0 {id=insn0} + for i + acc0 = acc0 + i {id=insn1,dep=insn0} + <>acc2 = acc0 + i {id=insn2,dep=insn1} + <>acc3 = acc2 + i {id=insn3,dep=insn2} + <>acc4 = acc0 + i {id=insn4,dep=insn1} + end + """, + name="loop_carried_deps", + assumptions="n >= 1", + lang_version=(2018, 2) + ) + + unprocessed_knl = knl.copy() + + statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( + unprocessed_knl) + if hasattr(lp, "add_dependencies_v2"): + knl = lp.add_dependencies_v2( # pylint:disable=no-member + knl, statement_pair_dep_sets) + + # get a linearization to check + if knl.state < KernelState.PREPROCESSED: + knl = preprocess_kernel(knl) + knl = get_one_linearized_kernel(knl) + linearization_items = knl.linearization + + linearization_is_valid = lp.check_linearization_validity( + unprocessed_knl, statement_pair_dep_sets, linearization_items) + assert linearization_is_valid + + +def test_linearization_checker_and_invalid_prioritiy_detection(): + ref_knl = lp.make_kernel( + [ + "{[h]: 0<=h acc = 0 + for h,i,j,k + acc = acc + h + i + j + k + end + 
""", + name="priorities", + assumptions="ni,nj,nk,nh >= 1", + lang_version=(2018, 2) + ) + + # no error: + knl0 = lp.prioritize_loops(ref_knl, "h,i") + knl0 = lp.prioritize_loops(ref_knl, "i,j") + knl0 = lp.prioritize_loops(knl0, "j,k") + + unprocessed_knl = knl0.copy() + + statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( + unprocessed_knl) + if hasattr(lp, "add_dependencies_v2"): + knl0 = lp.add_dependencies_v2( # pylint:disable=no-member + knl0, statement_pair_dep_sets) + + # get a linearization to check + if knl0.state < KernelState.PREPROCESSED: + knl0 = preprocess_kernel(knl0) + knl0 = get_one_linearized_kernel(knl0) + linearization_items = knl0.linearization + + linearization_is_valid = lp.check_linearization_validity( + unprocessed_knl, statement_pair_dep_sets, linearization_items) + assert linearization_is_valid + + # no error: + knl1 = lp.prioritize_loops(ref_knl, "h,i,k") + knl1 = lp.prioritize_loops(knl1, "h,j,k") + + unprocessed_knl = knl1.copy() + + statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( + unprocessed_knl) + if hasattr(lp, "add_dependencies_v2"): + knl1 = lp.add_dependencies_v2( # pylint:disable=no-member + knl1, statement_pair_dep_sets) + + # get a linearization to check + if knl1.state < KernelState.PREPROCESSED: + knl1 = preprocess_kernel(knl1) + knl1 = get_one_linearized_kernel(knl1) + linearization_items = knl1.linearization + + linearization_is_valid = lp.check_linearization_validity( + unprocessed_knl, statement_pair_dep_sets, linearization_items) + assert linearization_is_valid + + # error (cycle): + knl2 = lp.prioritize_loops(ref_knl, "h,i,j") + knl2 = lp.prioritize_loops(knl2, "j,k") + try: + if hasattr(lp, "constrain_loop_nesting"): + knl2 = lp.constrain_loop_nesting(knl2, "k,i") # pylint:disable=no-member + else: + knl2 = lp.prioritize_loops(knl2, "k,i") + + unprocessed_knl = knl2.copy() + + statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( + unprocessed_knl) + + # get a linearization to check + if knl2.state < KernelState.PREPROCESSED: + knl2 = preprocess_kernel(knl2) + knl2 = get_one_linearized_kernel(knl2) + linearization_items = knl2.linearization + + linearization_is_valid = lp.check_linearization_validity( + unprocessed_knl, statement_pair_dep_sets, linearization_items) + # should raise error + assert False + except ValueError as e: + if hasattr(lp, "constrain_loop_nesting"): + assert "cycle detected" in str(e) + else: + assert "invalid priorities" in str(e) + + # error (inconsistent priorities): + knl3 = lp.prioritize_loops(ref_knl, "h,i,j,k") + try: + if hasattr(lp, "constrain_loop_nesting"): + knl3 = lp.constrain_loop_nesting( # pylint:disable=no-member + knl3, "h,j,i,k") + else: + knl3 = lp.prioritize_loops(knl3, "h,j,i,k") + + unprocessed_knl = knl3.copy() + + statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( + unprocessed_knl) + + # get a linearization to check + if knl3.state < KernelState.PREPROCESSED: + knl3 = preprocess_kernel(knl3) + knl3 = get_one_linearized_kernel(knl3) + linearization_items = knl3.linearization + + linearization_is_valid = lp.check_linearization_validity( + unprocessed_knl, statement_pair_dep_sets, linearization_items) + # should raise error + assert False + except ValueError as e: + if hasattr(lp, "constrain_loop_nesting"): + assert "cycle detected" in str(e) + else: + assert "invalid priorities" in str(e) + +# TODO create more kernels with invalid linearizations to test linearization checker + + if __name__ == "__main__": if 
len(sys.argv) > 1: exec(sys.argv[1]) -- GitLab From 088cdff796104eca4303c7c094325787b89541c7 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 30 Jun 2020 00:46:20 -0500 Subject: [PATCH 265/415] remove semi-redundant function create_new_isl_set_with_primes() (just a special case of append_marker_to_isl_map_var_names) --- loopy/schedule/checker/dependency.py | 7 ++++--- loopy/schedule/checker/utils.py | 20 -------------------- 2 files changed, 4 insertions(+), 23 deletions(-) diff --git a/loopy/schedule/checker/dependency.py b/loopy/schedule/checker/dependency.py index a31a991af..d5b3b0af1 100644 --- a/loopy/schedule/checker/dependency.py +++ b/loopy/schedule/checker/dependency.py @@ -233,7 +233,7 @@ def create_dependency_constraint( append_apostrophes, add_dims_to_isl_set, insert_missing_dims_and_reorder_by_name, - create_new_isl_set_with_primes, + append_marker_to_isl_map_var_names, list_var_names_in_isl_sets, ) # This function uses the dependency given to create the following constraint: @@ -383,8 +383,9 @@ def create_dependency_constraint( range_to_intersect = add_dims_to_isl_set( statement_dep_set.dom_after, isl.dim_type.out, [statement_var_name], statement_var_pose) - domain_constraint_set = create_new_isl_set_with_primes( - statement_dep_set.dom_before) + domain_constraint_set = append_marker_to_isl_map_var_names( + statement_dep_set.dom_before, isl.dim_type.set, marker="'") + domain_to_intersect = add_dims_to_isl_set( domain_constraint_set, isl.dim_type.out, [statement_var_name_prime], statement_var_pose) diff --git a/loopy/schedule/checker/utils.py b/loopy/schedule/checker/utils.py index 3aae40923..5c51b61b6 100644 --- a/loopy/schedule/checker/utils.py +++ b/loopy/schedule/checker/utils.py @@ -132,26 +132,6 @@ def ensure_dim_names_match_and_align(obj_map, tgt_map): return aligned_obj_map -def create_new_isl_set_with_primes(old_isl_set, marker="'"): - """Return an isl_set with apostrophes appended to - dim_type.set dimension names. - - :arg old_isl_set: A :class:`islpy.Set`. - - :returns: A :class:`islpy.Set` matching `old_isl_set` with - apostrophes appended to dim_type.set dimension names. - - """ - # TODO this is just a special case of append_marker_to_isl_map_var_names - - new_set = old_isl_set.copy() - for i in range(old_isl_set.n_dim()): - new_set = new_set.set_dim_name( - isl.dim_type.set, i, old_isl_set.get_dim_name( - isl.dim_type.set, i)+marker) - return new_set - - def append_marker_to_isl_map_var_names(old_isl_map, dim_type, marker="'"): """Return an isl_map with marker appended to dim_type dimension names. 
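
[Illustrative aside, not part of the patch above: the removed helper is the
dim_type.set / "'" special case of append_marker_to_isl_map_var_names, so a
former call site can be rewritten as below. The example set is made up, and the
snippet assumes this branch of loopy is importable.]

    import islpy as isl
    from loopy.schedule.checker.utils import append_marker_to_isl_map_var_names

    dom = isl.Set("[n] -> { [i, j] : 0 <= i < n and 0 <= j < n }")
    # previously: create_new_isl_set_with_primes(dom)
    marked = append_marker_to_isl_map_var_names(dom, isl.dim_type.set, marker="'")
    print([marked.get_dim_name(isl.dim_type.set, d)
           for d in range(marked.n_dim())])  # prints ["i'", "j'"]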
-- GitLab From 814e7991cb593a15a72a25b960fcd99597702ad7 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 30 Jun 2020 00:54:02 -0500 Subject: [PATCH 266/415] remove option from check_linearization_validity() --- loopy/schedule/checker/__init__.py | 90 +++++----------------------- loopy/schedule/checker/dependency.py | 1 - 2 files changed, 15 insertions(+), 76 deletions(-) diff --git a/loopy/schedule/checker/__init__.py b/loopy/schedule/checker/__init__.py index 5c223598b..ac7df4f6b 100644 --- a/loopy/schedule/checker/__init__.py +++ b/loopy/schedule/checker/__init__.py @@ -198,7 +198,6 @@ def check_linearization_validity( knl, statement_pair_dep_sets, linearization_items, - verbose=False, ): # TODO document @@ -217,24 +216,6 @@ def check_linearization_validity( from loopy import preprocess_kernel preprocessed_knl = preprocess_kernel(knl) - if verbose: - print("="*80) - print("Kernel: %s" % (preprocessed_knl.name)) - print("="*80) - print("Dependencies w/domains:") - for dep_set in statement_pair_dep_sets: - print(dep_set) - print(dep_set.dom_before) - print(dep_set.dom_after) - - # Print kernel info ------------------------------------------------------ - print("="*80) - print("Schedule items:") - for linearization_item in linearization_items: - print(linearization_item) - print("="*80) - print("Looping through dep pairs...") - # For each dependency, create+test linearization containing pair of insns------ linearization_is_valid = True for statement_pair_dep_set in statement_pair_dep_sets: @@ -245,11 +226,6 @@ def check_linearization_validity( # reconsider the content of statement_pair_dep_set, which # currently contains doms(do we still want them there?) - if verbose: - print("="*80) - print("Dependency set:") - print(statement_pair_dep_set) - # Create PairwiseScheduleBuilder: mapping of {statement instance: lex point} # include only instructions involved in this dependency sched_builder = get_schedule_for_statement_pair( @@ -261,24 +237,11 @@ def check_linearization_validity( lp_insn_id_to_lex_sched_id = sched_builder.loopy_insn_id_to_lex_sched_id() - if verbose: - print("-"*80) - print("PairwiseScheduleBuilder:") - print(sched_builder) - print("dict{lp insn id : sched sid int}:") - print(lp_insn_id_to_lex_sched_id) - # Get two isl maps from the PairwiseScheduleBuilder, # one for each linearization item involved in the dependency; isl_sched_map_before, isl_sched_map_after = sched_builder.build_maps( preprocessed_knl) - if verbose: - print("-"*80) - print("ISL maps representing schedules for {before, after} statement:") - print(prettier_map_string(isl_sched_map_before)) - print(prettier_map_string(isl_sched_map_after)) - # get map representing lexicographic ordering sched_lex_order_map = sched_builder.get_lex_order_map_for_sched_space() @@ -290,14 +253,6 @@ def check_linearization_validity( sched_lex_order_map, ) - if verbose: - print("-"*80) - print("Statement instance ordering:") - print(prettier_map_string(sio)) - print("-"*80) - print("SIO space (statement instances -> statement instances):") - print(sio.space) - # create a map representing constraints from the dependency, # which maps statement instance to all stmt instances that must occur later # and is acquired from the non-preprocessed kernel @@ -318,23 +273,9 @@ def check_linearization_validity( ensure_dim_names_match_and_align, ) - if verbose: - print("-"*80) - print("Constraint map space (before aligning with SIO):") - print(constraint_map.space) - print("Constraint map:") - print(prettier_map_string(constraint_map)) - 
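# [A hedged sketch, not part of the patch: the essence of the check performed
# just below. A dependency constraint map is satisfied by a linearization iff
# it is a subset of the statement instance ordering (SIO); islpy's
# Map.is_subset performs that test. The two maps here are made-up stand-ins
# for constraint_map and sio.]
import islpy as isl

_dep = isl.Map("{ [s, i] -> [sp, ip] : s = 0 and sp = 1 and ip = i }")
_sio = isl.Map("{ [s, i] -> [sp, ip] : s = 0 and sp = 1 and ip >= i }")
assert _dep.is_subset(_sio)  # this ordering respects the dependency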
aligned_constraint_map = ensure_dim_names_match_and_align( constraint_map, sio) - if verbose: - print("-"*80) - print("Constraint map space (after aligning with SIO):") - print(aligned_constraint_map.space) - print("Constraint map:") - print(prettier_map_string(aligned_constraint_map)) - import islpy as isl assert aligned_constraint_map.space == sio.space assert ( @@ -351,21 +292,20 @@ def check_linearization_validity( linearization_is_valid = False - if verbose: - print("================ constraint check failure =================") - print("Constraint map not subset of SIO") - print("Dependencies:") - print(statement_pair_dep_set) - print("Statement instance ordering:") - print(prettier_map_string(sio)) - print("constraint_map.gist(sio):") - print(prettier_map_string(aligned_constraint_map.gist(sio))) - print("sio.gist(constraint_map)") - print(prettier_map_string(sio.gist(aligned_constraint_map))) - print("Loop priority known:") - print(preprocessed_knl.loop_priority) - print("{insn id -> sched sid int} dict:") - print(lp_insn_id_to_lex_sched_id) - print("===========================================================") + print("================ constraint check failure =================") + print("Constraint map not subset of SIO") + print("Dependencies:") + print(statement_pair_dep_set) + print("Statement instance ordering:") + print(prettier_map_string(sio)) + print("constraint_map.gist(sio):") + print(prettier_map_string(aligned_constraint_map.gist(sio))) + print("sio.gist(constraint_map)") + print(prettier_map_string(sio.gist(aligned_constraint_map))) + print("Loop priority known:") + print(preprocessed_knl.loop_priority) + print("{insn id -> sched sid int} dict:") + print(lp_insn_id_to_lex_sched_id) + print("===========================================================") return linearization_is_valid diff --git a/loopy/schedule/checker/dependency.py b/loopy/schedule/checker/dependency.py index d5b3b0af1..973665aff 100644 --- a/loopy/schedule/checker/dependency.py +++ b/loopy/schedule/checker/dependency.py @@ -385,7 +385,6 @@ def create_dependency_constraint( [statement_var_name], statement_var_pose) domain_constraint_set = append_marker_to_isl_map_var_names( statement_dep_set.dom_before, isl.dim_type.set, marker="'") - domain_to_intersect = add_dims_to_isl_set( domain_constraint_set, isl.dim_type.out, [statement_var_name_prime], statement_var_pose) -- GitLab From d6aef8273097324cea78962a02330ef9b3129e9f Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 30 Jun 2020 01:04:42 -0500 Subject: [PATCH 267/415] remove create_graph_from_pairs() (only used in downstream branches) --- loopy/schedule/checker/utils.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/loopy/schedule/checker/utils.py b/loopy/schedule/checker/utils.py index 5c51b61b6..5ffa54c9d 100644 --- a/loopy/schedule/checker/utils.py +++ b/loopy/schedule/checker/utils.py @@ -484,14 +484,6 @@ def get_orderings_of_length_n( return orderings -def create_graph_from_pairs(before_after_pairs): - # create key for every before - graph = dict([(before, set()) for before, _ in before_after_pairs]) - for before, after in before_after_pairs: - graph[before] = graph[before] | set([after, ]) - return graph - - # only used for example purposes: -- GitLab From cddc0780b473eaaa4cb4f6d6befeed1bc54bd309 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 30 Jun 2020 01:05:47 -0500 Subject: [PATCH 268/415] remove create_explicit_map_from_tuples() (only used in downstream branches) --- loopy/schedule/checker/utils.py | 37 
--------------------------------- 1 file changed, 37 deletions(-) diff --git a/loopy/schedule/checker/utils.py b/loopy/schedule/checker/utils.py index 5ffa54c9d..8b3f3aa1b 100644 --- a/loopy/schedule/checker/utils.py +++ b/loopy/schedule/checker/utils.py @@ -484,43 +484,6 @@ def get_orderings_of_length_n( return orderings -# only used for example purposes: - - -def create_explicit_map_from_tuples(tuple_pairs, space): - """Return a :class:`islpy.Map` in :class:`islpy.Space` space - mapping tup_in->tup_out for each `(tup_in, tup_out)` pair - in `tuple_pairs`, where `tup_in` and `tup_out` are - tuples of :class:`int` values to be assigned to the - corresponding dimension variables in `space`. - - """ - - dim_type = isl.dim_type - individual_maps = [] - - for tup_in, tup_out in tuple_pairs: - constraints = [] - for i, val_in in enumerate(tup_in): - constraints.append( - isl.Constraint.equality_alloc(space) - .set_coefficient_val(dim_type.in_, i, 1) - .set_constant_val(-1*val_in)) - for i, val_out in enumerate(tup_out): - constraints.append( - isl.Constraint.equality_alloc(space) - .set_coefficient_val(dim_type.out, i, 1) - .set_constant_val(-1*val_out)) - individual_maps.append( - isl.Map.universe(space).add_constraints(constraints)) - - union_map = individual_maps[0] - for m in individual_maps[1:]: - union_map = union_map.union(m) - - return union_map - - def get_EnterLoop_inames(linearization_items, knl): from loopy.schedule import EnterLoop loop_inames = set() -- GitLab From 16cb288f54f3cb925475afe5590c6a90f7b8c175 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 30 Jun 2020 01:09:28 -0500 Subject: [PATCH 269/415] re-add create_graph_from_pairs() and create_explicit_map_from_tuples() (removed in upstream branch but still used here) --- loopy/schedule/checker/utils.py | 45 +++++++++++++++++++++++++++++++++ 1 file changed, 45 insertions(+) diff --git a/loopy/schedule/checker/utils.py b/loopy/schedule/checker/utils.py index 8b3f3aa1b..5c51b61b6 100644 --- a/loopy/schedule/checker/utils.py +++ b/loopy/schedule/checker/utils.py @@ -484,6 +484,51 @@ def get_orderings_of_length_n( return orderings +def create_graph_from_pairs(before_after_pairs): + # create key for every before + graph = dict([(before, set()) for before, _ in before_after_pairs]) + for before, after in before_after_pairs: + graph[before] = graph[before] | set([after, ]) + return graph + + +# only used for example purposes: + + +def create_explicit_map_from_tuples(tuple_pairs, space): + """Return a :class:`islpy.Map` in :class:`islpy.Space` space + mapping tup_in->tup_out for each `(tup_in, tup_out)` pair + in `tuple_pairs`, where `tup_in` and `tup_out` are + tuples of :class:`int` values to be assigned to the + corresponding dimension variables in `space`. 
+ + """ + + dim_type = isl.dim_type + individual_maps = [] + + for tup_in, tup_out in tuple_pairs: + constraints = [] + for i, val_in in enumerate(tup_in): + constraints.append( + isl.Constraint.equality_alloc(space) + .set_coefficient_val(dim_type.in_, i, 1) + .set_constant_val(-1*val_in)) + for i, val_out in enumerate(tup_out): + constraints.append( + isl.Constraint.equality_alloc(space) + .set_coefficient_val(dim_type.out, i, 1) + .set_constant_val(-1*val_out)) + individual_maps.append( + isl.Map.universe(space).add_constraints(constraints)) + + union_map = individual_maps[0] + for m in individual_maps[1:]: + union_map = union_map.union(m) + + return union_map + + def get_EnterLoop_inames(linearization_items, knl): from loopy.schedule import EnterLoop loop_inames = set() -- GitLab From 066eeb56a8393d6b8c2d9f205fbefaa602157b6a Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 30 Jun 2020 07:22:23 -0500 Subject: [PATCH 270/415] now that the integer ids assigned to instructions in schedule are deterministic (before=0, after=1), eliminate insn_id_to_lex_sched_id() function and dict tracking --- loopy/schedule/checker/__init__.py | 7 ------- loopy/schedule/checker/dependency.py | 14 +++++++------- loopy/schedule/checker/schedule.py | 12 ------------ 3 files changed, 7 insertions(+), 26 deletions(-) diff --git a/loopy/schedule/checker/__init__.py b/loopy/schedule/checker/__init__.py index ac7df4f6b..72c369987 100644 --- a/loopy/schedule/checker/__init__.py +++ b/loopy/schedule/checker/__init__.py @@ -235,8 +235,6 @@ def check_linearization_validity( s_after.insn_id, ) - lp_insn_id_to_lex_sched_id = sched_builder.loopy_insn_id_to_lex_sched_id() - # Get two isl maps from the PairwiseScheduleBuilder, # one for each linearization item involved in the dependency; isl_sched_map_before, isl_sched_map_after = sched_builder.build_maps( @@ -259,11 +257,8 @@ def check_linearization_validity( constraint_map = create_dependency_constraint( statement_pair_dep_set, knl.loop_priority, - lp_insn_id_to_lex_sched_id, sched_builder.statement_var_name, ) - # TODO figure out how to keep a consistent lp_insn_id_to_lex_sched_id map - # when dependency creation is separate from linearization checking # reorder variables/params in constraint map space to match SIO so we can # check to see whether the constraint map is a subset of the SIO @@ -304,8 +299,6 @@ def check_linearization_validity( print(prettier_map_string(sio.gist(aligned_constraint_map))) print("Loop priority known:") print(preprocessed_knl.loop_priority) - print("{insn id -> sched sid int} dict:") - print(lp_insn_id_to_lex_sched_id) print("===========================================================") return linearization_is_valid diff --git a/loopy/schedule/checker/dependency.py b/loopy/schedule/checker/dependency.py index 973665aff..b4fba1a02 100644 --- a/loopy/schedule/checker/dependency.py +++ b/loopy/schedule/checker/dependency.py @@ -186,7 +186,6 @@ def _convert_constraint_set_to_map(constraint_set, mv_count, src_position=None): def create_dependency_constraint( statement_dep_set, loop_priorities, - insn_id_to_int, statement_var_name, statement_var_pose=0, dom_inames_ordered_before=None, @@ -206,10 +205,6 @@ def create_dependency_constraint( attribute of :class:`loopy.LoopKernel` specifying the loop nest ordering rules. - :arg insn_id_to_int: A :class:`dict` mapping insn_id to int_id, where - 'insn_id' and 'int_id' refer to the 'insn_id' and 'int_id' attributes - of :class:`loopy.schedule.checker.schedule.StatementRef`. 
- :arg statement_var_name: A :class:`str` specifying the name of the isl variable used to represent the unique :class:`int` statement id. @@ -359,9 +354,14 @@ def create_dependency_constraint( constraint_set = create_elementwise_comparison_conjunction_set( inames_prime, inames_list, islvars, op="lt") + # get ints representing statements in PairwiseSchedule + s_before_int = 0 + s_after_int = 0 if ( + statement_dep_set.statement_before.insn_id == + statement_dep_set.statement_after.insn_id + ) else 1 + # set statement_var_name == statement # - s_before_int = insn_id_to_int[statement_dep_set.statement_before.insn_id] - s_after_int = insn_id_to_int[statement_dep_set.statement_after.insn_id] constraint_set = constraint_set & islvars[statement_var_name_prime].eq_set( islvars[0]+s_before_int) constraint_set = constraint_set & islvars[statement_var_name].eq_set( diff --git a/loopy/schedule/checker/schedule.py b/loopy/schedule/checker/schedule.py index 1c0284b20..fbf0c5111 100644 --- a/loopy/schedule/checker/schedule.py +++ b/loopy/schedule/checker/schedule.py @@ -283,18 +283,6 @@ class PairwiseScheduleBuilder(object): # be zero, so add them. self.pad_lex_tuples_with_zeros() - def loopy_insn_id_to_lex_sched_id(self): - """Return a dictionary mapping insn_id to int_id, where ``insn_id`` and - ``int_id`` refer to the ``insn_id`` and ``int_id`` attributes of - :class:`StatementRef`. - """ - return { - self.stmt_instance_before.stmt_ref.insn_id: - self.stmt_instance_before.stmt_ref.int_id, - self.stmt_instance_after.stmt_ref.insn_id: - self.stmt_instance_after.stmt_ref.int_id, - } - def max_lex_dims(self): return max([ len(self.stmt_instance_before.lex_points), -- GitLab From 0c9bf7d6ca49fdb482dddd2b1e50c0bc03c6098b Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 30 Jun 2020 07:45:33 -0500 Subject: [PATCH 271/415] update after removal of semi-redundant function create_new_isl_set_with_primes() (just a special case of append_marker_to_isl_map_var_names) --- loopy/schedule/checker/dependency.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/loopy/schedule/checker/dependency.py b/loopy/schedule/checker/dependency.py index 505a581b6..50c14b9e3 100644 --- a/loopy/schedule/checker/dependency.py +++ b/loopy/schedule/checker/dependency.py @@ -405,7 +405,7 @@ def create_dependency_constraint( return map_with_loop_domain_constraints -# TODO no longer used, remove +# TODO no longer used, move elsewhere def _create_5pt_stencil_dependency_constraint( dom_before_constraint_set, dom_after_constraint_set, @@ -424,7 +424,7 @@ def _create_5pt_stencil_dependency_constraint( append_apostrophes, add_dims_to_isl_set, insert_missing_dims_and_reorder_by_name, - create_new_isl_set_with_primes, + append_marker_to_isl_map_var_names, ) # This function uses the dependency given to create the following constraint: # Statement [s,i,j] comes before statement [s',i',j'] iff @@ -491,7 +491,8 @@ def _create_5pt_stencil_dependency_constraint( range_to_intersect = add_dims_to_isl_set( dom_after_constraint_set, isl.dim_type.out, [statement_var_name], statement_var_pose) - domain_constraint_set = create_new_isl_set_with_primes(dom_before_constraint_set) + domain_constraint_set = append_marker_to_isl_map_var_names( + dom_before_constraint_set, isl.dim_type.set, marker="'") domain_to_intersect = add_dims_to_isl_set( domain_constraint_set, isl.dim_type.out, [statement_var_name_prime], statement_var_pose) @@ -532,7 +533,7 @@ def create_arbitrary_dependency_constraint( append_marker_to_strings, 
add_dims_to_isl_set, insert_missing_dims_and_reorder_by_name, - create_new_isl_set_with_primes, + append_marker_to_isl_map_var_names, ) # This function uses the constraint given to create the following map: # Statement [s,i,j] comes before statement [s',i',j'] iff @@ -624,9 +625,9 @@ def create_arbitrary_dependency_constraint( range_to_intersect = add_dims_to_isl_set( dom_after_constraint_set, isl.dim_type.out, [statement_var_name], statement_var_pose) - domain_constraint_set = create_new_isl_set_with_primes( - dom_before_constraint_set, - marker="p") # TODO figure out before/after notation + domain_constraint_set = append_marker_to_isl_map_var_names( + dom_before_constraint_set, isl.dim_type.set, marker="p") + # TODO figure out before/after notation domain_to_intersect = add_dims_to_isl_set( domain_constraint_set, isl.dim_type.out, [statement_var_name_prime], statement_var_pose) -- GitLab From 3380450ad171137af5fff4b24108567170c6bb4d Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 7 Jul 2020 00:13:31 -0500 Subject: [PATCH 272/415] eliminate dom_inames_ordered args from create_dependency_constraint (we will deal with any order being returned) --- loopy/schedule/checker/dependency.py | 18 ++++-------------- 1 file changed, 4 insertions(+), 14 deletions(-) diff --git a/loopy/schedule/checker/dependency.py b/loopy/schedule/checker/dependency.py index b4fba1a02..67fff31b7 100644 --- a/loopy/schedule/checker/dependency.py +++ b/loopy/schedule/checker/dependency.py @@ -188,8 +188,6 @@ def create_dependency_constraint( loop_priorities, statement_var_name, statement_var_pose=0, - dom_inames_ordered_before=None, - dom_inames_ordered_after=None, ): """Create a statement dependency constraint represented as a map from each statement instance to statement instances that must occur later, @@ -212,12 +210,6 @@ def create_dependency_constraint( statement instance tuples holds the dimension representing the statement id. Defaults to ``0``. - :arg all_dom_inames_ordered_before: A :class:`list` of :class:`str` - specifying an order for the dimensions representing dependee inames. - - :arg all_dom_inames_ordered_after: A :class:`list` of :class:`str` - specifying an order for the dimensions representing depender inames. - :returns: An :class:`islpy.Map` mapping each statement instance to all statement instances that must occur later according to the constraints. 
@@ -234,12 +226,10 @@ def create_dependency_constraint( # This function uses the dependency given to create the following constraint: # Statement [s,i,j] comes before statement [s',i',j'] iff - if dom_inames_ordered_before is None: - dom_inames_ordered_before = list_var_names_in_isl_sets( - [statement_dep_set.dom_before]) - if dom_inames_ordered_after is None: - dom_inames_ordered_after = list_var_names_in_isl_sets( - [statement_dep_set.dom_after]) + dom_inames_ordered_before = list_var_names_in_isl_sets( + [statement_dep_set.dom_before]) + dom_inames_ordered_after = list_var_names_in_isl_sets( + [statement_dep_set.dom_after]) # create some (ordered) isl vars to use, e.g., {s, i, j, s', i', j'} islvars = make_islvars_with_marker( -- GitLab From 97a5cbeba3ea6c1e03c5fcdd6d83ab7e41d28863 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 7 Jul 2020 00:18:04 -0500 Subject: [PATCH 273/415] add some todos for eliminating need for ordered_inames args being passed around --- loopy/schedule/checker/dependency.py | 4 ++-- .../checker/experimental_scripts/example_wave_equation.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/loopy/schedule/checker/dependency.py b/loopy/schedule/checker/dependency.py index 28e5550ed..1555c131c 100644 --- a/loopy/schedule/checker/dependency.py +++ b/loopy/schedule/checker/dependency.py @@ -405,7 +405,7 @@ def _create_5pt_stencil_dependency_constraint( time_iname, statement_var_name, statement_var_pose=0, - all_dom_inames_ordered=None, + all_dom_inames_ordered=None, # TODO eliminate need for this arg ): """ WIP: NO NEED TO REVIEW YET """ @@ -511,7 +511,7 @@ def create_arbitrary_dependency_constraint( sid_after, statement_var_name, statement_var_pose=0, - all_dom_inames_ordered=None, + all_dom_inames_ordered=None, # TODO eliminate need for this arg ): """ WIP: NO NEED TO REVIEW YET """ diff --git a/loopy/schedule/checker/experimental_scripts/example_wave_equation.py b/loopy/schedule/checker/experimental_scripts/example_wave_equation.py index 4b93cb501..56c940695 100644 --- a/loopy/schedule/checker/experimental_scripts/example_wave_equation.py +++ b/loopy/schedule/checker/experimental_scripts/example_wave_equation.py @@ -135,7 +135,7 @@ constraint_map = create_arbitrary_dependency_constraint( statement_var_name="_lp_linchk_statement", statement_var_pose=0, #all_dom_inames_ordered=None, - all_dom_inames_ordered=statement_inames_premap_order, + all_dom_inames_ordered=statement_inames_premap_order, # TODO eliminate this arg ) print("constraint_map before mapping:") print(prettier_map_string(constraint_map)) -- GitLab From 0c6407683283a9ea424e69b23418e97b09a0c9f8 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 7 Jul 2020 01:05:58 -0500 Subject: [PATCH 274/415] remove statement_var_name arg being passed around, instead use new module-level variable --- loopy/schedule/checker/__init__.py | 1 - loopy/schedule/checker/dependency.py | 19 ++++++++----------- 2 files changed, 8 insertions(+), 12 deletions(-) diff --git a/loopy/schedule/checker/__init__.py b/loopy/schedule/checker/__init__.py index 72c369987..c5374f685 100644 --- a/loopy/schedule/checker/__init__.py +++ b/loopy/schedule/checker/__init__.py @@ -257,7 +257,6 @@ def check_linearization_validity( constraint_map = create_dependency_constraint( statement_pair_dep_set, knl.loop_priority, - sched_builder.statement_var_name, ) # reorder variables/params in constraint map space to match SIO so we can diff --git a/loopy/schedule/checker/dependency.py b/loopy/schedule/checker/dependency.py index 
67fff31b7..b4cb48321 100644 --- a/loopy/schedule/checker/dependency.py +++ b/loopy/schedule/checker/dependency.py @@ -186,7 +186,6 @@ def _convert_constraint_set_to_map(constraint_set, mv_count, src_position=None): def create_dependency_constraint( statement_dep_set, loop_priorities, - statement_var_name, statement_var_pose=0, ): """Create a statement dependency constraint represented as a map from @@ -203,9 +202,6 @@ def create_dependency_constraint( attribute of :class:`loopy.LoopKernel` specifying the loop nest ordering rules. - :arg statement_var_name: A :class:`str` specifying the name of the - isl variable used to represent the unique :class:`int` statement id. - :arg statement_var_pose: A :class:`int` specifying which position in the statement instance tuples holds the dimension representing the statement id. Defaults to ``0``. @@ -223,6 +219,7 @@ def create_dependency_constraint( append_marker_to_isl_map_var_names, list_var_names_in_isl_sets, ) + from loopy.schedule.checker.schedule import STATEMENT_VAR_NAME # This function uses the dependency given to create the following constraint: # Statement [s,i,j] comes before statement [s',i',j'] iff @@ -233,11 +230,11 @@ def create_dependency_constraint( # create some (ordered) isl vars to use, e.g., {s, i, j, s', i', j'} islvars = make_islvars_with_marker( - var_names_needing_marker=[statement_var_name]+dom_inames_ordered_before, - other_var_names=[statement_var_name]+dom_inames_ordered_after, + var_names_needing_marker=[STATEMENT_VAR_NAME]+dom_inames_ordered_before, + other_var_names=[STATEMENT_VAR_NAME]+dom_inames_ordered_after, marker="'", ) - statement_var_name_prime = statement_var_name+"'" + statement_var_name_prime = STATEMENT_VAR_NAME+"'" # initialize constraints to False # this will disappear as soon as we add a constraint @@ -354,7 +351,7 @@ def create_dependency_constraint( # set statement_var_name == statement # constraint_set = constraint_set & islvars[statement_var_name_prime].eq_set( islvars[0]+s_before_int) - constraint_set = constraint_set & islvars[statement_var_name].eq_set( + constraint_set = constraint_set & islvars[STATEMENT_VAR_NAME].eq_set( islvars[0]+s_after_int) # union this constraint_set with all_constraints_set @@ -372,7 +369,7 @@ def create_dependency_constraint( # add statement variable to doms to enable intersection range_to_intersect = add_dims_to_isl_set( statement_dep_set.dom_after, isl.dim_type.out, - [statement_var_name], statement_var_pose) + [STATEMENT_VAR_NAME], statement_var_pose) domain_constraint_set = append_marker_to_isl_map_var_names( statement_dep_set.dom_before, isl.dim_type.set, marker="'") domain_to_intersect = add_dims_to_isl_set( @@ -382,11 +379,11 @@ def create_dependency_constraint( # insert inames missing from doms to enable intersection domain_to_intersect = insert_missing_dims_and_reorder_by_name( domain_to_intersect, isl.dim_type.out, - append_apostrophes([statement_var_name] + dom_inames_ordered_before)) + append_apostrophes([STATEMENT_VAR_NAME] + dom_inames_ordered_before)) range_to_intersect = insert_missing_dims_and_reorder_by_name( range_to_intersect, isl.dim_type.out, - [statement_var_name] + dom_inames_ordered_after) + [STATEMENT_VAR_NAME] + dom_inames_ordered_after) # intersect doms map_with_loop_domain_constraints = all_constraints_map.intersect_domain( -- GitLab From b05eb07a07ddea77510c190e73aa8fb3072ac998 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 7 Jul 2020 01:27:16 -0500 Subject: [PATCH 275/415] elimininate passing of statement_var_pose arg, instead always 
choose 0 (since we align the maps, any idx should work, however any idx other than 0 risks being out of bounds) --- loopy/schedule/checker/dependency.py | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/loopy/schedule/checker/dependency.py b/loopy/schedule/checker/dependency.py index b4cb48321..d0ddb41b2 100644 --- a/loopy/schedule/checker/dependency.py +++ b/loopy/schedule/checker/dependency.py @@ -186,7 +186,6 @@ def _convert_constraint_set_to_map(constraint_set, mv_count, src_position=None): def create_dependency_constraint( statement_dep_set, loop_priorities, - statement_var_pose=0, ): """Create a statement dependency constraint represented as a map from each statement instance to statement instances that must occur later, @@ -202,10 +201,6 @@ def create_dependency_constraint( attribute of :class:`loopy.LoopKernel` specifying the loop nest ordering rules. - :arg statement_var_pose: A :class:`int` specifying which position in the - statement instance tuples holds the dimension representing the - statement id. Defaults to ``0``. - :returns: An :class:`islpy.Map` mapping each statement instance to all statement instances that must occur later according to the constraints. @@ -365,16 +360,18 @@ def create_dependency_constraint( ) # now apply domain sets to constraint variables + statement_var_idx = 0 # index of statement_var dimension in map + # (anything other than 0 risks being out of bounds) # add statement variable to doms to enable intersection range_to_intersect = add_dims_to_isl_set( statement_dep_set.dom_after, isl.dim_type.out, - [STATEMENT_VAR_NAME], statement_var_pose) + [STATEMENT_VAR_NAME], statement_var_idx) domain_constraint_set = append_marker_to_isl_map_var_names( statement_dep_set.dom_before, isl.dim_type.set, marker="'") domain_to_intersect = add_dims_to_isl_set( domain_constraint_set, isl.dim_type.out, - [statement_var_name_prime], statement_var_pose) + [statement_var_name_prime], statement_var_idx) # insert inames missing from doms to enable intersection domain_to_intersect = insert_missing_dims_and_reorder_by_name( -- GitLab From 6a61ed7f48986aa3904edc110c9393dbb9abd095 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 7 Jul 2020 01:38:35 -0500 Subject: [PATCH 276/415] elimininate passing of statement_var_pose and statement_var_name args in other dependency construction funcs --- loopy/schedule/checker/dependency.py | 42 +++++++++---------- .../example_wave_equation.py | 3 -- 2 files changed, 21 insertions(+), 24 deletions(-) diff --git a/loopy/schedule/checker/dependency.py b/loopy/schedule/checker/dependency.py index 8411c29c3..fc861f115 100644 --- a/loopy/schedule/checker/dependency.py +++ b/loopy/schedule/checker/dependency.py @@ -397,8 +397,6 @@ def _create_5pt_stencil_dependency_constraint( sid_after, space_iname, time_iname, - statement_var_name, - statement_var_pose=0, all_dom_inames_ordered=None, # TODO eliminate need for this arg ): """ WIP: NO NEED TO REVIEW YET """ @@ -410,6 +408,7 @@ def _create_5pt_stencil_dependency_constraint( insert_missing_dims_and_reorder_by_name, append_marker_to_isl_map_var_names, ) + from loopy.schedule.checker.schedule import STATEMENT_VAR_NAME # This function uses the dependency given to create the following constraint: # Statement [s,i,j] comes before statement [s',i',j'] iff @@ -422,11 +421,11 @@ def _create_5pt_stencil_dependency_constraint( # create some (ordered) isl vars to use, e.g., {s, i, j, s', i', j'} islvars = make_islvars_with_marker( - 
var_names_needing_marker=[statement_var_name]+all_dom_inames_ordered, - other_var_names=[statement_var_name]+all_dom_inames_ordered, + var_names_needing_marker=[STATEMENT_VAR_NAME]+all_dom_inames_ordered, + other_var_names=[STATEMENT_VAR_NAME]+all_dom_inames_ordered, marker="'", ) - statement_var_name_prime = statement_var_name+"'" + statement_var_name_prime = STATEMENT_VAR_NAME+"'" # initialize constraints to False # this will disappear as soon as we add a constraint @@ -462,7 +461,7 @@ def _create_5pt_stencil_dependency_constraint( # set statement_var_name == statement # constraint_set = constraint_set & islvars[statement_var_name_prime].eq_set( islvars[0]+sid_before) - constraint_set = constraint_set & islvars[statement_var_name].eq_set( + constraint_set = constraint_set & islvars[STATEMENT_VAR_NAME].eq_set( islvars[0]+sid_after) # convert constraint set to map @@ -470,25 +469,26 @@ def _create_5pt_stencil_dependency_constraint( constraint_set, len(all_dom_inames_ordered) + 1) # +1 for statement var # now apply domain sets to constraint variables + statement_var_idx = 0 # index of statement_var dimension in map # add statement variable to doms to enable intersection range_to_intersect = add_dims_to_isl_set( dom_after_constraint_set, isl.dim_type.out, - [statement_var_name], statement_var_pose) + [STATEMENT_VAR_NAME], statement_var_idx) domain_constraint_set = append_marker_to_isl_map_var_names( dom_before_constraint_set, isl.dim_type.set, marker="'") domain_to_intersect = add_dims_to_isl_set( domain_constraint_set, isl.dim_type.out, - [statement_var_name_prime], statement_var_pose) + [statement_var_name_prime], statement_var_idx) # insert inames missing from doms to enable intersection domain_to_intersect = insert_missing_dims_and_reorder_by_name( domain_to_intersect, isl.dim_type.out, - append_apostrophes([statement_var_name] + all_dom_inames_ordered)) + append_apostrophes([STATEMENT_VAR_NAME] + all_dom_inames_ordered)) range_to_intersect = insert_missing_dims_and_reorder_by_name( range_to_intersect, isl.dim_type.out, - [statement_var_name] + all_dom_inames_ordered) + [STATEMENT_VAR_NAME] + all_dom_inames_ordered) # intersect doms map_with_loop_domain_constraints = all_constraints_map.intersect_domain( @@ -503,8 +503,6 @@ def create_arbitrary_dependency_constraint( dom_after_constraint_set, sid_before, sid_after, - statement_var_name, - statement_var_pose=0, all_dom_inames_ordered=None, # TODO eliminate need for this arg ): """ WIP: NO NEED TO REVIEW YET """ @@ -519,6 +517,7 @@ def create_arbitrary_dependency_constraint( insert_missing_dims_and_reorder_by_name, append_marker_to_isl_map_var_names, ) + from loopy.schedule.checker.schedule import STATEMENT_VAR_NAME # This function uses the constraint given to create the following map: # Statement [s,i,j] comes before statement [s',i',j'] iff @@ -531,12 +530,12 @@ def create_arbitrary_dependency_constraint( # create some (ordered) isl vars to use, e.g., {s, i, j, s', i', j'} islvars = make_islvars_with_marker( - var_names_needing_marker=[statement_var_name]+all_dom_inames_ordered, - other_var_names=[statement_var_name]+all_dom_inames_ordered, + var_names_needing_marker=[STATEMENT_VAR_NAME]+all_dom_inames_ordered, + other_var_names=[STATEMENT_VAR_NAME]+all_dom_inames_ordered, marker="p", ) # TODO figure out before/after notation - #statement_var_name_prime = statement_var_name+"'" - statement_var_name_prime = statement_var_name+"p" + #statement_var_name_prime = STATEMENT_VAR_NAME+"'" + statement_var_name_prime = STATEMENT_VAR_NAME+"p" # 
TODO figure out before/after notation # initialize constraints to False @@ -595,7 +594,7 @@ def create_arbitrary_dependency_constraint( islvars[0]+sid_before) ) all_constraints_set = ( - all_constraints_set & islvars[statement_var_name].eq_set( + all_constraints_set & islvars[STATEMENT_VAR_NAME].eq_set( islvars[0]+sid_after) ) @@ -604,27 +603,28 @@ def create_arbitrary_dependency_constraint( all_constraints_set, len(all_dom_inames_ordered) + 1) # +1 for statement var # now apply domain sets to constraint variables + statement_var_idx = 0 # index of statement_var dimension in map # add statement variable to doms to enable intersection range_to_intersect = add_dims_to_isl_set( dom_after_constraint_set, isl.dim_type.out, - [statement_var_name], statement_var_pose) + [STATEMENT_VAR_NAME], statement_var_idx) domain_constraint_set = append_marker_to_isl_map_var_names( dom_before_constraint_set, isl.dim_type.set, marker="p") # TODO figure out before/after notation domain_to_intersect = add_dims_to_isl_set( domain_constraint_set, isl.dim_type.out, - [statement_var_name_prime], statement_var_pose) + [statement_var_name_prime], statement_var_idx) # insert inames missing from doms to enable intersection domain_to_intersect = insert_missing_dims_and_reorder_by_name( domain_to_intersect, isl.dim_type.out, append_marker_to_strings( # TODO figure out before/after notation - [statement_var_name] + all_dom_inames_ordered, "p")) + [STATEMENT_VAR_NAME] + all_dom_inames_ordered, "p")) range_to_intersect = insert_missing_dims_and_reorder_by_name( range_to_intersect, isl.dim_type.out, - [statement_var_name] + all_dom_inames_ordered) + [STATEMENT_VAR_NAME] + all_dom_inames_ordered) # intersect doms map_with_loop_domain_constraints = all_constraints_map.intersect_domain( diff --git a/loopy/schedule/checker/experimental_scripts/example_wave_equation.py b/loopy/schedule/checker/experimental_scripts/example_wave_equation.py index 56c940695..ba06df445 100644 --- a/loopy/schedule/checker/experimental_scripts/example_wave_equation.py +++ b/loopy/schedule/checker/experimental_scripts/example_wave_equation.py @@ -102,8 +102,6 @@ constraint_map = _create_5pt_stencil_dependency_constraint( sid_after = sid_after, space_iname = "ix", time_iname = "it", - statement_var_name = "_lp_linchk_statement", - statement_var_pose=0, #all_dom_inames_ordered=None, all_dom_inames_ordered=statement_inames_premap_order, ) @@ -133,7 +131,6 @@ constraint_map = create_arbitrary_dependency_constraint( sid_before=sid_before, sid_after=sid_after, statement_var_name="_lp_linchk_statement", - statement_var_pose=0, #all_dom_inames_ordered=None, all_dom_inames_ordered=statement_inames_premap_order, # TODO eliminate this arg ) -- GitLab From 07f03fc41c72a5175c13cc4b88fb576edb6d0525 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 7 Jul 2020 01:41:17 -0500 Subject: [PATCH 277/415] eliminate no-longer-used arg statement_var_name --- .../checker/experimental_scripts/example_wave_equation.py | 1 - 1 file changed, 1 deletion(-) diff --git a/loopy/schedule/checker/experimental_scripts/example_wave_equation.py b/loopy/schedule/checker/experimental_scripts/example_wave_equation.py index ba06df445..ed2da94e5 100644 --- a/loopy/schedule/checker/experimental_scripts/example_wave_equation.py +++ b/loopy/schedule/checker/experimental_scripts/example_wave_equation.py @@ -130,7 +130,6 @@ constraint_map = create_arbitrary_dependency_constraint( inames_domain_after, sid_before=sid_before, sid_after=sid_after, - statement_var_name="_lp_linchk_statement", 
#all_dom_inames_ordered=None, all_dom_inames_ordered=statement_inames_premap_order, # TODO eliminate this arg ) -- GitLab From 27cc82071db62d4a9426c223039d90e38caea50c Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 7 Jul 2020 01:48:19 -0500 Subject: [PATCH 278/415] don't pass no-longer-used args (lp_insn_id_to_lex_sched_id, statement_var_name) to create_dependency_constraint() in filter_deps_by_intersection_with_SAME() --- loopy/schedule/checker/dependency.py | 9 --------- 1 file changed, 9 deletions(-) diff --git a/loopy/schedule/checker/dependency.py b/loopy/schedule/checker/dependency.py index fc861f115..b0979ef67 100644 --- a/loopy/schedule/checker/dependency.py +++ b/loopy/schedule/checker/dependency.py @@ -769,11 +769,6 @@ def filter_deps_by_intersection_with_SAME( dt = DependencyType - # create map from loopy insn ids to ints - # (used for consistent statement numbering between dep and SAME maps) - lp_insn_id_to_lex_sched_id = dict( - [(insn_id, sid) for sid, insn_id in enumerate(insn_ids)]) - # determine which dep relations have a non-empty intersection with # the SAME relation deps_filtered = [] @@ -783,8 +778,6 @@ def filter_deps_by_intersection_with_SAME( dep_constraint_map = create_dependency_constraint( statement_pair_dep_set, knl.loop_priority, - lp_insn_id_to_lex_sched_id, - "statement", ) # create isl map representing "SAME" dep for these two insns @@ -804,8 +797,6 @@ def filter_deps_by_intersection_with_SAME( same_dep_constraint_map = create_dependency_constraint( same_dep_set, knl.loop_priority, - lp_insn_id_to_lex_sched_id, - "statement", ) # see whether the intersection of dep map and SAME dep map exists -- GitLab From e4f5003213bdb1af3f953d4a783930194b242b44 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 7 Jul 2020 06:00:39 -0500 Subject: [PATCH 279/415] temporarily rename create_dependencies_from_legacy_knl() -> _create_dependencies_from_legacy_knl_old() --- loopy/schedule/checker/__init__.py | 4 ++-- loopy/schedule/checker/dependency.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/loopy/schedule/checker/__init__.py b/loopy/schedule/checker/__init__.py index c5374f685..fc2fba709 100644 --- a/loopy/schedule/checker/__init__.py +++ b/loopy/schedule/checker/__init__.py @@ -189,9 +189,9 @@ def statement_pair_dep_sets_from_legacy_knl(knl): # Create StatementPairDependencySet(s) from kernel dependencies from loopy.schedule.checker.dependency import ( - create_dependencies_from_legacy_knl, + _create_dependencies_from_legacy_knl_old, ) - return create_dependencies_from_legacy_knl(preprocessed_knl) + return _create_dependencies_from_legacy_knl_old(preprocessed_knl) def check_linearization_validity( diff --git a/loopy/schedule/checker/dependency.py b/loopy/schedule/checker/dependency.py index d0ddb41b2..7a3585e25 100644 --- a/loopy/schedule/checker/dependency.py +++ b/loopy/schedule/checker/dependency.py @@ -389,7 +389,7 @@ def create_dependency_constraint( return map_with_loop_domain_constraints -def create_dependencies_from_legacy_knl(knl): +def _create_dependencies_from_legacy_knl_old(knl): """Return a list of :class:`StatementPairDependencySet` instances created for a :class:`loopy.LoopKernel` containing legacy depencencies. 
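
[A hedged sketch, not part of the patches: the caller-facing effect of this
rename combined with PATCH 280 below, which re-introduces
create_dependencies_from_legacy_knl returning dependency maps directly. The
kernel `knl` is a hypothetical loopy kernel.]

    import loopy as lp

    # new API (PATCH 280): a set of (insn_id_before, insn_id_after,
    # constraint_map) tuples; only the isl map is kept around as state
    dep_maps = lp.create_dependencies_from_legacy_knl(knl)

    lin_knl = lp.get_one_linearized_kernel(lp.preprocess_kernel(knl))
    assert lp.check_linearization_validity(
        knl, dep_maps, lin_knl.linearization)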
-- GitLab From 8773eba99cb5ab6c88f8a0ea28e0dcdd43754dfc Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 7 Jul 2020 06:52:41 -0500 Subject: [PATCH 280/415] make (intermediate) new version of create_dependencies_from_legacy_knl() which combines legacy dep setup with dep map creation so that only the map is kept around as state; update tests accordingly --- loopy/__init__.py | 4 +- loopy/schedule/checker/__init__.py | 50 +++++++++---- test/test_linearization_checker.py | 113 ++++++++++++++++++----------- 3 files changed, 106 insertions(+), 61 deletions(-) diff --git a/loopy/__init__.py b/loopy/__init__.py index 47d3ebb4b..ae6e2e7d3 100644 --- a/loopy/__init__.py +++ b/loopy/__init__.py @@ -126,7 +126,7 @@ from loopy.preprocess import preprocess_kernel, realize_reduction from loopy.schedule import ( generate_loop_schedules, get_one_scheduled_kernel, get_one_linearized_kernel) from loopy.schedule.checker import ( - statement_pair_dep_sets_from_legacy_knl, + create_dependencies_from_legacy_knl, check_linearization_validity) from loopy.statistics import (ToCountMap, CountGranularity, stringify_stats_mapping, Op, MemAccess, get_op_map, get_mem_access_map, @@ -253,7 +253,7 @@ __all__ = [ "preprocess_kernel", "realize_reduction", "generate_loop_schedules", "get_one_scheduled_kernel", "get_one_linearized_kernel", - "statement_pair_dep_sets_from_legacy_knl", + "create_dependencies_from_legacy_knl", "check_linearization_validity", "GeneratedProgram", "CodeGenerationResult", "PreambleInfo", diff --git a/loopy/schedule/checker/__init__.py b/loopy/schedule/checker/__init__.py index fc2fba709..b351e604f 100644 --- a/loopy/schedule/checker/__init__.py +++ b/loopy/schedule/checker/__init__.py @@ -194,9 +194,37 @@ def statement_pair_dep_sets_from_legacy_knl(knl): return _create_dependencies_from_legacy_knl_old(preprocessed_knl) +def create_dependencies_from_legacy_knl(knl): + + from loopy.schedule.checker.dependency import ( + create_dependency_constraint, + ) + + spds = statement_pair_dep_sets_from_legacy_knl(knl) + + dep_maps = set() + for statement_pair_dep_set in spds: + # create a map representing constraints from the dependency, + # which maps statement instance to all stmt instances that must occur later + # and is acquired from the non-preprocessed kernel + constraint_map = create_dependency_constraint( + statement_pair_dep_set, + knl.loop_priority, + ) + + dep_maps.add(( + statement_pair_dep_set.statement_before.insn_id, + statement_pair_dep_set.statement_after.insn_id, + constraint_map, + )) + + return dep_maps + + def check_linearization_validity( knl, - statement_pair_dep_sets, + #statement_pair_dep_sets, + dep_maps, linearization_items, ): # TODO document @@ -218,9 +246,8 @@ def check_linearization_validity( # For each dependency, create+test linearization containing pair of insns------ linearization_is_valid = True - for statement_pair_dep_set in statement_pair_dep_sets: - s_before = statement_pair_dep_set.statement_before - s_after = statement_pair_dep_set.statement_after + #for statement_pair_dep_set in statement_pair_dep_sets: + for insn_id_before, insn_id_after, constraint_map in dep_maps: # TODO, since we now get the doms inside # build_maps() # reconsider the content of statement_pair_dep_set, which @@ -231,8 +258,8 @@ def check_linearization_validity( sched_builder = get_schedule_for_statement_pair( preprocessed_knl, linearization_items, - s_before.insn_id, - s_after.insn_id, + insn_id_before, + insn_id_after, ) # Get two isl maps from the PairwiseScheduleBuilder, @@ -251,14 +278,6 @@ 
def check_linearization_validity( sched_lex_order_map, ) - # create a map representing constraints from the dependency, - # which maps statement instance to all stmt instances that must occur later - # and is acquired from the non-preprocessed kernel - constraint_map = create_dependency_constraint( - statement_pair_dep_set, - knl.loop_priority, - ) - # reorder variables/params in constraint map space to match SIO so we can # check to see whether the constraint map is a subset of the SIO # (spaces must be aligned so that the variables in the constraint map @@ -289,7 +308,8 @@ def check_linearization_validity( print("================ constraint check failure =================") print("Constraint map not subset of SIO") print("Dependencies:") - print(statement_pair_dep_set) + print(insn_id_before+"->"+insn_id_after) + print(prettier_map_string(constraint_map)) print("Statement instance ordering:") print(prettier_map_string(sio)) print("constraint_map.gist(sio):") diff --git a/test/test_linearization_checker.py b/test/test_linearization_checker.py index b58b15e66..6b30987da 100644 --- a/test/test_linearization_checker.py +++ b/test/test_linearization_checker.py @@ -663,11 +663,11 @@ def test_linearization_checker_with_loop_prioritization(): unprocessed_knl = knl.copy() - statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( - unprocessed_knl) + deps = lp.create_dependencies_from_legacy_knl(unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): + # TODO update this after dep refactoring knl = lp.add_dependencies_v2( # pylint:disable=no-member - knl, statement_pair_dep_sets) + knl, deps) # get a linearization to check if knl.state < KernelState.PREPROCESSED: @@ -676,7 +676,7 @@ def test_linearization_checker_with_loop_prioritization(): linearization_items = knl.linearization linearization_is_valid = lp.check_linearization_validity( - unprocessed_knl, statement_pair_dep_sets, linearization_items) + unprocessed_knl, deps, linearization_items) assert linearization_is_valid @@ -701,11 +701,11 @@ def test_linearization_checker_with_matmul(): unprocessed_knl = knl.copy() - statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( - unprocessed_knl) + deps = lp.create_dependencies_from_legacy_knl(unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): + # TODO update this after dep refactoring knl = lp.add_dependencies_v2( # pylint:disable=no-member - knl, statement_pair_dep_sets) + knl, deps) # get a linearization to check if knl.state < KernelState.PREPROCESSED: @@ -714,7 +714,7 @@ def test_linearization_checker_with_matmul(): linearization_items = knl.linearization linearization_is_valid = lp.check_linearization_validity( - unprocessed_knl, statement_pair_dep_sets, linearization_items) + unprocessed_knl, deps, linearization_items) assert linearization_is_valid @@ -752,11 +752,11 @@ def test_linearization_checker_with_dependent_domain(): unprocessed_knl = knl.copy() - statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( - unprocessed_knl) + deps = lp.create_dependencies_from_legacy_knl(unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): + # TODO update this after dep refactoring knl = lp.add_dependencies_v2( # pylint:disable=no-member - knl, statement_pair_dep_sets) + knl, deps) # get a linearization to check if knl.state < KernelState.PREPROCESSED: @@ -765,7 +765,7 @@ def test_linearization_checker_with_dependent_domain(): linearization_items = knl.linearization linearization_is_valid = lp.check_linearization_validity( - unprocessed_knl, 
statement_pair_dep_sets, linearization_items) + unprocessed_knl, deps, linearization_items) assert linearization_is_valid @@ -806,11 +806,11 @@ def test_linearization_checker_with_stroud_bernstein(): unprocessed_knl = knl.copy() - statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( - unprocessed_knl) + deps = lp.create_dependencies_from_legacy_knl(unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): + # TODO update this after dep refactoring knl = lp.add_dependencies_v2( # pylint:disable=no-member - knl, statement_pair_dep_sets) + knl, deps) # get a linearization to check if knl.state < KernelState.PREPROCESSED: @@ -819,7 +819,7 @@ def test_linearization_checker_with_stroud_bernstein(): linearization_items = knl.linearization linearization_is_valid = lp.check_linearization_validity( - unprocessed_knl, statement_pair_dep_sets, linearization_items) + unprocessed_knl, deps, linearization_items) assert linearization_is_valid @@ -843,11 +843,11 @@ def test_linearization_checker_with_nop(): unprocessed_knl = knl.copy() - statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( - unprocessed_knl) + deps = lp.create_dependencies_from_legacy_knl(unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): + # TODO update this after dep refactoring knl = lp.add_dependencies_v2( # pylint:disable=no-member - knl, statement_pair_dep_sets) + knl, deps) # get a linearization to check if knl.state < KernelState.PREPROCESSED: @@ -856,7 +856,7 @@ def test_linearization_checker_with_nop(): linearization_items = knl.linearization linearization_is_valid = lp.check_linearization_validity( - unprocessed_knl, statement_pair_dep_sets, linearization_items) + unprocessed_knl, deps, linearization_items) assert linearization_is_valid @@ -890,11 +890,11 @@ def test_linearization_checker_with_multi_domain(): unprocessed_knl = knl.copy() - statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( - unprocessed_knl) + deps = lp.create_dependencies_from_legacy_knl(unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): + # TODO update this after dep refactoring knl = lp.add_dependencies_v2( # pylint:disable=no-member - knl, statement_pair_dep_sets) + knl, deps) # get a linearization to check if knl.state < KernelState.PREPROCESSED: @@ -903,7 +903,7 @@ def test_linearization_checker_with_multi_domain(): linearization_items = knl.linearization linearization_is_valid = lp.check_linearization_validity( - unprocessed_knl, statement_pair_dep_sets, linearization_items) + unprocessed_knl, deps, linearization_items) assert linearization_is_valid @@ -926,11 +926,11 @@ def test_linearization_checker_with_loop_carried_deps(): unprocessed_knl = knl.copy() - statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( - unprocessed_knl) + deps = lp.create_dependencies_from_legacy_knl(unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): + # TODO update this after dep refactoring knl = lp.add_dependencies_v2( # pylint:disable=no-member - knl, statement_pair_dep_sets) + knl, deps) # get a linearization to check if knl.state < KernelState.PREPROCESSED: @@ -939,7 +939,7 @@ def test_linearization_checker_with_loop_carried_deps(): linearization_items = knl.linearization linearization_is_valid = lp.check_linearization_validity( - unprocessed_knl, statement_pair_dep_sets, linearization_items) + unprocessed_knl, deps, linearization_items) assert linearization_is_valid @@ -969,11 +969,11 @@ def test_linearization_checker_and_invalid_prioritiy_detection(): unprocessed_knl = knl0.copy() - 
statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( - unprocessed_knl) + deps = lp.create_dependencies_from_legacy_knl(unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): + # TODO update this after dep refactoring knl0 = lp.add_dependencies_v2( # pylint:disable=no-member - knl0, statement_pair_dep_sets) + knl0, deps) # get a linearization to check if knl0.state < KernelState.PREPROCESSED: @@ -982,7 +982,7 @@ def test_linearization_checker_and_invalid_prioritiy_detection(): linearization_items = knl0.linearization linearization_is_valid = lp.check_linearization_validity( - unprocessed_knl, statement_pair_dep_sets, linearization_items) + unprocessed_knl, deps, linearization_items) assert linearization_is_valid # no error: @@ -991,11 +991,11 @@ def test_linearization_checker_and_invalid_prioritiy_detection(): unprocessed_knl = knl1.copy() - statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( - unprocessed_knl) + deps = lp.create_dependencies_from_legacy_knl(unprocessed_knl) if hasattr(lp, "add_dependencies_v2"): + # TODO update this after dep refactoring knl1 = lp.add_dependencies_v2( # pylint:disable=no-member - knl1, statement_pair_dep_sets) + knl1, deps) # get a linearization to check if knl1.state < KernelState.PREPROCESSED: @@ -1004,22 +1004,35 @@ def test_linearization_checker_and_invalid_prioritiy_detection(): linearization_items = knl1.linearization linearization_is_valid = lp.check_linearization_validity( - unprocessed_knl, statement_pair_dep_sets, linearization_items) + unprocessed_knl, deps, linearization_items) assert linearization_is_valid # error (cycle): knl2 = lp.prioritize_loops(ref_knl, "h,i,j") knl2 = lp.prioritize_loops(knl2, "j,k") + # TODO think about when legacy deps should be updated based on prio changes + try: if hasattr(lp, "constrain_loop_nesting"): knl2 = lp.constrain_loop_nesting(knl2, "k,i") # pylint:disable=no-member + + # legacy deps depend on priorities, so update deps using new knl + deps = lp.create_dependencies_from_legacy_knl(knl2) + if hasattr(lp, "add_dependencies_v2"): + # TODO update this after dep refactoring + knl2 = lp.add_dependencies_v2( # pylint:disable=no-member + knl2, deps) else: knl2 = lp.prioritize_loops(knl2, "k,i") - unprocessed_knl = knl2.copy() + # legacy deps depend on priorities, so update deps using new knl + deps = lp.create_dependencies_from_legacy_knl(knl2) + if hasattr(lp, "add_dependencies_v2"): + # TODO update this after dep refactoring + knl2 = lp.add_dependencies_v2( # pylint:disable=no-member + knl2, deps) - statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( - unprocessed_knl) + unprocessed_knl = knl2.copy() # get a linearization to check if knl2.state < KernelState.PREPROCESSED: @@ -1028,7 +1041,7 @@ def test_linearization_checker_and_invalid_prioritiy_detection(): linearization_items = knl2.linearization linearization_is_valid = lp.check_linearization_validity( - unprocessed_knl, statement_pair_dep_sets, linearization_items) + unprocessed_knl, deps, linearization_items) # should raise error assert False except ValueError as e: @@ -1039,17 +1052,29 @@ def test_linearization_checker_and_invalid_prioritiy_detection(): # error (inconsistent priorities): knl3 = lp.prioritize_loops(ref_knl, "h,i,j,k") + # TODO think about when legacy deps should be updated based on prio changes try: if hasattr(lp, "constrain_loop_nesting"): knl3 = lp.constrain_loop_nesting( # pylint:disable=no-member knl3, "h,j,i,k") + + # legacy deps depend on priorities, so update deps using new knl + 
deps = lp.create_dependencies_from_legacy_knl(knl3) + if hasattr(lp, "add_dependencies_v2"): + # TODO update this after dep refactoring + knl3 = lp.add_dependencies_v2( # pylint:disable=no-member + knl3, deps) else: knl3 = lp.prioritize_loops(knl3, "h,j,i,k") - unprocessed_knl = knl3.copy() + # legacy deps depend on priorities, so update deps using new knl + deps = lp.create_dependencies_from_legacy_knl(knl3) + if hasattr(lp, "add_dependencies_v2"): + # TODO update this after dep refactoring + knl3 = lp.add_dependencies_v2( # pylint:disable=no-member + knl3, deps) - statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( - unprocessed_knl) + unprocessed_knl = knl3.copy() # get a linearization to check if knl3.state < KernelState.PREPROCESSED: @@ -1058,7 +1083,7 @@ def test_linearization_checker_and_invalid_prioritiy_detection(): linearization_items = knl3.linearization linearization_is_valid = lp.check_linearization_validity( - unprocessed_knl, statement_pair_dep_sets, linearization_items) + unprocessed_knl, deps, linearization_items) # should raise error assert False except ValueError as e: -- GitLab From 9507804b17e615b1632f97faa769420601057eee Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 7 Jul 2020 06:57:00 -0500 Subject: [PATCH 281/415] eliminate function statement_pair_dep_sets_from_legacy_knl(); (inline contents in new version of create_dependencies_from_legacy_knl()) --- loopy/schedule/checker/__init__.py | 20 +++++++------------- 1 file changed, 7 insertions(+), 13 deletions(-) diff --git a/loopy/schedule/checker/__init__.py b/loopy/schedule/checker/__init__.py index b351e604f..f6f704f53 100644 --- a/loopy/schedule/checker/__init__.py +++ b/loopy/schedule/checker/__init__.py @@ -150,9 +150,9 @@ def get_schedule_for_statement_pair( # }}} -def statement_pair_dep_sets_from_legacy_knl(knl): +def create_dependencies_from_legacy_knl(knl): """Return a list of - :class:`loopy.schedule.checker.dependency.StatementPairDependencySet` + :class:`loopy.schedule.checker.dependency.TBD` instances created for a :class:`loopy.LoopKernel` containing legacy depencencies. @@ -180,7 +180,10 @@ def statement_pair_dep_sets_from_legacy_knl(knl): :class:`loopy.schedule.checker.dependency.DependencyType`. 
""" - # TODO maybe just eliminate this function since it doesn't do much + + from loopy.schedule.checker.dependency import ( + create_dependency_constraint, + ) # Preprocess if not already preprocessed # note: kernels must always be preprocessed before scheduling @@ -191,16 +194,7 @@ def statement_pair_dep_sets_from_legacy_knl(knl): from loopy.schedule.checker.dependency import ( _create_dependencies_from_legacy_knl_old, ) - return _create_dependencies_from_legacy_knl_old(preprocessed_knl) - - -def create_dependencies_from_legacy_knl(knl): - - from loopy.schedule.checker.dependency import ( - create_dependency_constraint, - ) - - spds = statement_pair_dep_sets_from_legacy_knl(knl) + spds = _create_dependencies_from_legacy_knl_old(preprocessed_knl) dep_maps = set() for statement_pair_dep_set in spds: -- GitLab From 322d4e4a04f71028599556d350140a0308d7b15b Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 7 Jul 2020 07:09:00 -0500 Subject: [PATCH 282/415] eliminate _create_dependencies_from_legacy_knl_old() by inlining in new version of the func create_dependencies_from_legacy_knl() --- loopy/schedule/checker/__init__.py | 70 ++++++++++++++++++-- loopy/schedule/checker/dependency.py | 96 ---------------------------- 2 files changed, 66 insertions(+), 100 deletions(-) diff --git a/loopy/schedule/checker/__init__.py b/loopy/schedule/checker/__init__.py index f6f704f53..b2c55f34b 100644 --- a/loopy/schedule/checker/__init__.py +++ b/loopy/schedule/checker/__init__.py @@ -183,7 +183,16 @@ def create_dependencies_from_legacy_knl(knl): from loopy.schedule.checker.dependency import ( create_dependency_constraint, + get_dependency_sources_and_sinks, + StatementPairDependencySet, + DependencyType as dt, ) + from loopy.schedule.checker.utils import ( + get_concurrent_inames, + get_all_nonconcurrent_insn_iname_subsets, + get_linearization_item_ids_within_inames, + ) + from loopy.schedule.checker.schedule import StatementRef # Preprocess if not already preprocessed # note: kernels must always be preprocessed before scheduling @@ -191,10 +200,63 @@ def create_dependencies_from_legacy_knl(knl): preprocessed_knl = preprocess_kernel(knl) # Create StatementPairDependencySet(s) from kernel dependencies - from loopy.schedule.checker.dependency import ( - _create_dependencies_from_legacy_knl_old, - ) - spds = _create_dependencies_from_legacy_knl_old(preprocessed_knl) + spds = set() + + # Introduce SAME dep for set of shared, non-concurrent inames + + conc_inames, non_conc_inames = get_concurrent_inames(preprocessed_knl) + for insn_after in preprocessed_knl.instructions: + for insn_before_id in insn_after.depends_on: + insn_before = preprocessed_knl.id_to_insn[insn_before_id] + insn_before_inames = insn_before.within_inames + insn_after_inames = insn_after.within_inames + shared_inames = insn_before_inames & insn_after_inames + shared_non_conc_inames = shared_inames & non_conc_inames + + spds.add( + StatementPairDependencySet( + StatementRef(insn_id=insn_before.id), + StatementRef(insn_id=insn_after.id), + {dt.SAME: shared_non_conc_inames}, + preprocessed_knl.get_inames_domain(insn_before_inames), + preprocessed_knl.get_inames_domain(insn_after_inames), + )) + + # loop-carried deps ------------------------------------------ + + # Go through insns and get all unique insn.depends_on iname sets + non_conc_iname_subsets = get_all_nonconcurrent_insn_iname_subsets( + preprocessed_knl, exclude_empty=True, non_conc_inames=non_conc_inames) + + # For each set of insns within a given iname set, find sources and sinks. 
+ # Then make PRIOR dep from all sinks to all sources at previous iterations + for iname_subset in non_conc_iname_subsets: + # find items within this iname set + linearization_item_ids = get_linearization_item_ids_within_inames( + preprocessed_knl, iname_subset) + + # find sources and sinks + sources, sinks = get_dependency_sources_and_sinks( + preprocessed_knl, linearization_item_ids) + + # create prior deps + + # in future, consider inserting single no-op source and sink + for source_id in sources: + for sink_id in sinks: + sink_insn_inames = preprocessed_knl.id_to_insn[sink_id].within_inames + source_insn_inames = preprocessed_knl.id_to_insn[source_id].within_inames + shared_inames = sink_insn_inames & source_insn_inames + shared_non_conc_inames = shared_inames & non_conc_inames + + spds.add( + StatementPairDependencySet( + StatementRef(insn_id=sink_id), + StatementRef(insn_id=source_id), + {dt.PRIOR: shared_non_conc_inames}, + preprocessed_knl.get_inames_domain(sink_insn_inames), + preprocessed_knl.get_inames_domain(source_insn_inames), + )) dep_maps = set() for statement_pair_dep_set in spds: diff --git a/loopy/schedule/checker/dependency.py b/loopy/schedule/checker/dependency.py index 7a3585e25..d246470e5 100644 --- a/loopy/schedule/checker/dependency.py +++ b/loopy/schedule/checker/dependency.py @@ -389,102 +389,6 @@ def create_dependency_constraint( return map_with_loop_domain_constraints -def _create_dependencies_from_legacy_knl_old(knl): - """Return a list of :class:`StatementPairDependencySet` instances created - for a :class:`loopy.LoopKernel` containing legacy depencencies. - - Create the new dependencies according to the following rules: - - (1) If a dependency exists between ``insn0`` and ``insn1``, create the - dependnecy ``SAME(SNC)`` where ``SNC`` is the set of non-concurrent inames - used by both ``insn0`` and ``insn1``, and ``SAME`` is the relationship - specified by the ``SAME`` attribute of - :class:`loopy.schedule.checker.dependency.DependencyType`. - - (2) For each subset of non-concurrent inames used by any instruction, - - (a), find the set of all instructions using those inames, - - (b), create a directed graph with these instructions as nodes and - edges representing a 'happens before' relationship specfied by - each dependency, - - (c), find the sources and sinks within this graph, and - - (d), connect each sink to each source (sink happens before source) - with a ``PRIOR(SNC)`` dependency, where ``PRIOR`` is the - relationship specified by the ``PRIOR`` attribute of - :class:`loopy.schedule.checker.dependency.DependencyType`. 
- - """ - - # Introduce SAME dep for set of shared, non-concurrent inames - - from loopy.schedule.checker.utils import ( - get_concurrent_inames, - get_all_nonconcurrent_insn_iname_subsets, - get_linearization_item_ids_within_inames, - ) - from loopy.schedule.checker.schedule import StatementRef - dt = DependencyType - conc_inames, non_conc_inames = get_concurrent_inames(knl) - statement_dep_sets = [] - for insn_after in knl.instructions: - for insn_before_id in insn_after.depends_on: - insn_before = knl.id_to_insn[insn_before_id] - insn_before_inames = insn_before.within_inames - insn_after_inames = insn_after.within_inames - shared_inames = insn_before_inames & insn_after_inames - shared_non_conc_inames = shared_inames & non_conc_inames - - statement_dep_sets.append( - StatementPairDependencySet( - StatementRef(insn_id=insn_before.id), - StatementRef(insn_id=insn_after.id), - {dt.SAME: shared_non_conc_inames}, - knl.get_inames_domain(insn_before_inames), - knl.get_inames_domain(insn_after_inames), - )) - - # loop-carried deps ------------------------------------------ - - # Go through insns and get all unique insn.depends_on iname sets - non_conc_iname_subsets = get_all_nonconcurrent_insn_iname_subsets( - knl, exclude_empty=True, non_conc_inames=non_conc_inames) - - # For each set of insns within a given iname set, find sources and sinks. - # Then make PRIOR dep from all sinks to all sources at previous iterations - for iname_subset in non_conc_iname_subsets: - # find items within this iname set - linearization_item_ids = get_linearization_item_ids_within_inames( - knl, iname_subset) - - # find sources and sinks - sources, sinks = get_dependency_sources_and_sinks( - knl, linearization_item_ids) - - # create prior deps - - # in future, consider inserting single no-op source and sink - for source_id in sources: - for sink_id in sinks: - sink_insn_inames = knl.id_to_insn[sink_id].within_inames - source_insn_inames = knl.id_to_insn[source_id].within_inames - shared_inames = sink_insn_inames & source_insn_inames - shared_non_conc_inames = shared_inames & non_conc_inames - - statement_dep_sets.append( - StatementPairDependencySet( - StatementRef(insn_id=sink_id), - StatementRef(insn_id=source_id), - {dt.PRIOR: shared_non_conc_inames}, - knl.get_inames_domain(sink_insn_inames), - knl.get_inames_domain(source_insn_inames), - )) - - return set(statement_dep_sets) - - def get_dependency_sources_and_sinks(knl, linearization_item_ids): """Implicitly create a directed graph with the linearization items specified by ``linearization_item_ids`` as nodes, and with edges representing a -- GitLab From 84ce3a71fd1f10fece9fbfca5e1e2b1d08029504 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 7 Jul 2020 07:33:23 -0500 Subject: [PATCH 283/415] rewrite filter_deps_by_intersection_with_SAME() after change to legacy kernel dep generation; change arg legacy_statement_pair_dep_sets to set of (before_id, after_id, map) --- loopy/schedule/checker/dependency.py | 30 ++++++++----------- .../example_pairwise_schedule_validity.py | 20 +++++-------- 2 files changed, 20 insertions(+), 30 deletions(-) diff --git a/loopy/schedule/checker/dependency.py b/loopy/schedule/checker/dependency.py index ef9303984..6690024aa 100644 --- a/loopy/schedule/checker/dependency.py +++ b/loopy/schedule/checker/dependency.py @@ -665,39 +665,33 @@ def get_dependency_sources_and_sinks(knl, linearization_item_ids): def filter_deps_by_intersection_with_SAME( knl, - statement_pair_dep_sets, - insn_ids, + deps, non_conc_inames, ): # TODO document 
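The rewritten filter keeps a dependency only if its constraint map overlaps a
pure SAME relation on the two statements' shared non-concurrent inames, and the
heart of that test is just an isl intersection followed by an emptiness check.
A minimal sketch of that idiom with throwaway maps (names and constraints here
are illustrative, not maps produced by loopy):

    import islpy as isl

    dep_map = isl.Map("{ [i] -> [i_] : i_ >= i }")   # stand-in dependency relation
    same_map = isl.Map("{ [i] -> [i_] : i_ = i }")   # stand-in SAME relation

    # keep the dependency iff the intersection is non-empty
    assert not (dep_map & same_map).is_empty()

The function then simply returns the (insn_id_before, insn_id_after,
dep_constraint_map) triples that pass this test.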
+ from loopy.schedule.checker.schedule import StatementRef dt = DependencyType # determine which dep relations have a non-empty intersection with # the SAME relation deps_filtered = [] - for statement_pair_dep_set in statement_pair_dep_sets: - - # create isl map representing dep relation - dep_constraint_map = create_dependency_constraint( - statement_pair_dep_set, - knl.loop_priority, - ) + for insn_id_before, insn_id_after, dep_constraint_map in deps: # create isl map representing "SAME" dep for these two insns - s_before = statement_pair_dep_set.statement_before - s_after = statement_pair_dep_set.statement_after shared_nc_inames = ( - knl.id_to_insn[s_before.insn_id].within_inames & - knl.id_to_insn[s_after.insn_id].within_inames & + knl.id_to_insn[insn_id_before].within_inames & + knl.id_to_insn[insn_id_after].within_inames & non_conc_inames) + same_dep_set = StatementPairDependencySet( - s_before, - s_after, + StatementRef(insn_id=insn_id_before), + StatementRef(insn_id=insn_id_after), {dt.SAME: shared_nc_inames}, - statement_pair_dep_set.dom_before, - statement_pair_dep_set.dom_after, + knl.get_inames_domain(knl.id_to_insn[insn_id_before].within_inames), + knl.get_inames_domain(knl.id_to_insn[insn_id_after].within_inames), ) + same_dep_constraint_map = create_dependency_constraint( same_dep_set, knl.loop_priority, @@ -708,6 +702,6 @@ def filter_deps_by_intersection_with_SAME( intersect_not_empty = not bool(intersect_dep_and_same.is_empty()) if intersect_not_empty: - deps_filtered.append(statement_pair_dep_set) + deps_filtered.append((insn_id_before, insn_id_after, dep_constraint_map)) return deps_filtered diff --git a/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py b/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py index 57715bc88..f81eab249 100644 --- a/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py +++ b/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py @@ -307,8 +307,7 @@ if knl_choice == "loop_carried_deps": unprocessed_knl = knl.copy() -legacy_statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl( - unprocessed_knl) +deps = lp.create_dependencies_from_legacy_knl(unprocessed_knl) # get a linearization to check knl = preprocess_kernel(knl) @@ -318,8 +317,8 @@ linearization_items = knl.linearization print("checking validity") linearization_is_valid = lp.check_linearization_validity( - unprocessed_knl, legacy_statement_pair_dep_sets, linearization_items, - verbose=True) + unprocessed_knl, deps, linearization_items, + ) """ legacy_statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl(knl) @@ -330,7 +329,7 @@ linearized_knl = get_one_linearized_kernel(knl) linearization_items = linearized_knl.linearization linearization_is_valid = lp.check_linearization_validity( - knl, legacy_statement_pair_dep_sets, linearization_items, verbose=True) + knl, legacy_statement_pair_dep_sets, linearization_items) """ print("is linearization valid? 
constraint map subset of SIO?") @@ -352,18 +351,15 @@ from loopy.schedule.checker.utils import ( _, non_conc_inames = get_concurrent_inames(knl) legacy_deps_filtered_by_same = filter_deps_by_intersection_with_SAME( knl, - legacy_statement_pair_dep_sets, - [insn.id for insn in knl.instructions], + deps, + #[insn.id for insn in knl.instructions], non_conc_inames, ) # get dep graph edges dep_graph_pairs = [ - ( - dep.statement_after.insn_id, - dep.statement_before.insn_id - ) - for dep in legacy_deps_filtered_by_same] + (insn_id_before, insn_id_after) + for insn_id_before, insn_id_after, _ in legacy_deps_filtered_by_same] # create dep graph from edges dep_graph = create_graph_from_pairs(dep_graph_pairs) -- GitLab From 534e177c893aa852a807f536f1fcada7c5c49a9e Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 7 Jul 2020 08:55:41 -0500 Subject: [PATCH 284/415] rename create_dependency_constraint()->create_legacy_dependency_constraint() --- loopy/schedule/checker/__init__.py | 16 +++++++--------- loopy/schedule/checker/dependency.py | 2 +- 2 files changed, 8 insertions(+), 10 deletions(-) diff --git a/loopy/schedule/checker/__init__.py b/loopy/schedule/checker/__init__.py index b2c55f34b..a87ba31db 100644 --- a/loopy/schedule/checker/__init__.py +++ b/loopy/schedule/checker/__init__.py @@ -182,10 +182,10 @@ def create_dependencies_from_legacy_knl(knl): """ from loopy.schedule.checker.dependency import ( - create_dependency_constraint, + create_legacy_dependency_constraint, get_dependency_sources_and_sinks, StatementPairDependencySet, - DependencyType as dt, + DependencyType, ) from loopy.schedule.checker.utils import ( get_concurrent_inames, @@ -217,7 +217,7 @@ def create_dependencies_from_legacy_knl(knl): StatementPairDependencySet( StatementRef(insn_id=insn_before.id), StatementRef(insn_id=insn_after.id), - {dt.SAME: shared_non_conc_inames}, + {DependencyType.SAME: shared_non_conc_inames}, preprocessed_knl.get_inames_domain(insn_before_inames), preprocessed_knl.get_inames_domain(insn_after_inames), )) @@ -245,7 +245,8 @@ def create_dependencies_from_legacy_knl(knl): for source_id in sources: for sink_id in sinks: sink_insn_inames = preprocessed_knl.id_to_insn[sink_id].within_inames - source_insn_inames = preprocessed_knl.id_to_insn[source_id].within_inames + source_insn_inames = preprocessed_knl.id_to_insn[ + source_id].within_inames shared_inames = sink_insn_inames & source_insn_inames shared_non_conc_inames = shared_inames & non_conc_inames @@ -253,7 +254,7 @@ def create_dependencies_from_legacy_knl(knl): StatementPairDependencySet( StatementRef(insn_id=sink_id), StatementRef(insn_id=source_id), - {dt.PRIOR: shared_non_conc_inames}, + {DependencyType.PRIOR: shared_non_conc_inames}, preprocessed_knl.get_inames_domain(sink_insn_inames), preprocessed_knl.get_inames_domain(source_insn_inames), )) @@ -263,7 +264,7 @@ def create_dependencies_from_legacy_knl(knl): # create a map representing constraints from the dependency, # which maps statement instance to all stmt instances that must occur later # and is acquired from the non-preprocessed kernel - constraint_map = create_dependency_constraint( + constraint_map = create_legacy_dependency_constraint( statement_pair_dep_set, knl.loop_priority, ) @@ -285,9 +286,6 @@ def check_linearization_validity( ): # TODO document - from loopy.schedule.checker.dependency import ( - create_dependency_constraint, - ) from loopy.schedule.checker.lexicographic_order_map import ( get_statement_ordering_map, ) diff --git a/loopy/schedule/checker/dependency.py 
b/loopy/schedule/checker/dependency.py index d246470e5..125a8e970 100644 --- a/loopy/schedule/checker/dependency.py +++ b/loopy/schedule/checker/dependency.py @@ -183,7 +183,7 @@ def _convert_constraint_set_to_map(constraint_set, mv_count, src_position=None): dim_type.out, 0, dim_type.in_, mv_count, mv_count) -def create_dependency_constraint( +def create_legacy_dependency_constraint( statement_dep_set, loop_priorities, ): -- GitLab From 5250fbc9fc7eba1c526145846bc817777c2e457f Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 7 Jul 2020 09:27:53 -0500 Subject: [PATCH 285/415] eliminate usage of statement_dep_set inside create_legacy_dependency_constraint(); instead pass in necessary info as args or compute it; (preparing to eliminate StatementPairDependencySet class) --- loopy/schedule/checker/__init__.py | 6 ++-- loopy/schedule/checker/dependency.py | 49 ++++++++++++++++------------ 2 files changed, 32 insertions(+), 23 deletions(-) diff --git a/loopy/schedule/checker/__init__.py b/loopy/schedule/checker/__init__.py index a87ba31db..d47e69b6f 100644 --- a/loopy/schedule/checker/__init__.py +++ b/loopy/schedule/checker/__init__.py @@ -265,8 +265,10 @@ def create_dependencies_from_legacy_knl(knl): # which maps statement instance to all stmt instances that must occur later # and is acquired from the non-preprocessed kernel constraint_map = create_legacy_dependency_constraint( - statement_pair_dep_set, - knl.loop_priority, + preprocessed_knl, + statement_pair_dep_set.statement_before.insn_id, + statement_pair_dep_set.statement_after.insn_id, + statement_pair_dep_set.deps, ) dep_maps.add(( diff --git a/loopy/schedule/checker/dependency.py b/loopy/schedule/checker/dependency.py index 125a8e970..2b435c355 100644 --- a/loopy/schedule/checker/dependency.py +++ b/loopy/schedule/checker/dependency.py @@ -184,8 +184,10 @@ def _convert_constraint_set_to_map(constraint_set, mv_count, src_position=None): def create_legacy_dependency_constraint( - statement_dep_set, - loop_priorities, + knl, + insn_id_before, + insn_id_after, + deps, ): """Create a statement dependency constraint represented as a map from each statement instance to statement instances that must occur later, @@ -194,12 +196,18 @@ def create_legacy_dependency_constraint( specified condition on inames ``i',j',i,j`` is met. ``i'`` and ``j'`` are the values of inames ``i`` and ``j`` in first statement instance. - :arg statement_dep_set: A :class:`StatementPairDependencySet` describing - the dependency relationship between the two statements. + :arg knl: A :class:`loopy.kernel.LoopKernel` containing the + depender and dependee instructions. - :arg loop_priorities: A list of tuples from the ``loop_priority`` - attribute of :class:`loopy.LoopKernel` specifying the loop nest - ordering rules. + :arg insn_id_before: A :class:`str` specifying the :mod:`loopy` + instruction id for the dependee statement. + + :arg insn_id_after: A :class:`str` specifying the :mod:`loopy` + instruction id for the depender statement. + + :arg deps: A :class:`dict` mapping instances of :class:`DependencyType` + to the :mod:`loopy` kernel inames involved in that particular + dependency relationship. :returns: An :class:`islpy.Map` mapping each statement instance to all statement instances that must occur later according to the constraints. 
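For readers new to the representation: the returned map pairs an integer
statement dimension with the loop inames, with instances of the "before"
statement on the input side and instances of the "after" statement on the
output side.  A SAME-type constraint between two statements sharing iname i
then looks roughly like the sketch below; the real maps use the checker's
statement-variable prefix and the primed/unprimed naming convention visible in
the tests (all names here are illustrative):

    import islpy as isl

    same_constraint = isl.Map(
        "[pi] -> { [s = 0, i] -> [s_ = 1, i_] : i_ = i and 0 <= i < pi }")
    assert not same_constraint.is_empty()

A PRIOR-type constraint relates the iname values with an inequality rather than
an equality.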
@@ -218,10 +226,12 @@ def create_legacy_dependency_constraint( # This function uses the dependency given to create the following constraint: # Statement [s,i,j] comes before statement [s',i',j'] iff - dom_inames_ordered_before = list_var_names_in_isl_sets( - [statement_dep_set.dom_before]) - dom_inames_ordered_after = list_var_names_in_isl_sets( - [statement_dep_set.dom_after]) + # TODO we're now computing these doms multiple times + # could be more efficient... + dom_before = knl.get_inames_domain(knl.id_to_insn[insn_id_before].within_inames) + dom_after = knl.get_inames_domain(knl.id_to_insn[insn_id_after].within_inames) + dom_inames_ordered_before = list_var_names_in_isl_sets([dom_before]) + dom_inames_ordered_after = list_var_names_in_isl_sets([dom_after]) # create some (ordered) isl vars to use, e.g., {s, i, j, s', i', j'} islvars = make_islvars_with_marker( @@ -238,7 +248,7 @@ def create_legacy_dependency_constraint( # for each (dep_type, inames) pair, create 'happens before' constraint, # all_constraints_set will be the union of all these constraints dt = DependencyType - for dep_type, inames in statement_dep_set.deps.items(): + for dep_type, inames in deps.items(): # need to put inames in a list so that order of inames and inames' # matches when calling create_elementwise_comparison_conj... if not isinstance(inames, list): @@ -254,7 +264,7 @@ def create_legacy_dependency_constraint( priority_known = False # if nesting info is provided: - if loop_priorities: + if knl.loop_priority: # assumes all loop_priority tuples are consistent # with multiple priority tuples, determine whether the combined @@ -265,7 +275,7 @@ def create_legacy_dependency_constraint( # remove irrelevant inames from priority tuples (because we're # about to perform a costly operation on remaining tuples) relevant_priorities = set() - for p_tuple in loop_priorities: + for p_tuple in knl.loop_priority: new_tuple = [iname for iname in p_tuple if iname in inames_list] # empty tuples and single tuples don't help us define # a nesting, so ignore them (if we're dealing with a single @@ -301,7 +311,7 @@ def create_legacy_dependency_constraint( raise ValueError( "create_dependency_constriant encountered invalid " "priorities %s" - % (loop_priorities)) + % (knl.loop_priority)) priority_known = True priority_tuple = orders.pop() @@ -338,10 +348,7 @@ def create_legacy_dependency_constraint( # get ints representing statements in PairwiseSchedule s_before_int = 0 - s_after_int = 0 if ( - statement_dep_set.statement_before.insn_id == - statement_dep_set.statement_after.insn_id - ) else 1 + s_after_int = 0 if insn_id_before == insn_id_after else 1 # set statement_var_name == statement # constraint_set = constraint_set & islvars[statement_var_name_prime].eq_set( @@ -365,10 +372,10 @@ def create_legacy_dependency_constraint( # add statement variable to doms to enable intersection range_to_intersect = add_dims_to_isl_set( - statement_dep_set.dom_after, isl.dim_type.out, + dom_after, isl.dim_type.out, [STATEMENT_VAR_NAME], statement_var_idx) domain_constraint_set = append_marker_to_isl_map_var_names( - statement_dep_set.dom_before, isl.dim_type.set, marker="'") + dom_before, isl.dim_type.set, marker="'") domain_to_intersect = add_dims_to_isl_set( domain_constraint_set, isl.dim_type.out, [statement_var_name_prime], statement_var_idx) -- GitLab From 54357180c797c254aee42673628ff59fab0c9120 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 7 Jul 2020 09:30:44 -0500 Subject: [PATCH 286/415] rename DependencyType->LegacyDependencyType 
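The "Legacy" prefix is presumably meant to flag that SAME and PRIOR only
describe how old-style depends_on edges are translated into constraint maps;
the maps themselves are what the checker keeps and consumes.  A rough sketch of
how a call site spells this out after the rename (the iname set below is purely
illustrative):

    from loopy.schedule.checker.dependency import LegacyDependencyType

    # pair a legacy dependency type with the shared non-concurrent inames
    deps = {LegacyDependencyType.SAME: frozenset(["i", "j"])}
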
--- loopy/schedule/checker/__init__.py | 10 +++++----- loopy/schedule/checker/dependency.py | 12 ++++++------ 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/loopy/schedule/checker/__init__.py b/loopy/schedule/checker/__init__.py index d47e69b6f..da5b70b26 100644 --- a/loopy/schedule/checker/__init__.py +++ b/loopy/schedule/checker/__init__.py @@ -162,7 +162,7 @@ def create_dependencies_from_legacy_knl(knl): dependnecy ``SAME(SNC)`` where ``SNC`` is the set of non-concurrent inames used by both ``insn0`` and ``insn1``, and ``SAME`` is the relationship specified by the ``SAME`` attribute of - :class:`loopy.schedule.checker.dependency.DependencyType`. + :class:`loopy.schedule.checker.dependency.LegacyDependencyType`. (2) For each subset of non-concurrent inames used by any instruction, @@ -177,7 +177,7 @@ def create_dependencies_from_legacy_knl(knl): (d), connect each sink to each source (sink happens before source) with a ``PRIOR(SNC)`` dependency, where ``PRIOR`` is the relationship specified by the ``PRIOR`` attribute of - :class:`loopy.schedule.checker.dependency.DependencyType`. + :class:`loopy.schedule.checker.dependency.LegacyDependencyType`. """ @@ -185,7 +185,7 @@ def create_dependencies_from_legacy_knl(knl): create_legacy_dependency_constraint, get_dependency_sources_and_sinks, StatementPairDependencySet, - DependencyType, + LegacyDependencyType, ) from loopy.schedule.checker.utils import ( get_concurrent_inames, @@ -217,7 +217,7 @@ def create_dependencies_from_legacy_knl(knl): StatementPairDependencySet( StatementRef(insn_id=insn_before.id), StatementRef(insn_id=insn_after.id), - {DependencyType.SAME: shared_non_conc_inames}, + {LegacyDependencyType.SAME: shared_non_conc_inames}, preprocessed_knl.get_inames_domain(insn_before_inames), preprocessed_knl.get_inames_domain(insn_after_inames), )) @@ -254,7 +254,7 @@ def create_dependencies_from_legacy_knl(knl): StatementPairDependencySet( StatementRef(insn_id=sink_id), StatementRef(insn_id=source_id), - {DependencyType.PRIOR: shared_non_conc_inames}, + {LegacyDependencyType.PRIOR: shared_non_conc_inames}, preprocessed_knl.get_inames_domain(sink_insn_inames), preprocessed_knl.get_inames_domain(source_insn_inames), )) diff --git a/loopy/schedule/checker/dependency.py b/loopy/schedule/checker/dependency.py index 2b435c355..6d6a11a80 100644 --- a/loopy/schedule/checker/dependency.py +++ b/loopy/schedule/checker/dependency.py @@ -23,7 +23,7 @@ THE SOFTWARE. import islpy as isl -class DependencyType: +class LegacyDependencyType: """Strings specifying a particular type of dependency relationship. .. attribute:: SAME @@ -75,7 +75,7 @@ class StatementPairDependencySet(object): .. attribute:: deps - A :class:`dict` mapping instances of :class:`DependencyType` to + A :class:`dict` mapping instances of :class:`LegacyDependencyType` to the :mod:`loopy` kernel inames involved in that particular dependency relationship. @@ -205,7 +205,7 @@ def create_legacy_dependency_constraint( :arg insn_id_after: A :class:`str` specifying the :mod:`loopy` instruction id for the depender statement. - :arg deps: A :class:`dict` mapping instances of :class:`DependencyType` + :arg deps: A :class:`dict` mapping instances of :class:`LegacyDependencyType` to the :mod:`loopy` kernel inames involved in that particular dependency relationship. 
@@ -247,7 +247,7 @@ def create_legacy_dependency_constraint( # for each (dep_type, inames) pair, create 'happens before' constraint, # all_constraints_set will be the union of all these constraints - dt = DependencyType + ldt = LegacyDependencyType for dep_type, inames in deps.items(): # need to put inames in a list so that order of inames and inames' # matches when calling create_elementwise_comparison_conj... @@ -257,10 +257,10 @@ def create_legacy_dependency_constraint( inames_list = inames[:] inames_prime = append_apostrophes(inames_list) # e.g., [j', k'] - if dep_type == dt.SAME: + if dep_type == ldt.SAME: constraint_set = create_elementwise_comparison_conjunction_set( inames_prime, inames_list, islvars, op="eq") - elif dep_type == dt.PRIOR: + elif dep_type == ldt.PRIOR: priority_known = False # if nesting info is provided: -- GitLab From 46432fca396ed78c1c21653bb2ff575b06e25169 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 7 Jul 2020 09:58:47 -0500 Subject: [PATCH 287/415] eliminate need to maintain state held in StatementPairDependencySet; eliminate class StatementPairDependencySet --- loopy/schedule/checker/__init__.py | 87 +++++++++++++--------------- loopy/schedule/checker/dependency.py | 79 ------------------------- 2 files changed, 41 insertions(+), 125 deletions(-) diff --git a/loopy/schedule/checker/__init__.py b/loopy/schedule/checker/__init__.py index da5b70b26..21dc08b5c 100644 --- a/loopy/schedule/checker/__init__.py +++ b/loopy/schedule/checker/__init__.py @@ -184,7 +184,6 @@ def create_dependencies_from_legacy_knl(knl): from loopy.schedule.checker.dependency import ( create_legacy_dependency_constraint, get_dependency_sources_and_sinks, - StatementPairDependencySet, LegacyDependencyType, ) from loopy.schedule.checker.utils import ( @@ -192,15 +191,14 @@ def create_dependencies_from_legacy_knl(knl): get_all_nonconcurrent_insn_iname_subsets, get_linearization_item_ids_within_inames, ) - from loopy.schedule.checker.schedule import StatementRef # Preprocess if not already preprocessed # note: kernels must always be preprocessed before scheduling from loopy import preprocess_kernel preprocessed_knl = preprocess_kernel(knl) - # Create StatementPairDependencySet(s) from kernel dependencies - spds = set() + # Create constraint maps from kernel dependencies + dep_maps = set() # Introduce SAME dep for set of shared, non-concurrent inames @@ -210,17 +208,26 @@ def create_dependencies_from_legacy_knl(knl): insn_before = preprocessed_knl.id_to_insn[insn_before_id] insn_before_inames = insn_before.within_inames insn_after_inames = insn_after.within_inames - shared_inames = insn_before_inames & insn_after_inames - shared_non_conc_inames = shared_inames & non_conc_inames - - spds.add( - StatementPairDependencySet( - StatementRef(insn_id=insn_before.id), - StatementRef(insn_id=insn_after.id), - {LegacyDependencyType.SAME: shared_non_conc_inames}, - preprocessed_knl.get_inames_domain(insn_before_inames), - preprocessed_knl.get_inames_domain(insn_after_inames), - )) + shared_non_conc_inames = ( + insn_before_inames & insn_after_inames & non_conc_inames) + + # TODO what to do if there is already a dep from insn_before->insn_after? 
+ + # create a map representing constraints from the dependency, + # which maps statement instance to all stmt instances that must occur + # later and is acquired from the non-preprocessed kernel + constraint_map = create_legacy_dependency_constraint( + preprocessed_knl, + insn_before_id, + insn_after.id, + {LegacyDependencyType.SAME: shared_non_conc_inames}, + ) + + dep_maps.add(( + insn_before_id, + insn_after.id, + constraint_map, + )) # loop-carried deps ------------------------------------------ @@ -247,42 +254,31 @@ def create_dependencies_from_legacy_knl(knl): sink_insn_inames = preprocessed_knl.id_to_insn[sink_id].within_inames source_insn_inames = preprocessed_knl.id_to_insn[ source_id].within_inames - shared_inames = sink_insn_inames & source_insn_inames - shared_non_conc_inames = shared_inames & non_conc_inames - - spds.add( - StatementPairDependencySet( - StatementRef(insn_id=sink_id), - StatementRef(insn_id=source_id), - {LegacyDependencyType.PRIOR: shared_non_conc_inames}, - preprocessed_knl.get_inames_domain(sink_insn_inames), - preprocessed_knl.get_inames_domain(source_insn_inames), - )) - - dep_maps = set() - for statement_pair_dep_set in spds: - # create a map representing constraints from the dependency, - # which maps statement instance to all stmt instances that must occur later - # and is acquired from the non-preprocessed kernel - constraint_map = create_legacy_dependency_constraint( - preprocessed_knl, - statement_pair_dep_set.statement_before.insn_id, - statement_pair_dep_set.statement_after.insn_id, - statement_pair_dep_set.deps, - ) - - dep_maps.add(( - statement_pair_dep_set.statement_before.insn_id, - statement_pair_dep_set.statement_after.insn_id, - constraint_map, - )) + shared_non_conc_inames = ( + sink_insn_inames & source_insn_inames & non_conc_inames) + + # create a map representing constraints from the dependency, + # which maps statement instance to all stmt instances that must occur + # later and is acquired from the non-preprocessed kernel + constraint_map = create_legacy_dependency_constraint( + preprocessed_knl, + sink_id, + source_id, + {LegacyDependencyType.PRIOR: shared_non_conc_inames}, + ) + + # TODO what to do if there is already a dep from sink->source? + dep_maps.add(( + sink_id, + source_id, + constraint_map, + )) return dep_maps def check_linearization_validity( knl, - #statement_pair_dep_sets, dep_maps, linearization_items, ): @@ -302,7 +298,6 @@ def check_linearization_validity( # For each dependency, create+test linearization containing pair of insns------ linearization_is_valid = True - #for statement_pair_dep_set in statement_pair_dep_sets: for insn_id_before, insn_id_after, constraint_map in dep_maps: # TODO, since we now get the doms inside # build_maps() diff --git a/loopy/schedule/checker/dependency.py b/loopy/schedule/checker/dependency.py index 6d6a11a80..5507b2260 100644 --- a/loopy/schedule/checker/dependency.py +++ b/loopy/schedule/checker/dependency.py @@ -60,85 +60,6 @@ class LegacyDependencyType: PRIOR = "prior" -class StatementPairDependencySet(object): - """A set of dependencies between two statements. - - .. attribute:: statement_before - - A :class:`loopy.schedule.checker.schedule.StatementRef` depended - on by statement_after. - - .. attribute:: statement_after - - A :class:`loopy.schedule.checker.schedule.StatementRef` which - cdepends on statement_before. - - .. 
attribute:: deps - - A :class:`dict` mapping instances of :class:`LegacyDependencyType` to - the :mod:`loopy` kernel inames involved in that particular - dependency relationship. - - .. attribute:: dom_before - - A :class:`islpy.BasicSet` representing the domain for the - dependee statement. - - .. attribute:: dom_after - - A :class:`islpy.BasicSet` representing the domain for the - depender statement. - - """ - - def __init__( - self, - statement_before, - statement_after, - deps, # {dep_type: iname_set} - dom_before=None, - dom_after=None, - ): - self.statement_before = statement_before - self.statement_after = statement_after - self.deps = deps - self.dom_before = dom_before - self.dom_after = dom_after - - def __eq__(self, other): - return ( - self.statement_before == other.statement_before - and self.statement_after == other.statement_after - and self.deps == other.deps - and self.dom_before == other.dom_before - and self.dom_after == other.dom_after - ) - - def __lt__(self, other): - return self.__hash__() < other.__hash__() - - def __hash__(self): - return hash(repr(self)) - - def update_persistent_hash(self, key_hash, key_builder): - """Custom hash computation function for use with - :class:`pytools.persistent_dict.PersistentDict`. - """ - - key_builder.rec(key_hash, self.statement_before) - key_builder.rec(key_hash, self.statement_after) - key_builder.rec(key_hash, self.deps) - key_builder.rec(key_hash, self.dom_before) - key_builder.rec(key_hash, self.dom_after) - - def __str__(self): - result = "%s --before->\n%s iff\n " % ( - self.statement_before, self.statement_after) - return result + " and\n ".join( - ["(%s : %s)" % (dep_type, inames) - for dep_type, inames in self.deps.items()]) - - def create_elementwise_comparison_conjunction_set( names0, names1, islvars, op="eq"): """Create a set constrained by the conjunction of conditions comparing -- GitLab From 348a763a043af72c6046434bae12c61e05d0ee94 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 7 Jul 2020 10:03:55 -0500 Subject: [PATCH 288/415] update some TODOs --- loopy/schedule/checker/__init__.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/loopy/schedule/checker/__init__.py b/loopy/schedule/checker/__init__.py index 21dc08b5c..e7b852483 100644 --- a/loopy/schedule/checker/__init__.py +++ b/loopy/schedule/checker/__init__.py @@ -151,7 +151,7 @@ def get_schedule_for_statement_pair( def create_dependencies_from_legacy_knl(knl): - """Return a list of + """Return a set of :class:`loopy.schedule.checker.dependency.TBD` instances created for a :class:`loopy.LoopKernel` containing legacy depencencies. @@ -197,6 +197,9 @@ def create_dependencies_from_legacy_knl(knl): from loopy import preprocess_kernel preprocessed_knl = preprocess_kernel(knl) + + # TODO instead of keeping these in a set, attach each one to depender insn + # Create constraint maps from kernel dependencies dep_maps = set() @@ -299,10 +302,6 @@ def check_linearization_validity( # For each dependency, create+test linearization containing pair of insns------ linearization_is_valid = True for insn_id_before, insn_id_after, constraint_map in dep_maps: - # TODO, since we now get the doms inside - # build_maps() - # reconsider the content of statement_pair_dep_set, which - # currently contains doms(do we still want them there?) 
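Stepping back from the individual hunks: the updated tests drive the two public
entry points in essentially the following way.  The toy kernel below is only a
stand-in for the kernels built in test_linearization_checker.py, so treat it as
a sketch rather than a verbatim excerpt:

    import loopy as lp
    from loopy import preprocess_kernel, get_one_linearized_kernel

    knl = lp.make_kernel(
        "{[i]: 0 <= i < n}",
        """
        a[i] = i          {id=insn_a}
        b[i] = a[i] + 1   {id=insn_b, dep=insn_a}
        """)

    # set of (insn_id_before, insn_id_after, constraint map) tuples
    deps = lp.create_dependencies_from_legacy_knl(knl)

    lin_knl = get_one_linearized_kernel(preprocess_kernel(knl))
    linearization_is_valid = lp.check_linearization_validity(
        knl, deps, lin_knl.linearization)
    assert linearization_is_valid

For each pair, the check builds the pairwise schedule and the statement
instance ordering and then verifies that the dependency's constraint map is a
subset of that ordering.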
# Create PairwiseScheduleBuilder: mapping of {statement instance: lex point} # include only instructions involved in this dependency -- GitLab From dc01bdcb1eecdccf1ca9cbc5be7dd08ee57737d2 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 7 Jul 2020 10:04:23 -0500 Subject: [PATCH 289/415] fix flake8 issue --- loopy/schedule/checker/__init__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/loopy/schedule/checker/__init__.py b/loopy/schedule/checker/__init__.py index e7b852483..681e2cdb4 100644 --- a/loopy/schedule/checker/__init__.py +++ b/loopy/schedule/checker/__init__.py @@ -197,7 +197,6 @@ def create_dependencies_from_legacy_knl(knl): from loopy import preprocess_kernel preprocessed_knl = preprocess_kernel(knl) - # TODO instead of keeping these in a set, attach each one to depender insn # Create constraint maps from kernel dependencies -- GitLab From dabb811f3e33ec01802b99d05067169486465ce3 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 7 Jul 2020 10:17:02 -0500 Subject: [PATCH 290/415] update filter_deps_by_intersection_with_SAME() after dep modifications (DependencyType->LegacyDependencyType, StatementPairDependencySet eliminated, create_dependency_constraint->create_legacy_dependency_constraint) --- loopy/schedule/checker/dependency.py | 19 ++++++------------- .../example_pairwise_schedule_validity.py | 13 ------------- 2 files changed, 6 insertions(+), 26 deletions(-) diff --git a/loopy/schedule/checker/dependency.py b/loopy/schedule/checker/dependency.py index 46804013b..1b6dcb5f6 100644 --- a/loopy/schedule/checker/dependency.py +++ b/loopy/schedule/checker/dependency.py @@ -597,9 +597,8 @@ def filter_deps_by_intersection_with_SAME( non_conc_inames, ): # TODO document - from loopy.schedule.checker.schedule import StatementRef - dt = DependencyType + ldt = LegacyDependencyType # determine which dep relations have a non-empty intersection with # the SAME relation @@ -612,19 +611,13 @@ def filter_deps_by_intersection_with_SAME( knl.id_to_insn[insn_id_after].within_inames & non_conc_inames) - same_dep_set = StatementPairDependencySet( - StatementRef(insn_id=insn_id_before), - StatementRef(insn_id=insn_id_after), - {dt.SAME: shared_nc_inames}, - knl.get_inames_domain(knl.id_to_insn[insn_id_before].within_inames), - knl.get_inames_domain(knl.id_to_insn[insn_id_after].within_inames), + same_dep_constraint_map = create_legacy_dependency_constraint( + knl, + insn_id_before, + insn_id_after, + {ldt.SAME: shared_nc_inames}, ) - same_dep_constraint_map = create_dependency_constraint( - same_dep_set, - knl.loop_priority, - ) - # see whether the intersection of dep map and SAME dep map exists intersect_dep_and_same = same_dep_constraint_map & dep_constraint_map intersect_not_empty = not bool(intersect_dep_and_same.is_empty()) diff --git a/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py b/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py index f81eab249..95eaf1312 100644 --- a/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py +++ b/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py @@ -320,18 +320,6 @@ linearization_is_valid = lp.check_linearization_validity( unprocessed_knl, deps, linearization_items, ) -""" -legacy_statement_pair_dep_sets = lp.statement_pair_dep_sets_from_legacy_knl(knl) - -# get a linearization to check -from loopy import get_one_linearized_kernel -linearized_knl = get_one_linearized_kernel(knl) -linearization_items = 
linearized_knl.linearization - -linearization_is_valid = lp.check_linearization_validity( - knl, legacy_statement_pair_dep_sets, linearization_items) -""" - print("is linearization valid? constraint map subset of SIO?") print(linearization_is_valid) @@ -352,7 +340,6 @@ _, non_conc_inames = get_concurrent_inames(knl) legacy_deps_filtered_by_same = filter_deps_by_intersection_with_SAME( knl, deps, - #[insn.id for insn in knl.instructions], non_conc_inames, ) -- GitLab From c549f652e739af191d0297e5b2621bdbe33d44a2 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Fri, 10 Jul 2020 07:33:43 -0500 Subject: [PATCH 291/415] use STATEMENT_VAR_NAME and LEX_VAR_PREFIX constants when building test maps --- test/test_linearization_checker.py | 76 ++++++++++++------------------ 1 file changed, 31 insertions(+), 45 deletions(-) diff --git a/test/test_linearization_checker.py b/test/test_linearization_checker.py index 2dc12b451..208d9350e 100644 --- a/test/test_linearization_checker.py +++ b/test/test_linearization_checker.py @@ -494,35 +494,21 @@ def test_statement_instance_ordering_creation(): assert sio_aligned == expected_sio - expected_lex_order_map = isl.Map("{ " - "[_lp_linchk_l0', _lp_linchk_l1', _lp_linchk_l2', _lp_linchk_l3', " - "_lp_linchk_l4']" - " -> " - "[_lp_linchk_l0, _lp_linchk_l1, _lp_linchk_l2, _lp_linchk_l3, " - "_lp_linchk_l4]" - ":" + expected_lex_order_map = isl.Map( + "{{ " + "[{0}0', {0}1', {0}2', {0}3', {0}4'] -> [{0}0, {0}1, {0}2, {0}3, {0}4] :" "(" - "_lp_linchk_l0' < _lp_linchk_l0 " + "{0}0' < {0}0 " ") or (" - "_lp_linchk_l0'= _lp_linchk_l0 and " - "_lp_linchk_l1' < _lp_linchk_l1 " + "{0}0'={0}0 and {0}1' < {0}1 " ") or (" - "_lp_linchk_l0'= _lp_linchk_l0 and " - "_lp_linchk_l1'= _lp_linchk_l1 and " - "_lp_linchk_l2' < _lp_linchk_l2 " + "{0}0'={0}0 and {0}1'={0}1 and {0}2' < {0}2 " ") or (" - "_lp_linchk_l0'= _lp_linchk_l0 and " - "_lp_linchk_l1'= _lp_linchk_l1 and " - "_lp_linchk_l2'= _lp_linchk_l2 and " - "_lp_linchk_l3' < _lp_linchk_l3 " + "{0}0'={0}0 and {0}1'={0}1 and {0}2'={0}2 and {0}3' < {0}3 " ") or (" - "_lp_linchk_l0'= _lp_linchk_l0 and " - "_lp_linchk_l1'= _lp_linchk_l1 and " - "_lp_linchk_l2'= _lp_linchk_l2 and " - "_lp_linchk_l3'= _lp_linchk_l3 and " - "_lp_linchk_l4' < _lp_linchk_l4" + "{0}0'={0}0 and {0}1'={0}1 and {0}2'={0}2 and {0}3'={0}3 and {0}4' < {0}4" ")" - "}") + "}}".format(LEX_VAR_PREFIX)) # Isl ignores these apostrophes, but test would still pass since it ignores # variable names when checking for equality. 
Even so, explicitly add apostrophes @@ -533,12 +519,12 @@ def test_statement_instance_ordering_creation(): # Relationship between insn_a and insn_b --------------------------------------- expected_sio = isl.Map( - "[pi, pj, pk] -> { " - "[_lp_linchk_statement'=0, i', k'] -> [_lp_linchk_statement=1, i, j]:" + "[pi, pj, pk] -> {{ " + "[{0}'=0, i', k'] -> [{0}=1, i, j] : " "0 <= i' < pi and 0 <= k' < pk and 0 <= j < pj and 0 <= i < pi and i > i'; " - "[_lp_linchk_statement'=0, i', k'] -> [_lp_linchk_statement=1, i=i', j]:" + "[{0}'=0, i', k'] -> [{0}=1, i=i', j] : " "0 <= i' < pi and 0 <= k' < pk and 0 <= j < pj " - "}" + "}}".format(STATEMENT_VAR_NAME) ) # isl ignores these apostrophes, so explicitly add them expected_sio = append_marker_to_isl_map_var_names( @@ -550,12 +536,12 @@ def test_statement_instance_ordering_creation(): # Relationship between insn_a and insn_c --------------------------------------- expected_sio = isl.Map( - "[pi, pj, pk] -> { " - "[_lp_linchk_statement'=0, i', k'] -> [_lp_linchk_statement=1, i, j]:" + "[pi, pj, pk] -> {{ " + "[{0}'=0, i', k'] -> [{0}=1, i, j] : " "0 <= i' < pi and 0 <= k' < pk and 0 <= j < pj and 0 <= i < pi and i > i'; " - "[_lp_linchk_statement'=0, i', k'] -> [_lp_linchk_statement=1, i=i', j]:" + "[{0}'=0, i', k'] -> [{0}=1, i=i', j] : " "0 <= i' < pi and 0 <= k' < pk and 0 <= j < pj " - "}" + "}}".format(STATEMENT_VAR_NAME) ) # isl ignores these apostrophes, so explicitly add them expected_sio = append_marker_to_isl_map_var_names( @@ -567,10 +553,10 @@ def test_statement_instance_ordering_creation(): # Relationship between insn_a and insn_d --------------------------------------- expected_sio = isl.Map( - "[pt, pi, pk] -> { " - "[_lp_linchk_statement'=0, i', k'] -> [_lp_linchk_statement=1, t]:" + "[pt, pi, pk] -> {{ " + "[{0}'=0, i', k'] -> [{0}=1, t] : " "0 <= i' < pi and 0 <= k' < pk and 0 <= t < pt " - "}" + "}}".format(STATEMENT_VAR_NAME) ) # isl ignores these apostrophes, so explicitly add them expected_sio = append_marker_to_isl_map_var_names( @@ -582,14 +568,14 @@ def test_statement_instance_ordering_creation(): # Relationship between insn_b and insn_c --------------------------------------- expected_sio = isl.Map( - "[pi, pj] -> { " - "[_lp_linchk_statement'=0, i', j'] -> [_lp_linchk_statement=1, i, j]:" + "[pi, pj] -> {{ " + "[{0}'=0, i', j'] -> [{0}=1, i, j] : " "0 <= i' < pi and 0 <= j' < pj and i > i' and 0 <= i < pi and 0 <= j < pj; " - "[_lp_linchk_statement'=0, i', j'] -> [_lp_linchk_statement=1, i=i', j]:" + "[{0}'=0, i', j'] -> [{0}=1, i=i', j] : " "0 <= i' < pi and 0 <= j' < pj and j > j' and 0 <= j < pj; " - "[_lp_linchk_statement'=0, i', j'] -> [_lp_linchk_statement=1, i=i', j=j']:" + "[{0}'=0, i', j'] -> [{0}=1, i=i', j=j'] : " "0 <= i' < pi and 0 <= j' < pj " - "}" + "}}".format(STATEMENT_VAR_NAME) ) # isl ignores these apostrophes, so explicitly add them expected_sio = append_marker_to_isl_map_var_names( @@ -601,10 +587,10 @@ def test_statement_instance_ordering_creation(): # Relationship between insn_b and insn_d --------------------------------------- expected_sio = isl.Map( - "[pt, pi, pj] -> { " - "[_lp_linchk_statement'=0, i', j'] -> [_lp_linchk_statement=1, t]:" + "[pt, pi, pj] -> {{ " + "[{0}'=0, i', j'] -> [{0}=1, t] : " "0 <= i' < pi and 0 <= j' < pj and 0 <= t < pt " - "}" + "}}".format(STATEMENT_VAR_NAME) ) # isl ignores these apostrophes, so explicitly add them expected_sio = append_marker_to_isl_map_var_names( @@ -616,10 +602,10 @@ def test_statement_instance_ordering_creation(): # Relationship between insn_c 
and insn_d --------------------------------------- expected_sio = isl.Map( - "[pt, pi, pj] -> { " - "[_lp_linchk_statement'=0, i', j'] -> [_lp_linchk_statement=1, t]:" + "[pt, pi, pj] -> {{ " + "[{0}'=0, i', j'] -> [{0}=1, t] : " "0 <= i' < pi and 0 <= j' < pj and 0 <= t < pt " - "}" + "}}".format(STATEMENT_VAR_NAME) ) # isl ignores these apostrophes, so explicitly add them expected_sio = append_marker_to_isl_map_var_names( -- GitLab From 0e8e3f448cedd993bf8a1a53048404c9f0c7dc5d Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Fri, 10 Jul 2020 08:34:04 -0500 Subject: [PATCH 292/415] minor comment change --- loopy/schedule/checker/dependency.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/loopy/schedule/checker/dependency.py b/loopy/schedule/checker/dependency.py index 5507b2260..b8179fdb4 100644 --- a/loopy/schedule/checker/dependency.py +++ b/loopy/schedule/checker/dependency.py @@ -242,8 +242,8 @@ def create_legacy_dependency_constraint( priority_known = True if priority_known: - # PRIOR requires statement_before complete previous iterations - # of loops before statement_after completes current iteration + # PRIOR requires statement before complete previous iterations + # of loops before statement after completes current iteration # according to loop nest order inames_list_nest_ordered = [ iname for iname in priority_tuple -- GitLab From d4f9b43d5453f629b3b0a004ce4edb91d36b6a38 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 13 Jul 2020 02:58:25 -0500 Subject: [PATCH 293/415] comment clarifications --- loopy/schedule/checker/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/loopy/schedule/checker/__init__.py b/loopy/schedule/checker/__init__.py index 681e2cdb4..621b51bab 100644 --- a/loopy/schedule/checker/__init__.py +++ b/loopy/schedule/checker/__init__.py @@ -214,6 +214,7 @@ def create_dependencies_from_legacy_knl(knl): insn_before_inames & insn_after_inames & non_conc_inames) # TODO what to do if there is already a dep from insn_before->insn_after? + # (currently just add a new one) # create a map representing constraints from the dependency, # which maps statement instance to all stmt instances that must occur @@ -269,7 +270,7 @@ def create_dependencies_from_legacy_knl(knl): {LegacyDependencyType.PRIOR: shared_non_conc_inames}, ) - # TODO what to do if there is already a dep from sink->source? + # TODO what if there is already a different dep from sink->source? 
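An aside on the lexicographic order maps templated in the test changes above:
the relation they encode is ordinary dictionary order on the lex time points,
which for two dimensions can be written directly as an isl map.  A small sanity
check of the idea (dimension names are arbitrary and unrelated to the checker's
LEX_VAR_PREFIX naming):

    import islpy as isl

    lex2 = isl.Map(
        "{ [a0, a1] -> [b0, b1] : a0 < b0 or (a0 = b0 and a1 < b1) }")
    eq2 = isl.Map(
        "{ [a0, a1] -> [b0, b1] : b0 = a0 and b1 = a1 }")

    # the order is strict: no lex point comes before itself
    assert (lex2 & eq2).is_empty()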
dep_maps.add(( sink_id, source_id, -- GitLab From 200eed41de56f90bec1a8c3f85d6a3ef9ddc05bc Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 14 Jul 2020 09:07:08 -0500 Subject: [PATCH 294/415] update tests after removeal of PairwiseScheduleBuilder class --- test/test_linearization_checker.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/test/test_linearization_checker.py b/test/test_linearization_checker.py index 82658bc01..9ad268edb 100644 --- a/test/test_linearization_checker.py +++ b/test/test_linearization_checker.py @@ -50,7 +50,7 @@ else: faulthandler.enable() -# {{{ test PairwiseScheduleBuilder and map creation +# {{{ test pairwise schedule map creation def test_pairwise_schedule_and_map_creation(): import islpy as isl @@ -379,6 +379,9 @@ def test_statement_instance_ordering_creation(): from loopy.schedule.checker import ( get_schedule_for_statement_pair, ) + from loopy.schedule.checker.schedule import ( + get_lex_order_map_for_sched_space, + ) from loopy.schedule.checker.utils import ( ensure_dim_names_match_and_align, append_marker_to_isl_map_var_names, @@ -431,18 +434,16 @@ def test_statement_instance_ordering_creation(): expected_sio, ): - sched_builder = get_schedule_for_statement_pair( + # Get pairwise schedule + sched_map_before, sched_map_after = get_schedule_for_statement_pair( knl, linearization_items, insn_id_before, insn_id_after, ) - # Get two isl maps from the PairwiseScheduleBuilder - sched_map_before, sched_map_after = sched_builder.build_maps(knl) - # get map representing lexicographic ordering - sched_lex_order_map = sched_builder.get_lex_order_map_for_sched_space() + sched_lex_order_map = get_lex_order_map_for_sched_space(sched_map_before) assert sched_lex_order_map == expected_lex_order_map -- GitLab From cd1c1310b88d4f22157e6f9b5b79774f0e5f397f Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 14 Jul 2020 09:07:49 -0500 Subject: [PATCH 295/415] in create_lex_order_map(), make n_dims arg optional --- loopy/schedule/checker/lexicographic_order_map.py | 4 +++- loopy/schedule/checker/schedule.py | 3 +-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/loopy/schedule/checker/lexicographic_order_map.py b/loopy/schedule/checker/lexicographic_order_map.py index b547e1d94..0966cba99 100644 --- a/loopy/schedule/checker/lexicographic_order_map.py +++ b/loopy/schedule/checker/lexicographic_order_map.py @@ -128,7 +128,7 @@ def get_lex_order_constraint(before_names, after_names, islvars=None): def create_lex_order_map( - n_dims, + n_dims=None, before_names=None, after_names=None, ): @@ -166,6 +166,8 @@ def create_lex_order_map( append_marker_to_strings, ) before_names = append_marker_to_strings(after_names, marker="'") + if n_dims is None: + n_dims = len(after_names) assert len(before_names) == len(after_names) == n_dims dim_type = isl.dim_type diff --git a/loopy/schedule/checker/schedule.py b/loopy/schedule/checker/schedule.py index ad2ecefc6..a73c72cb2 100644 --- a/loopy/schedule/checker/schedule.py +++ b/loopy/schedule/checker/schedule.py @@ -341,5 +341,4 @@ def get_lex_order_map_for_sched_space(schedule): ) lex_dim_names = schedule.space.get_var_names(isl.dim_type.out) - return create_lex_order_map( - len(lex_dim_names), after_names=lex_dim_names) + return create_lex_order_map(after_names=lex_dim_names) -- GitLab From c1ec6735ba05462bca688e92321abed367e2794e Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 14 Jul 2020 09:26:01 -0500 Subject: [PATCH 296/415] update after removeal of PairwiseScheduleBuilder class and 
get_lex_order_map_for_sched_space() changed from method to standalone function --- loopy/schedule/checker/__init__.py | 16 +++++++--------- loopy/schedule/checker/dependency.py | 2 +- 2 files changed, 8 insertions(+), 10 deletions(-) diff --git a/loopy/schedule/checker/__init__.py b/loopy/schedule/checker/__init__.py index 7fccd335c..2869ef17f 100644 --- a/loopy/schedule/checker/__init__.py +++ b/loopy/schedule/checker/__init__.py @@ -294,6 +294,9 @@ def check_linearization_validity( from loopy.schedule.checker.utils import ( prettier_map_string, ) + from loopy.schedule.checker.schedule import ( + get_lex_order_map_for_sched_space, + ) # Preprocess if not already preprocessed # note: kernels must always be preprocessed before scheduling @@ -304,22 +307,17 @@ def check_linearization_validity( linearization_is_valid = True for insn_id_before, insn_id_after, constraint_map in dep_maps: - # Create PairwiseScheduleBuilder: mapping of {statement instance: lex point} - # include only instructions involved in this dependency - sched_builder = get_schedule_for_statement_pair( + # Get two isl maps from {statement instance: lex point}, + # one for each linearization item involved in the dependency + isl_sched_map_before, isl_sched_map_after = get_schedule_for_statement_pair( preprocessed_knl, linearization_items, insn_id_before, insn_id_after, ) - # Get two isl maps from the PairwiseScheduleBuilder, - # one for each linearization item involved in the dependency; - isl_sched_map_before, isl_sched_map_after = sched_builder.build_maps( - preprocessed_knl) - # get map representing lexicographic ordering - sched_lex_order_map = sched_builder.get_lex_order_map_for_sched_space() + sched_lex_order_map = get_lex_order_map_for_sched_space(isl_sched_map_before) # create statement instance ordering, # maps each statement instance to all statement instances occuring later diff --git a/loopy/schedule/checker/dependency.py b/loopy/schedule/checker/dependency.py index b8179fdb4..5afeb8fd4 100644 --- a/loopy/schedule/checker/dependency.py +++ b/loopy/schedule/checker/dependency.py @@ -267,7 +267,7 @@ def create_legacy_dependency_constraint( constraint_set = create_elementwise_comparison_conjunction_set( inames_prime, inames_list, islvars, op="lt") - # get ints representing statements in PairwiseSchedule + # get ints representing statements in pairwise schedule s_before_int = 0 s_after_int = 0 if insn_id_before == insn_id_after else 1 -- GitLab From 67887d36ed9eb1b1a229833b4590cac030f7d2b1 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sun, 26 Jul 2020 21:02:18 -0500 Subject: [PATCH 297/415] update sio test to deal with new output from get_schedules_for_statement_pairs(); don't hardcode expected lex order maps, instead create them to match expected dim size --- test/test_linearization_checker.py | 79 ++++++++++++++---------------- 1 file changed, 36 insertions(+), 43 deletions(-) diff --git a/test/test_linearization_checker.py b/test/test_linearization_checker.py index 7a1723d47..f081e2184 100644 --- a/test/test_linearization_checker.py +++ b/test/test_linearization_checker.py @@ -360,7 +360,7 @@ def test_pairwise_schedule_creation(): def test_statement_instance_ordering_creation(): import islpy as isl from loopy.schedule.checker import ( - get_schedule_for_statement_pair, + get_schedules_for_statement_pairs, ) from loopy.schedule.checker.schedule import ( get_lex_order_map_for_sched_space, @@ -371,6 +371,7 @@ def test_statement_instance_ordering_creation(): ) from loopy.schedule.checker.lexicographic_order_map import 
( get_statement_ordering_map, + create_lex_order_map, ) # example kernel (add deps to fix loop order) @@ -410,24 +411,44 @@ def test_statement_instance_ordering_creation(): knl = get_one_linearized_kernel(knl) linearization_items = knl.linearization + # Get pairwise schedules + insn_id_pairs = [ + ("insn_a", "insn_b"), + ("insn_a", "insn_c"), + ("insn_a", "insn_d"), + ("insn_b", "insn_c"), + ("insn_b", "insn_d"), + ("insn_c", "insn_d"), + ] + sched_maps = get_schedules_for_statement_pairs( + knl, + linearization_items, + insn_id_pairs, + ) + def check_sio_for_insn_pair( insn_id_before, insn_id_after, - expected_lex_order_map, + expected_lex_dims, expected_sio, ): # Get pairwise schedule - sched_map_before, sched_map_after = get_schedule_for_statement_pair( - knl, - linearization_items, - insn_id_before, - insn_id_after, - ) + sched_map_before, sched_map_after = sched_maps[ + (insn_id_before, insn_id_after)] - # get map representing lexicographic ordering + # Get map representing lexicographic ordering sched_lex_order_map = get_lex_order_map_for_sched_space(sched_map_before) + # Get expected lex order map + expected_lex_order_map = create_lex_order_map( + n_dims=expected_lex_dims, + before_names=["%s%d'" % (LEX_VAR_PREFIX, i) + for i in range(expected_lex_dims)], + after_names=["%s%d" % (LEX_VAR_PREFIX, i) + for i in range(expected_lex_dims)], + ) + assert sched_lex_order_map == expected_lex_order_map # create statement instance ordering, @@ -442,28 +463,6 @@ def test_statement_instance_ordering_creation(): assert sio_aligned == expected_sio - expected_lex_order_map = isl.Map( - "{{ " - "[{0}0', {0}1', {0}2', {0}3', {0}4'] -> [{0}0, {0}1, {0}2, {0}3, {0}4] :" - "(" - "{0}0' < {0}0 " - ") or (" - "{0}0'={0}0 and {0}1' < {0}1 " - ") or (" - "{0}0'={0}0 and {0}1'={0}1 and {0}2' < {0}2 " - ") or (" - "{0}0'={0}0 and {0}1'={0}1 and {0}2'={0}2 and {0}3' < {0}3 " - ") or (" - "{0}0'={0}0 and {0}1'={0}1 and {0}2'={0}2 and {0}3'={0}3 and {0}4' < {0}4" - ")" - "}}".format(LEX_VAR_PREFIX)) - - # Isl ignores these apostrophes, but test would still pass since it ignores - # variable names when checking for equality. Even so, explicitly add apostrophes - # for sanity. 
- expected_lex_order_map = append_marker_to_isl_map_var_names( - expected_lex_order_map, isl.dim_type.in_, "'") - # Relationship between insn_a and insn_b --------------------------------------- expected_sio = isl.Map( @@ -478,8 +477,7 @@ def test_statement_instance_ordering_creation(): expected_sio = append_marker_to_isl_map_var_names( expected_sio, isl.dim_type.in_, "'") - check_sio_for_insn_pair( - "insn_a", "insn_b", expected_lex_order_map, expected_sio) + check_sio_for_insn_pair("insn_a", "insn_b", 3, expected_sio) # Relationship between insn_a and insn_c --------------------------------------- @@ -495,8 +493,7 @@ def test_statement_instance_ordering_creation(): expected_sio = append_marker_to_isl_map_var_names( expected_sio, isl.dim_type.in_, "'") - check_sio_for_insn_pair( - "insn_a", "insn_c", expected_lex_order_map, expected_sio) + check_sio_for_insn_pair("insn_a", "insn_c", 3, expected_sio) # Relationship between insn_a and insn_d --------------------------------------- @@ -510,8 +507,7 @@ def test_statement_instance_ordering_creation(): expected_sio = append_marker_to_isl_map_var_names( expected_sio, isl.dim_type.in_, "'") - check_sio_for_insn_pair( - "insn_a", "insn_d", expected_lex_order_map, expected_sio) + check_sio_for_insn_pair("insn_a", "insn_d", 3, expected_sio) # Relationship between insn_b and insn_c --------------------------------------- @@ -529,8 +525,7 @@ def test_statement_instance_ordering_creation(): expected_sio = append_marker_to_isl_map_var_names( expected_sio, isl.dim_type.in_, "'") - check_sio_for_insn_pair( - "insn_b", "insn_c", expected_lex_order_map, expected_sio) + check_sio_for_insn_pair("insn_b", "insn_c", 3, expected_sio) # Relationship between insn_b and insn_d --------------------------------------- @@ -544,8 +539,7 @@ def test_statement_instance_ordering_creation(): expected_sio = append_marker_to_isl_map_var_names( expected_sio, isl.dim_type.in_, "'") - check_sio_for_insn_pair( - "insn_b", "insn_d", expected_lex_order_map, expected_sio) + check_sio_for_insn_pair("insn_b", "insn_d", 3, expected_sio) # Relationship between insn_c and insn_d --------------------------------------- @@ -559,8 +553,7 @@ def test_statement_instance_ordering_creation(): expected_sio = append_marker_to_isl_map_var_names( expected_sio, isl.dim_type.in_, "'") - check_sio_for_insn_pair( - "insn_c", "insn_d", expected_lex_order_map, expected_sio) + check_sio_for_insn_pair("insn_c", "insn_d", 3, expected_sio) # }}} -- GitLab From 81dd0eee59b577edc58c41be83e425f110a2e1b3 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sun, 26 Jul 2020 21:14:55 -0500 Subject: [PATCH 298/415] add independent test for lex order map creation --- test/test_linearization_checker.py | 61 ++++++++++++++++++++++++++++++ 1 file changed, 61 insertions(+) diff --git a/test/test_linearization_checker.py b/test/test_linearization_checker.py index f081e2184..6070909c5 100644 --- a/test/test_linearization_checker.py +++ b/test/test_linearization_checker.py @@ -355,6 +355,67 @@ def test_pairwise_schedule_creation(): # }}} +# {{{ test lex order map creation + +def test_lex_order_map_creation(): + import islpy as isl + from loopy.schedule.checker.lexicographic_order_map import ( + create_lex_order_map, + ) + from loopy.schedule.checker.utils import ( + append_marker_to_isl_map_var_names, + ) + + def _check_lex_map(expected_lex_order_map, n_dims): + # Isl ignores the apostrophes, so explicitly add them + expected_lex_order_map = append_marker_to_isl_map_var_names( + expected_lex_order_map, isl.dim_type.in_, "'") + + 
lex_order_map = create_lex_order_map( + n_dims=n_dims, + before_names=["%s%d'" % (LEX_VAR_PREFIX, i) for i in range(n_dims)], + after_names=["%s%d" % (LEX_VAR_PREFIX, i) for i in range(n_dims)], + ) + + assert lex_order_map == expected_lex_order_map + assert ( + lex_order_map.get_var_names(isl.dim_type.in_) == + expected_lex_order_map.get_var_names(isl.dim_type.in_)) + assert ( + lex_order_map.get_var_names(isl.dim_type.out) == + expected_lex_order_map.get_var_names(isl.dim_type.out)) + + expected_lex_order_map = isl.Map( + "{{ " + "[{0}0', {0}1', {0}2', {0}3', {0}4'] -> [{0}0, {0}1, {0}2, {0}3, {0}4] :" + "(" + "{0}0' < {0}0 " + ") or (" + "{0}0'={0}0 and {0}1' < {0}1 " + ") or (" + "{0}0'={0}0 and {0}1'={0}1 and {0}2' < {0}2 " + ") or (" + "{0}0'={0}0 and {0}1'={0}1 and {0}2'={0}2 and {0}3' < {0}3 " + ") or (" + "{0}0'={0}0 and {0}1'={0}1 and {0}2'={0}2 and {0}3'={0}3 and {0}4' < {0}4" + ")" + "}}".format(LEX_VAR_PREFIX)) + + _check_lex_map(expected_lex_order_map, 5) + + expected_lex_order_map = isl.Map( + "{{ " + "[{0}0'] -> [{0}0] :" + "(" + "{0}0' < {0}0 " + ")" + "}}".format(LEX_VAR_PREFIX)) + + _check_lex_map(expected_lex_order_map, 1) + +# }}} + + # {{{ test statement instance ordering creation def test_statement_instance_ordering_creation(): -- GitLab From 5f060a84d96cf960c50a528b0b37b18ec355c170 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sun, 2 Aug 2020 21:32:55 -0500 Subject: [PATCH 299/415] reduce the number of dims expected in lex maps after update that simplified lex maps --- test/test_linearization_checker.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/test/test_linearization_checker.py b/test/test_linearization_checker.py index a3a95b624..bf33bebb2 100644 --- a/test/test_linearization_checker.py +++ b/test/test_linearization_checker.py @@ -538,7 +538,7 @@ def test_statement_instance_ordering_creation(): expected_sio = append_marker_to_isl_map_var_names( expected_sio, isl.dim_type.in_, "'") - check_sio_for_insn_pair("insn_a", "insn_b", 3, expected_sio) + check_sio_for_insn_pair("insn_a", "insn_b", 2, expected_sio) # Relationship between insn_a and insn_c --------------------------------------- @@ -554,7 +554,7 @@ def test_statement_instance_ordering_creation(): expected_sio = append_marker_to_isl_map_var_names( expected_sio, isl.dim_type.in_, "'") - check_sio_for_insn_pair("insn_a", "insn_c", 3, expected_sio) + check_sio_for_insn_pair("insn_a", "insn_c", 2, expected_sio) # Relationship between insn_a and insn_d --------------------------------------- @@ -568,7 +568,7 @@ def test_statement_instance_ordering_creation(): expected_sio = append_marker_to_isl_map_var_names( expected_sio, isl.dim_type.in_, "'") - check_sio_for_insn_pair("insn_a", "insn_d", 3, expected_sio) + check_sio_for_insn_pair("insn_a", "insn_d", 1, expected_sio) # Relationship between insn_b and insn_c --------------------------------------- @@ -600,7 +600,7 @@ def test_statement_instance_ordering_creation(): expected_sio = append_marker_to_isl_map_var_names( expected_sio, isl.dim_type.in_, "'") - check_sio_for_insn_pair("insn_b", "insn_d", 3, expected_sio) + check_sio_for_insn_pair("insn_b", "insn_d", 1, expected_sio) # Relationship between insn_c and insn_d --------------------------------------- @@ -614,7 +614,7 @@ def test_statement_instance_ordering_creation(): expected_sio = append_marker_to_isl_map_var_names( expected_sio, isl.dim_type.in_, "'") - check_sio_for_insn_pair("insn_c", "insn_d", 3, expected_sio) + check_sio_for_insn_pair("insn_c", "insn_d", 1, 
expected_sio) # }}} -- GitLab From b51a17b484c163ccd9d51d9bc2a2bc012ad1e398 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Tue, 4 Aug 2020 04:56:26 -0500 Subject: [PATCH 300/415] update after renamingn of insert_missing_dims_and_reorder_by_name()->reorder_dims_by_name() --- loopy/schedule/checker/dependency.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/loopy/schedule/checker/dependency.py b/loopy/schedule/checker/dependency.py index a56293f48..1fced6968 100644 --- a/loopy/schedule/checker/dependency.py +++ b/loopy/schedule/checker/dependency.py @@ -139,7 +139,7 @@ def create_legacy_dependency_constraint( make_islvars_with_marker, append_apostrophes, add_dims_to_isl_set, - insert_missing_dims_and_reorder_by_name, + reorder_dims_by_name, append_marker_to_isl_map_var_names, sorted_union_of_names_in_isl_sets, ) @@ -302,17 +302,17 @@ def create_legacy_dependency_constraint( [statement_var_name_prime], statement_var_idx) # insert inames missing from doms to enable intersection - # TODO nothing should be missing now + # TODO nothing should be missing now, just reorder assert set( append_apostrophes([STATEMENT_VAR_NAME] + dom_inames_ordered_before) ) == set(domain_to_intersect.get_var_names(isl.dim_type.out)) assert set( [STATEMENT_VAR_NAME] + dom_inames_ordered_after ) == set(range_to_intersect.get_var_names(isl.dim_type.out)) - domain_to_intersect = insert_missing_dims_and_reorder_by_name( + domain_to_intersect = reorder_dims_by_name( domain_to_intersect, isl.dim_type.out, append_apostrophes([STATEMENT_VAR_NAME] + dom_inames_ordered_before)) - range_to_intersect = insert_missing_dims_and_reorder_by_name( + range_to_intersect = reorder_dims_by_name( range_to_intersect, isl.dim_type.out, [STATEMENT_VAR_NAME] + dom_inames_ordered_after) -- GitLab From 9ab0a22d1232f8dabeb0ae7bb3b2e880f808c225 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Sun, 27 Sep 2020 21:24:01 -0500 Subject: [PATCH 301/415] rename get_lex_order_constraint->get_lex_order_set; lots of documenation/naming/comment improvements for clarity --- .../checker/lexicographic_order_map.py | 168 ++++++++++-------- loopy/schedule/checker/schedule.py | 12 +- loopy/schedule/checker/utils.py | 21 +-- 3 files changed, 109 insertions(+), 92 deletions(-) diff --git a/loopy/schedule/checker/lexicographic_order_map.py b/loopy/schedule/checker/lexicographic_order_map.py index 0966cba99..d9066030f 100644 --- a/loopy/schedule/checker/lexicographic_order_map.py +++ b/loopy/schedule/checker/lexicographic_order_map.py @@ -25,17 +25,19 @@ import islpy as isl def get_statement_ordering_map( - sched_map_before, sched_map_after, lex_map, before_marker="'"): - """Return a mapping that maps each statement instance to - all statement instances occuring later. + sched_before, sched_after, lex_map, before_marker="'"): + """Return a statement ordering represented as a map from each statement + instance to all statement instances occurring later. - :arg sched_map_before: An :class:`islpy.Map` representing instruction - instance order for the dependee as a mapping from each statement - instance to a point in the lexicographic ordering. + :arg sched_before: An :class:`islpy.Map` representing a schedule + as a mapping from statement instances (for one particular statement) + to lexicographic time. The statement represented will typically + be the dependee in a dependency relationship. 
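# --------------------------------------------------------------------------
# [Editor's sketch: not part of the patch series.]  A toy illustration of the
# composition B o L o A^-1 that get_statement_ordering_map performs, using
# only islpy operations already shown in these diffs (isl.Map, apply_range,
# reverse).  The one-iname schedules, statement ids, and dimension names
# below are invented for the example.
import islpy as isl

# B: instances of the "before" statement (statement id 0) -> lex time
sched_before = isl.Map(
    "[pi] -> { [s, i] -> [i, 0] : s = 0 and 0 <= i < pi }")
# A: instances of the "after" statement (statement id 1) -> lex time
sched_after = isl.Map(
    "[pi] -> { [s, i] -> [i, 1] : s = 1 and 0 <= i < pi }")
# L: "happens before" relation on the two lexicographic dimensions
lex_map = isl.Map(
    "{ [l0, l1] -> [m0, m1] : l0 < m0 or (l0 = m0 and l1 < m1) }")

# Statement instance ordering: B o L o A^-1
sio = sched_before.apply_range(lex_map).apply_range(sched_after.reverse())
# Expected: statement 0 at iteration ib precedes statement 1 at iteration ia
# exactly when ib <= ia (within the loop bounds).
print(sio)
# --------------------------------------------------------------------------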
- :arg sched_map_after: An :class:`islpy.Map` representing instruction - instance order for the depender as a mapping from each statement - instance to a point in the lexicographic ordering. + :arg sched_after: An :class:`islpy.Map` representing a schedule + as a mapping from statement instances (for one particular statement) + to lexicographic time. The statement represented will typically + be the depender in a dependency relationship. :arg lex_map: An :class:`islpy.Map` representing a lexicographic ordering as a mapping from each point in lexicographic time @@ -45,17 +47,23 @@ def get_statement_ordering_map( i0' < i0 or (i0' = i0 and i1' < i1) or (i0' = i0 and i1' = i1 and i2' < i2) ...} - :returns: An :class:`islpy.Map` representing the lex schedule as + :arg before_marker: A :class:`str` to be appended to the names of the + map dimensions representing the 'before' statement in the + 'happens before' relationship. + + :returns: An :class:`islpy.Map` representing the statement odering as a mapping from each statement instance to all statement instances - occuring later. I.e., we compose relations B, L, and A as - B ∘ L ∘ A^-1, where B is sched_map_before, A is sched_map_after, - and L is the lexicographic ordering map. + occurring later. I.e., we compose relations B, L, and A as + B ∘ L ∘ A^-1, where B is `sched_before`, A is `sched_after`, + and L is `lex_map`. """ - sio = sched_map_before.apply_range( - lex_map).apply_range(sched_map_after.reverse()) - # append marker to in names + # Perform the composition of relations + sio = sched_before.apply_range( + lex_map).apply_range(sched_after.reverse()) + + # Append marker to in_ dims from loopy.schedule.checker.utils import ( append_marker_to_isl_map_var_names, ) @@ -63,30 +71,38 @@ def get_statement_ordering_map( sio, isl.dim_type.in_, before_marker) -def get_lex_order_constraint(before_names, after_names, islvars=None): - """Return a constraint represented as an :class:`islpy.Set` - defining a 'happens before' relationship in a lexicographic - ordering. - - :arg before_names: A list of :class:`str` variable names representing - the lexicographic space dimensions for a point in lexicographic - time that occurs before. (see example below) - - :arg after_names: A list of :class:`str` variable names representing - the lexicographic space dimensions for a point in lexicographic - time that occurs after. (see example below) - - :arg islvars: A dictionary from variable names to :class:`islpy.PwAff` - instances that represent each of the variables - (islvars may be produced by `islpy.make_zero_and_vars`). The key - '0' is also include and represents a :class:`islpy.PwAff` zero constant. - This dictionary defines the space to be used for the set. If no - value is passed, the dictionary will be made using ``before_names`` - and ``after_names``. - - :returns: An :class:`islpy.Set` representing a constraint that enforces a - lexicographic ordering. E.g., if ``before_names = [i0', i1', i2']`` and - ``after_names = [i0, i1, i2]``, return the set:: +def get_lex_order_set(before_names, after_names, islvars=None): + """Return an :class:`islpy.Set` representing a lexicographic ordering + with the number of dimensions provided in `before_names` + (equal to the number of dimensions in `after_names`). + + :arg before_names: A list of :class:`str` variable names to be used + to describe lexicographic space dimensions for a point in a lexicographic + ordering that occurs before another point, which will be represented using + `after_names`. 
(see example below) + + :arg after_names: A list of :class:`str` variable names to be used + to describe lexicographic space dimensions for a point in a lexicographic + ordering that occurs after another point, which will be represented using + `before_names`. (see example below) + + :arg islvars: A dictionary mapping variable names in `before_names` and + `after_names` to :class:`islpy.PwAff` instances that represent each + of the variables (islvars may be produced by `islpy.make_zero_and_vars`). + The key '0' is also include and represents a :class:`islpy.PwAff` zero + constant. This dictionary defines the space to be used for the set. If no + value is passed, the dictionary will be made using `before_names` + and `after_names`. + + :returns: An :class:`islpy.Set` representing a big-endian lexicographic ordering + with the number of dimensions provided in `before_names`. The set + has one dimension for each name in *both* `before_names` and + `after_names`, and contains all points which meet a 'happens before' + constraint defining the lexicographic ordering. E.g., if + `before_names = [i0', i1', i2']` and `after_names = [i0, i1, i2]`, + return the set containing all points in a 3-dimensional, big-endian + lexicographic ordering such that point + `[i0', i1', i2']` happens before `[i0, i1, i2]`. I.e., return:: {[i0', i1', i2', i0, i1, i2] : i0' < i0 or (i0' = i0 and i1' < i1) @@ -98,33 +114,31 @@ def get_lex_order_constraint(before_names, after_names, islvars=None): if islvars is None: islvars = isl.make_zero_and_vars(before_names+after_names, []) - # Initialize constraint with i0' < i0 - lex_order_constraint = islvars[before_names[0]].lt_set(islvars[after_names[0]]) + # Initialize set with constraint i0' < i0 + lex_order_set = islvars[before_names[0]].lt_set(islvars[after_names[0]]) - # Initialize conjunction constraint with True. - # For each dim d, starting with d=1, this conjunction will have d equalities, - # e.g., (i0' = i0 and i1' = i1 and ... i(d-1)' = i(d-1)) - equality_constraint_conj = islvars[0].eq_set(islvars[0]) + # For each dim d, starting with d=1, equality_conj_set will be constrained + # by d equalities, e.g., (i0' = i0 and i1' = i1 and ... i(d-1)' = i(d-1)). + equality_conj_set = islvars[0].eq_set(islvars[0]) # initialize to 'true' for i in range(1, len(before_names)): - # Add the next equality constraint to equality_constraint_conj - equality_constraint_conj = equality_constraint_conj & \ + # Add the next equality constraint to equality_conj_set + equality_conj_set = equality_conj_set & \ islvars[before_names[i-1]].eq_set(islvars[after_names[i-1]]) - # Create a conjunction constraint by combining a less-than - # constraint for this dim, e.g., (i1' < i1), with the current - # equality constraint conjunction. - # For each dim d, starting with d=1, this conjunction will have d equalities, - # and one inequality, - # e.g., (i0' = i0 and i1' = i1 and ... i(d-1)' = i(d-1) and id' < id) - full_conj_constraint = islvars[before_names[i]].lt_set( - islvars[after_names[i]]) & equality_constraint_conj + # Create a set constrained by adding a less-than constraint for this dim, + # e.g., (i1' < i1), to the current equality conjunction set. + # For each dim d, starting with d=1, this full conjunction will have + # d equalities and one inequality, e.g., + # (i0' = i0 and i1' = i1 and ... 
i(d-1)' = i(d-1) and id' < id) + full_conj_set = islvars[before_names[i]].lt_set( + islvars[after_names[i]]) & equality_conj_set - # Union this new constraint with the current lex_order_constraint - lex_order_constraint = lex_order_constraint | full_conj_constraint + # Union this new constraint with the current lex_order_set + lex_order_set = lex_order_set | full_conj_set - return lex_order_constraint + return lex_order_set def create_lex_order_map( @@ -132,26 +146,28 @@ def create_lex_order_map( before_names=None, after_names=None, ): - """Return a mapping that maps each point in a lexicographic - ordering to every point that occurs later in lexicographic - time. + """Return a map from each point in a lexicographic ordering to every + point that occurs later in the lexicographic ordering. :arg n_dims: An :class:`int` representing the number of dimensions - in the lexicographic ordering. + in the lexicographic ordering. If not provided, `n_dims` will be + set to length of `after_names`. - :arg before_names: A list of :class:`str` variable names representing - the lexicographic space dimensions for a point in lexicographic - time that occurs before. (see example below) + :arg before_names: A list of :class:`str` variable names to be used + to describe lexicographic space dimensions for a point in a lexicographic + ordering that occurs before another point, which will be represented using + `after_names`. (see example below) - :arg after_names: A list of :class:`str` variable names representing - the lexicographic space dimensions for a point in lexicographic - time that occurs after. (see example below) + :arg after_names: A list of :class:`str` variable names to be used + to describe lexicographic space dimensions for a point in a lexicographic + ordering that occurs after another point, which will be represented using + `before_names`. (see example below) :returns: An :class:`islpy.Map` representing a lexicographic ordering as a mapping from each point in lexicographic time to every point that occurs later in lexicographic time. - E.g., if ``before_names = [i0', i1', i2']`` and - ``after_names = [i0, i1, i2]``, return the map:: + E.g., if `before_names = [i0', i1', i2']` and + `after_names = [i0, i1, i2]`, return the map:: {[i0', i1', i2'] -> [i0, i1, i2] : i0' < i0 or (i0' = i0 and i1' < i1) @@ -172,11 +188,11 @@ def create_lex_order_map( assert len(before_names) == len(after_names) == n_dims dim_type = isl.dim_type - lex_order_constraint = get_lex_order_constraint(before_names, after_names) + # First, get a set representing the lexicographic ordering. + lex_order_set = get_lex_order_set(before_names, after_names) - lex_map = isl.Map.from_domain(lex_order_constraint) - lex_map = lex_map.move_dims( + # Now convert that set to a map. + lex_map = isl.Map.from_domain(lex_order_set) + return lex_map.move_dims( dim_type.out, 0, dim_type.in_, len(before_names), len(after_names)) - - return lex_map diff --git a/loopy/schedule/checker/schedule.py b/loopy/schedule/checker/schedule.py index 97764a5e2..a947da3ac 100644 --- a/loopy/schedule/checker/schedule.py +++ b/loopy/schedule/checker/schedule.py @@ -317,17 +317,17 @@ def generate_pairwise_schedules( def get_lex_order_map_for_sched_space(schedule): """Return an :class:`islpy.BasicMap` that maps each point in a - lexicographic ordering to every point that is - lexocigraphically greater. + lexicographic ordering to every point that occurs later. 
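# --------------------------------------------------------------------------
# [Editor's sketch: not part of the patch series.]  The two-dimensional case
# of the big-endian lexicographic ordering documented above, written out
# directly in isl notation; this is the relation create_lex_order_map is
# described as producing for two dimensions.  The dimension names are
# arbitrary, since isl identifies in/out dimensions by position.
import islpy as isl

lex_map_2d = isl.Map(
    "{ [a0, a1] -> [b0, b1] : a0 < b0 or (a0 = b0 and a1 < b1) }")

# Sanity check: "happens before" is transitive, so composing the relation
# with itself stays inside the relation.
assert lex_map_2d.apply_range(lex_map_2d).is_subset(lex_map_2d)
# --------------------------------------------------------------------------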
:arg schedule: A :class:`islpy.Map` representing the ordering of statement instances as a mapping from statement instances to lexicographic time. - :returns: An :class:`islpy.BasicMap` that maps each point in a - lexicographic ordering to every point that is - lexocigraphically greater with the dimension number and names - matching the output dimension of `schedule`. + :returns: An :class:`islpy.BasicMap` representing a lexicographic + ordering as a mapping from each point in lexicographic time + to every point that occurs later in lexicographic time, with + the dimension count and names matching the output dimension + of `schedule`. """ diff --git a/loopy/schedule/checker/utils.py b/loopy/schedule/checker/utils.py index 959c2116d..db1d861c8 100644 --- a/loopy/schedule/checker/utils.py +++ b/loopy/schedule/checker/utils.py @@ -88,16 +88,19 @@ def ensure_dim_names_match_and_align(obj_map, tgt_map): def append_marker_to_isl_map_var_names(old_isl_map, dim_type, marker="'"): - """Return an isl_map with marker appended to - dim_type dimension names. + """Return an :class:`islpy.Map` with a marker appended to the specified + dimension names. - :arg old_isl_map: A :class:`islpy.Map`. + :arg old_isl_map: An :class:`islpy.Map`. - :arg dim_type: A :class:`islpy.dim_type`, i.e., an :class:`int`, + :arg dim_type: An :class:`islpy.dim_type`, i.e., an :class:`int`, specifying the dimension to be marked. - :returns: A :class:`islpy.Map` matching `old_isl_map` with - apostrophes appended to dim_type dimension names. + :arg marker: A :class:`str` to be appended to the specified dimension + names. If not provided, `marker` defaults to an apostrophe. + + :returns: An :class:`islpy.Map` matching `old_isl_map` with + `marker` appended to the `dim_type` dimension names. """ @@ -109,10 +112,8 @@ def append_marker_to_isl_map_var_names(old_isl_map, dim_type, marker="'"): def append_marker_to_strings(strings, marker="'"): - if not isinstance(strings, list): - raise ValueError("append_marker_to_strings did not receive a list") - else: - return [s+marker for s in strings] + assert isinstance(strings, list) + return [s+marker for s in strings] def sorted_union_of_names_in_isl_sets( -- GitLab From 48a040ad3b766fd5dff418966d550abea0ec682a Mon Sep 17 00:00:00 2001 From: Kaushik Kulkarni Date: Wed, 14 Oct 2020 01:24:50 -0500 Subject: [PATCH 302/415] complete the implementation of loopy.transform.data.rename_argument - handle cases with argument in domain - argument as a part of a variables shape expr. 
- added a test to check such use cases --- loopy/transform/data.py | 38 ++++++++++++++++++++++++++++++++++++-- test/test_transform.py | 12 ++++++++++++ 2 files changed, 48 insertions(+), 2 deletions(-) diff --git a/loopy/transform/data.py b/loopy/transform/data.py index 5356d4903..fb55251fa 100644 --- a/loopy/transform/data.py +++ b/loopy/transform/data.py @@ -635,12 +635,15 @@ def rename_argument(kernel, old_name, new_name, existing_ok=False): raise LoopyError("argument name '%s' conflicts with an existing identifier" "--cannot rename" % new_name) + # {{{ instructions + from pymbolic import var subst_dict = {old_name: var(new_name)} from loopy.symbolic import ( RuleAwareSubstitutionMapper, - SubstitutionRuleMappingContext) + SubstitutionRuleMappingContext, + SubstitutionMapper) from pymbolic.mapper.substitutor import make_subst_func rule_mapping_context = SubstitutionRuleMappingContext( kernel.substitutions, var_name_gen) @@ -650,14 +653,45 @@ def rename_argument(kernel, old_name, new_name, existing_ok=False): kernel = smap.map_kernel(kernel) + # }}} + + # {{{ args, temporary_variables + + from loopy.kernel.array import ArrayBase + subst_mapper = SubstitutionMapper(make_subst_func(subst_dict)) + new_args = [] for arg in kernel.args: if arg.name == old_name: arg = arg.copy(name=new_name) + if isinstance(arg, ArrayBase) and arg.shape: + arg = arg.copy(shape=subst_mapper(arg.shape)) new_args.append(arg) - return kernel.copy(args=new_args) + new_tvs = {} + for tv_name, tv in kernel.temporary_variables.items(): + if tv.shape: + tv = tv.copy(shape=subst_mapper(tv.shape)) + + new_tvs[tv_name] = tv + + # }}} + + # {{{ domain + + new_domains = [] + for dom in kernel.domains: + if old_name in dom.get_var_dict(): + dt, pos = dom.get_var_dict()[old_name] + dom = dom.set_dim_name(dt, pos, new_name) + + new_domains.append(dom) + + # }}} + + return kernel.copy(domains=new_domains, args=new_args, + temporary_variables=new_tvs) # }}} diff --git a/test/test_transform.py b/test/test_transform.py index e4ca2af0d..32cdb3710 100644 --- a/test/test_transform.py +++ b/test/test_transform.py @@ -672,6 +672,18 @@ def test_add_inames_for_unused_hw_axes(ctx_factory): parameters={"n": n}) +def test_rename_argument_of_domain_params(): + knl = lp.make_kernel( + "{[i]: 0<=i 1: exec(sys.argv[1]) -- GitLab From 30cb9fbbb99057790b468ab0d48d41920d19bf8b Mon Sep 17 00:00:00 2001 From: Kaushik Kulkarni Date: Wed, 14 Oct 2020 14:06:13 -0500 Subject: [PATCH 303/415] stride expression must also be substituted --- loopy/transform/data.py | 10 ++++++++-- test/test_transform.py | 5 +++-- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/loopy/transform/data.py b/loopy/transform/data.py index fb55251fa..8368655a4 100644 --- a/loopy/transform/data.py +++ b/loopy/transform/data.py @@ -665,14 +665,20 @@ def rename_argument(kernel, old_name, new_name, existing_ok=False): if arg.name == old_name: arg = arg.copy(name=new_name) if isinstance(arg, ArrayBase) and arg.shape: - arg = arg.copy(shape=subst_mapper(arg.shape)) + arg = arg.copy( + shape=subst_mapper(arg.shape), + dim_tags=[dim_tag.map_expr(subst_mapper) + for dim_tag in arg.dim_tags]) new_args.append(arg) new_tvs = {} for tv_name, tv in kernel.temporary_variables.items(): if tv.shape: - tv = tv.copy(shape=subst_mapper(tv.shape)) + tv = tv.copy( + shape=subst_mapper(tv.shape), + dim_tags=[dim_tag.map_expr(subst_mapper) + for dim_tag in tv.dim_tags]) new_tvs[tv_name] = tv diff --git a/test/test_transform.py b/test/test_transform.py index 32cdb3710..d8c20dc21 100644 --- 
a/test/test_transform.py +++ b/test/test_transform.py @@ -674,12 +674,13 @@ def test_add_inames_for_unused_hw_axes(ctx_factory): def test_rename_argument_of_domain_params(): knl = lp.make_kernel( - "{[i]: 0<=i Date: Wed, 14 Oct 2020 14:10:37 -0500 Subject: [PATCH 304/415] rephrase to reduce ISL calls MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Andreas Klöckner --- loopy/transform/data.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/loopy/transform/data.py b/loopy/transform/data.py index 8368655a4..eee23e984 100644 --- a/loopy/transform/data.py +++ b/loopy/transform/data.py @@ -688,8 +688,9 @@ def rename_argument(kernel, old_name, new_name, existing_ok=False): new_domains = [] for dom in kernel.domains: - if old_name in dom.get_var_dict(): - dt, pos = dom.get_var_dict()[old_name] + dom_var_dict = dom.get_var_dict() + if old_name in dom_var_dict: + dt, pos = dom_var_dict[old_name] dom = dom.set_dim_name(dt, pos, new_name) new_domains.append(dom) -- GitLab From 2e8eed1fcdaf8597805e460a636071c82558c9af Mon Sep 17 00:00:00 2001 From: Nicholas Christensen Date: Wed, 14 Oct 2020 18:20:29 -0500 Subject: [PATCH 305/415] add tags variable to KernelArgument --- loopy/kernel/data.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/loopy/kernel/data.py b/loopy/kernel/data.py index 43770ffb6..b0136a36e 100644 --- a/loopy/kernel/data.py +++ b/loopy/kernel/data.py @@ -263,7 +263,7 @@ def parse_tag(tag): # }}} -# {{{ memory address space +# {{{ memory address space class AddressSpace: """Storage location of a variable. @@ -339,6 +339,8 @@ class KernelArgument(ImmutableRecord): dtype = kwargs.pop("dtype", None) + tags = kwargs.pop("tags", None) + if "for_atomic" in kwargs: for_atomic = kwargs["for_atomic"] else: -- GitLab From 1f06347d133e395456201703b6f4518c797bd158 Mon Sep 17 00:00:00 2001 From: Nicholas Christensen Date: Wed, 14 Oct 2020 18:25:13 -0500 Subject: [PATCH 306/415] bump pytools requirement --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index bd03daac8..0e562ff1f 100644 --- a/setup.py +++ b/setup.py @@ -84,7 +84,7 @@ setup(name="loo.py", python_requires="~=3.6", install_requires=[ - "pytools>=2020.4", + "pytools>=2020.4.2", "pymbolic>=2019.2", "genpy>=2016.1.2", "cgen>=2016.1", -- GitLab From d7e86afa04ba85091b8ad8e079b982594e8da0e4 Mon Sep 17 00:00:00 2001 From: Nicholas Christensen Date: Wed, 14 Oct 2020 18:26:10 -0500 Subject: [PATCH 307/415] bump loopy version --- loopy/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/loopy/version.py b/loopy/version.py index fc6408dd7..fddd44479 100644 --- a/loopy/version.py +++ b/loopy/version.py @@ -42,7 +42,7 @@ else: # }}} -VERSION = (2020, 2) +VERSION = (2020, 2, 1) VERSION_STATUS = "" VERSION_TEXT = ".".join(str(x) for x in VERSION) + VERSION_STATUS -- GitLab From 1feb883c84ff9400dc76d854587698f5e2ace75e Mon Sep 17 00:00:00 2001 From: Nicholas Christensen Date: Wed, 14 Oct 2020 18:39:17 -0500 Subject: [PATCH 308/415] flake8 fixes --- loopy/kernel/data.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/loopy/kernel/data.py b/loopy/kernel/data.py index b0136a36e..3bbe52bb2 100644 --- a/loopy/kernel/data.py +++ b/loopy/kernel/data.py @@ -263,7 +263,7 @@ def parse_tag(tag): # }}} -# {{{ memory address space +# {{{ memory address space class AddressSpace: """Storage location of a variable. 
@@ -339,7 +339,7 @@ class KernelArgument(ImmutableRecord): dtype = kwargs.pop("dtype", None) - tags = kwargs.pop("tags", None) + tags = kwargs.pop("tags", None) if "for_atomic" in kwargs: for_atomic = kwargs["for_atomic"] -- GitLab From e51134d6078abda7bdae2cc4f2f00299a0ea19a6 Mon Sep 17 00:00:00 2001 From: Nicholas Christensen Date: Wed, 14 Oct 2020 18:45:38 -0500 Subject: [PATCH 309/415] noqa on tags variable --- loopy/kernel/data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/loopy/kernel/data.py b/loopy/kernel/data.py index 3bbe52bb2..70be4ccee 100644 --- a/loopy/kernel/data.py +++ b/loopy/kernel/data.py @@ -339,7 +339,7 @@ class KernelArgument(ImmutableRecord): dtype = kwargs.pop("dtype", None) - tags = kwargs.pop("tags", None) + tags = kwargs.pop("tags", None) # noqa: F841 if "for_atomic" in kwargs: for_atomic = kwargs["for_atomic"] -- GitLab From f08b5e1e4e9ce1f4d24399a2bbc06e3eaae86dfe Mon Sep 17 00:00:00 2001 From: Nicholas Christensen Date: Wed, 14 Oct 2020 18:48:20 -0500 Subject: [PATCH 310/415] additional space --- loopy/kernel/data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/loopy/kernel/data.py b/loopy/kernel/data.py index 70be4ccee..ce401d647 100644 --- a/loopy/kernel/data.py +++ b/loopy/kernel/data.py @@ -339,7 +339,7 @@ class KernelArgument(ImmutableRecord): dtype = kwargs.pop("dtype", None) - tags = kwargs.pop("tags", None) # noqa: F841 + tags = kwargs.pop("tags", None) # noqa: F841 if "for_atomic" in kwargs: for_atomic = kwargs["for_atomic"] -- GitLab From 95968507ba5c12f42546821966707321626ada88 Mon Sep 17 00:00:00 2001 From: Kaushik Kulkarni Date: Thu, 15 Oct 2020 13:28:14 -0500 Subject: [PATCH 311/415] RuleAwareIdentityMapper.map_kernel should also map shape/stride expressions of variables --- loopy/symbolic.py | 42 +++++++++++++++++++++++++++++++++++------ loopy/transform/data.py | 26 +++---------------------- 2 files changed, 39 insertions(+), 29 deletions(-) diff --git a/loopy/symbolic.py b/loopy/symbolic.py index 19ff83431..165c09a4e 100644 --- a/loopy/symbolic.py +++ b/loopy/symbolic.py @@ -980,6 +980,10 @@ class RuleAwareIdentityMapper(IdentityMapper): return sym def __call__(self, expr, kernel, insn): + """ + :arg insn: A :class:`~loopy.kernel.InstructionBase` of which *expr* is + a part of, or *None* if *expr*'s source is not an instruction. 
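# --------------------------------------------------------------------------
# [Editor's sketch: not part of the patch series.]  Intended end-to-end
# effect of the rename_argument changes above.  The kernel, the argument
# name "y", and the new name "n_new" are invented for illustration; only the
# public API (lp.make_kernel, lp.rename_argument, lp.generate_code_v2) is
# assumed.
import numpy as np
import loopy as lp

knl = lp.make_kernel(
    "{[i]: 0 <= i < n}",
    "y[i] = 2.0",
    [lp.GlobalArg("y", np.float64, shape=("n",)), ...])

# "n" occurs both as a domain parameter and in y's shape; with these patches
# both occurrences are renamed along with the ValueArg itself, so the
# generated signature refers only to the new parameter name.
knl = lp.rename_argument(knl, "n", "n_new")
print(lp.generate_code_v2(knl).device_code())
# --------------------------------------------------------------------------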
+ """ from loopy.kernel.data import InstructionBase assert insn is None or isinstance(insn, InstructionBase) @@ -1003,7 +1007,32 @@ class RuleAwareIdentityMapper(IdentityMapper): lambda expr: self(expr, kernel, insn))) for insn in kernel.instructions] - return kernel.copy(instructions=new_insns) + from loopy.kernel.array import ArrayBase + from functools import partial + non_insn_self = partial(self, kernel=kernel, insn=None) + + new_args = [] + for arg in kernel.args: + if isinstance(arg, ArrayBase) and arg.shape: + arg = arg.copy( + shape=non_insn_self(arg.shape), + dim_tags=[dim_tag.map_expr(non_insn_self) + for dim_tag in arg.dim_tags]) + + new_args.append(arg) + + new_tvs = {} + for tv_name, tv in kernel.temporary_variables.items(): + if tv.shape: + tv = tv.copy( + shape=non_insn_self(tv.shape), + dim_tags=[dim_tag.map_expr(non_insn_self) + for dim_tag in tv.dim_tags]) + + new_tvs[tv_name] = tv + + return kernel.copy(instructions=new_insns, args=new_args, + temporary_variables=new_tvs) class RuleAwareSubstitutionMapper(RuleAwareIdentityMapper): @@ -1014,11 +1043,12 @@ class RuleAwareSubstitutionMapper(RuleAwareIdentityMapper): self.within = within def map_variable(self, expr, expn_state): - if (expr.name in expn_state.arg_context - or not self.within( - expn_state.kernel, expn_state.instruction, expn_state.stack)): - return super(RuleAwareSubstitutionMapper, self).map_variable( - expr, expn_state) + if expn_state.instruction is not None: + if (expr.name in expn_state.arg_context + or not self.within(expn_state.kernel, expn_state.instruction, + expn_state.stack)): + return super(RuleAwareSubstitutionMapper, self).map_variable( + expr, expn_state) result = self.subst_func(expr) if result is not None: diff --git a/loopy/transform/data.py b/loopy/transform/data.py index eee23e984..b915dcc62 100644 --- a/loopy/transform/data.py +++ b/loopy/transform/data.py @@ -642,8 +642,7 @@ def rename_argument(kernel, old_name, new_name, existing_ok=False): from loopy.symbolic import ( RuleAwareSubstitutionMapper, - SubstitutionRuleMappingContext, - SubstitutionMapper) + SubstitutionRuleMappingContext) from pymbolic.mapper.substitutor import make_subst_func rule_mapping_context = SubstitutionRuleMappingContext( kernel.substitutions, var_name_gen) @@ -655,33 +654,15 @@ def rename_argument(kernel, old_name, new_name, existing_ok=False): # }}} - # {{{ args, temporary_variables - - from loopy.kernel.array import ArrayBase - subst_mapper = SubstitutionMapper(make_subst_func(subst_dict)) + # {{{ args new_args = [] for arg in kernel.args: if arg.name == old_name: arg = arg.copy(name=new_name) - if isinstance(arg, ArrayBase) and arg.shape: - arg = arg.copy( - shape=subst_mapper(arg.shape), - dim_tags=[dim_tag.map_expr(subst_mapper) - for dim_tag in arg.dim_tags]) new_args.append(arg) - new_tvs = {} - for tv_name, tv in kernel.temporary_variables.items(): - if tv.shape: - tv = tv.copy( - shape=subst_mapper(tv.shape), - dim_tags=[dim_tag.map_expr(subst_mapper) - for dim_tag in tv.dim_tags]) - - new_tvs[tv_name] = tv - # }}} # {{{ domain @@ -697,8 +678,7 @@ def rename_argument(kernel, old_name, new_name, existing_ok=False): # }}} - return kernel.copy(domains=new_domains, args=new_args, - temporary_variables=new_tvs) + return kernel.copy(domains=new_domains, args=new_args) # }}} -- GitLab From 9f3c7d22fcd23fefcb3770383559177a0bbd76a2 Mon Sep 17 00:00:00 2001 From: Kaushik Kulkarni Date: Thu, 15 Oct 2020 14:11:59 -0500 Subject: [PATCH 312/415] use ArrayBase.map_exprs --- loopy/symbolic.py | 16 +++++----------- 1 
file changed, 5 insertions(+), 11 deletions(-) diff --git a/loopy/symbolic.py b/loopy/symbolic.py index a8a8f3402..db06026d4 100644 --- a/loopy/symbolic.py +++ b/loopy/symbolic.py @@ -1008,27 +1008,21 @@ class RuleAwareIdentityMapper(IdentityMapper): from loopy.kernel.array import ArrayBase from functools import partial + non_insn_self = partial(self, kernel=kernel, insn=None) new_args = [] for arg in kernel.args: - if isinstance(arg, ArrayBase) and arg.shape: - arg = arg.copy( - shape=non_insn_self(arg.shape), - dim_tags=[dim_tag.map_expr(non_insn_self) - for dim_tag in arg.dim_tags]) + if isinstance(arg, ArrayBase): + arg = arg.map_exprs(non_insn_self) new_args.append(arg) new_tvs = {} for tv_name, tv in kernel.temporary_variables.items(): - if tv.shape: - tv = tv.copy( - shape=non_insn_self(tv.shape), - dim_tags=[dim_tag.map_expr(non_insn_self) - for dim_tag in tv.dim_tags]) + new_tvs[tv_name] = tv.map_exprs(non_insn_self) - new_tvs[tv_name] = tv + # variables names, domain dim names not expressions => do not map return kernel.copy(instructions=new_insns, args=new_args, temporary_variables=new_tvs) -- GitLab From f0e9708b63af3b81e80fd2fdb09e469da38b814e Mon Sep 17 00:00:00 2001 From: Kaushik Kulkarni Date: Fri, 30 Oct 2020 11:02:07 -0500 Subject: [PATCH 313/415] infer insn predicates while performing check_bounds --- loopy/check.py | 50 ++++++++++++++++++++++++++++++++++------------ test/test_loopy.py | 12 +++++++++++ 2 files changed, 49 insertions(+), 13 deletions(-) diff --git a/loopy/check.py b/loopy/check.py index e66af04d2..a19ed8634 100644 --- a/loopy/check.py +++ b/loopy/check.py @@ -375,6 +375,25 @@ def check_for_data_dependent_parallel_bounds(kernel): # {{{ check access bounds +def _condition_to_set(space, expr): + """ + Returns an instance of :class:`islpy.Set` is *expr* can be expressed as an + ISL-set on *space*, if not then returns *None*. 
+ """ + from loopy.symbolic import get_dependencies + if get_dependencies(expr) <= frozenset( + space.get_var_dict()): + try: + from loopy.symbolic import isl_set_from_expr + return isl_set_from_expr(space, expr) + except ExpressionToAffineConversionError: + # non-affine condition: can't do much + return None + else: + # data-dependent condition: can't do much + return None + + class _AccessCheckMapper(WalkMapper): def __init__(self, kernel, insn_id): self.kernel = kernel @@ -445,19 +464,11 @@ class _AccessCheckMapper(WalkMapper): % (expr, self.insn_id, access_range, shape_domain)) def map_if(self, expr, domain): - from loopy.symbolic import get_dependencies - if get_dependencies(expr.condition) <= frozenset( - domain.space.get_var_dict()): - try: - from loopy.symbolic import isl_set_from_expr - then_set = isl_set_from_expr(domain.space, expr.condition) - else_set = then_set.complement() - except ExpressionToAffineConversionError: - # non-affine condition: can't do much - then_set = else_set = isl.BasicSet.universe(domain.space) - else: - # data-dependent condition: can't do much + then_set = _condition_to_set(domain.space, expr.condition) + if then_set is None: then_set = else_set = isl.BasicSet.universe(domain.space) + else: + else_set = then_set.complement() self.rec(expr.then, domain & then_set) self.rec(expr.else_, domain & else_set) @@ -479,8 +490,21 @@ def check_bounds(kernel): domain, assumptions = isl.align_two(domain, kernel.assumptions) domain_with_assumptions = domain & assumptions + # {{{ handle insns predicates + + insn_preds_set = isl.BasicSet.universe(domain.space) + + for predicate in insn.predicates: + predicate_as_isl_set = _condition_to_set(domain.space, predicate) + if predicate_as_isl_set is not None: + insn_preds_set = insn_preds_set & predicate_as_isl_set + + # }}} + + domain_with_assumptions_with_pred = domain_with_assumptions & insn_preds_set + def run_acm(expr): - acm(expr, domain_with_assumptions) + acm(expr, domain_with_assumptions_with_pred) return expr insn.with_transformed_expressions(run_acm) diff --git a/test/test_loopy.py b/test/test_loopy.py index 41b5315e8..c31d008b7 100644 --- a/test/test_loopy.py +++ b/test/test_loopy.py @@ -2920,6 +2920,18 @@ def test_access_check_with_conditionals(): lp.generate_code_v2(legal_but_nonaffine_condition_knl) +def test_access_check_with_insn_predicates(): + knl = lp.make_kernel( + "{[i]: 0 1: exec(sys.argv[1]) -- GitLab From f3f50d07c92796f1bd92a878320b18290afc5fb1 Mon Sep 17 00:00:00 2001 From: Kaushik Kulkarni Date: Fri, 30 Oct 2020 12:10:10 -0500 Subject: [PATCH 314/415] fixes typo in _condition_to_set docs --- loopy/check.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/loopy/check.py b/loopy/check.py index a19ed8634..e16c43c53 100644 --- a/loopy/check.py +++ b/loopy/check.py @@ -377,7 +377,7 @@ def check_for_data_dependent_parallel_bounds(kernel): def _condition_to_set(space, expr): """ - Returns an instance of :class:`islpy.Set` is *expr* can be expressed as an + Returns an instance of :class:`islpy.Set` if *expr* can be expressed as an ISL-set on *space*, if not then returns *None*. 
""" from loopy.symbolic import get_dependencies -- GitLab From 70f4980b2a16b0b648f9c18a6f4ed2d0f102aefb Mon Sep 17 00:00:00 2001 From: Nick Date: Thu, 13 Sep 2018 12:07:59 -0400 Subject: [PATCH 315/415] more complicated example w/ parameters, previously broken w/ space conflict --- test/test_loopy.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/test/test_loopy.py b/test/test_loopy.py index c31d008b7..aa10f6334 100644 --- a/test/test_loopy.py +++ b/test/test_loopy.py @@ -2932,6 +2932,26 @@ def test_access_check_with_insn_predicates(): print(lp.generate_code_v2(knl).device_code()) +def test_conditional_access_range_with_parameters(ctx_factory): + ctx = ctx_factory() + queue = cl.CommandQueue(ctx) + + knl = lp.make_kernel( + ["{[i]: 0 <= i < 10}", + "{[j]: 0 <= j < problem_size}"], + """ + if i < 8 and j < problem_size + tmp[j, i] = tmp[j, i] + 1 + end + """, + [lp.GlobalArg("tmp", shape=("problem_size", 8,), dtype=np.int64), + lp.ValueArg("problem_size", dtype=np.int64)]) + + assert np.array_equal(knl(queue, tmp=np.arange(80).reshape((10, 8)), + problem_size=10)[1][0], np.arange(1, 81).reshape( + (10, 8))) + + if __name__ == "__main__": if len(sys.argv) > 1: exec(sys.argv[1]) -- GitLab From 228a5ba6b321522a86da607217d3f5a4934ecb6f Mon Sep 17 00:00:00 2001 From: Nick Date: Thu, 13 Sep 2018 13:05:40 -0400 Subject: [PATCH 316/415] Add test where half of logical and predicate is data-dependent (and will fail) but other half will succeed to test --- test/test_loopy.py | 21 ++++++++++++++++++++- 1 file changed, 20 insertions(+), 1 deletion(-) diff --git a/test/test_loopy.py b/test/test_loopy.py index aa10f6334..e3b4829cc 100644 --- a/test/test_loopy.py +++ b/test/test_loopy.py @@ -2938,7 +2938,7 @@ def test_conditional_access_range_with_parameters(ctx_factory): knl = lp.make_kernel( ["{[i]: 0 <= i < 10}", - "{[j]: 0 <= j < problem_size}"], + "{[j]: 0 <= j < problem_size+2}"], """ if i < 8 and j < problem_size tmp[j, i] = tmp[j, i] + 1 @@ -2951,6 +2951,25 @@ def test_conditional_access_range_with_parameters(ctx_factory): problem_size=10)[1][0], np.arange(1, 81).reshape( (10, 8))) + # test a conditional that's only _half_ data-dependent to ensure the other + # half works + knl = lp.make_kernel( + ["{[i]: 0 <= i < 10}", + "{[j]: 0 <= j < problem_size}"], + """ + if i < 8 and (j + offset) < problem_size + tmp[j, i] = tmp[j, i] + 1 + end + """, + [lp.GlobalArg("tmp", shape=("problem_size", 8,), dtype=np.int64), + lp.ValueArg("problem_size", dtype=np.int64), + lp.ValueArg("offset", dtype=np.int64)]) + + assert np.array_equal(knl(queue, tmp=np.arange(80).reshape((10, 8)), + problem_size=10, + offset=0)[1][0], np.arange(1, 81).reshape( + (10, 8))) + if __name__ == "__main__": if len(sys.argv) > 1: -- GitLab From 5f7595ac877001d010f0d7c5aeb2716e39e060a7 Mon Sep 17 00:00:00 2001 From: Kaushik Kulkarni Date: Fri, 30 Oct 2020 17:30:56 -0500 Subject: [PATCH 317/415] add insn's dependency value args to the domain's space --- loopy/check.py | 20 +++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/loopy/check.py b/loopy/check.py index e16c43c53..bb3080945 100644 --- a/loopy/check.py +++ b/loopy/check.py @@ -481,11 +481,29 @@ def check_bounds(kernel): temp_var_names = set(kernel.temporary_variables) for insn in kernel.instructions: domain = kernel.get_inames_domain(kernel.insn_inames(insn)) + domain_param_names = set(domain.get_var_names(dim_type.param)) # data-dependent bounds? 
can't do much - if set(domain.get_var_names(dim_type.param)) & temp_var_names: + if domain_param_names & temp_var_names: continue + # {{{ add read-only ValueArgs to domain + + from loopy.kernel.data import ValueArg + + valueargs_to_add = ({arg.name for arg in kernel.args + if isinstance(arg, ValueArg) + and arg.name not in kernel.get_written_variables()} + - domain_param_names) & insn.read_dependency_names() + + while valueargs_to_add: + arg_to_add = valueargs_to_add.pop() + idim = domain.dim(isl.dim_type.param) + domain = domain.add_dims(isl.dim_type.param, 1) + domain = domain.set_dim_name(isl.dim_type.param, idim, arg_to_add) + + # }}} + acm = _AccessCheckMapper(kernel, insn.id) domain, assumptions = isl.align_two(domain, kernel.assumptions) domain_with_assumptions = domain & assumptions -- GitLab From ceeab80e631033cad3a12d4fea94630c6b906cf5 Mon Sep 17 00:00:00 2001 From: Kaushik Kulkarni Date: Sat, 31 Oct 2020 00:23:12 -0500 Subject: [PATCH 318/415] use comprehensions instead of loops --- loopy/symbolic.py | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/loopy/symbolic.py b/loopy/symbolic.py index db06026d4..3fd3fcf06 100644 --- a/loopy/symbolic.py +++ b/loopy/symbolic.py @@ -1011,16 +1011,13 @@ class RuleAwareIdentityMapper(IdentityMapper): non_insn_self = partial(self, kernel=kernel, insn=None) - new_args = [] - for arg in kernel.args: - if isinstance(arg, ArrayBase): - arg = arg.map_exprs(non_insn_self) + new_args = [ + arg.map_exprs(non_insn_self) if isinstance(arg, ArrayBase) else arg + for arg in kernel.args] - new_args.append(arg) - - new_tvs = {} - for tv_name, tv in kernel.temporary_variables.items(): - new_tvs[tv_name] = tv.map_exprs(non_insn_self) + new_tvs = { + tv_name: tv.map_exprs(non_insn_self) + for tv_name, tv in kernel.temporary_variables.items()} # variables names, domain dim names not expressions => do not map -- GitLab From 0b8589a23b38ff369483f30e2d5992835f56dc9c Mon Sep 17 00:00:00 2001 From: Kaushik Kulkarni Date: Sat, 31 Oct 2020 00:24:23 -0500 Subject: [PATCH 319/415] test_rename_arguments: actually test renamed argument not present in the gen code --- test/test_transform.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/test/test_transform.py b/test/test_transform.py index aa9572de9..ccaaebc19 100644 --- a/test/test_transform.py +++ b/test/test_transform.py @@ -670,7 +670,7 @@ def test_add_inames_for_unused_hw_axes(ctx_factory): parameters={"n": n}) -def test_rename_argument_of_domain_params(): +def test_rename_argument_of_domain_params(ctx_factory): knl = lp.make_kernel( "{[i, j]: 0<=i Date: Sat, 31 Oct 2020 00:44:43 -0500 Subject: [PATCH 320/415] explain a code branch --- loopy/symbolic.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/loopy/symbolic.py b/loopy/symbolic.py index 3fd3fcf06..e03e27a48 100644 --- a/loopy/symbolic.py +++ b/loopy/symbolic.py @@ -1033,12 +1033,15 @@ class RuleAwareSubstitutionMapper(RuleAwareIdentityMapper): self.within = within def map_variable(self, expr, expn_state): - if expn_state.instruction is not None: - if (expr.name in expn_state.arg_context - or not self.within(expn_state.kernel, expn_state.instruction, - expn_state.stack)): - return super().map_variable( - expr, expn_state) + if expn_state.instruction is None: + # expr not a part of instruction => mimic SubstitutionMapper + return SubstitutionMapper.map_variable(self, expr) + + if (expr.name in expn_state.arg_context + or not self.within(expn_state.kernel, 
expn_state.instruction, + expn_state.stack)): + return super().map_variable( + expr, expn_state) result = self.subst_func(expr) if result is not None: -- GitLab From a5cc253720e421b70e8dfb04e02a0e2a9d89fffd Mon Sep 17 00:00:00 2001 From: Kaushik Kulkarni Date: Sat, 31 Oct 2020 00:47:44 -0500 Subject: [PATCH 321/415] get rid of spurious diff --- loopy/symbolic.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/loopy/symbolic.py b/loopy/symbolic.py index e03e27a48..6a6f55196 100644 --- a/loopy/symbolic.py +++ b/loopy/symbolic.py @@ -1038,8 +1038,8 @@ class RuleAwareSubstitutionMapper(RuleAwareIdentityMapper): return SubstitutionMapper.map_variable(self, expr) if (expr.name in expn_state.arg_context - or not self.within(expn_state.kernel, expn_state.instruction, - expn_state.stack)): + or not self.within( + expn_state.kernel, expn_state.instruction, expn_state.stack)): return super().map_variable( expr, expn_state) -- GitLab From 14aba3bf1bd09763541df0db4fa3460202b02f2b Mon Sep 17 00:00:00 2001 From: Kaushik Kulkarni Date: Sat, 31 Oct 2020 16:23:14 -0500 Subject: [PATCH 322/415] RuleAwareSubstitionMapper is not a SubstitutionMapper for within=False => implement the argument expression mappings in an adhoc manner --- loopy/symbolic.py | 27 ++------------------------- loopy/transform/data.py | 21 +++++++++++++++++++-- 2 files changed, 21 insertions(+), 27 deletions(-) diff --git a/loopy/symbolic.py b/loopy/symbolic.py index 6a6f55196..dfacb4438 100644 --- a/loopy/symbolic.py +++ b/loopy/symbolic.py @@ -979,10 +979,6 @@ class RuleAwareIdentityMapper(IdentityMapper): return sym def __call__(self, expr, kernel, insn): - """ - :arg insn: A :class:`~loopy.kernel.InstructionBase` of which *expr* is - a part of, or *None* if *expr*'s source is not an instruction. 
- """ from loopy.kernel.data import InstructionBase assert insn is None or isinstance(insn, InstructionBase) @@ -1006,23 +1002,7 @@ class RuleAwareIdentityMapper(IdentityMapper): lambda expr: self(expr, kernel, insn))) for insn in kernel.instructions] - from loopy.kernel.array import ArrayBase - from functools import partial - - non_insn_self = partial(self, kernel=kernel, insn=None) - - new_args = [ - arg.map_exprs(non_insn_self) if isinstance(arg, ArrayBase) else arg - for arg in kernel.args] - - new_tvs = { - tv_name: tv.map_exprs(non_insn_self) - for tv_name, tv in kernel.temporary_variables.items()} - - # variables names, domain dim names not expressions => do not map - - return kernel.copy(instructions=new_insns, args=new_args, - temporary_variables=new_tvs) + return kernel.copy(instructions=new_insns) class RuleAwareSubstitutionMapper(RuleAwareIdentityMapper): @@ -1033,13 +1013,10 @@ class RuleAwareSubstitutionMapper(RuleAwareIdentityMapper): self.within = within def map_variable(self, expr, expn_state): - if expn_state.instruction is None: - # expr not a part of instruction => mimic SubstitutionMapper - return SubstitutionMapper.map_variable(self, expr) - if (expr.name in expn_state.arg_context or not self.within( expn_state.kernel, expn_state.instruction, expn_state.stack)): + # expr not in within => do nothing (call IdentityMapper) return super().map_variable( expr, expn_state) diff --git a/loopy/transform/data.py b/loopy/transform/data.py index 82d770808..9c4725c0d 100644 --- a/loopy/transform/data.py +++ b/loopy/transform/data.py @@ -637,6 +637,7 @@ def rename_argument(kernel, old_name, new_name, existing_ok=False): subst_dict = {old_name: var(new_name)} from loopy.symbolic import ( + SubstitutionMapper, RuleAwareSubstitutionMapper, SubstitutionRuleMappingContext) from pymbolic.mapper.substitutor import make_subst_func @@ -646,21 +647,36 @@ def rename_argument(kernel, old_name, new_name, existing_ok=False): make_subst_func(subst_dict), within=lambda kernel, insn, stack: True) - kernel = smap.map_kernel(kernel) + kernel = rule_mapping_context.finish_kernel(smap.map_kernel(kernel)) # }}} + subst_mapper = SubstitutionMapper(make_subst_func(subst_dict)) + # {{{ args + from loopy.kernel.array import ArrayBase + new_args = [] for arg in kernel.args: if arg.name == old_name: arg = arg.copy(name=new_name) + if isinstance(arg, ArrayBase): + arg = arg.map_exprs(subst_mapper) + new_args.append(arg) # }}} + # {{{ tvs + + new_tvs = { + tv_name: tv.map_exprs(subst_mapper) + for tv_name, tv in kernel.temporary_variables.items()} + + # }}} + # {{{ domain new_domains = [] @@ -674,7 +690,8 @@ def rename_argument(kernel, old_name, new_name, existing_ok=False): # }}} - return kernel.copy(domains=new_domains, args=new_args) + return kernel.copy(domains=new_domains, args=new_args, + temporary_variables=new_tvs) # }}} -- GitLab From 9fb45d266e1484262cb29b1e168031ca1ba738d9 Mon Sep 17 00:00:00 2001 From: Kaushik Kulkarni Date: Tue, 3 Nov 2020 02:00:12 -0600 Subject: [PATCH 323/415] move _condition_to_set to loopy.symbolic --- loopy/check.py | 28 +++++----------------------- loopy/symbolic.py | 19 +++++++++++++++++++ 2 files changed, 24 insertions(+), 23 deletions(-) diff --git a/loopy/check.py b/loopy/check.py index bb3080945..24d8dba93 100644 --- a/loopy/check.py +++ b/loopy/check.py @@ -24,8 +24,7 @@ THE SOFTWARE. 
from islpy import dim_type import islpy as isl from loopy.symbolic import WalkMapper -from loopy.diagnostic import (LoopyError, WriteRaceConditionWarning, - warn_with_kernel, ExpressionToAffineConversionError) +from loopy.diagnostic import LoopyError, WriteRaceConditionWarning, warn_with_kernel from loopy.type_inference import TypeInferenceMapper from loopy.kernel.instruction import (MultiAssignmentBase, CallInstruction, CInstruction, _DataObliviousInstruction) @@ -375,25 +374,6 @@ def check_for_data_dependent_parallel_bounds(kernel): # {{{ check access bounds -def _condition_to_set(space, expr): - """ - Returns an instance of :class:`islpy.Set` if *expr* can be expressed as an - ISL-set on *space*, if not then returns *None*. - """ - from loopy.symbolic import get_dependencies - if get_dependencies(expr) <= frozenset( - space.get_var_dict()): - try: - from loopy.symbolic import isl_set_from_expr - return isl_set_from_expr(space, expr) - except ExpressionToAffineConversionError: - # non-affine condition: can't do much - return None - else: - # data-dependent condition: can't do much - return None - - class _AccessCheckMapper(WalkMapper): def __init__(self, kernel, insn_id): self.kernel = kernel @@ -464,7 +444,8 @@ class _AccessCheckMapper(WalkMapper): % (expr, self.insn_id, access_range, shape_domain)) def map_if(self, expr, domain): - then_set = _condition_to_set(domain.space, expr.condition) + from loopy.symbolic import condition_to_set + then_set = condition_to_set(domain.space, expr.condition) if then_set is None: then_set = else_set = isl.BasicSet.universe(domain.space) else: @@ -513,7 +494,8 @@ def check_bounds(kernel): insn_preds_set = isl.BasicSet.universe(domain.space) for predicate in insn.predicates: - predicate_as_isl_set = _condition_to_set(domain.space, predicate) + from loopy.symbolic import condition_to_set + predicate_as_isl_set = condition_to_set(domain.space, predicate) if predicate_as_isl_set is not None: insn_preds_set = insn_preds_set & predicate_as_isl_set diff --git a/loopy/symbolic.py b/loopy/symbolic.py index 7e5de3164..6d428d606 100644 --- a/loopy/symbolic.py +++ b/loopy/symbolic.py @@ -1679,6 +1679,25 @@ def isl_set_from_expr(space, expr): return set_ + +def condition_to_set(space, expr): + """ + Returns an instance of :class:`islpy.Set` if *expr* can be expressed as an + ISL-set on *space*, if not then returns *None*. 
+ """ + from loopy.symbolic import get_dependencies + if get_dependencies(expr) <= frozenset( + space.get_var_dict()): + try: + from loopy.symbolic import isl_set_from_expr + return isl_set_from_expr(space, expr) + except ExpressionToAffineConversionError: + # non-affine condition: can't do much + return None + else: + # data-dependent condition: can't do much + return None + # }}} -- GitLab From 74211876ac6a6424372464dab90f6ff331d87669 Mon Sep 17 00:00:00 2001 From: Kaushik Kulkarni Date: Tue, 3 Nov 2020 02:01:34 -0600 Subject: [PATCH 324/415] docs: justifies what we do when condition not expressible as ISL set --- loopy/check.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/loopy/check.py b/loopy/check.py index 24d8dba93..a8bb7612c 100644 --- a/loopy/check.py +++ b/loopy/check.py @@ -447,6 +447,8 @@ class _AccessCheckMapper(WalkMapper): from loopy.symbolic import condition_to_set then_set = condition_to_set(domain.space, expr.condition) if then_set is None: + # condition cannot be inferred as ISL expression => ignore + # for domain contributions enforced by it then_set = else_set = isl.BasicSet.universe(domain.space) else: else_set = then_set.complement() -- GitLab From 82e3d6d827e286ced59d440024d0f653a4b36931 Mon Sep 17 00:00:00 2001 From: Kaushik Kulkarni Date: Tue, 3 Nov 2020 02:02:53 -0600 Subject: [PATCH 325/415] formatting: while -> for --- loopy/check.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/loopy/check.py b/loopy/check.py index a8bb7612c..221914657 100644 --- a/loopy/check.py +++ b/loopy/check.py @@ -479,8 +479,7 @@ def check_bounds(kernel): and arg.name not in kernel.get_written_variables()} - domain_param_names) & insn.read_dependency_names() - while valueargs_to_add: - arg_to_add = valueargs_to_add.pop() + for arg_to_add in valueargs_to_add: idim = domain.dim(isl.dim_type.param) domain = domain.add_dims(isl.dim_type.param, 1) domain = domain.set_dim_name(isl.dim_type.param, idim, arg_to_add) -- GitLab From b7f397d5526fc36c3d446cfabece2997b4150961 Mon Sep 17 00:00:00 2001 From: Kaushik Kulkarni Date: Tue, 3 Nov 2020 02:44:01 -0600 Subject: [PATCH 326/415] re-organize check_bounds and move parts of code to InstructionBase.get_domain --- loopy/check.py | 37 +++-------------------------- loopy/kernel/instruction.py | 46 +++++++++++++++++++++++++++++++++++++ 2 files changed, 49 insertions(+), 34 deletions(-) diff --git a/loopy/check.py b/loopy/check.py index 221914657..e8a5f9dca 100644 --- a/loopy/check.py +++ b/loopy/check.py @@ -463,49 +463,18 @@ def check_bounds(kernel): """ temp_var_names = set(kernel.temporary_variables) for insn in kernel.instructions: - domain = kernel.get_inames_domain(kernel.insn_inames(insn)) - domain_param_names = set(domain.get_var_names(dim_type.param)) + domain = insn.get_domain(kernel) # data-dependent bounds? 
can't do much - if domain_param_names & temp_var_names: + if set(domain.get_var_names(dim_type.param)) & temp_var_names: continue - # {{{ add read-only ValueArgs to domain - - from loopy.kernel.data import ValueArg - - valueargs_to_add = ({arg.name for arg in kernel.args - if isinstance(arg, ValueArg) - and arg.name not in kernel.get_written_variables()} - - domain_param_names) & insn.read_dependency_names() - - for arg_to_add in valueargs_to_add: - idim = domain.dim(isl.dim_type.param) - domain = domain.add_dims(isl.dim_type.param, 1) - domain = domain.set_dim_name(isl.dim_type.param, idim, arg_to_add) - - # }}} - acm = _AccessCheckMapper(kernel, insn.id) domain, assumptions = isl.align_two(domain, kernel.assumptions) domain_with_assumptions = domain & assumptions - # {{{ handle insns predicates - - insn_preds_set = isl.BasicSet.universe(domain.space) - - for predicate in insn.predicates: - from loopy.symbolic import condition_to_set - predicate_as_isl_set = condition_to_set(domain.space, predicate) - if predicate_as_isl_set is not None: - insn_preds_set = insn_preds_set & predicate_as_isl_set - - # }}} - - domain_with_assumptions_with_pred = domain_with_assumptions & insn_preds_set - def run_acm(expr): - acm(expr, domain_with_assumptions_with_pred) + acm(expr, domain_with_assumptions) return expr insn.with_transformed_expressions(run_acm) diff --git a/loopy/kernel/instruction.py b/loopy/kernel/instruction.py index 791ea89a6..8471d39f0 100644 --- a/loopy/kernel/instruction.py +++ b/loopy/kernel/instruction.py @@ -25,6 +25,7 @@ from pytools import ImmutableRecord, memoize_method from loopy.diagnostic import LoopyError from loopy.tools import Optional from warnings import warn +import islpy as isl # {{{ instructions: base class @@ -146,6 +147,7 @@ class InstructionBase(ImmutableRecord): .. automethod:: with_transformed_expressions .. automethod:: write_dependency_names .. automethod:: dependency_names + .. automethod:: get_domain .. automethod:: copy """ @@ -409,6 +411,50 @@ class InstructionBase(ImmutableRecord): self.within_inames = ( intern_frozenset_of_ids(self.within_inames)) + def get_domain(self, kernel): + """ + Returns an instance of :class:`islpy.Set` for the instruction's domain. + + .. note:: + + Does not take into account additional hints available through + :attr:`loopy.LoopKernel.assumptions`. 
+ """ + domain = kernel.get_inames_domain(self.within_inames) + + # {{{ add read-only ValueArgs to domain + + from loopy.kernel.data import ValueArg + + valueargs_to_add = ({arg.name for arg in kernel.args + if isinstance(arg, ValueArg) + and arg.name not in kernel.get_written_variables()} + - set(domain.get_var_names(isl.dim_type.param))) + + # only consider valueargs relevant to *self* + valueargs_to_add = valueargs_to_add & self.read_dependency_names() + + for arg_to_add in valueargs_to_add: + idim = domain.dim(isl.dim_type.param) + domain = domain.add_dims(isl.dim_type.param, 1) + domain = domain.set_dim_name(isl.dim_type.param, idim, arg_to_add) + + # }}} + + # {{{ enforce restriction from predicates + + insn_preds_set = isl.BasicSet.universe(domain.space) + + for predicate in self.predicates: + from loopy.symbolic import condition_to_set + predicate_as_isl_set = condition_to_set(domain.space, predicate) + if predicate_as_isl_set is not None: + insn_preds_set = insn_preds_set & predicate_as_isl_set + + # }}} + + return domain & insn_preds_set + # }}} -- GitLab From 430d54246e0b54a39b49305dee85584aa56626ab Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Wed, 4 Nov 2020 11:32:40 -0600 Subject: [PATCH 327/415] simplify_using_aff: Restrict usage of inames to those that are already there --- loopy/symbolic.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/loopy/symbolic.py b/loopy/symbolic.py index 7e5de3164..cda89aa70 100644 --- a/loopy/symbolic.py +++ b/loopy/symbolic.py @@ -1525,7 +1525,13 @@ def qpolynomial_from_expr(space, expr): def simplify_using_aff(kernel, expr): inames = get_dependencies(expr) & kernel.all_inames() - domain = kernel.get_inames_domain(inames) + # FIXME: Ideally, we should find out what inames are usable and allow + # the simplification to use all of those. For now, fall back to making + # sure that the sipmlification only uses inames that were already there. 
+ domain = ( + kernel + .get_inames_domain(inames) + .project_out_except(inames, [dim_type.set])) try: aff = guarded_aff_from_expr(domain.space, expr) -- GitLab From c8212b6a3c38bd6d94f54e1abd76e5d2054a75ce Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Wed, 4 Nov 2020 13:39:53 -0600 Subject: [PATCH 328/415] Factor set processing in split_iname into separate function, rename variable split_iname -> iname_to_split --- loopy/transform/iname.py | 138 +++++++++++++++++++++------------------ 1 file changed, 74 insertions(+), 64 deletions(-) diff --git a/loopy/transform/iname.py b/loopy/transform/iname.py index 241c1492d..4c47abc39 100644 --- a/loopy/transform/iname.py +++ b/loopy/transform/iname.py @@ -118,25 +118,25 @@ def prioritize_loops(kernel, loop_priority): class _InameSplitter(RuleAwareIdentityMapper): def __init__(self, rule_mapping_context, within, - split_iname, outer_iname, inner_iname, replacement_index): + iname_to_split, outer_iname, inner_iname, replacement_index): super().__init__(rule_mapping_context) self.within = within - self.split_iname = split_iname + self.iname_to_split = iname_to_split self.outer_iname = outer_iname self.inner_iname = inner_iname self.replacement_index = replacement_index def map_reduction(self, expr, expn_state): - if (self.split_iname in expr.inames - and self.split_iname not in expn_state.arg_context + if (self.iname_to_split in expr.inames + and self.iname_to_split not in expn_state.arg_context and self.within( expn_state.kernel, expn_state.instruction)): new_inames = list(expr.inames) - new_inames.remove(self.split_iname) + new_inames.remove(self.iname_to_split) new_inames.extend([self.outer_iname, self.inner_iname]) from loopy.symbolic import Reduction @@ -147,8 +147,8 @@ class _InameSplitter(RuleAwareIdentityMapper): return super().map_reduction(expr, expn_state) def map_variable(self, expr, expn_state): - if (expr.name == self.split_iname - and self.split_iname not in expn_state.arg_context + if (expr.name == self.iname_to_split + and self.iname_to_split not in expn_state.arg_context and self.within( expn_state.kernel, expn_state.instruction)): @@ -157,7 +157,49 @@ class _InameSplitter(RuleAwareIdentityMapper): return super().map_variable(expr, expn_state) -def _split_iname_backend(kernel, split_iname, +def _split_iname_in_set(s, iname_to_split, inner_iname, outer_iname, fixed_length, + fixed_length_is_inner, split_iname_should_remain): + var_dict = s.get_var_dict() + + if iname_to_split not in var_dict: + return s + + orig_dim_type, _ = var_dict[iname_to_split] + + outer_var_nr = s.dim(orig_dim_type) + inner_var_nr = s.dim(orig_dim_type)+1 + + s = s.add_dims(orig_dim_type, 2) + s = s.set_dim_name(orig_dim_type, outer_var_nr, outer_iname) + s = s.set_dim_name(orig_dim_type, inner_var_nr, inner_iname) + + from loopy.isl_helpers import make_slab + + if fixed_length_is_inner: + fixed_iname, var_length_iname = inner_iname, outer_iname + else: + fixed_iname, var_length_iname = outer_iname, inner_iname + + space = s.get_space() + fixed_constraint_set = ( + make_slab(space, fixed_iname, 0, fixed_length) + # name = fixed_iname + fixed_length*var_length_iname + .add_constraint(isl.Constraint.eq_from_names( + space, { + iname_to_split: 1, + fixed_iname: -1, + var_length_iname: -fixed_length}))) + + name_dim_type, name_idx = space.get_var_dict()[iname_to_split] + s = s.intersect(fixed_constraint_set) + + if split_iname_should_remain: + return s + else: + return s.project_out(name_dim_type, name_idx, 1) + + +def _split_iname_backend(kernel, 
iname_to_split, fixed_length, fixed_length_is_inner, make_new_loop_index, outer_iname=None, inner_iname=None, @@ -186,88 +228,55 @@ def _split_iname_backend(kernel, split_iname, # }}} - existing_tags = kernel.iname_tags(split_iname) + existing_tags = kernel.iname_tags(iname_to_split) from loopy.kernel.data import ForceSequentialTag, filter_iname_tags_by_type if (do_tagged_check and existing_tags and not filter_iname_tags_by_type(existing_tags, ForceSequentialTag)): - raise LoopyError("cannot split already tagged iname '%s'" % split_iname) + raise LoopyError(f"cannot split already tagged iname '{iname_to_split}'") - if split_iname not in kernel.all_inames(): - raise ValueError("cannot split loop for unknown variable '%s'" % split_iname) + if iname_to_split not in kernel.all_inames(): + raise ValueError( + f"cannot split loop for unknown variable '{iname_to_split}'") applied_iname_rewrites = kernel.applied_iname_rewrites[:] vng = kernel.get_var_name_generator() if outer_iname is None: - outer_iname = vng(split_iname+"_outer") + outer_iname = vng(iname_to_split+"_outer") if inner_iname is None: - inner_iname = vng(split_iname+"_inner") - - def process_set(s): - var_dict = s.get_var_dict() - - if split_iname not in var_dict: - return s - - orig_dim_type, _ = var_dict[split_iname] - - outer_var_nr = s.dim(orig_dim_type) - inner_var_nr = s.dim(orig_dim_type)+1 - - s = s.add_dims(orig_dim_type, 2) - s = s.set_dim_name(orig_dim_type, outer_var_nr, outer_iname) - s = s.set_dim_name(orig_dim_type, inner_var_nr, inner_iname) - - from loopy.isl_helpers import make_slab - - if fixed_length_is_inner: - fixed_iname, var_length_iname = inner_iname, outer_iname - else: - fixed_iname, var_length_iname = outer_iname, inner_iname - - space = s.get_space() - fixed_constraint_set = ( - make_slab(space, fixed_iname, 0, fixed_length) - # name = fixed_iname + fixed_length*var_length_iname - .add_constraint(isl.Constraint.eq_from_names( - space, { - split_iname: 1, - fixed_iname: -1, - var_length_iname: -fixed_length}))) - - name_dim_type, name_idx = space.get_var_dict()[split_iname] - s = s.intersect(fixed_constraint_set) + inner_iname = vng(iname_to_split+"_inner") - def _project_out_only_if_all_instructions_in_within(): - for insn in kernel.instructions: - if split_iname in insn.within_inames and ( - not within(kernel, insn)): - return s - - return s.project_out(name_dim_type, name_idx, 1) - - return _project_out_only_if_all_instructions_in_within() + all_insns_using_iname_in_within = all( + # "does not use iname or is targeted by the within" + # <=> + # "'uses iname' implies within" + iname_to_split not in insn.within_inames or within(kernel, insn) + for insn in kernel.instructions) - new_domains = [process_set(dom) for dom in kernel.domains] + new_domains = [ + _split_iname_in_set(dom, iname_to_split, inner_iname, outer_iname, + fixed_length, fixed_length_is_inner, + split_iname_should_remain=not all_insns_using_iname_in_within) + for dom in kernel.domains] from pymbolic import var inner = var(inner_iname) outer = var(outer_iname) new_loop_index = make_new_loop_index(inner, outer) - subst_map = {var(split_iname): new_loop_index} + subst_map = {var(iname_to_split): new_loop_index} applied_iname_rewrites.append(subst_map) # {{{ update within_inames new_insns = [] for insn in kernel.instructions: - if split_iname in insn.within_inames and ( + if iname_to_split in insn.within_inames and ( within(kernel, insn)): new_within_inames = ( (insn.within_inames.copy() - - frozenset([split_iname])) + - 
frozenset([iname_to_split])) | frozenset([outer_iname, inner_iname])) else: new_within_inames = insn.within_inames @@ -286,7 +295,7 @@ def _split_iname_backend(kernel, split_iname, for prio in kernel.loop_priority: new_prio = () for prio_iname in prio: - if prio_iname == split_iname: + if prio_iname == iname_to_split: new_prio = new_prio + (outer_iname, inner_iname) else: new_prio = new_prio + (prio_iname,) @@ -302,7 +311,7 @@ def _split_iname_backend(kernel, split_iname, rule_mapping_context = SubstitutionRuleMappingContext( kernel.substitutions, kernel.get_var_name_generator()) ins = _InameSplitter(rule_mapping_context, within, - split_iname, outer_iname, inner_iname, new_loop_index) + iname_to_split, outer_iname, inner_iname, new_loop_index) kernel = ins.map_kernel(kernel) kernel = rule_mapping_context.finish_kernel(kernel) @@ -319,6 +328,7 @@ def _split_iname_backend(kernel, split_iname, # {{{ split iname def split_iname(kernel, split_iname, inner_length, + *, outer_iname=None, inner_iname=None, outer_tag=None, inner_tag=None, slabs=(0, 0), do_tagged_check=True, -- GitLab From 0a18085ff2016fe4e71edc68f3f6bf54041d68bb Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Wed, 4 Nov 2020 14:01:34 -0600 Subject: [PATCH 329/415] Fix split_iname logic to use temporary duplicate iname for split, indepependent of original --- loopy/transform/iname.py | 31 ++++++++++++++++++++++--------- 1 file changed, 22 insertions(+), 9 deletions(-) diff --git a/loopy/transform/iname.py b/loopy/transform/iname.py index 4c47abc39..f52969669 100644 --- a/loopy/transform/iname.py +++ b/loopy/transform/iname.py @@ -165,13 +165,25 @@ def _split_iname_in_set(s, iname_to_split, inner_iname, outer_iname, fixed_lengt return s orig_dim_type, _ = var_dict[iname_to_split] + assert orig_dim_type == dim_type.set + del orig_dim_type - outer_var_nr = s.dim(orig_dim_type) - inner_var_nr = s.dim(orig_dim_type)+1 + # NB: dup_iname_to_split is not a globally valid identifier: only uniqure + # wrt the set s. 
+ from pytools import generate_unique_names + for dup_iname_to_split in generate_unique_names(f"dup_{iname_to_split}"): + if dup_iname_to_split not in var_dict: + break - s = s.add_dims(orig_dim_type, 2) - s = s.set_dim_name(orig_dim_type, outer_var_nr, outer_iname) - s = s.set_dim_name(orig_dim_type, inner_var_nr, inner_iname) + from loopy.isl_helpers import duplicate_axes + s = duplicate_axes(s, (iname_to_split,), (dup_iname_to_split,)) + + outer_var_nr = s.dim(dim_type.set) + inner_var_nr = s.dim(dim_type.set)+1 + + s = s.add_dims(dim_type.set, 2) + s = s.set_dim_name(dim_type.set, outer_var_nr, outer_iname) + s = s.set_dim_name(dim_type.set, inner_var_nr, inner_iname) from loopy.isl_helpers import make_slab @@ -181,21 +193,22 @@ def _split_iname_in_set(s, iname_to_split, inner_iname, outer_iname, fixed_lengt fixed_iname, var_length_iname = outer_iname, inner_iname space = s.get_space() - fixed_constraint_set = ( + s = s & ( make_slab(space, fixed_iname, 0, fixed_length) # name = fixed_iname + fixed_length*var_length_iname .add_constraint(isl.Constraint.eq_from_names( space, { - iname_to_split: 1, + dup_iname_to_split: 1, fixed_iname: -1, var_length_iname: -fixed_length}))) - name_dim_type, name_idx = space.get_var_dict()[iname_to_split] - s = s.intersect(fixed_constraint_set) + _, dup_name_idx = space.get_var_dict()[dup_iname_to_split] + s = s.project_out(dim_type.set, dup_name_idx, 1) if split_iname_should_remain: return s else: + name_dim_type, name_idx = space.get_var_dict()[iname_to_split] return s.project_out(name_dim_type, name_idx, 1) -- GitLab From 355a0c37913ff92c11bec8f6723c59fff7989b18 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Wed, 4 Nov 2020 14:02:16 -0600 Subject: [PATCH 330/415] Add test_split_iname_within (gh-163) --- test/test_loopy.py | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/test/test_loopy.py b/test/test_loopy.py index 41b5315e8..753a8df2e 100644 --- a/test/test_loopy.py +++ b/test/test_loopy.py @@ -2920,6 +2920,33 @@ def test_access_check_with_conditionals(): lp.generate_code_v2(legal_but_nonaffine_condition_knl) +def test_split_iname_within(ctx_factory): + # https://github.com/inducer/loopy/issues/163 + ctx = ctx_factory() + + # Two bugs: + # - simplify_using_aff introduces variables that have no business being there + # - independent copies of i/j should remain + knl = lp.make_kernel( + "{ [i, j]: 0<=i 1: exec(sys.argv[1]) -- GitLab From 6a93094d278224dd7ec086eb0c0702c46bc907a7 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Wed, 4 Nov 2020 14:04:07 -0600 Subject: [PATCH 331/415] Remove stray temp comment from test_split_iname_within --- test/test_loopy.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/test/test_loopy.py b/test/test_loopy.py index 753a8df2e..2ac08f8c4 100644 --- a/test/test_loopy.py +++ b/test/test_loopy.py @@ -2924,9 +2924,6 @@ def test_split_iname_within(ctx_factory): # https://github.com/inducer/loopy/issues/163 ctx = ctx_factory() - # Two bugs: - # - simplify_using_aff introduces variables that have no business being there - # - independent copies of i/j should remain knl = lp.make_kernel( "{ [i, j]: 0<=i Date: Mon, 9 Nov 2020 15:07:36 -0600 Subject: [PATCH 332/415] _split_iname_in_set: Remove assertion that orig_dim_type is set --- loopy/transform/iname.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/loopy/transform/iname.py b/loopy/transform/iname.py index f52969669..05d6562f5 100644 --- a/loopy/transform/iname.py +++ b/loopy/transform/iname.py 
@@ -165,7 +165,8 @@ def _split_iname_in_set(s, iname_to_split, inner_iname, outer_iname, fixed_lengt return s orig_dim_type, _ = var_dict[iname_to_split] - assert orig_dim_type == dim_type.set + # orig_dim_type may be set or param (the latter if the iname is + # used as a parameter in a subdomain). del orig_dim_type # NB: dup_iname_to_split is not a globally valid identifier: only uniqure @@ -178,12 +179,12 @@ def _split_iname_in_set(s, iname_to_split, inner_iname, outer_iname, fixed_lengt from loopy.isl_helpers import duplicate_axes s = duplicate_axes(s, (iname_to_split,), (dup_iname_to_split,)) - outer_var_nr = s.dim(dim_type.set) - inner_var_nr = s.dim(dim_type.set)+1 + outer_var_nr = s.dim(orig_dim_type) + inner_var_nr = s.dim(orig_dim_type)+1 - s = s.add_dims(dim_type.set, 2) - s = s.set_dim_name(dim_type.set, outer_var_nr, outer_iname) - s = s.set_dim_name(dim_type.set, inner_var_nr, inner_iname) + s = s.add_dims(orig_dim_type, 2) + s = s.set_dim_name(orig_dim_type, outer_var_nr, outer_iname) + s = s.set_dim_name(orig_dim_type, inner_var_nr, inner_iname) from loopy.isl_helpers import make_slab @@ -203,7 +204,7 @@ def _split_iname_in_set(s, iname_to_split, inner_iname, outer_iname, fixed_lengt var_length_iname: -fixed_length}))) _, dup_name_idx = space.get_var_dict()[dup_iname_to_split] - s = s.project_out(dim_type.set, dup_name_idx, 1) + s = s.project_out(orig_dim_type, dup_name_idx, 1) if split_iname_should_remain: return s -- GitLab From d3cd7487d2cb92f1246f1032af08934e1d9dd32a Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Mon, 9 Nov 2020 15:09:27 -0600 Subject: [PATCH 333/415] Fix some comment typos relating to gh-167 --- loopy/symbolic.py | 2 +- loopy/transform/iname.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/loopy/symbolic.py b/loopy/symbolic.py index cda89aa70..3cdc0708d 100644 --- a/loopy/symbolic.py +++ b/loopy/symbolic.py @@ -1527,7 +1527,7 @@ def simplify_using_aff(kernel, expr): # FIXME: Ideally, we should find out what inames are usable and allow # the simplification to use all of those. For now, fall back to making - # sure that the sipmlification only uses inames that were already there. + # sure that the simplification only uses inames that were already there. domain = ( kernel .get_inames_domain(inames) diff --git a/loopy/transform/iname.py b/loopy/transform/iname.py index 05d6562f5..cb52b48bb 100644 --- a/loopy/transform/iname.py +++ b/loopy/transform/iname.py @@ -169,7 +169,7 @@ def _split_iname_in_set(s, iname_to_split, inner_iname, outer_iname, fixed_lengt # used as a parameter in a subdomain). del orig_dim_type - # NB: dup_iname_to_split is not a globally valid identifier: only uniqure + # NB: dup_iname_to_split is not a globally valid identifier: only unique # wrt the set s. 
from pytools import generate_unique_names for dup_iname_to_split in generate_unique_names(f"dup_{iname_to_split}"): -- GitLab From 3974763c8c1b004adf654cc9ef83aa45d1e4d60d Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Mon, 9 Nov 2020 15:13:03 -0600 Subject: [PATCH 334/415] Delete stray del orig_dim_type in_split_iname_in_set --- loopy/transform/iname.py | 1 - 1 file changed, 1 deletion(-) diff --git a/loopy/transform/iname.py b/loopy/transform/iname.py index cb52b48bb..372f972e5 100644 --- a/loopy/transform/iname.py +++ b/loopy/transform/iname.py @@ -167,7 +167,6 @@ def _split_iname_in_set(s, iname_to_split, inner_iname, outer_iname, fixed_lengt orig_dim_type, _ = var_dict[iname_to_split] # orig_dim_type may be set or param (the latter if the iname is # used as a parameter in a subdomain). - del orig_dim_type # NB: dup_iname_to_split is not a globally valid identifier: only unique # wrt the set s. -- GitLab From 750687c66640c7eb7f020d61e2c205c3a7ea782e Mon Sep 17 00:00:00 2001 From: Nicholas Christensen Date: Tue, 10 Nov 2020 05:15:45 -0600 Subject: [PATCH 335/415] add tags to loopy --- loopy/kernel/array.py | 3 ++- loopy/kernel/data.py | 11 ++++------- 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/loopy/kernel/array.py b/loopy/kernel/array.py index 6b0248f4f..9033ebb14 100644 --- a/loopy/kernel/array.py +++ b/loopy/kernel/array.py @@ -652,7 +652,7 @@ class ArrayBase(ImmutableRecord): def __init__(self, name, dtype=None, shape=None, dim_tags=None, offset=0, dim_names=None, strides=None, order=None, for_atomic=False, - target=None, alignment=None, + target=None, alignment=None, tags=None, **kwargs): """ All of the following (except *name*) are optional. @@ -848,6 +848,7 @@ class ArrayBase(ImmutableRecord): order=order, alignment=alignment, for_atomic=for_atomic, + tags=tags, **kwargs) def __eq__(self, other): diff --git a/loopy/kernel/data.py b/loopy/kernel/data.py index ce401d647..e7f7cd731 100644 --- a/loopy/kernel/data.py +++ b/loopy/kernel/data.py @@ -339,8 +339,6 @@ class KernelArgument(ImmutableRecord): dtype = kwargs.pop("dtype", None) - tags = kwargs.pop("tags", None) # noqa: F841 - if "for_atomic" in kwargs: for_atomic = kwargs["for_atomic"] else: @@ -359,7 +357,6 @@ class KernelArgument(ImmutableRecord): DeprecationWarning, stacklevel=2) dtype = None - kwargs["dtype"] = dtype ImmutableRecord.__init__(self, **kwargs) @@ -381,13 +378,13 @@ class ArrayArg(ArrayBase, KernelArgument): allowed_extra_kwargs = [ "address_space", - "is_output_only"] + "is_output_only", + "tags"] def __init__(self, *args, **kwargs): if "address_space" not in kwargs: raise TypeError("'address_space' must be specified") kwargs["is_output_only"] = kwargs.pop("is_output_only", False) - super().__init__(*args, **kwargs) min_target_axes = 0 @@ -455,13 +452,13 @@ class ImageArg(ArrayBase, KernelArgument): class ValueArg(KernelArgument): def __init__(self, name, dtype=None, approximately=1000, target=None, - is_output_only=False): + is_output_only=False,tags=None): KernelArgument.__init__(self, name=name, dtype=dtype, approximately=approximately, target=target, - is_output_only=is_output_only) + is_output_only=is_output_only,tags=tags) def __str__(self): import loopy as lp -- GitLab From 70231657027019bebd18440099d07038163de5fb Mon Sep 17 00:00:00 2001 From: Nicholas Christensen Date: Tue, 10 Nov 2020 05:21:10 -0600 Subject: [PATCH 336/415] placate flake8 --- loopy/kernel/data.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/loopy/kernel/data.py 
b/loopy/kernel/data.py index e7f7cd731..82cf2f4c7 100644 --- a/loopy/kernel/data.py +++ b/loopy/kernel/data.py @@ -452,13 +452,13 @@ class ImageArg(ArrayBase, KernelArgument): class ValueArg(KernelArgument): def __init__(self, name, dtype=None, approximately=1000, target=None, - is_output_only=False,tags=None): + is_output_only=False, tags=None): KernelArgument.__init__(self, name=name, dtype=dtype, approximately=approximately, target=target, - is_output_only=is_output_only,tags=tags) + is_output_only=is_output_only, tags=tags) def __str__(self): import loopy as lp -- GitLab From c36ab97fe07d43f344410454955cae8edfca275d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Kl=C3=B6ckner?= Date: Wed, 11 Nov 2020 18:56:57 +0100 Subject: [PATCH 337/415] Require avx2 node tag for Gitlab examples CI --- .gitlab-ci.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index d69f0b8c4..f0e9aa0e5 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -89,6 +89,8 @@ Python 3 POCL Examples: - python3 - pocl - large-node + # For examples/python/ispc-stream-harness.py + - avx2 except: - tags -- GitLab From e6d7d6b12b48abe3d6ddf313f2018cdef7b18f71 Mon Sep 17 00:00:00 2001 From: Kaushik Kulkarni Date: Wed, 11 Nov 2020 20:18:14 -0600 Subject: [PATCH 338/415] define the scope of RuleAwareSubstitutionMapper --- loopy/symbolic.py | 32 +++++++++++++++++++++++++++++++- 1 file changed, 31 insertions(+), 1 deletion(-) diff --git a/loopy/symbolic.py b/loopy/symbolic.py index dfacb4438..e170a7854 100644 --- a/loopy/symbolic.py +++ b/loopy/symbolic.py @@ -1006,11 +1006,41 @@ class RuleAwareIdentityMapper(IdentityMapper): class RuleAwareSubstitutionMapper(RuleAwareIdentityMapper): + """ + Mapper to substitute expressions and record any divergence of substitution + rule expressions of :class:`loopy.LoopKernel`. + + .. attribute:: rule_mapping_context + + An instance of :class:`SubstitutionRuleMappingContext` to record + divergence of substitution rules. + + .. attribute:: within + + An instance of :class:`loopy.match.StackMatchComponent`. + :class:`RuleAwareSubstitutionMapper` would perform + substitutions in the expression if the stack match is ``True`` or + if the expression does not arise from an :class:`~loopy.InstructionBase`. + + .. note:: + + The mapped kernel should be passed through + :meth:`SubstitutionRuleMappingContext.finish_kernel` to perform any + renaming mandated by the rule expression divergences. + """ def __init__(self, rule_mapping_context, subst_func, within): super().__init__(rule_mapping_context) self.subst_func = subst_func - self.within = within + self._within = within + + def within(self, kernel, instruction, stack): + if instruction is None: + # always perform substitutions on expressions not coming from + # instructions. 
+ return True + else: + return self._within(kernel, instruction, stack) def map_variable(self, expr, expn_state): if (expr.name in expn_state.arg_context -- GitLab From 6e34b689c06f352b39ebacdd08b2829f436cdf0e Mon Sep 17 00:00:00 2001 From: Kaushik Kulkarni Date: Wed, 11 Nov 2020 20:27:13 -0600 Subject: [PATCH 339/415] argument shape expresssions should be handled in RuleAwareSubstitutionMapper.map_kernel --- loopy/symbolic.py | 28 +++++++++++++++++++++++++++- loopy/transform/data.py | 19 +------------------ 2 files changed, 28 insertions(+), 19 deletions(-) diff --git a/loopy/symbolic.py b/loopy/symbolic.py index e170a7854..ccfc1723a 100644 --- a/loopy/symbolic.py +++ b/loopy/symbolic.py @@ -1002,7 +1002,33 @@ class RuleAwareIdentityMapper(IdentityMapper): lambda expr: self(expr, kernel, insn))) for insn in kernel.instructions] - return kernel.copy(instructions=new_insns) + from functools import partial + + non_insn_self = partial(self, kernel=kernel, insn=None) + + from loopy.kernel.array import ArrayBase + + # {{{ args + + new_args = [ + arg.map_exprs(non_insn_self) if isinstance(arg, ArrayBase) else arg + for arg in kernel.args] + + # }}} + + # {{{ tvs + + new_tvs = { + tv_name: tv.map_exprs(non_insn_self) + for tv_name, tv in kernel.temporary_variables.items()} + + # }}} + + # domains, var names: not exprs => do not map + + return kernel.copy(instructions=new_insns, + args=new_args, + temporary_variables=new_tvs) class RuleAwareSubstitutionMapper(RuleAwareIdentityMapper): diff --git a/loopy/transform/data.py b/loopy/transform/data.py index 9c4725c0d..e946a67c0 100644 --- a/loopy/transform/data.py +++ b/loopy/transform/data.py @@ -637,7 +637,6 @@ def rename_argument(kernel, old_name, new_name, existing_ok=False): subst_dict = {old_name: var(new_name)} from loopy.symbolic import ( - SubstitutionMapper, RuleAwareSubstitutionMapper, SubstitutionRuleMappingContext) from pymbolic.mapper.substitutor import make_subst_func @@ -651,32 +650,17 @@ def rename_argument(kernel, old_name, new_name, existing_ok=False): # }}} - subst_mapper = SubstitutionMapper(make_subst_func(subst_dict)) - # {{{ args - from loopy.kernel.array import ArrayBase - new_args = [] for arg in kernel.args: if arg.name == old_name: arg = arg.copy(name=new_name) - if isinstance(arg, ArrayBase): - arg = arg.map_exprs(subst_mapper) - new_args.append(arg) # }}} - # {{{ tvs - - new_tvs = { - tv_name: tv.map_exprs(subst_mapper) - for tv_name, tv in kernel.temporary_variables.items()} - - # }}} - # {{{ domain new_domains = [] @@ -690,8 +674,7 @@ def rename_argument(kernel, old_name, new_name, existing_ok=False): # }}} - return kernel.copy(domains=new_domains, args=new_args, - temporary_variables=new_tvs) + return kernel.copy(domains=new_domains, args=new_args) # }}} -- GitLab From 039a687459745eea22e00182cd2252a8a590084e Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Wed, 11 Nov 2020 21:37:41 -0600 Subject: [PATCH 340/415] Track iname dim_types in rewritten _split_iname_in_set --- loopy/transform/iname.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/loopy/transform/iname.py b/loopy/transform/iname.py index 372f972e5..d2704a024 100644 --- a/loopy/transform/iname.py +++ b/loopy/transform/iname.py @@ -202,13 +202,13 @@ def _split_iname_in_set(s, iname_to_split, inner_iname, outer_iname, fixed_lengt fixed_iname: -1, var_length_iname: -fixed_length}))) - _, dup_name_idx = space.get_var_dict()[dup_iname_to_split] - s = s.project_out(orig_dim_type, dup_name_idx, 1) + dup_iname_dim_type, dup_name_idx = 
space.get_var_dict()[dup_iname_to_split] + s = s.project_out(dup_iname_dim_type, dup_name_idx, 1) if split_iname_should_remain: return s else: - name_dim_type, name_idx = space.get_var_dict()[iname_to_split] + name_dim_type, name_idx = s.space.get_var_dict()[iname_to_split] return s.project_out(name_dim_type, name_idx, 1) -- GitLab From 2376269efb84f0d38d7a389176c864c23d3c9f96 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Wed, 11 Nov 2020 22:37:50 -0600 Subject: [PATCH 341/415] remove_unused_inames: Project inames out of all domains instead of using DomainChanger --- loopy/transform/iname.py | 20 +++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/loopy/transform/iname.py b/loopy/transform/iname.py index d2704a024..eba1d5612 100644 --- a/loopy/transform/iname.py +++ b/loopy/transform/iname.py @@ -1220,16 +1220,22 @@ def remove_unused_inames(kernel, inames=None): # {{{ remove them - from loopy.kernel.tools import DomainChanger - + domains = kernel.domains for iname in unused_inames: - domch = DomainChanger(kernel, (iname,)) + new_domains = [] + + for dom in domains: + try: + dt, idx = dom.get_var_dict()[iname] + except KeyError: + pass + else: + dom = dom.project_out(dt, idx, 1) + new_domains.append(dom) - dom = domch.domain - dt, idx = dom.get_var_dict()[iname] - dom = dom.project_out(dt, idx, 1) + domains = new_domains - kernel = kernel.copy(domains=domch.get_domains_with(dom)) + kernel = kernel.copy(domains=domains) # }}} -- GitLab From 8e4d3ebe50cfa91484cfcfd0cae8a8701c488811 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Wed, 11 Nov 2020 22:38:38 -0600 Subject: [PATCH 342/415] split_iname: use remove_unused_inames --- loopy/transform/iname.py | 23 +++++++---------------- 1 file changed, 7 insertions(+), 16 deletions(-) diff --git a/loopy/transform/iname.py b/loopy/transform/iname.py index eba1d5612..fefa7ed5f 100644 --- a/loopy/transform/iname.py +++ b/loopy/transform/iname.py @@ -158,7 +158,7 @@ class _InameSplitter(RuleAwareIdentityMapper): def _split_iname_in_set(s, iname_to_split, inner_iname, outer_iname, fixed_length, - fixed_length_is_inner, split_iname_should_remain): + fixed_length_is_inner): var_dict = s.get_var_dict() if iname_to_split not in var_dict: @@ -205,11 +205,7 @@ def _split_iname_in_set(s, iname_to_split, inner_iname, outer_iname, fixed_lengt dup_iname_dim_type, dup_name_idx = space.get_var_dict()[dup_iname_to_split] s = s.project_out(dup_iname_dim_type, dup_name_idx, 1) - if split_iname_should_remain: - return s - else: - name_dim_type, name_idx = s.space.get_var_dict()[iname_to_split] - return s.project_out(name_dim_type, name_idx, 1) + return s def _split_iname_backend(kernel, iname_to_split, @@ -260,17 +256,9 @@ def _split_iname_backend(kernel, iname_to_split, if inner_iname is None: inner_iname = vng(iname_to_split+"_inner") - all_insns_using_iname_in_within = all( - # "does not use iname or is targeted by the within" - # <=> - # "'uses iname' implies within" - iname_to_split not in insn.within_inames or within(kernel, insn) - for insn in kernel.instructions) - new_domains = [ _split_iname_in_set(dom, iname_to_split, inner_iname, outer_iname, - fixed_length, fixed_length_is_inner, - split_iname_should_remain=not all_insns_using_iname_in_within) + fixed_length, fixed_length_is_inner) for dom in kernel.domains] from pymbolic import var @@ -333,7 +321,10 @@ def _split_iname_backend(kernel, iname_to_split, kernel = tag_inames(kernel, {outer_iname: existing_tag, inner_iname: existing_tag}) - return tag_inames(kernel, 
{outer_iname: outer_tag, inner_iname: inner_tag}) + kernel = tag_inames(kernel, {outer_iname: outer_tag, inner_iname: inner_tag}) + kernel = remove_unused_inames(kernel, [iname_to_split]) + + return kernel # }}} -- GitLab From a445efd38e4679bcb9c4b66e3c3891bc0f7ca4fd Mon Sep 17 00:00:00 2001 From: Nicholas Christensen Date: Fri, 13 Nov 2020 01:32:10 -0600 Subject: [PATCH 343/415] document tags attribute --- loopy/kernel/array.py | 4 +++- loopy/kernel/data.py | 5 +++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/loopy/kernel/array.py b/loopy/kernel/array.py index 9033ebb14..b4468fa15 100644 --- a/loopy/kernel/array.py +++ b/loopy/kernel/array.py @@ -691,7 +691,9 @@ class ArrayBase(ImmutableRecord): using atomic-capable data types. :arg offset: (See :attr:`offset`) :arg alignment: memory alignment in bytes - + :arg tags: A metadata tag or list of metadata tags intended for + consumption by an application. These could be strings or instances + of :class:`pytools.tag` for example. """ for kwarg_name in kwargs: diff --git a/loopy/kernel/data.py b/loopy/kernel/data.py index 82cf2f4c7..ff4e8f218 100644 --- a/loopy/kernel/data.py +++ b/loopy/kernel/data.py @@ -453,6 +453,11 @@ class ImageArg(ArrayBase, KernelArgument): class ValueArg(KernelArgument): def __init__(self, name, dtype=None, approximately=1000, target=None, is_output_only=False, tags=None): + """ + :arg tags: A metadata tag or list of metadata tags intended for + consumption by an application. These could be strings or instances + of :class:`pytools.tag` for example. + """ KernelArgument.__init__(self, name=name, dtype=dtype, -- GitLab From af807eaab1189b61e990ebe7d8222920b3de0b6e Mon Sep 17 00:00:00 2001 From: Nicholas Christensen Date: Fri, 13 Nov 2020 01:51:16 -0600 Subject: [PATCH 344/415] fully specify class name --- loopy/kernel/array.py | 2 +- loopy/kernel/data.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/loopy/kernel/array.py b/loopy/kernel/array.py index b4468fa15..3bd8d227a 100644 --- a/loopy/kernel/array.py +++ b/loopy/kernel/array.py @@ -693,7 +693,7 @@ class ArrayBase(ImmutableRecord): :arg alignment: memory alignment in bytes :arg tags: A metadata tag or list of metadata tags intended for consumption by an application. These could be strings or instances - of :class:`pytools.tag` for example. + of :class:`pytools.tag.Tag` for example. """ for kwarg_name in kwargs: diff --git a/loopy/kernel/data.py b/loopy/kernel/data.py index ff4e8f218..504109e23 100644 --- a/loopy/kernel/data.py +++ b/loopy/kernel/data.py @@ -456,7 +456,7 @@ class ValueArg(KernelArgument): """ :arg tags: A metadata tag or list of metadata tags intended for consumption by an application. These could be strings or instances - of :class:`pytools.tag` for example. + of :class:`pytools.tag.Tag` for example. """ KernelArgument.__init__(self, name=name, -- GitLab From 171a47416edf3e60bd0e98853a7d7f1567c5a091 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Fri, 13 Nov 2020 14:31:29 -0600 Subject: [PATCH 345/415] Switch install docs to miniforge --- doc/misc.rst | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/doc/misc.rst b/doc/misc.rst index 4c8c9867f..e8bcefc65 100644 --- a/doc/misc.rst +++ b/doc/misc.rst @@ -49,21 +49,18 @@ MacOS support computers: Everywhere else, just making sure you have the ``g++`` package should be enough. -#. Install `miniconda `_. - (Both Python 2 and 3 should work. In the absence of other constraints, prefer Python 3.) +#. Install `miniforge `_. -#. 
``export CONDA=/WHERE/YOU/INSTALLED/miniconda3`` +#. ``export CONDA=/WHERE/YOU/INSTALLED/miniforge3`` If you accepted the default location, this should work: - ``export CONDA=$HOME/miniconda3`` + ``export CONDA=$HOME/miniforge3`` #. ``$CONDA/bin/conda create -n dev`` #. ``source $CONDA/bin/activate dev`` -#. ``conda config --add channels conda-forge`` - #. ``conda install git pip pocl islpy pyopencl`` (Linux) or @@ -76,7 +73,7 @@ MacOS support computers: Next time you want to use :mod:`loopy`, just run the following command:: - source /WHERE/YOU/INSTALLED/miniconda3/bin/activate dev + source /WHERE/YOU/INSTALLED/miniforge3/bin/activate dev You may also like to add this to a startup file (like :file:`$HOME/.bashrc`) or create an alias for it. -- GitLab From d4428fa90f216df6df5410cdbcf3dae87a5f9dd6 Mon Sep 17 00:00:00 2001 From: Kaushik Kulkarni Date: Sun, 15 Nov 2020 13:23:51 -0600 Subject: [PATCH 346/415] FixedStrideArrayDimTag.map_expr: handle stride==lp.auto --- loopy/kernel/array.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/loopy/kernel/array.py b/loopy/kernel/array.py index 6b0248f4f..e1b12eeae 100644 --- a/loopy/kernel/array.py +++ b/loopy/kernel/array.py @@ -136,6 +136,12 @@ class FixedStrideArrayDimTag(_StrideArrayDimTagBase): return self.stringify(True) def map_expr(self, mapper): + from loopy.kernel.data import auto + + if self.stride is auto: + # lp.auto not an expr => do not map + return self + return self.copy(stride=mapper(self.stride)) -- GitLab From c94b2c3bdf25c0328777e4c2fb956414bd900ed6 Mon Sep 17 00:00:00 2001 From: Kaushik Kulkarni Date: Sun, 15 Nov 2020 13:24:46 -0600 Subject: [PATCH 347/415] add test_rename_argument_with_auto_stride --- test/test_transform.py | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/test/test_transform.py b/test/test_transform.py index ccaaebc19..daa659808 100644 --- a/test/test_transform.py +++ b/test/test_transform.py @@ -690,6 +690,29 @@ def test_rename_argument_of_domain_params(ctx_factory): lp.auto_test_vs_ref(knl, ctx_factory(), knl, parameters={"M": 10, "N": 4}) +def test_rename_argument_with_auto_stride(ctx_factory): + from loopy.kernel.array import FixedStrideArrayDimTag + + ctx = ctx_factory() + queue = cl.CommandQueue(ctx) + + knl = lp.make_kernel( + "{[i]: 0<=i<10}", + """ + y[i] = x[i] + """, [lp.GlobalArg("x", dtype=float, + shape=lp.auto, + dim_tags=[FixedStrideArrayDimTag(lp.auto)]), ...]) + + knl = lp.rename_argument(knl, "x", "x_new") + + code_str = lp.generate_code_v2(knl).device_code() + assert code_str.find("double const *__restrict__ x_new,") != -1 + assert code_str.find("double const *__restrict__ x,") == -1 + + evt, (out, ) = knl(queue, x_new=np.random.rand(10)) + + if __name__ == "__main__": if len(sys.argv) > 1: exec(sys.argv[1]) -- GitLab From dc57c07d53452fa8e6747d3d3814d9cf521db3c9 Mon Sep 17 00:00:00 2001 From: Kaushik Kulkarni Date: Mon, 16 Nov 2020 00:26:11 -0600 Subject: [PATCH 348/415] InstructionBase.get_domain -> get_insn_domain --- loopy/check.py | 3 +- loopy/kernel/instruction.py | 90 ++++++++++++++++++------------------- 2 files changed, 47 insertions(+), 46 deletions(-) diff --git a/loopy/check.py b/loopy/check.py index e8a5f9dca..910327850 100644 --- a/loopy/check.py +++ b/loopy/check.py @@ -461,9 +461,10 @@ def check_bounds(kernel): """ Performs out-of-bound check for every array access. 
""" + from loopy.kernel.instruction import get_insn_domain temp_var_names = set(kernel.temporary_variables) for insn in kernel.instructions: - domain = insn.get_domain(kernel) + domain = get_insn_domain(insn, kernel) # data-dependent bounds? can't do much if set(domain.get_var_names(dim_type.param)) & temp_var_names: diff --git a/loopy/kernel/instruction.py b/loopy/kernel/instruction.py index 8471d39f0..101d16624 100644 --- a/loopy/kernel/instruction.py +++ b/loopy/kernel/instruction.py @@ -147,7 +147,6 @@ class InstructionBase(ImmutableRecord): .. automethod:: with_transformed_expressions .. automethod:: write_dependency_names .. automethod:: dependency_names - .. automethod:: get_domain .. automethod:: copy """ @@ -411,50 +410,6 @@ class InstructionBase(ImmutableRecord): self.within_inames = ( intern_frozenset_of_ids(self.within_inames)) - def get_domain(self, kernel): - """ - Returns an instance of :class:`islpy.Set` for the instruction's domain. - - .. note:: - - Does not take into account additional hints available through - :attr:`loopy.LoopKernel.assumptions`. - """ - domain = kernel.get_inames_domain(self.within_inames) - - # {{{ add read-only ValueArgs to domain - - from loopy.kernel.data import ValueArg - - valueargs_to_add = ({arg.name for arg in kernel.args - if isinstance(arg, ValueArg) - and arg.name not in kernel.get_written_variables()} - - set(domain.get_var_names(isl.dim_type.param))) - - # only consider valueargs relevant to *self* - valueargs_to_add = valueargs_to_add & self.read_dependency_names() - - for arg_to_add in valueargs_to_add: - idim = domain.dim(isl.dim_type.param) - domain = domain.add_dims(isl.dim_type.param, 1) - domain = domain.set_dim_name(isl.dim_type.param, idim, arg_to_add) - - # }}} - - # {{{ enforce restriction from predicates - - insn_preds_set = isl.BasicSet.universe(domain.space) - - for predicate in self.predicates: - from loopy.symbolic import condition_to_set - predicate_as_isl_set = condition_to_set(domain.space, predicate) - if predicate_as_isl_set is not None: - insn_preds_set = insn_preds_set & predicate_as_isl_set - - # }}} - - return domain & insn_preds_set - # }}} @@ -1484,4 +1439,49 @@ def _check_and_fix_temp_var_type(temp_var_type, stacklevel=2): # }}} +def get_insn_domain(insn, kernel): + """ + Returns an instance of :class:`islpy.Set` for the *insn*'s domain. + + .. note:: + + Does not take into account additional hints available through + :attr:`loopy.LoopKernel.assumptions`. 
+ """ + domain = kernel.get_inames_domain(insn.within_inames) + + # {{{ add read-only ValueArgs to domain + + from loopy.kernel.data import ValueArg + + valueargs_to_add = ({arg.name for arg in kernel.args + if isinstance(arg, ValueArg) + and arg.name not in kernel.get_written_variables()} + - set(domain.get_var_names(isl.dim_type.param))) + + # only consider valueargs relevant to *insn* + valueargs_to_add = valueargs_to_add & insn.read_dependency_names() + + for arg_to_add in valueargs_to_add: + idim = domain.dim(isl.dim_type.param) + domain = domain.add_dims(isl.dim_type.param, 1) + domain = domain.set_dim_name(isl.dim_type.param, idim, arg_to_add) + + # }}} + + # {{{ enforce restriction from predicates + + insn_preds_set = isl.BasicSet.universe(domain.space) + + for predicate in insn.predicates: + from loopy.symbolic import condition_to_set + predicate_as_isl_set = condition_to_set(domain.space, predicate) + if predicate_as_isl_set is not None: + insn_preds_set = insn_preds_set & predicate_as_isl_set + + # }}} + + return domain & insn_preds_set + + # vim: foldmethod=marker -- GitLab From 14afe584b7ff70d8b3c54eb6f05e1dda9908176f Mon Sep 17 00:00:00 2001 From: Nicholas Christensen Date: Mon, 16 Nov 2020 00:34:00 -0600 Subject: [PATCH 349/415] only mention pytools.tag.Tag --- loopy/kernel/array.py | 4 ++-- loopy/kernel/data.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/loopy/kernel/array.py b/loopy/kernel/array.py index 3bd8d227a..2ae45a5e8 100644 --- a/loopy/kernel/array.py +++ b/loopy/kernel/array.py @@ -692,8 +692,8 @@ class ArrayBase(ImmutableRecord): :arg offset: (See :attr:`offset`) :arg alignment: memory alignment in bytes :arg tags: A metadata tag or list of metadata tags intended for - consumption by an application. These could be strings or instances - of :class:`pytools.tag.Tag` for example. + consumption by an application. It is intended these tags be + instances of :class:`pytools.tag.Tag. """ for kwarg_name in kwargs: diff --git a/loopy/kernel/data.py b/loopy/kernel/data.py index 504109e23..45021c2d1 100644 --- a/loopy/kernel/data.py +++ b/loopy/kernel/data.py @@ -455,8 +455,8 @@ class ValueArg(KernelArgument): is_output_only=False, tags=None): """ :arg tags: A metadata tag or list of metadata tags intended for - consumption by an application. These could be strings or instances - of :class:`pytools.tag.Tag` for example. + consumption by an application. It is intended these tags be + instances of :class:`pytools.tag.Tag`. """ KernelArgument.__init__(self, name=name, -- GitLab From 61610e65af206da51eaac08fdabad0562dd312dc Mon Sep 17 00:00:00 2001 From: Nicholas Christensen Date: Mon, 16 Nov 2020 01:07:11 -0600 Subject: [PATCH 350/415] fix doc generation --- loopy/kernel/array.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/loopy/kernel/array.py b/loopy/kernel/array.py index 2ae45a5e8..c1687bb03 100644 --- a/loopy/kernel/array.py +++ b/loopy/kernel/array.py @@ -693,7 +693,7 @@ class ArrayBase(ImmutableRecord): :arg alignment: memory alignment in bytes :arg tags: A metadata tag or list of metadata tags intended for consumption by an application. It is intended these tags be - instances of :class:`pytools.tag.Tag. + instances of :class:`pytools.tag.Tag`. 
""" for kwarg_name in kwargs: -- GitLab From d88204e1aa4fbf6c16d393cf061ae387e4eab7c5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Kl=C3=B6ckner?= Date: Tue, 17 Nov 2020 05:45:29 +0100 Subject: [PATCH 351/415] Specify unit of ArrayBase.offset --- loopy/kernel/array.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/loopy/kernel/array.py b/loopy/kernel/array.py index c004d69ec..4254171db 100644 --- a/loopy/kernel/array.py +++ b/loopy/kernel/array.py @@ -606,7 +606,8 @@ class ArrayBase(ImmutableRecord): .. attribute:: offset Offset from the beginning of the buffer to the point from - which the strides are counted. May be one of + which the strides are counted, in units of the :attr:`dtype`. + May be one of * 0 or None * a string (that is interpreted as an argument name). -- GitLab From ba9880a5c366a17966d6e6077e577b8251f31b44 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Kl=C3=B6ckner?= Date: Tue, 17 Nov 2020 14:42:24 -0600 Subject: [PATCH 352/415] Revert "Add tags to Loopy" --- loopy/kernel/array.py | 7 ++----- loopy/kernel/data.py | 14 +++++--------- setup.py | 2 +- 3 files changed, 8 insertions(+), 15 deletions(-) diff --git a/loopy/kernel/array.py b/loopy/kernel/array.py index 4254171db..d5b4284b8 100644 --- a/loopy/kernel/array.py +++ b/loopy/kernel/array.py @@ -659,7 +659,7 @@ class ArrayBase(ImmutableRecord): def __init__(self, name, dtype=None, shape=None, dim_tags=None, offset=0, dim_names=None, strides=None, order=None, for_atomic=False, - target=None, alignment=None, tags=None, + target=None, alignment=None, **kwargs): """ All of the following (except *name*) are optional. @@ -698,9 +698,7 @@ class ArrayBase(ImmutableRecord): using atomic-capable data types. :arg offset: (See :attr:`offset`) :arg alignment: memory alignment in bytes - :arg tags: A metadata tag or list of metadata tags intended for - consumption by an application. It is intended these tags be - instances of :class:`pytools.tag.Tag`. + """ for kwarg_name in kwargs: @@ -857,7 +855,6 @@ class ArrayBase(ImmutableRecord): order=order, alignment=alignment, for_atomic=for_atomic, - tags=tags, **kwargs) def __eq__(self, other): diff --git a/loopy/kernel/data.py b/loopy/kernel/data.py index 45021c2d1..43770ffb6 100644 --- a/loopy/kernel/data.py +++ b/loopy/kernel/data.py @@ -357,6 +357,7 @@ class KernelArgument(ImmutableRecord): DeprecationWarning, stacklevel=2) dtype = None + kwargs["dtype"] = dtype ImmutableRecord.__init__(self, **kwargs) @@ -378,13 +379,13 @@ class ArrayArg(ArrayBase, KernelArgument): allowed_extra_kwargs = [ "address_space", - "is_output_only", - "tags"] + "is_output_only"] def __init__(self, *args, **kwargs): if "address_space" not in kwargs: raise TypeError("'address_space' must be specified") kwargs["is_output_only"] = kwargs.pop("is_output_only", False) + super().__init__(*args, **kwargs) min_target_axes = 0 @@ -452,18 +453,13 @@ class ImageArg(ArrayBase, KernelArgument): class ValueArg(KernelArgument): def __init__(self, name, dtype=None, approximately=1000, target=None, - is_output_only=False, tags=None): - """ - :arg tags: A metadata tag or list of metadata tags intended for - consumption by an application. It is intended these tags be - instances of :class:`pytools.tag.Tag`. 
- """ + is_output_only=False): KernelArgument.__init__(self, name=name, dtype=dtype, approximately=approximately, target=target, - is_output_only=is_output_only, tags=tags) + is_output_only=is_output_only) def __str__(self): import loopy as lp diff --git a/setup.py b/setup.py index 084aaeab5..ddc47fefc 100644 --- a/setup.py +++ b/setup.py @@ -84,7 +84,7 @@ setup(name="loopy", python_requires="~=3.6", install_requires=[ - "pytools>=2020.4.2", + "pytools>=2020.4", "pymbolic>=2019.2", "genpy>=2016.1.2", "cgen>=2016.1", -- GitLab From 32c262687d7951603e45a02b9b0887d9909b0f1c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Kl=C3=B6ckner?= Date: Tue, 17 Nov 2020 16:13:16 -0600 Subject: [PATCH 353/415] Revert "Revert "Add tags to Loopy"" --- loopy/kernel/array.py | 7 +++++-- loopy/kernel/data.py | 14 +++++++++----- setup.py | 2 +- 3 files changed, 15 insertions(+), 8 deletions(-) diff --git a/loopy/kernel/array.py b/loopy/kernel/array.py index d5b4284b8..4254171db 100644 --- a/loopy/kernel/array.py +++ b/loopy/kernel/array.py @@ -659,7 +659,7 @@ class ArrayBase(ImmutableRecord): def __init__(self, name, dtype=None, shape=None, dim_tags=None, offset=0, dim_names=None, strides=None, order=None, for_atomic=False, - target=None, alignment=None, + target=None, alignment=None, tags=None, **kwargs): """ All of the following (except *name*) are optional. @@ -698,7 +698,9 @@ class ArrayBase(ImmutableRecord): using atomic-capable data types. :arg offset: (See :attr:`offset`) :arg alignment: memory alignment in bytes - + :arg tags: A metadata tag or list of metadata tags intended for + consumption by an application. It is intended these tags be + instances of :class:`pytools.tag.Tag`. """ for kwarg_name in kwargs: @@ -855,6 +857,7 @@ class ArrayBase(ImmutableRecord): order=order, alignment=alignment, for_atomic=for_atomic, + tags=tags, **kwargs) def __eq__(self, other): diff --git a/loopy/kernel/data.py b/loopy/kernel/data.py index 43770ffb6..45021c2d1 100644 --- a/loopy/kernel/data.py +++ b/loopy/kernel/data.py @@ -357,7 +357,6 @@ class KernelArgument(ImmutableRecord): DeprecationWarning, stacklevel=2) dtype = None - kwargs["dtype"] = dtype ImmutableRecord.__init__(self, **kwargs) @@ -379,13 +378,13 @@ class ArrayArg(ArrayBase, KernelArgument): allowed_extra_kwargs = [ "address_space", - "is_output_only"] + "is_output_only", + "tags"] def __init__(self, *args, **kwargs): if "address_space" not in kwargs: raise TypeError("'address_space' must be specified") kwargs["is_output_only"] = kwargs.pop("is_output_only", False) - super().__init__(*args, **kwargs) min_target_axes = 0 @@ -453,13 +452,18 @@ class ImageArg(ArrayBase, KernelArgument): class ValueArg(KernelArgument): def __init__(self, name, dtype=None, approximately=1000, target=None, - is_output_only=False): + is_output_only=False, tags=None): + """ + :arg tags: A metadata tag or list of metadata tags intended for + consumption by an application. It is intended these tags be + instances of :class:`pytools.tag.Tag`. 
+ """ KernelArgument.__init__(self, name=name, dtype=dtype, approximately=approximately, target=target, - is_output_only=is_output_only) + is_output_only=is_output_only, tags=tags) def __str__(self): import loopy as lp diff --git a/setup.py b/setup.py index ddc47fefc..084aaeab5 100644 --- a/setup.py +++ b/setup.py @@ -84,7 +84,7 @@ setup(name="loopy", python_requires="~=3.6", install_requires=[ - "pytools>=2020.4", + "pytools>=2020.4.2", "pymbolic>=2019.2", "genpy>=2016.1.2", "cgen>=2016.1", -- GitLab From c7f2c7f4808f7c1d13f0ed2bc0f280cda1bf590a Mon Sep 17 00:00:00 2001 From: Nicholas Christensen Date: Tue, 24 Nov 2020 13:58:46 -0600 Subject: [PATCH 354/415] Use Taggable class with ArrayBase and ValueArg --- loopy/kernel/array.py | 18 +++++++++++++----- loopy/kernel/data.py | 19 +++++++++++++------ loopy/version.py | 2 +- setup.py | 2 +- 4 files changed, 28 insertions(+), 13 deletions(-) diff --git a/loopy/kernel/array.py b/loopy/kernel/array.py index 4254171db..ba97c9088 100644 --- a/loopy/kernel/array.py +++ b/loopy/kernel/array.py @@ -26,6 +26,7 @@ THE SOFTWARE. import re from pytools import ImmutableRecord, memoize_method +from pytools.tag import Taggable import numpy as np # noqa @@ -563,7 +564,7 @@ def _parse_shape_or_strides(x): return tuple(_pymbolic_parse_if_necessary(xi) for xi in x) -class ArrayBase(ImmutableRecord): +class ArrayBase(ImmutableRecord, Taggable): """ .. attribute :: name @@ -643,6 +644,14 @@ class ArrayBase(ImmutableRecord): .. versionadded:: 2018.1 + .. attribute:: tags + + A (possibly empty) frozenset of instances of + :class:`pytools.tag.Tag` intended for + consumption by an application. + + ..versionadded: 2020.2.2 + .. automethod:: __init__ .. automethod:: __eq__ .. automethod:: num_user_axes @@ -659,7 +668,7 @@ class ArrayBase(ImmutableRecord): def __init__(self, name, dtype=None, shape=None, dim_tags=None, offset=0, dim_names=None, strides=None, order=None, for_atomic=False, - target=None, alignment=None, tags=None, + target=None, alignment=None, tags=frozenset(), **kwargs): """ All of the following (except *name*) are optional. @@ -698,9 +707,8 @@ class ArrayBase(ImmutableRecord): using atomic-capable data types. :arg offset: (See :attr:`offset`) :arg alignment: memory alignment in bytes - :arg tags: A metadata tag or list of metadata tags intended for - consumption by an application. It is intended these tags be - instances of :class:`pytools.tag.Tag`. + :arg tags: An instance of or an Iterable of instances of + :class:`pytools.tag.Tag`. """ for kwarg_name in kwargs: diff --git a/loopy/kernel/data.py b/loopy/kernel/data.py index 45021c2d1..0702ea618 100644 --- a/loopy/kernel/data.py +++ b/loopy/kernel/data.py @@ -27,6 +27,7 @@ THE SOFTWARE. from sys import intern import numpy as np # noqa from pytools import ImmutableRecord +from pytools.tag import Taggable from loopy.kernel.array import ArrayBase from loopy.diagnostic import LoopyError from loopy.kernel.instruction import ( # noqa @@ -449,14 +450,20 @@ class ImageArg(ArrayBase, KernelArgument): return ast_builder.get_image_arg_decl(self.name + name_suffix, shape, self.num_target_axes(), dtype, is_written) - -class ValueArg(KernelArgument): +""" + :attribute tags: A (possibly empty) frozenset of instances of + :class:`pytools.tag.Tag` intended for consumption by an + application. 
+ + ..versionadded: 2020.2.2 +""" +class ValueArg(KernelArgument, Taggable): def __init__(self, name, dtype=None, approximately=1000, target=None, - is_output_only=False, tags=None): + is_output_only=False, tags=frozenset()): """ - :arg tags: A metadata tag or list of metadata tags intended for - consumption by an application. It is intended these tags be - instances of :class:`pytools.tag.Tag`. + :arg tags: A an instance of or Iterable of instances of + :class:`pytools.tag.Tag` intended for consumption by an + application. """ KernelArgument.__init__(self, name=name, diff --git a/loopy/version.py b/loopy/version.py index fddd44479..6f66c5347 100644 --- a/loopy/version.py +++ b/loopy/version.py @@ -42,7 +42,7 @@ else: # }}} -VERSION = (2020, 2, 1) +VERSION = (2020, 2, 2) VERSION_STATUS = "" VERSION_TEXT = ".".join(str(x) for x in VERSION) + VERSION_STATUS diff --git a/setup.py b/setup.py index 084aaeab5..bd76d293a 100644 --- a/setup.py +++ b/setup.py @@ -84,7 +84,7 @@ setup(name="loopy", python_requires="~=3.6", install_requires=[ - "pytools>=2020.4.2", + "pytools>=2020.4.4", "pymbolic>=2019.2", "genpy>=2016.1.2", "cgen>=2016.1", -- GitLab From 32b0cd9393e8650ebf43022db9f6e1db4c3595cb Mon Sep 17 00:00:00 2001 From: Nicholas Christensen Date: Tue, 24 Nov 2020 14:02:27 -0600 Subject: [PATCH 355/415] placate flake8 --- loopy/kernel/data.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/loopy/kernel/data.py b/loopy/kernel/data.py index 0702ea618..be7ccc699 100644 --- a/loopy/kernel/data.py +++ b/loopy/kernel/data.py @@ -450,18 +450,21 @@ class ImageArg(ArrayBase, KernelArgument): return ast_builder.get_image_arg_decl(self.name + name_suffix, shape, self.num_target_axes(), dtype, is_written) + """ :attribute tags: A (possibly empty) frozenset of instances of :class:`pytools.tag.Tag` intended for consumption by an application. - + ..versionadded: 2020.2.2 """ + + class ValueArg(KernelArgument, Taggable): def __init__(self, name, dtype=None, approximately=1000, target=None, is_output_only=False, tags=frozenset()): """ - :arg tags: A an instance of or Iterable of instances of + :arg tags: A an instance of or Iterable of instances of :class:`pytools.tag.Tag` intended for consumption by an application. 
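[Editorial sketch, not part of the diffs above or below: a minimal usage
example for the Taggable-based argument interface added in this patch.
The tag class name is invented for illustration; the rest follows the
interface as introduced here and assumes a pytools recent enough to
provide pytools.tag (>= 2020.4.4, per this series).]

    import numpy as np
    import loopy as lp
    from pytools.tag import Tag, tag_dataclass

    @tag_dataclass
    class AppSpecificTag(Tag):
        """A hypothetical, application-defined metadata tag."""

    my_tag = AppSpecificTag()

    # ValueArg (and the ArrayBase subclasses) now accept a Tag instance or
    # an iterable of Tag instances through the 'tags' keyword argument.
    n_arg = lp.ValueArg("n", dtype=np.int32, tags=frozenset([my_tag]))
    assert my_tag in n_arg.tags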
""" -- GitLab From 87acfe3477d1abc21cf88474ab80a40ef59e3fd1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Kl=C3=B6ckner?= Date: Mon, 30 Nov 2020 10:52:45 -0600 Subject: [PATCH 356/415] Add '#egg=' tag to f2py requirement h/t @matthiasdiener https://github.com/illinois-ceesd/mirgecom/pull/162 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 2105aede0..1072cdec0 100644 --- a/requirements.txt +++ b/requirements.txt @@ -6,7 +6,7 @@ git+https://github.com/inducer/pymbolic.git#egg=pymbolic git+https://github.com/inducer/genpy.git#egg=genpy git+https://github.com/inducer/codepy.git#egg=codepy -git+https://github.com/inducer/f2py +git+https://github.com/inducer/f2py#egg=f2py # Optional, needed for using the C preprocessor on Fortran ply>=3.6 -- GitLab From 130c76658101795678196a55e2fc438e6613c511 Mon Sep 17 00:00:00 2001 From: Nicholas Christensen Date: Mon, 30 Nov 2020 17:35:37 -0600 Subject: [PATCH 357/415] change requirements.txt --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 2105aede0..d64f33279 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -git+https://github.com/inducer/pytools.git#egg=pytools +git+https://github.com/nchristensen/pytools.git@master#egg=pytools == 2020.4.4 git+https://github.com/inducer/islpy.git#egg=islpy git+https://github.com/inducer/cgen.git#egg=cgen git+https://github.com/inducer/pyopencl.git#egg=pyopencl -- GitLab From e9ebf7df739d5ee6a3b58da6a403c8a40334930f Mon Sep 17 00:00:00 2001 From: Nicholas Christensen Date: Mon, 30 Nov 2020 18:07:54 -0600 Subject: [PATCH 358/415] Trigger -- GitLab From 23a9b1a7c973026bf9bbf6d127163f159047790e Mon Sep 17 00:00:00 2001 From: Nicholas Christensen Date: Mon, 30 Nov 2020 19:55:27 -0600 Subject: [PATCH 359/415] Trigger CI -- GitLab From fa44cd08c6d1af3054e7534110603ce6ab4981e3 Mon Sep 17 00:00:00 2001 From: Nicholas Christensen Date: Mon, 30 Nov 2020 20:48:12 -0600 Subject: [PATCH 360/415] missing colon --- loopy/kernel/array.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/loopy/kernel/array.py b/loopy/kernel/array.py index ba97c9088..fc0ac7e87 100644 --- a/loopy/kernel/array.py +++ b/loopy/kernel/array.py @@ -650,7 +650,7 @@ class ArrayBase(ImmutableRecord, Taggable): :class:`pytools.tag.Tag` intended for consumption by an application. - ..versionadded: 2020.2.2 + ..versionadded:: 2020.2.2 .. automethod:: __init__ .. automethod:: __eq__ -- GitLab From 6b7bc1e2827f498366d3d12317ec5b308435abaa Mon Sep 17 00:00:00 2001 From: Nicholas Christensen Date: Mon, 30 Nov 2020 21:18:43 -0600 Subject: [PATCH 361/415] Trigger CI -- GitLab From 8c15ab2812731dae7f76bf92c674ac16b65270b3 Mon Sep 17 00:00:00 2001 From: Nicholas Christensen Date: Mon, 30 Nov 2020 21:31:56 -0600 Subject: [PATCH 362/415] missing tab --- loopy/kernel/array.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/loopy/kernel/array.py b/loopy/kernel/array.py index fc0ac7e87..b2982598a 100644 --- a/loopy/kernel/array.py +++ b/loopy/kernel/array.py @@ -650,7 +650,7 @@ class ArrayBase(ImmutableRecord, Taggable): :class:`pytools.tag.Tag` intended for consumption by an application. - ..versionadded:: 2020.2.2 + .. versionadded:: 2020.2.2 .. automethod:: __init__ .. 
automethod:: __eq__ @@ -708,7 +708,7 @@ class ArrayBase(ImmutableRecord, Taggable): :arg offset: (See :attr:`offset`) :arg alignment: memory alignment in bytes :arg tags: An instance of or an Iterable of instances of - :class:`pytools.tag.Tag`. + :class:`pytools.tag.Tag`. """ for kwarg_name in kwargs: -- GitLab From 4bee34179506fdb899cd7f4ffe077134ea62a10e Mon Sep 17 00:00:00 2001 From: Nicholas Christensen Date: Mon, 30 Nov 2020 22:04:26 -0600 Subject: [PATCH 363/415] default to None --- loopy/kernel/array.py | 3 +-- loopy/kernel/data.py | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/loopy/kernel/array.py b/loopy/kernel/array.py index b2982598a..9fd166ab8 100644 --- a/loopy/kernel/array.py +++ b/loopy/kernel/array.py @@ -668,8 +668,7 @@ class ArrayBase(ImmutableRecord, Taggable): def __init__(self, name, dtype=None, shape=None, dim_tags=None, offset=0, dim_names=None, strides=None, order=None, for_atomic=False, - target=None, alignment=None, tags=frozenset(), - **kwargs): + target=None, alignment=None, tags=None, **kwargs): """ All of the following (except *name*) are optional. Specify either strides or shape. diff --git a/loopy/kernel/data.py b/loopy/kernel/data.py index be7ccc699..6e454d925 100644 --- a/loopy/kernel/data.py +++ b/loopy/kernel/data.py @@ -462,7 +462,7 @@ class ImageArg(ArrayBase, KernelArgument): class ValueArg(KernelArgument, Taggable): def __init__(self, name, dtype=None, approximately=1000, target=None, - is_output_only=False, tags=frozenset()): + is_output_only=False, tags=None): """ :arg tags: A an instance of or Iterable of instances of :class:`pytools.tag.Tag` intended for consumption by an -- GitLab From 77f9036574f32fa33de0e0052da695d392c65eb3 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Tue, 1 Dec 2020 22:13:11 -0600 Subject: [PATCH 364/415] Switch to furo doc theme --- doc/conf.py | 79 +++++++++++++++++++++++---------------------------- doc/index.rst | 2 ++ 2 files changed, 37 insertions(+), 44 deletions(-) diff --git a/doc/conf.py b/doc/conf.py index 942afcd3c..7912290e1 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -21,32 +21,33 @@ import os # -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' +#needs_sphinx = "1.0" # Add any Sphinx extension module names here, as strings. They can be extensions -# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. +# coming with Sphinx (named "sphinx.ext.*") or your custom ones. extensions = [ - 'sphinx.ext.autodoc', - 'sphinx.ext.intersphinx', - #'sphinx.ext.viewcode', - 'sphinx.ext.doctest', + "sphinx.ext.autodoc", + "sphinx.ext.intersphinx", + #"sphinx.ext.viewcode", + "sphinx.ext.doctest", + "sphinx_copybutton", ] # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix of source filenames. -source_suffix = '.rst' +source_suffix = ".rst" # The encoding of source files. -#source_encoding = 'utf-8-sig' +#source_encoding = "utf-8-sig" # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. 
-project = 'loopy' -copyright = '2016, Andreas Klöckner' +project = "loopy" +copyright = "2016, Andreas Klöckner" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -59,7 +60,7 @@ with open(_version_source) as vpy_file: version_py = vpy_file.read() os.environ["AKPYTHON_EXEC_IMPORT_UNAVAILABLE"] = "1" -exec(compile(version_py, _version_source, 'exec'), ver_dic) +exec(compile(version_py, _version_source, "exec"), ver_dic) version = ".".join(str(x) for x in ver_dic["VERSION"]) # The full version, including alpha/beta/rc tags. release = ver_dic["VERSION_TEXT"] @@ -77,7 +78,7 @@ del os.environ["AKPYTHON_EXEC_IMPORT_UNAVAILABLE"] # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ['_build'] +exclude_patterns = ["_build"] # The reST default role (used for this markup: `text`) to use for all documents. #default_role = None @@ -94,7 +95,7 @@ exclude_patterns = ['_build'] #show_authors = False # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] @@ -102,23 +103,13 @@ pygments_style = 'sphinx' # -- Options for HTML output --------------------------------------------------- -html_theme = "alabaster" +html_theme = "furo" html_theme_options = { - "extra_nav_links": { - "🚀 Github": "https://github.com/inducer/loopy", - "💾 Download Releases": "https://pypi.org/project/loopy", - } } html_sidebars = { - '**': [ - 'about.html', - 'navigation.html', - 'relations.html', - 'searchbox.html', - ] -} + } # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the @@ -149,7 +140,7 @@ html_sidebars = { # so a file named "default.css" will overwrite the builtin "default.css". # html_static_path = ['_static'] -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# If not '', a "Last updated on:" timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' @@ -191,22 +182,22 @@ html_show_sourcelink = False #html_file_suffix = None # Output file base name for HTML help builder. -htmlhelp_basename = 'loopydoc' +htmlhelp_basename = "loopydoc" # -- Options for LaTeX output -------------------------------------------------- -# The paper size ('letter' or 'a4'). -#latex_paper_size = 'letter' +# The paper size ("letter" or "a4"). +#latex_paper_size = "letter" -# The font size ('10pt', '11pt' or '12pt'). -#latex_font_size = '10pt' +# The font size ("10pt", "11pt" or "12pt"). +#latex_font_size = "10pt" # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ - ('index', 'loopy.tex', 'loopy Documentation', - 'Andreas Kloeckner', 'manual'), + ("index", "loopy.tex", "loopy Documentation", + "Andreas Kloeckner", "manual"), ] # The name of an image file (relative to this directory) to place at the top of @@ -238,20 +229,20 @@ latex_documents = [ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). 
man_pages = [ - ('index', 'loopy', 'loopy Documentation', - ['Andreas Kloeckner'], 1) + ("index", "loopy", "loopy Documentation", + ["Andreas Kloeckner"], 1) ] # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = { - 'https://docs.python.org/3': None, - 'https://documen.tician.de/islpy': None, - 'https://documen.tician.de/pyopencl': None, - 'https://documen.tician.de/cgen': None, - 'https://docs.scipy.org/doc/numpy/': None, - 'https://documen.tician.de/pymbolic': None, - 'https://documen.tician.de/pytools': None, + "https://docs.python.org/3": None, + "https://documen.tician.de/islpy": None, + "https://documen.tician.de/pyopencl": None, + "https://documen.tician.de/cgen": None, + "https://docs.scipy.org/doc/numpy/": None, + "https://documen.tician.de/pymbolic": None, + "https://documen.tician.de/pytools": None, } autoclass_content = "class" diff --git a/doc/index.rst b/doc/index.rst index 8eb996f6b..7baff3249 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -46,6 +46,8 @@ Please check :ref:`installation` to get started. ref_other misc ref_internals + 🚀 Github + 💾 Download Releases Indices and tables ================== -- GitLab From 458d82d89183a6a96c0e063389d7491b9bdda2f1 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Tue, 1 Dec 2020 22:15:24 -0600 Subject: [PATCH 365/415] Fix numpy intersphinx link --- doc/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/conf.py b/doc/conf.py index 7912290e1..1e5deb5f3 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -237,10 +237,10 @@ man_pages = [ # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = { "https://docs.python.org/3": None, + "https://numpy.org/doc/stable/": None, "https://documen.tician.de/islpy": None, "https://documen.tician.de/pyopencl": None, "https://documen.tician.de/cgen": None, - "https://docs.scipy.org/doc/numpy/": None, "https://documen.tician.de/pymbolic": None, "https://documen.tician.de/pytools": None, } -- GitLab From affa83bbf20bc0993f01743353e0f17a99a2b933 Mon Sep 17 00:00:00 2001 From: Kaushik Kulkarni Date: Wed, 2 Dec 2020 12:36:00 -0600 Subject: [PATCH 366/415] LoopKernel.insn_inames(insn: InstructionBase) -> insn.within_inames --- loopy/check.py | 20 ++++++++++---------- loopy/codegen/instruction.py | 2 +- loopy/kernel/__init__.py | 2 +- loopy/kernel/creation.py | 4 ++-- loopy/kernel/tools.py | 12 ++++++------ loopy/preprocess.py | 16 ++++++++-------- loopy/schedule/__init__.py | 8 ++++---- loopy/statistics.py | 4 ++-- loopy/symbolic.py | 2 +- loopy/transform/iname.py | 2 +- loopy/transform/privatize.py | 2 +- 11 files changed, 37 insertions(+), 37 deletions(-) diff --git a/loopy/check.py b/loopy/check.py index 910327850..0bf02f7cf 100644 --- a/loopy/check.py +++ b/loopy/check.py @@ -215,7 +215,7 @@ def check_for_double_use_of_hw_axes(kernel): for insn in kernel.instructions: insn_tag_keys = set() - for iname in kernel.insn_inames(insn): + for iname in insn.within_inames: for tag in kernel.iname_tags_of_type(iname, UniqueTag): key = tag.key if key in insn_tag_keys: @@ -232,12 +232,12 @@ def check_for_inactive_iname_access(kernel): for insn in kernel.instructions: expression_inames = insn.read_dependency_names() & kernel.all_inames() - if not expression_inames <= kernel.insn_inames(insn): + if not expression_inames <= insn.within_inames: raise LoopyError( "instruction '%s' references " "inames '%s' that the instruction does not depend on" % (insn.id, - ", ".join(expression_inames - 
kernel.insn_inames(insn)))) + ", ".join(expression_inames - insn.within_inames))) def check_for_unused_inames(kernel): @@ -293,7 +293,7 @@ def check_for_write_races(kernel): insn.assignee_var_names(), insn.assignee_subscript_deps()): assignee_inames = assignee_indices & kernel.all_inames() - if not assignee_inames <= kernel.insn_inames(insn): + if not assignee_inames <= insn.within_inames: raise LoopyError( "assignee of instructions '%s' references " "iname that the instruction does not depend on" @@ -304,13 +304,13 @@ def check_for_write_races(kernel): # will cause write races. raceable_parallel_insn_inames = { - iname for iname in kernel.insn_inames(insn) + iname for iname in insn.within_inames if kernel.iname_tags_of_type(iname, ConcurrentTag)} elif assignee_name in kernel.temporary_variables: temp_var = kernel.temporary_variables[assignee_name] raceable_parallel_insn_inames = { - iname for iname in kernel.insn_inames(insn) + iname for iname in insn.within_inames if any(_is_racing_iname_tag(temp_var, tag) for tag in kernel.iname_tags(iname))} @@ -491,7 +491,7 @@ def check_write_destinations(kernel): if wvar in kernel.all_inames(): raise LoopyError("iname '%s' may not be written" % wvar) - insn_domain = kernel.get_inames_domain(kernel.insn_inames(insn)) + insn_domain = kernel.get_inames_domain(insn.within_inames) insn_params = set(insn_domain.get_var_names(dim_type.param)) if wvar in kernel.all_params(): @@ -936,7 +936,7 @@ def _check_for_unused_hw_axes_in_kernel_chunk(kernel, sched_index=None): group_axes_used = set() local_axes_used = set() - for iname in kernel.insn_inames(insn): + for iname in insn.within_inames: ltags = kernel.iname_tags_of_type(iname, LocalIndexTag, max_num=1) gtags = kernel.iname_tags_of_type(iname, GroupIndexTag, max_num=1) altags = kernel.iname_tags_of_type( @@ -1192,7 +1192,7 @@ def check_implemented_domains(kernel, implemented_domains, code=None): assert idomains - insn_inames = kernel.insn_inames(insn) + insn_inames = insn.within_inames # {{{ if we've checked the same thing before, no need to check it again @@ -1269,7 +1269,7 @@ def check_implemented_domains(kernel, implemented_domains, code=None): iname_to_dim = pt.get_space().get_var_dict() point_axes = [] - for iname in kernel.insn_inames(insn) | parameter_inames: + for iname in insn_inames | parameter_inames: tp, dim = iname_to_dim[iname] point_axes.append("%s=%d" % ( iname, pt.get_coordinate_val(tp, dim).to_python())) diff --git a/loopy/codegen/instruction.py b/loopy/codegen/instruction.py index 71133ef7c..14efb64f4 100644 --- a/loopy/codegen/instruction.py +++ b/loopy/codegen/instruction.py @@ -89,7 +89,7 @@ def generate_instruction_code(codegen_state, insn): else: raise RuntimeError("unexpected instruction type") - insn_inames = kernel.insn_inames(insn) + insn_inames = insn.within_inames return to_codegen_result( codegen_state, diff --git a/loopy/kernel/__init__.py b/loopy/kernel/__init__.py index 9088f3bfe..72a9f0c2e 100644 --- a/loopy/kernel/__init__.py +++ b/loopy/kernel/__init__.py @@ -824,7 +824,7 @@ class LoopKernel(ImmutableRecordWithoutPickling): result = { iname: set() for iname in self.all_inames()} for insn in self.instructions: - for iname in self.insn_inames(insn): + for iname in insn.within_inames: result[iname].add(insn.id) return result diff --git a/loopy/kernel/creation.py b/loopy/kernel/creation.py index a22fef9e8..94534382f 100644 --- a/loopy/kernel/creation.py +++ b/loopy/kernel/creation.py @@ -1523,7 +1523,7 @@ def determine_shapes_of_temporaries(knl): def 
feed_all_expressions(receiver): for insn in knl.instructions: insn.with_transformed_expressions( - lambda expr: receiver(expr, knl.insn_inames(insn))) + lambda expr: receiver(expr, insn.within_inames)) var_to_base_indices, var_to_shape, var_to_error = ( find_shapes_of_vars( @@ -1543,7 +1543,7 @@ def determine_shapes_of_temporaries(knl): def feed_assignee_of_instruction(receiver): for insn in knl.instructions: for assignee in insn.assignees: - receiver(assignee, knl.insn_inames(insn)) + receiver(assignee, insn.within_inames) var_to_base_indices_fallback, var_to_shape_fallback, var_to_error = ( find_shapes_of_vars( diff --git a/loopy/kernel/tools.py b/loopy/kernel/tools.py index 0b8d9841e..541bb45ce 100644 --- a/loopy/kernel/tools.py +++ b/loopy/kernel/tools.py @@ -685,7 +685,7 @@ def get_auto_axis_iname_ranking_by_stride(kernel, insn): from loopy.kernel.data import AutoLocalIndexTagBase auto_axis_inames = { - iname for iname in kernel.insn_inames(insn) + iname for iname in insn.within_inames if kernel.iname_tags_of_type(iname, AutoLocalIndexTagBase)} # }}} @@ -744,7 +744,7 @@ def get_auto_axis_iname_ranking_by_stride(kernel, insn): if aggregate_strides: very_large_stride = int(np.iinfo(np.int32).max) - return sorted((iname for iname in kernel.insn_inames(insn)), + return sorted((iname for iname in insn.within_inames), key=lambda iname: ( aggregate_strides.get(iname, very_large_stride), iname)) @@ -885,7 +885,7 @@ def assign_automatic_axes(kernel, axis=0, local_size=None): continue auto_axis_inames = [ - iname for iname in kernel.insn_inames(insn) + iname for iname in insn.within_inames if kernel.iname_tags_of_type(iname, AutoLocalIndexTagBase)] if not auto_axis_inames: @@ -893,7 +893,7 @@ def assign_automatic_axes(kernel, axis=0, local_size=None): assigned_local_axes = set() - for iname in kernel.insn_inames(insn): + for iname in insn.within_inames: tags = kernel.iname_tags_of_type(iname, LocalIndexTag, max_num=1) if tags: tag, = tags @@ -1000,7 +1000,7 @@ def guess_var_shape(kernel, var_name): submap = SubstitutionRuleExpander(kernel.substitutions) def run_through_armap(expr): - armap(submap(expr), kernel.insn_inames(insn)) + armap(submap(expr), insn.within_inames) return expr try: @@ -1533,7 +1533,7 @@ def stringify_instruction_list(kernel): raise LoopyError("unexpected instruction type: %s" % type(insn).__name__) - adapt_to_new_inames_list(kernel.insn_inames(insn)) + adapt_to_new_inames_list(insn.within_inames) options = ["id="+Fore.GREEN+insn.id+Style.RESET_ALL] if insn.priority: diff --git a/loopy/preprocess.py b/loopy/preprocess.py index 12f1cb469..ab5e703e4 100644 --- a/loopy/preprocess.py +++ b/loopy/preprocess.py @@ -1004,7 +1004,7 @@ def realize_reduction(kernel, insn_id_filter=None, unknown_types_ok=True, def map_reduction_seq(expr, rec, nresults, arg_dtypes, reduction_dtypes): - outer_insn_inames = temp_kernel.insn_inames(insn) + outer_insn_inames = insn.within_inames from loopy.kernel.data import AddressSpace acc_var_names = make_temporaries( @@ -1041,7 +1041,7 @@ def realize_reduction(kernel, insn_id_filter=None, unknown_types_ok=True, update_id = insn_id_gen( based_on="{}_{}_update".format(insn.id, "_".join(expr.inames))) - update_insn_iname_deps = temp_kernel.insn_inames(insn) | set(expr.inames) + update_insn_iname_deps = insn.within_inames | set(expr.inames) if insn.within_inames_is_final: update_insn_iname_deps = insn.within_inames | set(expr.inames) @@ -1126,7 +1126,7 @@ def realize_reduction(kernel, insn_id_filter=None, unknown_types_ok=True, size = 
_get_int_iname_size(red_iname) - outer_insn_inames = temp_kernel.insn_inames(insn) + outer_insn_inames = insn.within_inames from loopy.kernel.data import LocalIndexTagBase outer_local_inames = tuple(oiname for oiname in outer_insn_inames @@ -1363,7 +1363,7 @@ def realize_reduction(kernel, insn_id_filter=None, unknown_types_ok=True, def map_scan_seq(expr, rec, nresults, arg_dtypes, reduction_dtypes, sweep_iname, scan_iname, sweep_min_value, scan_min_value, stride): - outer_insn_inames = temp_kernel.insn_inames(insn) + outer_insn_inames = insn.within_inames inames_to_remove.add(scan_iname) track_iname = var_name_gen( @@ -1417,7 +1417,7 @@ def realize_reduction(kernel, insn_id_filter=None, unknown_types_ok=True, update_id = insn_id_gen( based_on="{}_{}_update".format(insn.id, "_".join(expr.inames))) - update_insn_iname_deps = temp_kernel.insn_inames(insn) | {track_iname} + update_insn_iname_deps = insn.within_inames | {track_iname} if insn.within_inames_is_final: update_insn_iname_deps = insn.within_inames | {track_iname} @@ -1461,7 +1461,7 @@ def realize_reduction(kernel, insn_id_filter=None, unknown_types_ok=True, return map_reduction_seq( expr, rec, nresults, arg_dtypes, reduction_dtypes) - outer_insn_inames = temp_kernel.insn_inames(insn) + outer_insn_inames = insn.within_inames from loopy.kernel.data import LocalIndexTagBase outer_local_inames = tuple(oiname for oiname in outer_insn_inames @@ -1668,7 +1668,7 @@ def realize_reduction(kernel, insn_id_filter=None, unknown_types_ok=True, infer_arg_and_reduction_dtypes_for_reduction_expression( temp_kernel, expr, unknown_types_ok)) - outer_insn_inames = temp_kernel.insn_inames(insn) + outer_insn_inames = insn.within_inames bad_inames = frozenset(expr.inames) & outer_insn_inames if bad_inames: raise LoopyError("reduction used within loop(s) that it was " @@ -1854,7 +1854,7 @@ def realize_reduction(kernel, insn_id_filter=None, unknown_types_ok=True, no_sync_with=insn.no_sync_with | frozenset(new_insn_add_no_sync_with), within_inames=( - temp_kernel.insn_inames(insn) + insn.within_inames | new_insn_add_within_inames)) kwargs.pop("id") diff --git a/loopy/schedule/__init__.py b/loopy/schedule/__init__.py index 936c7c4d6..0eae1c4cc 100644 --- a/loopy/schedule/__init__.py +++ b/loopy/schedule/__init__.py @@ -296,7 +296,7 @@ def find_loop_insn_dep_map(kernel, loop_nest_with_map, loop_nest_around_map): continue dep_insn = kernel.id_to_insn[dep_insn_id] - dep_insn_inames = kernel.insn_inames(dep_insn) + dep_insn_inames = dep_insn.within_inames if iname in dep_insn_inames: # Nothing to be learned, dependency is in loop over iname @@ -940,7 +940,7 @@ def generate_loop_schedules_internal( if not is_ready: continue - want = kernel.insn_inames(insn) - sched_state.parallel_inames + want = insn.within_inames - sched_state.parallel_inames have = active_inames_set - sched_state.parallel_inames if want != have: @@ -1106,7 +1106,7 @@ def generate_loop_schedules_internal( for insn_id in sched_state.unscheduled_insn_ids: insn = kernel.id_to_insn[insn_id] - if last_entered_loop in kernel.insn_inames(insn): + if last_entered_loop in insn.within_inames: if debug_mode: print("cannot leave '%s' because '%s' still depends on it" % (last_entered_loop, format_insn(kernel, insn.id))) @@ -1294,7 +1294,7 @@ def generate_loop_schedules_internal( for insn_id in reachable_insn_ids: insn = kernel.id_to_insn[insn_id] - want = kernel.insn_inames(insn) + want = insn.within_inames if hypothetically_active_loops <= want: if usefulness is None: diff --git a/loopy/statistics.py 
b/loopy/statistics.py index eda750120..a0a0f9c7e 100755 --- a/loopy/statistics.py +++ b/loopy/statistics.py @@ -1239,7 +1239,7 @@ def get_unused_hw_axes_factor(knl, insn, disregard_local_axes, space=None): l_used = set() from loopy.kernel.data import LocalIndexTag, GroupIndexTag - for iname in knl.insn_inames(insn): + for iname in insn.within_inames: tags = knl.iname_tags_of_type(iname, (LocalIndexTag, GroupIndexTag), max_num=1) if tags: @@ -1273,7 +1273,7 @@ def get_unused_hw_axes_factor(knl, insn, disregard_local_axes, space=None): def count_insn_runs(knl, insn, count_redundant_work, disregard_local_axes=False): - insn_inames = knl.insn_inames(insn) + insn_inames = insn.within_inames if disregard_local_axes: from loopy.kernel.data import LocalIndexTag diff --git a/loopy/symbolic.py b/loopy/symbolic.py index 2a89d7dc5..77f8228b6 100644 --- a/loopy/symbolic.py +++ b/loopy/symbolic.py @@ -2118,7 +2118,7 @@ class AccessRangeOverlapChecker: arm = BatchedAccessRangeMapper(self.kernel, self.vars, overestimate=True) for expr in exprs: - arm(expr, self.kernel.insn_inames(insn)) + arm(expr, insn.within_inames) for name, arange in arm.access_ranges.items(): if arm.bad_subscripts[name]: diff --git a/loopy/transform/iname.py b/loopy/transform/iname.py index fefa7ed5f..fb5e8d781 100644 --- a/loopy/transform/iname.py +++ b/loopy/transform/iname.py @@ -1609,7 +1609,7 @@ def find_unused_axis_tag(kernel, kind, insn_match=None): insns = [insn for insn in kernel.instructions if match(kernel, insn)] for insn in insns: - for iname in kernel.insn_inames(insn): + for iname in insn.within_inames: if kernel.iname_tags_of_type(iname, kind): used_axes.add(kind.axis) diff --git a/loopy/transform/privatize.py b/loopy/transform/privatize.py index 8527023bc..ce2d7942b 100644 --- a/loopy/transform/privatize.py +++ b/loopy/transform/privatize.py @@ -124,7 +124,7 @@ def privatize_temporaries_with_inames( for writer_insn_id in wmap.get(tv.name, []): writer_insn = kernel.id_to_insn[writer_insn_id] - priv_axis_inames = kernel.insn_inames(writer_insn) & privatizing_inames + priv_axis_inames = writer_insn.within_inames & privatizing_inames referenced_priv_axis_inames = (priv_axis_inames & writer_insn.write_dependency_names()) -- GitLab From 1519e06e615d6c2e0cf20c42e2954e1912043bb2 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Mon, 7 Dec 2020 13:24:58 -0600 Subject: [PATCH 367/415] Stop using pytools.persistent_dict.new_hash --- loopy/kernel/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/loopy/kernel/__init__.py b/loopy/kernel/__init__.py index 72a9f0c2e..72b7db07f 100644 --- a/loopy/kernel/__init__.py +++ b/loopy/kernel/__init__.py @@ -1563,8 +1563,8 @@ class LoopKernel(ImmutableRecordWithoutPickling): def __hash__(self): from loopy.tools import LoopyKeyBuilder - from pytools.persistent_dict import new_hash - key_hash = new_hash() + import hashlib + key_hash = hashlib.sha256() self.update_persistent_hash(key_hash, LoopyKeyBuilder()) return hash(key_hash.digest()) -- GitLab From ac2eb5bf356a5435c31b9f32ca71ac8a144fee58 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Mon, 7 Dec 2020 17:55:50 -0600 Subject: [PATCH 368/415] Add link to canonical name of TargetBase --- loopy/target/__init__.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/loopy/target/__init__.py b/loopy/target/__init__.py index 6bad214ec..8af47c412 100644 --- a/loopy/target/__init__.py +++ b/loopy/target/__init__.py @@ -39,6 +39,14 @@ __doc__ = """ .. autoclass:: NumbaTarget .. 
autoclass:: NumbaCudaTarget +References to Canonical Names +----------------------------- + +.. currentmodule:: loopy.target + +.. class:: TargetBase + + See :class:`loopy.TargetBase`. """ -- GitLab From 87c398ae6c00f3db8e4559824059752d254ce81f Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Mon, 7 Dec 2020 18:15:46 -0600 Subject: [PATCH 369/415] Remove unnecessary any/all imports --- loopy/preprocess.py | 1 - loopy/schedule/__init__.py | 1 - 2 files changed, 2 deletions(-) diff --git a/loopy/preprocess.py b/loopy/preprocess.py index ab5e703e4..40b582734 100644 --- a/loopy/preprocess.py +++ b/loopy/preprocess.py @@ -256,7 +256,6 @@ def find_temporary_address_space(kernel): overall_aspace = max(desired_aspace_per_insn) - from pytools import all if not all(iaspace == overall_aspace for iaspace in desired_aspace_per_insn): raise LoopyError("not all instructions agree on the " "the desired address space (private/local/global) of the " diff --git a/loopy/schedule/__init__.py b/loopy/schedule/__init__.py index 0eae1c4cc..fde967c65 100644 --- a/loopy/schedule/__init__.py +++ b/loopy/schedule/__init__.py @@ -182,7 +182,6 @@ def has_barrier_within(kernel, sched_index): if isinstance(sched_item, BeginBlockItem): loop_contents, _ = gather_schedule_block( kernel.schedule, sched_index) - from pytools import any return any(isinstance(subsched_item, Barrier) for subsched_item in loop_contents) elif isinstance(sched_item, Barrier): -- GitLab From 9d628326d9c1310cdd86813bcc2ee51f0358711c Mon Sep 17 00:00:00 2001 From: Nicholas Christensen Date: Mon, 14 Dec 2020 22:41:04 -0600 Subject: [PATCH 370/415] bump required pytools version --- requirements.txt | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 1c266ff39..641c75970 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -git+https://github.com/nchristensen/pytools.git@master#egg=pytools == 2020.4.4 +git+https://github.com/nchristensen/pytools.git@master#egg=pytools == 2020.4.5 git+https://github.com/inducer/islpy.git#egg=islpy git+https://github.com/inducer/cgen.git#egg=cgen git+https://github.com/inducer/pyopencl.git#egg=pyopencl diff --git a/setup.py b/setup.py index bd76d293a..c580fabb1 100644 --- a/setup.py +++ b/setup.py @@ -84,7 +84,7 @@ setup(name="loopy", python_requires="~=3.6", install_requires=[ - "pytools>=2020.4.4", + "pytools>=2020.4.5", "pymbolic>=2019.2", "genpy>=2016.1.2", "cgen>=2016.1", -- GitLab From 24045f9ad105a35d9beb722670bf9358c6a8fa5f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Kl=C3=B6ckner?= Date: Wed, 16 Dec 2020 16:31:18 -0600 Subject: [PATCH 371/415] Memoize LoopKernel.__hash__ --- loopy/kernel/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/loopy/kernel/__init__.py b/loopy/kernel/__init__.py index 72b7db07f..b24cde2c4 100644 --- a/loopy/kernel/__init__.py +++ b/loopy/kernel/__init__.py @@ -1561,6 +1561,7 @@ class LoopKernel(ImmutableRecordWithoutPickling): for field_name in self.hash_fields: key_builder.rec(key_hash, getattr(self, field_name)) + @memoize_method def __hash__(self): from loopy.tools import LoopyKeyBuilder import hashlib -- GitLab From b736f1fdd9a8108f9857721f0caf4eb973688a56 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Kl=C3=B6ckner?= Date: Mon, 4 Jan 2021 19:28:03 +0100 Subject: [PATCH 372/415] Standardize, shorten doc/conf --- doc/conf.py | 130 +--------------------------------------------------- 1 file changed, 2 insertions(+), 128 deletions(-) diff --git a/doc/conf.py 
b/doc/conf.py index 1e5deb5f3..9b8cf81e1 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -1,23 +1,5 @@ -# -# loopy documentation build configuration file, created by -# sphinx-quickstart on Tue Aug 9 13:40:49 2011. -# -# This file is execfile()d with the current directory set to its containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -#import sys import os -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -#sys.path.insert(0, os.path.abspath('.')) - # -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. @@ -111,117 +93,8 @@ html_theme_options = { html_sidebars = { } -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -#html_theme_options = {} - -# Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -#html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -#html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -#html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -# html_static_path = ['_static'] - -# If not '', a "Last updated on:" timestamp is inserted at every page bottom, -# using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -#html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -#html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -#html_additional_pages = {} - -# If false, no module index is generated. -#html_domain_indices = True - -# If false, no index is generated. -#html_use_index = True - -# If true, the index is split into individual pages for each letter. -#html_split_index = False - # If true, links to the reST sources are added to the pages. -html_show_sourcelink = False - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -#html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). 
-#html_file_suffix = None - -# Output file base name for HTML help builder. -htmlhelp_basename = "loopydoc" - - -# -- Options for LaTeX output -------------------------------------------------- - -# The paper size ("letter" or "a4"). -#latex_paper_size = "letter" - -# The font size ("10pt", "11pt" or "12pt"). -#latex_font_size = "10pt" - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, author, documentclass [howto/manual]). -latex_documents = [ - ("index", "loopy.tex", "loopy Documentation", - "Andreas Kloeckner", "manual"), -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -#latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -#latex_use_parts = False - -# If true, show page references after internal links. -#latex_show_pagerefs = False - -# If true, show URL addresses after external links. -#latex_show_urls = False - -# Additional stuff for the LaTeX preamble. -#latex_preamble = '' - -# Documents to append as an appendix to all manuals. -#latex_appendices = [] - -# If false, no module index is generated. -#latex_domain_indices = True +html_show_sourcelink = True # -- Options for manual page output -------------------------------------------- @@ -246,3 +119,4 @@ intersphinx_mapping = { } autoclass_content = "class" +autodoc_typehints = "description" -- GitLab From a625e8fe0c211ad8e2132d4219ca12a2fd997a59 Mon Sep 17 00:00:00 2001 From: Kaushik Kulkarni Date: Tue, 5 Jan 2021 10:07:30 -0600 Subject: [PATCH 373/415] check that terms aren't raised to signed int powers --- loopy/check.py | 36 ++++++++++++++++++++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/loopy/check.py b/loopy/check.py index 0bf02f7cf..3ef2804bb 100644 --- a/loopy/check.py +++ b/loopy/check.py @@ -130,6 +130,41 @@ def check_for_integer_subscript_indices(kernel): type(insn).__name__)) +class ExponentIsUnsignedChecker(TypeInferenceMapper): + def map_power(self, expr): + res_dtype = super().map_power(expr) + exp_dtype = self.rec(expr.exponent) + if not res_dtype: + raise LoopyError( + "When checking for unsigned exponents for int-int" + f"pow expressions, type inference did not find type of {expr}.") + + if res_dtype[0].is_integral(): + if exp_dtype[0].numpy_dtype.kind == "i": + raise LoopyError("Integers to signed integer powers are not" + " allowed.") + + return res_dtype + + +def check_int_pow_has_unsigned_exponent(kernel): + """ + Checks that all expressions of the ``a**b``, where both ``a`` + and ``b`` are integers (signed or unsigned) have exponents of type + unsigned. + """ + exp_is_uint_checker = ExponentIsUnsignedChecker(kernel) + for insn in kernel.instructions: + if isinstance(insn, MultiAssignmentBase): + exp_is_uint_checker(insn.expression, return_tuple=isinstance(insn, + CallInstruction), return_dtype_set=True) + elif isinstance(insn, (CInstruction, _DataObliviousInstruction)): + pass + else: + raise NotImplementedError("Unknown insn type %s." % ( + type(insn).__name__)) + + def check_insn_attributes(kernel): """ Check for legality of attributes of every instruction in *kernel*. 
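[Editorial sketch, not part of the patch: the kind of kernel the new
pre-schedule check is aimed at. The kernel below is hypothetical; with
this patch applied, an integral base raised to a signed integral exponent
is rejected, presumably because evaluating it through C's double-precision
pow() is lossy. Subsequent patches in this series emit a dedicated
integer-power helper instead and allow such kernels again.]

    import numpy as np
    import loopy as lp

    # int32 base raised to an int32 (signed) exponent
    knl = lp.make_kernel(
        "{[i]: 0 <= i < 10}",
        "out[i] = a[i]**p",
        [lp.GlobalArg("a", np.int32, shape=(10,)),
         lp.ValueArg("p", np.int32),
         lp.GlobalArg("out", np.int32, shape=(10,))])

    # Generating code for 'knl' now trips check_int_pow_has_unsigned_exponent()
    # during pre_schedule_checks() and raises
    # LoopyError("Integers to signed integer powers are not allowed.")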
@@ -801,6 +836,7 @@ def pre_schedule_checks(kernel): logger.debug("%s: pre-schedule check: start" % kernel.name) check_for_integer_subscript_indices(kernel) + check_int_pow_has_unsigned_exponent(kernel) check_for_duplicate_insn_ids(kernel) check_for_orphaned_user_hardware_axes(kernel) check_for_double_use_of_hw_axes(kernel) -- GitLab From 891b1d3bdd826cd84d7dcb6e28ed2b04c75ca725 Mon Sep 17 00:00:00 2001 From: Kaushik Kulkarni Date: Tue, 5 Jan 2021 10:07:58 -0600 Subject: [PATCH 374/415] adds support for integer exponentation in loopy --- loopy/target/c/__init__.py | 25 +++++++++++++++++ loopy/target/c/codegen/expression.py | 42 ++++++++++++++++++++++------ 2 files changed, 58 insertions(+), 9 deletions(-) diff --git a/loopy/target/c/__init__.py b/loopy/target/c/__init__.py index 3234da45d..dff194049 100644 --- a/loopy/target/c/__init__.py +++ b/loopy/target/c/__init__.py @@ -172,6 +172,31 @@ def _preamble_generator(preamble_info): yield ("04_%s" % func_name, func_body) yield undef_integer_types_macro + for func in preamble_info.seen_functions: + if func.name == "int_pow": + base_ctype = preamble_info.kernel.target.dtype_to_typename( + func.arg_dtypes[0]) + exp_ctype = preamble_info.kernel.target.dtype_to_typename( + func.arg_dtypes[1]) + + yield("07_int_pow", f""" + inline {base_ctype} {func.c_name}({base_ctype} b, {exp_ctype} n) {{ + if (n == 0) + return 1 + + {base_ctype} y = 1; + + while (n > 1) {{ + if (n % 2) {{ + x = x * x; + y = x * y; + }} + else + x = x * x; + n = n / 2; + }} + }}""") + # }}} diff --git a/loopy/target/c/codegen/expression.py b/loopy/target/c/codegen/expression.py index 74f1ead8b..f200a1594 100644 --- a/loopy/target/c/codegen/expression.py +++ b/loopy/target/c/codegen/expression.py @@ -701,6 +701,10 @@ class ExpressionToCExpressionMapper(IdentityMapper): self.rec(expr.denominator, type_context, tgt_dtype)) def map_power(self, expr, type_context): + tgt_dtype = self.infer_type(expr) + base_dtype = self.infer_type(expr.base) + exponent_dtype = self.infer_type(expr.exponent) + def base_impl(expr, type_context): from pymbolic.primitives import is_constant, is_zero if is_constant(expr.exponent): @@ -711,14 +715,35 @@ class ExpressionToCExpressionMapper(IdentityMapper): elif is_zero(expr.exponent - 2): return self.rec(expr.base*expr.base, type_context) - return type(expr)( - self.rec(expr.base, type_context), - self.rec(expr.exponent, type_context)) + if exponent_dtype.numpy_dtype.kind == "u": + # FIXME: need to add this to the seen functions + + from loopy.codegen import SeenFunction + func_name = ("loopy_pow_" + f"{base_dtype.numpy_dtype}_{exponent_dtype.numpy_dtype}") + + self.codegen_state.seen_functions.add( + SeenFunction( + "int_pow", func_name, + (base_dtype, exponent_dtype))) + return var("loopy_pow_" + f"{base_dtype.numpy_dtype}_{exponent_dtype.numpy_dtype}")( + self.rec(expr.base), self.rec(expr.exponent)) + else: + from loopy.types import to_loopy_type + loopy_f64_dtype = to_loopy_type(np.float64, + target=self.kernel.target) + return self.wrap_in_typecast( + loopy_f64_dtype, + tgt_dtype, + var("pow")(self.rec(expr.base, type_context, + loopy_f64_dtype), + self.rec(expr.base, type_context, + loopy_f64_dtype))) if not self.allow_complex: return base_impl(expr, type_context) - tgt_dtype = self.infer_type(expr) if tgt_dtype.is_complex(): if expr.exponent in [2, 3, 4]: value = expr.base @@ -726,8 +751,8 @@ class ExpressionToCExpressionMapper(IdentityMapper): value = value * expr.base return self.rec(value, type_context) else: - b_complex = 
self.infer_type(expr.base).is_complex() - e_complex = self.infer_type(expr.exponent).is_complex() + b_complex = base_dtype.is_complex() + e_complex = exponent_dtype.is_complex() if b_complex and not e_complex: return var("%s_powr" % self.complex_type_name(tgt_dtype))( @@ -754,6 +779,7 @@ class ExpressionToCExpressionMapper(IdentityMapper): # {{{ C expression to code mapper class CExpressionToCodeMapper(RecursiveMapper): + # {{{ helpers def parenthesize_if_needed(self, s, enclosing_prec, my_prec): @@ -954,9 +980,7 @@ class CExpressionToCodeMapper(RecursiveMapper): return self._map_division_operator("%", expr, enclosing_prec) def map_power(self, expr, enclosing_prec): - return "pow({}, {})".format( - self.rec(expr.base, PREC_NONE), - self.rec(expr.exponent, PREC_NONE)) + raise NotImplementedError() def map_array_literal(self, expr, enclosing_prec): return "{ %s }" % self.join_rec(", ", expr.children, PREC_NONE) -- GitLab From e1eb0bf6a211d49744a36a6c67ba9d796ad1f3eb Mon Sep 17 00:00:00 2001 From: Kaushik Kulkarni Date: Tue, 5 Jan 2021 10:08:27 -0600 Subject: [PATCH 375/415] test loopy pown --- test/test_loopy.py | 43 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) diff --git a/test/test_loopy.py b/test/test_loopy.py index 9bc532d53..149afd079 100644 --- a/test/test_loopy.py +++ b/test/test_loopy.py @@ -2995,6 +2995,49 @@ def test_split_iname_within(ctx_factory): lp.auto_test_vs_ref(ref_knl, ctx, knl, parameters=dict(n=5)) +@pytest.mark.parametrize("basetype,exptype", [(np.int32, np.uint32), (np.int64, + np.uint64), (np.int, np.float), (np.float, np.int)]) +def test_int_int_pow(ctx_factory, basetype, exptype): + ctx = ctx_factory() + queue = cl.CommandQueue(ctx) + + def _make_random_np_array(shape, dtype): + from numpy.random import default_rng + rng = default_rng() + if isinstance(shape, int): + shape = (shape,) + + dtype = np.dtype(dtype) + if dtype.kind in ["u", "i"]: + # choosing numbers so that we don't have overflow, to not trigger + # undefined behavior + low = 0 if dtype.kind == "u" else -6 + high = 6 + return rng.integers(low=low, high=high, size=shape, dtype=dtype) + elif dtype.kind == "f": + return rng.random(*shape).astype(dtype) + else: + raise NotImplementedError() + + base = _make_random_np_array(10, basetype) + power = _make_random_np_array(10, exptype) + expected_result = base ** power + + knl = lp.make_kernel( + "{[i]: 0<=i 1: exec(sys.argv[1]) -- GitLab From 6d51618308724ad4c96caa12c567bad2c8717741 Mon Sep 17 00:00:00 2001 From: Kaushik Kulkarni Date: Tue, 5 Jan 2021 12:41:14 -0600 Subject: [PATCH 376/415] re-enable int exponents --- loopy/check.py | 36 ------------------------------------ 1 file changed, 36 deletions(-) diff --git a/loopy/check.py b/loopy/check.py index 3ef2804bb..0bf02f7cf 100644 --- a/loopy/check.py +++ b/loopy/check.py @@ -130,41 +130,6 @@ def check_for_integer_subscript_indices(kernel): type(insn).__name__)) -class ExponentIsUnsignedChecker(TypeInferenceMapper): - def map_power(self, expr): - res_dtype = super().map_power(expr) - exp_dtype = self.rec(expr.exponent) - if not res_dtype: - raise LoopyError( - "When checking for unsigned exponents for int-int" - f"pow expressions, type inference did not find type of {expr}.") - - if res_dtype[0].is_integral(): - if exp_dtype[0].numpy_dtype.kind == "i": - raise LoopyError("Integers to signed integer powers are not" - " allowed.") - - return res_dtype - - -def check_int_pow_has_unsigned_exponent(kernel): - """ - Checks that all expressions of the ``a**b``, where both ``a`` - and 
``b`` are integers (signed or unsigned) have exponents of type - unsigned. - """ - exp_is_uint_checker = ExponentIsUnsignedChecker(kernel) - for insn in kernel.instructions: - if isinstance(insn, MultiAssignmentBase): - exp_is_uint_checker(insn.expression, return_tuple=isinstance(insn, - CallInstruction), return_dtype_set=True) - elif isinstance(insn, (CInstruction, _DataObliviousInstruction)): - pass - else: - raise NotImplementedError("Unknown insn type %s." % ( - type(insn).__name__)) - - def check_insn_attributes(kernel): """ Check for legality of attributes of every instruction in *kernel*. @@ -836,7 +801,6 @@ def pre_schedule_checks(kernel): logger.debug("%s: pre-schedule check: start" % kernel.name) check_for_integer_subscript_indices(kernel) - check_int_pow_has_unsigned_exponent(kernel) check_for_duplicate_insn_ids(kernel) check_for_orphaned_user_hardware_axes(kernel) check_for_double_use_of_hw_axes(kernel) -- GitLab From 12fd89d59662beb9d7b641883be3afac778f3f38 Mon Sep 17 00:00:00 2001 From: Kaushik Kulkarni Date: Tue, 5 Jan 2021 12:42:46 -0600 Subject: [PATCH 377/415] also record return dtype in SeenFunction --- loopy/codegen/__init__.py | 9 +++++++-- loopy/target/c/codegen/expression.py | 9 ++++++--- 2 files changed, 13 insertions(+), 5 deletions(-) diff --git a/loopy/codegen/__init__.py b/loopy/codegen/__init__.py index cbae4eac5..54924295a 100644 --- a/loopy/codegen/__init__.py +++ b/loopy/codegen/__init__.py @@ -146,13 +146,18 @@ class SeenFunction(ImmutableRecord): .. attribute:: arg_dtypes a tuple of arg dtypes + + .. attribute:: res_dtypes + + a tuple of result dtypes """ - def __init__(self, name, c_name, arg_dtypes): + def __init__(self, name, c_name, arg_dtypes, res_dtypes=()): ImmutableRecord.__init__(self, name=name, c_name=c_name, - arg_dtypes=arg_dtypes) + arg_dtypes=arg_dtypes, + res_dtypes=res_dtypes) class CodeGenerationState: diff --git a/loopy/target/c/codegen/expression.py b/loopy/target/c/codegen/expression.py index f200a1594..6f48f4de5 100644 --- a/loopy/target/c/codegen/expression.py +++ b/loopy/target/c/codegen/expression.py @@ -325,7 +325,8 @@ class ExpressionToCExpressionMapper(IdentityMapper): self.codegen_state.seen_functions.add( SeenFunction( name, f"{name}_{suffix}", - (result_dtype, result_dtype))) + (result_dtype, result_dtype), + (result_dtype,))) if den_nonneg: if num_nonneg: @@ -538,7 +539,8 @@ class ExpressionToCExpressionMapper(IdentityMapper): self.codegen_state.seen_functions.add( SeenFunction(identifier, mangle_result.target_name, - mangle_result.arg_dtypes or par_dtypes)) + mangle_result.arg_dtypes or par_dtypes, + mangle_result.result_dtypes)) return var(mangle_result.target_name)(*processed_parameters) @@ -725,7 +727,8 @@ class ExpressionToCExpressionMapper(IdentityMapper): self.codegen_state.seen_functions.add( SeenFunction( "int_pow", func_name, - (base_dtype, exponent_dtype))) + (base_dtype, exponent_dtype), + (tgt_dtype, ))) return var("loopy_pow_" f"{base_dtype.numpy_dtype}_{exponent_dtype.numpy_dtype}")( self.rec(expr.base), self.rec(expr.exponent)) -- GitLab From c58c420c8a88883617a36700ecef78fdd4c77b75 Mon Sep 17 00:00:00 2001 From: Kaushik Kulkarni Date: Tue, 5 Jan 2021 12:46:20 -0600 Subject: [PATCH 378/415] minor fixes in T**int --- loopy/target/c/__init__.py | 22 ++++++++++++++++++---- loopy/target/c/codegen/expression.py | 6 ++---- 2 files changed, 20 insertions(+), 8 deletions(-) diff --git a/loopy/target/c/__init__.py b/loopy/target/c/__init__.py index dff194049..cae5b2335 100644 --- a/loopy/target/c/__init__.py +++ 
b/loopy/target/c/__init__.py @@ -178,23 +178,37 @@ def _preamble_generator(preamble_info): func.arg_dtypes[0]) exp_ctype = preamble_info.kernel.target.dtype_to_typename( func.arg_dtypes[1]) + res_ctype = preamble_info.kernel.target.dtype_to_typename( + func.res_dtypes[0]) + + if func.arg_dtypes[1].numpy_dtype.kind == "u": + signed_exponent_preamble = "" + else: + signed_exponent_preamble = """ + if (n < 0) { + x = 1.0/x; + n = -n; + }""" yield("07_int_pow", f""" - inline {base_ctype} {func.c_name}({base_ctype} b, {exp_ctype} n) {{ + inline {res_ctype} {func.c_name}({base_ctype} x, {exp_ctype} n) {{ if (n == 0) - return 1 + return 1; + {signed_exponent_preamble} - {base_ctype} y = 1; + {res_ctype} y = 1; while (n > 1) {{ if (n % 2) {{ - x = x * x; y = x * y; + x = x * x; }} else x = x * x; n = n / 2; }} + + return x*y; }}""") # }}} diff --git a/loopy/target/c/codegen/expression.py b/loopy/target/c/codegen/expression.py index 6f48f4de5..4971d2bab 100644 --- a/loopy/target/c/codegen/expression.py +++ b/loopy/target/c/codegen/expression.py @@ -717,9 +717,7 @@ class ExpressionToCExpressionMapper(IdentityMapper): elif is_zero(expr.exponent - 2): return self.rec(expr.base*expr.base, type_context) - if exponent_dtype.numpy_dtype.kind == "u": - # FIXME: need to add this to the seen functions - + if exponent_dtype.is_integral(): from loopy.codegen import SeenFunction func_name = ("loopy_pow_" f"{base_dtype.numpy_dtype}_{exponent_dtype.numpy_dtype}") @@ -741,7 +739,7 @@ class ExpressionToCExpressionMapper(IdentityMapper): tgt_dtype, var("pow")(self.rec(expr.base, type_context, loopy_f64_dtype), - self.rec(expr.base, type_context, + self.rec(expr.exponent, type_context, loopy_f64_dtype))) if not self.allow_complex: -- GitLab From ba6615e9b0469b20e85fcd54749156f38c1eef8e Mon Sep 17 00:00:00 2001 From: Kaushik Kulkarni Date: Tue, 5 Jan 2021 12:46:40 -0600 Subject: [PATCH 379/415] formatting --- test/test_loopy.py | 24 +++++++++++------------- 1 file changed, 11 insertions(+), 13 deletions(-) diff --git a/test/test_loopy.py b/test/test_loopy.py index 149afd079..48484141a 100644 --- a/test/test_loopy.py +++ b/test/test_loopy.py @@ -2995,41 +2995,39 @@ def test_split_iname_within(ctx_factory): lp.auto_test_vs_ref(ref_knl, ctx, knl, parameters=dict(n=5)) -@pytest.mark.parametrize("basetype,exptype", [(np.int32, np.uint32), (np.int64, - np.uint64), (np.int, np.float), (np.float, np.int)]) -def test_int_int_pow(ctx_factory, basetype, exptype): +@pytest.mark.parametrize("base_type,exp_type", [(np.int32, np.uint32), (np.int64, + np.uint64), (np.int, np.float), (np.float, np.int), (np.int, np.int)]) +def test_int_int_pow(ctx_factory, base_type, exp_type): ctx = ctx_factory() queue = cl.CommandQueue(ctx) def _make_random_np_array(shape, dtype): from numpy.random import default_rng - rng = default_rng() + rng = default_rng(0) if isinstance(shape, int): shape = (shape,) dtype = np.dtype(dtype) if dtype.kind in ["u", "i"]: - # choosing numbers so that we don't have overflow, to not trigger - # undefined behavior - low = 0 if dtype.kind == "u" else -6 - high = 6 + low = 0 # numpy might trigger error for -ve int exponents + high = 6 # choosing numbers to avoid overflow (undefined behavior) return rng.integers(low=low, high=high, size=shape, dtype=dtype) elif dtype.kind == "f": return rng.random(*shape).astype(dtype) else: raise NotImplementedError() - base = _make_random_np_array(10, basetype) - power = _make_random_np_array(10, exptype) + base = _make_random_np_array(10, base_type) + power = _make_random_np_array(10, 
exp_type) expected_result = base ** power knl = lp.make_kernel( "{[i]: 0<=i Date: Tue, 5 Jan 2021 13:04:12 -0600 Subject: [PATCH 380/415] comment: explain CExpressionToCodeMapper.map_power is no longer supported --- loopy/target/c/codegen/expression.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/loopy/target/c/codegen/expression.py b/loopy/target/c/codegen/expression.py index 4971d2bab..047902ef6 100644 --- a/loopy/target/c/codegen/expression.py +++ b/loopy/target/c/codegen/expression.py @@ -981,7 +981,9 @@ class CExpressionToCodeMapper(RecursiveMapper): return self._map_division_operator("%", expr, enclosing_prec) def map_power(self, expr, enclosing_prec): - raise NotImplementedError() + # No trivial "**" operator for C-like targets, should have been preprocessed + # into other expression types. + raise RuntimeError() def map_array_literal(self, expr, enclosing_prec): return "{ %s }" % self.join_rec(", ", expr.children, PREC_NONE) -- GitLab From 5831d8facbb5baf20b90b39844ee8c0e113e28b1 Mon Sep 17 00:00:00 2001 From: Kaushik Kulkarni Date: Wed, 6 Jan 2021 23:39:56 -0600 Subject: [PATCH 381/415] res_dtypes->result_dtypes --- loopy/codegen/__init__.py | 6 +++--- loopy/target/c/__init__.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/loopy/codegen/__init__.py b/loopy/codegen/__init__.py index 54924295a..75ea33bbd 100644 --- a/loopy/codegen/__init__.py +++ b/loopy/codegen/__init__.py @@ -147,17 +147,17 @@ class SeenFunction(ImmutableRecord): a tuple of arg dtypes - .. attribute:: res_dtypes + .. attribute:: result_dtypes a tuple of result dtypes """ - def __init__(self, name, c_name, arg_dtypes, res_dtypes=()): + def __init__(self, name, c_name, arg_dtypes, result_dtypes=()): ImmutableRecord.__init__(self, name=name, c_name=c_name, arg_dtypes=arg_dtypes, - res_dtypes=res_dtypes) + result_dtypes=result_dtypes) class CodeGenerationState: diff --git a/loopy/target/c/__init__.py b/loopy/target/c/__init__.py index cae5b2335..1aff14627 100644 --- a/loopy/target/c/__init__.py +++ b/loopy/target/c/__init__.py @@ -179,7 +179,7 @@ def _preamble_generator(preamble_info): exp_ctype = preamble_info.kernel.target.dtype_to_typename( func.arg_dtypes[1]) res_ctype = preamble_info.kernel.target.dtype_to_typename( - func.res_dtypes[0]) + func.result_dtypes[0]) if func.arg_dtypes[1].numpy_dtype.kind == "u": signed_exponent_preamble = "" -- GitLab From 8b5590cbfc4d292dd57da915e6d718a984a7e669 Mon Sep 17 00:00:00 2001 From: Kaushik Kulkarni Date: Wed, 6 Jan 2021 23:46:47 -0600 Subject: [PATCH 382/415] remove default initialization of SeenFunction.result_dtypes --- loopy/codegen/__init__.py | 2 +- loopy/target/c/__init__.py | 3 ++- loopy/target/python.py | 3 ++- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/loopy/codegen/__init__.py b/loopy/codegen/__init__.py index 75ea33bbd..0f5d824cc 100644 --- a/loopy/codegen/__init__.py +++ b/loopy/codegen/__init__.py @@ -152,7 +152,7 @@ class SeenFunction(ImmutableRecord): a tuple of result dtypes """ - def __init__(self, name, c_name, arg_dtypes, result_dtypes=()): + def __init__(self, name, c_name, arg_dtypes, result_dtypes): ImmutableRecord.__init__(self, name=name, c_name=c_name, diff --git a/loopy/target/c/__init__.py b/loopy/target/c/__init__.py index 1aff14627..0d65da1b6 100644 --- a/loopy/target/c/__init__.py +++ b/loopy/target/c/__init__.py @@ -981,7 +981,8 @@ class CFamilyASTBuilder(ASTBuilderBase): codegen_state.seen_functions.add( SeenFunction(func_id, mangle_result.target_name, - 
mangle_result.arg_dtypes)) + mangle_result.arg_dtypes, + mangle_result.result_dtypes)) from pymbolic import var for i, (a, tgt_dtype) in enumerate( diff --git a/loopy/target/python.py b/loopy/target/python.py index e54aa622f..a1557e47b 100644 --- a/loopy/target/python.py +++ b/loopy/target/python.py @@ -118,7 +118,8 @@ class ExpressionToPythonMapper(StringifyMapper): self.codegen_state.seen_functions.add( SeenFunction(identifier, mangle_result.target_name, - mangle_result.arg_dtypes or par_dtypes)) + mangle_result.arg_dtypes or par_dtypes, + mangle_result.result_dtypes)) return "{}({})".format(mangle_result.target_name, ", ".join(str_parameters)) -- GitLab From 3cb6522b7bc5d2b770f295d0ec0ad0a8b4e5e6c3 Mon Sep 17 00:00:00 2001 From: Kaushik Kulkarni Date: Thu, 7 Jan 2021 01:00:26 -0600 Subject: [PATCH 383/415] respect python indentation --- loopy/target/c/__init__.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/loopy/target/c/__init__.py b/loopy/target/c/__init__.py index 0d65da1b6..5126172fb 100644 --- a/loopy/target/c/__init__.py +++ b/loopy/target/c/__init__.py @@ -34,6 +34,9 @@ from loopy.symbolic import IdentityMapper from loopy.types import NumpyType import pymbolic.primitives as p +from loopy.tools import remove_common_indentation +import re + from pytools import memoize_method __doc__ = """ @@ -184,17 +187,18 @@ def _preamble_generator(preamble_info): if func.arg_dtypes[1].numpy_dtype.kind == "u": signed_exponent_preamble = "" else: - signed_exponent_preamble = """ - if (n < 0) { - x = 1.0/x; - n = -n; - }""" + signed_exponent_preamble = "\n" + remove_common_indentation( + """ + if (n < 0) { + x = 1.0/x; + n = -n; + }""") yield("07_int_pow", f""" inline {res_ctype} {func.c_name}({base_ctype} x, {exp_ctype} n) {{ if (n == 0) return 1; - {signed_exponent_preamble} + {re.sub("^", 14*" ", signed_exponent_preamble, flags=re.M)} {res_ctype} y = 1; -- GitLab From db8d71ec323e6158070bc035050fdb5cc4707e38 Mon Sep 17 00:00:00 2001 From: Kaushik Kulkarni Date: Thu, 7 Jan 2021 01:49:12 -0600 Subject: [PATCH 384/415] let ExpressionToCExpressionMapper.map_call handle pow(.,.) 
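For reference, the loopy_pow_* helper emitted by the preamble in the patches above computes integer powers by exponentiation by squaring. A minimal Python sketch of the same algorithm (illustrative only, not the generated code itself; the name int_pow is made up here):

    def int_pow(x, n):
        # exponentiation by squaring, as in the generated C helper
        if n == 0:
            return 1
        if n < 0:              # reachable only for signed exponent types
            x = 1.0 / x
            n = -n
        y = 1
        while n > 1:
            if n % 2:          # odd exponent: fold one factor of x into y
                y = x * y
            x = x * x
            n = n // 2
        return x * y

    assert int_pow(2, 5) == 32 and int_pow(3, 0) == 1 and int_pow(7, 1) == 7
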
--- loopy/target/c/__init__.py | 13 ++++++++++++- loopy/target/c/codegen/expression.py | 15 ++------------- loopy/target/cuda.py | 12 ++++++++++++ loopy/target/opencl.py | 1 + test/test_loopy.py | 2 +- 5 files changed, 28 insertions(+), 15 deletions(-) diff --git a/loopy/target/c/__init__.py b/loopy/target/c/__init__.py index 5126172fb..b8cd47b4a 100644 --- a/loopy/target/c/__init__.py +++ b/loopy/target/c/__init__.py @@ -497,7 +497,7 @@ def c_math_mangler(target, name, arg_dtypes, modify_name=True): [], [dtype.numpy_dtype for dtype in arg_dtypes]) if dtype.kind == "c": - raise LoopyTypeError("%s does not support complex numbers") + raise LoopyTypeError(f"{name} does not support complex numbers") elif dtype.kind == "f": if modify_name: @@ -517,6 +517,17 @@ def c_math_mangler(target, name, arg_dtypes, modify_name=True): result_dtypes=(result_dtype,), arg_dtypes=2*(result_dtype,)) + if name == "pow" and len(arg_dtypes) == 2: + if any(dtype.is_complex() == "c" for dtype in arg_dtypes): + raise LoopyTypeError(f"{name} does not support complex numbers") + + f64_dtype = NumpyType(np.float64) + + # math.h only provides double pow(double, double) + return CallMangleInfo(target_name=name, + arg_dtypes=(f64_dtype, f64_dtype), + result_dtypes=(f64_dtype,)) + return None # }}} diff --git a/loopy/target/c/codegen/expression.py b/loopy/target/c/codegen/expression.py index 047902ef6..180ee1611 100644 --- a/loopy/target/c/codegen/expression.py +++ b/loopy/target/c/codegen/expression.py @@ -731,16 +731,7 @@ class ExpressionToCExpressionMapper(IdentityMapper): f"{base_dtype.numpy_dtype}_{exponent_dtype.numpy_dtype}")( self.rec(expr.base), self.rec(expr.exponent)) else: - from loopy.types import to_loopy_type - loopy_f64_dtype = to_loopy_type(np.float64, - target=self.kernel.target) - return self.wrap_in_typecast( - loopy_f64_dtype, - tgt_dtype, - var("pow")(self.rec(expr.base, type_context, - loopy_f64_dtype), - self.rec(expr.exponent, type_context, - loopy_f64_dtype))) + return self.rec(var("pow")(expr.base, expr.exponent), type_context) if not self.allow_complex: return base_impl(expr, type_context) @@ -981,9 +972,7 @@ class CExpressionToCodeMapper(RecursiveMapper): return self._map_division_operator("%", expr, enclosing_prec) def map_power(self, expr, enclosing_prec): - # No trivial "**" operator for C-like targets, should have been preprocessed - # into other expression types. 
- raise RuntimeError() + raise RuntimeError(f"'{expr}' should have been transformed to 'Call' expression node.") def map_array_literal(self, expr, enclosing_prec): return "{ %s }" % self.join_rec(", ", expr.children, PREC_NONE) diff --git a/loopy/target/cuda.py b/loopy/target/cuda.py index 2023077bf..67dc1fe24 100644 --- a/loopy/target/cuda.py +++ b/loopy/target/cuda.py @@ -127,6 +127,18 @@ def cuda_function_mangler(kernel, name, arg_dtypes): return dtype, name + if name in ["pow"] and len(arg_dtypes) == 2: + dtype = np.find_common_type([], arg_dtypes) + + if dtype == np.float64: + pass # pow + elif dtype == np.float32: + name = name + "f" # powf + else: + raise RuntimeError(f"{name} does not support type {dtype}") + + return dtype, name + if name in "atan2" and len(arg_dtypes) == 2: return arg_dtypes[0], name diff --git a/loopy/target/opencl.py b/loopy/target/opencl.py index 2ff9ede55..230c73c6f 100644 --- a/loopy/target/opencl.py +++ b/loopy/target/opencl.py @@ -144,6 +144,7 @@ _CL_SIMPLE_MULTI_ARG_FUNCTIONS = { "rsqrt": 1, "clamp": 3, "atan2": 2, + "pow": 2, } diff --git a/test/test_loopy.py b/test/test_loopy.py index 48484141a..18956dbc6 100644 --- a/test/test_loopy.py +++ b/test/test_loopy.py @@ -2997,7 +2997,7 @@ def test_split_iname_within(ctx_factory): @pytest.mark.parametrize("base_type,exp_type", [(np.int32, np.uint32), (np.int64, np.uint64), (np.int, np.float), (np.float, np.int), (np.int, np.int)]) -def test_int_int_pow(ctx_factory, base_type, exp_type): +def test_int_pow(ctx_factory, base_type, exp_type): ctx = ctx_factory() queue = cl.CommandQueue(ctx) -- GitLab From 0e44c6f42544380db9f47a748a0f9e1d43d4cbe7 Mon Sep 17 00:00:00 2001 From: Kaushik Kulkarni Date: Thu, 7 Jan 2021 01:52:08 -0600 Subject: [PATCH 385/415] formatting: line length < 85 --- loopy/target/c/codegen/expression.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/loopy/target/c/codegen/expression.py b/loopy/target/c/codegen/expression.py index 180ee1611..4705c9c35 100644 --- a/loopy/target/c/codegen/expression.py +++ b/loopy/target/c/codegen/expression.py @@ -972,7 +972,8 @@ class CExpressionToCodeMapper(RecursiveMapper): return self._map_division_operator("%", expr, enclosing_prec) def map_power(self, expr, enclosing_prec): - raise RuntimeError(f"'{expr}' should have been transformed to 'Call' expression node.") + raise RuntimeError(f"'{expr}' should have been transformed to 'Call'" + " expression node.") def map_array_literal(self, expr, enclosing_prec): return "{ %s }" % self.join_rec(", ", expr.children, PREC_NONE) -- GitLab From efa0670aabfe6c5cce314296d490a76f1890a16b Mon Sep 17 00:00:00 2001 From: Kaushik Kulkarni Date: Thu, 7 Jan 2021 02:02:44 -0600 Subject: [PATCH 386/415] C supports pow[fl]? 
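The CUDA mangler in PATCH 384 above picks between pow and powf from the promoted operand dtype. A small sketch of that promotion rule, mirroring the mangler's own np.find_common_type call (expected results noted in comments; behavior inferred from the diff above):

    import numpy as np

    # all-float32 operands stay single precision -> "powf"
    print(np.find_common_type([], [np.float32, np.float32]))   # float32
    # any float64 operand promotes to double precision -> "pow"
    print(np.find_common_type([], [np.float32, np.float64]))   # float64
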
--- loopy/target/c/__init__.py | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) diff --git a/loopy/target/c/__init__.py b/loopy/target/c/__init__.py index b8cd47b4a..2ca08d6bf 100644 --- a/loopy/target/c/__init__.py +++ b/loopy/target/c/__init__.py @@ -490,7 +490,7 @@ def c_math_mangler(target, name, arg_dtypes, modify_name=True): arg_dtypes=arg_dtypes) # binary functions - if (name in ["fmax", "fmin", "copysign"] + if (name in ["fmax", "fmin", "copysign", "pow"] and len(arg_dtypes) == 2): dtype = np.find_common_type( @@ -517,17 +517,6 @@ def c_math_mangler(target, name, arg_dtypes, modify_name=True): result_dtypes=(result_dtype,), arg_dtypes=2*(result_dtype,)) - if name == "pow" and len(arg_dtypes) == 2: - if any(dtype.is_complex() == "c" for dtype in arg_dtypes): - raise LoopyTypeError(f"{name} does not support complex numbers") - - f64_dtype = NumpyType(np.float64) - - # math.h only provides double pow(double, double) - return CallMangleInfo(target_name=name, - arg_dtypes=(f64_dtype, f64_dtype), - result_dtypes=(f64_dtype,)) - return None # }}} -- GitLab From e37f34157ca5e21b6b09d8c45ca75d880e1368f7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Kl=C3=B6ckner?= Date: Fri, 8 Jan 2021 18:10:50 -0600 Subject: [PATCH 387/415] Point pytools back to master, for Taggable --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 641c75970..1072cdec0 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -git+https://github.com/nchristensen/pytools.git@master#egg=pytools == 2020.4.5 +git+https://github.com/inducer/pytools.git#egg=pytools git+https://github.com/inducer/islpy.git#egg=islpy git+https://github.com/inducer/cgen.git#egg=cgen git+https://github.com/inducer/pyopencl.git#egg=pyopencl -- GitLab From 3c9f798c8c3000381acd26b1637888ace45b8148 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Kl=C3=B6ckner?= Date: Fri, 8 Jan 2021 18:12:02 -0600 Subject: [PATCH 388/415] Bump pytools dep version for Taggable --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index c580fabb1..fcf284bc8 100644 --- a/setup.py +++ b/setup.py @@ -84,7 +84,7 @@ setup(name="loopy", python_requires="~=3.6", install_requires=[ - "pytools>=2020.4.5", + "pytools>=2021.1", "pymbolic>=2019.2", "genpy>=2016.1.2", "cgen>=2016.1", -- GitLab From 2e6cea3b5280a0fd8dc5042802a050353a8f2506 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Kl=C3=B6ckner?= Date: Fri, 8 Jan 2021 18:21:03 -0600 Subject: [PATCH 389/415] Add version requirement to requirements.txt for pytools --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 1072cdec0..8016ee7a8 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -git+https://github.com/inducer/pytools.git#egg=pytools +git+https://github.com/inducer/pytools.git#egg=pytools >= 2021.1 git+https://github.com/inducer/islpy.git#egg=islpy git+https://github.com/inducer/cgen.git#egg=cgen git+https://github.com/inducer/pyopencl.git#egg=pyopencl -- GitLab From ca744aeaa01c956a4c5edbfc043e8bcb760f6e07 Mon Sep 17 00:00:00 2001 From: Kaushik Kulkarni Date: Sun, 10 Jan 2021 16:33:41 -0600 Subject: [PATCH 390/415] fixes a bug when there are multiple pown's in a kernel --- loopy/target/c/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/loopy/target/c/__init__.py b/loopy/target/c/__init__.py index 2ca08d6bf..d1e474c20 100644 --- 
a/loopy/target/c/__init__.py +++ b/loopy/target/c/__init__.py @@ -194,7 +194,7 @@ def _preamble_generator(preamble_info): n = -n; }""") - yield("07_int_pow", f""" + yield(f"07_{func.c_name}", f""" inline {res_ctype} {func.c_name}({base_ctype} x, {exp_ctype} n) {{ if (n == 0) return 1; -- GitLab From c09fc4163af4c610764b75ac1a096bd222db8182 Mon Sep 17 00:00:00 2001 From: Kaushik Kulkarni Date: Sun, 10 Jan 2021 16:53:47 -0600 Subject: [PATCH 391/415] base type must be cast to tgt_type --- loopy/target/c/codegen/expression.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/loopy/target/c/codegen/expression.py b/loopy/target/c/codegen/expression.py index 4705c9c35..c50d89dc6 100644 --- a/loopy/target/c/codegen/expression.py +++ b/loopy/target/c/codegen/expression.py @@ -720,16 +720,14 @@ class ExpressionToCExpressionMapper(IdentityMapper): if exponent_dtype.is_integral(): from loopy.codegen import SeenFunction func_name = ("loopy_pow_" - f"{base_dtype.numpy_dtype}_{exponent_dtype.numpy_dtype}") + f"{tgt_dtype.numpy_dtype}_{exponent_dtype.numpy_dtype}") self.codegen_state.seen_functions.add( SeenFunction( "int_pow", func_name, - (base_dtype, exponent_dtype), + (tgt_dtype, exponent_dtype), (tgt_dtype, ))) - return var("loopy_pow_" - f"{base_dtype.numpy_dtype}_{exponent_dtype.numpy_dtype}")( - self.rec(expr.base), self.rec(expr.exponent)) + return var(func_name)(self.rec(expr.base), self.rec(expr.exponent)) else: return self.rec(var("pow")(expr.base, expr.exponent), type_context) -- GitLab From 1b43d409d2eaeb4ad52d70b7a7f7252b44f21913 Mon Sep 17 00:00:00 2001 From: Kaushik Kulkarni Date: Sun, 10 Jan 2021 18:07:22 -0600 Subject: [PATCH 392/415] tests float ** double --- test/test_loopy.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/test/test_loopy.py b/test/test_loopy.py index 18956dbc6..be595aaa5 100644 --- a/test/test_loopy.py +++ b/test/test_loopy.py @@ -2996,8 +2996,9 @@ def test_split_iname_within(ctx_factory): @pytest.mark.parametrize("base_type,exp_type", [(np.int32, np.uint32), (np.int64, - np.uint64), (np.int, np.float), (np.float, np.int), (np.int, np.int)]) -def test_int_pow(ctx_factory, base_type, exp_type): + np.uint64), (np.int, np.float), (np.float, np.int), (np.int, np.int), + (np.float32, np.float64), (np.float64, np.float32)]) +def test_pow(ctx_factory, base_type, exp_type): ctx = ctx_factory() queue = cl.CommandQueue(ctx) -- GitLab From 7e37f5effc01b4e685de2b0475600e18e3729049 Mon Sep 17 00:00:00 2001 From: Kaushik Kulkarni Date: Sun, 10 Jan 2021 18:08:20 -0600 Subject: [PATCH 393/415] emits opencl pow to explicitly typed pow variants --- loopy/target/opencl.py | 32 ++++++++++++++++++++++++++++++-- 1 file changed, 30 insertions(+), 2 deletions(-) diff --git a/loopy/target/opencl.py b/loopy/target/opencl.py index 230c73c6f..c409df380 100644 --- a/loopy/target/opencl.py +++ b/loopy/target/opencl.py @@ -28,7 +28,7 @@ import numpy as np from loopy.target.c import CFamilyTarget, CFamilyASTBuilder from loopy.target.c.codegen.expression import ExpressionToCExpressionMapper from pytools import memoize_method -from loopy.diagnostic import LoopyError +from loopy.diagnostic import LoopyError, LoopyTypeError from loopy.types import NumpyType from loopy.target.c import DTypeRegistryWrapper, c_math_mangler from loopy.kernel.data import AddressSpace, CallMangleInfo @@ -144,7 +144,6 @@ _CL_SIMPLE_MULTI_ARG_FUNCTIONS = { "rsqrt": 1, "clamp": 3, "atan2": 2, - "pow": 2, } @@ -182,6 +181,22 @@ def opencl_function_mangler(kernel, name, 
arg_dtypes): result_dtypes=(result_dtype,), arg_dtypes=2*(result_dtype,)) + if name == "pow" and len(arg_dtypes) == 2: + dtype = np.find_common_type( + [], [dtype.numpy_dtype for dtype in arg_dtypes]) + if dtype == np.float64: + name = "powf64" + elif dtype == np.float32: + name = "powf32" + else: + raise LoopyTypeError(f"'pow' does not support type {dtype}.") + + result_dtype = NumpyType(dtype) + return CallMangleInfo( + target_name=name, + result_dtypes=(result_dtype,), + arg_dtypes=2*(result_dtype,)) + if name == "dot": scalar_dtype, offset, field_name = arg_dtypes[0].numpy_dtype.fields["s0"] return CallMangleInfo( @@ -287,6 +302,19 @@ def opencl_preamble_generator(preamble_info): """ % dict(idx_ctype=kernel.target.dtype_to_typename( kernel.index_dtype)))) + for func in preamble_info.seen_functions: + if func.name == "pow" and func.c_name == "powf32": + yield("08_clpowf32", """ + inline float powf32(float x, float y) { + return pow(x, y); + }""") + + if func.name == "pow" and func.c_name == "powf64": + yield("08_clpowf64", """ + inline double powf64(double x, double y) { + return pow(x, y); + }""") + # }}} -- GitLab From cf2c43bb36890d94e9d4e0e750b4bc0e90900974 Mon Sep 17 00:00:00 2001 From: Kaushik Kulkarni Date: Sun, 10 Jan 2021 19:08:57 -0600 Subject: [PATCH 394/415] pass in the type context --- loopy/target/c/codegen/expression.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/loopy/target/c/codegen/expression.py b/loopy/target/c/codegen/expression.py index c50d89dc6..9ec99c784 100644 --- a/loopy/target/c/codegen/expression.py +++ b/loopy/target/c/codegen/expression.py @@ -727,7 +727,8 @@ class ExpressionToCExpressionMapper(IdentityMapper): "int_pow", func_name, (tgt_dtype, exponent_dtype), (tgt_dtype, ))) - return var(func_name)(self.rec(expr.base), self.rec(expr.exponent)) + return var(func_name)(self.rec(expr.base, type_context), + self.rec(expr.exponent, type_context)) else: return self.rec(var("pow")(expr.base, expr.exponent), type_context) -- GitLab From 290ee93128d9862a7d354df04fe1dfb3df3083e8 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Mon, 11 Jan 2021 00:08:37 -0600 Subject: [PATCH 395/415] Draft description of computations expressible in Loopy --- doc/ref_kernel.rst | 59 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 59 insertions(+) diff --git a/doc/ref_kernel.rst b/doc/ref_kernel.rst index d339e1b19..b9f8cced0 100644 --- a/doc/ref_kernel.rst +++ b/doc/ref_kernel.rst @@ -3,6 +3,65 @@ Reference: Loopy's Model of a Kernel ==================================== +What Types of Computation can a Loopy Kernel Express? +----------------------------------------------------- + +Loopy kernels consist of an a-priori unordered set of statements, operating +on :math:`n`-dimensional arrays. + +Arrays consist of "plain old data" and structures thereof, as describable +by a :class:`numpy.dtype`. The n-dimensional shape of these arrays is +given by a tuple of expressions at most affine in parameters that are +fixed for the duration of program execution. + +A statement (still called 'instruction' in some places, cf. +:class:`loopy.Instruction`) encodes an assignment to an entry of an array. +The right-hand side of an assignment consists of an expression that may +consist of arithmetic and calls to functions. +If the outermost operation of the RHS expression is a function call, +the RHS value may be a tuple, and multiple (still scalar) arrays appear +as LHS values. (This is the only sense in which tuple types are supported.) 
+Each statement is parametrized by zero or more loop variables ("inames"). +A statement is executed once for each integer point defined by the domain +forest for the iname tuple. Each execution is called a statement instance. +Dependencies between these instances as well as instances of other +statements are encoded in the program representation specify permissible +execution orderings. (The semantics of the dependencies are `being +sharpened `__.) Assignments +(comprising the evaluation of the RHS and the assignment to the LHS) may +be specified to be atomic. + +The basic building blocks of the domain forest are sets given as +conjunctions of equalities and inequalities of quasi-affine expressions on +integer tuples, called domains. The entries of each integer tuple are +either *parameters* or *inames*. Each domain may optionally have a *parent +domain*. Parameters of parent-less domains are given by value arguments +supplied to the program that will remain unchanged during program +execution. Parameters of domains with parents may be + +- run-time-constant value arguments to the program, or +- inames from parent domains, or +- scalar, integer temporary variables that are written by statements + with iteration domains controlled by a parent domain. + +For each tuple of parameter values, the set of iname tuples must be +finite. Each iname is defined by exactly one domain. + +For a tuple of inames, the domain forest defines an iteration domain +by finding all the domains defining the inames involved, along with their +parent domains. The resulting tree of domains may contain multiple roots, +but no branches. The iteration domain is then constructed by intersecting +these domains and constructing the projection of that set onto the space +given by the required iname tuple. Observe that, via the parent-child +domain mechanism, imperfectly-nested and data-dependent loops become +expressible. + +The set of functions callable from the language is predefined by the system. +Additional functions may be defined by the user by registering them. It is +not currently possible to define functions from within Loopy, however work +is progressing on permitting this. Even once this is allowed, recursion +will not be permitted. + .. _domain-tree: Loop Domain Forest -- GitLab From 9144a4059333675f7f82715498c9fa51060b4c05 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Mon, 11 Jan 2021 00:10:23 -0600 Subject: [PATCH 396/415] Un-ignore doc PRs for Github CI --- .github/workflows/ci.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 7d8101763..05b2e3237 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -4,8 +4,6 @@ on: branches: - master pull_request: - paths-ignore: - - 'doc/*.rst' schedule: - cron: '17 3 * * 0' -- GitLab From 2ff9c5ebc3cdd0b5ec3ee1d1fa4fd9b1dad3780b Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Mon, 11 Jan 2021 10:22:47 -0600 Subject: [PATCH 397/415] Fix InstructionBase reference in expressible-computations description --- doc/ref_kernel.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/ref_kernel.rst b/doc/ref_kernel.rst index b9f8cced0..068ccbe73 100644 --- a/doc/ref_kernel.rst +++ b/doc/ref_kernel.rst @@ -15,7 +15,7 @@ given by a tuple of expressions at most affine in parameters that are fixed for the duration of program execution. A statement (still called 'instruction' in some places, cf. -:class:`loopy.Instruction`) encodes an assignment to an entry of an array. 
+:class:`loopy.InstructionBase`) encodes an assignment to an entry of an array. The right-hand side of an assignment consists of an expression that may consist of arithmetic and calls to functions. If the outermost operation of the RHS expression is a function call, -- GitLab From 76bcc1fc8ac89f2340ec4f44ec0798dcd99767cc Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Mon, 11 Jan 2021 10:25:07 -0600 Subject: [PATCH 398/415] Describe variables in expressible-computations (thanks @kaushikcfd) --- doc/ref_kernel.rst | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/doc/ref_kernel.rst b/doc/ref_kernel.rst index 068ccbe73..aff99c7c6 100644 --- a/doc/ref_kernel.rst +++ b/doc/ref_kernel.rst @@ -3,16 +3,20 @@ Reference: Loopy's Model of a Kernel ==================================== -What Types of Computation can a Loopy Kernel Express? ------------------------------------------------------ +What Types of Computation can a Loopy Program Express? +------------------------------------------------------ -Loopy kernels consist of an a-priori unordered set of statements, operating -on :math:`n`-dimensional arrays. +Loopy programs consist of an a-priori unordered set of statements, operating +on :math:`n`-dimensional array variables. Arrays consist of "plain old data" and structures thereof, as describable by a :class:`numpy.dtype`. The n-dimensional shape of these arrays is given by a tuple of expressions at most affine in parameters that are fixed for the duration of program execution. +Each array variable in the program is either an argument or a temporary +variable. A temporary variable is only live within the program, while +argument variables are accessible outside the program and constitute the +program's inputs and outputs. A statement (still called 'instruction' in some places, cf. :class:`loopy.InstructionBase`) encodes an assignment to an entry of an array. -- GitLab From 25af6b96a08bed5fa4b0de983ec26873365287a2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Kl=C3=B6ckner?= Date: Mon, 11 Jan 2021 10:25:57 -0600 Subject: [PATCH 399/415] Expressible computations: arithmetic -> arithmetic operations Co-authored-by: Kaushik Kulkarni <15399010+kaushikcfd@users.noreply.github.com> --- doc/ref_kernel.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/ref_kernel.rst b/doc/ref_kernel.rst index aff99c7c6..392ebe726 100644 --- a/doc/ref_kernel.rst +++ b/doc/ref_kernel.rst @@ -21,7 +21,7 @@ program's inputs and outputs. A statement (still called 'instruction' in some places, cf. :class:`loopy.InstructionBase`) encodes an assignment to an entry of an array. The right-hand side of an assignment consists of an expression that may -consist of arithmetic and calls to functions. +consist of arithmetic operations and calls to functions. If the outermost operation of the RHS expression is a function call, the RHS value may be a tuple, and multiple (still scalar) arrays appear as LHS values. (This is the only sense in which tuple types are supported.) 
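The documentation added in the patches above describes the Loopy programming model: domains defining inames, statements executed once per integer point, and argument arrays versus temporary variables. A minimal kernel in that model might look like the following sketch (illustrative only, not taken from the patch series):

    import numpy as np
    import loopy as lp

    knl = lp.make_kernel(
        # one domain defining the iname "i"; "n" is a value argument (parameter)
        "{ [i]: 0 <= i < n }",
        """
        <> tmp = 2*a[i]     {id=compute}
        out[i] = tmp + 1    {dep=compute}
        """,
        [lp.GlobalArg("a", np.float64, shape=("n",)),
         lp.GlobalArg("out", np.float64, shape=("n",)),
         lp.ValueArg("n", np.int32)])

Here "a" and "out" are argument arrays (the program's inputs and outputs), "tmp" is a temporary variable, and each of the two statements is instanced once per value of i in the domain, with the dep=compute edge ordering the instances.
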
-- GitLab From 3f5e253a00af958fdeee7ae74b3854e99329b235 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Kl=C3=B6ckner?= Date: Tue, 12 Jan 2021 09:08:50 -0600 Subject: [PATCH 400/415] Fix constant type inference in float context --- loopy/type_inference.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/loopy/type_inference.py b/loopy/type_inference.py index 64337864f..787966efc 100644 --- a/loopy/type_inference.py +++ b/loopy/type_inference.py @@ -216,8 +216,12 @@ class TypeInferenceMapper(CombineMapper): # Numpy types are sized return [NumpyType(np.dtype(type(expr)))] elif dt.kind == "f": - # deduce the smaller type by default - return [NumpyType(np.dtype(np.float32))] + if np.float32(expr) == np.float64(expr): + # No precision is lost by 'guessing' single precision, use that. + # This at least covers simple cases like '1j'. + return [NumpyType(np.dtype(np.float32))] + + return [NumpyType(np.dtype(np.float64))] elif dt.kind == "c": if np.complex64(expr) == np.complex128(expr): # (COMPLEX_GUESS_LOGIC) -- GitLab From a541c8b2f3e034cbd394d40872e0aa357b35406d Mon Sep 17 00:00:00 2001 From: Kaushik Kulkarni Date: Tue, 12 Jan 2021 11:24:09 -0600 Subject: [PATCH 401/415] explicitly specify 3.14 to be f32 --- test/test_statistics.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/test_statistics.py b/test/test_statistics.py index 757f59e86..bcdc542cb 100644 --- a/test/test_statistics.py +++ b/test/test_statistics.py @@ -1070,7 +1070,7 @@ def test_floor_div_coefficient_collector(): [ "for i_outer", "for j_outer", - "<> loc[i_inner,j_inner] = 3.14 {id=loc_init}", + "<> loc[i_inner,j_inner] = 3.14f {id=loc_init}", "loc[i_inner,(j_inner+r+4) %% %d] = loc[i_inner,(j_inner+r) %% %d]" " {id=add,dep=loc_init}" % (bsize, bsize), "out0[i_outer*16+i_inner,j_outer*16+j_inner] = loc[i_inner,j_inner]" -- GitLab From 73db73a8a78a376682ce06de1772ff2e35210312 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Wed, 13 Jan 2021 17:23:25 -0600 Subject: [PATCH 402/415] Small tweaks to the description of expressible computations --- doc/ref_kernel.rst | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/doc/ref_kernel.rst b/doc/ref_kernel.rst index 392ebe726..f399d812e 100644 --- a/doc/ref_kernel.rst +++ b/doc/ref_kernel.rst @@ -27,9 +27,11 @@ the RHS value may be a tuple, and multiple (still scalar) arrays appear as LHS values. (This is the only sense in which tuple types are supported.) Each statement is parametrized by zero or more loop variables ("inames"). A statement is executed once for each integer point defined by the domain -forest for the iname tuple. Each execution is called a statement instance. -Dependencies between these instances as well as instances of other -statements are encoded in the program representation specify permissible +forest for the iname tuple given for that statement +(:attr:`loopy.InstructionBase.within_inames`). Each execution of a +statement (with specific values of the inames) is called a *statement +instance*. Dependencies between these instances as well as instances of +other statements are encoded in the program representation and specify permissible execution orderings. (The semantics of the dependencies are `being sharpened `__.) Assignments (comprising the evaluation of the RHS and the assignment to the LHS) may @@ -37,7 +39,8 @@ be specified to be atomic. 
The basic building blocks of the domain forest are sets given as conjunctions of equalities and inequalities of quasi-affine expressions on -integer tuples, called domains. The entries of each integer tuple are +integer tuples, called domains, and represented as instances of +:class:`islpy.BasicSet`. The entries of each integer tuple are either *parameters* or *inames*. Each domain may optionally have a *parent domain*. Parameters of parent-less domains are given by value arguments supplied to the program that will remain unchanged during program @@ -48,7 +51,7 @@ execution. Parameters of domains with parents may be - scalar, integer temporary variables that are written by statements with iteration domains controlled by a parent domain. -For each tuple of parameter values, the set of iname tuples must be +For each tuple of concrete parameter values, the set of iname tuples must be finite. Each iname is defined by exactly one domain. For a tuple of inames, the domain forest defines an iteration domain -- GitLab From a985bf7b8437e3e175a790cb957082309928dd1d Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Sun, 17 Jan 2021 18:12:30 -0600 Subject: [PATCH 403/415] Drop an old BPL workaround for integers in invoker generation --- loopy/target/pyopencl.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/loopy/target/pyopencl.py b/loopy/target/pyopencl.py index a17416c47..8b329c1a1 100644 --- a/loopy/target/pyopencl.py +++ b/loopy/target/pyopencl.py @@ -509,14 +509,6 @@ def generate_value_arg_setup(kernel, devices, implemented_data_info): Raise('RuntimeError("input argument \'{name}\' ' 'must be supplied")'.format(name=idi.name)))) - if idi.dtype.is_integral(): - gen(Comment("cast to Python int to avoid trouble " - "with struct packing or Boost.Python")) - py_type = "int" - - gen(Assign(idi.name, f"{py_type}({idi.name})")) - gen(Line()) - if idi.dtype.is_composite(): gen(S("_lpy_knl.set_arg(%d, %s)" % (cl_arg_idx, idi.name))) cl_arg_idx += 1 -- GitLab From b967d74209bd8de0a06aa1411876763fabdbe3ff Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Sun, 17 Jan 2021 18:13:01 -0600 Subject: [PATCH 404/415] Use PyOpenCL's internal _set_arg_buf interface to speed up ValueArg passing in invoker --- loopy/target/pyopencl.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/loopy/target/pyopencl.py b/loopy/target/pyopencl.py index 8b329c1a1..2bd72efa8 100644 --- a/loopy/target/pyopencl.py +++ b/loopy/target/pyopencl.py @@ -570,7 +570,7 @@ def generate_value_arg_setup(kernel, devices, implemented_data_info): fp_arg_count += 1 gen(S( - "_lpy_knl.set_arg(%d, _lpy_pack('%s', %s))" + "_lpy_knl._set_arg_buf(%d, _lpy_pack('%s', %s))" % (cl_arg_idx, idi.dtype.dtype.char, idi.name))) cl_arg_idx += 1 -- GitLab From 77860f0762f9d5fed189ef082860cec6cc365874 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Mon, 18 Jan 2021 00:37:38 -0600 Subject: [PATCH 405/415] Drop redundant future import in invoker --- loopy/target/execution.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/loopy/target/execution.py b/loopy/target/execution.py index 74819b939..d22d020b7 100644 --- a/loopy/target/execution.py +++ b/loopy/target/execution.py @@ -639,8 +639,6 @@ class ExecutionWrapperGeneratorBase: if issubclass(idi.arg_class, KernelArgument) ]) - gen.add_to_preamble("from __future__ import division") - gen.add_to_preamble("") self.target_specific_preamble(gen) gen.add_to_preamble("") self.generate_host_code(gen, codegen_result) -- GitLab From d8967594c56273d477cca9b381cacb447c1c2cf7 Mon Sep 
17 00:00:00 2001 From: Andreas Kloeckner Date: Mon, 18 Jan 2021 00:39:08 -0600 Subject: [PATCH 406/415] Globalize imports in pyopencl invoker --- loopy/target/pyopencl.py | 6 +----- loopy/target/pyopencl_execution.py | 1 + 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/loopy/target/pyopencl.py b/loopy/target/pyopencl.py index 2bd72efa8..4bc768b56 100644 --- a/loopy/target/pyopencl.py +++ b/loopy/target/pyopencl.py @@ -624,15 +624,11 @@ class PyOpenCLPythonASTBuilder(PythonASTBuilderBase): if not issubclass(idi.arg_class, TemporaryVariable)] + ["wait_for=None", "allocator=None"]) - from genpy import (For, Function, Suite, Import, ImportAs, Return, - FromImport, Line, Statement as S) + from genpy import (For, Function, Suite, Return, Line, Statement as S) return Function( codegen_result.current_program(codegen_state).name, args, Suite([ - FromImport("struct", ["pack as _lpy_pack"]), - ImportAs("pyopencl", "_lpy_cl"), - Import("pyopencl.tools"), Line(), ] + [ Line(), diff --git a/loopy/target/pyopencl_execution.py b/loopy/target/pyopencl_execution.py index 7fc20f191..cdee5600b 100644 --- a/loopy/target/pyopencl_execution.py +++ b/loopy/target/pyopencl_execution.py @@ -142,6 +142,7 @@ class PyOpenCLExecutionWrapperGenerator(ExecutionWrapperGeneratorBase): gen.add_to_preamble("import pyopencl as _lpy_cl") gen.add_to_preamble("import pyopencl.array as _lpy_cl_array") gen.add_to_preamble("import pyopencl.tools as _lpy_cl_tools") + gen.add_to_preamble("from struct import pack as _lpy_pack") def initialize_system_args(self, gen): """ -- GitLab From ba57b237e4866b2288ccc7a167dfe081f8cf95f3 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Mon, 18 Jan 2021 00:39:45 -0600 Subject: [PATCH 407/415] Do not emit global temporaries handling in invoker if no global temporaries exist --- loopy/target/pyopencl.py | 29 ++++++++++++++++------------- 1 file changed, 16 insertions(+), 13 deletions(-) diff --git a/loopy/target/pyopencl.py b/loopy/target/pyopencl.py index 4bc768b56..ed44daa2f 100644 --- a/loopy/target/pyopencl.py +++ b/loopy/target/pyopencl.py @@ -634,11 +634,12 @@ class PyOpenCLPythonASTBuilder(PythonASTBuilderBase): Line(), function_body, Line(), - ] + [ - For("_tv", "_global_temporaries", - # free global temporaries - S("_tv.release()")) - ] + [ + ] + ([ + For("_tv", "_global_temporaries", + # free global temporaries + S("_tv.release()")) + ] if self._get_global_temporaries(codegen_state) else [] + ) + [ Line(), Return("_lpy_evt"), ])) @@ -648,6 +649,14 @@ class PyOpenCLPythonASTBuilder(PythonASTBuilderBase): # no such thing in Python return None + def _get_global_temporaries(self, codegen_state): + from loopy.kernel.data import AddressSpace + + return sorted( + (tv for tv in codegen_state.kernel.temporary_variables.values() + if tv.address_space == AddressSpace.GLOBAL), + key=lambda tv: tv.name) + def get_temporary_decls(self, codegen_state, schedule_state): from genpy import Assign, Comment, Line @@ -656,18 +665,12 @@ class PyOpenCLPythonASTBuilder(PythonASTBuilderBase): from operator import mul return tv.dtype.numpy_dtype.itemsize * reduce(mul, tv.shape, 1) - from loopy.kernel.data import AddressSpace - - global_temporaries = sorted( - (tv for tv in codegen_state.kernel.temporary_variables.values() - if tv.address_space == AddressSpace.GLOBAL), - key=lambda tv: tv.name) - from pymbolic.mapper.stringifier import PREC_NONE ecm = self.get_expression_to_code_mapper(codegen_state) + global_temporaries = self._get_global_temporaries(codegen_state) if not global_temporaries: - 
return [Assign("_global_temporaries", "[]"), Line()] + return [] return [ Comment("{{{ allocate global temporaries"), -- GitLab From f5dcf9d6febbad98c98ec9405f739a56a279e924 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Mon, 18 Jan 2021 11:54:20 -0600 Subject: [PATCH 408/415] Use positional args in enqueue_nd_range_kernel in invoker --- loopy/target/pyopencl.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/loopy/target/pyopencl.py b/loopy/target/pyopencl.py index ed44daa2f..8d0c309b0 100644 --- a/loopy/target/pyopencl.py +++ b/loopy/target/pyopencl.py @@ -725,8 +725,13 @@ class PyOpenCLPythonASTBuilder(PythonASTBuilderBase): arry_arg_code, Assign("_lpy_evt", "%(pyopencl_module_name)s.enqueue_nd_range_kernel(" "queue, _lpy_knl, " - "%(gsize)s, %(lsize)s, wait_for=wait_for, " - "g_times_l=True, allow_empty_ndrange=True)" + "%(gsize)s, %(lsize)s, " + # using positional args because pybind is slow with kwargs + "None, " # offset + "wait_for, " + "True, " # g_times_l + "True, " # allow_empty_ndrange + ")" % dict( pyopencl_module_name=self.target.pyopencl_module_name, gsize=ecm(gsize, prec=PREC_NONE, type_context="i"), -- GitLab From cc47cedc341c036bcbe5ab5079097c89fb6f5b26 Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Tue, 19 Jan 2021 00:45:12 -0600 Subject: [PATCH 409/415] Fix handling of skip_arg_checks in stride value finding --- loopy/target/execution.py | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/loopy/target/execution.py b/loopy/target/execution.py index d22d020b7..74887155b 100644 --- a/loopy/target/execution.py +++ b/loopy/target/execution.py @@ -281,20 +281,20 @@ class ExecutionWrapperGeneratorBase: 'passed array")' % (arg.name, impl_array_name)) - base_arg = kernel.impl_arg_to_arg[impl_array_name] - - if not options.skip_arg_checks: - gen("%s, _lpy_remdr = divmod(%s.strides[%d], %d)" - % (arg.name, impl_array_name, stride_impl_axis, - base_arg.dtype.dtype.itemsize)) + base_arg = kernel.impl_arg_to_arg[impl_array_name] - gen("assert _lpy_remdr == 0, \"Stride %d of array '%s' " - ' is not divisible by its dtype itemsize"' - % (stride_impl_axis, impl_array_name)) - gen("del _lpy_remdr") - else: - gen("%s = _lpy_offset // %d" - % (arg.name, base_arg.dtype.itemsize)) + if not options.skip_arg_checks: + gen("%s, _lpy_remdr = divmod(%s.strides[%d], %d)" + % (arg.name, impl_array_name, stride_impl_axis, + base_arg.dtype.dtype.itemsize)) + + gen("assert _lpy_remdr == 0, \"Stride %d of array '%s' " + ' is not divisible by its dtype itemsize"' + % (stride_impl_axis, impl_array_name)) + gen("del _lpy_remdr") + else: + gen("%s = _lpy_offset // %d" + % (arg.name, base_arg.dtype.itemsize)) gen("# }}}") gen("") -- GitLab From 5ca67b040f1bcb4c7d35c981c55d6b93ccd71fbb Mon Sep 17 00:00:00 2001 From: Andreas Kloeckner Date: Tue, 19 Jan 2021 00:45:48 -0600 Subject: [PATCH 410/415] Determine skip_arg_checks default according to value of python -O flag --- loopy/options.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/loopy/options.py b/loopy/options.py index 2dc8f22cd..46ff37947 100644 --- a/loopy/options.py +++ b/loopy/options.py @@ -98,6 +98,12 @@ class Options(ImmutableRecord): Do not do any checking (data type, data layout, shape, etc.) on arguments for a minor performance gain. + .. versionchanged:: 2021.1 + + This now defaults to the same value as the ``optimize`` + sub-flag from :data:`sys.flags`. This flag can be controlled + (i.e. 
set to *True*) by running Python with the ``-O`` flag. + .. attribute:: no_numpy Do not check for or accept :mod:`numpy` arrays as @@ -196,6 +202,7 @@ class Options(ImmutableRecord): allow_terminal_colors_def = ( ALLOW_TERMINAL_COLORS and allow_terminal_colors_def) + import sys ImmutableRecord.__init__( self, @@ -203,7 +210,7 @@ class Options(ImmutableRecord): trace_assignments=kwargs.get("trace_assignments", False), trace_assignment_values=kwargs.get("trace_assignment_values", False), - skip_arg_checks=kwargs.get("skip_arg_checks", False), + skip_arg_checks=kwargs.get("skip_arg_checks", sys.flags.optimize), no_numpy=kwargs.get("no_numpy", False), cl_exec_manage_array_events=kwargs.get("no_numpy", True), return_dict=kwargs.get("return_dict", False), -- GitLab From d88b1e9fa9bc8c0832060424cb0a3d1390a5692c Mon Sep 17 00:00:00 2001 From: Isuru Fernando Date: Sat, 23 Jan 2021 20:09:15 -0600 Subject: [PATCH 411/415] Compare insns by id only for list remove --- loopy/schedule/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/loopy/schedule/__init__.py b/loopy/schedule/__init__.py index fde967c65..3fcdc4bdf 100644 --- a/loopy/schedule/__init__.py +++ b/loopy/schedule/__init__.py @@ -1046,7 +1046,8 @@ def generate_loop_schedules_internal( new_insn_ids_to_try = None new_toposorted_insns = sched_state.insns_in_topologically_sorted_order[:] - new_toposorted_insns.remove(insn) + new_toposorted_insns = \ + list(filter(lambda x: x.id != insn.id, new_toposorted_insns)) # }}} -- GitLab From b9cb3a0f1d64a4a9844469c997270c4c73a194f4 Mon Sep 17 00:00:00 2001 From: Isuru Fernando Date: Sat, 23 Jan 2021 20:55:08 -0600 Subject: [PATCH 412/415] Add a comment and remove unnecessary copy --- loopy/schedule/__init__.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/loopy/schedule/__init__.py b/loopy/schedule/__init__.py index 3fcdc4bdf..bb52430d6 100644 --- a/loopy/schedule/__init__.py +++ b/loopy/schedule/__init__.py @@ -1045,9 +1045,10 @@ def generate_loop_schedules_internal( sched_state.active_group_counts.keys()): new_insn_ids_to_try = None - new_toposorted_insns = sched_state.insns_in_topologically_sorted_order[:] + # explicitly use id to compare to avoid performance issues like #199 new_toposorted_insns = \ - list(filter(lambda x: x.id != insn.id, new_toposorted_insns)) + list(filter(lambda x: x.id != insn.id, + sched_state.insns_in_topologically_sorted_order)) # }}} -- GitLab From e35486bcc4aa5d1ddabf2ff9427eb88b09389e35 Mon Sep 17 00:00:00 2001 From: Isuru Fernando Date: Sun, 24 Jan 2021 13:04:19 -0600 Subject: [PATCH 413/415] Use list comprehension --- loopy/schedule/__init__.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/loopy/schedule/__init__.py b/loopy/schedule/__init__.py index bb52430d6..ccfe0d5ff 100644 --- a/loopy/schedule/__init__.py +++ b/loopy/schedule/__init__.py @@ -1046,9 +1046,8 @@ def generate_loop_schedules_internal( new_insn_ids_to_try = None # explicitly use id to compare to avoid performance issues like #199 - new_toposorted_insns = \ - list(filter(lambda x: x.id != insn.id, - sched_state.insns_in_topologically_sorted_order)) + new_toposorted_insns = [x for x in + sched_state.insns_in_topologically_sorted_order if x.id != insn.id] # }}} -- GitLab From a8a48fcbc6c92e619c8f7f67931a9b4703a619e0 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 25 Jan 2021 01:26:37 -0600 Subject: [PATCH 414/415] update after renaming of get_lex_order_constraint->get_lex_order_set --- loopy/schedule/checker/dependency.py | 2 +- 
1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/loopy/schedule/checker/dependency.py b/loopy/schedule/checker/dependency.py index 1fced6968..d3d37834b 100644 --- a/loopy/schedule/checker/dependency.py +++ b/loopy/schedule/checker/dependency.py @@ -257,7 +257,7 @@ def create_legacy_dependency_constraint( from loopy.schedule.checker import ( lexicographic_order_map as lom) # TODO handle case where inames list is empty - constraint_set = lom.get_lex_order_constraint( + constraint_set = lom.get_lex_order_set( inames_list_nest_ordered_prime, inames_list_nest_ordered, islvars, -- GitLab From 3ced956b5eb14e9068a35c19fc46a82ebb33e516 Mon Sep 17 00:00:00 2001 From: jdsteve2 Date: Mon, 25 Jan 2021 01:55:53 -0600 Subject: [PATCH 415/415] rename the following after changes in parent branches: insert_missing_dims_and_reorder_by_name->reorder_dims_by_name, list_var_names_in_isl_sets->sorted_union_of_names_in_isl_sets, get_concurrent_inames->partition_inames_by_concurrency, _lp_linchk_statement->_lp_linchk_stmt --- loopy/schedule/checker/dependency.py | 20 ++++++++--------- .../example_pairwise_schedule_validity.py | 4 ++-- .../example_wave_equation.py | 22 +++++++++---------- 3 files changed, 23 insertions(+), 23 deletions(-) diff --git a/loopy/schedule/checker/dependency.py b/loopy/schedule/checker/dependency.py index 6717c4681..e8855f230 100644 --- a/loopy/schedule/checker/dependency.py +++ b/loopy/schedule/checker/dependency.py @@ -340,7 +340,7 @@ def _create_5pt_stencil_dependency_constraint( make_islvars_with_marker, append_apostrophes, add_dims_to_isl_set, - insert_missing_dims_and_reorder_by_name, + reorder_dims_by_name, append_marker_to_isl_map_var_names, ) from loopy.schedule.checker.schedule import STATEMENT_VAR_NAME @@ -348,10 +348,10 @@ def _create_5pt_stencil_dependency_constraint( # Statement [s,i,j] comes before statement [s',i',j'] iff from loopy.schedule.checker.utils import ( - list_var_names_in_isl_sets, + sorted_union_of_names_in_isl_sets, ) if all_dom_inames_ordered is None: - all_dom_inames_ordered = list_var_names_in_isl_sets( + all_dom_inames_ordered = sorted_union_of_names_in_isl_sets( [dom_before_constraint_set, dom_after_constraint_set]) # create some (ordered) isl vars to use, e.g., {s, i, j, s', i', j'} @@ -417,10 +417,10 @@ def _create_5pt_stencil_dependency_constraint( [statement_var_name_prime], statement_var_idx) # insert inames missing from doms to enable intersection - domain_to_intersect = insert_missing_dims_and_reorder_by_name( + domain_to_intersect = reorder_dims_by_name( domain_to_intersect, isl.dim_type.out, append_apostrophes([STATEMENT_VAR_NAME] + all_dom_inames_ordered)) - range_to_intersect = insert_missing_dims_and_reorder_by_name( + range_to_intersect = reorder_dims_by_name( range_to_intersect, isl.dim_type.out, [STATEMENT_VAR_NAME] + all_dom_inames_ordered) @@ -449,7 +449,7 @@ def create_arbitrary_dependency_constraint( #append_apostrophes, append_marker_to_strings, add_dims_to_isl_set, - insert_missing_dims_and_reorder_by_name, + reorder_dims_by_name, append_marker_to_isl_map_var_names, ) from loopy.schedule.checker.schedule import STATEMENT_VAR_NAME @@ -457,10 +457,10 @@ def create_arbitrary_dependency_constraint( # Statement [s,i,j] comes before statement [s',i',j'] iff from loopy.schedule.checker.utils import ( - list_var_names_in_isl_sets, + sorted_union_of_names_in_isl_sets, ) if all_dom_inames_ordered is None: - all_dom_inames_ordered = list_var_names_in_isl_sets( + all_dom_inames_ordered = sorted_union_of_names_in_isl_sets( 
[dom_before_constraint_set, dom_after_constraint_set]) # create some (ordered) isl vars to use, e.g., {s, i, j, s', i', j'} @@ -552,11 +552,11 @@ def create_arbitrary_dependency_constraint( [statement_var_name_prime], statement_var_idx) # insert inames missing from doms to enable intersection - domain_to_intersect = insert_missing_dims_and_reorder_by_name( + domain_to_intersect = reorder_dims_by_name( domain_to_intersect, isl.dim_type.out, append_marker_to_strings( # TODO figure out before/after notation [STATEMENT_VAR_NAME] + all_dom_inames_ordered, "p")) - range_to_intersect = insert_missing_dims_and_reorder_by_name( + range_to_intersect = reorder_dims_by_name( range_to_intersect, isl.dim_type.out, [STATEMENT_VAR_NAME] + all_dom_inames_ordered) diff --git a/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py b/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py index 95eaf1312..3e4999cd4 100644 --- a/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py +++ b/loopy/schedule/checker/experimental_scripts/example_pairwise_schedule_validity.py @@ -334,9 +334,9 @@ print("="*80) # create a graph including these deps as edges (from after->before) from loopy.schedule.checker.utils import ( - get_concurrent_inames, + partition_inames_by_concurrency, ) -_, non_conc_inames = get_concurrent_inames(knl) +_, non_conc_inames = partition_inames_by_concurrency(knl) legacy_deps_filtered_by_same = filter_deps_by_intersection_with_SAME( knl, deps, diff --git a/loopy/schedule/checker/experimental_scripts/example_wave_equation.py b/loopy/schedule/checker/experimental_scripts/example_wave_equation.py index ed2da94e5..8e168a5ef 100644 --- a/loopy/schedule/checker/experimental_scripts/example_wave_equation.py +++ b/loopy/schedule/checker/experimental_scripts/example_wave_equation.py @@ -33,7 +33,7 @@ from loopy.schedule.checker.utils import ( prettier_map_string, ensure_dim_names_match_and_align, append_marker_to_isl_map_var_names, - get_concurrent_inames, + partition_inames_by_concurrency, ) from loopy.schedule.checker.dependency import ( create_arbitrary_dependency_constraint, @@ -169,7 +169,7 @@ if verbose: # }}} -conc_loop_inames, _ = get_concurrent_inames(linearized_knl) +conc_loop_inames, _ = partition_inames_by_concurrency(linearized_knl) # Create a mapping of {statement instance: lex point} # including only instructions involved in this dependency @@ -290,9 +290,9 @@ if not linearization_is_valid: print(preprocessed_knl.loop_priority) """ from loopy.schedule.checker.utils import ( - get_concurrent_inames, + partition_inames_by_concurrency, ) - conc_inames, non_conc_inames = get_concurrent_inames(linearized_knl) + conc_inames, non_conc_inames = partition_inames_by_concurrency(linearized_knl) print("concurrent inames:", conc_inames) print("sequential inames:", non_conc_inames) print("constraint map space (stmt instances -> stmt instances):") @@ -348,8 +348,8 @@ m = isl.BasicMap( "16*(tx + tt + tparity) + itt + itx = ix + it and " "0<=tparity<2 and 0 <= itx - itt < 16 and 0 <= itt+itx < 16}") m2 = isl.BasicMap( - "[nx,nt,unused] -> {[_lp_linchk_statement, ix, it] -> " - "[_lp_linchk_statement'=_lp_linchk_statement, tx, tt, tparity, itt, itx]: " + "[nx,nt,unused] -> {[_lp_linchk_stmt, ix, it] -> " + "[_lp_linchk_stmt'=_lp_linchk_stmt, tx, tt, tparity, itt, itx]: " "16*(tx - tt) + itx - itt = ix - it and " "16*(tx + tt + tparity) + itt + itx = ix + it and " "0<=tparity<2 and 0 <= itx - itt < 16 and 0 <= itt+itx < 16}") @@ 
-360,8 +360,8 @@ m2 = isl.BasicMap( # "16*(tx' + tt' + tparity') + itt' + itx' = ix + it and " # "0<=tparity'<2 and 0 <= itx' - itt' < 16 and 0 <= itt'+itx' < 16}") m2_prime = isl.BasicMap( - "[nx,nt,unused] -> {[_lp_linchk_statement', ix', it'] -> " - "[_lp_linchk_statement=_lp_linchk_statement', tx, tt, tparity, itt, itx]: " + "[nx,nt,unused] -> {[_lp_linchk_stmt', ix', it'] -> " + "[_lp_linchk_stmt=_lp_linchk_stmt', tx, tt, tparity, itt, itx]: " "16*(tx - tt) + itx - itt = ix' - it' and " "16*(tx + tt + tparity) + itt + itx = ix' + it' and " "0<=tparity<2 and 0 <= itx - itt < 16 and 0 <= itt+itx < 16}") @@ -438,7 +438,7 @@ if verbose: # }}} -conc_loop_inames, _ = get_concurrent_inames(linearized_knl) +conc_loop_inames, _ = partition_inames_by_concurrency(linearized_knl) # Create a mapping of {statement instance: lex point} # including only instructions involved in this dependency sched = PairwiseScheduleBuilder( @@ -558,9 +558,9 @@ if not linearization_is_valid: print(preprocessed_knl.loop_priority) """ from loopy.schedule.checker.utils import ( - get_concurrent_inames, + partition_inames_by_concurrency, ) - conc_inames, non_conc_inames = get_concurrent_inames(linearized_knl) + conc_inames, non_conc_inames = partition_inames_by_concurrency(linearized_knl) print("concurrent inames:", conc_inames) print("sequential inames:", non_conc_inames) print("constraint map space (stmt instances -> stmt instances):") -- GitLab