diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 96de6c9c4407efd52b4fc02885bde228d9392336..69021af24fd195bcc432ac9a38537106ec4f986d 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -59,7 +59,18 @@ Flake8: - curl -L -O -k https://gitlab.tiker.net/inducer/ci-support/raw/master/prepare-and-run-flake8.sh - ". ./prepare-and-run-flake8.sh pytools test" tags: - - python3.5 + - python3.6 + except: + - tags + +Pylint: + script: + - EXTRA_INSTALL="pymbolic matplotlib mpi4py" + - py_version=3.6 + - curl -L -O -k https://gitlab.tiker.net/inducer/ci-support/raw/master/prepare-and-run-pylint.sh + - ". ./prepare-and-run-pylint.sh pytools test/test_*.py" + tags: + - python3.6 except: - tags diff --git a/pytools/__init__.py b/pytools/__init__.py index 6c03ef7fbe66dc5391202fa67f41d4b93e1d2210..57e33787e7660dd15729fd633d1d892e21946825 100644 --- a/pytools/__init__.py +++ b/pytools/__init__.py @@ -1,3 +1,6 @@ +# pylint: disable=too-many-lines +# (Yes, it has a point!) + from __future__ import division, absolute_import, print_function __copyright__ = "Copyright (C) 2009-2013 Andreas Kloeckner" @@ -25,23 +28,15 @@ THE SOFTWARE. import operator import sys +import logging +from functools import reduce -from pytools.decorator import decorator import six from six.moves import range, zip, intern, input -from functools import reduce -import logging -try: - decorator_module = __import__("decorator", level=0) -except TypeError: - # this must be Python 2.4 - my_decorator = decorator -except ImportError: - my_decorator = decorator -else: - my_decorator = decorator_module.decorator +decorator_module = __import__("decorator", level=0) +my_decorator = decorator_module.decorator __doc__ = """ A Collection of Utilities @@ -237,9 +232,12 @@ class RecordWithoutPickling(object): __slots__ = [] - def __init__(self, valuedict=None, exclude=["self"], **kwargs): + def __init__(self, valuedict=None, exclude=None, **kwargs): assert self.__class__ is not Record + if exclude is None: + exclude = ["self"] + try: fields = self.__class__.fields except AttributeError: @@ -280,6 +278,13 @@ class RecordWithoutPickling(object): fields.update(new_fields) + def __getattr__(self, name): + # This method is implemented to avoid pylint 'no-member' errors for + # attribute access. 
+ raise AttributeError( + "'%s' object has no attribute '%s'" % ( + self.__class__.__name__, name)) + class Record(RecordWithoutPickling): __slots__ = [] @@ -336,39 +341,6 @@ class Reference(object): self.value = value -# {{{ dictionary with default - -class DictionaryWithDefault(object): - def __init__(self, default_value_generator, start={}): - self._Dictionary = dict(start) - self._DefaultGenerator = default_value_generator - - def __getitem__(self, index): - try: - return self._Dictionary[index] - except KeyError: - value = self._DefaultGenerator(index) - self._Dictionary[index] = value - return value - - def __setitem__(self, index, value): - self._Dictionary[index] = value - - def __contains__(self, item): - return True - - def iterkeys(self): - return six.iterkeys(self._Dictionary) - - def __iter__(self): - return self._Dictionary.__iter__() - - def iteritems(self): - return six.iteritems(self._Dictionary) - -# }}} - - class FakeList(object): def __init__(self, f, length): self._Length = length @@ -388,7 +360,10 @@ class FakeList(object): # {{{ dependent dictionary ---------------------------------------------------- class DependentDictionary(object): - def __init__(self, f, start={}): + def __init__(self, f, start=None): + if start is None: + start = {} + self._Function = f self._Dictionary = start.copy() @@ -397,7 +372,7 @@ class DependentDictionary(object): def __contains__(self, key): try: - self[key] + self[key] # pylint: disable=pointless-statement return True except KeyError: return False @@ -531,15 +506,15 @@ def memoize(*args, **kwargs): # http://www.phyast.pitt.edu/~micheles/python/ key = key_func(*args, **kwargs) try: - return func._memoize_dic[key] + return func._memoize_dic[key] # pylint: disable=protected-access except AttributeError: # _memoize_dic doesn't exist yet. result = func(*args, **kwargs) - func._memoize_dic = {key: result} + func._memoize_dic = {key: result} # pylint: disable=protected-access return result except KeyError: result = func(*args, **kwargs) - func._memoize_dic[key] = result + func._memoize_dic[key] = result # pylint: disable=protected-access return result else: @my_decorator @@ -547,15 +522,15 @@ def memoize(*args, **kwargs): # by Michele Simionato # http://www.phyast.pitt.edu/~micheles/python/ try: - return func._memoize_dic[args] + return func._memoize_dic[args] # pylint: disable=protected-access except AttributeError: # _memoize_dic doesn't exist yet. result = func(*args) - func._memoize_dic = {args: result} + func._memoize_dic = {args: result} # pylint:disable=protected-access return result except KeyError: result = func(*args) - func._memoize_dic[args] = result + func._memoize_dic[args] = result # pylint: disable=protected-access return result if not args: return _deco @@ -623,13 +598,18 @@ def memoize_method(method): return memoize_on_first_arg(method, intern("_memoize_dic_"+method.__name__)) -def memoize_method_with_uncached(uncached_args=[], uncached_kwargs=set()): +def memoize_method_with_uncached(uncached_args=None, uncached_kwargs=None): """Supports cache deletion via ``method_name.clear_cache(self)``. 
:arg uncached_args: a list of argument numbers (0-based, not counting 'self' argument) """ + if uncached_args is None: + uncached_args = [] + if uncached_kwargs is None: + uncached_kwargs = set() + # delete starting from the end uncached_args = sorted(uncached_args, reverse=True) uncached_kwargs = list(uncached_kwargs) @@ -783,7 +763,7 @@ def monkeypatch_method(cls): return decorator -def monkeypatch_class(name, bases, namespace): +def monkeypatch_class(_name, bases, namespace): # from GvR, http://mail.python.org/pipermail/python-dev/2008-January/076194.html assert len(bases) == 1, "Exactly one base class required" @@ -829,13 +809,13 @@ def len_iterable(iterable): return sum(1 for i in iterable) -def flatten(list): +def flatten(iterable): """For an iterable of sub-iterables, generate each member of each sub-iterable in turn, i.e. a flattened version of that super-iterable. Example: Turn [[a,b,c],[d,e,f]] into [a,b,c,d,e,f]. """ - for sublist in list: + for sublist in iterable: for j in sublist: yield j @@ -846,7 +826,7 @@ def general_sum(sequence): def linear_combination(coefficients, vectors): result = coefficients[0] * vectors[0] - for c, v in zip(coefficients, vectors)[1:]: + for c, v in zip(coefficients[1:], vectors[1:]): result += c*v return result @@ -859,8 +839,8 @@ def common_prefix(iterable, empty=None): return empty for v in it: - for j in range(len(pfx)): - if pfx[j] != v[j]: + for j, pfx_j in enumerate(pfx): + if pfx_j != v[j]: pfx = pfx[:j] if j == 0: return pfx @@ -869,14 +849,14 @@ def common_prefix(iterable, empty=None): return pfx -def decorate(function, list): - return [(x, function(x)) for x in list] +def decorate(function, iterable): + return [(x, function(x)) for x in iterable] -def partition(criterion, list): +def partition(criterion, iterable): part_true = [] part_false = [] - for i in list: + for i in iterable: if criterion(i): part_true.append(i) else: @@ -900,8 +880,8 @@ def product(iterable): return reduce(mul, iterable, 1) -all = six.moves.builtins.all -any = six.moves.builtins.any +all = six.moves.builtins.all # pylint: disable=redefined-builtin +any = six.moves.builtins.any # pylint: disable=redefined-builtin def reverse_dictionary(the_dict): @@ -955,7 +935,7 @@ def find_max_where(predicate, prec=1e-5, initial_guess=1, fail_bound=1e38): # {{{ establish bracket - mag = 1 + mag = initial_guess if predicate(mag): mag *= 2 @@ -996,8 +976,8 @@ def find_max_where(predicate, prec=1e-5, initial_guess=1, fail_bound=1e38): lower_true = mid else: upper_false = mid - else: - return lower_true + + return lower_true # }}} @@ -1088,16 +1068,16 @@ def average(iterable): it = iterable.__iter__() try: - sum = next(it) + s = next(it) count = 1 except StopIteration: raise ValueError("empty average") for value in it: - sum = sum + value + s = s + value count += 1 - return sum/count + return s/count class VarianceAggregator: @@ -1114,9 +1094,9 @@ class VarianceAggregator: def step(self, x): self.n += 1 - delta = x - self.mean - self.mean += delta/self.n - self.m2 += delta*(x - self.mean) + delta_ = x - self.mean + self.mean += delta_/self.n + self.m2 += delta_*(x - self.mean) def finalize(self): if self.entire_pop: @@ -1158,7 +1138,7 @@ def indices_in_shape(shape): if isinstance(shape, int): shape = (shape,) - if len(shape) == 0: + if not shape: yield () elif len(shape) == 1: for i in range(0, shape[0]): @@ -1173,7 +1153,7 @@ def indices_in_shape(shape): def generate_nonnegative_integer_tuples_below(n, length=None, least=0): """n may be a sequence, in which case length must be 
None.""" if length is None: - if len(n) == 0: + if not n: yield () return @@ -1196,23 +1176,24 @@ def generate_nonnegative_integer_tuples_below(n, length=None, least=0): yield my_part + base -def generate_decreasing_nonnegative_tuples_summing_to(n, length, min=0, max=None): +def generate_decreasing_nonnegative_tuples_summing_to( + n, length, min_value=0, max_value=None): if length == 0: yield () elif length == 1: - if n <= max: - #print "MX", n, max + if n <= max_value: + #print "MX", n, max_value yield (n,) else: return else: - if max is None or n < max: - max = n + if max_value is None or n < max_value: + max_value = n - for i in range(min, max+1): + for i in range(min_value, max_value+1): #print "SIG", sig, i for remainder in generate_decreasing_nonnegative_tuples_summing_to( - n-i, length-1, min, i): + n-i, length-1, min_value, i): yield (i,) + remainder @@ -1279,10 +1260,10 @@ def generate_permutations(original): if len(original) <= 1: yield original else: - for perm in generate_permutations(original[1:]): - for i in range(len(perm)+1): + for perm_ in generate_permutations(original[1:]): + for i in range(len(perm_)+1): #nb str[0:1] works in both string and list contexts - yield perm[:i] + original[0:1] + perm[i:] + yield perm_[:i] + original[0:1] + perm_[i:] def generate_unique_permutations(original): @@ -1291,10 +1272,10 @@ def generate_unique_permutations(original): had_those = set() - for perm in generate_permutations(original): - if perm not in had_those: - had_those.add(perm) - yield perm + for perm_ in generate_permutations(original): + if perm_ not in had_those: + had_those.add(perm_) + yield perm_ def enumerate_basic_directions(dimensions): @@ -1314,12 +1295,18 @@ def get_read_from_map_from_permutation(original, permuted): Requires that the permutation can be inferred from C{original} and C{permuted}. - >>> for p1 in generate_permutations(range(5)): - ... for p2 in generate_permutations(range(5)): - ... rfm = get_read_from_map_from_permutation(p1, p2) - ... p2a = [p1[rfm[i]] for i in range(len(p1))] - ... assert p2 == p2a + .. doctest :: + + >>> for p1 in generate_permutations(range(5)): + ... for p2 in generate_permutations(range(5)): + ... rfm = get_read_from_map_from_permutation(p1, p2) + ... p2a = [p1[rfm[i]] for i in range(len(p1))] + ... assert p2 == p2a """ + from warnings import warn + warn("get_read_from_map_from_permutation is deprecated and will be " + "removed in 2019", DeprecationWarning, stacklevel=2) + assert len(original) == len(permuted) where_in_original = dict( (original[i], i) for i in range(len(original))) @@ -1335,14 +1322,20 @@ def get_write_to_map_from_permutation(original, permuted): Requires that the permutation can be inferred from C{original} and C{permuted}. - >>> for p1 in generate_permutations(range(5)): - ... for p2 in generate_permutations(range(5)): - ... wtm = get_write_to_map_from_permutation(p1, p2) - ... p2a = [0] * len(p2) - ... for i, oi in enumerate(p1): - ... p2a[wtm[i]] = oi - ... assert p2 == p2a + .. doctest:: + + >>> for p1 in generate_permutations(range(5)): + ... for p2 in generate_permutations(range(5)): + ... wtm = get_write_to_map_from_permutation(p1, p2) + ... p2a = [0] * len(p2) + ... for i, oi in enumerate(p1): + ... p2a[wtm[i]] = oi + ... 
assert p2 == p2a """ + from warnings import warn + warn("get_write_to_map_from_permutation is deprecated and will be " + "removed in 2019", DeprecationWarning, stacklevel=2) + assert len(original) == len(permuted) where_in_permuted = dict( @@ -1356,9 +1349,11 @@ def get_write_to_map_from_permutation(original, permuted): # {{{ graph algorithms -def a_star(initial_state, goal_state, neighbor_map, +def a_star( # pylint: disable=too-many-locals + initial_state, goal_state, neighbor_map, estimate_remaining_cost=None, - get_step_cost=lambda x, y: 1): + get_step_cost=lambda x, y: 1 + ): """ With the default cost and heuristic, this amounts to Dijkstra's algorithm. """ @@ -1366,7 +1361,7 @@ def a_star(initial_state, goal_state, neighbor_map, from heapq import heappop, heappush if estimate_remaining_cost is None: - def estimate_remaining_cost(x): + def estimate_remaining_cost(x): # pylint: disable=function-redefined if x != goal_state: return 1 else: @@ -1387,7 +1382,7 @@ def a_star(initial_state, goal_state, neighbor_map, queue = [(init_remcost, AStarNode(initial_state, parent=None, path_cost=0))] visited_states = set() - while len(queue): + while queue: _, top = heappop(queue) visited_states.add(top.state) @@ -1449,7 +1444,9 @@ class Table: for col_width in col_widths)] return "\n".join(lines) - def latex(self, skip_lines=0, hline_after=[]): + def latex(self, skip_lines=0, hline_after=None): + if hline_after is None: + hline_after = [] lines = [] for row_nr, row in list(enumerate(self.rows))[skip_lines:]: lines.append(" & ".join(row)+r" \\") @@ -1463,7 +1460,8 @@ class Table: # {{{ histogram formatting -def string_histogram(iterable, min_value=None, max_value=None, +def string_histogram( # pylint: disable=too-many-arguments,too-many-locals + iterable, min_value=None, max_value=None, bin_count=20, width=70, bin_starts=None, use_unicode=True): if bin_starts is None: if min_value is None or max_value is None: @@ -1523,7 +1521,7 @@ def word_wrap(text, width, wrap_using="\n"): breaks are posix newlines (``\n``). 
""" space_or_break = [" ", wrap_using] - return reduce(lambda line, word, width=width: '%s%s%s' % + return reduce(lambda line, word: '%s%s%s' % (line, space_or_break[(len(line)-line.rfind('\n')-1 + len(word.split('\n', 1)[0]) @@ -1549,7 +1547,11 @@ class CPyUserInterface(object): class Parameters(Record): pass - def __init__(self, variables, constants={}, doc={}): + def __init__(self, variables, constants=None, doc=None): + if constants is None: + constants = {} + if doc is None: + doc = {} self.variables = variables self.constants = constants self.doc = doc @@ -1577,8 +1579,6 @@ class CPyUserInterface(object): print(" %s" % self.doc[c]) def gather(self, argv=None): - import sys - if argv is None: argv = sys.argv @@ -1638,17 +1638,19 @@ class MovedFunctionDeprecationWrapper: class StderrToStdout(object): def __enter__(self): - import sys + # pylint: disable=attribute-defined-outside-init self.stderr_backup = sys.stderr sys.stderr = sys.stdout def __exit__(self, exc_type, exc_val, exc_tb): - import sys sys.stderr = self.stderr_backup del self.stderr_backup -def typedump(val, max_seq=5, special_handlers={}): +def typedump(val, max_seq=5, special_handlers=None): + if special_handlers is None: + special_handlers = {} + try: hdlr = special_handlers[type(val)] except KeyError: @@ -1697,7 +1699,7 @@ def invoke_editor(s, filename="edit.txt", descr="the file"): if "EDITOR" in os.environ: from subprocess import Popen p = Popen([os.environ["EDITOR"], full_name]) - os.waitpid(p.pid, 0)[1] + os.waitpid(p.pid, 0) else: print("(Set the EDITOR environment variable to be " "dropped directly into an editor next time.)") @@ -1715,7 +1717,7 @@ def invoke_editor(s, filename="edit.txt", descr="the file"): # {{{ progress bars -class ProgressBar: +class ProgressBar(object): # pylint: disable=too-many-instance-attributes """ .. automethod:: draw .. automethod:: progress @@ -1763,7 +1765,6 @@ class ProgressBar: else: eta_str = "?" - import sys sys.stderr.write("%-20s [%s] ETA %s\r" % ( self.description, squares*"#"+(self.length-squares)*" ", @@ -1780,7 +1781,6 @@ class ProgressBar: self.draw() def finished(self): - import sys self.set_progress(self.total) sys.stderr.write("\n") @@ -1802,7 +1802,6 @@ def assert_not_a_file(name): def add_python_path_relative_to_script(rel_path): - import sys from os.path import dirname, join, abspath script_name = sys.argv[0] @@ -1895,7 +1894,10 @@ class UniqueNameGenerator(object): .. automethod:: add_names .. 
automethod:: __call__ """ - def __init__(self, existing_names=set(), forced_prefix=""): + def __init__(self, existing_names=None, forced_prefix=""): + if existing_names is None: + existing_names = set() + self.existing_names = existing_names.copy() self.forced_prefix = forced_prefix self.prefix_to_counter = {} @@ -1937,7 +1939,7 @@ class UniqueNameGenerator(object): self.prefix_to_counter[based_on] = counter - var_name = intern(var_name) + var_name = intern(var_name) # pylint: disable=undefined-loop-variable self.existing_names.add(var_name) self._name_added(var_name) @@ -1953,6 +1955,8 @@ class MinRecursionLimit(object): self.min_rec_limit = min_rec_limit def __enter__(self): + # pylint: disable=attribute-defined-outside-init + self.prev_recursion_limit = sys.getrecursionlimit() new_limit = max(self.prev_recursion_limit, self.min_rec_limit) sys.setrecursionlimit(new_limit) @@ -1997,7 +2001,7 @@ def download_from_web_if_not_present(url, local_name=None): # {{{ find git revisions -def find_git_revision(tree_root): +def find_git_revision(tree_root): # pylint: disable=too-many-locals # Keep this routine self-contained so that it can be copy-pasted into # setup.py. @@ -2027,7 +2031,6 @@ def find_git_revision(tree_root): cwd=tree_root, env=env) (git_rev, _) = p.communicate() - import sys if sys.version_info >= (3,): git_rev = git_rev.decode() @@ -2108,6 +2111,8 @@ class ProcessTimer(object): self.done() def done(self): + # pylint: disable=attribute-defined-outside-init + import time if sys.version_info >= (3, 3): self.wall_elapsed = time.perf_counter() - self.perf_counter_start @@ -2123,7 +2128,7 @@ class ProcessTimer(object): # {{{ log utilities -class ProcessLogger(object): +class ProcessLogger(object): # pylint: disable=too-many-instance-attributes """Logs the completion time of a (presumably) lengthy process to :mod:`logging`. Only uses a high log level if the process took perceptible time. 
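A pattern worth calling out, since it accounts for many of the hunks above (`exclude=["self"]`, `uncached_args=[]`/`uncached_kwargs=set()`, `start={}`, `hline_after=[]`, `constants={}`/`doc={}`, `special_handlers={}`, `existing_names=set()`): pylint's dangerous-default-value warning fires because Python evaluates default arguments once, at `def` time, so a mutable default is shared across all calls. The diff's fix is the standard `None`-sentinel idiom. A minimal self-contained sketch, with hypothetical names:

    # Anti-pattern: the list is created once and shared by every call
    # that omits the argument.
    def remember_bad(item, seen=[]):
        seen.append(item)
        return seen

    remember_bad(1)   # -> [1]
    remember_bad(2)   # -> [1, 2]  (state leaked from the first call)

    # The idiom applied throughout this diff: default to None and
    # allocate a fresh container inside the body.
    def remember_good(item, seen=None):
        if seen is None:
            seen = []
        seen.append(item)
        return seen

    remember_good(1)  # -> [1]
    remember_good(2)  # -> [2]

Where the default is never mutated, the diff instead substitutes an immutable tuple (see `aux_files=()` and the `env=((...), ...)` defaults in `pytools/batchjob.py` further down), which is safe to share across calls.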
@@ -2135,7 +2140,8 @@ class ProcessLogger(object): default_noisy_level = logging.INFO - def __init__(self, logger, description, + def __init__( # pylint: disable=too-many-arguments + self, logger, description, silent_level=None, noisy_level=None, long_threshold_seconds=None): self.logger = logger self.description = description @@ -2168,7 +2174,8 @@ class ProcessLogger(object): self.description, sleep_duration) - def done(self, extra_msg=None, *extra_fmt_args): + def done( # pylint: disable=keyword-arg-before-vararg + self, extra_msg=None, *extra_fmt_args): self.timer.done() self.is_done = True diff --git a/pytools/arithmetic_container.py b/pytools/arithmetic_container.py deleted file mode 100644 index cbacc71c231bd77ab7940a95031c2651dec27c8a..0000000000000000000000000000000000000000 --- a/pytools/arithmetic_container.py +++ /dev/null @@ -1,449 +0,0 @@ -from __future__ import division -from __future__ import absolute_import -from .decorator import decorator -import operator -import six -from six.moves import range -from six.moves import zip - - - - -class ArithmeticList(list): - """A list with elementwise arithmetic operations.""" - - def assert_same_length(self, other): - assert len(self) == len(other) - - def unary_operator(self, operator): - return ArithmeticList(operator(v) for v in self) - - def binary_operator(self, other, operator): - if not isinstance(other, ArithmeticList): - return ArithmeticList(operator(v, other) for v in self) - - self.assert_same_length(other) - return ArithmeticList(operator(v, w) for v, w in zip(self, other)) - - def reverse_binary_operator(self, other, operator): - if not isinstance(other, ArithmeticList): - return ArithmeticList(operator(other, v) for v in self) - - self.assert_same_length(other) - return ArithmeticList(operator(w, v) for v, w in zip(self, other)) - - def __neg__(self): return self.unary_operator(operator.neg) - def __pos__(self): return self.unary_operator(operator.pos) - def __abs__(self): return self.unary_operator(operator.abs) - def __invert__(self): return self.unary_operator(operator.invert) - - def __add__(self, other): return self.binary_operator(other, operator.add) - def __sub__(self, other): return self.binary_operator(other, operator.sub) - def __mul__(self, other): return self.binary_operator(other, operator.mul) - def __div__(self, other): return self.binary_operator(other, operator.div) - def __truediv__(self, other): return self.binary_operator(other, operator.truediv) - def __floordiv__(self, other): return self.binary_operator(other, operator.floordiv) - def __mod__(self, other): return self.binary_operator(other, operator.mod) - def __pow__(self, other): return self.binary_operator(other, operator.pow) - def __lshift__(self, other): return self.binary_operator(other, operator.lshift) - def __rshift__(self, other): return self.binary_operator(other, operator.rshift) - def __and__(self, other): return self.binary_operator(other, operator.and_) - def __or__(self, other): return self.binary_operator(other, operator.or_) - def __xor__(self, other): return self.binary_operator(other, operator.xor) - - def __radd__(self, other): return self.reverse_binary_operator(other, operator.add) - def __rsub__(self, other): return self.reverse_binary_operator(other, operator.sub) - def __rmul__(self, other): return self.reverse_binary_operator(other, operator.mul) - def __rdiv__(self, other): return self.reverse_binary_operator(other, operator.div) - def __rtruediv__(self, other): return self.reverse_binary_operator(other, 
operator.truediv) - def __rfloordiv__(self, other): return self.reverse_binary_operator(other, operator.floordiv) - def __rmod__(self, other): return self.reverse_binary_operator(other, operator.mod) - def __rpow__(self, other): return self.reverse_binary_operator(other, operator.pow) - def __rlshift__(self, other): return self.reverse_binary_operator(other, operator.lshift) - def __rrshift__(self, other): return self.reverse_binary_operator(other, operator.rshift) - def __rand__(self, other): return self.reverse_binary_operator(other, operator.and_) - def __ror__(self, other): return self.reverse_binary_operator(other, operator.or_) - def __rxor__(self, other): return self.reverse_binary_operator(other, operator.xor) - - def __iadd__(self, other): - self.assert_same_length(other) - for i in range(len(self)): - self[i] += other[i] - return self - - def __isub__(self, other): - self.assert_same_length(other) - for i in range(len(self)): - self[i] -= other[i] - return self - - def __imul__(self, other): - self.assert_same_length(other) - for i in range(len(self)): - self[i] *= other[i] - return self - - def __idiv__(self, other): - from operator import div - self.assert_same_length(other) - for i in range(len(self)): - self[i] = div(self[i], other[i]) - return self - - def __itruediv__(self, other): - self.assert_same_length(other) - for i in range(len(self)): - self[i] /= other[i] - return self - - def __ifloordiv__(self, other): - self.assert_same_length(other) - for i in range(len(self)): - self[i] //= other[i] - return self - - def __imod__(self, other): - self.assert_same_length(other) - for i in range(len(self)): - self[i] %= other[i] - return self - - def __ipow__(self, other): - self.assert_same_length(other) - for i in range(len(self)): - self[i] **= other[i] - return self - - def __ilshift__(self, other): - self.assert_same_length(other) - for i in range(len(self)): - self[i] <<= other[i] - return self - - def __irshift__(self, other): - self.assert_same_length(other) - for i in range(len(self)): - self[i] >>= other[i] - return self - - def __iand__(self, other): - self.assert_same_length(other) - for i in range(len(self)): - self[i] &= other[i] - return self - - def __ior__(self, other): - self.assert_same_length(other) - for i in range(len(self)): - self[i] |= other[i] - return self - - def __ixor__(self, other): - self.assert_same_length(other) - for i in range(len(self)): - self[i] ^= other[i] - return self - - def __getslice__(self, i, j): - return ArithmeticList(list.__getslice__(self, i, j)) - - def __str__(self): - return "ArithmeticList(%s)" % list.__repr__(self) - - def __repr__(self): - return "ArithmeticList(%s)" % list.__repr__(self) - - def plus(self, other): - """Return a copy of self extended by the entries from the iterable - C{other}. - - Makes up for the loss of the C{+} operator (which is now arithmetic). - """ - result = ArithmeticList(self) - result.extend(other) - return result - - - - -def join_fields(*fields): - result = ArithmeticList() - for f in fields: - if isinstance(f, (ArithmeticList, list)): - result.extend(f) - else: - result.append(f) - return result - - - - -@decorator -def work_with_arithmetic_containers(f, *args, **kwargs): - """This decorator allows simple elementwise functions to automatically - accept containers of arithmetic types, by acting on each element. - - At present, it only works for ArithmeticList. 
- """ - - class SimpleArg: - def __init__(self, arg_number): - self.arg_number = arg_number - - def eval(self, current_tp): - return args[self.arg_number] - - class SimpleKwArg: - def __init__(self, arg_name): - self.arg_name = arg_name - - def eval(self, current_tp): - return kwargs[self.arg_name] - - class ListArg: - def __init__(self, list_number): - self.list_number = list_number - - def eval(self, current_tp): - return current_tp[self.list_number] - - lists = [] - formal_args = [] - formal_kwargs = {} - - for arg in args: - if isinstance(arg, ArithmeticList): - formal_args.append(ListArg(len(lists))) - lists.append(arg) - else: - formal_args.append(SimpleArg(len(formal_args))) - - for name, arg in six.iteritems(kwargs): - if isinstance(arg, ArithmeticList): - formal_kwargs[name] = ListArg(len(lists)) - lists.append(arg) - else: - formal_kwargs[name] = SimpleKwArg(name) - - if lists: - from pytools import all_equal - assert all_equal(len(lst) for lst in lists) - - return ArithmeticList( - f( - *list(formal_arg.eval(tp) for formal_arg in formal_args), - **dict((name, formal_arg.eval(tp)) - for name, formal_arg in six.iteritems(formal_kwargs)) - ) - for tp in zip(*lists)) - else: - return f(*args, **kwargs) - - - - -def outer_product(al1, al2, mult_op=operator.mul): - return ArithmeticListMatrix( - [[mult_op(al1i, al2i) for al2i in al2] for al1i in al1] - ) - - - - -class ArithmeticListMatrix: - """A matrix type that operates on L{ArithmeticLists}.""" - def __init__(self, matrix): - """Initialize the ArithmeticListMatrix. - - C{matrix} must allow the following interface: - - - len(matrix) gives the height of the matrix. - - matrix is iterable, giving the rows of the matrix. - - Each row, in turn, must support C{len()} and iteration. - """ - self.matrix = matrix - - def times(self, other, mult_op): - if not isinstance(other, ArithmeticList): - raise NotImplementedError - - result = ArithmeticList(None for i in range(len(self.matrix))) - - for i, row in enumerate(self.matrix): - if len(row) != len(other): - raise ValueError("matrix width does not match ArithmeticList") - - for j, entry in enumerate(row): - if not isinstance(entry, (int, float)) or entry: - if not isinstance(entry, (int, float)) or entry != 1: - contrib = mult_op(entry, other[j]) - else: - contrib = other[j] - - if result[i] is None: - result[i] = contrib - else: - result[i] += contrib - - for i in range(len(result)): - if result[i] is None and len(other): - result[i] = 0 * other[0] - - return result - - def __mul__(self, other): - if not isinstance(other, ArithmeticList): - return NotImplemented - - from operator import mul - return self.times(other, mul) - - def map(self, entry_map): - return ArithmeticListMatrix([[ - entry_map(entry) - for j, entry in enumerate(row)] - for i, row in enumerate(self.matrix)]) - - - - -class ArithmeticDictionary(dict): - """A dictionary with elementwise (on the values, not the keys) - arithmetic operations.""" - - def _get_empty_self(self): - return ArithmeticDictionary() - - def assert_same_keys(self, other): - for key in self: - assert key in other - for key in other: - assert key in self - - def unary_operator(self, operator): - result = self._get_empty_self() - for key in self: - result[key] = operator(self[key]) - return result - - def binary_operator(self, other, operator): - try: - self.assert_same_keys(other) - result = self._get_empty_self() - for key in self: - result[key] = operator(self[key], other[key]) - return result - except TypeError: - result = self._get_empty_self() - for 
key in self: - result[key] = operator(self[key], other) - return result - - def reverse_binary_operator(self, other, operator): - try: - self.assert_same_keys(other) - result = self._get_empty_self() - for key in self: - result[key] = operator(other[key], self[key]) - return result - except TypeError: - result = self._get_empty_self() - for key in self: - result[key] = operator(other, self[key]) - return result - - def __neg__(self): return self.unary_operator(operator.neg) - def __pos__(self): return self.unary_operator(operator.pos) - def __abs__(self): return self.unary_operator(operator.abs) - def __invert__(self): return self.unary_operator(operator.invert) - - def __add__(self, other): return self.binary_operator(other, operator.add) - def __sub__(self, other): return self.binary_operator(other, operator.sub) - def __mul__(self, other): return self.binary_operator(other, operator.mul) - def __div__(self, other): return self.binary_operator(other, operator.div) - def __mod__(self, other): return self.binary_operator(other, operator.mod) - def __pow__(self, other): return self.binary_operator(other, operator.pow) - def __lshift__(self, other): return self.binary_operator(other, operator.lshift) - def __rshift__(self, other): return self.binary_operator(other, operator.rshift) - def __and__(self, other): return self.binary_operator(other, operator.and_) - def __or__(self, other): return self.binary_operator(other, operator.or_) - def __xor__(self, other): return self.binary_operator(other, operator.xor) - - def __radd__(self, other): return self.reverse_binary_operator(other, operator.add) - def __rsub__(self, other): return self.reverse_binary_operator(other, operator.sub) - def __rmul__(self, other): return self.reverse_binary_operator(other, operator.mul) - def __rdiv__(self, other): return self.reverse_binary_operator(other, operator.div) - def __rmod__(self, other): return self.reverse_binary_operator(other, operator.mod) - def __rpow__(self, other): return self.reverse_binary_operator(other, operator.pow) - def __rlshift__(self, other): return self.reverse_binary_operator(other, operator.lshift) - def __rrshift__(self, other): return self.reverse_binary_operator(other, operator.rshift) - def __rand__(self, other): return self.reverse_binary_operator(other, operator.and_) - def __ror__(self, other): return self.reverse_binary_operator(other, operator.or_) - def __rxor__(self, other): return self.reverse_binary_operator(other, operator.xor) - - def __iadd__(self, other): - self.assert_same_keys(other) - for key in self: - self[key] += other[key] - return self - - def __isub__(self, other): - self.assert_same_keys(other) - for key in self: - self[key] -= other[key] - return self - - def __imul__(self, other): - self.assert_same_keys(other) - for key in self: - self[key] *= other[key] - return self - - def __idiv__(self, other): - self.assert_same_keys(other) - for key in self: - self[key] /= other[key] - return self - - def __imod__(self, other): - self.assert_same_keys(other) - for key in self: - self[key] %= other[key] - return self - - def __ipow__(self, other): - self.assert_same_keys(other) - for key in self: - self[key] **= other[key] - return self - - def __ilshift__(self, other): - self.assert_same_keys(other) - for key in self: - self[key] <<= other[key] - return self - - def __irshift__(self, other): - self.assert_same_keys(other) - for key in self: - self[key] >>= other[key] - return self - - def __iand__(self, other): - self.assert_same_keys(other) - for key in self: - 
self[key] &= other[key] - return self - - def __ior__(self, other): - self.assert_same_keys(other) - for key in self: - self[key] |= other[key] - return self - - def __ixor__(self, other): - self.assert_same_keys(other) - for key in self: - self[key] ^= other[key] - return self diff --git a/pytools/batchjob.py b/pytools/batchjob.py index 2afde16b595c1d7133457197bafabd7af570ce1f..6756e6acb20df31c5fdbb88fb3896718ab3440cb 100644 --- a/pytools/batchjob.py +++ b/pytools/batchjob.py @@ -23,7 +23,7 @@ def get_timestamp(): class BatchJob(object): - def __init__(self, moniker, main_file, aux_files=[], timestamp=None): + def __init__(self, moniker, main_file, aux_files=(), timestamp=None): import os import os.path @@ -70,8 +70,8 @@ class INHERIT(object): # noqa class GridEngineJob(BatchJob): - def submit(self, env={"LD_LIBRARY_PATH": INHERIT, "PYTHONPATH": INHERIT}, - memory_megs=None, extra_args=[]): + def submit(self, env=(("LD_LIBRARY_PATH", INHERIT), ("PYTHONPATH", INHERIT)), + memory_megs=None, extra_args=()): from subprocess import Popen args = [ "-N", self.moniker, @@ -79,7 +79,7 @@ class GridEngineJob(BatchJob): ] from os import getenv - + env = dict(env) for var, value in six.iteritems(env): if value is INHERIT: value = getenv(var) @@ -97,8 +97,8 @@ class GridEngineJob(BatchJob): class PBSJob(BatchJob): - def submit(self, env={"LD_LIBRARY_PATH": INHERIT, "PYTHONPATH": INHERIT}, - memory_megs=None, extra_args=[]): + def submit(self, env=(("LD_LIBRARY_PATH", INHERIT), ("PYTHONPATH", INHERIT)), + memory_megs=None, extra_args=()): from subprocess import Popen args = [ "-N", self.moniker, @@ -110,6 +110,7 @@ class PBSJob(BatchJob): from os import getenv + env = dict(env) for var, value in six.iteritems(env): if value is INHERIT: value = getenv(var) diff --git a/pytools/datatable.py b/pytools/datatable.py index 1d534fecd190b31b6ed52479a658a3d222f3658b..83e5032e602093b472a24a18eeb04ed6b184df7b 100644 --- a/pytools/datatable.py +++ b/pytools/datatable.py @@ -1,8 +1,9 @@ from __future__ import absolute_import -from pytools import Record + import six -from six.moves import range -from six.moves import zip +from six.moves import range, zip + +from pytools import Record class Row(Record): @@ -100,7 +101,7 @@ class DataTable: def get(self, **kwargs): filtered = self.filtered(**kwargs) - if len(filtered) < 1: + if not filtered: raise RuntimeError("no matching entry for get()") if len(filtered) > 1: raise RuntimeError("more than one matching entry for get()") @@ -175,7 +176,7 @@ class DataTable: Assumes both tables are sorted ascendingly by the column by which they are joined. 
- """ + """ # pylint:disable=too-many-locals,too-many-branches def without(indexable, idx): return indexable[:idx] + indexable[idx+1:] diff --git a/pytools/debug.py b/pytools/debug.py index d0468443e8dfa9138f9aa3f0c043c22b45ae325a..08fbaa3354bed837763fd50a67b323d07f102a53 100644 --- a/pytools/debug.py +++ b/pytools/debug.py @@ -1,8 +1,8 @@ -from __future__ import absolute_import -from __future__ import print_function -from pytools import memoize +from __future__ import absolute_import, print_function + import six from six.moves import input +from pytools import memoize # {{{ debug files ------------------------------------------------------------- @@ -17,9 +17,10 @@ def make_unique_filesystem_object(stem, extension="", directory="", import os if creator is None: - def creator(name): + def default_creator(name): return os.fdopen(os.open(name, os.O_CREAT | os.O_WRONLY | os.O_EXCL, 0o444), "w") + creator = default_creator i = 0 while True: @@ -56,7 +57,7 @@ class RefDebugQuit(Exception): pass -def refdebug(obj, top_level=True, exclude=[]): +def refdebug(obj, top_level=True, exclude=()): # noqa: E501 pylint:disable=too-many-locals,too-many-branches,too-many-statements from types import FrameType def is_excluded(o): @@ -76,65 +77,66 @@ def refdebug(obj, top_level=True, exclude=[]): refdebug(obj, top_level=False, exclude=exclude) except RefDebugQuit: pass - else: - import gc - print_head = True - print("-------------->") - try: - reflist = [x for x in gc.get_referrers(obj) - if not is_excluded(x)] - - idx = 0 - while True: - if print_head: - print("referring to", id(obj), type(obj), obj) - print("----------------------") - print_head = False - r = reflist[idx] - - if isinstance(r, FrameType): - s = str(r.f_code) - else: - s = str(r) - - print("%d/%d: " % (idx, len(reflist)), id(r), type(r), s) - - if isinstance(r, dict): - for k, v in six.iteritems(r): - if v is obj: - print("...referred to from key", k) - - print("[d]ig, [n]ext, [p]rev, [e]val, [r]eturn, [q]uit?") - - response = input() - - if response == "d": - refdebug(r, top_level=False, exclude=exclude+[reflist]) - print_head = True - elif response == "n": - if idx + 1 < len(reflist): - idx += 1 - elif response == "p": - if idx - 1 >= 0: - idx -= 1 - elif response == "e": - print("type expression, obj is your object:") - expr_str = input() - try: - res = eval(expr_str, {"obj": r}) - except Exception: - from traceback import print_exc - print_exc() - print(res) - elif response == "r": - return - elif response == "q": - raise RefDebugQuit() - else: - print("WHAT YOU SAY!!! 
(invalid choice)") - - finally: - print("<--------------") + return + + import gc + print_head = True + print("-------------->") + try: + reflist = [x for x in gc.get_referrers(obj) + if not is_excluded(x)] + + idx = 0 + while True: + if print_head: + print("referring to", id(obj), type(obj), obj) + print("----------------------") + print_head = False + r = reflist[idx] + + if isinstance(r, FrameType): + s = str(r.f_code) + else: + s = str(r) + + print("%d/%d: " % (idx, len(reflist)), id(r), type(r), s) + + if isinstance(r, dict): + for k, v in six.iteritems(r): + if v is obj: + print("...referred to from key", k) + + print("[d]ig, [n]ext, [p]rev, [e]val, [r]eturn, [q]uit?") + + response = input() + + if response == "d": + refdebug(r, top_level=False, exclude=exclude+[reflist]) + print_head = True + elif response == "n": + if idx + 1 < len(reflist): + idx += 1 + elif response == "p": + if idx - 1 >= 0: + idx -= 1 + elif response == "e": + print("type expression, obj is your object:") + expr_str = input() + try: + res = eval(expr_str, {"obj": r}) # pylint:disable=eval-used + except Exception: # pylint:disable=broad-except + from traceback import print_exc + print_exc() + print(res) + elif response == "r": + return + elif response == "q": + raise RefDebugQuit() + else: + print("WHAT YOU SAY!!! (invalid choice)") + + finally: + print("<--------------") # }}} @@ -153,8 +155,8 @@ def setup_readline(): if exists(hist_filename): try: readline.read_history_file(hist_filename) - except Exception: - # http://docs.python.org/3/howto/pyporting.html#capturing-the-currently-raised-exception # noqa + except Exception: # pylint:disable=broad-except + # http://docs.python.org/3/howto/pyporting.html#capturing-the-currently-raised-exception # noqa: E501 pylint:disable=line-too-long import sys e = sys.exc_info()[1] @@ -191,16 +193,16 @@ class SetPropagatingDict(dict): del self.target_dict[key] -def shell(locals=None, globals=None): +def shell(locals_=None, globals_=None): from inspect import currentframe, getouterframes calling_frame = getouterframes(currentframe())[1][0] - if locals is None: - locals = calling_frame.f_locals - if globals is None: - globals = calling_frame.f_globals + if locals_ is None: + locals_ = calling_frame.f_locals + if globals_ is None: + globals_ = calling_frame.f_globals - ns = SetPropagatingDict([locals, globals], locals) + ns = SetPropagatingDict([locals_, globals_], locals_) if HAVE_READLINE: readline.set_completer( diff --git a/pytools/diskdict.py b/pytools/diskdict.py deleted file mode 100644 index ca44098a5b53cf83e04daf99c3b0e16d54c5f9b2..0000000000000000000000000000000000000000 --- a/pytools/diskdict.py +++ /dev/null @@ -1,175 +0,0 @@ -from __future__ import absolute_import -# see end of file for sqlite import - -from pytools import memoize -import six - - -@memoize -def get_disk_dict(name, version, **kwargs): - import sys - import os - - from os.path import join - from tempfile import gettempdir - import getpass - cache_dir = join(gettempdir(), - "%s-v%s-uid%s-py%s" % ( - name, version, - getpass.getuser(), ".".join(str(i) for i in sys.version_info))) - - # {{{ ensure cache directory exists - - try: - os.mkdir(cache_dir) - except OSError as e: - from errno import EEXIST - if e.errno != EEXIST: - raise - - # }}} - - return DiskDict(join(cache_dir, "database.sqlite"), **kwargs) - - -class DiskDict(object): - """Provides a disk-backed dictionary. Unlike :mod:`shelve`, this class allows - arbitrary values for keys, at a slight performance penalty. 
- - Note that this is a dangerous game: The :func:`hash` of many objects - changes between runs. In particular, ``hash(None)`` changes between runs. - :class:`str`, :class:`unicode`, :class:`int`, :class:`tuple` and - :class:`long` seem to be constant for a given Python executable, but they - may change for a new version. - - So don't use this class for data that you absolutely *have* to be able - to retrieve. It's fine for caches and the like, though. - """ - def __init__(self, dbfilename, version_base=(), dep_modules=[], - commit_interval=1): - self.db_conn = sqlite.connect(dbfilename, timeout=30) - - try: - self.db_conn.execute("select * from data;") - except sqlite.OperationalError: - self.db_conn.execute(""" - create table data ( - id integer primary key autoincrement, - key_hash integer, - key_pickle blob, - version_hash integer, - version_pickle blob, - when_inserted timestamp default current_timestamp, - result_pickle blob)""") - - def mtime(file): - if not isinstance(file, six.string_types): - # assume file names a module - file = file.__file__ - - import os - return os.stat(file).st_mtime - - from six.moves.cPickle import dumps - self.version = (version_base,) + tuple( - mtime(dm) for dm in dep_modules) - self.version_pickle = dumps(self.version) - self.version_hash = hash(self.version) - - self.cache = {} - - self.commit_interval = commit_interval - self.commit_countdown = self.commit_interval - - from warnings import warn - warn("Given that Python 3+ uses hash randomization, DiskDict will typically " - "be entirely useless and should not be used . Since object hashes " - "will change between runs, it will be unable to retrieve objects " - "from the dictionary in a second run, defeating the purpose of " - "persisting to disk." - "DiskDict is deprecated and will be removed in 2018. " - "Use pytools.persistent_dict instead.", DeprecationWarning, - stacklevel=2) - - def __contains__(self, key): - if key in self.cache: - return True - else: - from six.moves.cPickle import loads - for key_pickle, version_pickle, result_pickle in self.db_conn.execute( - "select key_pickle, version_pickle, result_pickle from data" - " where key_hash = ? and version_hash = ?", - (hash(key), self.version_hash)): - if loads(six.binary_type(key_pickle)) == key \ - and loads(six.binary_type(version_pickle)) == self.version: - result = loads(six.binary_type(result_pickle)) - self.cache[key] = result - return True - - return False - - def __getitem__(self, key): - try: - return self.cache[key] - except KeyError: - from six.moves.cPickle import loads - for key_pickle, version_pickle, result_pickle in self.db_conn.execute( - "select key_pickle, version_pickle, result_pickle from data" - " where key_hash = ? and version_hash = ?", - (hash(key), self.version_hash)): - if loads(six.binary_type(key_pickle)) == key \ - and loads(six.binary_type(version_pickle)) == self.version: - result = loads(six.binary_type(result_pickle)) - self.cache[key] = result - return result - - raise KeyError(key) - - def __delitem__(self, key): - if key in self.cache: - del self.cache[key] - - from six.moves.cPickle import loads - for item_id, key_pickle, version_pickle in self.db_conn.execute( - "select id, key_pickle, version_pickle from data" - " where key_hash = ? 
and version_hash = ?", - (hash(key), self.version_hash)): - if (loads(six.binary_type(key_pickle)) == key - and loads(six.binary_type(version_pickle)) == self.version): - self.db_conn.execute("delete from data where id = ?", (item_id,)) - - self.commit_countdown -= 1 - if self.commit_countdown <= 0: - self.commit_countdown = self.commit_interval - self.db_conn.commit() - - def __setitem__(self, key, value): - del self[key] - - self.cache[key] = value - - from six.moves.cPickle import dumps - self.db_conn.execute("insert into data" - " (key_hash, key_pickle, version_hash, " - " version_pickle, result_pickle)" - " values (?,?,?,?,?)", - (hash(key), sqlite.Binary(dumps(key)), - self.version_hash, self.version_pickle, - sqlite.Binary(dumps(value)))) - - self.commit_countdown -= 1 - if self.commit_countdown <= 0: - self.commit_countdown = self.commit_interval - self.db_conn.commit() - - -try: - import sqlite3 as sqlite -except ImportError: - try: - from pysqlite2 import dbapi2 as sqlite - except ImportError: - import warnings - warnings.warn("DiskDict will be memory-only: " - "a usable version of sqlite was not found.") - DiskDict = dict # noqa diff --git a/pytools/importlib_backport.py b/pytools/importlib_backport.py index 0d5275c3b542fbbe3fcf1ba7fb9c216f3c9aa110..86dd4c6f8b51387da17e0a25076c4f911f81ed54 100644 --- a/pytools/importlib_backport.py +++ b/pytools/importlib_backport.py @@ -66,12 +66,12 @@ def _resolve_name(name, package, level): if not hasattr(package, 'rindex'): raise ValueError("'package' not set to a string") dot = len(package) - for x in six.moves.xrange(level, 1, -1): + for _ in six.moves.xrange(level, 1, -1): try: dot = package.rindex('.', 0, dot) except ValueError: raise ValueError("attempted relative import beyond top-level " - "package") + "package") return "%s.%s" % (package[:dot], name) diff --git a/pytools/lex.py b/pytools/lex.py index 3322d0c6d0f88d3d98efe14a34622f0353df5eb6..2ad9cee47e6800c7477692cae9408300e7768749 100644 --- a/pytools/lex.py +++ b/pytools/lex.py @@ -6,6 +6,7 @@ import six class RuleError(RuntimeError): def __init__(self, rule): + RuntimeError.__init__(self) self.Rule = rule def __str__(self): @@ -14,6 +15,7 @@ class RuleError(RuntimeError): class InvalidTokenError(RuntimeError): def __init__(self, s, str_index): + RuntimeError.__init__(self) self.string = s self.index = str_index @@ -24,6 +26,7 @@ class InvalidTokenError(RuntimeError): class ParseError(RuntimeError): def __init__(self, msg, s, token): + RuntimeError.__init__(self) self.message = msg self.string = s self.Token = token @@ -46,56 +49,58 @@ class RE(object): return "RE(%s)" % self.Content -def lex(lex_table, s, debug=False, match_objects=False): - rule_dict = dict(lex_table) +def _matches_rule(rule, s, start, rule_dict, debug=False): + if debug: + print("Trying", rule, "on", s[start:]) - def matches_rule(rule, s, start): - if debug: - print("Trying", rule, "on", s[start:]) - if isinstance(rule, tuple): - if rule[0] == "|": - for subrule in rule[1:]: - length, match_obj = matches_rule( - subrule, s, start) - if length: - return length, match_obj - return 0, None + if isinstance(rule, tuple): + if rule[0] == "|": + for subrule in rule[1:]: + length, match_obj = _matches_rule( + subrule, s, start, rule_dict, debug) + if not length: + continue + return length, match_obj + else: + my_match_length = 0 + for subrule in rule: + length, _ = _matches_rule( + subrule, s, start, rule_dict, debug) + if not length: + break + my_match_length += length + start += length else: - my_match_length = 0 - 
for subrule in rule: - length, _ = matches_rule(subrule, s, start) - if length: - my_match_length += length - start += length - else: - return 0, None return my_match_length, None - elif isinstance(rule, six.string_types): - return matches_rule(rule_dict[rule], s, start) - elif isinstance(rule, RE): - match_obj = rule.RE.match(s, start) - if match_obj: - return match_obj.end()-start, match_obj - else: - return 0, None - else: - raise RuleError(rule) + return 0, None + + elif isinstance(rule, six.string_types): + return _matches_rule(rule_dict[rule], s, start, rule_dict, debug) + elif isinstance(rule, RE): + match_obj = rule.RE.match(s, start) + if match_obj: + return match_obj.end()-start, match_obj + return 0, None + + raise RuleError(rule) + + +def lex(lex_table, s, debug=False, match_objects=False): + rule_dict = dict(lex_table) result = [] i = 0 while i < len(s): - rule_matched = False for name, rule in lex_table: - length, match_obj = matches_rule(rule, s, i) + length, match_obj = _matches_rule(rule, s, i, rule_dict, debug) if length: if match_objects: result.append((name, s[i:i+length], i, match_obj)) else: result.append((name, s[i:i+length], i)) i += length - rule_matched = True break - if not rule_matched: + else: raise InvalidTokenError(s, i) return result diff --git a/pytools/log.py b/pytools/log.py index 5f49fe66c52194527452acd956ae67572be9413b..8d579bde1876264188f9623fe480c09e7360941c 100644 --- a/pytools/log.py +++ b/pytools/log.py @@ -799,23 +799,6 @@ class LogManager(object): return (data_x, descr_x, unit_x), \ (data_y, descr_y, unit_y) - def plot_gnuplot(self, gp, expr_x, expr_y, **kwargs): - """Plot data to Gnuplot.py. - - @arg gp: a Gnuplot.Gnuplot instance to which the plot is sent. - @arg expr_x: an allowed argument to :meth:`get_joint_dataset`. - @arg expr_y: an allowed argument to :meth:`get_joint_dataset`. - @arg kwargs: keyword arguments that are directly passed on to - C{Gnuplot.Data}. 
- """ - (data_x, descr_x, unit_x), (data_y, descr_y, unit_y) = \ - self.get_plot_data(expr_x, expr_y) - - gp.xlabel("%s [%s]" % (descr_x, unit_x)) - gp.ylabel("%s [%s]" % (descr_y, unit_y)) - from gnuplot_py import Data - gp.plot(Data(data_x, data_y, **kwargs)) - def write_datafile(self, filename, expr_x, expr_y): (data_x, label_x), (data_y, label_y) = self.get_plot_data( expr_x, expr_y) diff --git a/pytools/mpi.py b/pytools/mpi.py index 8f4b2bba06c0151f093a4bbb4d69f1a987ceffc4..fdd97860abc078c9ac845b3c7148e4aea5108748 100644 --- a/pytools/mpi.py +++ b/pytools/mpi.py @@ -13,7 +13,7 @@ def check_for_mpi_relaunch(argv): sys.exit() -def run_with_mpi_ranks(py_script, ranks, callable, args=(), kwargs=None): +def run_with_mpi_ranks(py_script, ranks, callable_, args=(), kwargs=None): if kwargs is None: kwargs = {} @@ -23,7 +23,7 @@ def run_with_mpi_ranks(py_script, ranks, callable, args=(), kwargs=None): newenv["PYTOOLS_RUN_WITHIN_MPI"] = "1" from pickle import dumps - callable_and_args = dumps((callable, args, kwargs)) + callable_and_args = dumps((callable_, args, kwargs)) from subprocess import check_call check_call(["mpirun", "-np", str(ranks), diff --git a/pytools/mpiwrap.py b/pytools/mpiwrap.py index 5c5ed24b484b780a6ade4ddff6d062d5451978cc..cf504f97b9d674b51e658099db1567e626c806c2 100644 --- a/pytools/mpiwrap.py +++ b/pytools/mpiwrap.py @@ -1,22 +1,21 @@ """See pytools.prefork for this module's reason for being.""" from __future__ import absolute_import -import mpi4py.rc - +import mpi4py.rc # pylint:disable=import-error mpi4py.rc.initialize = False -import pytools.prefork -pytools.prefork.enable_prefork() +from mpi4py.MPI import * # noqa pylint:disable=wildcard-import,wrong-import-position -from mpi4py.MPI import * # noqa +import pytools.prefork # pylint:disable=wrong-import-position +pytools.prefork.enable_prefork() -if Is_initialized(): # noqa +if Is_initialized(): # noqa pylint:disable=undefined-variable raise RuntimeError("MPI already initialized before MPI wrapper import") def InitWithAutoFinalize(*args, **kwargs): # noqa - result = Init(*args, **kwargs) # noqa + result = Init(*args, **kwargs) # noqa pylint:disable=undefined-variable import atexit - atexit.register(Finalize) # noqa + atexit.register(Finalize) # noqa pylint:disable=undefined-variable return result diff --git a/pytools/obj_array.py b/pytools/obj_array.py index 33e975f4c9d781807010f1ad78c05f24a3951d30..d326a433b7ede8195de214de46111a305c63c351 100644 --- a/pytools/obj_array.py +++ b/pytools/obj_array.py @@ -26,15 +26,14 @@ Mapping def gen_len(expr): - from pytools.obj_array import is_obj_array if is_obj_array(expr): return len(expr) else: return 1 -def gen_slice(expr, slice): - result = expr[slice] +def gen_slice(expr, slice_): + result = expr[slice_] if len(result) == 1: return result[0] else: @@ -79,7 +78,6 @@ def make_obj_array(res_list): def setify_field(f): - from hedge.tools import is_obj_array if is_obj_array(f): return set(f) else: diff --git a/pytools/persistent_dict.py b/pytools/persistent_dict.py index 75b162d90a83ea317a6e03b978fe72255ce8c0f3..741afb69bdb59c34cfd244857b1f6296f3e3cf7a 100644 --- a/pytools/persistent_dict.py +++ b/pytools/persistent_dict.py @@ -28,7 +28,6 @@ THE SOFTWARE. 
""" import logging -logger = logging.getLogger(__name__) try: import collections.abc as abc @@ -36,13 +35,15 @@ except ImportError: # Python 2 import collections as abc -import functools -import six -import sys import os import shutil +import sys import errno +import six + +logger = logging.getLogger(__name__) + __doc__ = """ Persistent Hashing and Persistent Dictionaries ============================================== @@ -71,9 +72,9 @@ except ImportError: new_hash = sha.new -def _make_dir_recursively(dir): +def _make_dir_recursively(dir_): try: - os.makedirs(dir) + os.makedirs(dir_) except OSError as e: from errno import EEXIST if e.errno != EEXIST: @@ -87,37 +88,6 @@ def update_checksum(checksum, obj): checksum.update(obj) -def _tracks_stacklevel(cls, exclude=frozenset(["__init__"])): - """Changes all the methods of `cls` to track the call stack level in a member - called `_stacklevel`. - """ - def make_wrapper(f): - @functools.wraps(f) - def wrapper(obj, *args, **kwargs): - assert obj._stacklevel >= 0, obj._stacklevel - # Increment by 2 because the method is wrapped. - obj._stacklevel += 2 - try: - return f(obj, *args, **kwargs) - finally: - obj._stacklevel -= 2 - - return wrapper - - for member in cls.__dict__: - f = getattr(cls, member) - - if member in exclude: - continue - - if not six.callable(f): - continue - - setattr(cls, member, make_wrapper(f)) - - return cls - - # {{{ cleanup managers class CleanupBase(object): @@ -141,7 +111,7 @@ class CleanupManager(CleanupBase): class LockManager(CleanupBase): - def __init__(self, cleanup_m, lock_file, _stacklevel=1): + def __init__(self, cleanup_m, lock_file, stacklevel=0): self.lock_file = lock_file attempts = 0 @@ -162,7 +132,7 @@ class LockManager(CleanupBase): from warnings import warn warn("could not obtain lock--delete '%s' if necessary" % self.lock_file, - stacklevel=1 + _stacklevel) + stacklevel=1 + stacklevel) if attempts > 3 * 60: raise RuntimeError("waited more than three minutes " "on the lock file '%s'" @@ -171,7 +141,6 @@ class LockManager(CleanupBase): cleanup_m.register(self) def clean_up(self): - import os os.close(self.fd) os.unlink(self.lock_file) @@ -221,7 +190,7 @@ class KeyBuilder(object): digest = None try: - digest = key._pytools_persistent_hash_digest + digest = key._pytools_persistent_hash_digest # noqa pylint:disable=protected-access except AttributeError: pass @@ -251,8 +220,10 @@ class KeyBuilder(object): if not isinstance(key, type): try: - key._pytools_persistent_hash_digest = digest - except Exception: + key._pytools_persistent_hash_digest = digest # noqa pylint:disable=protected-access + except AttributeError: + pass + except TypeError: pass key_hash.update(digest) @@ -264,26 +235,32 @@ class KeyBuilder(object): # {{{ updaters - def update_for_int(self, key_hash, key): + @staticmethod + def update_for_int(key_hash, key): key_hash.update(str(key).encode("utf8")) update_for_long = update_for_int update_for_bool = update_for_int - def update_for_float(self, key_hash, key): + @staticmethod + def update_for_float(key_hash, key): key_hash.update(repr(key).encode("utf8")) if sys.version_info >= (3,): - def update_for_str(self, key_hash, key): + @staticmethod + def update_for_str(key_hash, key): key_hash.update(key.encode('utf8')) - def update_for_bytes(self, key_hash, key): + @staticmethod + def update_for_bytes(key_hash, key): key_hash.update(key) else: - def update_for_str(self, key_hash, key): + @staticmethod + def update_for_str(key_hash, key): key_hash.update(key) - def update_for_unicode(self, key_hash, 
key): + @staticmethod + def update_for_unicode(key_hash, key): key_hash.update(key.encode('utf8')) def update_for_tuple(self, key_hash, key): @@ -294,10 +271,13 @@ class KeyBuilder(object): for set_key in sorted(key): self.rec(key_hash, set_key) - def update_for_NoneType(self, key_hash, key): # noqa + @staticmethod + def update_for_NoneType(key_hash, key): # noqa + del key key_hash.update("".encode('utf8')) - def update_for_dtype(self, key_hash, key): + @staticmethod + def update_for_dtype(key_hash, key): key_hash.update(key.str.encode('utf8')) # }}} @@ -421,8 +401,6 @@ class _LRUCache(abc.MutableMapping): (len(self.cache), len(self.lru_order)) assert len(self.lru_order) <= self.maxsize - return node[0] - # }}} @@ -440,12 +418,8 @@ class CollisionWarning(UserWarning): pass -@_tracks_stacklevel class _PersistentDictBase(object): def __init__(self, identifier, key_builder=None, container_dir=None): - # for issuing warnings - self._stacklevel = 0 - self.identifier = identifier if key_builder is None: @@ -466,25 +440,28 @@ class _PersistentDictBase(object): self._make_container_dir() - def _warn(self, msg, category=UserWarning): + @staticmethod + def _warn(msg, category=UserWarning, stacklevel=0): from warnings import warn - warn(msg, category, stacklevel=1 + self._stacklevel) + warn(msg, category, stacklevel=1 + stacklevel) - def store_if_not_present(self, key, value): - self.store(key, value, _skip_if_present=True) + def store_if_not_present(self, key, value, _stacklevel=0): + self.store(key, value, _skip_if_present=True, _stacklevel=1 + _stacklevel) - def store(self, key, value, _skip_if_present=False): + def store(self, key, value, _skip_if_present=False, _stacklevel=0): raise NotImplementedError() - def fetch(self, key): + def fetch(self, key, _stacklevel=0): raise NotImplementedError() - def _read(self, path): + @staticmethod + def _read(path): from six.moves.cPickle import load with open(path, "rb") as inf: return load(inf) - def _write(self, path, value): + @staticmethod + def _write(path, value): from six.moves.cPickle import dump, HIGHEST_PROTOCOL with open(path, "wb") as outf: dump(value, outf, protocol=HIGHEST_PROTOCOL) @@ -508,7 +485,7 @@ class _PersistentDictBase(object): def _make_container_dir(self): _make_dir_recursively(self.container_dir) - def _collision_check(self, key, stored_key): + def _collision_check(self, key, stored_key, _stacklevel): if stored_key != key: # Key collision, oh well. self._warn("%s: key collision in cache at '%s' -- these are " @@ -517,21 +494,19 @@ class _PersistentDictBase(object): "(that is not considering some elements relevant " "for equality comparison)" % (self.identifier, self.container_dir), - CollisionWarning) + CollisionWarning, + 1 + _stacklevel) # This is here so we can step through equality comparison to # see what is actually non-equal. - stored_key == key + stored_key == key # pylint:disable=pointless-statement raise NoSuchEntryError(key) def __getitem__(self, key): - return self.fetch(key) + return self.fetch(key, _stacklevel=1) def __setitem__(self, key, value): - self.store(key, value) - - def __delitem__(self, key): - raise NotImplementedError() + self.store(key, value, _stacklevel=1) def clear(self): try: @@ -543,7 +518,6 @@ class _PersistentDictBase(object): self._make_container_dir() -@_tracks_stacklevel class WriteOncePersistentDict(_PersistentDictBase): """A concurrent disk-backed dictionary that disallows overwriting/deletion. 
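The hunks above replace the deleted `_tracks_stacklevel` decorator with explicit bookkeeping: public entry points pass `_stacklevel` down, every intermediate layer adds one for its own frame, and `_warn` hands the total to `warnings.warn`. A minimal, runnable sketch of the pattern follows; the `getitem`/`fetch`/`_warn` names here are illustrative stand-ins, not the actual dict methods:

import warnings


def _warn(msg, stacklevel=0):
    # The base of 2 attributes the warning to _warn's caller (1 would point
    # at this very line); `stacklevel` adds one for each further library
    # frame the caller reported sitting between it and user code.
    warnings.warn(msg, UserWarning, stacklevel=2 + stacklevel)


def fetch(key, _stacklevel=0):
    # Pretend a key collision was detected during lookup.
    _warn("key collision for %r" % (key,), stacklevel=1 + _stacklevel)


def getitem(key):
    # One extra library frame (this one) sits between the user and fetch().
    fetch(key, _stacklevel=1)


getitem("some-key")  # the warning is attributed to this line

Unlike the removed decorator, which wrapped every method to maintain a mutable `_stacklevel` attribute on the object, the explicit parameter keeps the count local to each call chain and costs nothing when no warning is issued.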
@@ -570,7 +544,7 @@ class WriteOncePersistentDict(_PersistentDictBase): _PersistentDictBase.__init__(self, identifier, key_builder, container_dir) self._cache = _LRUCache(in_mem_cache_size) - def _spin_until_removed(self, lock_file): + def _spin_until_removed(self, lock_file, stacklevel): from os.path import exists attempts = 0 @@ -582,20 +556,21 @@ class WriteOncePersistentDict(_PersistentDictBase): if attempts > 10: self._warn("waiting until unlocked--delete '%s' if necessary" - % lock_file) + % lock_file, stacklevel=1 + stacklevel) if attempts > 3 * 60: raise RuntimeError("waited more than three minutes " "on the lock file '%s'" "--something is wrong" % lock_file) - def store(self, key, value, _skip_if_present=False): + def store(self, key, value, _skip_if_present=False, _stacklevel=0): hexdigest_key = self.key_builder(key) cleanup_m = CleanupManager() try: try: - LockManager(cleanup_m, self._lock_file(hexdigest_key)) + LockManager(cleanup_m, self._lock_file(hexdigest_key), + 1 + _stacklevel) item_dir_m = ItemDirManager( cleanup_m, self._item_dir(hexdigest_key), delete_on_error=False) @@ -613,15 +588,15 @@ class WriteOncePersistentDict(_PersistentDictBase): self._write(value_path, value) self._write(key_path, key) - logger.debug("%s: disk cache store [key=%s]" % ( - self.identifier, hexdigest_key)) + logger.debug("%s: disk cache store [key=%s]", + self.identifier, hexdigest_key) except Exception: cleanup_m.error_clean_up() raise finally: cleanup_m.clean_up() - def fetch(self, key): + def fetch(self, key, _stacklevel=0): hexdigest_key = self.key_builder(key) # {{{ in memory cache @@ -631,9 +606,9 @@ class WriteOncePersistentDict(_PersistentDictBase): except KeyError: pass else: - logger.debug("%s: in mem cache hit [key=%s]" % ( - self.identifier, hexdigest_key)) - self._collision_check(key, stored_key) + logger.debug("%s: in mem cache hit [key=%s]", + self.identifier, hexdigest_key) + self._collision_check(key, stored_key, 1 + _stacklevel) return stored_value # }}} @@ -644,12 +619,12 @@ class WriteOncePersistentDict(_PersistentDictBase): from os.path import isdir if not isdir(item_dir): - logger.debug("%s: disk cache miss [key=%s]" % ( - self.identifier, hexdigest_key)) + logger.debug("%s: disk cache miss [key=%s]", + self.identifier, hexdigest_key) raise NoSuchEntryError(key) lock_file = self._lock_file(hexdigest_key) - self._spin_until_removed(lock_file) + self._spin_until_removed(lock_file, 1 + _stacklevel) # }}} @@ -668,15 +643,16 @@ class WriteOncePersistentDict(_PersistentDictBase): "encountered an invalid " "key file for key %s. Remove the directory " "'%s' if necessary. (caught: %s)" - % (self.identifier, hexdigest_key, item_dir, str(e))) + % (self.identifier, hexdigest_key, item_dir, str(e)), + stacklevel=1 + _stacklevel) raise NoSuchEntryError(key) - self._collision_check(key, read_key) + self._collision_check(key, read_key, 1 + _stacklevel) # }}} - logger.debug("%s: disk cache hit [key=%s]" % ( - self.identifier, hexdigest_key)) + logger.debug("%s: disk cache hit [key=%s]", + self.identifier, hexdigest_key) # {{{ load contents @@ -687,7 +663,8 @@ class WriteOncePersistentDict(_PersistentDictBase): "encountered an invalid " "key file for key %s. Remove the directory " "'%s' if necessary." 
- % (self.identifier, hexdigest_key, item_dir)) + % (self.identifier, hexdigest_key, item_dir), + stacklevel=1 + _stacklevel) raise NoSuchEntryError(key) # }}} @@ -700,17 +677,18 @@ class WriteOncePersistentDict(_PersistentDictBase): self._cache.clear() -@_tracks_stacklevel class PersistentDict(_PersistentDictBase): """A concurrent disk-backed dictionary. .. automethod:: __init__ .. automethod:: __getitem__ .. automethod:: __setitem__ + .. automethod:: __delitem__ .. automethod:: clear .. automethod:: store .. automethod:: store_if_not_present .. automethod:: fetch + .. automethod:: remove """ def __init__(self, identifier, key_builder=None, container_dir=None): """ @@ -720,14 +698,14 @@ class PersistentDict(_PersistentDictBase): """ _PersistentDictBase.__init__(self, identifier, key_builder, container_dir) - def store(self, key, value, _skip_if_present=False): + def store(self, key, value, _skip_if_present=False, _stacklevel=0): hexdigest_key = self.key_builder(key) cleanup_m = CleanupManager() try: try: LockManager(cleanup_m, self._lock_file(hexdigest_key), - 1 + self._stacklevel) + 1 + _stacklevel) item_dir_m = ItemDirManager( cleanup_m, self._item_dir(hexdigest_key), delete_on_error=True) @@ -745,29 +723,29 @@ class PersistentDict(_PersistentDictBase): self._write(value_path, value) self._write(key_path, key) - logger.debug("%s: cache store [key=%s]" % ( - self.identifier, hexdigest_key)) + logger.debug("%s: cache store [key=%s]", + self.identifier, hexdigest_key) except Exception: cleanup_m.error_clean_up() raise finally: cleanup_m.clean_up() - def fetch(self, key): + def fetch(self, key, _stacklevel=0): hexdigest_key = self.key_builder(key) item_dir = self._item_dir(hexdigest_key) from os.path import isdir if not isdir(item_dir): - logger.debug("%s: cache miss [key=%s]" % ( - self.identifier, hexdigest_key)) + logger.debug("%s: cache miss [key=%s]", + self.identifier, hexdigest_key) raise NoSuchEntryError(key) cleanup_m = CleanupManager() try: try: LockManager(cleanup_m, self._lock_file(hexdigest_key), - 1 + self._stacklevel) + 1 + _stacklevel) item_dir_m = ItemDirManager( cleanup_m, item_dir, delete_on_error=False) @@ -783,15 +761,16 @@ class PersistentDict(_PersistentDictBase): self._warn("pytools.persistent_dict.PersistentDict(%s) " "encountered an invalid " "key file for key %s. Entry deleted." - % (self.identifier, hexdigest_key)) + % (self.identifier, hexdigest_key), + stacklevel=1 + _stacklevel) raise NoSuchEntryError(key) - self._collision_check(key, read_key) + self._collision_check(key, read_key, 1 + _stacklevel) # }}} - logger.debug("%s: cache hit [key=%s]" % ( - self.identifier, hexdigest_key)) + logger.debug("%s: cache hit [key=%s]", + self.identifier, hexdigest_key) # {{{ load value @@ -802,7 +781,8 @@ class PersistentDict(_PersistentDictBase): self._warn("pytools.persistent_dict.PersistentDict(%s) " "encountered an invalid " "key file for key %s. Entry deleted." 
- % (self.identifier, hexdigest_key)) + % (self.identifier, hexdigest_key), + stacklevel=1 + _stacklevel) raise NoSuchEntryError(key) return read_contents @@ -815,7 +795,7 @@ class PersistentDict(_PersistentDictBase): finally: cleanup_m.clean_up() - def remove(self, key): + def remove(self, key, _stacklevel=0): hexdigest_key = self.key_builder(key) item_dir = self._item_dir(hexdigest_key) @@ -827,7 +807,7 @@ class PersistentDict(_PersistentDictBase): try: try: LockManager(cleanup_m, self._lock_file(hexdigest_key), - 1 + self._stacklevel) + 1 + _stacklevel) item_dir_m = ItemDirManager( cleanup_m, item_dir, delete_on_error=False) key_file = self._key_file(hexdigest_key) @@ -841,10 +821,11 @@ class PersistentDict(_PersistentDictBase): self._warn("pytools.persistent_dict.PersistentDict(%s) " "encountered an invalid " "key file for key %s. Entry deleted." - % (self.identifier, hexdigest_key)) + % (self.identifier, hexdigest_key), + stacklevel=1 + _stacklevel) raise NoSuchEntryError(key) - self._collision_check(key, read_key) + self._collision_check(key, read_key, 1 + _stacklevel) # }}} @@ -857,7 +838,7 @@ class PersistentDict(_PersistentDictBase): cleanup_m.clean_up() def __delitem__(self, key): - self.remove(key) + self.remove(key, _stacklevel=1) # }}} diff --git a/pytools/prefork.py b/pytools/prefork.py index 68a46d865e89aedd4b8315a6979b777a0fd95e99..be388f3d5ab60e8abc8b42898dc3ad9619ce624d 100644 --- a/pytools/prefork.py +++ b/pytools/prefork.py @@ -17,11 +17,12 @@ class DirectForker(object): self.apids = {} self.count = 0 - def call(self, cmdline, cwd=None): - from subprocess import call + @staticmethod + def call(cmdline, cwd=None): + from subprocess import call as spcall try: - return call(cmdline, cwd=cwd) + return spcall(cmdline, cwd=cwd) except OSError as e: raise ExecError("error invoking '%s': %s" % (" ".join(cmdline), e)) @@ -40,7 +41,8 @@ class DirectForker(object): raise ExecError("error invoking '%s': %s" % (" ".join(cmdline), e)) - def call_capture_output(self, cmdline, cwd=None, error_on_nonzero=True): + @staticmethod + def call_capture_output(cmdline, cwd=None, error_on_nonzero=True): from subprocess import Popen, PIPE try: @@ -123,7 +125,7 @@ def _fork_server(sock): try: while True: func_name, args, kwargs = _recv_packet( - sock, who="Prefork server", partner="parent" + sock, who="Prefork server", partner="parent" ) if func_name == "quit": @@ -133,7 +135,8 @@ def _fork_server(sock): else: try: result = funcs[func_name](*args, **kwargs) - except Exception as e: + # FIXME: Is catching all exceptions the right course of action? 
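+                    # (Shipping the exception object to the parent keeps the
+                    # server loop alive; _remote_invoke re-raises it on the
+                    # client side, so the error is not silently swallowed.)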
+ except Exception as e: # pylint:disable=broad-except _send_packet(sock, ("exception", e)) else: _send_packet(sock, ("ok", result)) @@ -141,7 +144,7 @@ def _fork_server(sock): sock.close() import os - os._exit(0) + os._exit(0) # pylint:disable=protected-access class IndirectForker(object): @@ -155,13 +158,14 @@ class IndirectForker(object): def _remote_invoke(self, name, *args, **kwargs): _send_packet(self.socket, (name, args, kwargs)) status, result = _recv_packet( - self.socket, who="Prefork client", partner="prefork server" + self.socket, who="Prefork client", partner="prefork server" ) if status == "exception": raise result - elif status == "ok": - return result + + assert status == "ok" + return result def _quit(self): self._remote_invoke("quit") @@ -186,8 +190,11 @@ class IndirectForker(object): return self._remote_invoke("waitall") +forker = DirectForker() + + def enable_prefork(): - global forker + global forker # pylint:disable=global-statement if isinstance(forker, IndirectForker): return @@ -208,9 +215,6 @@ def enable_prefork(): forker = IndirectForker(fork_res, s_parent) -forker = DirectForker() - - def call(cmdline, cwd=None): return forker.call(cmdline, cwd) diff --git a/pytools/py_codegen.py b/pytools/py_codegen.py index 2adb0aaa031a4b7155c3093b8309b1edebdf1a8e..97aef192041f131ef026633f89550605275585a3 100644 --- a/pytools/py_codegen.py +++ b/pytools/py_codegen.py @@ -22,6 +22,10 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ +import marshal +import imp +from types import FunctionType, ModuleType + import six @@ -121,13 +125,10 @@ class PicklableModule(object): self.mod_globals = mod_globals def __getstate__(self): - import marshal - nondefault_globals = {} functions = {} modules = {} - from types import FunctionType, ModuleType for k, v in six.iteritems(self.mod_globals): if isinstance(v, FunctionType): functions[k] = ( @@ -139,43 +140,37 @@ class PicklableModule(object): elif k not in _empty_module_dict: nondefault_globals[k] = v - import imp return (1, imp.get_magic(), functions, modules, nondefault_globals) def __setstate__(self, obj): - v = obj[0] - if v == 0: + if obj[0] == 0: magic, functions, nondefault_globals = obj[1:] modules = {} - elif v == 1: + elif obj[0] == 1: magic, functions, modules, nondefault_globals = obj[1:] else: raise ValueError("unknown version of PicklableModule") - import imp if magic != imp.get_magic(): raise ValueError("cannot unpickle function binary: " "incorrect magic value (got: %s, expected: %s)" % (magic, imp.get_magic())) - import marshal - mod_globals = _empty_module_dict.copy() mod_globals.update(nondefault_globals) - self.mod_globals = mod_globals from pytools.importlib_backport import import_module - for k, mod_name in six.iteritems(modules): mod_globals[k] = import_module(mod_name) - from types import FunctionType - for k, v in six.iteritems(functions): - name, code_bytes, argdefs = v + for k, (name, code_bytes, argdefs) in six.iteritems(functions): f = FunctionType( - marshal.loads(code_bytes), mod_globals, argdefs=argdefs) + marshal.loads(code_bytes), mod_globals, name=name, + argdefs=argdefs) mod_globals[k] = f + self.mod_globals = mod_globals + # }}} diff --git a/pytools/spatial_btree.py b/pytools/spatial_btree.py index dba6ec835367f85ade5ff7635922ab0820397a6b..f29cb903cddd5f1a9a9f0127f207be001e0129eb 100644 --- a/pytools/spatial_btree.py +++ b/pytools/spatial_btree.py @@ -106,7 +106,7 @@ class SpatialBinaryTreeBucket: # No subdivisions yet. 
if len(self.elements) > self.max_elements_per_box: # Too many elements. Need to subdivide. - self.all_buckets = [] + self.all_buckets = [] # noqa: E501 pylint:disable=attribute-defined-outside-init self.buckets = make_buckets( self.bottom_left, self.top_right, self.all_buckets, @@ -142,7 +142,7 @@ class SpatialBinaryTreeBucket: yield result # Perform linear search. - for el, bbox in self.elements: + for el, _ in self.elements: yield el def visualize(self, file): diff --git a/pytools/stopwatch.py b/pytools/stopwatch.py index e34bb9f3644e503a4ab33f990bda2b9679a81c29..75e3ed6e49d643862e7fb0b7f1974187325b92ef 100644 --- a/pytools/stopwatch.py +++ b/pytools/stopwatch.py @@ -68,5 +68,5 @@ def print_job_summary(): HIDDEN_JOBS = [] VISIBLE_JOBS = [] -JOB_TIMES = pytools.DictionaryWithDefault(lambda x: 0) +JOB_TIMES = pytools.DependentDictionary(lambda x: 0) PRINT_JOBS = pytools.Reference(True) diff --git a/pytools/test.py b/pytools/test.py index e0b28e85beabeaeccf6f209e5bbf7900a9735a4b..067ef57e6d4594a68de2bbca81a4893e8a365180 100644 --- a/pytools/test.py +++ b/pytools/test.py @@ -1,6 +1,6 @@ from __future__ import absolute_import try: - from py.test import mark as mark_test + from py.test import mark as mark_test # pylint:disable=unused-import except ImportError: class _Mark: def __getattr__(self, name): diff --git a/pytools/version.py b/pytools/version.py index 4ed105cfd6b4a73ed12fbf38aa2b9b6fcc9f6b54..6ba2863610f2fe534ddbfabbbd4a41673be4294c 100644 --- a/pytools/version.py +++ b/pytools/version.py @@ -1,3 +1,3 @@ -VERSION = (2018, 5, 2) +VERSION = (2019, 1) VERSION_STATUS = "" VERSION_TEXT = ".".join(str(x) for x in VERSION) + VERSION_STATUS diff --git a/setup.py b/setup.py index 42b70c7613564c58f1d99acb3d3cdac6d4587d1a..adba753c7690f5c7a258d324d3799a0aa98618ca 100644 --- a/setup.py +++ b/setup.py @@ -29,8 +29,10 @@ setup(name="pytools", 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', + 'Programming Language :: Python :: 3.5', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', 'Topic :: Scientific/Engineering', 'Topic :: Scientific/Engineering :: Information Analysis', 'Topic :: Scientific/Engineering :: Mathematics', diff --git a/test/test_data_table.py b/test/test_data_table.py index c1e795a47c0d7cc27ac719735cb46ece1da6bbd2..c95405d26591bd4aee5d32fdfef472a54e60a4bf 100644 --- a/test/test_data_table.py +++ b/test/test_data_table.py @@ -64,7 +64,7 @@ def test_sort(): def test_aggregate(): et = get_employee_table() et.sort(["dept"]) - agg = et.aggregated(["dept"], "lastname", lambda lst: ",".join(lst)) + agg = et.aggregated(["dept"], "lastname", ",".join) assert len(agg) == 4 for dept, lastnames in agg: lastnames = lastnames.split(",") diff --git a/test/test_persistent_dict.py b/test/test_persistent_dict.py index 1318bbb0387d8d9e1e9351ea7902cec2df00b510..dde65e4892b8b770baacf10ba794450622694ae3 100644 --- a/test/test_persistent_dict.py +++ b/test/test_persistent_dict.py @@ -1,16 +1,14 @@ -from __future__ import division, with_statement, absolute_import +from __future__ import absolute_import, division, with_statement -import pytest # noqa +import shutil import sys # noqa import tempfile -import shutil -from six.moves import range -from six.moves import zip +import pytest +from six.moves import range, zip -from pytools.persistent_dict import ( - PersistentDict, WriteOncePersistentDict, NoSuchEntryError, - 
ReadOnlyEntryError, CollisionWarning) +from pytools.persistent_dict import (CollisionWarning, NoSuchEntryError, + PersistentDict, ReadOnlyEntryError, WriteOncePersistentDict) # {{{ type for testing @@ -97,7 +95,7 @@ def test_persistent_dict_storage_and_lookup(): # check not found with pytest.raises(NoSuchEntryError): - pdict[3000] + pdict.fetch(3000) finally: shutil.rmtree(tmpdir) @@ -112,7 +110,7 @@ def test_persistent_dict_deletion(): del pdict[0] with pytest.raises(NoSuchEntryError): - pdict[0] + pdict.fetch(0) with pytest.raises(NoSuchEntryError): del pdict[1] @@ -138,7 +136,7 @@ def test_persistent_dict_synchronization(): # check deletion del pdict1[0] with pytest.raises(NoSuchEntryError): - pdict2[0] + pdict2.fetch(0) finally: shutil.rmtree(tmpdir) @@ -157,7 +155,7 @@ def test_persistent_dict_cache_collisions(): # check lookup with pytest.warns(CollisionWarning): with pytest.raises(NoSuchEntryError): - pdict[key2] + pdict.fetch(key2) # check deletion with pytest.warns(CollisionWarning): @@ -181,11 +179,11 @@ def test_persistent_dict_clear(): pdict = PersistentDict("pytools-test", container_dir=tmpdir) pdict[0] = 1 - pdict[0] + pdict.fetch(0) pdict.clear() with pytest.raises(NoSuchEntryError): - pdict[0] + pdict.fetch(0) finally: shutil.rmtree(tmpdir) @@ -211,7 +209,7 @@ def test_write_once_persistent_dict_storage_and_lookup(in_mem_cache_size): # check not found with pytest.raises(NoSuchEntryError): - pdict[1] + pdict.fetch(1) # check store_if_not_present pdict.store_if_not_present(0, 2) @@ -234,22 +232,22 @@ def test_write_once_persistent_dict_lru_policy(): pdict[3] = PDictTestingKeyOrValue(3) pdict[4] = PDictTestingKeyOrValue(4) - val1 = pdict[1] - - assert pdict[1] is val1 - pdict[2] - assert pdict[1] is val1 - pdict[2] - pdict[3] - assert pdict[1] is val1 - pdict[2] - pdict[3] - pdict[2] - assert pdict[1] is val1 - pdict[2] - pdict[3] - pdict[4] - assert pdict[1] is not val1 + val1 = pdict.fetch(1) + + assert pdict.fetch(1) is val1 + pdict.fetch(2) + assert pdict.fetch(1) is val1 + pdict.fetch(2) + pdict.fetch(3) + assert pdict.fetch(1) is val1 + pdict.fetch(2) + pdict.fetch(3) + pdict.fetch(2) + assert pdict.fetch(1) is val1 + pdict.fetch(2) + pdict.fetch(3) + pdict.fetch(4) + assert pdict.fetch(1) is not val1 finally: shutil.rmtree(tmpdir) @@ -285,7 +283,7 @@ def test_write_once_persistent_dict_cache_collisions(): # check lookup with pytest.warns(CollisionWarning): with pytest.raises(NoSuchEntryError): - pdict[key2] + pdict.fetch(key2) # check update with pytest.raises(ReadOnlyEntryError): @@ -305,11 +303,11 @@ def test_write_once_persistent_dict_clear(): pdict = WriteOncePersistentDict("pytools-test", container_dir=tmpdir) pdict[0] = 1 - pdict[0] + pdict.fetch(0) pdict.clear() with pytest.raises(NoSuchEntryError): - pdict[0] + pdict.fetch(0) finally: shutil.rmtree(tmpdir) @@ -318,5 +316,4 @@ if __name__ == "__main__": if len(sys.argv) > 1: exec(sys.argv[1]) else: - from pytest import main - main([__file__]) + pytest.main([__file__]) diff --git a/test/test_py_codegen.py b/test/test_py_codegen.py index 0015e7a719fcaed2fed754fbbbe6b1b7346c3f0d..01fe3c36f58b43c4b171803cb9511155911b3f30 100644 --- a/test/test_py_codegen.py +++ b/test/test_py_codegen.py @@ -1,9 +1,9 @@ -from __future__ import division, with_statement, absolute_import +from __future__ import absolute_import, division, with_statement + +import sys -import pytest # noqa import pytools import pytools.py_codegen as codegen -import sys def test_pickling_with_module_import(): @@ -34,5 +34,5 @@ if __name__ == "__main__": if 
len(sys.argv) > 1: exec(sys.argv[1]) else: - from py.test import main + from pytest import main main([__file__]) diff --git a/test/test_pytools.py b/test/test_pytools.py index 65514dd927c09b3b7a69e76d309e004cb5098d8b..91748c82e52fc2b362023adc096116417d9db2d1 100644 --- a/test/test_pytools.py +++ b/test/test_pytools.py @@ -1,9 +1,9 @@ -from __future__ import division, with_statement -from __future__ import absolute_import +from __future__ import absolute_import, division, with_statement -import pytest import sys +import pytest + @pytest.mark.skipif("sys.version_info < (2, 5)") def test_memoize_method_clear(): @@ -23,7 +23,7 @@ def test_memoize_method_clear(): sc.f() assert sc.run_count == 1 - sc.f.clear_cache(sc) + sc.f.clear_cache(sc) # pylint: disable=no-member def test_memoize_method_with_uncached(): @@ -35,6 +35,7 @@ def test_memoize_method_with_uncached(): @memoize_method_with_uncached(uncached_args=[1], uncached_kwargs=["z"]) def f(self, x, y, z): + del x, y, z self.run_count += 1 return 17 @@ -45,7 +46,7 @@ def test_memoize_method_with_uncached(): sc.f(18, 19, z=20) assert sc.run_count == 2 - sc.f.clear_cache(sc) + sc.f.clear_cache(sc) # pylint: disable=no-member def test_memoize_method_nested(): @@ -156,45 +157,6 @@ def test_spatial_btree(dims, do_plot=False): pt.show() -def test_diskdict(): - if sys.platform.startswith("win"): - pytest.xfail("unreliable on windows") - - from pytools.diskdict import DiskDict - - from tempfile import NamedTemporaryFile - - with NamedTemporaryFile() as ntf: - d = DiskDict(ntf.name) - - key_val = [ - ((), "hi"), - (frozenset([1, 2, "hi"]), 5) - ] - - for k, v in key_val: - d[k] = v - for k, v in key_val: - assert d[k] == v - del d - - d = DiskDict(ntf.name) - for k, v in key_val: - del d[k] - del d - - d = DiskDict(ntf.name) - for k, v in key_val: - d[k] = v - del d - - d = DiskDict(ntf.name) - for k, v in key_val: - assert k in d - assert d[k] == v - del d - - def test_generate_numbered_unique_names(): from pytools import generate_numbered_unique_names
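Taken together, the `fetch`-based assertions above document the public round trip of `PersistentDict`. Condensed into one standalone snippet, using the same `"pytools-test"` identifier and throwaway `container_dir` as the test fixtures:

import shutil
import tempfile

import pytest

from pytools.persistent_dict import NoSuchEntryError, PersistentDict

tmpdir = tempfile.mkdtemp()
try:
    pdict = PersistentDict("pytools-test", container_dir=tmpdir)

    pdict[0] = 1                 # __setitem__ delegates to store()
    assert pdict.fetch(0) == 1   # explicit fetch(), as the tests now prefer

    del pdict[0]                 # __delitem__ delegates to remove()
    with pytest.raises(NoSuchEntryError):
        pdict.fetch(0)
finally:
    shutil.rmtree(tmpdir)

The lookups were switched from subscripting to `fetch()` in step with the `_stacklevel` plumbing shown earlier in the diff, which threads a frame count through `__getitem__`, `store`, `fetch`, and `remove`.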