Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • isuruf/pymbolic
  • inducer/pymbolic
  • xywei/pymbolic
  • wence-/pymbolic
  • kaushikcfd/pymbolic
  • fikl2/pymbolic
  • zweiner2/pymbolic
7 results
Show changes
from __future__ import annotations
__copyright__ = "Copyright (C) 2023 University of Illinois Board of Trustees"
__license__ = """
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from pymbolic.mapper.persistent_hash import PersistentHashWalkMapper
def test_persistent_hash_simple() -> None:
    """Check that the persistent hash of a fixed random expression is stable."""
    import hashlib

    from testlib import generate_random_expression

    # A fixed seed yields a deterministic expression, so the digest below
    # must never change across runs or versions.
    expr = generate_random_expression(seed=333)

    digest = hashlib.sha256()
    PersistentHashWalkMapper(digest)(expr)

    expected = "1a1cd91483015333f2a9b06ab049a8edabc72aafc1f9b6d7cd831a39068e50da"
    assert digest.hexdigest() == expected
from __future__ import division
from __future__ import annotations
from pymbolic.mapper.evaluator import evaluate_kw
from pymbolic.mapper.flattener import FlattenMapper
from pymbolic.mapper.stringifier import StringifyMapper
__copyright__ = "Copyright (C) 2009-2013 Andreas Kloeckner"
......@@ -22,21 +27,68 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import pymbolic
import pymbolic.primitives as prim
import logging
from functools import reduce
from typing import TYPE_CHECKING
import pytest
from testlib import generate_random_expression
from pytools.lex import ParseError
import pymbolic.primitives as prim
from pymbolic import parse
from pymbolic.mapper import IdentityMapper, WalkMapper
from pymbolic.mapper.dependency import CachedDependencyMapper, DependencyMapper
if TYPE_CHECKING:
from pymbolic.typing import Expression
logger = logging.getLogger(__name__)
# {{{ utilities
def assert_parsed_same_as_python(expr_str):
    """Assert pymbolic's parser agrees with Python's own parse of *expr_str*.

    If Python rejects the string with a SyntaxError, pymbolic must raise
    ParseError for it too.
    """
    # Single-line input only: tuple unpacking fails on embedded newlines.
    expr_str, = expr_str.split("\n")

    import ast

    from pymbolic.interop.ast import ASTToPymbolic
    ast2p = ASTToPymbolic()

    try:
        python_expr = ast2p(ast.parse(expr_str).body[0].value)
    except SyntaxError:
        # Python could not parse it; pymbolic must reject it as well.
        with pytest.raises(ParseError):
            parse(expr_str)
    else:
        pymbolic_expr = parse(expr_str)
        assert python_expr == pymbolic_expr
from pymbolic.mapper import IdentityMapper
try:
reduce
except NameError:
from functools import reduce
def assert_parse_roundtrip(expr_str):
    """Assert that stringifying the parsed expression reproduces the input."""
    from pymbolic.mapper.stringifier import StringifyMapper
    rendered = StringifyMapper()(parse(expr_str))
    assert rendered == expr_str, (rendered, expr_str)
# }}}
EXPRESSION_COLLECTION = [
parse("(x[2]+y.data)*(x+z)**3"),
parse("(~x)//2 | (y >> 2) & (z << 3)"),
parse("x and (not y or z)"),
parse("x if not (y and z) else x+1"),
]
pymbolic.disable_subscript_by_getitem()
# {{{ test_integer_power
def test_integer_power():
from pymbolic.algorithm import integer_power
......@@ -49,21 +101,33 @@ def test_integer_power():
]:
assert base**expn == integer_power(base, expn)
# }}}
# {{{ test_expand
def test_expand():
    # Smoke test: expanding (x+1)**5 must not raise.
    # NOTE(review): both import orderings below are diff artifacts of this
    # rendered comparison; they are redundant but harmless.
    from pymbolic import var, expand
    from pymbolic import expand, var
    x = var("x")
    u = (x+1)**5
    expand(u)
# }}}
# {{{ test_substitute
def test_substitute():
    # Substitute the attribute access x.min -> 25, then evaluate:
    # 5 + 25**2 == 630.
    # NOTE(review): the doubled import line is a diff-render artifact.
    from pymbolic import parse, substitute, evaluate
    from pymbolic import evaluate, parse, substitute
    u = parse("5+x.min**2")
    xmin = parse("x.min")
    assert evaluate(substitute(u, {xmin: 25})) == 630
# }}}
# {{{ test_no_comparison
def test_no_comparison():
from pymbolic import parse
......@@ -77,20 +141,27 @@ def test_no_comparison():
except TypeError:
pass
else:
assert False
raise AssertionError
expect_typeerror(lambda: x < y)
expect_typeerror(lambda: x <= y)
expect_typeerror(lambda: x > y)
expect_typeerror(lambda: x >= y)
# }}}
# {{{ test_structure_preservation
def test_structure_preservation():
    """IdentityMapper must return an expression equal to a plain Sum input."""
    original = prim.Sum((5, 7))

    from pymbolic.mapper import IdentityMapper
    mapped = IdentityMapper()(original)

    assert original == mapped
# }}}
# {{{ test_sympy_interaction
def test_sympy_interaction():
pytest.importorskip("sympy")
......@@ -98,13 +169,11 @@ def test_sympy_interaction():
import sympy as sp
x, y = sp.symbols("x y")
f = sp.symbols("f")
f = sp.Function("f")
s1_expr = 1/f(x/sp.sqrt(x**2+y**2)).diff(x, 5)
s1_expr = 1/f(x/sp.sqrt(x**2+y**2)).diff(x, 5) # pylint:disable=not-callable
from pymbolic.sympy_interface import (
SympyToPymbolicMapper,
PymbolicToSympyMapper)
from pymbolic.interop.sympy import PymbolicToSympyMapper, SympyToPymbolicMapper
s2p = SympyToPymbolicMapper()
p2s = PymbolicToSympyMapper()
......@@ -118,6 +187,8 @@ def test_sympy_interaction():
assert sp.ratsimp(s1_expr - s3_expr) == 0
# }}}
# {{{ fft
......@@ -158,26 +229,28 @@ def test_fft():
from pymbolic.algorithm import fft, sym_fft
vars = numpy.array([var(chr(97+i)) for i in range(16)], dtype=object)
print(vars)
logger.info("vars: %s", vars)
print(fft(vars))
logger.info("fft: %s", fft(vars))
traced_fft = sym_fft(vars)
from pymbolic.mapper.stringifier import PREC_NONE
from pymbolic.mapper.c_code import CCodeMapper
from pymbolic.mapper.stringifier import PREC_NONE
ccm = CCodeMapper()
code = [ccm(tfi, PREC_NONE) for tfi in traced_fft]
for cse_name, cse_str in enumerate(ccm.cse_name_list):
print("%s = %s" % (cse_name, cse_str))
logger.info("%s = %s", cse_name, cse_str)
for i, line in enumerate(code):
print("result[%d] = %s" % (i, line))
logger.info("result[%d] = %s", i, line)
# }}}
# {{{ test_sparse_multiply
def test_sparse_multiply():
numpy = pytest.importorskip("numpy")
pytest.importorskip("scipy")
......@@ -196,6 +269,8 @@ def test_sparse_multiply():
assert la.norm(mat_vec-mat_vec_2) < 1e-14
# }}}
# {{{ parser
......@@ -204,38 +279,47 @@ def test_parser():
parse("(2*a[1]*b[1]+2*a[0]*b[0])*(hankel_1(-1,sqrt(a[1]**2+a[0]**2)*k) "
"-hankel_1(1,sqrt(a[1]**2+a[0]**2)*k))*k /(4*sqrt(a[1]**2+a[0]**2)) "
"+hankel_1(0,sqrt(a[1]**2+a[0]**2)*k)")
print(repr(parse("d4knl0")))
print(repr(parse("0.")))
print(repr(parse("0.e1")))
logger.info("%r", parse("d4knl0"))
logger.info("%r", parse("0."))
logger.info("%r", parse("0.e1"))
assert parse("0.e1") == 0
assert parse("1e-12") == 1e-12
print(repr(parse("a >= 1")))
print(repr(parse("a <= 1")))
print(repr(parse(":")))
print(repr(parse("1:")))
print(repr(parse(":2")))
print(repr(parse("1:2")))
print(repr(parse("::")))
print(repr(parse("1::")))
print(repr(parse(":1:")))
print(repr(parse("::1")))
print(repr(parse("3::1")))
print(repr(parse(":5:1")))
print(repr(parse("3:5:1")))
print(repr(parse("g[i,k]+2.0*h[i,k]")))
print(repr(parse("g[i,k]+(+2.0)*h[i,k]")))
print(repr(parse("a - b - c")))
print(repr(parse("-a - -b - -c")))
print(repr(parse("- - - a - - - - b - - - - - c")))
print(repr(parse("~(a ^ b)")))
print(repr(parse("(a | b) | ~(~a & ~b)")))
print(repr(parse("3 << 1")))
print(repr(parse("1 >> 3")))
print(parse("3::1"))
logger.info("%r", parse("a >= 1"))
logger.info("%r", parse("a <= 1"))
logger.info("%r", parse(":"))
logger.info("%r", parse("1:"))
logger.info("%r", parse(":2"))
logger.info("%r", parse("1:2"))
logger.info("%r", parse("::"))
logger.info("%r", parse("1::"))
logger.info("%r", parse(":1:"))
logger.info("%r", parse("::1"))
logger.info("%r", parse("3::1"))
logger.info("%r", parse(":5:1"))
logger.info("%r", parse("3:5:1"))
assert_parse_roundtrip("()")
assert_parse_roundtrip("(3,)")
assert_parse_roundtrip("[x + 3, 3, 5]")
assert_parse_roundtrip("[]")
assert_parse_roundtrip("[x]")
assert_parse_roundtrip("g[i, k] + 2.0*h[i, k]")
parse("g[i,k]+(+2.0)*h[i, k]")
logger.info("%r", parse("a - b - c"))
logger.info("%r", parse("-a - -b - -c"))
logger.info("%r", parse("- - - a - - - - b - - - - - c"))
logger.info("%r", parse("~(a ^ b)"))
logger.info("%r", parse("(a | b) | ~(~a & ~b)"))
logger.info("%r", parse("3 << 1"))
logger.info("%r", parse("1 >> 3"))
logger.info(parse("3::1"))
assert parse("e1") == prim.Variable("e1")
assert parse("d1") == prim.Variable("d1")
......@@ -250,9 +334,23 @@ def test_parser():
assert parse("f(x,(y,z),z, name=15, name2=17)") == f(
x, (y, z), z, name=15, name2=17)
assert_parsed_same_as_python("5+i if i>=0 else (0 if i<-1 else 10)")
assert_parsed_same_as_python("0 if 1 if 2 else 3 else 4")
assert_parsed_same_as_python("0 if (1 if 2 else 3) else 4")
assert_parsed_same_as_python("(2, 3,)")
assert_parsed_same_as_python("-3**0.5")
assert_parsed_same_as_python("1/2/7")
with pytest.deprecated_call():
parse("1+if(0, 1, 2)")
assert eval(str(parse("1729 if True or False else 42"))) == 1729
# }}}
# {{{ test_mappers
def test_mappers():
from pymbolic import variables
f, x, y, z = variables("f x y z")
......@@ -260,30 +358,40 @@ def test_mappers():
for expr in [
f(x, (y, z), name=z**2)
]:
from pymbolic.mapper import WalkMapper
from pymbolic.mapper.dependency import DependencyMapper
str(expr)
IdentityMapper()(expr)
WalkMapper()(expr)
DependencyMapper()(expr)
# }}}
# {{{ test_func_dep_consistency
def test_func_dep_consistency():
    # Dependencies of f(x) and f(x=x) must agree: only {x} when descending
    # into call arguments (the called symbol f itself is not a dependency).
    # NOTE(review): duplicated assignments/asserts are diff-render artifacts.
    from pymbolic import var
    from pymbolic.mapper.dependency import DependencyMapper
    f = var('f')
    x = var('x')
    f = var("f")
    x = var("x")
    dep_map = DependencyMapper(include_calls="descend_args")
    assert dep_map(f(x)) == set([x])
    assert dep_map(f(x=x)) == set([x])
    assert dep_map(f(x)) == {x}
    assert dep_map(f(x=x)) == {x}
# }}}
# {{{ test_conditions
def test_conditions():
    # Comparison/logical builder methods should stringify with Python-like
    # syntax. (Duplicated assignments are diff-render artifacts.)
    from pymbolic import var
    x = var('x')
    y = var('y')
    x = var("x")
    y = var("y")
    assert str(x.eq(y).and_(x.le(5))) == "x == y and x <= 5"
# }}}
# {{{ test_graphviz
def test_graphviz():
from pymbolic import parse
......@@ -294,7 +402,9 @@ def test_graphviz():
from pymbolic.mapper.graphviz import GraphvizMapper
gvm = GraphvizMapper()
gvm(expr)
print(gvm.get_dot_code())
logger.info("%s", gvm.get_dot_code())
# }}}
# {{{ geometric algebra
......@@ -305,6 +415,7 @@ def test_geometric_algebra(dims):
pytest.importorskip("numpy")
import numpy as np
from pymbolic.geometric_algebra import MultiVector as MV # noqa
vec1 = MV(np.random.randn(dims))
......@@ -358,7 +469,7 @@ def test_geometric_algebra(dims):
# contractions
# (3.18) in [DFM]
assert abs(b.scalar_product(a ^ c) - (b >> a).scalar_product(c)) < 1e-13
assert abs(b.scalar_product(a ^ c) - (b >> a).scalar_product(c)) < 1e-12
# duality, (3.20) in [DFM]
assert ((a ^ b) << c) .close_to(a << (b << c))
......@@ -368,7 +479,7 @@ def test_geometric_algebra(dims):
assert (c << c.I.rev()).close_to(c | c.I.rev())
# inverse
for div in list(b.gen_blades()) + [vec1, vec1.I]:
for div in [*b.gen_blades(), vec1, vec1.I]:
assert (div.inv()*div).close_to(1)
assert (div*div.inv()).close_to(1)
assert ((1/div)*div).close_to(1)
......@@ -378,7 +489,7 @@ def test_geometric_algebra(dims):
# reverse properties (Sec 2.9.5 [DFM])
assert c.rev().rev() == c
assert (b ^ c).rev() .close_to((c.rev() ^ b.rev()))
assert (b ^ c).rev() .close_to(c.rev() ^ b.rev())
# dual properties
# (1.2.26) in [HS]
......@@ -387,7 +498,7 @@ def test_geometric_algebra(dims):
# involution properties (Sec 2.9.5 DFW)
assert c.invol().invol() == c
assert (b ^ c).invol() .close_to((b.invol() ^ c.invol()))
assert (b ^ c).invol() .close_to(b.invol() ^ c.invol())
# commutator properties
......@@ -401,6 +512,8 @@ def test_geometric_algebra(dims):
# }}}
# {{{ test_ast_interop
def test_ast_interop():
src = """
def f():
......@@ -411,7 +524,7 @@ def test_ast_interop():
import ast
mod = ast.parse(src.replace("\n ", "\n"))
print(ast.dump(mod))
logger.info("%s", ast.dump(mod))
from pymbolic.interop.ast import ASTToPymbolic
ast2p = ASTToPymbolic()
......@@ -428,11 +541,15 @@ def test_ast_interop():
lhs = ast2p(lhs)
rhs = ast2p(stmt.value)
print(lhs, rhs)
logger.info("lhs %s rhs %s", lhs, rhs)
# }}}
# {{{ test_compile
def test_compile():
from pymbolic import parse, compile
from pymbolic import compile, parse
code = compile(parse("x ** y"), ["x", "y"])
assert code(2, 5) == 32
......@@ -441,50 +558,529 @@ def test_compile():
code = pickle.loads(pickle.dumps(code))
assert code(3, 3) == 27
# }}}
# {{{ test_pickle
def test_pickle():
    """Pickling must round-trip expressions, preserving hash and equality."""
    from pickle import dumps, loads

    for original in EXPRESSION_COLLECTION:
        restored = loads(dumps(original))

        assert hash(original) == hash(restored)
        assert original == restored
class OldTimeyExpression(prim.ExpressionNode):
    # Mimics a legacy expression node that uses the old __getinitargs__
    # pickling protocol; used by the backward-compatibility pickle test.
    init_arg_names = ()

    def __getinitargs__(self):
        return ()
def test_pickle_backward_compat():
    """Legacy (__getinitargs__) nodes must still unpickle, with warnings."""
    from pickle import dumps, loads

    expr = 3*OldTimeyExpression()
    restored = loads(dumps(expr))

    # Hashing/comparing legacy nodes is deprecated, but must keep working.
    with pytest.warns(DeprecationWarning):
        assert hash(expr) == hash(restored)
    with pytest.warns(DeprecationWarning):
        assert expr == restored
# }}}
# {{{ test_unifier
def test_unifier():
    # Exercise UnidirectionalUnifier: only the variables named in its
    # constructor argument may be bound while matching the two expressions.
    # NOTE(review): doubled lines (set([...]) vs {...}) and the unreachable
    # second return in match_found are artifacts of this rendered diff.
    from pymbolic import var
    from pymbolic.mapper.unifier import UnidirectionalUnifier
    a, b, c, d, e, f = [var(s) for s in "abcdef"]
    a, b, c, d, e, f = (var(s) for s in "abcdef")

    def match_found(records, eqns):
        # True if some unification record contains every equation in *eqns*.
        for record in records:
            if eqns <= set(record.equations):
                return True
        return False
        return any(eqns <= set(record.equations) for record in records)

    # Two ways to unify a+b*c with d+e*f (product commutes).
    recs = UnidirectionalUnifier("abc")(a+b*c, d+e*f)
    assert len(recs) == 2
    assert match_found(recs, set([(a, d), (b, e), (c, f)]))
    assert match_found(recs, set([(a, d), (b, f), (c, e)]))
    assert match_found(recs, {(a, d), (b, e), (c, f)})
    assert match_found(recs, {(a, d), (b, f), (c, e)})

    # a+b against a three-term sum: 6 ways to split d+e+f across a and b.
    recs = UnidirectionalUnifier("abc")(a+b, d+e+f)
    assert len(recs) == 6
    assert match_found(recs, set([(a, d), (b, e+f)]))
    assert match_found(recs, set([(a, e), (b, d+f)]))
    assert match_found(recs, set([(a, f), (b, d+e)]))
    assert match_found(recs, set([(b, d), (a, e+f)]))
    assert match_found(recs, set([(b, e), (a, d+f)]))
    assert match_found(recs, set([(b, f), (a, d+e)]))
    assert match_found(recs, {(a, d), (b, e+f)})
    assert match_found(recs, {(a, e), (b, d+f)})
    assert match_found(recs, {(a, f), (b, d+e)})
    assert match_found(recs, {(b, d), (a, e+f)})
    assert match_found(recs, {(b, e), (a, d+f)})
    assert match_found(recs, {(b, f), (a, d+e)})

    # Large sum: the single unmatched term v0 must bind to a.
    vals = [var("v" + str(i)) for i in range(100)]
    recs = UnidirectionalUnifier("a")(sum(vals[1:]) + a, sum(vals))
    assert len(recs) == 1
    assert match_found(recs, set([(a, var("v0"))]))
    assert match_found(recs, {(a, var("v0"))})

    # More terms on the unifiable side than the target: no match.
    recs = UnidirectionalUnifier("abc")(a+b+c, d+e)
    assert len(recs) == 0

    # Nested calls force a unique consistent binding chain.
    recs = UnidirectionalUnifier("abc")(f(a+b, f(a+c)), f(b+c, f(b+d)))
    assert len(recs) == 1
    assert match_found(recs, set([(a, b), (b, c), (c, d)]))
    assert match_found(recs, {(a, b), (b, c), (c, d)})
# }}}
# {{{ test_long_sympy_mapping
def test_long_sympy_mapping():
    # Integers beyond machine word size (10**20) must survive the
    # sympy -> pymbolic conversion. (Doubled calls are diff artifacts.)
    sp = pytest.importorskip("sympy")
    from pymbolic.interop.sympy import SympyToPymbolicMapper
    SympyToPymbolicMapper()(sp.sympify(int(10**20)))
    SympyToPymbolicMapper()(sp.sympify(int(10)))
    SympyToPymbolicMapper()(sp.sympify(10**20))
    SympyToPymbolicMapper()(sp.sympify(10))
# }}}
# {{{ test_stringifier_preserve_shift_order
def test_stringifier_preserve_shift_order():
    """Shift nesting must survive a stringify-then-parse round trip."""
    shift_exprs = (
        parse("(a << b) >> 2"),
        parse("a << (b >> 2)"),
    )
    for shift_expr in shift_exprs:
        assert parse(str(shift_expr)) == shift_expr
# }}}
# {{{ test_latex_mapper
LATEX_TEMPLATE = r"""\documentclass{article}
\usepackage{amsmath}
\begin{document}
%s
\end{document}"""
def test_latex_mapper():
    # Render a variety of expressions to LaTeX and verify the collected
    # output actually compiles with the `latex` binary (test is skipped
    # when latex is not installed).
    from pymbolic import parse
    from pymbolic.mapper.stringifier import LaTeXMapper, StringifyMapper

    tm = LaTeXMapper()
    sm = StringifyMapper()

    equations = []

    def add(expr):
        # Add an equation to the list of tests.
        equations.append(r"\[{}\] % from: {}".format(tm(expr), sm(expr)))

    add(parse("a * b + c"))
    add(parse("f(a,b,c)"))
    add(parse("a ** b ** c"))
    add(parse("(a | b) ^ ~c"))
    add(parse("a << b"))
    add(parse("a >> b"))
    add(parse("a[i,j,k]"))
    add(parse("a[1:3]"))
    add(parse("a // b"))
    add(parse("not (a or b) and c"))
    add(parse("(a % b) % c"))
    add(parse("(a >= b) or (b <= c)"))
    add(prim.Min((1,)) + prim.Max((1, 2)))
    add(prim.Substitution(prim.Variable("x") ** 2, ("x",), (2,)))
    add(prim.Derivative(parse("x**2"), ("x",)))

    # Run LaTeX and ensure the file compiles.
    import os
    import shutil
    import subprocess
    import tempfile

    latex_dir = tempfile.mkdtemp("pymbolic")
    try:
        tex_file_path = os.path.join(latex_dir, "input.tex")

        with open(tex_file_path, "w") as tex_file:
            contents = LATEX_TEMPLATE % "\n".join(equations)
            tex_file.write(contents)

        try:
            subprocess.check_output(
                ["latex",
                 "-interaction=nonstopmode",
                 "-output-directory=%s" % latex_dir,
                 tex_file_path],
                universal_newlines=True)
        except FileNotFoundError:
            # No latex executable available; nothing to verify here.
            pytest.skip("latex command not found")
        except subprocess.CalledProcessError as err:
            # Surface the LaTeX log so the offending equation is visible.
            raise AssertionError(str(err.output)) from None
    finally:
        # Always clean up the scratch directory, pass or fail.
        shutil.rmtree(latex_dir)
# }}}
# {{{ test_flop_counter
def test_flop_counter():
    """CSE-aware flop counting must charge a shared subexpression only once."""
    x = prim.Variable("x")
    y = prim.Variable("y")
    z = prim.Variable("z")

    # The common subexpression costs 4 flops and is referenced twice below.
    shared = prim.make_common_subexpression(3 * (x**2 + y + z))
    expr = 3*shared + shared

    from pymbolic.mapper.flop_counter import CSEAwareFlopCounter, FlopCounter

    # Plain counter pays for the CSE at each use; CSE-aware pays once.
    assert FlopCounter()(expr) == 4 * 2 + 2
    assert CSEAwareFlopCounter()(expr) == 4 + 2
# }}}
# {{{ test_make_sym_vector
def test_make_sym_vector():
    """make_sym_vector accepts an int, a numpy integer, or an index list."""
    numpy = pytest.importorskip("numpy")
    from pymbolic.primitives import make_sym_vector

    for size_spec, expected_len in [
            (2, 2),
            (numpy.int32(2), 2),
            ([1, 2, 3], 3),
            ]:
        assert len(make_sym_vector("vec", size_spec)) == expected_len
# }}}
# {{{ test_multiplicative_stringify_preserves_association
def test_multiplicative_stringify_preserves_association():
    """Parenthesization of mixed * / // % chains must survive a round trip."""
    ops = ["*", " / ", " // ", " % "]
    for inner in ops:
        for outer in ops:
            if inner == outer:
                continue
            # Both nesting orders must print back exactly as written.
            assert_parse_roundtrip(f"x{outer}(y{inner}z)")
            assert_parse_roundtrip(f"(y{inner}z){outer}x")

    assert_parse_roundtrip("(-1)*(((-1)*x) / 5)")
# }}}
# {{{ test_differentiator_flags_for_nonsmooth_and_discontinuous
def test_differentiator_flags_for_nonsmooth_and_discontinuous():
    """Differentiating |x| or sign(x) requires an explicit nonsmoothness flag."""
    import pymbolic.functions as pf
    from pymbolic.mapper.differentiator import differentiate

    x = prim.Variable("x")

    # |x| is continuous but not smooth: rejected unless allowed explicitly.
    with pytest.raises(ValueError):
        differentiate(pf.fabs(x), x)
    assert differentiate(
            pf.fabs(x), x, allowed_nonsmoothness="continuous") == pf.sign(x)

    # sign(x) is discontinuous: needs the stronger flag.
    with pytest.raises(ValueError):
        differentiate(pf.sign(x), x)
    assert differentiate(
            pf.sign(x), x, allowed_nonsmoothness="discontinuous") == 0
# }}}
# {{{ test_diff_cse
def test_diff_cse():
    """Differentiating through a CSE must yield a numerically accurate result."""
    from pymbolic import evaluate_kw
    from pymbolic.mapper.differentiator import differentiate

    m = prim.Variable("math")
    x = prim.Variable("x")
    cse = prim.make_common_subexpression(x**2 + 1)
    expr = m.attr("exp")(cse)*m.attr("sin")(cse**2)

    diff_result = differentiate(expr, x)

    import math
    from functools import partial
    my_eval = partial(evaluate_kw, math=math)

    x0 = 5
    h = 0.001

    # Compare the symbolic derivative against central differences at two
    # step sizes; halving h should shrink the relative error ~quadratically.
    fprime = my_eval(diff_result, x=x0)
    fd_full = (my_eval(expr, x=x0+h) - my_eval(expr, x=x0-h))/(2*h)
    fd_half = (my_eval(expr, x=x0+0.5*h) - my_eval(expr, x=x0-0.5*h))/h

    rel_err_full = abs(fprime - fd_full)/abs(fprime)
    rel_err_half = abs(fprime - fd_half)/abs(fprime)
    assert rel_err_half < 1.1 * 0.5**2 * rel_err_full
# }}}
# {{{ test_coefficient_collector
def test_coefficient_collector():
    """CoefficientCollector gathers linear coefficients; key 1 holds the rest."""
    from pymbolic.mapper.coefficient import CoefficientCollector

    x, y, z = (prim.Variable(name) for name in "xyz")

    # Only x and y are tracked; everything else lands under the key 1.
    cc = CoefficientCollector([x.name, y.name])
    assert cc(2*x + y) == {x: 2, y: 1}
    assert cc(2*x + y - z) == {x: 2, y: 1, 1: -z}
    assert cc(x/2 + z**2) == {x: prim.Quotient(1, 2), 1: z**2}
# }}}
# {{{ test_np_bool_handling
def test_np_bool_handling():
    """Evaluating LogicalNot of a numpy.bool_ must yield a Python bool."""
    from pymbolic.mapper.evaluator import evaluate
    numpy = pytest.importorskip("numpy")
    negated = prim.LogicalNot(numpy.bool_(False))
    # `is True` (not just truthiness): the result must be the Python bool.
    assert evaluate(negated) is True
# }}}
# {{{ test_mapper_method_of_parent_class
def test_mapper_method_of_parent_class():
    """A node's custom mapper_method is used when present, else the parent's."""
    class SpatialConstant(prim.Variable):
        # Route dispatch to a method only some mappers define.
        mapper_method = "map_spatial_constant"

    class DoublingMapper(IdentityMapper):
        def map_spatial_constant(self, expr):
            return 2*expr

    const = SpatialConstant("k")
    assert DoublingMapper()(const) == 2*const
    # A mapper without the custom method treats it like a plain Variable.
    assert IdentityMapper()(const) == const
# }}}
# {{{ test_equality_complexity
@pytest.mark.xfail
def test_equality_complexity():
    """Equality on deeply shared, DAG-shaped expressions should not blow up.

    See https://github.com/inducer/pymbolic/issues/73. Currently expected
    to fail (xfail): naive structural comparison is exponential in depth.
    """
    from numpy.random import default_rng

    def construct_intestine_graph(depth=64, seed=0):
        # Each level references the previous expression twice, so the tree
        # unrolling of the DAG doubles at every step.
        rng = default_rng(seed)
        x = prim.Variable("x")
        for _ in range(depth):
            coeff1, coeff2 = rng.integers(1, 10, 2)
            x = coeff1 * x + coeff2 * x
        return x

    def check_equality():
        graph1 = construct_intestine_graph()
        graph2 = construct_intestine_graph()
        graph3 = construct_intestine_graph(seed=3)

        assert graph1 == graph2
        assert graph2 == graph1
        assert graph1 != graph3
        assert graph2 != graph3

    # NOTE: this should finish in a second! Run the comparisons in a
    # subprocess so a pathological implementation can be killed instead of
    # hanging the whole suite.
    import multiprocessing
    p = multiprocessing.Process(target=check_equality)
    p.start()
    p.join(timeout=1)

    # Query liveness exactly once and reuse the captured value: the original
    # re-called p.is_alive() in the guard, so the process state could change
    # between the capture and the check.
    is_alive = p.is_alive()
    if is_alive:
        p.terminate()
    assert not is_alive
# }}}
# {{{ test_cached_mapper_memoizes
class InCacheVerifier(WalkMapper):
    # Walks an expression and asserts that every expression node visited was
    # memoized by *cached_mapper* (its cache key is present in the mapper's
    # private cache).
    def __init__(self, cached_mapper, walk_call_functions=True):
        super().__init__()
        self.cached_mapper = cached_mapper
        # Whether to descend into the function part of Call nodes; dependency
        # mappers do not traverse it, so verification must skip it for them.
        self.walk_call_functions = walk_call_functions

    def post_visit(self, expr):
        # Only expression nodes are cached; literals (ints etc.) are not.
        if isinstance(expr, prim.ExpressionNode):
            assert (self.cached_mapper.get_cache_key(expr)
                    in self.cached_mapper._cache)

    def map_call(self, expr):
        if not self.visit(expr):
            return

        # Optionally verify the callee expression itself.
        if self.walk_call_functions:
            self.rec(expr.function)

        for child in expr.parameters:
            self.rec(child)

        self.post_visit(expr)
def test_cached_mapper_memoizes():
    # For a batch of random expressions, check (a) cached mappers agree with
    # their uncached counterparts and (b) every visited node actually landed
    # in the cache, via InCacheVerifier.
    from testlib import (
        AlwaysFlatteningCachedIdentityMapper,
        AlwaysFlatteningIdentityMapper,
    )

    ntests = 40
    for i in range(ntests):
        expr = generate_random_expression(seed=(5+i))

        # {{{ always flattening identity mapper

        # Note: Prefer AlwaysFlatteningIdentityMapper over IdentityMapper as
        # the flattening logic in IdentityMapper checks for identity across
        # traversal results => leading to discrepancy b/w
        # 'CachedIdentityMapper' and 'IdentityMapper'
        cached_mapper = AlwaysFlatteningCachedIdentityMapper()
        uncached_mapper = AlwaysFlatteningIdentityMapper()
        assert uncached_mapper(expr) == cached_mapper(expr)
        verifier = InCacheVerifier(cached_mapper)
        verifier(expr)

        # }}}

        # {{{ dependency mapper

        mapper = DependencyMapper(include_calls="descend_args")
        cached_mapper = CachedDependencyMapper(include_calls="descend_args")
        assert cached_mapper(expr) == mapper(expr)
        verifier = InCacheVerifier(cached_mapper,
                                   # dep. mapper does not go over functions
                                   walk_call_functions=False
                                   )
        verifier(expr)

        # }}}
# }}}
def test_cached_mapper_differentiates_float_int():
    """Cache keys must distinguish 4 from 4.0.

    Regression test: pymbolic.git<=d343cf14 conflated the two.
    """
    from pymbolic.mapper import CachedIdentityMapper

    mapped = CachedIdentityMapper()(prim.Sum((4, 4.0)))

    first, second = mapped.children
    assert isinstance(first, int)
    assert isinstance(second, float)
# }}}
# {{{ test_mapper_optimizer
def test_mapper_optimizer():
    """The optimized renamer must agree with the reference implementation."""
    from testlib import BIG_EXPR_STR, OptimizedRenamer, Renamer

    from pymbolic.mapper import CachedIdentityMapper

    expr = parse(BIG_EXPR_STR)
    # Deduplicate nodes first so both renamers see the same shared structure.
    expr = CachedIdentityMapper()(expr)

    assert Renamer()(expr) == OptimizedRenamer()(expr)
# }}}
def test_nodecount():
    """get_num_nodes counts shared common subexpressions only once."""
    from pymbolic.mapper.analysis import get_num_nodes

    # A two-term sum: two leaves plus the Sum node itself.
    assert get_num_nodes(prim.Sum((4, 4.0))) == 3

    x, y, z = (prim.Variable(name) for name in "xyz")

    shared = prim.make_common_subexpression(4 * (x**2 + y + z))
    expr = 3*shared + shared + shared + shared
    expr = expr + expr + expr
    # Heavy reuse of `shared` and `expr` keeps the unique node count small.
    assert get_num_nodes(expr) == 12
def test_python_ast_interop_roundtrip():
    """pymbolic -> Python AST -> pymbolic must reproduce random expressions."""
    from pymbolic.interop.ast import ASTToPymbolic, PymbolicToASTMapper

    to_pymbolic = ASTToPymbolic()
    to_ast = PymbolicToASTMapper()

    # Same 40 seeds as the original (5 + i for i in range(40)).
    for seed in range(5, 45):
        expr = generate_random_expression(seed=seed)
        assert to_pymbolic(to_ast(expr)) == expr
# {{{ test derived stringifiers
@prim.expr_dataclass()
class CustomOperator:
    # Expression node used to check that make_stringifier() overrides are
    # honored by str().
    child: Expression

    def make_stringifier(self, originating_stringifier=None):
        # Always hand back the custom stringifier, regardless of which
        # stringifier initiated the request.
        return OperatorStringifier()
class OperatorStringifier(StringifyMapper[[]]):
    # Renders CustomOperator nodes as "Op(<child>)".
    def map_custom_operator(self, expr: CustomOperator):
        return f"Op({self.rec(expr.child)})"
def test_derived_stringifier() -> None:
    """str() on CustomOperator must not raise; it routes via make_stringifier."""
    rendered = str(CustomOperator(5))
# }}}
# {{{ test_flatten
class IntegerFlattenMapper(FlattenMapper):
    # Flattener that declares every expression integer-valued, enabling
    # integer-only simplifications (see test_flatten below).
    def is_expr_integer_valued(self, expr: Expression) -> bool:
        return True
def test_flatten():
    """Integer-valued flattening may simplify % and //; the generic one may not."""
    mod_expr = parse("(3 + x) % 1")
    assert IntegerFlattenMapper()(mod_expr) != mod_expr
    assert FlattenMapper()(mod_expr) == mod_expr
    # Integer x: (3+1) % 1 == 0. Float x keeps a residue the integer
    # simplification would have discarded.
    assert evaluate_kw(IntegerFlattenMapper()(mod_expr), x=1) == 0
    assert abs(evaluate_kw(FlattenMapper()(mod_expr), x=1.1) - 0.1) < 1e-12

    div_expr = parse("(3 + x) // 1")
    assert IntegerFlattenMapper()(div_expr) != div_expr
    assert FlattenMapper()(div_expr) == div_expr
    assert evaluate_kw(IntegerFlattenMapper()(div_expr), x=1) == 4
    assert abs(evaluate_kw(FlattenMapper()(div_expr), x=1.1) - 4) < 1e-12
# }}}
if __name__ == "__main__":
......@@ -492,7 +1088,7 @@ if __name__ == "__main__":
if len(sys.argv) > 1:
exec(sys.argv[1])
else:
from py.test.cmdline import main
from pytest import main
main([__file__])
# vim: fdm=marker
from __future__ import division
from __future__ import annotations
__copyright__ = "Copyright (C) 2017 Matt Wala"
......@@ -23,9 +24,11 @@ THE SOFTWARE.
"""
import pytest
import pymbolic.primitives as prim
x_, y_, i_, j_ = (prim.Variable(s) for s in "x y i j".split())
x_, y_, i_, j_ = (prim.Variable(s) for s in ["x", "y", "i", "j"])
# {{{ to pymbolic test
......@@ -52,9 +55,12 @@ def _test_to_pymbolic(mapper, sym, use_symengine):
assert mapper(sym.exp(x)) == prim.Variable("exp")(x_)
# indexed accesses
i, j = sym.symbols("i,j")
if not use_symengine:
i, j = sym.symbols("i,j")
assert mapper(sym.tensor.indexed.Indexed(x, i, j)) == x_[i_, j_]
idx = sym.Indexed(x, i, j)
else:
idx = sym.Function("Indexed")(x, i, j)
assert mapper(idx) == x_[i_, j_]
# constants
import math
......@@ -95,12 +101,14 @@ def _test_from_pymbolic(mapper, sym, use_symengine):
sym.Subs(x**2, (x,), (y,))
deriv = sym.Derivative(x**2, x)
assert mapper(prim.Derivative(x_**2, ("x",))) == deriv
floordiv = sym.floor(x / y)
assert mapper(prim.FloorDiv(x_, y_)) == floordiv
if use_symengine:
assert mapper(x_[0]) == sym.Symbol("x_0")
assert mapper(x_[0]) == sym.Function("Indexed")("x", 0)
else:
i, j = sym.symbols("i,j")
assert mapper(x_[i_, j_]) == sym.tensor.indexed.Indexed(x, i, j)
assert mapper(x_[i_, j_]) == sym.Indexed(x, i, j)
assert mapper(prim.Variable("f")(x_)) == sym.Function("f")(x)
......@@ -123,15 +131,44 @@ def test_pymbolic_to_sympy():
_test_from_pymbolic(mapper, sym, False)
def test_sympy_if_condition():
pytest.importorskip("sympy")
# {{{ roundtrip tests
def _test_roundtrip(forward, backward, sym, use_symengine):
    """Check that backward(forward(e)) reproduces each sample expression."""
    samples = [
        2 + x_,
        2 * x_,
        x_ ** 2,
        x_[0],
        x_[i_, j_],
        prim.Variable("f")(x_),
        prim.If(prim.Comparison(x_, "<=", y_), 1, 0),
    ]
    for sample in samples:
        assert sample == backward(forward(sample))
# }}}
def test_pymbolic_to_sympy_roundtrip():
    # Round-trip expressions through sympy and back.
    # NOTE(review): the explicit If round trip duplicates one of the cases
    # inside _test_roundtrip; both appear here as a diff-render artifact.
    sym = pytest.importorskip("sympy")
    from pymbolic.interop.sympy import PymbolicToSympyMapper, SympyToPymbolicMapper
    forward = PymbolicToSympyMapper()
    backward = SympyToPymbolicMapper()

    # Test round trip to sympy and back
    expr = prim.If(prim.Comparison(x_, "<=", y_), 1, 0)
    assert backward(forward(expr)) == expr
    _test_roundtrip(forward, backward, sym, False)
def test_pymbolic_to_symengine_roundtrip():
    """Round-trip sample expressions through symengine and back."""
    sym = pytest.importorskip("symengine")
    from pymbolic.interop.symengine import (
        PymbolicToSymEngineMapper,
        SymEngineToPymbolicMapper,
    )

    _test_roundtrip(
        PymbolicToSymEngineMapper(), SymEngineToPymbolicMapper(), sym, True)
if __name__ == "__main__":
......@@ -139,5 +176,5 @@ if __name__ == "__main__":
if len(sys.argv) > 1:
exec(sys.argv[1])
else:
from py.test.cmdline import main
from pytest import main
main([__file__])
from __future__ import annotations
__copyright__ = "Copyright (C) 2022 Kaushik Kulkarni"
__license__ = """
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from dataclasses import dataclass, replace
import numpy as np
from pytools import UniqueNameGenerator
import pymbolic.primitives as prim
from pymbolic.mapper import CachedIdentityMapper, IdentityMapper
from pymbolic.mapper.optimize import optimize_mapper
@dataclass(frozen=True, eq=True)
class RandomExpressionGeneratorContext:
    # Immutable state threaded through random expression generation.
    rng: np.random.Generator        # source of randomness
    vng: UniqueNameGenerator        # supplies fresh variable/function names
    current_depth: int              # depth of the node being generated
    max_depth: int                  # depth at which generation forces a leaf

    def with_increased_depth(self):
        # Return a copy that is one level deeper in the expression tree.
        return replace(self, current_depth=self.current_depth+1)
def _generate_random_expr_inner(
        context: RandomExpressionGeneratorContext) -> prim.ExpressionNode:
    """Recursively build one random expression node under *context*."""
    if context.current_depth >= context.max_depth:
        # At the depth limit: force a leaf (a small integer constant).
        return context.rng.integers(0, 42)

    # Draw a value in [0, 1) that selects the node type below.
    bucket = context.rng.integers(0, 100) / 100.0

    # Each weight is proportional to the probability of the matching type.
    weights = [1, 1, 1, 1, 1]
    expr_types = [prim.Variable, prim.Sum, prim.Product, prim.Quotient,
                  prim.Call]
    assert len(weights) == len(expr_types)

    cutoffs = np.cumsum(weights, dtype="float64")/np.sum(weights)
    expr_type = expr_types[np.searchsorted(cutoffs, bucket)]

    def subexpr():
        # One level deeper; call order preserves the RNG consumption order.
        return _generate_random_expr_inner(context.with_increased_depth())

    if expr_type == prim.Variable:
        return prim.Variable(context.vng("x"))
    elif expr_type in [prim.Sum, prim.Product]:
        left = subexpr()
        right = subexpr()
        return expr_type((left, right))
    elif expr_type == prim.Quotient:
        numerator = subexpr()
        denominator = subexpr()
        return prim.Quotient(numerator, denominator)
    elif expr_type == prim.Call:
        nargs = 3
        return prim.Variable(context.vng("f"))(
            *[subexpr() for _ in range(nargs)])
    else:
        raise NotImplementedError(expr_type)
def generate_random_expression(seed: int, max_depth: int = 8) -> prim.ExpressionNode:
    """Return a reproducible random expression for the given *seed*."""
    from numpy.random import default_rng

    context = RandomExpressionGeneratorContext(
        default_rng(seed),
        vng=UniqueNameGenerator(),
        max_depth=max_depth,
        current_depth=0)

    return _generate_random_expr_inner(context)
# {{{ custom mappers for tests
class _AlwaysFlatteningMixin:
    """Mixin that flattens every mapped Sum/Product, even when unchanged."""

    def map_sum(self, expr, *args, **kwargs):
        from pymbolic.primitives import flattened_sum
        return flattened_sum(
            [self.rec(child, *args, **kwargs) for child in expr.children])

    def map_product(self, expr, *args, **kwargs):
        from pymbolic.primitives import flattened_product
        return flattened_product(
            [self.rec(child, *args, **kwargs) for child in expr.children])
class AlwaysFlatteningIdentityMapper(_AlwaysFlatteningMixin,
                                     IdentityMapper):
    """Uncached identity mapper that flattens sums and products."""
class AlwaysFlatteningCachedIdentityMapper(_AlwaysFlatteningMixin,
                                           CachedIdentityMapper):
    """Cached identity mapper that flattens sums and products."""
# }}}
# {{{ supporting bits for mapper optimizer test
# Large expression used as the workload for the mapper-optimizer test
# support below.  Judging by the identifiers (``_pt_data_*``, ``cse_*``,
# ``*_ensm15``) this was presumably captured from a generated kernel —
# TODO confirm provenance.  Treat the string as opaque test data: its
# exact content (and thus the parsed expression) must not change.
BIG_EXPR_STR = """
(-1)*((cse_577[_pt_data_48[((iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 4302160) // 10, 0], _pt_data_49[(iface_ensm15*1075540 +
iel_ensm15*10 + idof_ensm15) % 10]] if _pt_data_48[((iface_ensm15*1075540 +
iel_ensm15*10 + idof_ensm15) % 4302160) // 10, 0] != -1 else 0) +
(cse_577[_pt_data_46[((iface_ensm15*1075540 + iel_ensm15*10 + idof_ensm15) %
4302160) // 10, 0], _pt_data_47[(iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 10]] if _pt_data_46[((iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 4302160) // 10, 0] != -1 else 0) +
(cse_577[_pt_data_7[((iface_ensm15*1075540 + iel_ensm15*10 + idof_ensm15) %
4302160) // 10, 0], _pt_data_43[(iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 10]] if _pt_data_7[((iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 4302160) // 10, 0] != -1 else 0) +
(cse_577[_pt_data_44[((iface_ensm15*1075540 + iel_ensm15*10 + idof_ensm15) %
4302160) // 10, 0], _pt_data_45[(iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 10]] if _pt_data_44[((iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 4302160) // 10, 0] != -1 else 0) +
(cse_579[_pt_data_68[((iface_ensm15*1075540 + iel_ensm15*10 + idof_ensm15) %
4302160) // 10, 0], _pt_data_69[(iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 10]] if _pt_data_68[((iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 4302160) // 10, 0] != -1 else 0) +
(cse_579[_pt_data_66[((iface_ensm15*1075540 + iel_ensm15*10 + idof_ensm15) %
4302160) // 10, 0], _pt_data_67[(iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 10]] if _pt_data_66[((iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 4302160) // 10, 0] != -1 else 0) +
(cse_579[_pt_data_50[((iface_ensm15*1075540 + iel_ensm15*10 + idof_ensm15) %
4302160) // 10, 0], _pt_data_63[(iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 10]] if _pt_data_50[((iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 4302160) // 10, 0] != -1 else 0) +
(cse_579[_pt_data_64[((iface_ensm15*1075540 + iel_ensm15*10 + idof_ensm15) %
4302160) // 10, 0], _pt_data_65[(iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 10]] if _pt_data_64[((iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 4302160) // 10, 0] != -1 else 0) +
(cse_581[_pt_data_88[((iface_ensm15*1075540 + iel_ensm15*10 + idof_ensm15) %
4302160) // 10, 0], _pt_data_89[(iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 10]] if _pt_data_88[((iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 4302160) // 10, 0] != -1 else 0) +
(cse_581[_pt_data_86[((iface_ensm15*1075540 + iel_ensm15*10 + idof_ensm15) %
4302160) // 10, 0], _pt_data_87[(iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 10]] if _pt_data_86[((iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 4302160) // 10, 0] != -1 else 0) +
(cse_581[_pt_data_70[((iface_ensm15*1075540 + iel_ensm15*10 + idof_ensm15) %
4302160) // 10, 0], _pt_data_83[(iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 10]] if _pt_data_70[((iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 4302160) // 10, 0] != -1 else 0) +
(cse_581[_pt_data_84[((iface_ensm15*1075540 + iel_ensm15*10 + idof_ensm15) %
4302160) // 10, 0], _pt_data_85[(iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 10]] if _pt_data_84[((iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 4302160) // 10, 0] != -1 else 0) +
(cse_582[_pt_data_107[((iface_ensm15*1075540 + iel_ensm15*10 + idof_ensm15) %
4302160) // 10, 0], _pt_data_108[(iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 10]] if _pt_data_107[((iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 4302160) // 10, 0] != -1 else 0) +
(cse_582[_pt_data_105[((iface_ensm15*1075540 + iel_ensm15*10 + idof_ensm15) %
4302160) // 10, 0], _pt_data_106[(iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 10]] if _pt_data_105[((iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 4302160) // 10, 0] != -1 else 0) +
(cse_582[_pt_data_90[((iface_ensm15*1075540 + iel_ensm15*10 + idof_ensm15) %
4302160) // 10, 0], _pt_data_102[(iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 10]] if _pt_data_90[((iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 4302160) // 10, 0] != -1 else 0) +
(cse_582[_pt_data_103[((iface_ensm15*1075540 + iel_ensm15*10 + idof_ensm15) %
4302160) // 10, 0], _pt_data_104[(iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 10]] if _pt_data_103[((iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 4302160) // 10, 0] != -1 else 0)) +
(cse_572[_pt_data_48[((iface_ensm15*1075540 + iel_ensm15*10 + idof_ensm15) %
4302160) // 10, 0], _pt_data_49[(iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 10]] if _pt_data_48[((iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 4302160) // 10, 0] != -1 else 0) +
(cse_572[_pt_data_46[((iface_ensm15*1075540 + iel_ensm15*10 + idof_ensm15) %
4302160) // 10, 0], _pt_data_47[(iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 10]] if _pt_data_46[((iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 4302160) // 10, 0] != -1 else 0) +
(cse_572[_pt_data_7[((iface_ensm15*1075540 + iel_ensm15*10 + idof_ensm15) %
4302160) // 10, 0], _pt_data_43[(iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 10]] if _pt_data_7[((iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 4302160) // 10, 0] != -1 else 0) +
(cse_572[_pt_data_44[((iface_ensm15*1075540 + iel_ensm15*10 + idof_ensm15) %
4302160) // 10, 0], _pt_data_45[(iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 10]] if
_pt_data_44[((iface_ensm15*1075540 + iel_ensm15*10 + idof_ensm15) % 4302160) //
10, 0] != -1 else 0) + (cse_573[_pt_data_68[((iface_ensm15*1075540
+ iel_ensm15*10 +
idof_ensm15) %
4302160) // 10, 0],
_pt_data_69[(iface_ensm15*1075540 +
iel_ensm15*10 +
idof_ensm15) % 10]] if
_pt_data_68[((iface_ensm15*1075540 +
iel_ensm15*10 + idof_ensm15)
% 4302160) // 10, 0] != -1
else 0) +
(cse_573[_pt_data_66[((iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 4302160) // 10, 0],
_pt_data_67[(iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 10]] if
_pt_data_66[((iface_ensm15*1075540 + iel_ensm15*10 + idof_ensm15)
% 4302160) // 10, 0] != -1 else 0) +
(cse_573[_pt_data_50[((iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 4302160) // 10, 0],
_pt_data_63[(iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 10]] if
_pt_data_50[((iface_ensm15*1075540 + iel_ensm15*10 + idof_ensm15)
% 4302160) // 10, 0] != -1 else 0) +
(cse_573[_pt_data_64[((iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 4302160) // 10, 0],
_pt_data_65[(iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 10]] if
_pt_data_64[((iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 4302160) // 10, 0] != -1 else 0) +
(cse_574[_pt_data_88[((iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 4302160) // 10, 0],
_pt_data_89[(iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 10]] if
_pt_data_88[((iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 4302160) // 10, 0] != -1 else 0) +
(cse_574[_pt_data_86[((iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 4302160) // 10, 0],
_pt_data_87[(iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 10]] if
_pt_data_86[((iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 4302160) // 10, 0] != -1 else 0)
+ (cse_574[_pt_data_70[((iface_ensm15*1075540 + iel_ensm15*10
+ idof_ensm15) % 4302160) // 10, 0],
_pt_data_83[(iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 10]] if
_pt_data_70[((iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 4302160) // 10, 0] != -1 else
0) + (cse_574[_pt_data_84[((iface_ensm15*1075540 +
iel_ensm15*10 + idof_ensm15) %
4302160) // 10, 0],
_pt_data_85[(iface_ensm15*1075540 +
iel_ensm15*10 + idof_ensm15) %
10]] if
_pt_data_84[((iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 4302160) // 10, 0] != -1
else 0) +
(cse_575[_pt_data_107[((iface_ensm15*1075540 +
iel_ensm15*10 + idof_ensm15) %
4302160) // 10, 0],
_pt_data_108[(iface_ensm15*1075540 + iel_ensm15*10
+ idof_ensm15) % 10]] if
_pt_data_107[((iface_ensm15*1075540 + iel_ensm15*10 +
idof_ensm15) % 4302160) // 10, 0] != -1
else 0) + (cse_575[_pt_data_105[((iface_ensm15*1075540 +
iel_ensm15*10 +
idof_ensm15) % 4302160)
// 10, 0],
_pt_data_106[(iface_ensm15*1075540 +
iel_ensm15*10 +
idof_ensm15) % 10]] if
_pt_data_105[((iface_ensm15*1075540 +
iel_ensm15*10 + idof_ensm15) %
4302160) // 10, 0] != -1 else 0)
+ (cse_575[_pt_data_90[((iface_ensm15*1075540 +
iel_ensm15*10 +
idof_ensm15) %
4302160) // 10, 0],
_pt_data_102[(iface_ensm15*1075540 +
iel_ensm15*10 +
idof_ensm15) % 10]] if
_pt_data_90[((iface_ensm15*1075540 +
iel_ensm15*10 + idof_ensm15) %
4302160) // 10, 0] != -1 else
0) +
(cse_575[_pt_data_103[((iface_ensm15*1075540 +
iel_ensm15*10 +
idof_ensm15) % 4302160)
// 10, 0],
_pt_data_104[(iface_ensm15*1075540 +
iel_ensm15*10 +
idof_ensm15) % 10]] if
_pt_data_103[((iface_ensm15*1075540 +
iel_ensm15*10 + idof_ensm15) %
4302160) // 10, 0] != -1 else 0)
"""
# Map each iname occurring in BIG_EXPR_STR to a short placeholder
# variable: iface_ensm15 -> _0, iel_ensm15 -> _1, idof_ensm15 -> _2.
_replacements = {
    iname: prim.Variable(f"_{i}")
    for i, iname in enumerate(
        ["iface_ensm15", "iel_ensm15", "idof_ensm15"])
}
class Renamer(IdentityMapper):
    """Substitute variables according to the module-level *_replacements*."""

    def map_variable(self, expr):
        try:
            return _replacements[expr.name]
        except KeyError:
            # No replacement registered: keep the variable as-is.
            return expr
@optimize_mapper(drop_args=True, drop_kwargs=True,
                 inline_cache=True, inline_rec=True,
                 inline_get_cache_key=True)
class OptimizedRenamer(CachedIdentityMapper):
    """Cached, mapper-optimized counterpart of :class:`Renamer`."""

    def map_variable(self, expr):
        replacement = _replacements.get(expr.name)
        return expr if replacement is None else replacement

    def get_cache_key(self, expr):
        # Include 'type(expr)' to tell apart Python scalars that compare
        # and hash equal across types: hash(4) == hash(4.0) and 4 == 4.0
        # both hold, yet their traversal results cannot be re-used.
        return type(expr), expr
# }}}
# vim: foldmethod=marker