diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 43f333238700403fd422bcbd84f2ac56374f6a6e..d5c5dd00837524548141ef02f0846c3d30b61caf 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -8,20 +8,17 @@ on:
         - cron:  '17 3 * * 0'
 
 jobs:
-    flake8:
-        name: Flake8
+    ruff:
+        name: Ruff
         runs-on: ubuntu-latest
         steps:
         -   uses: actions/checkout@v4
         -
             uses: actions/setup-python@v5
-            with:
-                # matches compat target in setup.py
-                python-version: '3.8'
         -   name: "Main Script"
             run: |
-                curl -L -O https://gitlab.tiker.net/inducer/ci-support/raw/main/prepare-and-run-flake8.sh
-                . ./prepare-and-run-flake8.sh "$(basename $GITHUB_REPOSITORY)" test examples
+                pip install ruff
+                ruff check
 
     pylint:
         name: Pylint
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 76110d5e761c9a84e12bd9d81ca61983286b2f0a..969bc695a06e2075fdba2f2bbc879a2a247794bf 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -86,12 +86,12 @@ Documentation:
   tags:
   - python3
 
-Flake8:
+Ruff:
   script:
-  - curl -L -O https://gitlab.tiker.net/inducer/ci-support/raw/main/prepare-and-run-flake8.sh
-  - . ./prepare-and-run-flake8.sh "$CI_PROJECT_NAME" test examples
+  - pipx install ruff
+  - ruff check
   tags:
-  - python3
+  - docker-runner
   except:
   - tags
 
diff --git a/arraycontext/__init__.py b/arraycontext/__init__.py
index b01b9917864052ca11f754dbb53bef30130da402..27eb5325575f2c96dcdeeb262b7c12b5c4c98de3 100644
--- a/arraycontext/__init__.py
+++ b/arraycontext/__init__.py
@@ -28,86 +28,129 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 THE SOFTWARE.
 """
 
-import sys
-
 from .container import (
-    ArrayContainer, ArrayContainerT, NotAnArrayContainerError, deserialize_container,
-    get_container_context_opt, get_container_context_recursively,
-    get_container_context_recursively_opt, is_array_container,
-    is_array_container_type, register_multivector_as_array_container,
-    serialize_container)
+    ArrayContainer,
+    ArrayContainerT,
+    NotAnArrayContainerError,
+    deserialize_container,
+    get_container_context_opt,
+    get_container_context_recursively,
+    get_container_context_recursively_opt,
+    is_array_container,
+    is_array_container_type,
+    register_multivector_as_array_container,
+    serialize_container,
+)
 from .container.arithmetic import with_container_arithmetic
 from .container.dataclass import dataclass_array_container
 from .container.traversal import (
-    flat_size_and_dtype, flatten, freeze, from_numpy, map_array_container,
-    map_reduce_array_container, mapped_over_array_containers,
-    multimap_array_container, multimap_reduce_array_container,
-    multimapped_over_array_containers, outer, rec_map_array_container,
-    rec_map_reduce_array_container, rec_multimap_array_container,
-    rec_multimap_reduce_array_container, stringify_array_container_tree, thaw,
-    to_numpy, unflatten, with_array_context)
+    flat_size_and_dtype,
+    flatten,
+    freeze,
+    from_numpy,
+    map_array_container,
+    map_reduce_array_container,
+    mapped_over_array_containers,
+    multimap_array_container,
+    multimap_reduce_array_container,
+    multimapped_over_array_containers,
+    outer,
+    rec_map_array_container,
+    rec_map_reduce_array_container,
+    rec_multimap_array_container,
+    rec_multimap_reduce_array_container,
+    stringify_array_container_tree,
+    thaw,
+    to_numpy,
+    unflatten,
+    with_array_context,
+)
 from .context import (
-    Array, ArrayContext, ArrayOrContainer, ArrayOrContainerOrScalar,
-    ArrayOrContainerOrScalarT, ArrayOrContainerT, ArrayT, Scalar, ScalarLike,
-    tag_axes)
+    Array,
+    ArrayContext,
+    ArrayOrContainer,
+    ArrayOrContainerOrScalar,
+    ArrayOrContainerOrScalarT,
+    ArrayOrContainerT,
+    ArrayT,
+    Scalar,
+    ScalarLike,
+    tag_axes,
+)
 from .impl.jax import EagerJAXArrayContext
 from .impl.pyopencl import PyOpenCLArrayContext
 from .impl.pytato import PytatoJAXArrayContext, PytatoPyOpenCLArrayContext
 from .loopy import make_loopy_program
+
 # deprecated, remove in 2022.
 from .metadata import _FirstAxisIsElementsTag
 from .pytest import (
-    PytestArrayContextFactory, PytestPyOpenCLArrayContextFactory,
+    PytestArrayContextFactory,
+    PytestPyOpenCLArrayContextFactory,
     pytest_generate_tests_for_array_contexts,
-    pytest_generate_tests_for_pyopencl_array_context)
+    pytest_generate_tests_for_pyopencl_array_context,
+)
 from .transform_metadata import CommonSubexpressionTag, ElementwiseMapKernelTag
 
 
 __all__ = (
-        "ArrayContext", "Scalar", "Array",
-        "Scalar", "ScalarLike",
-        "Array", "ArrayT",
-        "ArrayOrContainer", "ArrayOrContainerT",
-        "ArrayOrContainerOrScalar", "ArrayOrContainerOrScalarT",
-        "tag_axes",
-
-        "CommonSubexpressionTag",
-        "ElementwiseMapKernelTag",
-
-        "ArrayContainer", "ArrayContainerT",
-        "NotAnArrayContainerError",
-        "is_array_container", "is_array_container_type",
-        "get_container_context_opt",
-        "get_container_context_recursively_opt",
-        "get_container_context_recursively",
-        "serialize_container", "deserialize_container",
-        "register_multivector_as_array_container",
-        "with_container_arithmetic",
-        "dataclass_array_container",
-
-        "stringify_array_container_tree",
-        "map_array_container", "multimap_array_container",
-        "rec_map_array_container", "rec_multimap_array_container",
-        "mapped_over_array_containers",
-        "multimapped_over_array_containers",
-        "map_reduce_array_container", "multimap_reduce_array_container",
-        "rec_map_reduce_array_container", "rec_multimap_reduce_array_container",
-        "thaw", "freeze",
-        "flatten", "unflatten", "flat_size_and_dtype",
-        "from_numpy", "to_numpy", "with_array_context",
-        "outer",
-
-        "PyOpenCLArrayContext", "PytatoPyOpenCLArrayContext",
-        "PytatoJAXArrayContext",
-        "EagerJAXArrayContext",
-
-        "make_loopy_program",
-
-        "PytestArrayContextFactory",
-        "PytestPyOpenCLArrayContextFactory",
-        "pytest_generate_tests_for_array_contexts",
-        "pytest_generate_tests_for_pyopencl_array_context"
-        )
+    "Array",
+    "Array",
+    "ArrayContainer",
+    "ArrayContainerT",
+    "ArrayContext",
+    "ArrayOrContainer",
+    "ArrayOrContainerOrScalar",
+    "ArrayOrContainerOrScalarT",
+    "ArrayOrContainerT",
+    "ArrayT",
+    "CommonSubexpressionTag",
+    "EagerJAXArrayContext",
+    "ElementwiseMapKernelTag",
+    "NotAnArrayContainerError",
+    "PyOpenCLArrayContext",
+    "PytatoJAXArrayContext",
+    "PytatoPyOpenCLArrayContext",
+    "PytestArrayContextFactory",
+    "PytestPyOpenCLArrayContextFactory",
+    "Scalar",
+    "Scalar",
+    "ScalarLike",
+    "dataclass_array_container",
+    "deserialize_container",
+    "flat_size_and_dtype",
+    "flatten",
+    "freeze",
+    "from_numpy",
+    "get_container_context_opt",
+    "get_container_context_recursively",
+    "get_container_context_recursively_opt",
+    "is_array_container",
+    "is_array_container_type",
+    "make_loopy_program",
+    "map_array_container",
+    "map_reduce_array_container",
+    "mapped_over_array_containers",
+    "multimap_array_container",
+    "multimap_reduce_array_container",
+    "multimapped_over_array_containers",
+    "outer",
+    "pytest_generate_tests_for_array_contexts",
+    "pytest_generate_tests_for_pyopencl_array_context",
+    "rec_map_array_container",
+    "rec_map_reduce_array_container",
+    "rec_multimap_array_container",
+    "rec_multimap_reduce_array_container",
+    "register_multivector_as_array_container",
+    "serialize_container",
+    "stringify_array_container_tree",
+    "tag_axes",
+    "thaw",
+    "to_numpy",
+    "unflatten",
+    "with_array_context",
+    "with_container_arithmetic"
+)
 
 
 # {{{ deprecation handling
@@ -135,25 +178,19 @@ _depr_name_to_replacement_and_obj = {
         "DeviceScalar": ("Scalar", Scalar, 2023),
         }
 
-if sys.version_info >= (3, 7):
-    def __getattr__(name):
-        replacement_and_obj = _depr_name_to_replacement_and_obj.get(name, None)
-        if replacement_and_obj is not None:
-            replacement, obj, year = replacement_and_obj
-            from warnings import warn
-            warn(f"'arraycontext.{name}' is deprecated. "
-                    f"Use '{replacement}' instead. "
-                    f"'arraycontext.{name}' will continue to work until {year}.",
-                    DeprecationWarning, stacklevel=2)
-            return obj
-        else:
-            raise AttributeError(name)
-else:
-    FirstAxisIsElementsTag = _FirstAxisIsElementsTag
-    _acf = _deprecated_acf
-    get_container_context = get_container_context_opt
-    DeviceArray = Array
-    DeviceScalar = Scalar
+
+def __getattr__(name):
+    replacement_and_obj = _depr_name_to_replacement_and_obj.get(name, None)
+    if replacement_and_obj is not None:
+        replacement, obj, year = replacement_and_obj
+        from warnings import warn
+        warn(f"'arraycontext.{name}' is deprecated. "
+                f"Use '{replacement}' instead. "
+                f"'arraycontext.{name}' will continue to work until {year}.",
+                DeprecationWarning, stacklevel=2)
+        return obj
+    else:
+        raise AttributeError(name)
 
 # }}}
 
diff --git a/arraycontext/container/__init__.py b/arraycontext/container/__init__.py
index fcb130fbbb819ab92eb88fc13ff8bb7691e3f356..ea20a5acf08839f20037c088bdace340c6cfe6c1 100644
--- a/arraycontext/container/__init__.py
+++ b/arraycontext/container/__init__.py
@@ -339,17 +339,17 @@ def get_container_context_recursively(ary: ArrayContainer) -> Optional[ArrayCont
 # FYI: This doesn't, and never should, make arraycontext directly depend on pymbolic.
 # (Though clearly there exists a dependency via loopy.)
 
-def _serialize_multivec_as_container(mv: "MultiVector") -> Iterable[Tuple[Any, Any]]:
+def _serialize_multivec_as_container(mv: MultiVector) -> Iterable[Tuple[Any, Any]]:
     return list(mv.data.items())
 
 
-def _deserialize_multivec_as_container(template: "MultiVector",
-        iterable: Iterable[Tuple[Any, Any]]) -> "MultiVector":
+def _deserialize_multivec_as_container(template: MultiVector,
+        iterable: Iterable[Tuple[Any, Any]]) -> MultiVector:
     from pymbolic.geometric_algebra import MultiVector
     return MultiVector(dict(iterable), space=template.space)
 
 
-def _get_container_context_opt_from_multivec(mv: "MultiVector") -> None:
+def _get_container_context_opt_from_multivec(mv: MultiVector) -> None:
     return None
 
 
diff --git a/arraycontext/container/arithmetic.py b/arraycontext/container/arithmetic.py
index 148d34bfde614b123ec3fd4ade567723e20f6e82..4bba2412a91dc496838164bd141d84a026305cff 100644
--- a/arraycontext/container/arithmetic.py
+++ b/arraycontext/container/arithmetic.py
@@ -383,7 +383,7 @@ def with_container_arithmetic(
             if not t:
                 return "()"
             else:
-                return "(%s,)" % ", ".join(t)
+                return "({},)".format(", ".join(t))
 
         gen(f"cls._outer_bcast_types = {tup_str(outer_bcast_type_names)}")
         gen(f"cls._bcast_numpy_array = {bcast_numpy_array}")
diff --git a/arraycontext/container/dataclass.py b/arraycontext/container/dataclass.py
index e9ab38d4eec04fda544f3382b2938dd01f8b219d..36c940307b7e7e88fefcaa94e457908767b334da 100644
--- a/arraycontext/container/dataclass.py
+++ b/arraycontext/container/dataclass.py
@@ -96,7 +96,9 @@ def dataclass_array_container(cls: type) -> type:
             # * `_BaseGenericAlias` catches `List`, `Tuple`, etc.
             # * `_SpecialForm` catches `Any`, `Literal`, etc.
             from typing import (  # type: ignore[attr-defined]
-                _BaseGenericAlias, _SpecialForm)
+                _BaseGenericAlias,
+                _SpecialForm,
+            )
             if isinstance(f.type, (_BaseGenericAlias, _SpecialForm)):
                 # NOTE: anything except a Union is not allowed
                 raise TypeError(
diff --git a/arraycontext/container/traversal.py b/arraycontext/container/traversal.py
index 940f5ea7fd7b9c526f0054472600693329619456..4a60a8f95a83d88cea3df8b11ff952354abe4f2d 100644
--- a/arraycontext/container/traversal.py
+++ b/arraycontext/container/traversal.py
@@ -75,11 +75,21 @@ from warnings import warn
 import numpy as np
 
 from arraycontext.container import (
-    ArrayContainer, NotAnArrayContainerError, deserialize_container,
-    get_container_context_recursively_opt, serialize_container)
+    ArrayContainer,
+    NotAnArrayContainerError,
+    deserialize_container,
+    get_container_context_recursively_opt,
+    serialize_container,
+)
 from arraycontext.context import (
-    Array, ArrayContext, ArrayOrContainer, ArrayOrContainerOrScalar,
-    ArrayOrContainerT, ArrayT, ScalarLike)
+    Array,
+    ArrayContext,
+    ArrayOrContainer,
+    ArrayOrContainerOrScalar,
+    ArrayOrContainerT,
+    ArrayT,
+    ScalarLike,
+)
 
 
 # {{{ array container traversal helpers
@@ -383,9 +393,9 @@ def keyed_map_array_container(
     """
     try:
         iterable = serialize_container(ary)
-    except NotAnArrayContainerError:
+    except NotAnArrayContainerError as err:
         raise ValueError(
-                f"Non-array container type has no key: {type(ary).__name__}")
+                f"Non-array container type has no key: {type(ary).__name__}") from err
     else:
         return deserialize_container(ary, [
             (key, f(key, subary)) for key, subary in iterable
@@ -410,7 +420,7 @@ def rec_keyed_map_array_container(
             return cast(ArrayOrContainerT, f(keys, cast(ArrayT, _ary)))
         else:
             return deserialize_container(_ary, [
-                (key, rec(keys + (key,), subary)) for key, subary in iterable
+                (key, rec((*keys, key), subary)) for key, subary in iterable
                 ])
 
     return rec((), ary)
@@ -423,7 +433,7 @@ def rec_keyed_map_array_container(
 def map_reduce_array_container(
         reduce_func: Callable[[Iterable[Any]], Any],
         map_func: Callable[[Any], Any],
-        ary: ArrayOrContainerT) -> "Array":
+        ary: ArrayOrContainerT) -> Array:
     """Perform a map-reduce over array containers.
 
     :param reduce_func: callable used to reduce over the components of *ary*
@@ -699,7 +709,7 @@ def flatten(
 
             if subary_c.dtype != common_dtype:
                 raise ValueError("arrays in container have different dtypes: "
-                        f"got {subary_c.dtype}, expected {common_dtype}")
+                        f"got {subary_c.dtype}, expected {common_dtype}") from None
 
             try:
                 flat_subary = actx.np.ravel(subary_c, order="C")
@@ -785,12 +795,13 @@ def unflatten(
 
             if (offset + template_subary_c.size) > ary.size:
                 raise ValueError("'template' and 'ary' sizes do not match: "
-                    "'template' is too large")
+                    "'template' is too large") from None
 
             if strict:
                 if template_subary_c.dtype != ary.dtype:
                     raise ValueError("'template' dtype does not match 'ary': "
-                            f"got {template_subary_c.dtype}, expected {ary.dtype}")
+                            f"got {template_subary_c.dtype}, expected {ary.dtype}"
+                        ) from None
             else:
                 # NOTE: still require that *template* has a uniform dtype
                 if common_dtype is None:
@@ -799,7 +810,7 @@ def unflatten(
                     if common_dtype != template_subary_c.dtype:
                         raise ValueError("arrays in 'template' have different "
                                 f"dtypes: got {template_subary_c.dtype}, but "
-                                f"expected {common_dtype}.")
+                                f"expected {common_dtype}.") from None
 
             # }}}
 
@@ -833,7 +844,7 @@ def unflatten(
                     raise ValueError(
                             # Mypy has a point: nobody promised a .strides attribute.
                             f"strides do not match template: got {subary.strides}, "
-                            f"expected {template_subary_c.strides}")
+                            f"expected {template_subary_c.strides}") from None
 
             # }}}
 
@@ -863,7 +874,7 @@ def unflatten(
 
 
 def flat_size_and_dtype(
-        ary: ArrayOrContainer) -> "Tuple[int, Optional[np.dtype[Any]]]":
+        ary: ArrayOrContainer) -> Tuple[int, Optional[np.dtype[Any]]]:
     """
     :returns: a tuple ``(size, dtype)`` that would be the length and
         :class:`numpy.dtype` of the one-dimensional array returned by
@@ -884,7 +895,7 @@ def flat_size_and_dtype(
 
             if subary_c.dtype != common_dtype:
                 raise ValueError("arrays in container have different dtypes: "
-                        f"got {subary_c.dtype}, expected {common_dtype}")
+                        f"got {subary_c.dtype}, expected {common_dtype}") from None
 
             return subary_c.size
         else:
diff --git a/arraycontext/context.py b/arraycontext/context.py
index 505d9df60654e58dbbdefef19d866482fe8100fb..a4fbbdc576c1693d4e935a8c589238fe38ca83e4 100644
--- a/arraycontext/context.py
+++ b/arraycontext/context.py
@@ -160,8 +160,17 @@ THE SOFTWARE.
 
 from abc import ABC, abstractmethod
 from typing import (
-    TYPE_CHECKING, Any, Callable, Dict, Mapping, Optional, Protocol, Tuple, TypeVar,
-    Union)
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    Dict,
+    Mapping,
+    Optional,
+    Protocol,
+    Tuple,
+    TypeVar,
+    Union,
+)
 
 import numpy as np
 
diff --git a/arraycontext/fake_numpy.py b/arraycontext/fake_numpy.py
index 2de78d1b6905cc155e665300c7eefd58e3a03247..e31bae7d53bfefa837ba58677f0638009485013f 100644
--- a/arraycontext/fake_numpy.py
+++ b/arraycontext/fake_numpy.py
@@ -112,7 +112,7 @@ class BaseFakeNumpyNamespace:
                 axis=0):
         num = operator.index(num)
         if num < 0:
-            raise ValueError("Number of samples, %s, must be non-negative." % num)
+            raise ValueError(f"Number of samples, {num}, must be non-negative.")
         div = (num - 1) if endpoint else num
 
         # Convert float/complex array scalars to float, gh-3504
@@ -140,8 +140,8 @@ class BaseFakeNumpyNamespace:
 
         if div > 0:
             step = delta / div
-            #any_step_zero = _nx.asanyarray(step == 0).any()
-            any_step_zero = self._array_context.to_numpy((step == 0)).any()
+            # any_step_zero = _nx.asanyarray(step == 0).any()
+            any_step_zero = self._array_context.to_numpy(step == 0).any()
             if any_step_zero:
                 delta_actx = self._array_context.from_numpy(delta)
 
@@ -176,10 +176,10 @@ class BaseFakeNumpyNamespace:
         # https://github.com/inducer/pytato/issues/456
         if retstep:
             return y, step
-            #return y.astype(dtype), step
+            # return y.astype(dtype), step
         else:
             return y
-            #return y.astype(dtype)
+            # return y.astype(dtype)
 
     # }}}
 
diff --git a/arraycontext/impl/jax/__init__.py b/arraycontext/impl/jax/__init__.py
index e5fef3edac9241fd9c283f2f8de2f89488406716..92838351091d4c8207bdd5abfdc478f98a0f34c3 100644
--- a/arraycontext/impl/jax/__init__.py
+++ b/arraycontext/impl/jax/__init__.py
@@ -33,8 +33,7 @@ import numpy as np
 
 from pytools.tag import ToTagSetConvertible
 
-from arraycontext.container.traversal import (
-    rec_map_array_container, with_array_context)
+from arraycontext.container.traversal import rec_map_array_container, with_array_context
 from arraycontext.context import Array, ArrayContext, ArrayOrContainer, ScalarLike
 
 
diff --git a/arraycontext/impl/jax/fake_numpy.py b/arraycontext/impl/jax/fake_numpy.py
index d20448a4d4060c8667485ada97b582ff961ed42b..662b86c05a19e3d40f9448a9ffb82b01dc1bd3ee 100644
--- a/arraycontext/impl/jax/fake_numpy.py
+++ b/arraycontext/impl/jax/fake_numpy.py
@@ -28,10 +28,11 @@ import numpy as np
 
 from arraycontext.container import NotAnArrayContainerError, serialize_container
 from arraycontext.container.traversal import (
-    rec_map_array_container, rec_map_reduce_array_container,
-    rec_multimap_array_container)
-from arraycontext.fake_numpy import (
-    BaseFakeNumpyLinalgNamespace, BaseFakeNumpyNamespace)
+    rec_map_array_container,
+    rec_map_reduce_array_container,
+    rec_multimap_array_container,
+)
+from arraycontext.fake_numpy import BaseFakeNumpyLinalgNamespace, BaseFakeNumpyNamespace
 
 
 class EagerJAXFakeNumpyLinalgNamespace(BaseFakeNumpyLinalgNamespace):
@@ -102,7 +103,7 @@ class EagerJAXFakeNumpyNamespace(BaseFakeNumpyNamespace):
         if order in "AK":
             from warnings import warn
             warn(f"ravel with order='{order}' not supported by JAX,"
-                 " using order=C.")
+                 " using order=C.", stacklevel=1)
             order = "C"
 
         return rec_map_array_container(
diff --git a/arraycontext/impl/pyopencl/__init__.py b/arraycontext/impl/pyopencl/__init__.py
index 982fd431f4a6cf5537ef497cf0814f88a95c5dc7..de9c43139af80b44ef617ba7d181ce2033372db0 100644
--- a/arraycontext/impl/pyopencl/__init__.py
+++ b/arraycontext/impl/pyopencl/__init__.py
@@ -35,8 +35,7 @@ import numpy as np
 
 from pytools.tag import ToTagSetConvertible
 
-from arraycontext.container.traversal import (
-    rec_map_array_container, with_array_context)
+from arraycontext.container.traversal import rec_map_array_container, with_array_context
 from arraycontext.context import Array, ArrayContext, ArrayOrContainer, ScalarLike
 
 
@@ -132,16 +131,18 @@ class PyOpenCLArrayContext(ArrayContext):
                 warn("PyOpenCLArrayContext created without an allocator on a GPU. "
                      "This can lead to high numbers of memory allocations. "
                      "Please consider using a pyopencl.tools.MemoryPool. "
-                     "Run with allocator=False to disable this warning.")
+                     "Run with allocator=False to disable this warning.",
+                     stacklevel=2)
 
             if __debug__:
                 # Use "running on GPU" as a proxy for "they care about speed".
                 warn("You are using the PyOpenCLArrayContext on a GPU, but you "
                         "are running Python in debug mode. Use 'python -O' for "
-                        "a noticeable speed improvement.")
+                        "a noticeable speed improvement.",
+                        stacklevel=2)
 
         self._loopy_transform_cache: \
-                Dict["lp.TranslationUnit", "lp.TranslationUnit"] = {}
+                Dict[lp.TranslationUnit, lp.TranslationUnit] = {}
 
         # TODO: Ideally this should only be `(TaggableCLArray,)`, but
         # that would break the logic in the downstream users.
diff --git a/arraycontext/impl/pyopencl/fake_numpy.py b/arraycontext/impl/pyopencl/fake_numpy.py
index c62766570a0eea0e7efb6b0166f542822ebc3d38..22b1bff9044ec3283af5c1c6f45e43dd54cf45dd 100644
--- a/arraycontext/impl/pyopencl/fake_numpy.py
+++ b/arraycontext/impl/pyopencl/fake_numpy.py
@@ -33,8 +33,11 @@ import numpy as np
 
 from arraycontext.container import NotAnArrayContainerError, serialize_container
 from arraycontext.container.traversal import (
-    rec_map_array_container, rec_map_reduce_array_container,
-    rec_multimap_array_container, rec_multimap_reduce_array_container)
+    rec_map_array_container,
+    rec_map_reduce_array_container,
+    rec_multimap_array_container,
+    rec_multimap_reduce_array_container,
+)
 from arraycontext.fake_numpy import BaseFakeNumpyLinalgNamespace
 from arraycontext.loopy import LoopyBasedFakeNumpyNamespace
 
diff --git a/arraycontext/impl/pytato/__init__.py b/arraycontext/impl/pytato/__init__.py
index a32e8de0f4097d888737199e0cc0087eedb89638..6e04bdcdeae931429ffbf4a64d5091b9b1c5c196 100644
--- a/arraycontext/impl/pytato/__init__.py
+++ b/arraycontext/impl/pytato/__init__.py
@@ -45,15 +45,23 @@ THE SOFTWARE.
 import abc
 import sys
 from typing import (
-    TYPE_CHECKING, Any, Callable, Dict, FrozenSet, Optional, Tuple, Type, Union)
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    Dict,
+    FrozenSet,
+    Optional,
+    Tuple,
+    Type,
+    Union,
+)
 
 import numpy as np
 
 from pytools import memoize_method
 from pytools.tag import Tag, ToTagSetConvertible, normalize_tags
 
-from arraycontext.container.traversal import (
-    rec_map_array_container, with_array_context)
+from arraycontext.container.traversal import rec_map_array_container, with_array_context
 from arraycontext.context import Array, ArrayContext, ArrayOrContainer, ScalarLike
 from arraycontext.metadata import NameHint
 
@@ -63,7 +71,7 @@ if TYPE_CHECKING:
     import pytato
 
 if getattr(sys, "_BUILDING_SPHINX_DOCS", False):
-    import pyopencl as cl  # noqa: F811
+    import pyopencl as cl
 
 import logging
 
@@ -90,7 +98,7 @@ def _preprocess_array_tags(tags: ToTagSetConvertible) -> FrozenSet[Tag]:
                     f"arraycontext.metadata.NameHint('{name_hint.name}') "
                     "to pytato.tags.PrefixNamed, "
                     f"PrefixNamed('{prefix_named.prefix}') "
-                    "was already present.")
+                    "was already present.", stacklevel=1)
 
         tags = (
                 (tags | frozenset({PrefixNamed(name_hint.name)}))
@@ -411,17 +419,20 @@ class PytatoPyOpenCLArrayContext(_BasePytatoArrayContext):
 
                 from warnings import warn
                 warn("Running on an Nvidia GPU, reducing the argument "
-                    f"size limit from 4352 to {limit}.")
+                    f"size limit from 4352 to {limit}.", stacklevel=1)
             else:
                 limit = dev.max_parameter_size
 
             if self._force_svm_arg_limit is not None:
                 limit = self._force_svm_arg_limit
 
-            logger.info(f"limiting argument buffer size for {dev} to {limit} bytes")
+            logger.info(
+                    "limiting argument buffer size for %s to %d bytes",
+                    dev, limit)
 
             from arraycontext.impl.pytato.utils import (
-                ArgSizeLimitingPytatoLoopyPyOpenCLTarget)
+                ArgSizeLimitingPytatoLoopyPyOpenCLTarget,
+            )
             return ArgSizeLimitingPytatoLoopyPyOpenCLTarget(limit)
         else:
             return super().get_target()
@@ -435,10 +446,14 @@ class PytatoPyOpenCLArrayContext(_BasePytatoArrayContext):
 
         from arraycontext.container.traversal import rec_keyed_map_array_container
         from arraycontext.impl.pyopencl.taggable_cl_array import (
-            TaggableCLArray, to_tagged_cl_array)
+            TaggableCLArray,
+            to_tagged_cl_array,
+        )
         from arraycontext.impl.pytato.compile import _ary_container_key_stringifier
         from arraycontext.impl.pytato.utils import (
-            _normalize_pt_expr, get_cl_axes_from_pt_axes)
+            _normalize_pt_expr,
+            get_cl_axes_from_pt_axes,
+        )
 
         array_as_dict: Dict[str, Union[cla.Array, TaggableCLArray, pt.Array]] = {}
         key_to_frozen_subary: Dict[str, TaggableCLArray] = {}
@@ -608,7 +623,7 @@ class PytatoPyOpenCLArrayContext(_BasePytatoArrayContext):
         processed_kwargs = {}
 
         for kw, arg in sorted(kwargs.items()):
-            if isinstance(arg, (pt.Array,) + SCALAR_CLASSES):
+            if isinstance(arg, (pt.Array, *SCALAR_CLASSES)):
                 pass
             elif isinstance(arg, TaggableCLArray):
                 arg = self.thaw(arg)
diff --git a/arraycontext/impl/pytato/compile.py b/arraycontext/impl/pytato/compile.py
index 6adea7a238d62f2aaac92a84e6a73e70a8e07778..41e1c0de5318c24ed065d527a2bfe3998b374655 100644
--- a/arraycontext/impl/pytato/compile.py
+++ b/arraycontext/impl/pytato/compile.py
@@ -46,7 +46,10 @@ from arraycontext.container import ArrayContainer, is_array_container_type
 from arraycontext.container.traversal import rec_keyed_map_array_container
 from arraycontext.context import ArrayT
 from arraycontext.impl.pytato import (
-    PytatoJAXArrayContext, PytatoPyOpenCLArrayContext, _BasePytatoArrayContext)
+    PytatoJAXArrayContext,
+    PytatoPyOpenCLArrayContext,
+    _BasePytatoArrayContext,
+)
 
 
 logger = logging.getLogger(__name__)
@@ -148,9 +151,9 @@ def _get_arg_id_to_arg_and_arg_id_to_descr(args: Tuple[Any, ...],
             arg_id_to_descr[arg_id] = ScalarInputDescriptor(np.dtype(type(arg)))
         elif is_array_container_type(arg.__class__):
             def id_collector(keys, ary):
-                arg_id = (kw,) + keys  # noqa: B023
-                arg_id_to_arg[arg_id] = ary  # noqa: B023
-                arg_id_to_descr[arg_id] = LeafArrayDescriptor(  # noqa: B023
+                arg_id = (kw, *keys)  # noqa: B023
+                arg_id_to_arg[arg_id] = ary
+                arg_id_to_descr[arg_id] = LeafArrayDescriptor(
                         np.dtype(ary.dtype), ary.shape)
                 return ary
 
@@ -181,7 +184,9 @@ def _to_input_for_compiled(ary: ArrayT, actx: PytatoPyOpenCLArrayContext):
     import pyopencl.array as cla
 
     from arraycontext.impl.pyopencl.taggable_cl_array import (
-        TaggableCLArray, to_tagged_cl_array)
+        TaggableCLArray,
+        to_tagged_cl_array,
+    )
     if isinstance(ary, pt.Array):
         dag = pt.make_dict_of_named_arrays({"_actx_out": ary})
         # Transform the DAG to give metadata inference a chance to do its job
@@ -220,7 +225,7 @@ def _get_f_placeholder_args(arg, kw, arg_id_to_name, actx):
                                    tags=arg.tags)
     elif is_array_container_type(arg.__class__):
         def _rec_to_placeholder(keys, ary):
-            name = arg_id_to_name[(kw,) + keys]
+            name = arg_id_to_name[(kw, *keys)]
             # Transform the DAG to give metadata inference a chance to do its job
             ary = _to_input_for_compiled(ary, actx)
             return pt.make_placeholder(name,
@@ -458,7 +463,7 @@ class LazilyCompilingFunctionCaller(LazilyPyOpenCLCompilingFunctionCaller):
         warn("LazilyCompilingFunctionCaller has been renamed to"
              " LazilyPyOpenCLCompilingFunctionCaller. This will be"
              " an error in 2023.", DeprecationWarning, stacklevel=2)
-        return super(LazilyCompilingFunctionCaller, cls).__new__(cls)
+        return super().__new__(cls)
 
     def _dag_to_transformed_loopy_prg(self, dict_of_named_arrays):
         from warnings import warn
@@ -489,7 +494,7 @@ class LazilyJAXCompilingFunctionCaller(BaseLazilyCompilingFunctionCaller):
         self.actx._compile_trace_callback(
                 prg_id, "pre_transform_dag", dict_of_named_arrays)
 
-        with ProcessLogger(logger, "transform_dag for '{prg_id}'"):
+        with ProcessLogger(logger, f"transform_dag for '{prg_id}'"):
             pt_dict_of_named_arrays = self.actx.transform_dag(dict_of_named_arrays)
 
         self.actx._compile_trace_callback(
@@ -753,7 +758,7 @@ class CompiledJAXFunctionReturningArray(CompiledFunction):
         input_kwargs_for_loopy = _args_to_device_buffers(
                 self.actx, self.input_id_to_name_in_program, arg_id_to_arg)
 
-        evt, out_dict = self.pytato_program(**input_kwargs_for_loopy)
+        _evt, out_dict = self.pytato_program(**input_kwargs_for_loopy)
 
         return self.actx.thaw(out_dict[self.output_name])
 
diff --git a/arraycontext/impl/pytato/fake_numpy.py b/arraycontext/impl/pytato/fake_numpy.py
index 21cab42ef2b93ccf394822101809823fb55cace6..aa0e0e8940d38cb2802aec2df23038a1bb10ec75 100644
--- a/arraycontext/impl/pytato/fake_numpy.py
+++ b/arraycontext/impl/pytato/fake_numpy.py
@@ -30,8 +30,10 @@ import pytato as pt
 
 from arraycontext.container import NotAnArrayContainerError, serialize_container
 from arraycontext.container.traversal import (
-    rec_map_array_container, rec_map_reduce_array_container,
-    rec_multimap_array_container)
+    rec_map_array_container,
+    rec_map_reduce_array_container,
+    rec_multimap_array_container,
+)
 from arraycontext.fake_numpy import BaseFakeNumpyLinalgNamespace
 from arraycontext.loopy import LoopyBasedFakeNumpyNamespace
 
diff --git a/arraycontext/impl/pytato/utils.py b/arraycontext/impl/pytato/utils.py
index c014a93cba8639dd0268ba9cc7e58199fe9ffa97..0af542041b1d4d022f1f5a311185461e1a0a6c71 100644
--- a/arraycontext/impl/pytato/utils.py
+++ b/arraycontext/impl/pytato/utils.py
@@ -26,8 +26,15 @@ THE SOFTWARE.
 from typing import TYPE_CHECKING, Any, Dict, Mapping, Optional, Set, Tuple
 
 from pytato.array import (
-    AbstractResultWithNamedArrays, Array, Axis as PtAxis, DataWrapper,
-    DictOfNamedArrays, Placeholder, SizeParam, make_placeholder)
+    AbstractResultWithNamedArrays,
+    Array,
+    Axis as PtAxis,
+    DataWrapper,
+    DictOfNamedArrays,
+    Placeholder,
+    SizeParam,
+    make_placeholder,
+)
 from pytato.target.loopy import LoopyPyOpenCLTarget
 from pytato.transform import CopyMapper
 from pytools import UniqueNameGenerator, memoize_method
diff --git a/arraycontext/loopy.py b/arraycontext/loopy.py
index af663e9bc181dceaf3fb330e6c2555c572b56897..dc5d84f427a96d31c016c617836047f82d842656 100644
--- a/arraycontext/loopy.py
+++ b/arraycontext/loopy.py
@@ -27,6 +27,8 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 THE SOFTWARE.
 """
 
+from typing import ClassVar, Mapping
+
 import numpy as np
 
 import loopy as lp
@@ -70,9 +72,9 @@ def get_default_entrypoint(t_unit):
     except AttributeError:
         try:
             return t_unit.root_kernel
-        except AttributeError:
+        except AttributeError as err:
             raise TypeError("unable to find default entry point for loopy "
-                    "translation unit")
+                    "translation unit") from err
 
 
 def _get_scalar_func_loopy_program(actx, c_name, nargs, naxes):
@@ -109,14 +111,14 @@ def _get_scalar_func_loopy_program(actx, c_name, nargs, naxes):
                         lp.GlobalArg("inp%d" % i,
                                      dtype=None, shape=lp.auto, offset=lp.auto)
                         for i in range(nargs)] + [...],
-                name="actx_special_%s" % c_name,
+                name=f"actx_special_{c_name}",
                 tags=(ElementwiseMapKernelTag(),))
 
     return get(c_name, nargs, naxes)
 
 
 class LoopyBasedFakeNumpyNamespace(BaseFakeNumpyNamespace):
-    _numpy_to_c_arc_functions = {
+    _numpy_to_c_arc_functions: ClassVar[Mapping[str, str]] = {
             "arcsin": "asin",
             "arccos": "acos",
             "arctan": "atan",
@@ -127,7 +129,7 @@ class LoopyBasedFakeNumpyNamespace(BaseFakeNumpyNamespace):
             "arctanh": "atanh",
             }
 
-    _c_to_numpy_arc_functions = {c_name: numpy_name
+    _c_to_numpy_arc_functions: ClassVar[Mapping[str, str]] = {c_name: numpy_name
             for numpy_name, c_name in _numpy_to_c_arc_functions.items()}
 
     def __getattr__(self, name):
diff --git a/arraycontext/metadata.py b/arraycontext/metadata.py
index 95fc639ea8f219150aa31b5134a0a3db2a35a3ac..39974172656e0399be6f3a55cb5e70bbd24c687e 100644
--- a/arraycontext/metadata.py
+++ b/arraycontext/metadata.py
@@ -27,7 +27,6 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 THE SOFTWARE.
 """
 
-import sys
 from dataclasses import dataclass
 from warnings import warn
 
@@ -54,25 +53,23 @@ class NameHint(UniqueTag):
 
 try:
     from meshmode.transform_metadata import (
-        FirstAxisIsElementsTag as _FirstAxisIsElementsTag)
+        FirstAxisIsElementsTag as _FirstAxisIsElementsTag,
+    )
 except ImportError:
     # placeholder in case meshmode is too old to have it.
     class _FirstAxisIsElementsTag(Tag):  # type: ignore[no-redef]
         pass
 
 
-if sys.version_info >= (3, 7):
-    def __getattr__(name):
-        if name == "FirstAxisIsElementsTag":
-            warn(f"'arraycontext.{name}' is deprecated. "
-                    f"Use 'meshmode.transform_metadata.{name}' instead. "
-                    f"'arraycontext.{name}' will continue to work until 2022.",
-                    DeprecationWarning, stacklevel=2)
-            return _FirstAxisIsElementsTag
-        else:
-            raise AttributeError(name)
-else:
-    FirstAxisIsElementsTag = _FirstAxisIsElementsTag
+def __getattr__(name):
+    if name == "FirstAxisIsElementsTag":
+        warn(f"'arraycontext.{name}' is deprecated. "
+                f"Use 'meshmode.transform_metadata.{name}' instead. "
+                f"'arraycontext.{name}' will continue to work until 2022.",
+                DeprecationWarning, stacklevel=2)
+        return _FirstAxisIsElementsTag
+    else:
+        raise AttributeError(name)
 
 # }}}
 
diff --git a/arraycontext/pytest.py b/arraycontext/pytest.py
index 66fe5a0580a5fea01a9d326905cdb1f64aca6a4b..3ea7d065d644dbe6b02ba3cfb3605558f6647973 100644
--- a/arraycontext/pytest.py
+++ b/arraycontext/pytest.py
@@ -100,7 +100,7 @@ class _PytestPyOpenCLArrayContextFactoryWithClass(PytestPyOpenCLArrayContextFact
         # holding a reference to the context to keep it alive in turn.
         # On some implementations (notably Intel CPU), holding a reference
         # to a queue does not keep the context alive.
-        ctx, queue = self.get_command_queue()
+        _ctx, queue = self.get_command_queue()
 
         alloc = None
 
@@ -111,7 +111,8 @@ class _PytestPyOpenCLArrayContextFactoryWithClass(PytestPyOpenCLArrayContextFact
             from warnings import warn
             warn("Disabling SVM due to memory leak "
                  "in Nvidia CL when running pytest. "
-                 "See https://github.com/inducer/arraycontext/issues/196")
+                 "See https://github.com/inducer/arraycontext/issues/196",
+                 stacklevel=1)
 
         return self.actx_class(
                 queue,
@@ -119,11 +120,9 @@ class _PytestPyOpenCLArrayContextFactoryWithClass(PytestPyOpenCLArrayContextFact
                 force_device_scalars=self.force_device_scalars)
 
     def __str__(self):
-        return ("<%s for <pyopencl.Device '%s' on '%s'>>" %
-                (
-                    self.actx_class.__name__,
-                    self.device.name.strip(),
-                    self.device.platform.name.strip()))
+        return (f"<{self.actx_class.__name__} "
+            f"for <pyopencl.Device '{self.device.name.strip()}' "
+            f"on '{self.device.platform.name.strip()}'>>")
 
 
 class _PytestPyOpenCLArrayContextFactoryWithClassAndHostScalars(
@@ -154,7 +153,7 @@ class _PytestPytatoPyOpenCLArrayContextFactory(PytestPyOpenCLArrayContextFactory
         # holding a reference to the context to keep it alive in turn.
         # On some implementations (notably Intel CPU), holding a reference
         # to a queue does not keep the context alive.
-        ctx, queue = self.get_command_queue()
+        _ctx, queue = self.get_command_queue()
 
         alloc = None
 
@@ -165,15 +164,15 @@ class _PytestPytatoPyOpenCLArrayContextFactory(PytestPyOpenCLArrayContextFactory
             from warnings import warn
             warn("Disabling SVM due to memory leak "
                  "in Nvidia CL when running pytest. "
-                 "See https://github.com/inducer/arraycontext/issues/196")
+                 "See https://github.com/inducer/arraycontext/issues/196",
+                 stacklevel=1)
 
         return self.actx_class(queue, allocator=alloc)
 
     def __str__(self):
-        return ("<PytatoPyOpenCLArrayContext for <pyopencl.Device '%s' on '%s'>>" %
-                (
-                    self.device.name.strip(),
-                    self.device.platform.name.strip()))
+        return ("<PytatoPyOpenCLArrayContext for "
+                f"<pyopencl.Device '{self.device.name.strip()}' "
+                f"on '{self.device.platform.name.strip()}'>>")
 
 
 class _PytestEagerJaxArrayContextFactory(PytestArrayContextFactory):
diff --git a/doc/make_numpy_coverage_table.py b/doc/make_numpy_coverage_table.py
index f30d328ccdd403672bfebd83be34a6d4224b342d..19d09d4a6f19c31358110ed2284a6b4dadc297a2 100644
--- a/doc/make_numpy_coverage_table.py
+++ b/doc/make_numpy_coverage_table.py
@@ -15,6 +15,7 @@ Workflow:
 """
 
 import pathlib
+
 from mako.template import Template
 
 import arraycontext
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000000000000000000000000000000000000..37727dabe63831214b0baa3e2115500e3fa8b88c
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,65 @@
+[tool.ruff]
+target-version = "py38"
+preview = true
+
+[tool.ruff.lint]
+extend-select = [
+    "B",   # flake8-bugbear
+    "C",   # flake8-comprehensions
+    "E",   # pycodestyle
+    "F",   # pyflakes
+    "G",   # flake8-logging-format
+    "I",   # flake8-isort
+    "N",   # pep8-naming
+    "NPY", # numpy
+    "Q",   # flake8-quotes
+    "UP",  # pyupgrade
+    "RUF", # ruff
+    "W",   # pycodestyle
+]
+extend-ignore = [
+    "C90",  # McCabe complexity
+    "E221", # multiple spaces before operator
+    "E226", # missing whitespace around arithmetic operator
+    "E402", # module-level import not at top of file
+    "UP006", # updated annotations due to __future__ import
+    "UP007", # updated annotations due to __future__ import
+]
+
+[tool.ruff.lint.flake8-quotes]
+docstring-quotes = "double"
+inline-quotes = "double"
+multiline-quotes = "double"
+
+[tool.ruff.lint.isort]
+combine-as-imports = true
+known-local-folder = [
+    "arraycontext",
+]
+known-first-party = [
+  "pytools",
+  "pyopencl",
+  "pytato",
+  "loopy",
+]
+lines-after-imports = 2
+
+[tool.mypy]
+python_version = "3.8"
+warn_unused_ignores = true
+# TODO: enable this
+# check_untyped_defs = true
+
+[[tool.mypy.overrides]]
+
+module = [
+  "islpy",
+  "loopy.*",
+  "meshmode.*",
+  "pymbolic",
+  "pymbolic.*",
+  "pyopencl.*",
+  "jax.*",
+]
+
+ignore_missing_imports = true
diff --git a/setup.cfg b/setup.cfg
deleted file mode 100644
index 60eab3b1f7e570c6f8920d9ca32504f63faaac3e..0000000000000000000000000000000000000000
--- a/setup.cfg
+++ /dev/null
@@ -1,40 +0,0 @@
-[flake8]
-min_python_version = 3.6
-ignore = E126,E127,E128,E123,E226,E241,E242,E265,W503,E402
-max-line-length=85
-
-inline-quotes = "
-docstring-quotes = """
-multiline-quotes = """
-# enable-flake8-bugbear
-
-[isort]
-known_firstparty=pytools,pyopencl,pymbolic,islpy,loopy,pytato
-known_local_folder=arraycontext
-line_length = 85
-lines_after_imports = 2
-combine_as_imports = True
-multi_line_output = 4
-
-[mypy]
-# it reads pytato code, and pytato is 3.8+
-python_version = 3.8
-warn_unused_ignores = True
-
-[mypy-islpy]
-ignore_missing_imports = True
-
-[mypy-loopy.*]
-ignore_missing_imports = True
-
-[mypy-numpy]
-ignore_missing_imports = True
-
-[mypy-meshmode.*]
-ignore_missing_imports = True
-
-[mypy-pymbolic.*]
-ignore_missing_imports = True
-
-[mypy-pyopencl.*]
-ignore_missing_imports = True
diff --git a/setup.py b/setup.py
index 0dd5c69685d4283255f63641e178c757338accb5..659052654f8bbc4d2cbb99de17b3a778b1d22d53 100644
--- a/setup.py
+++ b/setup.py
@@ -1,21 +1,20 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
 
 
 def main():
-    from setuptools import setup, find_packages
+    from setuptools import find_packages, setup
 
     version_dict = {}
     init_filename = "arraycontext/version.py"
     exec(
-        compile(open(init_filename, "r").read(), init_filename, "exec"), version_dict
+        compile(open(init_filename).read(), init_filename, "exec"), version_dict
     )
 
     setup(
         name="arraycontext",
         version=version_dict["VERSION_TEXT"],
         description="Choose your favorite numpy-workalike",
-        long_description=open("README.rst", "rt").read(),
+        long_description=open("README.rst").read(),
         author="Andreas Kloeckner",
         author_email="inform@tiker.net",
         license="MIT",
diff --git a/test/test_arraycontext.py b/test/test_arraycontext.py
index e53f429568e17bcc95b958a8606713043f708b3a..02452ec67dae3d80807e12a6765fa28c1b87e44f 100644
--- a/test/test_arraycontext.py
+++ b/test/test_arraycontext.py
@@ -29,14 +29,27 @@ import pytest
 
 from pytools.obj_array import make_obj_array
 
-from arraycontext import (  # noqa: F401
-    ArrayContainer, ArrayContext, EagerJAXArrayContext, FirstAxisIsElementsTag,
-    PyOpenCLArrayContext, PytatoPyOpenCLArrayContext, dataclass_array_container,
-    deserialize_container, pytest_generate_tests_for_array_contexts,
-    serialize_container, tag_axes, with_array_context, with_container_arithmetic)
+from arraycontext import (
+    ArrayContainer,
+    ArrayContext,
+    EagerJAXArrayContext,
+    FirstAxisIsElementsTag,
+    PyOpenCLArrayContext,
+    PytatoPyOpenCLArrayContext,
+    dataclass_array_container,
+    deserialize_container,
+    pytest_generate_tests_for_array_contexts,
+    serialize_container,
+    tag_axes,
+    with_array_context,
+    with_container_arithmetic,
+)
 from arraycontext.pytest import (
-    _PytestEagerJaxArrayContextFactory, _PytestPyOpenCLArrayContextFactoryWithClass,
-    _PytestPytatoJaxArrayContextFactory, _PytestPytatoPyOpenCLArrayContextFactory)
+    _PytestEagerJaxArrayContextFactory,
+    _PytestPyOpenCLArrayContextFactoryWithClass,
+    _PytestPytatoJaxArrayContextFactory,
+    _PytestPytatoPyOpenCLArrayContextFactory,
+)
 
 
 logger = logging.getLogger(__name__)
@@ -133,7 +146,7 @@ class DOFArray:
         return self.data[i]
 
     def __repr__(self):
-        return f"DOFArray({repr(self.data)})"
+        return f"DOFArray({self.data!r})"
 
     @classmethod
     def _serialize_init_arrays_code(cls, instance_name):
@@ -182,7 +195,7 @@ def _deserialize_dof_container(  # type: ignore[misc]
 
 @with_array_context.register(DOFArray)
 # https://github.com/python/mypy/issues/13040
-def _with_actx_dofarray(ary: DOFArray, actx: ArrayContext) -> DOFArray:  # type: ignore[misc]  # noqa: E501
+def _with_actx_dofarray(ary: DOFArray, actx: ArrayContext) -> DOFArray:  # type: ignore[misc]
     return type(ary)(actx, ary.data)
 
 # }}}
@@ -411,7 +424,7 @@ def test_array_context_np_like(actx_factory, sym_name, n_args, dtype):
     assert_close_to_numpy(
             actx, lambda _np, *_args: getattr(_np, sym_name)(*_args), args)
 
-    for c in (42.0,) + _get_test_containers(actx):
+    for c in (42.0, *_get_test_containers(actx)):
         result = getattr(actx.np, sym_name)(c)
         result = actx.thaw(actx.freeze(result))
 
@@ -434,32 +447,36 @@ def test_array_context_np_like(actx_factory, sym_name, n_args, dtype):
 # {{{ array manipulations
 
 def test_actx_stack(actx_factory):
+    rng = np.random.default_rng()
+
     actx = actx_factory()
 
     ndofs = 5000
-    args = [np.random.randn(ndofs) for i in range(10)]
+    args = [rng.normal(size=ndofs) for i in range(10)]
 
     assert_close_to_numpy_in_containers(
             actx, lambda _np, *_args: _np.stack(_args), args)
 
 
 def test_actx_concatenate(actx_factory):
+    rng = np.random.default_rng()
     actx = actx_factory()
 
     ndofs = 5000
-    args = [np.random.randn(ndofs) for i in range(10)]
+    args = [rng.normal(size=ndofs) for i in range(10)]
 
     assert_close_to_numpy(
             actx, lambda _np, *_args: _np.concatenate(_args), args)
 
 
 def test_actx_reshape(actx_factory):
+    rng = np.random.default_rng()
     actx = actx_factory()
 
     for new_shape in [(3, 2), (3, -1), (6,), (-1,)]:
         assert_close_to_numpy(
                 actx, lambda _np, *_args: _np.reshape(*_args),
-                (np.random.randn(2, 3), new_shape))
+                (rng.normal(size=(2, 3)), new_shape))
 
 
 def test_actx_ravel(actx_factory):
@@ -478,6 +495,7 @@ def test_actx_ravel(actx_factory):
 # {{{ arithmetic same as numpy
 
 def test_dof_array_arithmetic_same_as_numpy(actx_factory):
+    rng = np.random.default_rng()
     actx = actx_factory()
 
     ndofs = 50_000
@@ -499,12 +517,12 @@ def test_dof_array_arithmetic_same_as_numpy(actx_factory):
             (operator.truediv, 2, False),
             (operator.pow, 2, False),
             # FIXME pyopencl.Array doesn't do mod.
-            #(operator.mod, 2, True),
-            #(operator.mod, 2, False),
-            #(operator.imod, 2, True),
-            #(operator.imod, 2, False),
+            # (operator.mod, 2, True),
+            # (operator.mod, 2, False),
+            # (operator.imod, 2, True),
+            # (operator.imod, 2, False),
             # FIXME: Two outputs
-            #(divmod, 2, False),
+            # (divmod, 2, False),
 
             (operator.iadd, 2, False),
             (operator.isub, 2, False),
@@ -559,9 +577,9 @@ def test_dof_array_arithmetic_same_as_numpy(actx_factory):
                 op_func_actx = op_func
 
             args = [
-                    (0.5+np.random.rand(ndofs)
+                    (0.5+rng.uniform(size=ndofs)
                         if not use_integers else
-                        np.random.randint(3, 200, ndofs))
+                        rng.integers(3, 200, size=ndofs))
 
                     if is_array_flag else
                     (uniform(0.5, 2)
@@ -631,9 +649,10 @@ def test_dof_array_arithmetic_same_as_numpy(actx_factory):
 
 @pytest.mark.parametrize("op", ["sum", "min", "max"])
 def test_reductions_same_as_numpy(actx_factory, op):
+    rng = np.random.default_rng()
     actx = actx_factory()
 
-    ary = np.random.randn(3000)
+    ary = rng.normal(size=3000)
     np_red = getattr(np, op)(ary)
     actx_red = getattr(actx.np, op)(actx.from_numpy(ary))
     actx_red = actx.to_numpy(actx_red)
@@ -713,8 +732,9 @@ def test_array_equal(actx_factory):
 ])
 def test_array_context_einsum_array_manipulation(actx_factory, spec):
     actx = actx_factory()
+    rng = np.random.default_rng()
 
-    mat = actx.from_numpy(np.random.randn(10, 10))
+    mat = actx.from_numpy(rng.normal(size=(10, 10)))
     res = actx.to_numpy(actx.einsum(spec, mat,
                                     tagged=(FirstAxisIsElementsTag())))
     ans = np.einsum(spec, actx.to_numpy(mat))
@@ -728,9 +748,10 @@ def test_array_context_einsum_array_manipulation(actx_factory, spec):
 ])
 def test_array_context_einsum_array_matmatprods(actx_factory, spec):
     actx = actx_factory()
+    rng = np.random.default_rng()
 
-    mat_a = actx.from_numpy(np.random.randn(5, 5))
-    mat_b = actx.from_numpy(np.random.randn(5, 5))
+    mat_a = actx.from_numpy(rng.normal(size=(5, 5)))
+    mat_b = actx.from_numpy(rng.normal(size=(5, 5)))
     res = actx.to_numpy(actx.einsum(spec, mat_a, mat_b,
                                     tagged=(FirstAxisIsElementsTag())))
     ans = np.einsum(spec, actx.to_numpy(mat_a), actx.to_numpy(mat_b))
@@ -742,10 +763,11 @@ def test_array_context_einsum_array_matmatprods(actx_factory, spec):
 ])
 def test_array_context_einsum_array_tripleprod(actx_factory, spec):
     actx = actx_factory()
+    rng = np.random.default_rng()
 
-    mat_a = actx.from_numpy(np.random.randn(7, 5))
-    mat_b = actx.from_numpy(np.random.randn(5, 7))
-    vec = actx.from_numpy(np.random.randn(7))
+    mat_a = actx.from_numpy(rng.normal(size=(7, 5)))
+    mat_b = actx.from_numpy(rng.normal(size=(5, 7)))
+    vec = actx.from_numpy(rng.normal(size=7))
     res = actx.to_numpy(actx.einsum(spec, mat_a, mat_b, vec,
                                     tagged=(FirstAxisIsElementsTag())))
     ans = np.einsum(spec,
@@ -768,8 +790,11 @@ def test_container_map_on_device_scalar(actx_factory):
     arys += (np.pi,)
 
     from arraycontext import (
-        map_array_container, map_reduce_array_container, rec_map_array_container,
-        rec_map_reduce_array_container)
+        map_array_container,
+        map_reduce_array_container,
+        rec_map_array_container,
+        rec_map_reduce_array_container,
+    )
 
     for size, ary in zip(expected_sizes, arys[:-1]):
         result = map_array_container(lambda x: x, ary)
@@ -785,7 +810,7 @@ def test_container_map_on_device_scalar(actx_factory):
 
 def test_container_map(actx_factory):
     actx = actx_factory()
-    ary_dof, ary_of_dofs, mat_of_dofs, dc_of_dofs, bcast_dc_of_dofs = \
+    ary_dof, ary_of_dofs, mat_of_dofs, dc_of_dofs, _bcast_dc_of_dofs = \
             _get_test_containers(actx)
 
     # {{{ check
@@ -838,7 +863,7 @@ def test_container_map(actx_factory):
 
 def test_container_multimap(actx_factory):
     actx = actx_factory()
-    ary_dof, ary_of_dofs, mat_of_dofs, dc_of_dofs, bcast_dc_of_dofs = \
+    ary_dof, ary_of_dofs, mat_of_dofs, dc_of_dofs, _bcast_dc_of_dofs = \
             _get_test_containers(actx)
 
     # {{{ check
@@ -960,13 +985,15 @@ def test_container_arithmetic(actx_factory):
 
 def test_container_freeze_thaw(actx_factory):
     actx = actx_factory()
-    ary_dof, ary_of_dofs, mat_of_dofs, dc_of_dofs, bcast_dc_of_dofs = \
+    ary_dof, ary_of_dofs, mat_of_dofs, dc_of_dofs, _bcast_dc_of_dofs = \
             _get_test_containers(actx)
 
     # {{{ check
 
     from arraycontext import (
-        get_container_context_opt, get_container_context_recursively_opt)
+        get_container_context_opt,
+        get_container_context_recursively_opt,
+    )
 
     assert get_container_context_opt(ary_of_dofs) is None
     assert get_container_context_opt(mat_of_dofs) is None
@@ -1129,13 +1156,14 @@ def test_flatten_with_leaf_class(actx_factory):
 
 def test_numpy_conversion(actx_factory):
     actx = actx_factory()
+    rng = np.random.default_rng()
 
     nelements = 42
     ac = MyContainer(
             name="test_numpy_conversion",
-            mass=np.random.rand(nelements, nelements),
-            momentum=make_obj_array([np.random.rand(nelements) for _ in range(3)]),
-            enthalpy=np.array(np.random.rand()),
+            mass=rng.uniform(size=(nelements, nelements)),
+            momentum=make_obj_array([rng.uniform(size=nelements) for _ in range(3)]),
+            enthalpy=np.array(rng.uniform()),
             )
 
     ac_actx = actx.from_numpy(ac)
@@ -1219,11 +1247,12 @@ def scale_and_orthogonalize(alpha, vel):
 
 def test_actx_compile(actx_factory):
     actx = actx_factory()
+    rng = np.random.default_rng()
 
     compiled_rhs = actx.compile(scale_and_orthogonalize)
 
-    v_x = np.random.rand(10)
-    v_y = np.random.rand(10)
+    v_x = rng.uniform(size=10)
+    v_y = rng.uniform(size=10)
 
     vel = actx.from_numpy(Velocity2D(v_x, v_y, actx))
 
@@ -1236,11 +1265,12 @@ def test_actx_compile(actx_factory):
 
 def test_actx_compile_python_scalar(actx_factory):
     actx = actx_factory()
+    rng = np.random.default_rng()
 
     compiled_rhs = actx.compile(scale_and_orthogonalize)
 
-    v_x = np.random.rand(10)
-    v_y = np.random.rand(10)
+    v_x = rng.uniform(size=10)
+    v_y = rng.uniform(size=10)
 
     vel = actx.from_numpy(Velocity2D(v_x, v_y, actx))
 
@@ -1253,11 +1283,12 @@ def test_actx_compile_python_scalar(actx_factory):
 
 def test_actx_compile_kwargs(actx_factory):
     actx = actx_factory()
+    rng = np.random.default_rng()
 
     compiled_rhs = actx.compile(scale_and_orthogonalize)
 
-    v_x = np.random.rand(10)
-    v_y = np.random.rand(10)
+    v_x = rng.uniform(size=10)
+    v_y = rng.uniform(size=10)
 
     vel = actx.from_numpy(Velocity2D(v_x, v_y, actx))
 
@@ -1273,6 +1304,7 @@ def test_actx_compile_with_tuple_output_keys(actx_factory):
     # key stringification logic.
     from arraycontext import from_numpy, to_numpy
     actx = actx_factory()
+    rng = np.random.default_rng()
 
     def my_rhs(scale, vel):
         result = np.empty((1, 1), dtype=object)
@@ -1281,8 +1313,8 @@ def test_actx_compile_with_tuple_output_keys(actx_factory):
 
     compiled_rhs = actx.compile(my_rhs)
 
-    v_x = np.random.rand(10)
-    v_y = np.random.rand(10)
+    v_x = rng.uniform(size=10)
+    v_y = rng.uniform(size=10)
 
     vel = from_numpy(Velocity2D(v_x, v_y, actx), actx)
 
@@ -1300,9 +1332,9 @@ def test_actx_compile_with_tuple_output_keys(actx_factory):
 def test_container_equality(actx_factory):
     actx = actx_factory()
 
-    ary_dof, _, _, dc_of_dofs, bcast_dc_of_dofs = \
+    ary_dof, _, _, _dc_of_dofs, bcast_dc_of_dofs = \
             _get_test_containers(actx)
-    _, _, _, dc_of_dofs_2, bcast_dc_of_dofs_2 = \
+    _, _, _, _dc_of_dofs_2, bcast_dc_of_dofs_2 = \
             _get_test_containers(actx)
 
     # MyContainer sets eq_comparison to False, so equality comparison should
diff --git a/test/test_pytato_arraycontext.py b/test/test_pytato_arraycontext.py
index eea114468b25e4333baf0c02a37dfe76e46e9e0e..7922f3833c8ea9168fb88f5cc0595c163320fbed 100644
--- a/test/test_pytato_arraycontext.py
+++ b/test/test_pytato_arraycontext.py
@@ -29,7 +29,9 @@ import pytest
 from pytools.tag import Tag
 
 from arraycontext import (
-    PytatoPyOpenCLArrayContext, pytest_generate_tests_for_array_contexts)
+    PytatoPyOpenCLArrayContext,
+    pytest_generate_tests_for_array_contexts,
+)
 from arraycontext.pytest import _PytestPytatoPyOpenCLArrayContextFactory
 
 
diff --git a/test/test_utils.py b/test/test_utils.py
index 4bb49c87ec183487ace560a7058ee53f5ae1eea4..04817d6a3ae0afa500cd6faafd176021812771f4 100644
--- a/test/test_utils.py
+++ b/test/test_utils.py
@@ -151,7 +151,10 @@ def test_stringify_array_container_tree() -> None:
     from dataclasses import dataclass
 
     from arraycontext import (
-        Array, dataclass_array_container, stringify_array_container_tree)
+        Array,
+        dataclass_array_container,
+        stringify_array_container_tree,
+    )
 
     @dataclass_array_container
     @dataclass(frozen=True)