diff --git a/pytools/persistent_dict.py b/pytools/persistent_dict.py
index 3fe69482955889499083d459dac68c995470e6a7..06e0f00334b791bcbc7dd94d225c22603db92783 100644
--- a/pytools/persistent_dict.py
+++ b/pytools/persistent_dict.py
@@ -2,7 +2,10 @@
 
 from __future__ import division, with_statement, absolute_import
 
-__copyright__ = "Copyright (C) 2011,2014 Andreas Kloeckner"
+__copyright__ = """
+Copyright (C) 2011,2014 Andreas Kloeckner
+Copyright (C) 2017 Matt Wala
+"""
 
 __license__ = """
 Permission is hereby granted, free of charge, to any person obtaining a copy
@@ -28,9 +31,11 @@ import logging
 logger = logging.getLogger(__name__)
 
 
+import collections
 import six
 import sys
 import os
+import shutil
 import errno
 
 __doc__ = """
@@ -41,8 +46,11 @@ This module contains functionality that allows hashing with keys that remain
 valid across interpreter invocations, unlike Python's built-in hashes.
 
 .. autoexception:: NoSuchEntryError
+.. autoexception:: ReadOnlyEntryError
+
 .. autoclass:: KeyBuilder
 .. autoclass:: PersistentDict
+.. autoclass:: WriteOncePersistentDict
 """
 
 try:
@@ -54,17 +62,13 @@ except ImportError:
     new_hash = sha.new
 
 
-def _erase_dir(dir):
-    from os import listdir, unlink, rmdir
-    from os.path import join, isdir
-    for name in listdir(dir):
-        sub_name = join(dir, name)
-        if isdir(sub_name):
-            _erase_dir(sub_name)
-        else:
-            unlink(sub_name)
-
-    rmdir(dir)
+def _make_dir_recursively(dir):
+    try:
+        os.makedirs(dir)
+    except OSError as e:
+        from errno import EEXIST
+        if e.errno != EEXIST:
+            raise
 
 
 def update_checksum(checksum, obj):
@@ -97,34 +101,33 @@ class CleanupManager(CleanupBase):
 
 
 class LockManager(CleanupBase):
-    def __init__(self, cleanup_m, container_dir):
-        if container_dir is not None:
-            self.lock_file = os.path.join(container_dir, "lock")
+    def __init__(self, cleanup_m, lock_file):
+        self.lock_file = lock_file
 
-            attempts = 0
-            while True:
-                try:
-                    self.fd = os.open(self.lock_file,
-                            os.O_CREAT | os.O_WRONLY | os.O_EXCL)
-                    break
-                except OSError:
-                    pass
+        attempts = 0
+        while True:
+            try:
+                self.fd = os.open(self.lock_file,
+                        os.O_CREAT | os.O_WRONLY | os.O_EXCL)
+                break
+            except OSError:
+                pass
 
-                from time import sleep
-                sleep(1)
+            from time import sleep
+            sleep(1)
 
-                attempts += 1
+            attempts += 1
 
-                if attempts > 10:
-                    from warnings import warn
-                    warn("could not obtain lock--delete '%s' if necessary"
-                            % self.lock_file)
-                if attempts > 3 * 60:
-                    raise RuntimeError("waited more than three minutes "
-                            "on the lock file '%s'"
-                            "--something is wrong" % self.lock_file)
+            if attempts > 10:
+                from warnings import warn
+                warn("could not obtain lock--delete '%s' if necessary"
+                        % self.lock_file)
+            if attempts > 3 * 60:
+                raise RuntimeError("waited more than three minutes "
+                        "on the lock file '%s'"
+                        "--something is wrong" % self.lock_file)
 
-            cleanup_m.register(self)
+        cleanup_m.register(self)
 
     def clean_up(self):
         import os
@@ -136,34 +139,26 @@ class LockManager(CleanupBase):
 
 
 class ItemDirManager(CleanupBase):
-    def __init__(self, cleanup_m, path):
-        from os import mkdir
-        import errno
+    def __init__(self, cleanup_m, path, delete_on_error):
+        from os.path import isdir
 
+        self.existed = isdir(path)
         self.path = path
-        try:
-            mkdir(self.path)
-        except OSError as e:
-            if e.errno != errno.EEXIST:
-                raise
-            self.existed = True
-        else:
-            cleanup_m.register(self)
-            self.existed = False
+        self.delete_on_error = delete_on_error
 
-    def sub(self, n):
-        from os.path import join
-        return join(self.path, n)
+        cleanup_m.register(self)
 
     def reset(self):
         try:
-            _erase_dir(self.path)
+            shutil.rmtree(self.path)
         except OSError as e:
             if e.errno != errno.ENOENT:
                 raise
 
+    def mkdir(self):
+        from os import mkdir
         try:
-            os.mkdir(self.path)
+            mkdir(self.path)
         except OSError as e:
             if e.errno != errno.EEXIST:
                 raise
@@ -172,7 +167,8 @@ class ItemDirManager(CleanupBase):
         pass
 
     def error_clean_up(self):
-        _erase_dir(self.path)
+        if self.delete_on_error:
+            self.reset()
 
 # }}}
 
@@ -268,25 +264,139 @@ class KeyBuilder(object):
 # }}}
 
 
+# {{{ lru cache
+
+class _LinkedList(object):
+    """The list operates on nodes of the form [value, leftptr, rightptr]. To create
+    a node of this form you can use `_LinkedList.new_node()`.
+
+    Supports inserting at the left and deleting from an arbitrary location.
+    """
+    def __init__(self):
+        self.count = 0
+        self.head = None
+        self.end = None
+
+    @staticmethod
+    def new_node(element):
+        return [element, None, None]
+
+    def __len__(self):
+        return self.count
+
+    def appendleft_node(self, node):
+        self.count += 1
+
+        if self.head is None:
+            self.head = self.end = node
+            return
+
+        self.head[1] = node
+        node[2] = self.head
+
+        self.head = node
+
+    def pop_node(self):
+        end = self.end
+        self.remove_node(end)
+        return end
+
+    def remove_node(self, node):
+        self.count -= 1
+
+        if self.head is self.end:
+            assert node is self.head
+            self.head = self.end = None
+            return
+
+        left = node[1]
+        right = node[2]
+
+        if left is None:
+            self.head = right
+        else:
+            left[2] = right
+
+        if right is None:
+            self.end = left
+        else:
+            right[1] = left
+
+        node[1] = node[2] = None
+
+
+class _LRUCache(collections.MutableMapping):
+    """A mapping that keeps at most *maxsize* items with an LRU replacement policy.
+    """
+    def __init__(self, maxsize):
+        self.lru_order = _LinkedList()
+        self.maxsize = maxsize
+        self.cache = {}
+
+    def __delitem__(self, item):
+        node = self.cache[item]
+        self.lru_order.remove_node(node)
+        del self.cache[item]
+
+    def __getitem__(self, item):
+        node = self.cache[item]
+        self.lru_order.remove_node(node)
+        self.lru_order.appendleft_node(node)
+        # A linked list node contains a tuple of the form (item, value).
+        return node[0][1]
+
+    def __contains__(self, item):
+        return item in self.cache
+
+    def __iter__(self):
+        return iter(self.cache)
+
+    def __len__(self):
+        return len(self.cache)
+
+    def clear(self):
+        self.cache.clear()
+        self.lru_order = _LinkedList()
+
+    def __setitem__(self, item, value):
+        if self.maxsize < 1:
+            return
+
+        try:
+            node = self.cache[item]
+            self.lru_order.remove_node(node)
+        except KeyError:
+            if len(self.lru_order) >= self.maxsize:
+                # Make room for new elements.
+                end_node = self.lru_order.pop_node()
+                del self.cache[end_node[0][0]]
+
+            node = self.lru_order.new_node((item, value))
+            self.cache[item] = node
+
+        self.lru_order.appendleft_node(node)
+
+        assert len(self.cache) == len(self.lru_order), \
+                (len(self.cache), len(self.lru_order))
+        assert len(self.lru_order) <= self.maxsize
+
+        return node[0]
+
+# }}}
+
+
 # {{{ top-level
 
 class NoSuchEntryError(KeyError):
     pass
 
 
-class PersistentDict(object):
-    def __init__(self, identifier, key_builder=None, container_dir=None):
-        """
-        :arg identifier: a file-name-compatible string identifying this
-            dictionary
-        :arg key_builder: a subclass of :class:`KeyBuilder`
+class ReadOnlyEntryError(KeyError):
+    pass
 
-        .. automethod:: __getitem__
-        .. automethod:: __setitem__
-        .. automethod:: __delitem__
-        .. automethod:: clear
-        """
 
+class _PersistentDictBase(object):
+    def __init__(self, identifier, key_builder=None, container_dir=None):
         self.identifier = identifier
 
         if key_builder is None:
@@ -307,52 +417,258 @@ class PersistentDict(object):
 
         self._make_container_dir()
 
+    def store(self, key, value):
+        raise NotImplementedError()
+
+    def fetch(self, key):
+        raise NotImplementedError()
+
+    def _read(self, path):
+        from six.moves.cPickle import load
+        with open(path, "rb") as inf:
+            return load(inf)
+
+    def _write(self, path, value):
+        from six.moves.cPickle import dump, HIGHEST_PROTOCOL
+        with open(path, "wb") as outf:
+            dump(value, outf, protocol=HIGHEST_PROTOCOL)
+
+    def _item_dir(self, hexdigest_key):
+        from os.path import join
+        return join(self.container_dir, hexdigest_key)
+
+    def _key_file(self, hexdigest_key):
+        from os.path import join
+        return join(self._item_dir(hexdigest_key), "key")
+
+    def _contents_file(self, hexdigest_key):
+        from os.path import join
+        return join(self._item_dir(hexdigest_key), "contents")
+
+    def _lock_file(self, hexdigest_key):
+        from os.path import join
+        return join(self.container_dir, str(hexdigest_key) + ".lock")
+
     def _make_container_dir(self):
-        # {{{ ensure container directory exists
+        _make_dir_recursively(self.container_dir)
+
+    def _collision_check(self, key, stored_key):
+        if stored_key != key:
+            # Key collision, oh well.
+            from warnings import warn
+            warn("%s: key collision in cache at '%s' -- these are "
+                    "sufficiently unlikely that they're often "
+                    "indicative of a broken implementation "
+                    "of equality comparison"
+                    % (self.identifier, self.container_dir))
+            # This is here so we can debug the equality comparison
+            stored_key == key
+            raise NoSuchEntryError(key)
 
+    def __getitem__(self, key):
+        return self.fetch(key)
+
+    def __setitem__(self, key, value):
+        self.store(key, value)
+
+    def __delitem__(self, key):
+        raise NotImplementedError()
+
+    def clear(self):
         try:
-            os.makedirs(self.container_dir)
+            shutil.rmtree(self.container_dir)
         except OSError as e:
-            from errno import EEXIST
-            if e.errno != EEXIST:
+            if e.errno != errno.ENOENT:
                 raise
 
-        # }}}
+        self._make_container_dir()
 
-    def store(self, key, value, info_files={}):
+
+class WriteOncePersistentDict(_PersistentDictBase):
+    def __init__(self, identifier, key_builder=None, container_dir=None,
+             in_mem_cache_size=256):
+        """
+        :arg identifier: a file-name-compatible string identifying this
+            dictionary
+        :arg key_builder: a subclass of :class:`KeyBuilder`
+        :arg in_mem_cache_size: retain an in-memory cache of up to
+            *in_mem_cache_size* items
+
+        .. automethod:: __getitem__
+        .. automethod:: __setitem__
+        .. automethod:: clear
+        """
+        _PersistentDictBase.__init__(self, identifier, key_builder, container_dir)
+        self._cache = _LRUCache(in_mem_cache_size)
+
+    def _spin_until_removed(self, lock_file):
+        from os.path import exists
+
+        attempts = 0
+        while exists(lock_file):
+            from time import sleep
+            sleep(1)
+
+            attempts += 1
+
+            if attempts > 10:
+                from warnings import warn
+                warn("waiting until unlocked--delete '%s' if necessary"
+                        % lock_file)
+
+            if attempts > 3 * 60:
+                raise RuntimeError("waited more than three minutes "
+                        "on the lock file '%s'"
+                        "--something is wrong" % lock_file)
+
+    def store(self, key, value):
         hexdigest_key = self.key_builder(key)
 
         cleanup_m = CleanupManager()
         try:
             try:
-                LockManager(cleanup_m, self.container_dir)
-
-                from os.path import join
-                item_dir_m = ItemDirManager(cleanup_m,
-                        join(self.container_dir, hexdigest_key))
+                LockManager(cleanup_m, self._lock_file(hexdigest_key))
+                item_dir_m = ItemDirManager(
+                        cleanup_m, self._item_dir(hexdigest_key),
+                        delete_on_error=False)
 
                 if item_dir_m.existed:
-                    item_dir_m.reset()
+                    raise ReadOnlyEntryError(key)
 
-                for info_name, info_value in six.iteritems(info_files):
-                    info_path = item_dir_m.sub("info_"+info_name)
+                item_dir_m.mkdir()
 
-                    with open(info_path, "wt") as outf:
-                        outf.write(info_value)
+                key_path = self._key_file(hexdigest_key)
+                value_path = self._contents_file(hexdigest_key)
 
-                from six.moves.cPickle import dump, HIGHEST_PROTOCOL
-                value_path = item_dir_m.sub("contents")
-                with open(value_path, "wb") as outf:
-                    dump(value, outf, protocol=HIGHEST_PROTOCOL)
+                self._write(value_path, value)
+                self._write(key_path, key)
 
-                logger.debug("%s: cache store [key=%s]" % (
+                logger.debug("%s: disk cache store [key=%s]" % (
+                        self.identifier, hexdigest_key))
+            except:
+                cleanup_m.error_clean_up()
+                raise
+        finally:
+            cleanup_m.clean_up()
+
+    def fetch(self, key):
+        hexdigest_key = self.key_builder(key)
+
+        # {{{ in memory cache
+
+        try:
+            stored_key, stored_value = self._cache[hexdigest_key]
+        except KeyError:
+            pass
+        else:
+            logger.debug("%s: in mem cache hit [key=%s]" % (
                     self.identifier, hexdigest_key))
+            self._collision_check(key, stored_key)
+            return stored_value
+
+        # }}}
+
+        # {{{ check path exists and is unlocked
+
+        item_dir = self._item_dir(hexdigest_key)
+
+        from os.path import isdir
+        if not isdir(item_dir):
+            logger.debug("%s: disk cache miss [key=%s]" % (
+                    self.identifier, hexdigest_key))
+            raise NoSuchEntryError(key)
+
+        lock_file = self._lock_file(hexdigest_key)
+        self._spin_until_removed(lock_file)
+
+        # }}}
+
+        key_file = self._key_file(hexdigest_key)
+        contents_file = self._contents_file(hexdigest_key)
+
+        # Note: Unlike PersistentDict, this doesn't autodelete invalid entries,
+        # because that would lead to a race condition.
+
+        # {{{ load key file and do equality check
+
+        try:
+            read_key = self._read(key_file)
+        except:
+            from warnings import warn
+            warn("pytools.persistent_dict.WriteOncePersistentDict(%s) "
+                    "encountered an invalid "
+                    "key file for key %s. Remove the directory "
+                    "'%s' if necessary."
+                    % (self.identifier, hexdigest_key, item_dir))
+            raise NoSuchEntryError(key)
+
+        self._collision_check(key, read_key)
+
+        # }}}
+
+        logger.debug("%s: disk cache hit [key=%s]" % (
+                self.identifier, hexdigest_key))
+
+        # {{{ load contents
+
+        try:
+            read_contents = self._read(contents_file)
+        except:
+            from warnings import warn; warn(
+                    "pytools.persistent_dict.WriteOncePersistentDict(%s) "
+                    "encountered an invalid contents file for key %s. "
+                    "Remove the directory '%s' if necessary."
+                    % (self.identifier, hexdigest_key, item_dir))
+            raise NoSuchEntryError(key)
+
+        # }}}
+
+        self._cache[hexdigest_key] = (key, read_contents)
+        return read_contents
+
+    def clear(self):
+        _PersistentDictBase.clear(self)
+        self._cache.clear()
+
+
+class PersistentDict(_PersistentDictBase):
+    def __init__(self, identifier, key_builder=None, container_dir=None):
+        """
+        :arg identifier: a file-name-compatible string identifying this
+            dictionary
+        :arg key_builder: a subclass of :class:`KeyBuilder`
+
+        .. automethod:: __getitem__
+        .. automethod:: __setitem__
+        .. automethod:: __delitem__
+        .. automethod:: clear
+        """
+        _PersistentDictBase.__init__(self, identifier, key_builder, container_dir)
+
+    def store(self, key, value):
+        hexdigest_key = self.key_builder(key)
+
+        cleanup_m = CleanupManager()
+        try:
+            try:
+                LockManager(cleanup_m, self._lock_file(hexdigest_key))
+                item_dir_m = ItemDirManager(
+                        cleanup_m, self._item_dir(hexdigest_key),
+                        delete_on_error=True)
+
+                if item_dir_m.existed:
+                    item_dir_m.reset()
+
+                item_dir_m.mkdir()
+
+                key_path = self._key_file(hexdigest_key)
+                value_path = self._contents_file(hexdigest_key)
 
-                # Write key last, so that if the reader below
-                key_path = item_dir_m.sub("key")
-                with open(key_path, "wb") as outf:
-                    dump(key, outf, protocol=HIGHEST_PROTOCOL)
+                self._write(value_path, value)
+                self._write(key_path, key)
 
+                logger.debug("%s: cache store [key=%s]" % (
+                        self.identifier, hexdigest_key))
             except:
                 cleanup_m.error_clean_up()
                 raise
@@ -361,38 +677,29 @@ class PersistentDict(object):
 
     def fetch(self, key):
         hexdigest_key = self.key_builder(key)
+        item_dir = self._item_dir(hexdigest_key)
 
-        from os.path import join, isdir
-        item_dir = join(self.container_dir, hexdigest_key)
+        from os.path import isdir
         if not isdir(item_dir):
             logger.debug("%s: cache miss [key=%s]" % (
-                self.identifier, hexdigest_key))
+                    self.identifier, hexdigest_key))
             raise NoSuchEntryError(key)
 
         cleanup_m = CleanupManager()
         try:
             try:
-                LockManager(cleanup_m, self.container_dir)
-
-                item_dir_m = ItemDirManager(cleanup_m, item_dir)
-                key_path = item_dir_m.sub("key")
-                value_path = item_dir_m.sub("contents")
+                LockManager(cleanup_m, self._lock_file(hexdigest_key))
+                item_dir_m = ItemDirManager(
+                        cleanup_m, item_dir, delete_on_error=False)
 
-                from six.moves.cPickle import load
+                key_path = self._key_file(hexdigest_key)
+                value_path = self._contents_file(hexdigest_key)
 
-                # {{{ load key file
-
-                exc = None
+                # {{{ load key
 
                 try:
-                    with open(key_path, "rb") as inf:
-                        read_key = load(inf)
-                except IOError as e:
-                    exc = e
-                except EOFError as e:
-                    exc = e
-
-                if exc is not None:
+                    read_key = self._read(key_path)
+                except:
                     item_dir_m.reset()
                     from warnings import warn
                     warn("pytools.persistent_dict.PersistentDict(%s) "
@@ -401,36 +708,18 @@ class PersistentDict(object):
                             % (self.identifier, hexdigest_key))
                     raise NoSuchEntryError(key)
 
-                # }}}
+                self._collision_check(key, read_key)
 
-                if read_key != key:
-                    # Key collision, oh well.
-                    from warnings import warn
-                    warn("%s: key collision in cache at '%s' -- these are "
-                            "sufficiently unlikely that they're often "
-                            "indicative of a broken implementation "
-                            "of equality comparison"
-                            % (self.identifier, self.container_dir))
-                    # This is here so we can debug the equality comparison
-                    read_key == key
-                    raise NoSuchEntryError(key)
+                # }}}
 
                 logger.debug("%s: cache hit [key=%s]" % (
-                    self.identifier, hexdigest_key))
+                        self.identifier, hexdigest_key))
 
                 # {{{ load value
 
-                exc = None
-
                 try:
-                    with open(value_path, "rb") as inf:
-                        read_contents = load(inf)
-                except IOError as e:
-                    exc = e
-                except EOFError as e:
-                    exc = e
-
-                if exc is not None:
+                    read_contents = self._read(value_path)
+                except:
                     item_dir_m.reset()
                     from warnings import warn
                     warn("pytools.persistent_dict.PersistentDict(%s) "
@@ -439,10 +728,10 @@ class PersistentDict(object):
                             % (self.identifier, hexdigest_key))
                     raise NoSuchEntryError(key)
 
-                # }}}
-
                 return read_contents
 
+                # }}}
+
             except:
                 cleanup_m.error_clean_up()
                 raise
@@ -452,17 +741,36 @@ class PersistentDict(object):
     def remove(self, key):
         hexdigest_key = self.key_builder(key)
 
-        from os.path import join, isdir
-        item_dir = join(self.container_dir, hexdigest_key)
+        item_dir = self._item_dir(hexdigest_key)
+        from os.path import isdir
         if not isdir(item_dir):
             raise NoSuchEntryError(key)
 
         cleanup_m = CleanupManager()
         try:
             try:
-                LockManager(cleanup_m, self.container_dir)
+                LockManager(cleanup_m, self._lock_file(hexdigest_key))
+                item_dir_m = ItemDirManager(
+                        cleanup_m, item_dir, delete_on_error=False)
+                key_file = self._key_file(hexdigest_key)
+
+                # {{{ load key
+
+                try:
+                    read_key = self._read(key_file)
+                except:
+                    item_dir_m.reset()
+                    from warnings import warn
+                    warn("pytools.persistent_dict.PersistentDict(%s) "
+                            "encountered an invalid "
+                            "key file for key %s. Entry deleted."
+                            % (self.identifier, hexdigest_key))
+                    raise NoSuchEntryError(key)
+
+                self._collision_check(key, read_key)
+
+                # }}}
 
-                item_dir_m = ItemDirManager(cleanup_m, item_dir)
                 item_dir_m.reset()
 
             except:
@@ -471,24 +779,9 @@ class PersistentDict(object):
         finally:
             cleanup_m.clean_up()
 
-    def __getitem__(self, key):
-        return self.fetch(key)
-
-    def __setitem__(self, key, value):
-        return self.store(key, value)
-
     def __delitem__(self, key):
         self.remove(key)
 
-    def clear(self):
-        try:
-            _erase_dir(self.container_dir)
-        except OSError as e:
-            if e.errno != errno.ENOENT:
-                raise
-
-        self._make_container_dir()
-
 # }}}
 
 # vim: foldmethod=marker
diff --git a/pytools/version.py b/pytools/version.py
index c66785cc9772cfec91f2e9906198d4a7686c9ee4..a63be7670cea7fe4443b2630e80a30195c368d48 100644
--- a/pytools/version.py
+++ b/pytools/version.py
@@ -1,3 +1,3 @@
-VERSION = (2017, 4)
+VERSION = (2017, 5)
 VERSION_STATUS = ""
 VERSION_TEXT = ".".join(str(x) for x in VERSION) + VERSION_STATUS
diff --git a/test/test_persistent_dict.py b/test/test_persistent_dict.py
index 3da5748140e6e6512f877834df109f7dfd6ccc86..dfb1cdc4f9d69b0314990e5de16b770331fec556 100644
--- a/test/test_persistent_dict.py
+++ b/test/test_persistent_dict.py
@@ -2,39 +2,291 @@ from __future__ import division, with_statement, absolute_import
 
 import pytest  # noqa
 import sys  # noqa
+import tempfile
+import shutil
+
 from six.moves import range
 from six.moves import zip
 
+from pytools.persistent_dict import (
+        PersistentDict, WriteOncePersistentDict, NoSuchEntryError,
+        ReadOnlyEntryError)
+
+
+# {{{ type for testing
+
+class PDictTestingKeyOrValue(object):
+
+    def __init__(self, val, hash_key=None):
+        self.val = val
+        if hash_key is None:
+            hash_key = val
+        self.hash_key = hash_key
+
+    def __getstate__(self):
+        return {"val": self.val, "hash_key": self.hash_key}
+
+    def __eq__(self, other):
+        return self.val == other.val
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+    def update_persistent_hash(self, key_hash, key_builder):
+        key_builder.rec(key_hash, self.hash_key)
+
+    def __repr__(self):
+        return "PDictTestingKeyOrValue(val=%r,hash_key=%r)" % (
+                (self.val, self.hash_key))
+
+    __str__ = __repr__
+
+# }}}
+
+
+def test_persistent_dict_storage_and_lookup():
+    try:
+        tmpdir = tempfile.mkdtemp()
+        pdict = PersistentDict("pytools-test", container_dir=tmpdir)
+
+        from random import randrange
+
+        def rand_str(n=20):
+            return "".join(
+                    chr(65+randrange(26))
+                    for i in range(n))
+
+        keys = [(randrange(2000), rand_str(), None) for i in range(20)]
+        values = [randrange(2000) for i in range(20)]
+
+        d = dict(list(zip(keys, values)))
+
+        # {{{ check lookup
+
+        for k, v in zip(keys, values):
+            pdict[k] = v
+
+        for k, v in d.items():
+            assert d[k] == pdict[k]
+
+        # }}}
+
+        # {{{ check updating
+
+        for k, v in zip(keys, values):
+            pdict[k] = v + 1
+
+        for k, v in d.items():
+            assert d[k] + 1 == pdict[k]
+
+        # }}}
+
+        # {{{ check not found
+
+        with pytest.raises(NoSuchEntryError):
+            pdict[3000]
+
+        # }}}
+
+    finally:
+        shutil.rmtree(tmpdir)
+
+
+def test_persistent_dict_deletion():
+    try:
+        tmpdir = tempfile.mkdtemp()
+        pdict = PersistentDict("pytools-test", container_dir=tmpdir)
+
+        pdict[0] = 0
+        del pdict[0]
+
+        with pytest.raises(NoSuchEntryError):
+            pdict[0]
+
+        with pytest.raises(NoSuchEntryError):
+            del pdict[1]
+
+    finally:
+        shutil.rmtree(tmpdir)
+
+
+def test_persistent_dict_synchronization():
+    try:
+        tmpdir = tempfile.mkdtemp()
+        pdict1 = PersistentDict("pytools-test", container_dir=tmpdir)
+        pdict2 = PersistentDict("pytools-test", container_dir=tmpdir)
+
+        # check lookup
+        pdict1[0] = 1
+        assert pdict2[0] == 1
+
+        # check updating
+        pdict1[0] = 2
+        assert pdict2[0] == 2
+
+        # check deletion
+        del pdict1[0]
+        with pytest.raises(NoSuchEntryError):
+            pdict2[0]
+
+    finally:
+        shutil.rmtree(tmpdir)
+
+
+def test_persistent_dict_cache_collisions():
+    try:
+        tmpdir = tempfile.mkdtemp()
+        pdict = PersistentDict("pytools-test", container_dir=tmpdir)
+
+        key1 = PDictTestingKeyOrValue(1, hash_key=0)
+        key2 = PDictTestingKeyOrValue(2, hash_key=0)
+
+        pdict[key1] = 1
+
+        # check lookup
+        with pytest.warns(UserWarning):
+            with pytest.raises(NoSuchEntryError):
+                pdict[key2]
+
+        # check deletion
+        with pytest.warns(UserWarning):
+            with pytest.raises(NoSuchEntryError):
+                del pdict[key2]
+
+        # check presence after deletion
+        pdict[key1]
+
+    finally:
+        shutil.rmtree(tmpdir)
+
+
+def test_persistent_dict_clear():
+    try:
+        tmpdir = tempfile.mkdtemp()
+        pdict = PersistentDict("pytools-test", container_dir=tmpdir)
+
+        pdict[0] = 1
+        pdict[0]
+        pdict.clear()
+
+        with pytest.raises(NoSuchEntryError):
+            pdict[0]
+
+    finally:
+        shutil.rmtree(tmpdir)
+
+
+@pytest.mark.parametrize("in_mem_cache_size", (0, 256))
+def test_write_once_persistent_dict_storage_and_lookup(in_mem_cache_size):
+    try:
+        tmpdir = tempfile.mkdtemp()
+        pdict = WriteOncePersistentDict(
+                "pytools-test", container_dir=tmpdir,
+                in_mem_cache_size=in_mem_cache_size)
+
+        # check lookup
+        pdict[0] = 1
+        assert pdict[0] == 1
+        # do two lookups to test the cache
+        assert pdict[0] == 1
+
+        # check updating
+        with pytest.raises(ReadOnlyEntryError):
+            pdict[0] = 2
+
+        # check not found
+        with pytest.raises(NoSuchEntryError):
+            pdict[1]
+
+    finally:
+        shutil.rmtree(tmpdir)
+
+
+def test_write_once_persistent_dict_lru_policy():
+    try:
+        tmpdir = tempfile.mkdtemp()
+        pdict = WriteOncePersistentDict(
+                "pytools-test", container_dir=tmpdir, in_mem_cache_size=3)
+
+        pdict[1] = PDictTestingKeyOrValue(1)
+        pdict[2] = PDictTestingKeyOrValue(2)
+        pdict[3] = PDictTestingKeyOrValue(3)
+        pdict[4] = PDictTestingKeyOrValue(4)
+
+        val1 = pdict[1]
+
+        assert pdict[1] is val1
+        pdict[2]
+        assert pdict[1] is val1
+        pdict[2]
+        pdict[3]
+        assert pdict[1] is val1
+        pdict[2]
+        pdict[3]
+        pdict[2]
+        assert pdict[1] is val1
+        pdict[2]
+        pdict[3]
+        pdict[4]
+        assert pdict[1] is not val1
+
+    finally:
+        shutil.rmtree(tmpdir)
+
+
+def test_write_once_persistent_dict_synchronization():
+    try:
+        tmpdir = tempfile.mkdtemp()
+        pdict1 = WriteOncePersistentDict("pytools-test", container_dir=tmpdir)
+        pdict2 = WriteOncePersistentDict("pytools-test", container_dir=tmpdir)
+
+        # check lookup
+        pdict1[1] = 0
+        assert pdict2[1] == 0
+
+        # check updating
+        with pytest.raises(ReadOnlyEntryError):
+            pdict2[1] = 1
+
+    finally:
+        shutil.rmtree(tmpdir)
+
 
-def test_persistent_dict():
-    from pytools.persistent_dict import PersistentDict
-    pdict = PersistentDict("pytools-test")
-    pdict.clear()
+def test_write_once_persistent_dict_cache_collisions():
+    try:
+        tmpdir = tempfile.mkdtemp()
+        pdict = WriteOncePersistentDict("pytools-test", container_dir=tmpdir)
 
-    from random import randrange
+        key1 = PDictTestingKeyOrValue(1, hash_key=0)
+        key2 = PDictTestingKeyOrValue(2, hash_key=0)
+        pdict[key1] = 1
 
-    def rand_str(n=20):
-        return "".join(
-                chr(65+randrange(26))
-                for i in range(n))
+        # check lookup
+        with pytest.warns(UserWarning):
+            with pytest.raises(NoSuchEntryError):
+                pdict[key2]
 
-    keys = [(randrange(2000), rand_str(), None) for i in range(20)]
-    values = [randrange(2000) for i in range(20)]
+        # check update
+        with pytest.raises(ReadOnlyEntryError):
+            pdict[key2] = 1
 
-    d = dict(list(zip(keys, values)))
+    finally:
+        shutil.rmtree(tmpdir)
 
-    for k, v in zip(keys, values):
-        pdict[k] = v
-        pdict.store(k, v, info_files={"hey": str(v)})
 
-    for k, v in list(d.items()):
-        assert d[k] == pdict[k]
+def test_write_once_persistent_dict_clear():
+    try:
+        tmpdir = tempfile.mkdtemp()
+        pdict = WriteOncePersistentDict("pytools-test", container_dir=tmpdir)
 
-    for k, v in zip(keys, values):
-        pdict.store(k, v+1, info_files={"hey": str(v)})
+        pdict[0] = 1
+        pdict[0]
+        pdict.clear()
 
-    for k, v in list(d.items()):
-        assert d[k] + 1 == pdict[k]
+        with pytest.raises(NoSuchEntryError):
+            pdict[0]
+    finally:
+        shutil.rmtree(tmpdir)
 
 
 if __name__ == "__main__":