From 8a2c5f23c819b0b8b37067d33a46a948a275cdb9 Mon Sep 17 00:00:00 2001
From: Andreas Kloeckner <inform@tiker.net>
Date: Sun, 20 Jan 2013 20:42:51 -0500
Subject: [PATCH] Shrink sizes on which scans are tested, to avoid puzzling
 out-of-memory errors.

---
 test/test_algorithm.py | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/test/test_algorithm.py b/test/test_algorithm.py
index c927fee4..0bb4f084 100644
--- a/test/test_algorithm.py
+++ b/test/test_algorithm.py
@@ -470,7 +470,7 @@ scan_test_counts = [
     2 ** 20 + 1,
     2 ** 20,
     2 ** 23 + 3,
-    2 ** 24 + 5
+    # larger sizes cause out-of-memory on low-end AMD APUs
     ]
 
 @pytools.test.mark_test.opencl
@@ -536,6 +536,8 @@ def test_partition(ctx_factory):
 
     from pyopencl.clrandom import rand as clrand
     for n in scan_test_counts:
+        print "part", n
+
         a_dev = clrand(queue, (n,), dtype=np.int32, a=0, b=1000)
         a = a_dev.get()
 
@@ -714,7 +716,8 @@ def test_sort(ctx_factory):
 
     from time import time
 
-    for n in scan_test_counts:
+    # intermediate arrays for largest size cause out-of-memory on low-end GPUs
+    for n in scan_test_counts[:-1]:
         print(n)
 
         print("  rng")
-- 
GitLab