diff --git a/aksetup_helper.py b/aksetup_helper.py
index 38906d85e624312092020159d2fe1a2776f63c5e..6335e1046f2f7fc55c6e2f8446a67db7869b62f4 100644
--- a/aksetup_helper.py
+++ b/aksetup_helper.py
@@ -495,21 +495,28 @@ class BoostLibraries(Libraries):
                     % humanize(lib_base_name))
 
 
-def set_up_shipped_boost_if_requested(project_name, conf):
+def set_up_shipped_boost_if_requested(project_name, conf, source_path=None,
+        boost_chrono=False):
     """Set up the package to use a shipped version of Boost.
 
     Return a tuple of a list of extra C files to build and extra defines to be
     used.
+
+    :arg boost_chrono: one of *False* and ``"header_only"``
+        (only relevant in shipped mode)
     """
     from os.path import exists
     import sys
 
+    if source_path is None:
+        source_path = "bpl-subset/bpl_subset"
+
     if conf["USE_SHIPPED_BOOST"]:
-        if not exists("bpl-subset/bpl_subset/boost/version.hpp"):
+        if not exists("%s/boost/version.hpp" % source_path):
             print(DASH_SEPARATOR)
             print("The shipped Boost library was not found, but "
                     "USE_SHIPPED_BOOST is True.")
-            print("(The files should be under bpl-subset/.)")
+            print("(The files should be under %s/.)" % source_path)
             print(DASH_SEPARATOR)
             print("If you got this package from git, you probably want to do")
             print("")
@@ -527,34 +534,37 @@ def set_up_shipped_boost_if_requested(project_name, conf):
             count_down_delay(delay=10)
 
     if conf["USE_SHIPPED_BOOST"]:
-        conf["BOOST_INC_DIR"] = ["bpl-subset/bpl_subset"]
+        conf["BOOST_INC_DIR"] = [source_path]
         conf["BOOST_LIB_DIR"] = []
         conf["BOOST_PYTHON_LIBNAME"] = []
         conf["BOOST_THREAD_LIBNAME"] = []
 
         from glob import glob
-        source_files = (glob("bpl-subset/bpl_subset/libs/*/*/*/*.cpp")
-                + glob("bpl-subset/bpl_subset/libs/*/*/*.cpp")
-                + glob("bpl-subset/bpl_subset/libs/*/*.cpp"))
+        source_files = (glob(source_path + "/libs/*/*/*/*.cpp")
+                + glob(source_path + "/libs/*/*/*.cpp")
+                + glob(source_path + "/libs/*/*.cpp"))
 
         # make sure next line succeeds even on Windows
         source_files = [f.replace("\\", "/") for f in source_files]
 
         source_files = [f for f in source_files
-                if not f.startswith("bpl-subset/bpl_subset/libs/thread/src")]
+                if not f.startswith(source_path + "/libs/thread/src")]
 
         if sys.platform == "win32":
             source_files += glob(
-                    "bpl-subset/bpl_subset/libs/thread/src/win32/*.cpp")
+                    source_path + "/libs/thread/src/win32/*.cpp")
             source_files += glob(
-                    "bpl-subset/bpl_subset/libs/thread/src/tss_null.cpp")
+                    source_path + "/libs/thread/src/tss_null.cpp")
         else:
             source_files += glob(
-                    "bpl-subset/bpl_subset/libs/thread/src/pthread/*.cpp")
+                    source_path + "/libs/thread/src/pthread/*.cpp")
+
+            source_files = [f for f in source_files
+                    if not f.endswith("once_atomic.cpp")]
 
         from os.path import isdir
-        main_boost_inc = "bpl-subset/bpl_subset/boost"
-        bpl_project_boost_inc = "bpl-subset/bpl_subset/%sboost" % project_name
+        main_boost_inc = source_path + "/boost"
+        bpl_project_boost_inc = source_path + "/%sboost" % project_name
 
         if not isdir(bpl_project_boost_inc):
             try:
@@ -565,18 +575,24 @@ def set_up_shipped_boost_if_requested(project_name, conf):
                 print("Copying files, hang on... (do not interrupt)")
                 copytree(main_boost_inc, bpl_project_boost_inc)
 
-        return (source_files,
-                {
-                    # do not pick up libboost link dependency on windows
-                    "BOOST_ALL_NO_LIB": 1,
-                    "BOOST_THREAD_BUILD_DLL": 1,
-
-                    "BOOST_MULTI_INDEX_DISABLE_SERIALIZATION": 1,
-                    "BOOST_THREAD_DONT_USE_CHRONO": 1,
-                    "BOOST_PYTHON_SOURCE": 1,
-                    "boost": '%sboost' % project_name
-                    }
-                )
+        defines = {
+                # do not pick up libboost link dependency on windows
+                "BOOST_ALL_NO_LIB": 1,
+                "BOOST_THREAD_BUILD_DLL": 1,
+
+                "BOOST_MULTI_INDEX_DISABLE_SERIALIZATION": 1,
+                "BOOST_PYTHON_SOURCE": 1,
+                "boost": '%sboost' % project_name,
+                }
+
+        if boost_chrono is False:
+            defines["BOOST_THREAD_DONT_USE_CHRONO"] = 1
+        elif boost_chrono == "header_only":
+            defines["BOOST_CHRONO_HEADER_ONLY"] = 1
+        else:
+            raise ValueError("invalid value of 'boost_chrono'")
+
+        return (source_files, defines)
     else:
         return [], {}
 
@@ -733,7 +749,8 @@ def check_git_submodules():
         status = l[0]
         sha, package = l[1:].split(" ", 1)
 
-        if package == "bpl-subset":
+        if package == "bpl-subset" or (
+                package.startswith("boost") and package.endswith("subset")):
             # treated separately
             continue
 
diff --git a/doc/array.rst b/doc/array.rst
index 0b0168b6a571a7bb6d36fa6f7f5dca1f0121e140..e572c900fbbc01ff95fcef8bbc4aa59a2399ba1b 100644
--- a/doc/array.rst
+++ b/doc/array.rst
@@ -29,6 +29,21 @@ Vector Types
     Python code. For each type, a `make_type` function is also provided (e.g.
     `make_float3(x,y,z)`).
 
+    If you want to construct a pre-initialized vector type, you have three new
+    functions to choose from:
+
+    * `zeros_type()`
+    * `ones_type()`
+    * `filled_type(fill_value)`
+
+    .. versionadded:: 2014.1
+
+    .. versionchanged:: 2014.1
+        The `make_type` functions have a default value (0) for each component.
+        Relying on the default values has been deprecated. Either specify all
+        components or use one of the new flavors mentioned above for
+        constructing a vector.
+
 Custom data types
 ^^^^^^^^^^^^^^^^^
 
diff --git a/doc/misc.rst b/doc/misc.rst
index 1785d511271873234bd9ce8f79b037be484b7e08..794fa982281e38718217bf4cb31a93dcfc4af50d 100644
--- a/doc/misc.rst
+++ b/doc/misc.rst
@@ -459,7 +459,8 @@ Citing PyOpenCL
 We are not asking you to gratuitously cite PyOpenCL in work that is otherwise
 unrelated to software. That said, if you do discuss some of the development
 aspects of your code and would like to highlight a few of the ideas behind
-PyOpenCL, feel free to cite this article:
+PyOpenCL, feel free to cite `this article
+<http://dx.doi.org/10.1016/j.parco.2011.09.001>`_:
 
 Andreas Klöckner, Nicolas Pinto, Yunsup Lee, Bryan Catanzaro, Paul Ivanov,
 Ahmed Fasih, PyCUDA and PyOpenCL: A scripting-based approach to GPU
diff --git a/pyopencl/array.py b/pyopencl/array.py
index f72543a212d7fa4d1f6b0ce166507878f824e2b7..7622f370539476d895e5d7a215c8e088272248f7 100644
--- a/pyopencl/array.py
+++ b/pyopencl/array.py
@@ -99,14 +99,30 @@ def _create_vector_types():
 
             setattr(vec, name, dtype)
 
-            my_field_names = ",".join(field_names[:count])
-            my_field_names_defaulted = ",".join(
-                    "%s=0" % fn for fn in field_names[:count])
-            setattr(vec, "make_"+name,
-                    staticmethod(eval(
-                        "lambda %s: array((%s), dtype=my_dtype)"
-                        % (my_field_names_defaulted, my_field_names),
-                        dict(array=np.array, my_dtype=dtype))))
+            def create_array(dtype, count, padded_count, *args, **kwargs):
+                if len(args) < count:
+                    from warnings import warn
+                    warn("default values for make_xxx are deprecated;"
+                            " instead specify all parameters or use"
+                            " array.vec.zeros_xxx", DeprecationWarning)
+                padded_args = tuple(list(args)+[0]*(padded_count-len(args)))
+                array = eval("array(padded_args, dtype=dtype)",
+                        dict(array=np.array, padded_args=padded_args,
+                            dtype=dtype))
+                for key, val in kwargs.items():
+                    array[key] = val
+                return array
+
+            setattr(vec, "make_"+name, staticmethod(eval(
+                    "lambda *args, **kwargs: create_array(dtype, %i, %i, "
+                    "*args, **kwargs)" % (count, padded_count),
+                    dict(create_array=create_array, dtype=dtype))))
+            setattr(vec, "filled_"+name, staticmethod(eval(
+                    "lambda val: vec.make_%s(*[val]*%i)" % (name, count))))
+            setattr(vec, "zeros_"+name,
+                    staticmethod(eval("lambda: vec.filled_%s(0)" % (name))))
+            setattr(vec, "ones_"+name,
+                    staticmethod(eval("lambda: vec.filled_%s(1)" % (name))))
 
             vec.types[np.dtype(base_type), count] = dtype
             vec.type_to_scalar_and_count[dtype] = np.dtype(base_type), count
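Below is a minimal usage sketch of the vector constructors introduced in the
pyopencl/array.py hunk above. The `cl_array` alias and the choice of the
`float4` flavor are illustrative only; the constructor names themselves come
from this patch::

    import pyopencl.array as cl_array

    # Fully specified components: the supported, non-deprecated form.
    v = cl_array.vec.make_float4(1, 2, 3, 4)

    # Pre-initialized flavors added by this patch.
    z = cl_array.vec.zeros_float4()      # every component 0
    o = cl_array.vec.ones_float4()       # every component 1
    f = cl_array.vec.filled_float4(7.5)  # every component 7.5

    # Passing fewer arguments than components still works for now, but it
    # emits a DeprecationWarning and pads the missing components with 0.
    w = cl_array.vec.make_float4(1, 2)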