Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Commits on Source (1008)
Showing with 565 additions and 745 deletions
# https://editorconfig.org/
# https://github.com/editorconfig/editorconfig-vim
# https://github.com/editorconfig/editorconfig-emacs
root = true
[*]
indent_style = space
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
[*.py]
indent_size = 4
[*.rst]
indent_size = 4
[*.cpp]
indent_size = 2
[*.hpp]
indent_size = 2
# There may be one in doc/
[Makefile]
indent_style = tab
# https://github.com/microsoft/vscode/issues/1679
[*.md]
trim_trailing_whitespace = false
version: 2
updates:
    # Set update schedule for GitHub Actions
    - package-ecosystem: "github-actions"
      directory: "/"
      schedule:
          interval: "weekly"

# vim: sw=4
name: Gitlab mirror
on:
    push:
        branches:
        - main

jobs:
    autopush:
        name: Automatic push to gitlab.tiker.net
        if: startsWith(github.repository, 'inducer/')
        runs-on: ubuntu-latest
        steps:
        - uses: actions/checkout@v4
        - run: |
            curl -L -O https://tiker.net/ci-support-v0
            . ./ci-support-v0
            mirror_github_to_gitlab
          env:
            GITLAB_AUTOPUSH_KEY: ${{ secrets.GITLAB_AUTOPUSH_KEY }}

# vim: sw=4
name: CI
on:
    push:
        branches:
        - main
    pull_request:
    schedule:
        - cron: '17 3 * * 0'

jobs:
    ruff:
        name: Ruff
        runs-on: ubuntu-latest
        steps:
        - uses: actions/checkout@v4
        - name: "Main Script"
          run: |
            pipx install ruff
            ruff check

    typos:
        name: Typos
        runs-on: ubuntu-latest
        steps:
        - uses: actions/checkout@v4
        - uses: crate-ci/typos@master

    mypy:
        name: Mypy
        runs-on: ubuntu-latest
        steps:
        - uses: actions/checkout@v4
        - uses: actions/setup-python@v5
          with:
            python-version: '3.x'
        - name: "Main Script"
          run: |
            curl -L -O https://tiker.net/ci-support-v0
            . ./ci-support-v0
            build_py_project_in_conda_env
            python -m pip install mypy
            mypy $(get_proj_name)

    pylint:
        name: Pylint
        runs-on: ubuntu-latest
        steps:
        - uses: actions/checkout@v4
        - name: "Main Script"
          run: |
            USE_CONDA_BUILD=1
            EXTRA_INSTALL="pyvisfile scipy matplotlib"
            curl -L -O https://tiker.net/ci-support-v0
            . ci-support-v0
            build_py_project
            run_pylint "$(basename $GITHUB_REPOSITORY)" examples/*.py test/*.py

    docs:
        name: Documentation
        runs-on: ubuntu-latest
        steps:
        - uses: actions/checkout@v4
        - name: "Main Script"
          run: |
            CONDA_ENVIRONMENT=.test-conda-env-py3.yml
            curl -L -O https://tiker.net/ci-support-v0
            . ci-support-v0
            build_py_project_in_conda_env
            build_docs

    pytest:
        name: Conda Pytest
        runs-on: ubuntu-latest
        steps:
        - uses: actions/checkout@v4
        - name: "Main Script"
          run: |
            grep -v symengine .test-conda-env-py3.yml > .test-conda-env.yml
            CONDA_ENVIRONMENT=.test-conda-env.yml
            curl -L -O https://gitlab.tiker.net/inducer/ci-support/raw/main/build-and-test-py-project-within-miniconda.sh
            . ./build-and-test-py-project-within-miniconda.sh

    pytest_symengine:
        name: Conda Pytest Symengine
        runs-on: ubuntu-latest
        steps:
        - uses: actions/checkout@v4
        - name: "Main Script"
          run: |
            curl -L -O https://gitlab.tiker.net/inducer/ci-support/raw/main/build-and-test-py-project-within-miniconda.sh
            . ./build-and-test-py-project-within-miniconda.sh

    examples:
        name: Conda Examples
        runs-on: ubuntu-latest
        steps:
        - uses: actions/checkout@v4
        - name: "Main Script"
          run: |
            grep -v symengine .test-conda-env-py3.yml > .test-conda-env.yml
            CONDA_ENVIRONMENT=.test-conda-env.yml
            curl -L -O https://tiker.net/ci-support-v0
            . ci-support-v0
            EXTRA_INSTALL="pyvisfile scipy"
            build_py_project_in_conda_env
            run_examples

    downstream_tests:
        strategy:
            matrix:
                downstream_project: [pytential]
        name: Tests for downstream project ${{ matrix.downstream_project }}
        runs-on: ubuntu-latest
        steps:
        - uses: actions/checkout@v4
        - name: "Main Script"
          env:
            DOWNSTREAM_PROJECT: ${{ matrix.downstream_project }}
          run: |
            curl -L -O https://tiker.net/ci-support-v0
            . ./ci-support-v0
            if [[ "$DOWNSTREAM_PROJECT" == "pytential" && "$GITHUB_HEAD_REF" == "e2p" ]]; then
                DOWNSTREAM_PROJECT=https://github.com/isuruf/pytential.git@e2p
            fi
            test_downstream "$DOWNSTREAM_PROJECT"

# vim: sw=4
@@ -20,3 +20,5 @@ doc/_build
 sumpy/_git_rev.py
 .asv
+*.vts
@@ -6,7 +6,7 @@
 #   - export PY_EXE=python3.5
 #   - export PYOPENCL_TEST=nvi:k40
 #   - export EXTRA_INSTALL="numpy mako"
-#   - curl -L -O -k https://gitlab.tiker.net/inducer/ci-support/raw/master/build-and-test-py-project.sh
+#   - curl -L -O https://gitlab.tiker.net/inducer/ci-support/raw/main/build-and-test-py-project.sh
 #   - ". ./build-and-test-py-project.sh"
 #   tags:
 #   - python3.5
@@ -17,15 +17,20 @@
 #     reports:
 #       junit: test/pytest.xml

-Python 2.7 POCL:
+stages:
+  - test
+  - deploy
+
+Pytest POCL:
+  stage: test
   script:
-  - export PY_EXE=python2.7
-  - export PYOPENCL_TEST=portable
-  - export EXTRA_INSTALL="pybind11 numpy mako"
-  - curl -L -O -k https://gitlab.tiker.net/inducer/ci-support/raw/master/build-and-test-py-project.sh
+  - export PY_EXE=python3
+  - export PYOPENCL_TEST=portable:pthread
+  - export EXTRA_INSTALL="pybind11 numpy mako mpi4py"
+  - curl -L -O https://gitlab.tiker.net/inducer/ci-support/raw/main/build-and-test-py-project.sh
   - ". ./build-and-test-py-project.sh"
   tags:
-  - python2.7
+  - python3
   - pocl
   except:
   - tags
@@ -33,83 +38,130 @@ Python 2.7 POCL:
     reports:
       junit: test/pytest.xml

-Python 3 POCL:
+Pytest Titan V:
+  stage: test
   script:
-  - export PY_EXE=python3
-  - export PYOPENCL_TEST=portable
-  - EXTRA_INSTALL="pybind11 numpy mako"
-  - curl -L -O -k https://gitlab.tiker.net/inducer/ci-support/raw/master/build-and-test-py-project.sh
+  - py_version=3
+  - export PYOPENCL_TEST=nvi:titan
+  - EXTRA_INSTALL="pybind11 numpy mako mpi4py"
+  - curl -L -O https://gitlab.tiker.net/inducer/ci-support/raw/main/build-and-test-py-project.sh
   - ". ./build-and-test-py-project.sh"
   tags:
   - python3
-  - pocl
+  - nvidia-titan-v
   except:
   - tags
+  allow_failure: True
   artifacts:
     reports:
      junit: test/pytest.xml

-Python 3 Titan X:
+Pytest Conda:
+  stage: test
   script:
-  - py_version=3
-  - export PYOPENCL_TEST=nvi:titan
-  - EXTRA_INSTALL="pybind11 numpy mako"
-  - curl -L -O -k https://gitlab.tiker.net/inducer/ci-support/raw/master/build-and-test-py-project.sh
-  - ". ./build-and-test-py-project.sh"
+  # Disable caching to ensure SymEngine code generation is exercised.
+  - export SUMPY_NO_CACHE=1
+  - export SUMPY_FORCE_SYMBOLIC_BACKEND=symengine
+  - export PYOPENCL_TEST=portable:pthread
+  - curl -L -O https://gitlab.tiker.net/inducer/ci-support/raw/main/build-and-test-py-project-within-miniconda.sh
+  - ". ./build-and-test-py-project-within-miniconda.sh"
   tags:
-  - python3
-  - nvidia-titan-x
+  - large-node
   except:
   - tags
-  allow_failure: True
   artifacts:
     reports:
      junit: test/pytest.xml

-Python 3.6 Conda:
+Pytest POCL Titan V:
+  stage: test
   script:
   # Disable caching to ensure SymEngine code generation is exercised.
   - export SUMPY_NO_CACHE=1
   - export SUMPY_FORCE_SYMBOLIC_BACKEND=symengine
-  - CONDA_ENVIRONMENT=.test-conda-env-py3.yml
-  - curl -L -O -k https://gitlab.tiker.net/inducer/ci-support/raw/master/build-and-test-py-project-within-miniconda.sh
+  - export PYOPENCL_TEST=portable:titan
+  - curl -L -O https://gitlab.tiker.net/inducer/ci-support/raw/main/build-and-test-py-project-within-miniconda.sh
   - ". ./build-and-test-py-project-within-miniconda.sh"
   tags:
-  - linux
+  - nvidia-titan-v
   except:
   - tags
   artifacts:
     reports:
      junit: test/pytest.xml

+Examples Conda:
+  stage: test
+  script: |
+    grep -v symengine .test-conda-env-py3.yml > .test-conda-env.yml
+    CONDA_ENVIRONMENT=.test-conda-env.yml
+    curl -L -O https://tiker.net/ci-support-v0
+    . ci-support-v0
+    EXTRA_INSTALL="pyvisfile scipy"
+    build_py_project_in_conda_env
+    run_examples
+  tags:
+  - large-node
+  except:
+  - tags
+
 Documentation:
+  stage: deploy
+  script: |
+    EXTRA_INSTALL="pybind11 numpy mako"
+    curl -L -O https://tiker.net/ci-support-v0
+    . ci-support-v0
+    build_py_project_in_venv
+    build_docs
+    build_asv_html
+    maybe_upload_docs
+  tags:
+  - linux
+
+Ruff:
+  stage: test
   script:
-  - EXTRA_INSTALL="pybind11 numpy mako"
-  - curl -L -O -k https://gitlab.tiker.net/inducer/ci-support/raw/master/build-docs.sh
-  - ". ./build-docs.sh"
+  - pipx install ruff
+  - ruff check
   tags:
-  - python3
-  only:
-  - master
+  - docker-runner
+  except:
+  - tags

-Flake8:
+Pylint:
   script:
-  - curl -L -O -k https://gitlab.tiker.net/inducer/ci-support/raw/master/prepare-and-run-flake8.sh
-  - ". ./prepare-and-run-flake8.sh sumpy test"
+  - EXTRA_INSTALL="pybind11 numpy mako scipy matplotlib pyvisfile mpi4py"
+  - curl -L -O https://tiker.net/ci-support-v0
+  - . ci-support-v0
+  - build_py_project
+  - run_pylint "$(get_proj_name)" examples/*.py test/*.py
   tags:
   - python3
   except:
   - tags

-Benchmarks:
-  script:
-  - CONDA_ENVIRONMENT=.test-conda-env-py3.yml
-  - PROJECT=sumpy
-  - PYOPENCL_TEST=portable
-  - curl -L -O -k https://gitlab.tiker.net/inducer/ci-support/raw/master/build-and-benchmark-py-project.sh
-  - ". ./build-and-benchmark-py-project.sh"
+Mypy:
+  script: |
+    curl -L -O https://tiker.net/ci-support-v0
+    . ./ci-support-v0
+    build_py_project_in_venv
+    python -m pip install mypy
+    mypy $(get_proj_name)
   tags:
-  - linux
-  - benchmark
+  - python3
   except:
   - tags
+
+Downstream:
+  parallel:
+    matrix:
+    - DOWNSTREAM_PROJECT: [pytential]
+  tags:
+  - large-node
+  - "docker-runner"
+  script: |
+    curl -L -O https://tiker.net/ci-support-v0
+    . ./ci-support-v0
+    test_downstream "$DOWNSTREAM_PROJECT"
+
+# vim: sw=2
- arg: py-version
  val: '3.10'

- arg: extension-pkg-whitelist
  val: mayavi

- arg: ignored-modules
  val:
  - symengine
 name: test-conda-env-py3
 channels:
-- inducer
 - conda-forge
-- defaults
+- nodefaults

 dependencies:
 - git
-- conda-forge::numpy
-- conda-forge::sympy
+- numpy
+- sympy
 - pocl
+- pocl-cuda
 - islpy
 - pyopencl
 - python=3
-- symengine=0.3.0
-- python-symengine=0.3.0
-- pip
-- pip:
-  - git+https://gitlab.tiker.net/inducer/boxtree
-  - git+https://github.com/inducer/pymbolic
-  - git+https://github.com/inducer/loopy
+- python-symengine
+- pyfmmlib
+- pyvkfft
+- mpi4py
+
+# This is intended to prevent conda from selecting 'external' (i.e. empty) builds
+# of OpenMPI to satisfy the MPI dependency of mpi4py. It did so in May 2024, leading
+# to confusing failures saying
+# 'libmpi.so.40: cannot open shared object file: No such file or directory'.
+# https://github.com/conda-forge/openmpi-feedstock/issues/153
+# https://conda-forge.org/docs/user/tipsandtricks/#using-external-message-passing-interface-mpi-libraries
+- openmpi>=5=h*
cff-version: 1.2.0
message: "If you use this software, please cite it as below."
authors:
- family-names: "Kloeckner"
  given-names: "Andreas"
  orcid: "https://orcid.org/0000-0003-1228-519X"
- family-names: Fernando
  given-names: Isuru
- family-names: Wala
  given-names: Matt
- family-names: Fikl
  given-names: Alexandru
- family-names: Beams
  given-names: Natalie
- family-names: Gao
  given-names: Hao
title: "sumpy"
version: 2022.1
doi: 10.5281/zenodo.7349787
date-released: 2022-11-23
url: "https://github.com/inducer/sumpy"
license: MIT
include test/*.py
include examples/*.py
include doc/*.rst
include doc/Makefile
include doc/*.py
include doc/images/*.png
include doc/_static/*.css
include doc/_templates/*.html
include README.rst
include requirements.txt
 sumpy: n-body kernels and translation operators
 ===============================================

-.. image:: https://gitlab.tiker.net/inducer/sumpy/badges/master/pipeline.svg
+.. image:: https://gitlab.tiker.net/inducer/sumpy/badges/main/pipeline.svg
     :alt: Gitlab Build Status
-    :target: https://gitlab.tiker.net/inducer/sumpy/commits/master
-.. image:: https://dev.azure.com/ak-spam/inducer/_apis/build/status/inducer.sumpy?branchName=master
-    :alt: Azure Build Status
-    :target: https://dev.azure.com/ak-spam/inducer/_build/latest?definitionId=17&branchName=master
-.. image:: https://badge.fury.io/py/sumpy.png
+    :target: https://gitlab.tiker.net/inducer/sumpy/commits/main
+.. image:: https://github.com/inducer/sumpy/actions/workflows/ci.yml/badge.svg
+    :alt: Github Build Status
+    :target: https://github.com/inducer/sumpy/actions/workflows/ci.yml
+.. image:: https://badge.fury.io/py/sumpy.svg
     :alt: Python Package Index Release Page
     :target: https://pypi.org/project/sumpy/
+.. image:: https://zenodo.org/badge/1856097.svg
+    :alt: Zenodo DOI for latest release
+    :target: https://zenodo.org/badge/latestdoi/1856097

-Sumpy is mainly a 'scaffolding' package for Fast Multipole and quadrature methods.
-If you're building one of those and need code generation for the required Multipole
-and local expansions, come right on in. Together with boxtree, there is a full,
+sumpy is mainly a 'scaffolding' package for Fast Multipole and quadrature methods.
+If you're building one of those and need code generation for the required multipole
+and local expansions, come right on in. Together with ``boxtree``, there is a full,
 symbolically kernel-independent FMM implementation here.

-Sumpy relies on
+It relies on

-* `numpy <http://pypi.python.org/pypi/numpy>`_ for arrays
-* `boxtree <http://pypi.python.org/pypi/boxtree>`_ for FMM tree building
-* `sumpy <http://pypi.python.org/pypi/sumpy>`_ for expansions and analytical routines
-* `loopy <http://pypi.python.org/pypi/loo.py>`_ for fast array operations
-* `pytest <http://pypi.python.org/pypi/pytest>`_ for automated testing
+* `boxtree <https://pypi.org/project/boxtree>`__ for FMM tree building
+* `loopy <https://pypi.org/project/loopy>`__ for fast array operations
+* `pytest <https://pypi.org/project/pytest>`__ for automated testing

 and, indirectly,

-* `PyOpenCL <http://pypi.python.org/pypi/pyopencl>`_ as computational infrastructure
-
-PyOpenCL is likely the only package you'll have to install
-by hand, all the others will be installed automatically.
+* `PyOpenCL <https://pypi.org/project/pyopencl>`__ as computational infrastructure

 Resources:

-* `documentation <http://documen.tician.de/sumpy>`_
-* `source code via git <http://github.com/inducer/sumpy>`_
-
-If you can see inside the UIUC firewall, you may browse
-`benchmark results <http://koelsch.d.tiker.net/benchmarks/asv/sumpy/>`_.
+* `documentation <https://documen.tician.de/sumpy>`__
+* `source code via git <https://github.com/inducer/sumpy>`__
+* `benchmarks <https://documen.tician.de/sumpy/benchmarks>`__
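As an editorial aside on the "code generation for the required multipole and local expansions" that the README describes: the sketch below mirrors the benchmark and notebook code further down in this diff. It is a minimal illustration, not part of the repository; the coefficient names are placeholders, and the `translate_from` signature has changed across sumpy versions, hence the hedged fallback.

``` python
import sumpy.symbolic as sym
from sumpy.assignment_collection import SymbolicAssignmentCollection
from sumpy.expansion.local import VolumeTaylorLocalExpansion
from sumpy.expansion.multipole import VolumeTaylorMultipoleExpansion
from sumpy.kernel import LaplaceKernel

# Symbolic multipole and local Taylor expansions of the 2D Laplace kernel.
knl = LaplaceKernel(2)
mpole = VolumeTaylorMultipoleExpansion(knl, order=3)
local = VolumeTaylorLocalExpansion(knl, order=3)

# Placeholder multipole coefficients and a symbolic translation vector.
src_coeffs = [sym.Symbol(f"src_coeff{i}") for i in range(len(mpole))]
dvec = sym.make_sym_vector("d", knl.dim)

sac = SymbolicAssignmentCollection()
try:
    # Newer interface: translate_from also takes the assignment collection.
    local_coeffs = local.translate_from(mpole, src_coeffs, 1, dvec, 1, sac)
except TypeError:
    # Older interface without the assignment collection argument.
    local_coeffs = local.translate_from(mpole, src_coeffs, 1, dvec, 1)

print(f"{len(local_coeffs)} local coefficients from {len(mpole)} multipole coefficients")
```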
@@ -20,7 +20,7 @@
     // List of branches to benchmark. If not provided, defaults to "master"
     // (for git) or "default" (for mercurial).
-    // "branches": ["master"], // for git
+    "branches": ["main"], // for git
     // "branches": ["default"], // for mercurial

     // The DVCS being used. If not set, it will be automatically
@@ -41,7 +41,7 @@
     //"install_timeout": 600,

     // the base URL to show a commit for the project.
-    "show_commit_url": "http://gitlab.tiker.net/inducer/sumpy/commits/",
+    "show_commit_url": "https://gitlab.tiker.net/inducer/sumpy/commits/",

     // The Pythons you'd like to test against. If not provided, defaults
     // to the current version of Python used to run `asv`.
@@ -49,7 +49,7 @@
     // The list of conda channel names to be searched for benchmark
     // dependency packages in the specified order
-    "conda_channels": ["conda-forge", "defaults"],
+    "conda_channels": ["conda-forge"],

     // The matrix of dependencies to test. Each key is the name of a
     // package (in PyPI) and the values are version numbers. An empty
@@ -68,13 +68,15 @@
     // },
     "matrix": {
         "numpy" : [""],
-        "sympy" : ["1.0"],
+        "sympy" : [""],
         "pyopencl" : [""],
         "islpy" : [""],
         "pocl" : [""],
+        "pyvkfft": [""],
         "pip+git+https://github.com/inducer/pymbolic#egg=pymbolic": [""],
         "pip+git+https://gitlab.tiker.net/inducer/boxtree#egg=boxtree": [""],
         "pip+git+https://github.com/inducer/loopy#egg=loopy": [""],
+        "pip" : [""],
     },

     // Combinations of libraries/python versions can be excluded/included
jobs:
-
    job: 'Python3'
    pool:
        vmImage: 'ubuntu-latest'
    steps:
    -
        script: |
            set -e
            CONDA_ENVIRONMENT=.test-conda-env-py3.yml
            curl -L -O -k https://gitlab.tiker.net/inducer/ci-support/raw/master/build-and-test-py-project-within-miniconda.sh
            . ./build-and-test-py-project-within-miniconda.sh
        displayName: 'Pytest Conda'
    -
        task: PublishTestResults@2
        inputs:
            testResultsFormat: 'JUnit'
            testResultsFiles: 'test/pytest.xml'
-
    job: 'Flake8'
    pool:
        vmImage: 'ubuntu-latest'
    strategy:
        matrix:
            Python37:
                python.version: '3.7'
    steps:
    -
        task: UsePythonVersion@0
        inputs:
            versionSpec: '$(python.version)'
    -
        script: |
            set -e
            curl -L -O -k https://gitlab.tiker.net/inducer/ci-support/raw/master/prepare-and-run-flake8.sh
            . ./prepare-and-run-flake8.sh sumpy test
        displayName: 'Flake8'
+from __future__ import annotations
+
+import logging
+
 import numpy as np
-import pytest
-
-import pyopencl as cl

 from pyopencl.tools import (  # noqa
-        pytest_generate_tests_for_pyopencl as pytest_generate_tests)
-
-from sumpy.expansion.multipole import (
-        VolumeTaylorMultipoleExpansion, H2DMultipoleExpansion,
-        VolumeTaylorMultipoleExpansionBase,
-        LaplaceConformingVolumeTaylorMultipoleExpansion,
-        HelmholtzConformingVolumeTaylorMultipoleExpansion)
+    pytest_generate_tests_for_pyopencl as pytest_generate_tests,
+)

 from sumpy.expansion.local import (
-        VolumeTaylorLocalExpansion, H2DLocalExpansion,
-        LaplaceConformingVolumeTaylorLocalExpansion,
-        HelmholtzConformingVolumeTaylorLocalExpansion)
+    H2DLocalExpansion,
+    LinearPDEConformingVolumeTaylorLocalExpansion,
+    VolumeTaylorLocalExpansion,
+)
+from sumpy.expansion.multipole import (
+    H2DMultipoleExpansion,
+    LinearPDEConformingVolumeTaylorMultipoleExpansion,
+    VolumeTaylorMultipoleExpansion,
+)
+from sumpy.kernel import HelmholtzKernel, LaplaceKernel

-from sumpy.kernel import (LaplaceKernel, HelmholtzKernel, AxisTargetDerivative,
-        DirectionalSourceDerivative)
-
-import logging
 logger = logging.getLogger(__name__)

-import sympy
-import six
 import pymbolic.mapper.flop_counter

 import sumpy.symbolic as sym
 from sumpy.assignment_collection import SymbolicAssignmentCollection
 from sumpy.codegen import to_loopy_insns


 class Param:
     def __init__(self, dim, order):
         self.dim = dim
         self.order = order

     def __repr__(self):
-        return "{}D_order_{}".format(self.dim, self.order)
+        return f"{self.dim}D_order_{self.order}"


 class TranslationBenchmarkSuite:
-    params = [
+    params = (
         Param(2, 10),
         Param(2, 15),
         Param(2, 20),
         Param(3, 5),
         Param(3, 10),
-    ]
-    param_names = ['order']
+    )
+    param_names = ("order",)

     def setup(self, param):
         logging.basicConfig(level=logging.INFO)
-        np.random.seed(17)
+        np.random.seed(17)  # noqa: NPY002
         if self.__class__ == TranslationBenchmarkSuite:
             raise NotImplementedError
         mpole_expn_class = self.mpole_expn_class
@@ -64,66 +65,71 @@ class TranslationBenchmarkSuite:
         m_expn = self.mpole_expn_class(knl, order=param.order)
         l_expn = self.local_expn_class(knl, order=param.order)

-        src_coeff_exprs = [sym.Symbol("src_coeff%d" % i)
-                for i in range(len(m_expn))]
+        src_coeff_exprs = [
+            sym.Symbol(f"src_coeff{i}")
+            for i in range(len(m_expn))]
         dvec = sym.make_sym_vector("d", knl.dim)
         src_rscale = sym.Symbol("src_rscale")
         tgt_rscale = sym.Symbol("tgt_rscale")
-        result = l_expn.translate_from(m_expn, src_coeff_exprs, src_rscale,
-                dvec, tgt_rscale)
         sac = SymbolicAssignmentCollection()
+        try:
+            result = l_expn.translate_from(m_expn, src_coeff_exprs, src_rscale,
+                    dvec, tgt_rscale, sac)
+        except TypeError:
+            # Support older interface to make it possible to compare
+            # in CI run
+            result = l_expn.translate_from(m_expn, src_coeff_exprs, src_rscale,
+                    dvec, tgt_rscale)
         for i, expr in enumerate(result):
-            sac.assign_unique("coeff%d" % i, expr)
+            sac.assign_unique(f"coeff{i}", expr)
         sac.run_global_cse()
-        insns = to_loopy_insns(six.iteritems(sac.assignments))
+        insns = to_loopy_insns(sac.assignments.items())
         counter = pymbolic.mapper.flop_counter.CSEAwareFlopCounter()

-        return sum([counter.rec(insn.expression)+1 for insn in insns])
+        return sum(counter.rec(insn.expression)+1 for insn in insns)

     track_m2l_op_count.unit = "ops"
-    track_m2l_op_count.timeout = 200.0
+    track_m2l_op_count.timeout = 300.0


 class LaplaceVolumeTaylorTranslation(TranslationBenchmarkSuite):
     knl = LaplaceKernel
     local_expn_class = VolumeTaylorLocalExpansion
     mpole_expn_class = VolumeTaylorMultipoleExpansion
-    params = [
+    params = (
         Param(2, 10),
         Param(3, 5),
-    ]
+    )


 class LaplaceConformingVolumeTaylorTranslation(TranslationBenchmarkSuite):
     knl = LaplaceKernel
-    local_expn_class = LaplaceConformingVolumeTaylorLocalExpansion
-    mpole_expn_class = LaplaceConformingVolumeTaylorMultipoleExpansion
+    local_expn_class = LinearPDEConformingVolumeTaylorLocalExpansion
+    mpole_expn_class = LinearPDEConformingVolumeTaylorMultipoleExpansion


 class HelmholtzVolumeTaylorTranslation(TranslationBenchmarkSuite):
     knl = HelmholtzKernel
     local_expn_class = VolumeTaylorLocalExpansion
     mpole_expn_class = VolumeTaylorMultipoleExpansion
-    params = [
+    params = (
         Param(2, 10),
         Param(3, 5),
-    ]
+    )


 class HelmholtzConformingVolumeTaylorTranslation(TranslationBenchmarkSuite):
     knl = HelmholtzKernel
-    local_expn_class = HelmholtzConformingVolumeTaylorLocalExpansion
-    mpole_expn_class = HelmholtzConformingVolumeTaylorMultipoleExpansion
+    local_expn_class = LinearPDEConformingVolumeTaylorLocalExpansion
+    mpole_expn_class = LinearPDEConformingVolumeTaylorMultipoleExpansion


 class Helmholtz2DTranslation(TranslationBenchmarkSuite):
     knl = HelmholtzKernel
     local_expn_class = H2DLocalExpansion
     mpole_expn_class = H2DMultipoleExpansion
-    params = [
+    params = (
         Param(2, 10),
         Param(2, 15),
         Param(2, 20),
-    ]
+    )
%% Cell type:code id: tags:

``` python
-import pyopencl as cl
-import sumpy.toys as t
-import numpy as np
-import numpy.linalg as la
-import matplotlib.pyplot as plt
-from sumpy.visualization import FieldPlotter
-from pytools import add_tuples
+from __future__ import annotations

 from sumpy.expansion.local import VolumeTaylorLocalExpansion
-from sumpy.expansion.multipole import (VolumeTaylorMultipoleExpansion,
+from sumpy.expansion.multipole import (
     LaplaceConformingVolumeTaylorMultipoleExpansion,
-    HelmholtzConformingVolumeTaylorMultipoleExpansion)
-
-from sumpy.kernel import (YukawaKernel, HelmholtzKernel, LaplaceKernel)
+    LinearPDEConformingVolumeTaylorMultipoleExpansion,
+    VolumeTaylorMultipoleExpansion,
+)
+from sumpy.kernel import HelmholtzKernel, LaplaceKernel, YukawaKernel  # noqa: F401
 from sumpy.symbolic import make_sym_vector

 order = 2
 dim = 2

 if 0:
     knl = LaplaceKernel(dim)
     extra_kernel_kwargs = {}
     mpole_expn_reduced_class = LaplaceConformingVolumeTaylorMultipoleExpansion
 else:
     helm_k = 1.2
     knl = HelmholtzKernel(dim)
-    extra_kernel_kwargs={"k": helm_k}
-    mpole_expn_reduced_class = HelmholtzConformingVolumeTaylorMultipoleExpansion
+    extra_kernel_kwargs = {"k": helm_k}
+    mpole_expn_reduced_class = LinearPDEConformingVolumeTaylorMultipoleExpansion

 mpole_expn_reduced = mpole_expn_reduced_class(knl, order)
 mpole_expn = VolumeTaylorMultipoleExpansion(knl, order)
 local_expn = VolumeTaylorLocalExpansion(knl, order)
```

%% Cell type:code id: tags:

``` python
-from pytools import factorial
-
-def mi_factorial(n):
-    return np.prod([factorial(n1) for n1 in n])
-```
-
-%% Cell type:code id: tags:
-
-``` python
-reduced_wrangler = mpole_expn_reduced.derivative_wrangler
-full_wrangler = mpole_expn.derivative_wrangler
+reduced_wrangler = mpole_expn_reduced.expansion_terms_wrangler
+full_wrangler = mpole_expn.expansion_terms_wrangler

-reduced_derivatives = list(make_sym_vector("deriv", len(reduced_wrangler.stored_identifiers)))
-full_derivatives = reduced_wrangler.get_full_kernel_derivatives_from_stored(reduced_derivatives, 1)
+reduced_derivatives = list(
+    make_sym_vector("deriv", len(reduced_wrangler.stored_identifiers))
+)
+full_derivatives = reduced_wrangler.get_full_kernel_derivatives_from_stored(
+    reduced_derivatives, 1
+)

 print(reduced_derivatives)
 print(full_derivatives)
```

%% Cell type:code id: tags:

``` python
-full_coeffs = list(make_sym_vector("coeff", len(reduced_wrangler.get_full_coefficient_identifiers())))
-
-reduced_coeffs = reduced_wrangler.get_stored_mpole_coefficients_from_full(full_mpole_coefficients=full_coeffs, rscale=1)
+full_coeffs = list(
+    make_sym_vector("coeff", len(reduced_wrangler.get_full_coefficient_identifiers()))
+)
+reduced_coeffs = reduced_wrangler.get_stored_mpole_coefficients_from_full(
+    full_mpole_coefficients=full_coeffs, rscale=1
+)

 print(full_coeffs)
 print(reduced_coeffs)
```

%% Cell type:code id: tags:

``` python
 dvec = make_sym_vector("d", dim)
-translated_reduce_coeffs = mpole_expn_reduced.translate_from(mpole_expn_reduced, reduced_coeffs, 1, dvec, 1)
+translated_reduce_coeffs = mpole_expn_reduced.translate_from(
+    mpole_expn_reduced, reduced_coeffs, 1, dvec, 1
+)
 translated_full_coeffs = mpole_expn.translate_from(mpole_expn, full_coeffs, 1, dvec, 1)

 translated_full_coeffs
```

%% Cell type:code id: tags:

``` python
-eval_reduced = sum(a*b for a, b in zip(translated_reduce_coeffs, reduced_derivatives))
-eval_full = sum(a*b for a, b in zip(translated_full_coeffs, full_derivatives))
+eval_reduced = sum(a * b for a, b in zip(translated_reduce_coeffs, reduced_derivatives,
+                                         strict=True))
+eval_full = sum(a * b for a, b in zip(translated_full_coeffs, full_derivatives,
+                                      strict=True))

-(eval_full-eval_reduced).simplify()
+(eval_full - eval_reduced).simplify()
```
@@ -9,7 +9,7 @@ BUILDDIR = _build

 # User-friendly check for sphinx-build
 ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
-$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don\'t have Sphinx installed, grab it from http://sphinx-doc.org/)
+$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don\'t have Sphinx installed, grab it from https://sphinx-doc.org/)
 endif

 # Internal variables.
from importlib import metadata
from urllib.request import urlopen


_conf_url = \
    "https://raw.githubusercontent.com/inducer/sphinxconfig/main/sphinxconfig.py"
with urlopen(_conf_url) as _inf:
    exec(compile(_inf.read(), _conf_url, "exec"), globals())

copyright = "2016-21, sumpy contributors"
release = metadata.version("sumpy")
version = ".".join(release.split(".")[:2])

intersphinx_mapping = {
    "arraycontext": ("https://documen.tician.de/arraycontext/", None),
    "boxtree": ("https://documen.tician.de/boxtree/", None),
    "loopy": ("https://documen.tician.de/loopy/", None),
    "matplotlib": ("https://matplotlib.org/stable/", None),
    "numpy": ("https://numpy.org/doc/stable/", None),
    "pymbolic": ("https://documen.tician.de/pymbolic/", None),
    "pyopencl": ("https://documen.tician.de/pyopencl/", None),
    "pytential": ("https://documen.tician.de/pytential/", None),
    "python": ("https://docs.python.org/3/", None),
    "pytools": ("https://documen.tician.de/pytools/", None),
    "sympy": ("https://docs.sympy.org/latest/", None),
}

nitpick_ignore_regex = [
    ["py:class", r"symengine\.(.+)"],  # :cry:
]

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# sumpy documentation build configuration file, created by
# sphinx-quickstart on Wed Apr 6 14:00:41 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

import sys
import os

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.doctest',
    'sphinx.ext.intersphinx',
    'sphinx.ext.coverage',
    'sphinx.ext.mathjax',
    'sphinx.ext.viewcode',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'sumpy'
copyright = '2016, Andreas Kloeckner'
author = 'Andreas Kloeckner'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '2016.1'
# The full version, including alpha/beta/rc tags.
release = '2016.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
html_theme = "alabaster"
html_theme_options = {
    "extra_nav_links": {
        "🚀 Github": "https://github.com/inducer/sumpy",
        "💾 Download Releases": "https://pypi.python.org/pypi/sumpy",
    }
}

html_sidebars = {
    '**': [
        'about.html',
        'navigation.html',
        'relations.html',
        'searchbox.html',
    ]
}
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#html_title = 'sumpy v2016.1'
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr', 'zh'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'sumpydoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #'preamble': '',

    # Latex figure (float) alignment
    #'figure_align': 'htbp',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'sumpy.tex', 'sumpy Documentation',
     'Andreas Kloeckner', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'sumpy', 'sumpy Documentation',
     [author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'sumpy', 'sumpy Documentation',
     author, 'sumpy', 'One line description of project.',
     'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
intersphinx_mapping = {
    'http://docs.python.org/': None,
    'http://docs.scipy.org/doc/numpy/': None,
    'http://documen.tician.de/modepy/': None,
    'http://documen.tician.de/pyopencl/': None,
    'http://documen.tician.de/pymbolic/': None,
    'http://documen.tician.de/loopy/': None,
    'http://documen.tician.de/pytential/': None,
    'http://documen.tician.de/boxtree/': None,
}
-Differentiation and Evaluation
-==============================
+Working with Values of Potentials
+=================================

 Visualization of Potentials
 ---------------------------
@@ -11,7 +11,7 @@ Differentiation of Potentials

 .. automodule:: sumpy.point_calculus

-Support for Numerical Experiments with Expansions
--------------------------------------------------
+Support for Numerical Experiments with Expansions ("Expansion toys")
+--------------------------------------------------------------------

 .. automodule:: sumpy.toys
@@ -3,6 +3,11 @@ Expansions

 .. automodule:: sumpy.expansion

+Differential Operators
+----------------------
+
+.. automodule:: sumpy.expansion.diff_op
+
 Local Expansions
 ----------------
@@ -12,3 +17,13 @@ Multipole Expansions
 --------------------

 .. automodule:: sumpy.expansion.multipole
+
+Multipole to Local Translations
+-------------------------------
+
+.. automodule:: sumpy.expansion.m2l
+
+Estimating Expansion Orders
+---------------------------
+
+.. automodule:: sumpy.expansion.level_to_order