Compare revisions
Commits on Source (1008)
Showing 565 additions and 745 deletions
# https://editorconfig.org/
# https://github.com/editorconfig/editorconfig-vim
# https://github.com/editorconfig/editorconfig-emacs
root = true
[*]
indent_style = space
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
[*.py]
indent_size = 4
[*.rst]
indent_size = 4
[*.cpp]
indent_size = 2
[*.hpp]
indent_size = 2
# There may be one in doc/
[Makefile]
indent_style = tab
# https://github.com/microsoft/vscode/issues/1679
[*.md]
trim_trailing_whitespace = false
\ No newline at end of file
version: 2
updates:
# Set update schedule for GitHub Actions
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly"
# vim: sw=4
name: Gitlab mirror
on:
push:
branches:
- main
jobs:
autopush:
name: Automatic push to gitlab.tiker.net
if: startsWith(github.repository, 'inducer/')
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- run: |
curl -L -O https://tiker.net/ci-support-v0
. ./ci-support-v0
mirror_github_to_gitlab
env:
GITLAB_AUTOPUSH_KEY: ${{ secrets.GITLAB_AUTOPUSH_KEY }}
# vim: sw=4
name: CI
on:
push:
branches:
- main
pull_request:
schedule:
- cron: '17 3 * * 0'
jobs:
ruff:
name: Ruff
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: "Main Script"
run: |
pipx install ruff
ruff check
typos:
name: Typos
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: crate-ci/typos@master
mypy:
name: Mypy
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
-
uses: actions/setup-python@v5
with:
python-version: '3.x'
- name: "Main Script"
run: |
curl -L -O https://tiker.net/ci-support-v0
. ./ci-support-v0
build_py_project_in_conda_env
python -m pip install mypy
mypy $(get_proj_name)
pylint:
name: Pylint
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: "Main Script"
run: |
USE_CONDA_BUILD=1
EXTRA_INSTALL="pyvisfile scipy matplotlib"
curl -L -O https://tiker.net/ci-support-v0
. ci-support-v0
build_py_project
run_pylint "$(basename $GITHUB_REPOSITORY)" examples/*.py test/*.py
docs:
name: Documentation
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: "Main Script"
run: |
CONDA_ENVIRONMENT=.test-conda-env-py3.yml
curl -L -O https://tiker.net/ci-support-v0
. ci-support-v0
build_py_project_in_conda_env
build_docs
pytest:
name: Conda Pytest
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: "Main Script"
run: |
grep -v symengine .test-conda-env-py3.yml > .test-conda-env.yml
CONDA_ENVIRONMENT=.test-conda-env.yml
curl -L -O https://gitlab.tiker.net/inducer/ci-support/raw/main/build-and-test-py-project-within-miniconda.sh
. ./build-and-test-py-project-within-miniconda.sh
pytest_symengine:
name: Conda Pytest Symengine
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: "Main Script"
run: |
curl -L -O https://gitlab.tiker.net/inducer/ci-support/raw/main/build-and-test-py-project-within-miniconda.sh
. ./build-and-test-py-project-within-miniconda.sh
examples:
name: Conda Examples
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: "Main Script"
run: |
grep -v symengine .test-conda-env-py3.yml > .test-conda-env.yml
CONDA_ENVIRONMENT=.test-conda-env.yml
curl -L -O https://tiker.net/ci-support-v0
. ci-support-v0
EXTRA_INSTALL="pyvisfile scipy"
build_py_project_in_conda_env
run_examples
downstream_tests:
strategy:
matrix:
downstream_project: [pytential]
name: Tests for downstream project ${{ matrix.downstream_project }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: "Main Script"
env:
DOWNSTREAM_PROJECT: ${{ matrix.downstream_project }}
run: |
curl -L -O https://tiker.net/ci-support-v0
. ./ci-support-v0
if [[ "$DOWNSTREAM_PROJECT" == "pytential" && "$GITHUB_HEAD_REF" == "e2p" ]]; then
DOWNSTREAM_PROJECT=https://github.com/isuruf/pytential.git@e2p
fi
test_downstream "$DOWNSTREAM_PROJECT"
# vim: sw=4
......@@ -20,3 +20,5 @@ doc/_build
sumpy/_git_rev.py
.asv
*.vts
......@@ -6,7 +6,7 @@
# - export PY_EXE=python3.5
# - export PYOPENCL_TEST=nvi:k40
# - export EXTRA_INSTALL="numpy mako"
# - curl -L -O -k https://gitlab.tiker.net/inducer/ci-support/raw/master/build-and-test-py-project.sh
# - curl -L -O https://gitlab.tiker.net/inducer/ci-support/raw/main/build-and-test-py-project.sh
# - ". ./build-and-test-py-project.sh"
# tags:
# - python3.5
......@@ -17,15 +17,20 @@
# reports:
# junit: test/pytest.xml
Python 2.7 POCL:
stages:
- test
- deploy
Pytest POCL:
stage: test
script:
- export PY_EXE=python2.7
- export PYOPENCL_TEST=portable
- export EXTRA_INSTALL="pybind11 numpy mako"
- curl -L -O -k https://gitlab.tiker.net/inducer/ci-support/raw/master/build-and-test-py-project.sh
- export PY_EXE=python3
- export PYOPENCL_TEST=portable:pthread
- export EXTRA_INSTALL="pybind11 numpy mako mpi4py"
- curl -L -O https://gitlab.tiker.net/inducer/ci-support/raw/main/build-and-test-py-project.sh
- ". ./build-and-test-py-project.sh"
tags:
- python2.7
- python3
- pocl
except:
- tags
......@@ -33,83 +38,130 @@ Python 2.7 POCL:
reports:
junit: test/pytest.xml
Python 3 POCL:
Pytest Titan V:
stage: test
script:
- export PY_EXE=python3
- export PYOPENCL_TEST=portable
- export EXTRA_INSTALL="pybind11 numpy mako"
- curl -L -O -k https://gitlab.tiker.net/inducer/ci-support/raw/master/build-and-test-py-project.sh
- py_version=3
- export PYOPENCL_TEST=nvi:titan
- EXTRA_INSTALL="pybind11 numpy mako mpi4py"
- curl -L -O https://gitlab.tiker.net/inducer/ci-support/raw/main/build-and-test-py-project.sh
- ". ./build-and-test-py-project.sh"
tags:
- python3
- pocl
- nvidia-titan-v
except:
- tags
allow_failure: True
artifacts:
reports:
junit: test/pytest.xml
Python 3 Titan X:
Pytest Conda:
stage: test
script:
- py_version=3
- export PYOPENCL_TEST=nvi:titan
- EXTRA_INSTALL="pybind11 numpy mako"
- curl -L -O -k https://gitlab.tiker.net/inducer/ci-support/raw/master/build-and-test-py-project.sh
- ". ./build-and-test-py-project.sh"
# Disable caching to ensure SymEngine code generation is exercised.
- export SUMPY_NO_CACHE=1
- export SUMPY_FORCE_SYMBOLIC_BACKEND=symengine
- export PYOPENCL_TEST=portable:pthread
- curl -L -O https://gitlab.tiker.net/inducer/ci-support/raw/main/build-and-test-py-project-within-miniconda.sh
- ". ./build-and-test-py-project-within-miniconda.sh"
tags:
- python3
- nvidia-titan-x
- large-node
except:
- tags
allow_failure: True
artifacts:
reports:
junit: test/pytest.xml
Python 3.6 Conda:
Pytest POCL Titan V:
stage: test
script:
# Disable caching to ensure SymEngine code generation is exercised.
- export SUMPY_NO_CACHE=1
- export SUMPY_FORCE_SYMBOLIC_BACKEND=symengine
- CONDA_ENVIRONMENT=.test-conda-env-py3.yml
- curl -L -O -k https://gitlab.tiker.net/inducer/ci-support/raw/master/build-and-test-py-project-within-miniconda.sh
- export PYOPENCL_TEST=portable:titan
- curl -L -O https://gitlab.tiker.net/inducer/ci-support/raw/main/build-and-test-py-project-within-miniconda.sh
- ". ./build-and-test-py-project-within-miniconda.sh"
tags:
- linux
- nvidia-titan-v
except:
- tags
artifacts:
reports:
junit: test/pytest.xml
Examples Conda:
stage: test
script: |
grep -v symengine .test-conda-env-py3.yml > .test-conda-env.yml
CONDA_ENVIRONMENT=.test-conda-env.yml
curl -L -O https://tiker.net/ci-support-v0
. ci-support-v0
EXTRA_INSTALL="pyvisfile scipy"
build_py_project_in_conda_env
run_examples
tags:
- large-node
except:
- tags
Documentation:
stage: deploy
script: |
EXTRA_INSTALL="pybind11 numpy mako"
curl -L -O https://tiker.net/ci-support-v0
. ci-support-v0
build_py_project_in_venv
build_docs
build_asv_html
maybe_upload_docs
tags:
- linux
Ruff:
stage: test
script:
- EXTRA_INSTALL="pybind11 numpy mako"
- curl -L -O -k https://gitlab.tiker.net/inducer/ci-support/raw/master/build-docs.sh
- ". ./build-docs.sh"
- pipx install ruff
- ruff check
tags:
- python3
only:
- master
- docker-runner
except:
- tags
Flake8:
Pylint:
script:
- curl -L -O -k https://gitlab.tiker.net/inducer/ci-support/raw/master/prepare-and-run-flake8.sh
- ". ./prepare-and-run-flake8.sh sumpy test"
- EXTRA_INSTALL="pybind11 numpy mako scipy matplotlib pyvisfile mpi4py"
- curl -L -O https://tiker.net/ci-support-v0
- . ci-support-v0
- build_py_project
- run_pylint "$(get_proj_name)" examples/*.py test/*.py
tags:
- python3
except:
- tags
Benchmarks:
script:
- CONDA_ENVIRONMENT=.test-conda-env-py3.yml
- PROJECT=sumpy
- PYOPENCL_TEST=portable
- curl -L -O -k https://gitlab.tiker.net/inducer/ci-support/raw/master/build-and-benchmark-py-project.sh
- ". ./build-and-benchmark-py-project.sh"
Mypy:
script: |
curl -L -O https://tiker.net/ci-support-v0
. ./ci-support-v0
build_py_project_in_venv
python -m pip install mypy
mypy $(get_proj_name)
tags:
- linux
- benchmark
- python3
except:
- tags
Downstream:
parallel:
matrix:
- DOWNSTREAM_PROJECT: [pytential]
tags:
- large-node
- "docker-runner"
script: |
curl -L -O https://tiker.net/ci-support-v0
. ./ci-support-v0
test_downstream "$DOWNSTREAM_PROJECT"
# vim: sw=2
- arg: py-version
val: '3.10'
- arg: extension-pkg-whitelist
val: mayavi
- arg: ignored-modules
val:
- symengine
name: test-conda-env-py3
channels:
- inducer
- conda-forge
- defaults
- nodefaults
dependencies:
- git
- conda-forge::numpy
- conda-forge::sympy
- numpy
- sympy
- pocl
- pocl-cuda
- islpy
- pyopencl
- python=3
- symengine=0.3.0
- python-symengine=0.3.0
- python-symengine
- pyfmmlib
- pyvkfft
- mpi4py
- pip
- pip:
- git+https://gitlab.tiker.net/inducer/boxtree
- git+https://github.com/inducer/pymbolic
- git+https://github.com/inducer/loopy
# This is intended to prevent conda from selecting 'external' (i.e. empty) builds
# of OpenMPI to satisfy the MPI dependency of mpi4py. It did so in May 2024, leading
# to confusing failures saying
# 'libmpi.so.40: cannot open shared object file: No such file or directory'.
# https://github.com/conda-forge/openmpi-feedstock/issues/153
# https://conda-forge.org/docs/user/tipsandtricks/#using-external-message-passing-interface-mpi-libraries
- openmpi>=5=h*
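The comment above explains why the OpenMPI build string is pinned. As a quick sanity check (a hypothetical snippet, not part of the environment file above), one could confirm that the mpi4py installed from this environment resolves a real MPI library rather than an 'external' placeholder build:

``` python
# Hypothetical check, not part of .test-conda-env-py3.yml: verify that mpi4py
# loads an actual libmpi instead of an 'external' placeholder build.
from mpi4py import MPI

print(MPI.Get_library_version())  # e.g. reports an Open MPI 5.x version string
```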
cff-version: 1.2.0
message: "If you use this software, please cite it as below."
authors:
- family-names: "Kloeckner"
given-names: "Andreas"
orcid: "https://orcid.org/0000-0003-1228-519X"
- family-names: Fernando
given-names: Isuru
- family-names: Wala
given-names: Matt
- family-names: Fikl
given-names: Alexandru
- family-names: Beams
given-names: Natalie
- family-names: Gao
given-names: Hao
title: "sumpy"
version: 2022.1
doi: 10.5281/zenodo.7349787
date-released: 2022-11-23
url: "https://github.com/inducer/sumpy"
license: MIT
include test/*.py
include examples/*.py
include doc/*.rst
include doc/Makefile
include doc/*.py
include doc/images/*.png
include doc/_static/*.css
include doc/_templates/*.html
include README.rst
include requirements.txt
sumpy: n-body kernels and translation operators
===============================================
.. image:: https://gitlab.tiker.net/inducer/sumpy/badges/master/pipeline.svg
.. image:: https://gitlab.tiker.net/inducer/sumpy/badges/main/pipeline.svg
:alt: Gitlab Build Status
:target: https://gitlab.tiker.net/inducer/sumpy/commits/master
.. image:: https://dev.azure.com/ak-spam/inducer/_apis/build/status/inducer.sumpy?branchName=master
:alt: Azure Build Status
:target: https://dev.azure.com/ak-spam/inducer/_build/latest?definitionId=17&branchName=master
.. image:: https://badge.fury.io/py/sumpy.png
:target: https://gitlab.tiker.net/inducer/sumpy/commits/main
.. image:: https://github.com/inducer/sumpy/actions/workflows/ci.yml/badge.svg
:alt: Github Build Status
:target: https://github.com/inducer/sumpy/actions/workflows/ci.yml
.. image:: https://badge.fury.io/py/sumpy.svg
:alt: Python Package Index Release Page
:target: https://pypi.org/project/sumpy/
.. image:: https://zenodo.org/badge/1856097.svg
:alt: Zenodo DOI for latest release
:target: https://zenodo.org/badge/latestdoi/1856097
Sumpy is mainly a 'scaffolding' package for Fast Multipole and quadrature methods.
If you're building one of those and need code generation for the required Multipole
and local expansions, come right on in. Together with boxtree, there is a full,
sumpy is mainly a 'scaffolding' package for Fast Multipole and quadrature methods.
If you're building one of those and need code generation for the required multipole
and local expansions, come right on in. Together with ``boxtree``, there is a full,
symbolically kernel-independent FMM implementation here.
Sumpy relies on
It relies on
* `numpy <http://pypi.python.org/pypi/numpy>`_ for arrays
* `boxtree <http://pypi.python.org/pypi/boxtree>`_ for FMM tree building
* `sumpy <http://pypi.python.org/pypi/sumpy>`_ for expansions and analytical routines
* `loopy <http://pypi.python.org/pypi/loo.py>`_ for fast array operations
* `pytest <http://pypi.python.org/pypi/pytest>`_ for automated testing
* `boxtree <https://pypi.org/project/boxtree>`__ for FMM tree building
* `loopy <https://pypi.org/project/loopy>`__ for fast array operations
* `pytest <https://pypi.org/project/pytest>`__ for automated testing
and, indirectly,
* `PyOpenCL <http://pypi.python.org/pypi/pyopencl>`_ as computational infrastructure
PyOpenCL is likely the only package you'll have to install
by hand, all the others will be installed automatically.
* `PyOpenCL <https://pypi.org/project/pyopencl>`__ as computational infrastructure
Resources:
* `documentation <http://documen.tician.de/sumpy>`_
* `source code via git <http://github.com/inducer/sumpy>`_
If you can see inside the UIUC firewall, you may browse
`benchmark results <http://koelsch.d.tiker.net/benchmarks/asv/sumpy/>`_.
* `documentation <https://documen.tician.de/sumpy>`__
* `source code via git <https://github.com/inducer/sumpy>`__
* `benchmarks <https://documen.tician.de/sumpy/benchmarks>`__
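To make the "code generation for the required multipole and local expansions" claim concrete, here is a minimal sketch assembled from the expansion API exercised later in this comparison (the benchmark suite and the example notebook). The exact ``translate_from`` signature differs between the revisions being compared, so treat this as illustrative rather than authoritative:

``` python
# Illustrative sketch only: mirrors the symbolic M2L translation used in the
# benchmark and notebook further down in this comparison. Signatures may
# differ between the two sumpy revisions shown here.
import sumpy.symbolic as sym
from sumpy.expansion.local import VolumeTaylorLocalExpansion
from sumpy.expansion.multipole import VolumeTaylorMultipoleExpansion
from sumpy.kernel import LaplaceKernel

knl = LaplaceKernel(2)                                 # 2D Laplace kernel
m_expn = VolumeTaylorMultipoleExpansion(knl, order=4)  # multipole expansion
l_expn = VolumeTaylorLocalExpansion(knl, order=4)      # local expansion

# Symbolic multipole coefficients and the multipole-to-local shift vector
src_coeffs = [sym.Symbol(f"src_coeff{i}") for i in range(len(m_expn))]
dvec = sym.make_sym_vector("d", knl.dim)

# Symbolic M2L translation; rscales are set to 1 for simplicity, as in the
# notebook cells below (newer revisions also take an assignment collection).
local_coeffs = l_expn.translate_from(m_expn, src_coeffs, 1, dvec, 1)
```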
......@@ -20,7 +20,7 @@
// List of branches to benchmark. If not provided, defaults to "master"
// (for git) or "default" (for mercurial).
// "branches": ["master"], // for git
"branches": ["main"], // for git
// "branches": ["default"], // for mercurial
// The DVCS being used. If not set, it will be automatically
......@@ -41,7 +41,7 @@
//"install_timeout": 600,
// the base URL to show a commit for the project.
"show_commit_url": "http://gitlab.tiker.net/inducer/sumpy/commits/",
"show_commit_url": "https://gitlab.tiker.net/inducer/sumpy/commits/",
// The Pythons you'd like to test against. If not provided, defaults
// to the current version of Python used to run `asv`.
......@@ -49,7 +49,7 @@
// The list of conda channel names to be searched for benchmark
// dependency packages in the specified order
"conda_channels": ["conda-forge", "defaults"],
"conda_channels": ["conda-forge"],
// The matrix of dependencies to test. Each key is the name of a
// package (in PyPI) and the values are version numbers. An empty
......@@ -68,13 +68,15 @@
// },
"matrix": {
"numpy" : [""],
"sympy" : ["1.0"],
"sympy" : [""],
"pyopencl" : [""],
"islpy" : [""],
"pocl" : [""],
"pyvkfft": [""],
"pip+git+https://github.com/inducer/pymbolic#egg=pymbolic": [""],
"pip+git+https://gitlab.tiker.net/inducer/boxtree#egg=boxtree": [""],
"pip+git+https://github.com/inducer/loopy#egg=loopy": [""],
"pip" : [""],
},
// Combinations of libraries/python versions can be excluded/included
......
jobs:
-
job: 'Python3'
pool:
vmImage: 'ubuntu-latest'
steps:
-
script: |
set -e
CONDA_ENVIRONMENT=.test-conda-env-py3.yml
curl -L -O -k https://gitlab.tiker.net/inducer/ci-support/raw/master/build-and-test-py-project-within-miniconda.sh
. ./build-and-test-py-project-within-miniconda.sh
displayName: 'Pytest Conda'
-
task: PublishTestResults@2
inputs:
testResultsFormat: 'JUnit'
testResultsFiles: 'test/pytest.xml'
-
job: 'Flake8'
pool:
vmImage: 'ubuntu-latest'
strategy:
matrix:
Python37:
python.version: '3.7'
steps:
-
task: UsePythonVersion@0
inputs:
versionSpec: '$(python.version)'
-
script: |
set -e
curl -L -O -k https://gitlab.tiker.net/inducer/ci-support/raw/master/prepare-and-run-flake8.sh
. ./prepare-and-run-flake8.sh sumpy test
displayName: 'Flake8'
from __future__ import annotations
import logging
import numpy as np
import pytest
import pyopencl as cl
from pyopencl.tools import ( # noqa
pytest_generate_tests_for_pyopencl as pytest_generate_tests)
pytest_generate_tests_for_pyopencl as pytest_generate_tests,
)
from sumpy.expansion.multipole import (
VolumeTaylorMultipoleExpansion, H2DMultipoleExpansion,
VolumeTaylorMultipoleExpansionBase,
LaplaceConformingVolumeTaylorMultipoleExpansion,
HelmholtzConformingVolumeTaylorMultipoleExpansion)
from sumpy.expansion.local import (
VolumeTaylorLocalExpansion, H2DLocalExpansion,
LaplaceConformingVolumeTaylorLocalExpansion,
HelmholtzConformingVolumeTaylorLocalExpansion)
H2DLocalExpansion,
LinearPDEConformingVolumeTaylorLocalExpansion,
VolumeTaylorLocalExpansion,
)
from sumpy.expansion.multipole import (
H2DMultipoleExpansion,
LinearPDEConformingVolumeTaylorMultipoleExpansion,
VolumeTaylorMultipoleExpansion,
)
from sumpy.kernel import HelmholtzKernel, LaplaceKernel
from sumpy.kernel import (LaplaceKernel, HelmholtzKernel, AxisTargetDerivative,
DirectionalSourceDerivative)
import logging
logger = logging.getLogger(__name__)
import sympy
import six
import pymbolic.mapper.flop_counter
import sumpy.symbolic as sym
from sumpy.assignment_collection import SymbolicAssignmentCollection
from sumpy.codegen import to_loopy_insns
class Param:
def __init__(self, dim, order):
self.dim = dim
self.order = order
def __repr__(self):
return "{}D_order_{}".format(self.dim, self.order)
return f"{self.dim}D_order_{self.order}"
class TranslationBenchmarkSuite:
params = [
params = (
Param(2, 10),
Param(2, 15),
Param(2, 20),
Param(3, 5),
Param(3, 10),
]
)
param_names = ['order']
param_names = ("order",)
def setup(self, param):
logging.basicConfig(level=logging.INFO)
np.random.seed(17)
np.random.seed(17) # noqa: NPY002
if self.__class__ == TranslationBenchmarkSuite:
raise NotImplementedError
mpole_expn_class = self.mpole_expn_class
......@@ -64,66 +65,71 @@ class TranslationBenchmarkSuite:
m_expn = self.mpole_expn_class(knl, order=param.order)
l_expn = self.local_expn_class(knl, order=param.order)
src_coeff_exprs = [sym.Symbol("src_coeff%d" % i)
for i in range(len(m_expn))]
src_coeff_exprs = [
sym.Symbol(f"src_coeff{i}")
for i in range(len(m_expn))]
dvec = sym.make_sym_vector("d", knl.dim)
src_rscale = sym.Symbol("src_rscale")
tgt_rscale = sym.Symbol("tgt_rscale")
result = l_expn.translate_from(m_expn, src_coeff_exprs, src_rscale,
dvec, tgt_rscale)
sac = SymbolicAssignmentCollection()
try:
result = l_expn.translate_from(m_expn, src_coeff_exprs, src_rscale,
dvec, tgt_rscale, sac)
except TypeError:
# Support older interface to make it possible to compare
# in CI run
result = l_expn.translate_from(m_expn, src_coeff_exprs, src_rscale,
dvec, tgt_rscale)
for i, expr in enumerate(result):
sac.assign_unique("coeff%d" % i, expr)
sac.assign_unique(f"coeff{i}", expr)
sac.run_global_cse()
insns = to_loopy_insns(six.iteritems(sac.assignments))
insns = to_loopy_insns(sac.assignments.items())
counter = pymbolic.mapper.flop_counter.CSEAwareFlopCounter()
return sum([counter.rec(insn.expression)+1 for insn in insns])
return sum(counter.rec(insn.expression)+1 for insn in insns)
track_m2l_op_count.unit = "ops"
track_m2l_op_count.timeout = 200.0
track_m2l_op_count.timeout = 300.0
class LaplaceVolumeTaylorTranslation(TranslationBenchmarkSuite):
knl = LaplaceKernel
local_expn_class = VolumeTaylorLocalExpansion
mpole_expn_class = VolumeTaylorMultipoleExpansion
params = [
params = (
Param(2, 10),
Param(3, 5),
]
)
class LaplaceConformingVolumeTaylorTranslation(TranslationBenchmarkSuite):
knl = LaplaceKernel
local_expn_class = LaplaceConformingVolumeTaylorLocalExpansion
mpole_expn_class = LaplaceConformingVolumeTaylorMultipoleExpansion
local_expn_class = LinearPDEConformingVolumeTaylorLocalExpansion
mpole_expn_class = LinearPDEConformingVolumeTaylorMultipoleExpansion
class HelmholtzVolumeTaylorTranslation(TranslationBenchmarkSuite):
knl = HelmholtzKernel
local_expn_class = VolumeTaylorLocalExpansion
mpole_expn_class = VolumeTaylorMultipoleExpansion
params = [
params = (
Param(2, 10),
Param(3, 5),
]
)
class HelmholtzConformingVolumeTaylorTranslation(TranslationBenchmarkSuite):
knl = HelmholtzKernel
local_expn_class = HelmholtzConformingVolumeTaylorLocalExpansion
mpole_expn_class = HelmholtzConformingVolumeTaylorMultipoleExpansion
local_expn_class = LinearPDEConformingVolumeTaylorLocalExpansion
mpole_expn_class = LinearPDEConformingVolumeTaylorMultipoleExpansion
class Helmholtz2DTranslation(TranslationBenchmarkSuite):
knl = HelmholtzKernel
local_expn_class = H2DLocalExpansion
mpole_expn_class = H2DMultipoleExpansion
params = [
params = (
Param(2, 10),
Param(2, 15),
Param(2, 20),
]
)
%% Cell type:code id: tags:
``` python
import pyopencl as cl
import sumpy.toys as t
import numpy as np
import numpy.linalg as la
import matplotlib.pyplot as plt
from sumpy.visualization import FieldPlotter
from pytools import add_tuples
from __future__ import annotations
from sumpy.expansion.local import VolumeTaylorLocalExpansion
from sumpy.expansion.multipole import (VolumeTaylorMultipoleExpansion,
from sumpy.expansion.multipole import (
LaplaceConformingVolumeTaylorMultipoleExpansion,
HelmholtzConformingVolumeTaylorMultipoleExpansion)
from sumpy.kernel import (YukawaKernel, HelmholtzKernel, LaplaceKernel)
LinearPDEConformingVolumeTaylorMultipoleExpansion,
VolumeTaylorMultipoleExpansion,
)
from sumpy.kernel import HelmholtzKernel, LaplaceKernel, YukawaKernel # noqa: F401
from sumpy.symbolic import make_sym_vector
order = 2
dim = 2
if 0:
knl = LaplaceKernel(dim)
extra_kernel_kwargs = {}
mpole_expn_reduced_class = LaplaceConformingVolumeTaylorMultipoleExpansion
else:
helm_k = 1.2
knl = HelmholtzKernel(dim)
extra_kernel_kwargs={"k": helm_k}
mpole_expn_reduced_class = HelmholtzConformingVolumeTaylorMultipoleExpansion
extra_kernel_kwargs = {"k": helm_k}
mpole_expn_reduced_class = LinearPDEConformingVolumeTaylorMultipoleExpansion
mpole_expn_reduced = mpole_expn_reduced_class(knl, order)
mpole_expn = VolumeTaylorMultipoleExpansion(knl, order)
local_expn = VolumeTaylorLocalExpansion(knl, order)
```
%% Cell type:code id: tags:
``` python
from pytools import factorial
def mi_factorial(n):
return np.prod([factorial(n1) for n1 in n])
```
%% Cell type:code id: tags:
``` python
reduced_wrangler = mpole_expn_reduced.derivative_wrangler
full_wrangler = mpole_expn.derivative_wrangler
reduced_wrangler = mpole_expn_reduced.expansion_terms_wrangler
full_wrangler = mpole_expn.expansion_terms_wrangler
reduced_derivatives = list(make_sym_vector("deriv", len(reduced_wrangler.stored_identifiers)))
full_derivatives = reduced_wrangler.get_full_kernel_derivatives_from_stored(reduced_derivatives, 1)
reduced_derivatives = list(
make_sym_vector("deriv", len(reduced_wrangler.stored_identifiers))
)
full_derivatives = reduced_wrangler.get_full_kernel_derivatives_from_stored(
reduced_derivatives, 1
)
print(reduced_derivatives)
print(full_derivatives)
```
%% Cell type:code id: tags:
``` python
full_coeffs = list(make_sym_vector("coeff", len(reduced_wrangler.get_full_coefficient_identifiers())))
reduced_coeffs = reduced_wrangler.get_stored_mpole_coefficients_from_full(full_mpole_coefficients=full_coeffs, rscale=1)
full_coeffs = list(
make_sym_vector("coeff", len(reduced_wrangler.get_full_coefficient_identifiers()))
)
reduced_coeffs = reduced_wrangler.get_stored_mpole_coefficients_from_full(
full_mpole_coefficients=full_coeffs, rscale=1
)
print(full_coeffs)
print(reduced_coeffs)
```
%% Cell type:code id: tags:
``` python
dvec = make_sym_vector("d", dim)
translated_reduce_coeffs = mpole_expn_reduced.translate_from(mpole_expn_reduced, reduced_coeffs, 1, dvec, 1)
translated_reduce_coeffs = mpole_expn_reduced.translate_from(
mpole_expn_reduced, reduced_coeffs, 1, dvec, 1
)
translated_full_coeffs = mpole_expn.translate_from(mpole_expn, full_coeffs, 1, dvec, 1)
translated_full_coeffs
```
%% Cell type:code id: tags:
``` python
eval_reduced = sum(a*b for a, b in zip(translated_reduce_coeffs, reduced_derivatives))
eval_full = sum(a*b for a, b in zip(translated_full_coeffs, full_derivatives))
eval_reduced = sum(a * b for a, b in zip(translated_reduce_coeffs, reduced_derivatives,
strict=True))
eval_full = sum(a * b for a, b in zip(translated_full_coeffs, full_derivatives,
strict=True))
(eval_full-eval_reduced).simplify()
(eval_full - eval_reduced).simplify()
```
......
......@@ -9,7 +9,7 @@ BUILDDIR = _build
# User-friendly check for sphinx-build
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don\'t have Sphinx installed, grab it from http://sphinx-doc.org/)
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don\'t have Sphinx installed, grab it from https://sphinx-doc.org/)
endif
# Internal variables.
......
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# sumpy documentation build configuration file, created by
# sphinx-quickstart on Wed Apr 6 14:00:41 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
from importlib import metadata
from urllib.request import urlopen
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
_conf_url = \
"https://raw.githubusercontent.com/inducer/sphinxconfig/main/sphinxconfig.py"
with urlopen(_conf_url) as _inf:
exec(compile(_inf.read(), _conf_url, "exec"), globals())
# -- General configuration ------------------------------------------------
copyright = "2016-21, sumpy contributors"
release = metadata.version("sumpy")
version = ".".join(release.split(".")[:2])
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.coverage',
'sphinx.ext.mathjax',
'sphinx.ext.viewcode',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'sumpy'
copyright = '2016, Andreas Kloeckner'
author = 'Andreas Kloeckner'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '2016.1'
# The full version, including alpha/beta/rc tags.
release = '2016.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# These patterns also affect html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
html_theme = "alabaster"
html_theme_options = {
"extra_nav_links": {
"🚀 Github": "https://github.com/inducer/sumpy",
"💾 Download Releases": "https://pypi.python.org/pypi/sumpy",
}
}
html_sidebars = {
'**': [
'about.html',
'navigation.html',
'relations.html',
'searchbox.html',
]
}
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#html_title = 'sumpy v2016.1'
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# Users of 'zh' can customize the `jieba` dictionary path.
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'sumpydoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
intersphinx_mapping = {
"arraycontext": ("https://documen.tician.de/arraycontext/", None),
"boxtree": ("https://documen.tician.de/boxtree/", None),
"loopy": ("https://documen.tician.de/loopy/", None),
"matplotlib": ("https://matplotlib.org/stable/", None),
"numpy": ("https://numpy.org/doc/stable/", None),
"pymbolic": ("https://documen.tician.de/pymbolic/", None),
"pyopencl": ("https://documen.tician.de/pyopencl/", None),
"pytential": ("https://documen.tician.de/pytential/", None),
"python": ("https://docs.python.org/3/", None),
"pytools": ("https://documen.tician.de/pytools/", None),
"sympy": ("https://docs.sympy.org/latest/", None),
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'sumpy.tex', 'sumpy Documentation',
'Andreas Kloeckner', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'sumpy', 'sumpy Documentation',
[author], 1)
nitpick_ignore_regex = [
["py:class", r"symengine\.(.+)"], # :cry:
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'sumpy', 'sumpy Documentation',
author, 'sumpy', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
intersphinx_mapping = {
'http://docs.python.org/': None,
'http://docs.scipy.org/doc/numpy/': None,
'http://documen.tician.de/modepy/': None,
'http://documen.tician.de/pyopencl/': None,
'http://documen.tician.de/pymbolic/': None,
'http://documen.tician.de/loopy/': None,
'http://documen.tician.de/pytential/': None,
'http://documen.tician.de/boxtree/': None,
}
Differentiation and Evaluation
==============================
Working with Values of Potentials
=================================
Visualization of Potentials
---------------------------
......@@ -11,7 +11,7 @@ Differentiation of Potentials
.. automodule:: sumpy.point_calculus
Support for Numerical Experiments with Expansions
-------------------------------------------------
Support for Numerical Experiments with Expansions ("Expansion toys")
--------------------------------------------------------------------
.. automodule:: sumpy.toys
......@@ -3,6 +3,11 @@ Expansions
.. automodule:: sumpy.expansion
Differential Operators
----------------------
.. automodule:: sumpy.expansion.diff_op
Local Expansions
----------------
......@@ -12,3 +17,13 @@ Multipole Expansions
--------------------
.. automodule:: sumpy.expansion.multipole
Multipole to Local Translations
-------------------------------
.. automodule:: sumpy.expansion.m2l
Estimating Expansion Orders
---------------------------
.. automodule:: sumpy.expansion.level_to_order