More robust build-catalogue (arbor-sim#1784)
- build-catalogue
  - now installed by pip
  - enable GPU backends
  - make it robust against installation in a non-default directory
  - rename to arbor-build-catalogue
  - allow hand-written C++ mechanisms to be linked into the same catalogue
- CMake
  - always enable PIC on arbor, to allow linking into shared objects
  - use relative paths when configuring arbor-build-catalogue, to make relocation less of a problem
- Python
  - ensure arbor-build-catalogue is installed by pip, along with headers and libarbor.a
  - throw errors if required tools (CMake scripts, arbor package, modcc) are missing
  - transition setup.py from setuptools to scikit-build (skbuild)
- Wheels
  - add NeuroML2 support to wheels
  - scripts/build-wheels.sh builds wheels on your own hardware that are, in principle, valid for submission to PyPI; it should be kept in sync with .github/workflows/ciwheel.yml
  - scripts/patchwheel.py corrects the rpath of the libraries inside the wheels, working around a bad interplay between auditwheel and skbuild (see pypa/auditwheel#363)
  - Python wheels are tested as part of the GitHub Actions workflow
  - add NeuroML and bundled-libraries status to config(); see the sketch after this list
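A quick way to confirm the new config() entries after installing one of these builds is the Python sketch below; the exact key names (``neuroml``, ``bundled``) are assumptions based on the commit message, not confirmed API.

.. code-block:: python

    import arbor

    # Inspect the feature flags reported by the installed package. The key
    # names 'neuroml' and 'bundled' are assumed from the commit message;
    # print the whole dict if they are absent in your version.
    cfg = arbor.config()
    print(cfg)
    print("NeuroML support:", cfg.get("neuroml"))
    print("bundled libraries:", cfg.get("bundled"))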
thorstenhater authored and max9901 committed Feb 3, 2022
1 parent 03bfbf4 commit 241d6ca
Showing 25 changed files with 429 additions and 367 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/basic.yml
@@ -180,5 +180,5 @@ jobs:
run: scripts/run_python_examples.sh
- name: Build and test a catalogue
run: |
build-catalogue -v default mechanisms/default
arbor-build-catalogue -v default mechanisms/default
./scripts/test-catalogue.py ./default-catalogue.so
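For orientation, the catalogue check above amounts to roughly the following Python sketch; scripts/test-catalogue.py remains the authoritative test, and the mechanism names below are assumptions about what mechanisms/default contains.

.. code-block:: python

    import arbor

    # Load the catalogue produced by `arbor-build-catalogue -v default mechanisms/default`.
    cat = arbor.load_catalogue("./default-catalogue.so")

    # Assumed contents of mechanisms/default; membership via `in` is the
    # behaviour expected of arbor's catalogue bindings -- treat it as a sketch.
    for mech in ("pas", "hh", "expsyn"):
        assert mech in cat, f"mechanism '{mech}' missing from catalogue"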
21 changes: 13 additions & 8 deletions .github/workflows/ciwheel.yml
@@ -6,6 +6,9 @@ on:
tags:
- v*

schedule:
- cron: '0 2 * * 0' # run at 2 AM every sunday

jobs:
build_binary_wheels:
name: Build wheels on ${{ matrix.os }}
@@ -22,29 +25,29 @@ jobs:

- name: Build wheels Linux
if: ${{ startsWith(matrix.os, 'ubuntu') }}
uses: pypa/cibuildwheel@v1.9.0
uses: pypa/cibuildwheel@v2.3.0
with:
output-dir: dist
env:
CIBW_BEFORE_BUILD: python -m pip install numpy setuptools
CIBW_BEFORE_ALL: yum -y install libxml2-devel
CIBW_BEFORE_BUILD: python -m pip install numpy setuptools scikit-build ninja cmake
CIBW_BUILD: "cp3?-manylinux_x86_64"
CIBW_SKIP: "cp35-*"
CIBW_MANYLINUX_X86_64_IMAGE: manylinux2014
CIBW_ARCHS_LINUX: x86_64
# CIBW_TEST_COMMAND: TODO
CIBW_REPAIR_WHEEL_COMMAND: 'auditwheel repair -w {dest_dir} {wheel} && python /project/scripts/patchwheel.py {dest_dir}'
CIBW_TEST_COMMAND: python -m unittest discover -v -s {project}/python

- name: Build wheels macos
if: ${{ startsWith(matrix.os, 'macos') }}
uses: pypa/cibuildwheel@v1.9.0
uses: pypa/cibuildwheel@v2.3.0
with:
output-dir: dist
env:
MACOSX_DEPLOYMENT_TARGET: "10.15" #needed to undo some CIBW settings
CIBW_BEFORE_BUILD: python -m pip install numpy setuptools
CIBW_BEFORE_BUILD: python -m pip install numpy setuptools scikit-build ninja cmake
CIBW_BUILD: "cp3?-macosx_x86_64"
CIBW_SKIP: "cp35-*"
CIBW_ARCHS_MACOS: x86_64 universal2
# CIBW_TEST_COMMAND: TODO
CIBW_TEST_COMMAND: python -m unittest discover -v -s {project}/python

# this action runs auditwheel automatically with the following args:
# https://cibuildwheel.readthedocs.io/en/stable/options/#repair-wheel-command
@@ -61,6 +64,8 @@ jobs:
steps:
- name: Set up Python
uses: actions/setup-python@v2
- name: Get packages
run: python -m pip install numpy setuptools scikit-build ninja cmake
- uses: actions/checkout@v2
with:
fetch-depth: 0
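The CIBW_REPAIR_WHEEL_COMMAND above chains auditwheel with scripts/patchwheel.py. The sketch below only illustrates the kind of rpath correction involved; it is not the actual script and assumes the ``wheel`` package and ``patchelf`` are available.

.. code-block:: python

    import glob
    import subprocess
    import sys
    import tempfile

    def patch_wheel(wheel_path: str) -> None:
        """Rewrite the RPATH of bundled shared libraries so they resolve
        relative to $ORIGIN (cf. pypa/auditwheel#363). Illustrative only."""
        with tempfile.TemporaryDirectory() as tmp:
            # Unpack the wheel, patch every bundled .so, then repack it.
            subprocess.run([sys.executable, "-m", "wheel", "unpack", wheel_path, "-d", tmp], check=True)
            for lib in glob.glob(f"{tmp}/**/*.so*", recursive=True):
                subprocess.run(["patchelf", "--set-rpath", "$ORIGIN", lib], check=True)
            unpacked = glob.glob(f"{tmp}/*")[0]
            subprocess.run([sys.executable, "-m", "wheel", "pack", unpacked, "-d", "."], check=True)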
17 changes: 12 additions & 5 deletions CMakeLists.txt
@@ -13,6 +13,10 @@ enable_language(CXX)

include(GNUInstallDirs)

# Effectively adds '-fpic' flag to CXX_FLAGS. Needed for dynamic catalogues.
set(CMAKE_POSITION_INDEPENDENT_CODE ON)


# Turn on this option to force the compilers to produce color output when output is
# redirected from the terminal (e.g. when using ninja or a pager).

@@ -107,6 +111,9 @@ endif()
# as to enable CUDA tests in generator expressions.)
if(ARB_GPU STREQUAL "cuda")
set(ARB_WITH_NVCC TRUE)
# CMake 3.18 and later set the default CUDA architecture for
# each target according to CMAKE_CUDA_ARCHITECTURES.

# This fixes nvcc picking up a wrong host compiler for linking, causing
# issues with outdated libraries, eg libstdc++ and std::filesystem. Must
# happen before all calls to enable_language(CUDA)
@@ -164,6 +171,9 @@ set(CMAKE_CXX_EXTENSIONS OFF)

# Data and internal scripts go here
set(ARB_INSTALL_DATADIR ${CMAKE_INSTALL_FULL_DATAROOTDIR}/arbor)
# Derived paths for arbor-build-catalogue
file(RELATIVE_PATH ARB_REL_DATADIR ${CMAKE_INSTALL_FULL_BINDIR} ${CMAKE_INSTALL_FULL_DATAROOTDIR}/arbor)
file(RELATIVE_PATH ARB_REL_PACKAGEDIR ${CMAKE_INSTALL_FULL_BINDIR} ${CMAKE_INSTALL_FULL_LIBDIR}/cmake/arbor)

# Interface library `arbor-config-defs` collects configure-time defines
# for arbor, arborenv, arborio, of the form ARB_HAVE_XXX. These
@@ -208,8 +218,8 @@ install(TARGETS arborio-public-deps EXPORT arborio-targets)

# Add scripts and supporting CMake for setting up external catalogues

configure_file(scripts/build-catalogue.in ${CMAKE_CURRENT_BINARY_DIR}/build-catalogue @ONLY)
install(PROGRAMS ${CMAKE_CURRENT_BINARY_DIR}/build-catalogue DESTINATION ${CMAKE_INSTALL_FULL_BINDIR})
configure_file(scripts/build-catalogue.in ${CMAKE_CURRENT_BINARY_DIR}/arbor-build-catalogue @ONLY)
install(PROGRAMS ${CMAKE_CURRENT_BINARY_DIR}/arbor-build-catalogue DESTINATION ${CMAKE_INSTALL_FULL_BINDIR})
install(FILES mechanisms/BuildModules.cmake DESTINATION ${ARB_INSTALL_DATADIR})
install(FILES mechanisms/generate_catalogue DESTINATION ${ARB_INSTALL_DATADIR} PERMISSIONS OWNER_READ OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ WORLD_EXECUTE)
# External libraries in `ext` sub-directory: json, tinyopt and random123.
@@ -290,9 +300,6 @@ if(ARB_WITH_PYTHON)
find_package(Python3 ${arb_py_version} COMPONENTS Interpreter Development REQUIRED)
endif()

# Required to link the dynamic libraries for python modules.
# Effectively adds '-fpic' flag to CXX_FLAGS.
set(CMAKE_POSITION_INDEPENDENT_CODE ON)
else()
# If not building the Python module, the interpreter is still required
# to build some targets, e.g. when building the documentation.
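The two ``file(RELATIVE_PATH ...)`` lines added above derive, from the installed ``bin/`` directory, the relative locations of arbor's data and CMake package directories, so arbor-build-catalogue still finds them after the whole install prefix is relocated. Purely for illustration (the prefix layout is an assumption), the equivalent computation in Python:

.. code-block:: python

    import os

    # Hypothetical install prefix; the layout mirrors GNUInstallDirs defaults.
    prefix = "/opt/arbor"
    bindir = os.path.join(prefix, "bin")

    # Same kind of result as ARB_REL_DATADIR and ARB_REL_PACKAGEDIR above.
    print(os.path.relpath(os.path.join(prefix, "share", "arbor"), bindir))         # ../share/arbor
    print(os.path.relpath(os.path.join(prefix, "lib", "cmake", "arbor"), bindir))  # ../lib/cmake/arbor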
4 changes: 4 additions & 0 deletions arbor/include/CMakeLists.txt
@@ -46,6 +46,10 @@ if(ARB_WITH_PROFILING)
# define ARB_PROFILE_ENABLED in version.hpp
list(APPEND arb_features PROFILE)
endif()
if(ARB_USE_BUNDLED_LIBS)
# define ARB_BUNDLED_ENABLED in version.hpp
list(APPEND arb_features BUNDLED)
endif()
if(ARB_VECTORIZE)
list(APPEND arb_features VECTORIZE)
endif()
3 changes: 1 addition & 2 deletions arborio/CMakeLists.txt
@@ -11,8 +11,7 @@ if(ARB_WITH_NEUROML)
neuroml.cpp
nml_parse_morphology.cpp
xml.cpp
xmlwrap.cpp
)
xmlwrap.cpp)
find_package(LibXml2 REQUIRED)
endif()

2 changes: 1 addition & 1 deletion ci/gitlab-cscs.yml
@@ -101,7 +101,7 @@ single_node_release:
only: ['master', 'staging', 'trying']
stage: test
script:
- unit
- unit --gtest_filter="-mechcat.loading"
- unit-local
- unit-modcc
variables:
1 change: 1 addition & 0 deletions cmake/arbor-config.cmake.in
@@ -45,6 +45,7 @@ function(_append_property target property)
endfunction()

set(ARB_VECTORIZE @ARB_VECTORIZE@)
set(ARB_WITH_GPU @ARB_WITH_GPU@)
set(ARB_ARCH @ARB_ARCH@)
set(ARB_MODCC_FLAGS @ARB_MODCC_FLAGS@)
set(ARB_CXX @CMAKE_CXX_COMPILER@)
14 changes: 7 additions & 7 deletions doc/concepts/mechanisms.rst
@@ -90,17 +90,17 @@ This will produce a catalogue loadable at runtime by calling ``load_catalogue``
with a filename in both C++ and Python. The steps are

1. Prepare a directory containing your NMODL files (.mod suffixes required)
2. Call ``build-catalogue`` installed by arbor
2. Call ``arbor-build-catalogue`` installed by arbor

.. code-block :: bash
build-catalogue <name> <path/to/nmodl>
All files with the suffix ``.mod`` located in ``<path/to/nmodl>`` will be baked into
a catalogue named ``lib<name>-catalogue.so`` and placed into your current working
directory. Note that these files are platform-specific and should only be used
on the combination of OS, compiler, arbor, and machine they were built with.
arbor-build-catalogue <name> <path/to/nmodl>
All files with the suffix ``.mod`` located in ``<path/to/nmodl>`` will be baked
into a catalogue named ``lib<name>-catalogue.so`` and placed into your current
working directory. Note that these files are platform-specific and should only
be used on the combination of OS, compiler, arbor, and machine they were built
with. See our internal documentation for more advanced usage of the builder.
Errors might be diagnosable by passing the ``-v`` flag.

This catalogue can then be used similarly to the built-in ones
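A minimal sketch of that usage from Python follows; the catalogue file name is an assumption (adjust it to what arbor-build-catalogue reported), and the ``extend`` call reflects the catalogue API documented for recent Arbor versions.

.. code-block:: python

    import arbor

    # Load the user catalogue built by arbor-build-catalogue.
    user_cat = arbor.load_catalogue("./libcustom-catalogue.so")

    # Merge it into the default catalogue under a prefix so built-in and
    # custom mechanisms can be used side by side in a cable cell model.
    cat = arbor.default_catalogue()
    cat.extend(user_cat, "custom_")

    props = arbor.neuron_cable_properties()
    props.catalogue = cat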
132 changes: 71 additions & 61 deletions doc/install/python.rst
@@ -6,29 +6,24 @@ Python Installation
Arbor's Python API will be the most convenient interface for most users.

.. note::
Arbor requires Python version 3.6 and later. It is advised that you update `pip` as well.
Arbor requires Python version 3.6 and later. It is advised that you update ``pip`` as well.
We strongly encourage using ``pip`` to install Arbor.

To get help in case of problems installing with pip, run pip with the ``--verbose`` flag, and attach the output
(along with the pip command itself) to a ticket on `Arbor's issues page <https://github.com/arbor-sim/arbor/issues>`_.

Getting Arbor
-------------

Every point release of Arbor is pushed to the Python Package Index.
For x86-64 Linux and MacOS plaftorms, we provide binary wheels.
For x86-64 Linux and MacOS platforms, we provide binary wheels.
The easiest way to get Arbor is with
`pip <https://packaging.python.org/tutorials/installing-packages>`_:

.. code-block:: bash
pip3 install arbor
.. note::
For other platforms, `pip` will build Arbor from source.
You will need to have some development packages installed in order to build Arbor this way.

* Ubuntu/Debian: `git cmake gcc python3-dev python3-pip libxml2-dev`
* Fedora/CentOS/OpenSuse: `git cmake gcc-c++ python3-devel python3-pip libxml2-devel`
* MacOS: get `brew` `here <https://brew.sh>`_ and run `brew install cmake clang python3 libxml2`
* Windows: the simplest way is to use `WSL <https://docs.microsoft.com/en-us/windows/wsl/install-win10>`_ and then follow the instructions for Ubuntu.

To test that Arbor is available, try the following in a Python interpreter
to see information about the version and enabled features:

@@ -41,9 +36,20 @@ to see information about the version and enabled features:
You are now ready to use Arbor! You can continue reading these documentation pages, have a look at the
:ref:`Python API reference<pyoverview>`, or visit the :ref:`tutorial`.

.. Note::
To get help in case of problems installing with pip, run pip with the ``--verbose`` flag, and attach the output
(along with the pip command itself) to a ticket on `Arbor's issues page <https://github.com/arbor-sim/arbor/issues>`_.
.. Warning::

To build Arbor from source, you will need to have some development packages installed. On any platform
other than those listed above, ``pip`` will attempt a build from source and will therefore require these
packages as well.

* Ubuntu/Debian: `git cmake gcc python3-dev python3-pip libxml2-dev`
* Fedora/CentOS/OpenSuse: `git cmake gcc-c++ python3-devel python3-pip libxml2-devel`
* MacOS: get `brew` `here <https://brew.sh>`_ and run `brew install cmake clang python3 libxml2`
* Windows: the simplest way is to use `WSL <https://docs.microsoft.com/en-us/windows/wsl/install-win10>`_ and then follow the instructions for Ubuntu.

In addition, you'll need a few Python packages present:

``pip3 install ninja scikit-build wheel setuptools numpy``

.. _in_python_custom:

@@ -71,88 +77,92 @@ Every time you make changes to the code, you'll have to repeat the second step.
Advanced options
^^^^^^^^^^^^^^^^

By default Arbor is installed with multi-threading enabled. To enable more advanced forms of parallelism,
Arbor comes with a few compilation options. These can be used on both local (``pip3 install ./arbor``) and
remote (``pip3 install arbor``) copies of Arbor. Below we assume you are working off a local copy.
By default Arbor is installed with multi-threading enabled. To enable more
advanced forms of parallelism and other features, Arbor comes with a few
compilation options. These are of the form ``-D<KEY>=<VALUE>``, which must be appended
to the ``pip`` invocation via ``--install-option="-D<...>" --install-option="-D<...>" ...`` and can
be used on both local (``pip3 install ./arbor``) and remote (``pip3 install
arbor``) copies of Arbor. See the examples below.

.. Note::

The following optional flags can be used to configure the installation:
If you run into build issues while experimenting with build options, be sure
to remove the ``_skbuild`` directory. If you had Arbor installed already,
you may need to remove it first before you can (re)compile it with the flags you need.

Also, make sure to pass each option individually via
``--install-option="..."``.

The following flags can be used to configure the installation:

* ``ARB_WITH_NEUROML=<ON|OFF>``: Enable support for NeuroML2 morphologies,
requires ``libxml2`` library. Default ``OFF``
* ``ARB_WITH_MPI=<ON|OFF>``: Enable MPI support, requires MPI library.
Default ``OFF``.
* ``ARB_GPU=<none|cuda|cuda-clang|hip>``: Enable GPU support for NVIDIA GPUs
with nvcc using ``cuda``, or with clang using ``cuda-clang`` (both require
cudaruntime). Enable GPU support for AMD GPUs with hipcc using ``hip``. By
default set to ``none``, which disables GPU support.
* ``ARB_VECTORIZE=<ON|OFF>``: Enable vectorization. The architecture argument,
documented below, may also have to be set appropriately to generate
vectorized code. See :ref:`install-architecture` for details.
* ``ARB_ARCH=<native|*>``: CPU micro-architecture to target. The advised
default is ``native``. See `here
<https://gcc.gnu.org/onlinedocs/gcc/x86-Options.html>`_ for a full list of
options.

.. note::

* ``--mpi``: Enable MPI support (requires MPI library).
* ``--gpu``: Enable GPU support for NVIDIA GPUs with nvcc using ``cuda``, or with clang using ``cuda-clang`` (both require cudaruntime).
Enable GPU support for AMD GPUs with hipcc using ``hip``. By default set to ``none``, which disables gpu support.
* ``--vec``: Enable vectorization. The ``--arch`` argument, documented below, may also have to be set appropriately to generated vectorized code.
See :ref:`install-architecture` for details.
* ``--arch``: CPU micro-architecture to target. The advised default is ``native``.
See `here <https://gcc.gnu.org/onlinedocs/gcc/x86-Options.html>`_ for a full list of options.
* ``--makejobs``: Specify the amount of jobs to ``make`` the project with for faster build times on multicore systems. By default set to ``2``.
There are more advanced flags that can be set. We are using ``scikit-build``
and ``CMake`` under the hood, so all flags and options valid in ``CMake`` can
be used in this fashion.

Detailed instructions on how to install using CMake are in the :ref:`Python
configuration <install-python>` section of the :ref:`installation guide
<in_build_install>`. CMake is recommended if you need more control over
compilation and installation, plan to use Arbor with C++, or if you are
integrating with package managers such as Spack and EasyBuild.

In the examples below we assume you are installing from a local copy.

**Vanilla install** with no additional features enabled:

.. code-block:: bash
pip3 install arbor
pip3 install ./arbor
**With MPI support**. This might require loading an MPI module or setting the ``CC`` and ``CXX``
:ref:`environment variables <install-mpi>`:

.. code-block:: bash
pip3 install --install-option='--mpi' ./arbor
pip3 install ./arbor --install-option="-DARB_WITH_MPI=ON"
**Compile with** :ref:`vectorization <install-vectorize>` on a system with a SkyLake
:ref:`architecture <install-architecture>`:

.. code-block:: bash
pip3 install --install-option='--vec' --install-option='--arch=skylake' arbor
pip3 install ./arbor --install-option="-DARB_VECTORIZE=ON" --install-option="-DARB_ARCH=skylake"
**Enable NVIDIA GPUs (compiled with nvcc)**. This requires the :ref:`CUDA toolkit <install-gpu>`:

.. code-block:: bash
pip3 install --install-option='--gpu=cuda' ./arbor
pip3 install ./arbor --install-option="-DARB_GPU=cuda"
**Enable NVIDIA GPUs (compiled with clang)**. This also requires the :ref:`CUDA toolkit <install-gpu>`:

.. code-block:: bash
pip3 install --install-option='--gpu=cuda-clang' ./arbor
pip3 install ./arbor --install-option="-DARB_GPU=cuda-clang"
**Enable AMD GPUs (compiled with hipcc)**. This requires setting the ``CC`` and ``CXX``
:ref:`environment variables <install-gpu>`

.. code-block:: bash
pip3 install --install-option='--gpu=hip' ./arbor
.. Note::
Setuptools compiles the Arbor C++ library and wrapper, as well as dependencies you did not have installed
yet (e.g. `numpy`). It may take a few minutes. Pass the ``--verbose`` flag to pip
to see the individual steps being performed if you are concerned that progress
is halting.

If you had Arbor installed already, you may need to remove it first before you can (re)compile
it with the flags you need.

.. Note::
Detailed instructions on how to install using CMake are in the
:ref:`Python configuration <install-python>` section of the :ref:`installation guide <in_build_install>`.
CMake is recommended if you need more control over compilation and installation, plan to use Arbor with C++,
or if you are integrating with package managers such as Spack and EasyBuild.

Dependencies
^^^^^^^^^^^^

If a downstream dependency requires Arbor be built with
a specific feature enabled, use ``requirements.txt`` to
`define the constraints <https://pip.pypa.io/en/stable/reference/pip_install/#per-requirement-overrides>`_.
For example, a package that depends on `arbor` version 0.3 or later
with MPI support would add the following to its requirements:

.. code-block:: python
arbor >= 0.3 --install-option='--gpu=cuda' \
--install-option='--mpi'
pip3 install ./arbor --install-option="-DARB_GPU=hip"
Note on performance
-------------------