diff --git a/.github/workflows/build-test.yml b/.github/workflows/build-test.yml
index 80fbffb72..ed7b1cbc4 100644
--- a/.github/workflows/build-test.yml
+++ b/.github/workflows/build-test.yml
@@ -60,7 +60,7 @@ jobs:
exit 1
fi
DEMO_CMAKE="./demos/CMakeLists.txt"
- if [ $(echo $(grep "find_package(control_libraries" "${DEMO_CMAKE}") | tr -d -c 0-9) -ne "${VERSION}" ]; then
+ if [ $(echo $(grep "project(clproto VERSION" "${DEMO_CMAKE}") | tr -d -c 0-9) -ne "${VERSION}" ]; then
echo "::error file="${DEMO_CMAKE}",title=Check failed::Version in "${DEMO_CMAKE}" does not correspond to VERSION. ${MESSAGE}"
exit 1
fi
diff --git a/.github/workflows/generate-docs.yml b/.github/workflows/generate-docs.yml
index 5353ddffd..0beed7a76 100644
--- a/.github/workflows/generate-docs.yml
+++ b/.github/workflows/generate-docs.yml
@@ -1,6 +1,6 @@
name: Generate and Deploy Documentation
on:
- push:
+ push:
branches:
- main
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 15e8de962..df8aa2d74 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -13,7 +13,6 @@ Release Versions:
## Upcoming changes (in development)
-- build: remove deprecated Dockerfiles and scripts and update installation instructions (#176)
- refactor: optimize copy and swap constructor for robot model (#174)
- fix: refactor cmake project to deal with robot model dependencies (#178)
- feat: integrate minimum distance calculation feature into robot model(#167)
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 4b065887f..50d9d59cf 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -1,12 +1,8 @@
cmake_minimum_required(VERSION 3.15)
project(all_control_libraries)
-option(BUILD_PROTOCOL "Build and install the protocol" ON)
-
add_subdirectory(source)
-if(BUILD_PROTOCOL)
- add_subdirectory(protocol/clproto_cpp)
-endif()
+add_subdirectory(protocol/clproto_cpp)
if(BUILD_TESTING)
# reexport the test target defined in the subdirectories
diff --git a/Dockerfile b/Dockerfile
index eedf3983f..3e312d515 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -7,6 +7,7 @@ RUN apt-get update && apt-get install -y \
g++ \
git \
libgtest-dev \
+ libeigen3-dev \
python3-pip \
ssh \
sudo \
@@ -82,7 +83,7 @@ ARG TARGETPLATFORM
ARG CACHEID
COPY dependencies/base_dependencies.cmake CMakeLists.txt
RUN --mount=type=cache,target=/build,id=cmake-base-deps-${TARGETPLATFORM}-${CACHEID},uid=1000 \
- cmake -B build -DBUILD_TESTING=OFF -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} && cmake --build build && cmake --install build --prefix /tmp/deps
+ cmake -B build -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} && cmake --build build && cmake --install build --prefix /tmp/deps
FROM base as pinocchio-dependencies
COPY --from=apt-dependencies /tmp/apt /
@@ -125,7 +126,6 @@ FROM base as dependencies
ARG TARGETPLATFORM
ARG CACHEID
# Needed to build `osqp-eigen`
-COPY --from=apt-dependencies /tmp/apt /
COPY --from=base-dependencies /tmp/deps /usr
COPY dependencies/dependencies.cmake CMakeLists.txt
RUN --mount=type=cache,target=/build,id=cmake-deps-${TARGETPLATFORM}-${CACHEID},uid=1000 \
diff --git a/Dockerfile.base b/Dockerfile.base
new file mode 100644
index 000000000..1832f4b31
--- /dev/null
+++ b/Dockerfile.base
@@ -0,0 +1,160 @@
+ARG BASE_TAG=22.04
+FROM ubuntu:${BASE_TAG} as core-build-dependencies
+ENV DEBIAN_FRONTEND=noninteractive
+
+# install core compilation and access dependencies for building the libraries
+RUN apt-get update && apt-get install -y \
+ autoconf \
+ automake \
+ build-essential \
+ cmake \
+ curl \
+ g++ \
+ gcc \
+ git \
+ gnupg2 \
+ libtool \
+ lsb-release \
+ make \
+ pkg-config \
+ wget \
+ && apt-get clean \
+ && rm -rf /var/lib/apt/lists/*
+
+
+FROM core-build-dependencies as google-dependencies
+
+RUN apt-get update && apt-get install -y \
+ libgtest-dev \
+ && apt-get clean \
+ && rm -rf /var/lib/apt/lists/*
+
+# install gtest
+WORKDIR /tmp
+RUN mkdir gtest_build && cd gtest_build && cmake /usr/src/gtest && make -j \
+ && cp lib/* /usr/local/lib || cp *.a /usr/local/lib
+
+RUN rm -rf /tmp/* && ldconfig
+
+
+FROM core-build-dependencies as robot-model-dependencies
+
+RUN apt-get update && apt-get install -y \
+ libboost-all-dev \
+ liburdfdom-dev \
+ && apt-get clean \
+ && rm -rf /var/lib/apt/lists/*
+
+WORKDIR /tmp
+ARG EIGEN_TAG=3.4.0
+RUN wget -c https://gitlab.com/libeigen/eigen/-/archive/${EIGEN_TAG}/eigen-${EIGEN_TAG}.tar.gz -O - | tar -xz \
+ && cd eigen-${EIGEN_TAG} && mkdir build && cd build && env CXXFLAGS=-DEIGEN_MPL2_ONLY cmake .. && make install \
+ && cd ../.. && rm -r eigen-${EIGEN_TAG} || exit 1
+
+ARG OSQP_TAG=0.6.2
+RUN git clone --depth 1 -b v${OSQP_TAG} --recursive https://github.com/oxfordcontrol/osqp \
+ && cd osqp && mkdir build && cd build && cmake -G "Unix Makefiles" .. && cmake --build . --target install \
+ && cd ../.. && rm -r osqp || exit 1
+
+ARG OSQP_EIGEN_TAG=0.6.4
+RUN git clone --depth 1 -b v${OSQP_EIGEN_TAG} https://github.com/robotology/osqp-eigen.git \
+ && cd osqp-eigen && mkdir build && cd build && cmake .. && make -j && make install \
+ && cd ../.. && rm -r osqp-eigen || exit 1
+
+ARG PINOCCHIO_TAG=2.6.9
+RUN git clone --depth 1 -b v${PINOCCHIO_TAG} --recursive https://github.com/stack-of-tasks/pinocchio \
+ && cd pinocchio && mkdir build && cd build \
+ && cmake .. -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=/usr/local -DBUILD_PYTHON_INTERFACE=OFF \
+ -DBUILD_TESTING=OFF && make -j $(nproc --ignore=1) && make install && cd ../.. && rm -r pinocchio || exit 1
+
+RUN ldconfig
+
+
+FROM robot-model-dependencies as development-dependencies
+RUN apt-get update && apt-get install -y \
+ clang \
+ gdb \
+ python3 \
+ python3-dev \
+ python3-pip \
+ tar \
+ unzip \
+ && apt-get clean \
+ && rm -rf /var/lib/apt/lists/*
+
+# install python requirements
+RUN pip3 install pytest numpy setuptools pybind11
+
+# install google dependencies
+COPY --from=google-dependencies /usr/include/gtest /usr/include/gtest
+COPY --from=google-dependencies /usr/local/lib/libgtest* /usr/local/lib/
+
+
+FROM development-dependencies as proto-dependencies-20.04
+COPY --from=ghcr.io/aica-technology/control-libraries/proto-dependencies:20.04 /usr/local/include/google /usr/local/include/google
+COPY --from=ghcr.io/aica-technology/control-libraries/proto-dependencies:20.04 /usr/local/lib/libproto* /usr/local/lib/
+COPY --from=ghcr.io/aica-technology/control-libraries/proto-dependencies:20.04 /usr/local/bin/protoc /usr/local/bin
+RUN ldconfig
+
+
+FROM development-dependencies as proto-dependencies-22.04
+COPY --from=ghcr.io/aica-technology/control-libraries/proto-dependencies:22.04 /usr/local/include/google /usr/local/include/google
+COPY --from=ghcr.io/aica-technology/control-libraries/proto-dependencies:22.04 /usr/local/lib/libproto* /usr/local/lib/
+COPY --from=ghcr.io/aica-technology/control-libraries/proto-dependencies:22.04 /usr/local/bin/protoc /usr/local/bin
+RUN ldconfig
+
+
+FROM development-dependencies as proto-dependencies-latest
+COPY --from=ghcr.io/aica-technology/control-libraries/proto-dependencies:latest /usr/local/include/google /usr/local/include/google
+COPY --from=ghcr.io/aica-technology/control-libraries/proto-dependencies:latest /usr/local/lib/libproto* /usr/local/lib/
+COPY --from=ghcr.io/aica-technology/control-libraries/proto-dependencies:latest /usr/local/bin/protoc /usr/local/bin
+RUN ldconfig
+
+
+FROM proto-dependencies-${BASE_TAG} as license-information
+RUN mkdir -p /usr/share/doc/control-libraries
+COPY ./licenses /usr/share/doc/control-libraries/licenses
+
+
+FROM license-information as ssh-configuration
+
+RUN apt-get update && apt-get install -y \
+ sudo \
+ libssl-dev \
+ ssh \
+ iputils-ping \
+ rsync \
+ && apt-get clean \
+ && rm -rf /var/lib/apt/lists/*
+
+# Configure sshd server settings
+RUN ( \
+ echo 'LogLevel DEBUG2'; \
+ echo 'PubkeyAuthentication yes'; \
+ echo 'Subsystem sftp /usr/lib/openssh/sftp-server'; \
+ ) > /etc/ssh/sshd_config_development \
+ && mkdir /run/sshd
+
+ENV USER developer
+ENV HOME /home/${USER}
+
+# create and configure a new user
+ARG UID=1000
+ARG GID=1000
+RUN addgroup --gid ${GID} ${USER}
+RUN adduser --gecos "Remote User" --uid ${UID} --gid ${GID} ${USER} && yes | passwd ${USER}
+RUN usermod -a -G dialout ${USER}
+RUN echo "${USER} ALL=(ALL) NOPASSWD: ALL" > /etc/sudoers.d/99_aptget
+RUN chmod 0440 /etc/sudoers.d/99_aptget && chown root:root /etc/sudoers.d/99_aptget
+
+# Configure sshd entrypoint to authorise the new user for ssh access and
+# optionally update UID and GID when invoking the container with the entrypoint script
+COPY ./docker/sshd_entrypoint.sh /sshd_entrypoint.sh
+RUN chmod 744 /sshd_entrypoint.sh
+
+# create the credentials to be able to pull private repos using ssh
+RUN mkdir /root/.ssh/ && ssh-keyscan github.com | tee -a /root/.ssh/known_hosts
+
+RUN echo "session required pam_limits.so" | tee --append /etc/pam.d/common-session > /dev/null
+
+WORKDIR ${HOME}
diff --git a/Dockerfile.proto b/Dockerfile.proto
new file mode 100644
index 000000000..e468f892e
--- /dev/null
+++ b/Dockerfile.proto
@@ -0,0 +1,34 @@
+ARG BASE_TAG=22.04
+FROM ubuntu:${BASE_TAG} as build-stage
+ENV DEBIAN_FRONTEND=noninteractive
+
+RUN apt-get update && apt-get install -y \
+ autoconf \
+ cmake \
+ g++ \
+ gcc \
+ libtool \
+ make \
+ wget \
+ && apt-get clean \
+ && rm -rf /var/lib/apt/lists/*
+
+WORKDIR /tmp
+ARG PROTOBUF_VERSION=21.0
+RUN wget -O protobuf-cpp-"${PROTOBUF_VERSION}".tar.gz \
+    https://github.com/protocolbuffers/protobuf/releases/download/v${PROTOBUF_VERSION}/protobuf-cpp-3.${PROTOBUF_VERSION}.tar.gz \
+ && tar -xzf protobuf-cpp-"${PROTOBUF_VERSION}".tar.gz \
+ && rm protobuf-cpp-"${PROTOBUF_VERSION}".tar.gz
+
+WORKDIR /tmp/protobuf-3."${PROTOBUF_VERSION}"
+RUN ./autogen.sh \
+ && ./configure \
+ && make -j $(nproc --ignore=1) \
+ && make install
+
+
+FROM ubuntu:${BASE_TAG} as google-dependencies
+COPY --from=build-stage /usr/local/include/google /usr/local/include/google
+COPY --from=build-stage /usr/local/lib/libproto* /usr/local/lib/
+COPY --from=build-stage /usr/local/bin/protoc /usr/local/bin
+RUN ldconfig
diff --git a/README.md b/README.md
index 489562e3d..cda084a8c 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,6 @@
-
-
+
@@ -11,12 +10,44 @@
The `control-libraries` project is a collection of modules to facilitate the creation of control loop algorithms for
robotics, including trajectory planning, kinematics, dynamics and control.
-Code documentation is available at
-[aica-technology.github.io/control-libraries](https://aica-technology.github.io/control-libraries).
-
-The rolling version of the project is available on the
-[`main`](https://github.com/aica-technology/control-libraries/tree/main) branch. Refer to the
-[Releases](https://github.com/aica-technology/control-libraries/releases) page for other versions.
+Code documentation is available at
+[aica-technology.github.io/control-libraries](https://aica-technology.github.io/control-libraries).
+
+## Releases
+
+The latest stable version of the project is available on the
+[`main`](https://github.com/aica-technology/control-libraries/tree/main) branch, while the latest pre-release
+development build is available on the [`develop`](https://github.com/aica-technology/control-libraries/tree/develop) branch.
+
+Refer to the [Releases](https://github.com/aica-technology/control-libraries/releases) page for other versions.
+
+
+
## Core libraries
@@ -29,115 +60,32 @@ The core libraries are implemented in C++ and comprise the following modules:
Source code, documentation and installation instructions are available under the [source](./source) folder.
-## Protocol
-
-There is a module that defines the protocol for sending and receiving messages containing control libraries data across
-any network, based on the Google Protocol Buffer. For its implementation, installation and documentation, see the
-[protocol](./protocol) folder.
-
## Python bindings
There exist Python bindings for the control library modules and the protocol module. See the [python](./python)
folder for installation instructions.
+## Protocol
+
+There is a module that defines the protocol for sending and receiving messages containing control libraries
+data across any network, based on the Google Protocol Buffer. For its implementation, installation and
+documentation, see the [protocol](./protocol) folder.
+
## Demos
For examples and demos in C++ and Python, refer to the [demos](./demos) folder.
## Contributing
-We welcome user engagement to find bugs, resolve issues and suggest useful features. Refer to the
-[contribution guidelines](./CONTRIBUTING.md) for more information.
+We welcome user engagement to find bugs, resolve issues and suggest useful features.
+Refer to the [contribution guidelines](./CONTRIBUTING.md) for more information.
## License
-This project is provided free and open-source under the GPLv3 license. See the [licenses](./licenses) folder for more
-information.
-
-## Installation
-
-### Supported platforms
-
-These libraries have been developed and tested on Linux Ubuntu 20.04 and 22.04. They should also work on macOS and
-Windows, though the installation steps may differ. At this time no guarantees are made for library support on non-Linux
-systems.
-
-### Installation with the install script
-
-This project uses CMake to generate static library objects for each of the modules. To facilitate the installation
-process, an [install script](./install.sh) is provided.
-
-The install script takes care of all the installation steps, including the installation and configuration of all
-dependencies. It can be run with several optional arguments:
-- `-y`, `--auto`: Any input prompts will be suppressed and install steps automatically approved.
-- `-d [path]`, `--dir [path]`: If provided, the installation directory will be changed to `[path]`.
-- `--clean`: Any previously installed header files from `/usr/local/include` and any shared library files from
- `/usr/local/lib` will be deleted before the installation.
-- `--cleandir [path]`: Any previously installed header files shared library files from `[path]` will be deleted before
- the installation.
-
-### Advanced options
-
-Users who prefer to perform the installation manually and/or have already installed some dependencies can selectively
-do the steps from the install script.
-
-The CMake configuration flags for control libraries `BUILD_CONTROLLERS`, `BUILD_DYNAMICAL_SYSTEMS` and
-`BUILD_ROBOT_MODEL` determine which modules are built, and are all defined as `ON` by default. The building of the
-`state_representation` library cannot be disabled, as all other libraries depend on it. To selectively disable the build
-of a particular module, set the flag to `=OFF`. For example, the following flags will prevent the `robot_model` module
-from being built, which is useful if the Pinocchio dependency is not fulfilled on your system.
-
-```shell script
--DBUILD_ROBOT_MODEL=OFF
-```
-
-Similarly, if one is not interested in the installation of the [protocol](./protocol/README.md), it can be disabled with
-
-```shell script
--DBUILD_PROTOCOL_=OFF
-```
-
-The C++ `clproto` library requires control libraries [`state_representation`](../source/state_representation/README.md)
-and [Google Protobuf](https://github.com/protocolbuffers/protobuf/blob/master/src/README.md)
-to be installed on your computer, which includes the compiler `protoc` and the runtime library `libprotobuf.so`.
-
-To also build the tests, add the CMake flag `-DBUILD_TESTING=ON`. This requires GTest to be installed on your system.
-You can then use `make test` to run all test targets.
-
-Alternatively, you can include the source code for each library as submodules in your own CMake project, using the CMake
-directive `add_subdirectory(...)` to link it with your project.
-
-### Installation of Python bindings
-
-You must first install the C++ modules before you can install the Python bindings. Additionally, the installation of the
-bindings requires the following prerequisites:
-- `python3` >= 3.0
-- `pip3` >= 10.0.0
-
-The installation itself is then quite straightforward:
-```shell
-python3 pip install ./python
-```
-
-If the installation fails, it may be because of non-default installation directories for some dependencies. In this
-case, the include path for OSQP can be set through environment variables before the pip install.
-
-```shell
-export OSQP_INCLUDE_DIR='/path/to/include/osqp' # replace /path/to/include with installation directory
-python3 pip install ./python
-```
-
-The example above installs the module to the default dist-packages location. You can see more information about the
-installed module using `pip3 show control-libraries`.
-
-The process also works with Python virtual environments. For example, with `pipenv`:
-
-```shell script
-## pip3 install pipenv
-
-pipenv install ./python
-```
+This project is provided free and open-source under the GPLv3 license.
+See the [licenses](./licenses) folder for more information.
## External resources
+- [Docker images with control libraries installations](https://github.com/aica-technology/docker-images)
- [Modulo: an extension layer to ROS2 based on control libraries](https://github.com/aica-technology/modulo)
diff --git a/VERSION b/VERSION
index af9798417..e459e6b6a 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-7.3.14
+7.3.13
diff --git a/apt-packages.txt b/apt-packages.txt
index 2988803ab..c7033178b 100644
--- a/apt-packages.txt
+++ b/apt-packages.txt
@@ -1,4 +1,3 @@
libboost-all-dev
-libeigen3-dev
liburdfdom-dev
libassimp-dev
diff --git a/demos/CMakeLists.txt b/demos/CMakeLists.txt
index ff74d0d73..f458d610d 100644
--- a/demos/CMakeLists.txt
+++ b/demos/CMakeLists.txt
@@ -15,7 +15,7 @@ if(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_CXX_COMPILER_ID MATCHES "Clang")
add_compile_options(-Wall -Wextra -Wpedantic)
endif()
-find_package(control_libraries 7.3.14 CONFIG REQUIRED)
+find_package(control_libraries 7.3.13 CONFIG REQUIRED)
set(DEMOS_SCRIPTS
task_space_control_loop
diff --git a/doxygen/doxygen.conf b/doxygen/doxygen.conf
index ed147c4bf..27f4050e8 100644
--- a/doxygen/doxygen.conf
+++ b/doxygen/doxygen.conf
@@ -38,7 +38,7 @@ PROJECT_NAME = "Control Libraries"
# could be handy for archiving the generated documentation or if some version
# control system is used.
-PROJECT_NUMBER = 7.3.14
+PROJECT_NUMBER = 7.3.13
# Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer a
diff --git a/install.sh b/install.sh
deleted file mode 100755
index d7da83d17..000000000
--- a/install.sh
+++ /dev/null
@@ -1,86 +0,0 @@
-#!/bin/bash
-
-SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
-
-INSTALL_DESTINATION="/usr/local"
-AUTO_INSTALL=""
-
-PINOCCHIO_TAG=v2.9.0
-HPP_FCL_TAG=v1.8.1
-
-FAIL_MESSAGE="The provided input arguments are not valid.
-Run the script with the '--help' argument."
-
-HELP_MESSAGE="Usage: [sudo] ./install.sh [OPTIONS]
-
-An install script for the control libraries.
-
-Options:
- -y, --auto Suppress any input prompts and
- automatically approve install steps.
- -d, --dir [path] Configure the installation directory
- (default: ${INSTALL_DESTINATION}).
-
- --clean Delete any previously installed header
- files from /usr/local/include and any
- shared library files from /usr/local/lib.
- --cleandir [path] Delete any previously installed header
- and library files from the specified path.
-
- -h, --help Show this help message."
-
-function uninstall {
- function delete_components {
- rm -r "${INSTALL_DESTINATION}"/include/controllers
- rm -r "${INSTALL_DESTINATION}"/include/dynamical_systems
- rm -r "${INSTALL_DESTINATION}"/include/robot_model
- rm -r "${INSTALL_DESTINATION}"/include/state_representation
- rm -r "${INSTALL_DESTINATION}"/lib/libcontrollers*.so
- rm -r "${INSTALL_DESTINATION}"/lib/libdynamical_systems*.so
- rm -r "${INSTALL_DESTINATION}"/lib/librobot_model*.so
- rm -r "${INSTALL_DESTINATION}"/lib/libstate_representation*.so
- rm -r "${INSTALL_DESTINATION}"/include/clproto
- rm -r "${INSTALL_DESTINATION}"/lib/libclproto*.so
- }
-
- delete_components >/dev/null 2>&1
-
- echo "Deleted any control library artefacts from ${INSTALL_DESTINATION}."
-}
-
-while [ "$#" -gt 0 ]; do
- case "$1" in
- -y|--auto) AUTO_INSTALL="-y"; shift 1;;
- --clean) uninstall; exit 0;;
- --cleandir) INSTALL_DESTINATION=$2; uninstall; exit 0;;
- -d|--dir) INSTALL_DESTINATION=$2; shift 2;;
- -h|--help) echo "$HELP_MESSAGE"; exit 0;;
- -*) echo "Unknown option: $1" >&2; echo "$FAIL_MESSAGE"; exit 1;;
- esac
-done
-
-mkdir -p "${SCRIPT_DIR}"/tmp || exit 1
-
-echo ">>> INSTALLING DEPENDENCIES"
-
-apt update
-xargs -a <(awk '! /^ *(#|$)/' "${SCRIPT_DIR}/apt-packages.txt") -r -- apt install "${AUTO_INSTALL}"
-
-cd "${SCRIPT_DIR}"/tmp
-cp "${SCRIPT_DIR}"/dependencies/base_dependencies.cmake CMakeLists.txt || exit 1
-cmake -B build -DBUILD_TESTING=OFF -DCMAKE_BUILD_TYPE=Release && cmake --build build && cmake --install build || exit 1
-rm -rf build
-git clone --depth 1 -b ${HPP_FCL_TAG} --recursive https://github.com/humanoid-path-planner/hpp-fcl || exit 1
-cmake -B build -S hpp-fcl -DBUILD_TESTING=OFF -DCMAKE_BUILD_TYPE=Release -DBUILD_PYTHON_INTERFACE=OFF && cmake --build build --target all install || exit 1
-rm -rf build
-git clone --depth 1 -b ${PINOCCHIO_TAG} --recursive https://github.com/stack-of-tasks/pinocchio
-cmake -B build -S pinocchio -DBUILD_TESTING=OFF -DCMAKE_BUILD_TYPE=Release -DBUILD_PYTHON_INTERFACE=OFF -DBUILD_WITH_COLLISION_SUPPORT=ON && cmake --build build --target all install || exit 1
-rm -rf build
-cp "${SCRIPT_DIR}"/dependencies/dependencies.cmake CMakeLists.txt || exit 1
-cmake -B build -Dprotobuf_BUILD_TESTS=OFF -DCMAKE_BUILD_TYPE=Release && cmake --build build && cmake --install build || exit 1
-rm -rf build
-
-echo ">>> INSTALLING CONTROL LIBRARIES"
-cd "${SCRIPT_DIR}" && rm -rf "${SCRIPT_DIR}"/tmp
-cmake -B build -DCMAKE_BUILD_TYPE=Release && cmake --build build && cmake --install build --prefix "${INSTALL_DESTINATION}" || exit 1
-rm -rf build
diff --git a/protocol/Dockerfile.protocol b/protocol/Dockerfile.protocol
new file mode 100644
index 000000000..bf99aeea9
--- /dev/null
+++ b/protocol/Dockerfile.protocol
@@ -0,0 +1,25 @@
+ARG BASE_TAG=latest
+FROM ghcr.io/aica-technology/control-libraries/development-dependencies:${BASE_TAG} as source
+ARG BRANCH=develop
+
+WORKDIR /source
+RUN git clone --depth 1 --branch ${BRANCH} https://github.com/aica-technology/control-libraries
+RUN bash control-libraries/source/install.sh --auto --no-controllers --no-dynamical-systems --no-robot-model
+
+WORKDIR /tmp/protocol
+COPY ./ ./
+
+WORKDIR /tmp/protocol/protobuf
+RUN make all
+
+
+FROM source as build
+
+WORKDIR /tmp/protocol/clproto_cpp/build
+RUN cmake -DBUILD_TESTING=ON .. && make -j all && make install
+
+
+FROM build as testing
+
+RUN CTEST_OUTPUT_ON_FAILURE=1 make test
+RUN rm -rf /tmp/protocol/
diff --git a/protocol/README.md b/protocol/README.md
index df8792329..3d4b71373 100644
--- a/protocol/README.md
+++ b/protocol/README.md
@@ -18,3 +18,34 @@ message formats hidden behind a neat header file. The API
allows compatible objects to be easily encoded into a serialized
binary string for transmitting across. Equivalently, well-formatted
string messages can be decoded back into the equivalent objects.
+
+## Installation
+
+The C++ `clproto` library requires control libraries [`state_representation`](../source/state_representation/README.md)
+and [Google Protobuf](https://github.com/protocolbuffers/protobuf/blob/master/src/README.md)
+to be installed on your computer, which includes the compiler `protoc` and the runtime library `libprotobuf.so`.
+
+An [install script](./install.sh) is provided in this directory. Run `./install.sh -h` for more information.
+
+### Automatic dependency installation
+
+The easiest way to install `clproto` is to use the included install script in automatic mode.
+When the `--auto` flag is supplied, the script automatically and recursively installs any dependencies.
+If Protobuf is not yet installed, this step will take some time.
+```shell
+git clone https://github.com/aica-technology/control-libraries.git
+sudo control-libraries/protocol/install.sh --auto
+```
+
+### Copying protobuf dependencies
+
+If you are using Docker, the Protobuf dependencies are already built in the
+`ghcr.io/aica-technology/control-libraries/development-dependencies` image.
+Since building and installing Protobuf from source takes quite a long time, you can instead copy the final artefacts
+from this image into your image using docker `COPY` functionality:
+
+```Dockerfile
+COPY --from=ghcr.io/aica-technology/control-libraries/development-dependencies:latest /usr/local/include/google /usr/local/include/google
+COPY --from=ghcr.io/aica-technology/control-libraries/development-dependencies:latest /usr/local/lib/libproto* /usr/local/lib
+COPY --from=ghcr.io/aica-technology/control-libraries/development-dependencies:latest /usr/local/bin/protoc /usr/local/bin
+RUN ldconfig
+```
diff --git a/protocol/build-test.sh b/protocol/build-test.sh
new file mode 100755
index 000000000..21eb57f13
--- /dev/null
+++ b/protocol/build-test.sh
@@ -0,0 +1,43 @@
+#!/usr/bin/env bash
+
+BASE_TAG="latest"
+
+BRANCH=$(git branch --show-current)
+
+HELP_MESSAGE="Usage: build-test.sh [-b ] [--base-tag ] [-r] [-v]
+Options:
+ -b, --branch Specify the branch of control libraries
+ that should be used to build the image.
+ --base-tag Tag of the development image.
+ (default: ${BASE_TAG})
+ -r, --rebuild Rebuild the image using the docker
+ --no-cache option.
+ -v, --verbose Use the verbose option during the building
+ process.
+ -h, --help Show this help message.
+"
+
+BUILD_FLAGS=()
+while [[ $# -gt 0 ]]; do
+ opt="$1"
+ case $opt in
+ -b|--branch) BRANCH=$2; shift 2;;
+ --base-tag) BASE_TAG=$2; shift 2;;
+ -r|--rebuild) BUILD_FLAGS+=(--no-cache); shift ;;
+ -v|--verbose) BUILD_FLAGS+=(--progress=plain); shift ;;
+ -h|--help) echo "${HELP_MESSAGE}"; exit 0 ;;
+ *) echo 'Error in command line parsing' >&2
+ echo -e "\n${HELP_MESSAGE}"
+ exit 1
+ esac
+done
+
+IMAGE_NAME=aica-technology/control-libraries/protocol/testing:"${BASE_TAG}"
+BUILD_FLAGS+=(--build-arg BASE_TAG="${BASE_TAG}")
+BUILD_FLAGS+=(-t "${IMAGE_NAME}")
+
+echo "Using control libraries branch ${BRANCH}"
+BUILD_FLAGS+=(--build-arg BRANCH="${BRANCH}")
+
+docker pull ghcr.io/aica-technology/control-libraries/development-dependencies:"${BASE_TAG}" || exit 1
+DOCKER_BUILDKIT=1 docker build . --file ./Dockerfile.protocol "${BUILD_FLAGS[@]}"
diff --git a/protocol/clproto_cpp/CMakeLists.txt b/protocol/clproto_cpp/CMakeLists.txt
index 979dc8547..5a23383c4 100644
--- a/protocol/clproto_cpp/CMakeLists.txt
+++ b/protocol/clproto_cpp/CMakeLists.txt
@@ -1,6 +1,6 @@
cmake_minimum_required(VERSION 3.15)
-project(clproto VERSION 7.3.14)
+project(clproto VERSION 7.3.13)
# Default to C99
if(NOT CMAKE_C_STANDARD)
diff --git a/protocol/dev-server.sh b/protocol/dev-server.sh
new file mode 100755
index 000000000..b004081ea
--- /dev/null
+++ b/protocol/dev-server.sh
@@ -0,0 +1,90 @@
+#!/usr/bin/env bash
+
+BASE_TAG="latest"
+
+CONTAINER_NAME=aica-technology-control-libraries-protocol-development-ssh
+BRANCH=$(git branch --show-current)
+
+SSH_PORT=2244
+SSH_KEY_FILE="${HOME}/.ssh/id_rsa.pub"
+
+HELP_MESSAGE="Usage: ./dev-server.sh [-b ] [-p ] [-k ] [--base-tag ] [-r]
+
+Build and run a docker container as an SSH toolchain server for remote development.
+
+The server is bound to the specified port on localhost (127.0.0.1)
+and uses passwordless RSA key-pair authentication. The host public key
+is read from the specified key file and copied to the server on startup.
+On linux hosts, the UID and GID of the specified user will also be
+set to match the UID and GID of the host user by the entry script.
+
+The server will run in the background as ${CONTAINER_NAME}.
+
+You can connect with 'ssh developer@localhost -p <port>'.
+
+Close the server with 'docker stop ${CONTAINER_NAME}'.
+
+Options:
+ -b, --branch Specify the branch of control libraries
+ that should be used to build the image.
+ -p, --port Specify the port to bind for SSH
+ connection.
+ (default: ${SSH_PORT})
+ -k, --key-file Specify the path of the RSA
+ public key file.
+ (default: ${SSH_KEY_FILE})
+ --base-tag Tag of the development image.
+ (default: ${BASE_TAG})
+ -r, --rebuild Rebuild the image with the --no-cache option.
+ -h, --help Show this help message."
+
+BUILD_FLAGS=()
+while [ "$#" -gt 0 ]; do
+ case "$1" in
+ -b|--branch) BRANCH=$2; shift 2;;
+ -p|--port) SSH_PORT=$2; shift 2;;
+ -k|--key-file) SSH_KEY_FILE=$2; shift 2;;
+ --base-tag) BASE_TAG=$2; shift 2;;
+ -h|--help) echo "${HELP_MESSAGE}"; exit 0;;
+ -r|--rebuild) BUILD_FLAGS+=(--no-cache); shift 1;;
+ *) echo 'Error in command line parsing' >&2
+ echo -e "\n${HELP_MESSAGE}"
+ exit 1
+ esac
+done
+
+PUBLIC_KEY=$(cat "${SSH_KEY_FILE}")
+
+COMMAND_FLAGS=()
+COMMAND_FLAGS+=(--key "${PUBLIC_KEY}")
+COMMAND_FLAGS+=(--user developer)
+
+if [[ "${OSTYPE}" != "darwin"* ]]; then
+ USER_ID=$(id -u "${USER}")
+ GROUP_ID=$(id -g "${USER}")
+ COMMAND_FLAGS+=(--uid "${USER_ID}")
+ COMMAND_FLAGS+=(--gid "${GROUP_ID}")
+fi
+
+IMAGE_NAME=aica-technology/control-libraries/protocol/development:"${BASE_TAG}"
+BUILD_FLAGS+=(--build-arg BASE_TAG="${BASE_TAG}")
+BUILD_FLAGS+=(-t "${IMAGE_NAME}")
+
+echo "Using control libraries branch ${BRANCH}"
+BUILD_FLAGS+=(--build-arg BRANCH="${BRANCH}")
+BUILD_FLAGS+=(--target source)
+docker pull ghcr.io/aica-technology/control-libraries/development-dependencies:"${BASE_TAG}" || exit 1
+DOCKER_BUILDKIT=1 docker build . --file ./Dockerfile.protocol "${BUILD_FLAGS[@]}" || exit 1
+
+docker container stop "${CONTAINER_NAME}" >/dev/null 2>&1
+docker rm --force "${CONTAINER_NAME}" >/dev/null 2>&1
+
+echo "Starting background container with access port ${SSH_PORT} for user developer"
+docker run -d --rm --cap-add sys_ptrace \
+ --user root \
+ --publish 127.0.0.1:"${SSH_PORT}":22 \
+ --name "${CONTAINER_NAME}" \
+ --hostname "${CONTAINER_NAME}" \
+ "${IMAGE_NAME}" /sshd_entrypoint.sh "${COMMAND_FLAGS[@]}"
+
+echo "${CONTAINER_NAME}"
diff --git a/protocol/install.sh b/protocol/install.sh
new file mode 100755
index 000000000..476741229
--- /dev/null
+++ b/protocol/install.sh
@@ -0,0 +1,177 @@
+#!/bin/bash
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+PROTOBUF_DIR="${SCRIPT_DIR}"/protobuf
+CLPROTO_DIR="${SCRIPT_DIR}"/clproto_cpp
+
+INSTALL_DESTINATION="/usr/local"
+BUILD_TESTING="OFF"
+AUTO_INSTALL=""
+BINDINGS_ONLY=false
+PROTOBUF_VERSION="3.17.0"
+
+HELP_MESSAGE="Usage: [sudo] ./install.sh [OPTIONS]
+
+An install script for the clproto library.
+
+Options:
+ -y, --auto Suppress any input prompts and
+ automatically approve install steps.
+
+ --bindings-only Only generate the protobuf bindings
+ without building or installing clproto.
+
+ -d, --dir [path] Configure the installation directory
+ (default: ${INSTALL_DESTINATION}).
+
+ --build-tests Build the unittest targets.
+
+ --clean-bindings Clean any previously generated protobuf
+ bindings.
+
+ --clean Delete any previously installed header
+ files from ${INSTALL_DESTINATION}/include and any
+ shared library files from ${INSTALL_DESTINATION}/lib.
+
+ --cleandir [path] Delete any previously installed header
+ and library files from the specified path.
+
+ -h, --help Show this help message."
+
+function make_bindings() {
+ cd "${PROTOBUF_DIR}" && make all || exit 1
+}
+
+function clean_bindings() {
+ cd "${PROTOBUF_DIR}" && make clean
+}
+
+function install_protobuf() {
+ echo ">>> INSTALLING PROTOBUF DEPENDENCIES"
+ apt-get update && apt-get install "${AUTO_INSTALL}" autoconf automake libtool curl make g++ unzip || exit 1
+
+ mkdir -p "${SCRIPT_DIR}"/install
+ cd "${SCRIPT_DIR}"/install || exit 1
+ wget -O protobuf-cpp-"${PROTOBUF_VERSION}".tar.gz \
+    https://github.com/protocolbuffers/protobuf/releases/download/v"${PROTOBUF_VERSION}"/protobuf-cpp-"${PROTOBUF_VERSION}".tar.gz &&
+ tar -xzf protobuf-cpp-"${PROTOBUF_VERSION}".tar.gz &&
+ rm protobuf-cpp-"${PROTOBUF_VERSION}".tar.gz
+
+ cd "${SCRIPT_DIR}"/install/protobuf-"${PROTOBUF_VERSION}" || exit 1
+ ./autogen.sh && ./configure && make && make install || exit 1
+ ldconfig
+}
+
+function install_state_representation() {
+ echo ">> INSTALLING CONTROL LIBRARY DEPENDENCIES"
+ mkdir -p "${SCRIPT_DIR}"/install
+ cd "${SCRIPT_DIR}"/install || exit 1
+ CL_INSTALL_SCRIPT="$(dirname "${SCRIPT_DIR}")"/source/install.sh
+ if [ -f "${CL_INSTALL_SCRIPT}" ]; then
+ bash "${CL_INSTALL_SCRIPT}" --no-controllers --no-dynamical-systems --no-robot-model "${AUTO_INSTALL}"
+ else
+ echo ">>> INSTALL SCRIPT NOT FOUND: ${CL_INSTALL_SCRIPT}!"
+ exit 1
+ fi
+ ldconfig
+}
+
+function install_clproto() {
+ cd "${CLPROTO_DIR}" && mkdir -p build && cd build || exit 1
+
+ cmake -DCMAKE_BUILD_TYPE=Release -DBUILD_TESTING="${BUILD_TESTING}" \
+ -DCMAKE_INSTALL_PREFIX="${INSTALL_DESTINATION}" .. || exit 1
+
+ make -j && make install || exit 1
+ ldconfig
+}
+
+function uninstall() {
+ function delete_components() {
+ rm -r "${INSTALL_DESTINATION}"/include/clproto
+ rm -r "${INSTALL_DESTINATION}"/lib/libclproto*.so
+ clean_bindings
+ }
+
+ delete_components >/dev/null 2>&1
+
+ echo "Deleted any clproto artefacts from ${INSTALL_DESTINATION} and ${PROTOBUF_DIR}."
+}
+
+while [ "$#" -gt 0 ]; do
+ case "$1" in
+ -y | --auto)
+ AUTO_INSTALL="-y"
+ shift 1
+ ;;
+ -d | --dir)
+ INSTALL_DESTINATION=$2
+ shift 2
+ ;;
+ --bindings-only)
+ BINDINGS_ONLY=true
+ shift 1
+ ;;
+ --build-tests)
+ BUILD_TESTING="ON"
+ shift 1
+ ;;
+ --clean-bindings)
+ clean_bindings
+ exit 0
+ ;;
+ --clean)
+ uninstall
+ exit 0
+ ;;
+ --cleandir)
+ INSTALL_DESTINATION=$2
+ uninstall
+ exit 0
+ ;;
+ -h | --help)
+ echo "${HELP_MESSAGE}"
+ exit 0
+ ;;
+
+ -*)
+ echo "Unknown option: $1" >&2
+ echo "${FAIL_MESSAGE}"
+ exit 1
+ ;;
+ esac
+done
+
+if ! [ -x "$(command -v protoc)" ]; then
+ echo ">>> PROTOC NOT FOUND"
+ install_protobuf || exit 1
+fi
+
+echo ">>> GENERATING PROTOBUF BINDINGS"
+make_bindings || exit 1
+
+if [ "${BINDINGS_ONLY}" == true ]; then
+ echo ">>> DONE!"
+ exit 0
+fi
+
+PROTOBUF_INSTALL=$(ldconfig -p | grep libprotobuf)
+if [ -z "${PROTOBUF_INSTALL}" ]; then
+ echo ">>> LIBPROTOBUF NOT FOUND"
+ install_protobuf || exit 1
+fi
+
+if [ -z $(which pkg-config) ]; then
+ echo ">>> INSTALLING pkg-config tool"
+ apt-get update && apt-get install "${AUTO_INSTALL}" pkg-config || exit 1
+fi
+
+pkg-config state_representation --atleast-version=$(cat "$(dirname "${SCRIPT_DIR}")"/VERSION)
+if [ "$?" != 0 ]; then
+ echo ">>> STATE REPRESENTATION LIBRARY NOT FOUND!"
+ install_state_representation
+fi
+
+echo ">>> INSTALLING CLPROTO"
+install_clproto || exit 1
+
+echo ">>> DONE"
diff --git a/python/Dockerfile.python b/python/Dockerfile.python
new file mode 100644
index 000000000..4b3753478
--- /dev/null
+++ b/python/Dockerfile.python
@@ -0,0 +1,28 @@
+ARG BASE_TAG=latest
+FROM ghcr.io/aica-technology/control-libraries/development-dependencies:${BASE_TAG} as source
+ARG BRANCH=develop
+
+WORKDIR /source
+RUN git clone --depth 1 --branch ${BRANCH} https://github.com/aica-technology/control-libraries
+RUN bash control-libraries/source/install.sh --auto
+RUN bash control-libraries/protocol/install.sh --auto
+RUN rm -rf control-libraries
+
+FROM source as build
+
+USER developer
+WORKDIR ${HOME}/python
+# copy these files separately because otherwise any changes in the test directory would trigger the install again
+COPY include include
+COPY source source
+COPY pyproject.toml setup.py ./
+ENV OSQP_INCLUDE_DIR /usr/local/include/osqp
+RUN pip3 install .
+
+
+FROM build as testing
+
+COPY test test
+RUN pytest
+
+CMD ["/bin/bash"]
diff --git a/python/README.md b/python/README.md
index fe5bbdc16..35f17a059 100644
--- a/python/README.md
+++ b/python/README.md
@@ -2,31 +2,47 @@
This directory defines Python bindings for the control libraries.
-## Structure
+## Installation
-[PyBind11](https://PyBind11.readthedocs.io/en/stable/index.html) is used to generate
-Python bindings for the classes and functions in control libraries.
+You must first install `control-libraries` before you can install the Python bindings.
+Refer to the installation instructions in the top-level [README](../README.md) for more information.
-The generated package is named `control-libraries`, but contains specific modules for importing.
-These are named the same as the standard modules of control libraries (e.g. `state_representation`).
+Additionally, the installation of the bindings requires the following prerequisites:
+- `python3` >= 3.0
+- `pip3` >= 10.0.0
-The contents of the [`source`](./source) directory define the bindings between
-each Python module and the respective C++ library. The source files to bind each module are
-contained within a subdirectory of the same name.
+The installation itself is then quite straightforward:
+```shell
+git clone https://github.com/aica-technology/control-libraries
-The `setup.py` and `pyproject.toml` files are used to configure the build and installation
-of the Python bindings. The `.toml` file allows `pip` to automatically fetch the
-installation dependencies (namely `setuptools` and `pybind11`) in a temporary cache,
-allowing the subsequent `setup.py` to be evaluated without needing a local installation of `pybind11`.
-This feature requires a [`pip`](https://pypi.org/project/pip/) version 10.0 or newer.
+## install control-libraries (skip this stage if already done)
+sudo control-libraries/source/install.sh
-The [`test`](./test) directory contains some Python scripts that import and check the bindings
-using the Python `unittest` and `pytest` frameworks. They are not currently comprehensive.
+## install protocol (skip this stage if already done or not desired)
+sudo control-libraries/protocol/install.sh
+
+## install the bindings using the pip installer
+pip3 install control-libraries/python
+```
-## Usage
+If the installation fails, it may be because of non-default installation directories for some dependencies.
+In this case, the include path for OSQP can be set through environment variables before the pip install.
+```shell
+export OSQP_INCLUDE_DIR='/path/to/include/osqp' # default /usr/local/include/osqp
+pip3 install control-libraries/python
+```
+
+The example above installs the module to the default dist-packages location.
+You can see more information about the installed module using `pip3 show control-libraries`.
-You can import the modules with an optional short alias:
+The process also works with Python virtual environments. For example, with `pipenv`:
+```shell script
+## pip3 install pipenv
+pipenv install control-libraries/python
+```
+
+Once installed, you can simply import the module with an optional short alias:
```python
#!/usr/bin/env python
import state_representation as sr
@@ -40,7 +56,6 @@ print(A)
```
Or, directly import specific classes from the module.
-
```python
#!/usr/bin/env python
from state_representation import JointState
@@ -50,8 +65,8 @@ B = JointState().Random("B", 3)
ds = create_cartesian_ds(DYNAMICAL_SYSTEM_TYPE.POINT_ATTRACTOR)
```
-The `clproto` Python module can be used to encode and decode objects into bytes of serialized data.
-
+If the `clproto` C++ library is installed, the installation steps above will automatically install the `clproto`
+Python module which can be used to encode and decode objects into bytes of serialized data.
```python
#!/usr/bin/env python
from state_representation import JointState
@@ -62,3 +77,42 @@ encoded_msg = clproto.encode(B, clproto.MessageType.JOINT_STATE_MESSAGE)
decoded_object = clproto.decode(encoded_msg)
```
+
+## About
+
+[PyBind11](https://PyBind11.readthedocs.io/en/stable/index.html) is used to generate
+Python bindings for the classes and functions in control libraries.
+
+The generated package is named `control-libraries`, but contains specific modules for importing.
+These are named the same as the standard modules of control libraries (e.g. `state_representation`).
+
+The contents of the [`source`](./source) directory define the bindings between
+each Python module and the respective C++ library. The source files to bind each module are
+contained within a subdirectory of the same name.
+
+The `setup.py` and `pyproject.toml` files are used to configure the build and installation
+of the Python bindings. The `.toml` file allows `pip` to automatically fetch the
+installation dependencies (namely `setuptools` and `pybind11`) in a temporary cache,
+allowing the subsequent `setup.py` to be evaluated without needing a local installation of `pybind11`.
+This feature requires a [`pip`](https://pypi.org/project/pip/) version 10.0 or newer.
+
+The [`test`](./test) directory contains some Python scripts that import and check the bindings
+using the Python `unittest` framework. They are not currently comprehensive.
+
+## Dockerfile
+
+A Dockerfile and build script are provided to test the installation of the bindings.
+
+The docker image installs the core control libraries and subsequently installs the python bindings.
+
+The [`build-test.sh`](./build-test.sh) script will build the docker image and launch an interactive container
+with the test files in the [`test`](./test) directory copied to the local path.
+
+The build script tries to clone the current local git branch when installing the control libraries
+in the Dockerfile. If the local branch does not exist on the remote, or if you want to test the
+python bindings against a different control libraries source branch, you can supply a specific
+branch with the `-b` option. For example, `./build-test.sh -b develop` uses the `develop` branch.
+
+You can run a single test with `python <test_file>`, or just enter a python shell with `python`.
+Run all tests with `python -m unittest discover <test_dir>`, or just `python -m unittest` if
+the [`test`](./test) directory is in your current working directory.
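+
+A minimal sketch of that workflow, using the `develop` branch as an example (inside the container the
+interpreter may be available as `python3` rather than `python`):
+```shell
+./build-test.sh -b develop
+# then, inside the launched interactive container:
+python3 -m unittest discover test
+```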
diff --git a/python/build-test.sh b/python/build-test.sh
new file mode 100755
index 000000000..fd84936af
--- /dev/null
+++ b/python/build-test.sh
@@ -0,0 +1,48 @@
+#!/usr/bin/env bash
+
+BASE_TAG="latest"
+
+BRANCH=$(git branch --show-current)
+
+HELP_MESSAGE="Usage: build-test.sh [-b ] [--base-tag ] [-r] [-v]
+Options:
+ -b, --branch Specify the branch of control libraries
+ that should be used to build the image.
+ --base-tag Tag of the development image.
+ (default: ${BASE_TAG})
+ -r, --rebuild Rebuild the image using the docker
+ --no-cache option.
+ -v, --verbose Use the verbose option during the building
+ process.
+ -h, --help Show this help message.
+"
+
+BUILD_FLAGS=()
+while [[ $# -gt 0 ]]; do
+ opt="$1"
+ case $opt in
+ -b|--branch) BRANCH=$2; shift 2;;
+ --base-tag) BASE_TAG=$2; shift 2;;
+ -r|--rebuild) BUILD_FLAGS+=(--no-cache); shift ;;
+ -v|--verbose) BUILD_FLAGS+=(--progress=plain); shift ;;
+ -h|--help) echo "${HELP_MESSAGE}"; exit 0 ;;
+ *) echo 'Error in command line parsing' >&2
+ echo -e "\n${HELP_MESSAGE}"
+ exit 1
+ esac
+done
+
+IMAGE_NAME=aica-technology/control-libraries/python/testing:"${BASE_TAG}"
+BUILD_FLAGS+=(--build-arg BASE_TAG="${BASE_TAG}")
+BUILD_FLAGS+=(-t "${IMAGE_NAME}")
+
+echo "Using control libraries branch ${BRANCH}"
+BUILD_FLAGS+=(--build-arg BRANCH="${BRANCH}")
+
+docker pull ghcr.io/aica-technology/control-libraries/development-dependencies:"${BASE_TAG}" || exit 1
+DOCKER_BUILDKIT=1 docker build . --file ./Dockerfile.python "${BUILD_FLAGS[@]}" || exit 1
+
+docker run -it --rm \
+ --volume "$(pwd)":/home/developer/python:rw \
+ --name "${IMAGE_NAME//[\/.:]/-}" \
+ "${IMAGE_NAME}"
diff --git a/python/dev-server.sh b/python/dev-server.sh
new file mode 100755
index 000000000..733025d15
--- /dev/null
+++ b/python/dev-server.sh
@@ -0,0 +1,95 @@
+#!/usr/bin/env bash
+
+BASE_TAG="latest"
+
+CONTAINER_NAME=aica-technology-control-libraries-python-development-ssh
+BRANCH=$(git branch --show-current)
+
+SSH_PORT=2233
+SSH_KEY_FILE="${HOME}/.ssh/id_rsa.pub"
+
+HELP_MESSAGE="Usage: ./dev-server.sh [-b ] [-p ] [-k ] [--base-tag ] [-r] [-v]
+
+Build and run a docker container as an SSH toolchain server for remote development.
+
+The server is bound to the specified port on localhost (127.0.0.1)
+and uses passwordless RSA key-pair authentication. The host public key
+is read from the specified key file and copied to the server on startup.
+On linux hosts, the UID and GID of the specified user will also be
+set to match the UID and GID of the host user by the entry script.
+
+The server will run in the background as ${CONTAINER_NAME}.
+
+You can connect with 'ssh developer@localhost -p <port>'.
+
+Close the server with 'docker stop ${CONTAINER_NAME}'.
+
+Options:
+ -b, --branch Specify the branch of control libraries
+ that should be used to build the image.
+ -p, --port Specify the port to bind for SSH
+ connection.
+ (default: ${SSH_PORT})
+ -k, --key-file [path] Specify the path of the RSA
+ public key file.
+ (default: ${SSH_KEY_FILE})
+ --base-tag Tag of the development image.
+ (default: ${BASE_TAG})
+ -r, --rebuild Rebuild the image using the docker
+ --no-cache option.
+ -v, --verbose Use the verbose option during the building
+ process.
+ -h, --help Show this help message."
+
+BUILD_FLAGS=()
+while [ "$#" -gt 0 ]; do
+ case "$1" in
+ -p|--port) SSH_PORT=$2; shift 2;;
+ -b|--branch) BRANCH=$2; shift 2;;
+ -k|--key-file) SSH_KEY_FILE=$2; shift 2;;
+ --base-tag) BASE_TAG=$2; shift 2;;
+ -r|--rebuild) BUILD_FLAGS+=(--no-cache); shift ;;
+ -v|--verbose) BUILD_FLAGS+=(--progress=plain); shift ;;
+ -h|--help) echo "${HELP_MESSAGE}"; exit 0;;
+ *) echo 'Error in command line parsing' >&2
+ echo -e "\n${HELP_MESSAGE}"
+ exit 1
+ esac
+done
+
+PUBLIC_KEY=$(cat "${SSH_KEY_FILE}")
+
+COMMAND_FLAGS=()
+COMMAND_FLAGS+=(--key "${PUBLIC_KEY}")
+COMMAND_FLAGS+=(--user developer)
+
+if [[ "${OSTYPE}" != "darwin"* ]]; then
+ USER_ID=$(id -u "${USER}")
+ GROUP_ID=$(id -g "${USER}")
+ COMMAND_FLAGS+=(--uid "${USER_ID}")
+ COMMAND_FLAGS+=(--gid "${GROUP_ID}")
+fi
+
+IMAGE_NAME=aica-technology/control-libraries/python/development:"${BASE_TAG}"
+BUILD_FLAGS+=(--build-arg BASE_TAG="${BASE_TAG}")
+BUILD_FLAGS+=(-t "${IMAGE_NAME}")
+
+echo "Using control libraries branch ${BRANCH}"
+BUILD_FLAGS+=(--build-arg BRANCH="${BRANCH}")
+BUILD_FLAGS+=(--target build)
+
+docker pull ghcr.io/aica-technology/control-libraries/development-dependencies:"${BASE_TAG}" || exit 1
+DOCKER_BUILDKIT=1 docker build . --file ./Dockerfile.python "${BUILD_FLAGS[@]}" || exit 1
+
+docker container stop "${CONTAINER_NAME}" >/dev/null 2>&1
+docker rm --force "${CONTAINER_NAME}" >/dev/null 2>&1
+
+echo "Starting background container with access port ${SSH_PORT} for user developer"
+docker run -d --rm --cap-add sys_ptrace \
+ --user root \
+ --publish 127.0.0.1:"${SSH_PORT}":22 \
+ --name "${CONTAINER_NAME}" \
+ --hostname "${CONTAINER_NAME}" \
+ "${IMAGE_NAME}" /sshd_entrypoint.sh "${COMMAND_FLAGS[@]}"
+
+echo "${CONTAINER_NAME}"
diff --git a/python/setup.py b/python/setup.py
index 7f98db757..103d59e5e 100644
--- a/python/setup.py
+++ b/python/setup.py
@@ -11,7 +11,7 @@
# names of the environment variables that define osqp and openrobots include directories
osqp_path_var = 'OSQP_INCLUDE_DIR'
-__version__ = "7.3.14"
+__version__ = "7.3.13"
__libraries__ = ['state_representation', 'clproto', 'controllers', 'dynamical_systems', 'robot_model']
__include_dirs__ = ['include']
@@ -48,11 +48,8 @@
raise Exception(msg)
if __install_robot_model_module__:
- if osqp_path_var in os.environ.keys():
- __include_dirs__.append(os.environ[osqp_path_var])
- else:
- __include_dirs__.append('/usr/local/include/osqp')
- __include_dirs__.append('/usr/include/osqp')
+ osqp_path = os.environ[osqp_path_var] if osqp_path_var in os.environ.keys() else '/usr/include/osqp'
+ __include_dirs__.append(osqp_path)
if __install_controllers_module__ and not __install_robot_model_module__:
warnings.warn(
diff --git a/source/CMakeLists.txt b/source/CMakeLists.txt
index 5403bd8e4..7ee5c194f 100644
--- a/source/CMakeLists.txt
+++ b/source/CMakeLists.txt
@@ -1,6 +1,6 @@
cmake_minimum_required(VERSION 3.15)
-project(control_libraries VERSION 7.3.14)
+project(control_libraries VERSION 7.3.13)
# Build options
option(BUILD_TESTING "Build all tests." OFF)
diff --git a/source/Dockerfile.source b/source/Dockerfile.source
new file mode 100644
index 000000000..4c770b5a7
--- /dev/null
+++ b/source/Dockerfile.source
@@ -0,0 +1,21 @@
+ARG BASE_TAG=latest
+FROM ghcr.io/aica-technology/control-libraries/development-dependencies:${BASE_TAG} as build
+ARG BUILD_CONTROLLERS=ON
+ARG BUILD_DYNAMICAL_SYSTEMS=ON
+ARG BUILD_ROBOT_MODEL=ON
+
+WORKDIR /tmp/control_lib
+COPY ./ ./
+
+WORKDIR /tmp/control_lib/build
+RUN cmake -DBUILD_CONTROLLERS="${BUILD_CONTROLLERS}" \
+ -DBUILD_DYNAMICAL_SYSTEMS="${BUILD_DYNAMICAL_SYSTEMS}" \
+ -DBUILD_ROBOT_MODEL="${BUILD_ROBOT_MODEL}" \
+ -DBUILD_TESTING=ON .. \
+ && make -j all && make install
+
+
+FROM build as testing
+
+RUN CTEST_OUTPUT_ON_FAILURE=1 make test
+RUN rm -rf /tmp/control_lib/
diff --git a/source/README.md b/source/README.md
index 494ad35e6..e2d7f9f29 100644
--- a/source/README.md
+++ b/source/README.md
@@ -49,6 +49,81 @@ Dependencies: `state_representation`, `robot_model`
---
+
+## Installation
+
+### Supported platforms
+
+These libraries have been developed and tested on Linux Ubuntu 20.04 and 22.04.
+They should also work on macOS and Windows, though the installation
+steps may differ. At this time no guarantees are made for library support on
+non-Linux systems.
+
+### Installation with the install script
+This project uses CMake to generate static library objects for each of the modules.
+
+To facilitate the installation process, an [install script](./install.sh) is provided. Users who prefer to perform
+the installation manually and/or have already installed Pinocchio can refer to the
+[manual installation steps](#manual-installation-steps) in the next section.
+
+The install script takes care of all the installation steps, including the installation and configuration of Pinocchio.
+It can be run with several optional arguments:
+- `-y`, `--auto`: Any input prompts will be suppressed and install steps automatically approved.
+- `-d [path]`, `--dir [path]`: If provided, the installation directory will be changed to `[path]`.
+- `--no-controllers`: The controllers library will be excluded from the installation.
+- `--no-dynamical-systems`: The dynamical systems library will be excluded from the installation.
+- `--no-robot-model`: The robot model library, and therefore Pinocchio, will be excluded from the installation.
+- `--build-tests`: The unittest targets will be included in the installation.
+- `--clean`: Any previously installed header files from `/usr/local/include` and any shared library files from
+ `/usr/local/lib` will be deleted before the installation.
+- `--cleandir [path]`: Any previously installed header files and shared library files from `[path]` will be deleted before
+ the installation.
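+
+For example, the following call (just one possible combination of the flags listed above) runs a
+non-interactive installation to the default location and additionally builds the test targets:
+
+```shell script
+cd control-libraries/source
+sudo ./install.sh --auto --build-tests
+```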
+
+### Manual installation steps
+
+Eigen3 (release [3.4.0](https://gitlab.com/libeigen/eigen/-/releases/3.4.0)) is required for
+building `state_representation` and all other libraries. You can install it with:
+```shell script
+wget -c https://gitlab.com/libeigen/eigen/-/archive/3.4.0/eigen-3.4.0.tar.gz -O - | tar -xz
+cd eigen-3.4.0 && mkdir build && cd build && cmake .. && make install
+cd ../.. && rm -r eigen-3.4.0
+```
+
+Pinocchio is required for building the `robot_model` library. Installing this requires
+some additional steps; see the [install script](./install.sh) for reference.
+If the `robot_model` library is not needed, you can skip the installation of Pinocchio.
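+
+As a rough sketch, the following mirrors the steps and version tag used in the [install script](./install.sh)
+(which additionally installs the OSQP and OSQP-Eigen dependencies beforehand):
+```shell script
+git clone --depth 1 -b v2.6.9 --recursive https://github.com/stack-of-tasks/pinocchio
+cd pinocchio && mkdir build && cd build
+cmake .. -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=/usr/local -DBUILD_PYTHON_INTERFACE=OFF
+make -j1 && make install
+```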
+
+Once the dependencies are installed, build and install the libraries by navigating
+to the source folder and invoking `cmake` and `make` as shown below.
+The library files are installed to `/usr/local/lib`, and the library header files
+are copied to `/usr/local/include`.
+
+```shell script
+cd control-libraries/source
+mkdir build && cd build
+cmake -DCMAKE_BUILD_TYPE=Release ..
+make -j
+make install
+```
+
+The CMake configuration flags `BUILD_CONTROLLERS`, `BUILD_DYNAMICAL_SYSTEMS` and `BUILD_ROBOT_MODEL`
+determine which libraries are built, and are all defined as `ON` by default.
+The building of the `state_representation` library cannot be disabled, as all other libraries depend on it.
+
+To selectively disable the build of a particular library, set the flag to `=OFF`.
+For example, the following flags will prevent the `robot_model` library from being built,
+which is useful if the Pinocchio dependency is not fulfilled on your system.
+
+```shell script
+cmake -DCMAKE_BUILD_TYPE=Release -DBUILD_ROBOT_MODEL=OFF ..
+```
+
+To also build the library tests, add the CMake flag `-DBUILD_TESTING=ON`.
+This requires GTest to be installed on your system. You can then use `make test` to run all test targets.
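+
+For example, from the same `build` directory as above:
+```shell script
+cmake -DCMAKE_BUILD_TYPE=Release -DBUILD_TESTING=ON ..
+make -j && make test
+```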
+
+Alternatively, you can include the source code for each library as submodules in your own CMake project,
+using the CMake directive `add_subdirectory(...)` to link it with your project.
+
## Usage in a cmake project
If you have a target library or executable `my_target`, you can link all required libraries
diff --git a/source/build-test.sh b/source/build-test.sh
new file mode 100755
index 000000000..e1398072b
--- /dev/null
+++ b/source/build-test.sh
@@ -0,0 +1,43 @@
+#!/usr/bin/env bash
+
+BASE_TAG="latest"
+
+BUILD_CONTROLLERS="ON"
+BUILD_DYNAMICAL_SYSTEMS="ON"
+BUILD_ROBOT_MODEL="ON"
+
+HELP_MESSAGE="Usage: build-test.sh [--base-tag ] [-r] [-v]
+Options:
+ --base-tag Tag of the development image.
+ (default: ${BASE_TAG})
+ -r, --rebuild Rebuild the image using the docker
+ --no-cache option.
+ -v, --verbose Use the verbose option during the building
+ process.
+ -h, --help Show this help message.
+"
+
+BUILD_FLAGS=()
+while [[ $# -gt 0 ]]; do
+ opt="$1"
+ case $opt in
+ --base-tag) BASE_TAG=$2; shift 2;;
+ -r|--rebuild) BUILD_FLAGS+=(--no-cache); shift ;;
+ -v|--verbose) BUILD_FLAGS+=(--progress=plain); shift ;;
+ -h|--help) echo "${HELP_MESSAGE}"; exit 0 ;;
+ *) echo 'Error in command line parsing' >&2
+ echo -e "\n${HELP_MESSAGE}"
+ exit 1
+ esac
+done
+
+IMAGE_NAME=aica-technology/control-libraries/source/testing:"${BASE_TAG}"
+
+BUILD_FLAGS+=(--build-arg "BASE_TAG=${BASE_TAG}")
+BUILD_FLAGS+=(--build-arg "BUILD_CONTROLLERS=${BUILD_CONTROLLERS}")
+BUILD_FLAGS+=(--build-arg "BUILD_DYNAMICAL_SYSTEMS=${BUILD_DYNAMICAL_SYSTEMS}")
+BUILD_FLAGS+=(--build-arg "BUILD_ROBOT_MODEL=${BUILD_ROBOT_MODEL}")
+BUILD_FLAGS+=(-t "${IMAGE_NAME}")
+
+docker pull ghcr.io/aica-technology/control-libraries/development-dependencies:"${BASE_TAG}" || exit 1
+DOCKER_BUILDKIT=1 docker build . --file ./Dockerfile.source "${BUILD_FLAGS[@]}"
diff --git a/source/dev-server.sh b/source/dev-server.sh
new file mode 100755
index 000000000..4576a06ae
--- /dev/null
+++ b/source/dev-server.sh
@@ -0,0 +1,76 @@
+#!/usr/bin/env bash
+
+BASE_TAG="latest"
+CONTAINER_NAME=aica-technology-control-libraries-development-dependencies-ssh
+
+SSH_PORT=2222
+SSH_KEY_FILE="${HOME}/.ssh/id_rsa.pub"
+
+HELP_MESSAGE="Usage: ./dev-server.sh [-p ] [-k ] [--base-tag ]
+
+Build and run a docker container as an SSH toolchain server for remote development.
+
+The server is bound to the specified port on localhost (127.0.0.1)
+and uses passwordless RSA key-pair authentication. The host public key
+is read from the specified key file and copied to the server on startup.
+On linux hosts, the UID and GID of the specified user will also be
+set to match the UID and GID of the host user by the entry script.
+
+The server will run in the background as ${CONTAINER_NAME}.
+
+You can connect with 'ssh developer@localhost -p <port>'.
+
+Close the server with 'docker stop ${CONTAINER_NAME}'.
+
+Options:
+ -p, --port Specify the port to bind for SSH
+ connection.
+ (default: ${SSH_PORT})
+ -k, --key-file [path] Specify the path of the RSA
+ public key file.
+ (default: ${SSH_KEY_FILE})
+ --base-tag Tag of the development image.
+ (default: ${BASE_TAG})
+ -h, --help Show this help message."
+
+while [ "$#" -gt 0 ]; do
+ case "$1" in
+ -p|--port) SSH_PORT=$2; shift 2;;
+ -k|--key-file) SSH_KEY_FILE=$2; shift 2;;
+ --base-tag) BASE_TAG=$2; shift 2;;
+ -h|--help) echo "${HELP_MESSAGE}"; exit 0;;
+ *) echo 'Error in command line parsing' >&2
+ echo -e "\n${HELP_MESSAGE}"
+ exit 1
+ esac
+done
+
+IMAGE_NAME=ghcr.io/aica-technology/control-libraries/development-dependencies:"${BASE_TAG}"
+
+PUBLIC_KEY=$(cat "${SSH_KEY_FILE}")
+
+COMMAND_FLAGS=()
+COMMAND_FLAGS+=(--key "${PUBLIC_KEY}")
+COMMAND_FLAGS+=(--user developer)
+
+if [[ "${OSTYPE}" != "darwin"* ]]; then
+ USER_ID=$(id -u "${USER}")
+ GROUP_ID=$(id -g "${USER}")
+ COMMAND_FLAGS+=(--uid "${USER_ID}")
+ COMMAND_FLAGS+=(--gid "${GROUP_ID}")
+fi
+
+docker pull "${IMAGE_NAME}" || exit 1
+
+docker container stop "${CONTAINER_NAME}" >/dev/null 2>&1
+docker rm --force "${CONTAINER_NAME}" >/dev/null 2>&1
+
+echo "Starting background container with access port ${SSH_PORT} for user developer"
+docker run -d --rm --cap-add sys_ptrace \
+ --user root \
+ --publish 127.0.0.1:"${SSH_PORT}":22 \
+ --name "${CONTAINER_NAME}" \
+ --hostname "${CONTAINER_NAME}" \
+ "${IMAGE_NAME}" /sshd_entrypoint.sh "${COMMAND_FLAGS[@]}"
+
+echo "${CONTAINER_NAME}"
diff --git a/source/install.sh b/source/install.sh
new file mode 100755
index 000000000..ee9679b11
--- /dev/null
+++ b/source/install.sh
@@ -0,0 +1,163 @@
+#!/bin/bash
+SCRIPT=$(readlink -f "${BASH_SOURCE[0]}")
+SOURCE_PATH=$(dirname "$SCRIPT")
+
+BUILD_CONTROLLERS="ON"
+BUILD_DYNAMICAL_SYSTEMS="ON"
+BUILD_ROBOT_MODEL="ON"
+BUILD_TESTING="OFF"
+INSTALL_DESTINATION="/usr/local"
+AUTO_INSTALL=""
+
+EIGEN_VERSION=3.4.0
+OSQP_TAG=0.6.2
+OSQP_EIGEN_TAG=0.6.4
+PINOCCHIO_TAG=2.6.9
+
+FAIL_MESSAGE="The provided input arguments are not valid.
+Run the script with the '--help' argument."
+
+HELP_MESSAGE="Usage: [sudo] ./install.sh [OPTIONS]
+
+An install script for the control libraries.
+
+Options:
+ -y, --auto Suppress any input prompts and
+ automatically approve install steps.
+ -d, --dir [path] Configure the installation directory
+ (default: /usr/local).
+
+ --no-controllers Exclude the controllers library.
+ --no-dynamical-systems Exclude the dynamical systems library.
+ --no-robot-model Exclude the robot model library.
+ --build-tests Build the unittest targets.
+
+ --clean Delete any previously installed header
+ files from /usr/local/include and any
+ shared library files from /usr/local/lib.
+ --cleandir [path] Delete any previously installed header
+ and library files from the specified path.
+
+ -h, --help Show this help message."
+
+function uninstall {
+ function delete_components {
+ rm -r "${INSTALL_DESTINATION}"/include/controllers
+ rm -r "${INSTALL_DESTINATION}"/include/dynamical_systems
+ rm -r "${INSTALL_DESTINATION}"/include/robot_model
+ rm -r "${INSTALL_DESTINATION}"/include/state_representation
+ rm -r "${INSTALL_DESTINATION}"/lib/libcontrollers*.so
+ rm -r "${INSTALL_DESTINATION}"/lib/libdynamical_systems*.so
+ rm -r "${INSTALL_DESTINATION}"/lib/librobot_model*.so
+ rm -r "${INSTALL_DESTINATION}"/lib/libstate_representation*.so
+ }
+
+ delete_components >/dev/null 2>&1
+
+ echo "Deleted any control library artefacts from ${INSTALL_DESTINATION}."
+}
+
+while [ "$#" -gt 0 ]; do
+ case "$1" in
+ -y|--auto) AUTO_INSTALL="-y"; shift 1;;
+ --build-tests) BUILD_TESTING="ON"; shift 1;;
+ --clean) uninstall; exit 0;;
+ --cleandir) INSTALL_DESTINATION=$2; uninstall; exit 0;;
+ -d|--dir) INSTALL_DESTINATION=$2; shift 2;;
+ -h|--help) echo "$HELP_MESSAGE"; exit 0;;
+ --no-controllers) BUILD_CONTROLLERS="OFF"; shift 1;;
+ --no-dynamical-systems) BUILD_DYNAMICAL_SYSTEMS="OFF"; shift 1;;
+ --no-robot-model) BUILD_ROBOT_MODEL="OFF"; shift 1;;
+
+ -*) echo "Unknown option: $1" >&2; echo "$FAIL_MESSAGE"; exit 1;;
+ esac
+done
+
+if [ "${BUILD_CONTROLLERS}" == "ON" ] && [ "${BUILD_ROBOT_MODEL}" == "OFF" ]; then
+ echo "The robot model library is required to build the controllers library!"
+ echo "Either disable controller installation with '--no-controllers' or enable"
+ echo "the robot model installation by removing the '--no-robot-model' flag."
+ exit 1
+fi
+
+# cleanup any previous build folders
+rm -rf "${SOURCE_PATH}"/tmp
+
+# install base dependencies
+echo ">>> INSTALLING BASE DEPENDENCIES"
+
+if [ -z $(which pkg-config) ]; then
+ echo ">>> INSTALLING pkg-config tool"
+ apt-get update && apt-get install "${AUTO_INSTALL}" pkg-config || exit 1
+fi
+
+pkg-config eigen3 --atleast-version="${EIGEN_VERSION}"
+if [ "$?" != 0 ]; then
+ echo ">>> INSTALLING EIGEN"
+ mkdir -p "${SOURCE_PATH}"/tmp/lib && cd "${SOURCE_PATH}"/tmp/lib || exit 1
+ wget -c "https://gitlab.com/libeigen/eigen/-/archive/${EIGEN_VERSION}/eigen-${EIGEN_VERSION}.tar.gz" -O - | tar -xz || exit 1
+ cd "eigen-${EIGEN_VERSION}" && mkdir -p build && cd build && env CXXFLAGS=-DEIGEN_MPL2_ONLY cmake .. && make install || exit 1
+fi
+EIGEN_PATH=$(cmake --find-package -DNAME=Eigen3 -DCOMPILER_ID=GNU -DLANGUAGE=C -DMODE=COMPILE)
+if [ "${EIGEN_PATH::14}" != "-I/usr/include" ]; then
+ rm -rf /usr/include/eigen3 && ln -s ${EIGEN_PATH:2} /usr/include/eigen3 || exit 1
+fi
+
+# install module-specific dependencies
+if [ "${BUILD_ROBOT_MODEL}" == "ON" ]; then
+ echo ">>> INSTALLING ROBOT MODEL DEPENDENCIES"
+ apt-get update && apt-get install "${AUTO_INSTALL}" libboost-all-dev liburdfdom-dev || exit 1
+
+ pkg-config pinocchio --atleast-version=${PINOCCHIO_TAG}
+ if [ "$?" != 0 ]; then
+ mkdir -p "${SOURCE_PATH}"/tmp/lib && cd "${SOURCE_PATH}"/tmp/lib || exit 1
+
+ echo ">>> INSTALLING OSQP [1/3]"
+    git clone --depth 1 -b v${OSQP_TAG} --recursive https://github.com/oxfordcontrol/osqp \
+ && cd osqp && mkdir build && cd build && cmake -G "Unix Makefiles" .. && cmake --build . --target install \
+ && cd ../.. && rm -r osqp || exit 1
+
+ echo ">>> INSTALLING OSQP_EIGEN [2/3]"
+    git clone --depth 1 -b v${OSQP_EIGEN_TAG} https://github.com/robotology/osqp-eigen.git \
+ && cd osqp-eigen && mkdir build && cd build && cmake .. && make -j && make install \
+ && cd ../.. && rm -r osqp-eigen || exit 1
+
+ echo ">>> INSTALLING PINOCCHIO [3/3]"
+    git clone --depth 1 -b v${PINOCCHIO_TAG} --recursive https://github.com/stack-of-tasks/pinocchio \
+ && cd pinocchio && mkdir build && cd build \
+ && cmake .. -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=/usr/local -DBUILD_PYTHON_INTERFACE=OFF \
+ && make -j1 && make install && cd ../.. && rm -r pinocchio || exit 1
+ fi
+ ldconfig
+fi
+
+# install testing dependencies
+if [ "${BUILD_TESTING}" == "ON" ]; then
+ echo ">>> INSTALLING TEST DEPENDENCIES"
+ apt-get update && apt-get install "${AUTO_INSTALL}" libgtest-dev || exit 1
+
+ mkdir -p "${SOURCE_PATH}"/tmp/lib/gtest && cd "${SOURCE_PATH}"/tmp/lib/gtest || exit 1
+ cmake /usr/src/gtest && make || exit 1
+ cp lib/* /usr/local/lib || cp ./*.a /usr/local/lib
+fi
+
+# build and install the specified modules
+echo ">>> BUILDING CONTROL LIBRARIES"
+cd "${SOURCE_PATH}" && mkdir -p build && cd build || exit 1
+
+cmake -DCMAKE_BUILD_TYPE=Release \
+ -DCMAKE_INSTALL_PREFIX="${INSTALL_DESTINATION}" \
+ -DBUILD_CONTROLLERS="${BUILD_CONTROLLERS}" \
+ -DBUILD_DYNAMICAL_SYSTEMS="${BUILD_DYNAMICAL_SYSTEMS}" \
+ -DBUILD_ROBOT_MODEL="${BUILD_ROBOT_MODEL}" \
+ -DBUILD_TESTING="${BUILD_TESTING}" .. || exit 1
+
+make -j && make install || exit 1
+
+ldconfig
+
+# cleanup any temporary folders
+rm -rf "${SOURCE_PATH}"/tmp
+
+# reset location
+cd "${SOURCE_PATH}" || return