Skip to content

Commit

Permalink
NFC: Add MLIR-TensorRT 'CMakePresets.json' and update build documenta…
Browse files Browse the repository at this point in the history
…tion (#312)

This change adds a CMake presets file to MLIR-TensorRT, which helps to
simplify the initial CMake setup considerably.

The build documentation is updated to use this feature, and some
additional
notes are added to describe how to control the TensorRT version that
is used for build & test.
  • Loading branch information
christopherbate authored Oct 29, 2024
1 parent b9a479a commit c643ec3
Show file tree
Hide file tree
Showing 3 changed files with 101 additions and 14 deletions.
8 changes: 4 additions & 4 deletions mlir-tensorrt/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -17,9 +17,9 @@ endif()
if(MLIR_TRT_PACKAGE_CACHE_DIR)
set(CPM_SOURCE_CACHE "${MLIR_TRT_PACKAGE_CACHE_DIR}" CACHE STRING "" FORCE)
set(CPM_USE_NAMED_CACHE_DIRECTORIES ON CACHE BOOL "" FORCE)
else()
message(WARNING "MLIR_TRT_PACKAGE_CACHE_DIR is not set. Downloaded packages will be \
stored in your build directory. It is highly recommended to specify a package cache directory outside of \
elseif(NOT CPM_SOURCE_CACHE)
message(WARNING "CPM_SOURCE_CACHE is not set. Source code for third party C++ packages will be \
stored in your build directory. It is highly recommended to specify a CPM source cache directory outside of \
your build directory (for example '$PWD/.cache.cpm')")
endif()

Expand Down Expand Up @@ -55,7 +55,7 @@ mtrt_option(MLIR_TRT_ENABLE_EXECUTOR "Build the Executor dialect and MLIR-Tensor
mtrt_option(MLIR_TRT_ENABLE_NCCL "Enable the NCCL runtime module" ON)

set(MLIR_TRT_TENSORRT_DIR "" CACHE STRING "Path to TensorRT install directory")
set(MLIR_TRT_DOWNLOAD_TENSORRT_VERSION "10.0" CACHE STRING
set(MLIR_TRT_DOWNLOAD_TENSORRT_VERSION "10.2" CACHE STRING
"Version of TensorRT to download and use. It overrides MLIR_TRT_TENSORRT_DIR.")
set(MLIR_TRT_PACKAGE_CACHE_DIR "" CACHE STRING "Directory where to cache downloaded C++ packages")
set(MLIR_TRT_USE_LINKER "" CACHE STRING "Specify a linker to use (e.g. LLD); this is just an alias for LLVM_USE_LINKER")
Expand Down
56 changes: 56 additions & 0 deletions mlir-tensorrt/CMakePresets.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
{
"version": 6,
"include": [],
"configurePresets": [
{
"name": "base",
"generator": "Ninja",
"binaryDir": "build",
"cacheVariables": {
"CMAKE_BUILD_TYPE": "RelWithDebInfo",
"LLVM_ENABLE_ASSERTIONS": "ON",
"CPM_SOURCE_CACHE": "${sourceDir}/.cache.cpm",
"CPM_USE_NAMED_CACHE_DIRECTORIES": "ON"
}
},
{
"name": "ninja-llvm",
"displayName": "Ninja RelWithDebInfo LLVM",
"generator": "Ninja",
"binaryDir": "build",
"inherits": "base",
"cacheVariables": {
"CMAKE_C_COMPILER": "clang",
"CMAKE_CXX_COMPILER": "clang++",
"LLVM_USE_LINKER": "lld"
}
},
{
"name": "ninja-llvm-release",
"inherits": "ninja-llvm",
"displayName": "Ninja Release LLVM",
"cacheVariables": {
"CMAKE_BUILD_TYPE": "Release"
}
},
{
"name": "ninja-llvm-debug",
"inherits": "ninja-llvm",
"displayName": "Ninja Debug LLVM",
"cacheVariables": {
"CMAKE_BUILD_TYPE": "Debug"
}
},
{
"name": "ninja-gcc",
"displayName": "Ninja RelWithDebInfo GCC",
"generator": "Ninja",
"binaryDir": "build-gcc",
"inherits": "base",
"cacheVariables": {
"CMAKE_C_COMPILER": "gcc",
"CMAKE_CXX_COMPILER": "g++"
}
}
]
}
51 changes: 41 additions & 10 deletions mlir-tensorrt/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -42,16 +42,14 @@ since we download LLVM-Project as a zip archive directly from GitHub
at our pinned commit.

```sh
# Note: we use clang and lld here. These are recommended.
# However, GNU toolchains will also work,
# clang toolchain is optional.
cmake -B ./build -S . -G Ninja \
-DCMAKE_BUILD_TYPE=RelWithDebInfo \
-DMLIR_TRT_PACKAGE_CACHE_DIR=${PWD}/.cache.cpm \
-DMLIR_TRT_ENABLE_ASSERTIONS=ON \
-DMLIR_TRT_DOWNLOAD_TENSORRT_VERSION=10.2 \
-DCMAKE_C_COMPILER=clang -DCMAKE_CXX_COMPILER=clang++ \
-DMLIR_TRT_USE_LINKER=lld
# See CMakePresets.json for convenient CMake presets.
# Preset 'ninja-llvm' uses the Ninja generator, clang, and
# LLD, but GNU GCC toolchain is also supported (use preset
# ninja-gcc).
#
# By default, the CMake build system will download a version
# of TensorRT for you.
cmake --preset ninja-llvm

# Example build commands:

Expand Down Expand Up @@ -116,6 +114,39 @@ This will produce wheels under `build/mlir-tensorrt/wheels`:
ninja -C build/mlir-tensorrt mlir-tensorrt-all-wheels
```

## Configuring What TensorRT Version is Used

Our CMake-based build system will by default attempt to download a
version of TensorRT to use during building and testing. This is controlled
by the CMake cache variable [`MLIR_TRT_DOWNLOAD_TENSORRT_VERSION`](./CMakeLists.txt#L58).

To instead use a local TensorRT version, simply set the CMake
cache variable [`MLIR_TRT_TENSORRT_DIR`](./CMakeLists.txt#L200) to the
path to the TensorRT installation directory (containing directories `include`, `lib64`, and
so on), and set `MLIR_TRT_DOWNLOAD_TENSORRT_VERSION` to the empty string.

These variables are fed into the CMake function `find_tensorrt` which is invoked
[here](./CMakeLists.txt#L199). Options `INSTALL_DIR` and `DOWNLOAD_VERSION` are
mutually exclusive.

All executables built by the project will link TensorRT dynamically and load it
dynamically at runtime using the runtime environment's default dynamic library
search mechanism. The LIT testing configurations used in the project set the
dynamic library search path (e.g. the environment variable `LD_LIBRARY_PATH` on
Linux systems) to ensure that the TensorRT version used during compilation is
also used during testing.

When invoking an executable (e.g. `mlir-tensorrt-opt`) directly outside of the
LIT test runner, one should set the appropriate environment variables (e.g.
`LD_LIBRARY_PATH`) to point to the TensorRT library which should be loaded at runtime.
In general, if the project is compiled with TensorRT `X.Y` but version
`X.Z` is loaded at runtime, with `Z > Y`, the software is expected to work, but
no guarantees are currently made.








Expand Down

0 comments on commit c643ec3

Please sign in to comment.