diff --git a/.github/workflows/h5bench-hdf5-1.13.1-mpich-3.4.3.yml b/.github/workflows/h5bench-hdf5-1.13.1-mpich-3.4.3.yml new file mode 100644 index 00000000..f8e313a9 --- /dev/null +++ b/.github/workflows/h5bench-hdf5-1.13.1-mpich-3.4.3.yml @@ -0,0 +1,906 @@ +name: h5bench (HDF5 1.13.1, MPICH 3.4.3) + +on: + pull_request: + + workflow_dispatch: + +jobs: + h5bench: + runs-on: ubuntu-20.04 + container: + image: hpcio/hdf5-1.13.1-mpich-3.4.3 + timeout-minutes: 60 + env: + OMPI_ALLOW_RUN_AS_ROOT: 1 + OMPI_ALLOW_RUN_AS_ROOT_CONFIRM: 1 + OMPI_MCA_rmaps_base_oversubscribe: "yes" + + steps: + - uses: actions/checkout@v2 + with: + submodules: true + + - name: Dependencies + run: | + # VOL-ASYNC + git clone --recursive https://github.com/hpc-io/vol-async.git /opt/vol-async + + # VOL-CACHE + git clone --recursive https://github.com/hpc-io/vol-cache.git /opt/vol-cache + + - name: Build Argobots + run: | + export ABT_DIR=/opt/argobots + + cd /opt/vol-async/argobots + + ./autogen.sh + ./configure --prefix=$ABT_DIR + + make -j 2 + make install + + - name: Build VOL-ASYNC + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd $ASYNC_DIR + mkdir build + cd build + + cmake .. 
-DCMAKE_INSTALL_PREFIX=$ASYNC_DIR -DCMAKE_PREFIX_PATH=$HDF5_DIR -DCMAKE_C_FLAGS="-fPIC" + make + make install + + - name: Test VOL-ASYNC + run: | + export HDF5_DIR=/opt/hdf5 + export HDF5_HOME=$HDF5_DIR + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_DIR/lib:$HDF5_DIR/lib:$ABT_DIR/lib:$LD_LIBRARY_PATH + export HDF5_PLUGIN_PATH="$ASYNC_DIR/include" + export HDF5_VOL_CONNECTOR="async under_vol=0;under_info={}" + + cd $ASYNC_DIR/build + + export LD_PRELOAD=$ASYNC_DIR/lib/libh5async.so:$ABT_DIR/lib/libabt.so:$HDF5_DIR/lib/libhdf5.so + + ctest + + - name: Build VOL-CACHE + run: | + export HDF5_ROOT=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + export HDF5_VOL_DIR=/opt/vol-cache + + [ -e $HDF5_VOL_DIR ] || mkdir $HDF5_VOL_DIR + [ -e $HDF5_VOL_DIR/bin ] || mkdir $HDF5_VOL_DIR/bin + [ -e $HDF5_VOL_DIR/lib ] || mkdir $HDF5_VOL_DIR/lib + [ -e $HDF5_VOL_DIR/include ] || mkdir $HDF5_VOL_DIR/include + + cp -r $ASYNC_DIR/lib/*.so $ASYNC_DIR/lib/*.a $HDF5_VOL_DIR/lib/ + cp -r $ASYNC_DIR/include/*.h $HDF5_VOL_DIR/include/ + + export LD_LIBRARY_PATH=$HDF5_VOL_DIR/lib:$HDF5_ROOT/lib:$ABT_DIR/lib:$LD_LIBRARY_PATH + + cd $HDF5_VOL_DIR/src + + make all + - name: Build h5bench SYNC + run: | + export HDF5_DIR=/opt/hdf5 + export HDF5_HOME=$HDF5_DIR + + export LD_LIBRARY_PATH=$HDF5_HOME/lib:$LD_LIBRARY_PATH + + mkdir build-sync + cd build-sync + + cmake .. \ + -DH5BENCH_ALL=ON + make -j 2 + sudo make install + + - name: Build h5bench ASYNC + run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + + mkdir build-async + cd build-async + + cmake .. 
\ + -DWITH_ASYNC_VOL:BOOL=ON \ + -DCMAKE_C_FLAGS="-I$ASYNC_HOME/include -L$ASYNC_HOME/lib" \ + -DH5BENCH_ALL=ON + make -j 2 + sudo make install + + - name: Build h5bench CACHE + run: | + export HDF5_ROOT=/opt/hdf5 + export HDF5_HOME=/opt/hdf5 + export HDF5_VOL_DIR=/opt/vol-cache + + mkdir build-cache + cd build-cache + + cmake .. \ + -DWITH_CACHE_VOL:BOOL=ON \ + -DWITH_ASYNC_VOL:BOOL=ON \ + -DCMAKE_C_FLAGS="-I$HDF5_VOL_DIR/include -L$HDF5_VOL_DIR/lib" + make -j 2 + + - name: Configure for MPICH + run: | + cd build-sync + + find ../samples -name '*.json' -exec sed -i "s/--allow-run-as-root//g" '{}' \; + find ../samples -name '*.json' -exec sed -i "s/--oversubscribe//g" '{}' \; + + - name: Test h5bench SYNC write/read + run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + + cd build-sync + export PATH=$(pwd):$PATH + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-read-contig-1d-small.json + + - name: Test h5bench SYNC write 1D contiguous (memory) strided (file) + run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + + cd build-sync + export PATH=$(pwd):$PATH + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-1d-contig-strided.json + + - name: Test h5bench SYNC write 1D contiguous (memory) contiguous (file) + run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + + cd build-sync + export PATH=$(pwd):$PATH + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-1d-contig-contig.json + + - name: Test h5bench SYNC write 1D contiguous (memory) interleaved (file) + run: | + export 
HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + + cd build-sync + export PATH=$(pwd):$PATH + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-1d-contig-interleaved.json + + - name: Test h5bench SYNC write 1D interleaved (memory) contiguous (file) + run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + + cd build-sync + export PATH=$(pwd):$PATH + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-1d-interleaved-contig.json + + - name: Test h5bench SYNC write 1D interleaved (memory) interleaved (file) + run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + + cd build-sync + export PATH=$(pwd):$PATH + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-1d-interleaved-interleaved.json + + - name: Test h5bench SYNC write 2D contiguous (memory) contiguous (file) + run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + + cd build-sync + export PATH=$(pwd):$PATH + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-2d-contig-contig.json + + - name: Test h5bench SYNC write 2D contiguous (memory) interleaved (file) + run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + + cd build-sync + export PATH=$(pwd):$PATH + ./h5bench --debug --abort-on-failure --validate-mode 
../samples/sync-write-2d-contig-interleaved.json + + - name: Test h5bench SYNC write 2D interleaved (memory) contiguous (file) + run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + + cd build-sync + export PATH=$(pwd):$PATH + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-2d-interleaved-contig.json + + - name: Test h5bench SYNC write 2D interleaved (memory) interleaved (file) + run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + + cd build-sync + export PATH=$(pwd):$PATH + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-2d-interleaved-interleaved.json + + - name: Test h5bench SYNC write 3D contiguous (memory) contiguous (file) + run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + + cd build-sync + export PATH=$(pwd):$PATH + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-3d-contig-contig.json + + - name: Test h5bench SYNC read 1D contiguous (memory) contiguous (file) full + run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + + cd build-sync + export PATH=$(pwd):$PATH + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-1d-contig-contig-read-full.json + + - name: Test h5bench SYNC read 1D contiguous (memory) contiguous (file) partial + run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export 
LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + + cd build-sync + export PATH=$(pwd):$PATH + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-1d-contig-contig-read-partial.json + + - name: Test h5bench SYNC read 1D contiguous (memory) contiguous (file) strided + run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + + cd build-sync + export PATH=$(pwd):$PATH + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-1d-contig-contig-read-strided.json + + - name: Test h5bench SYNC read 2D contiguous (memory) contiguous (file) full + run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + + cd build-sync + export PATH=$(pwd):$PATH + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-2d-contig-contig-read-full.json + + - name: Test h5bench SYNC read 3D contiguous (memory) contiguous (file) full + run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + + cd build-sync + export PATH=$(pwd):$PATH + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-2d-contig-contig-read-full.json + + - name: Test h5bench SYNC write unlimited + run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + + cd build-sync + export PATH=$(pwd):$PATH + ./h5bench --debug --abort-on-failure ../samples/sync-write-unlimited.json + + - name: Test h5bench SYNC overwrite + run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots 
+ export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + + cd build-sync + export PATH=$(pwd):$PATH + ./h5bench --debug --abort-on-failure ../samples/sync-overwrite.json + + - name: Test h5bench SYNC append + run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + + cd build-sync + export PATH=$(pwd):$PATH + ./h5bench --debug --abort-on-failure ../samples/sync-append.json + + - name: Test h5bench SYNC exerciser + run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + + cd build-sync + export PATH=$(pwd):$PATH + ./h5bench --debug --abort-on-failure ../samples/sync-exerciser.json + + - name: Test h5bench SYNC metadata + run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + + cd build-sync + export PATH=$(pwd):$PATH + ./h5bench --debug --abort-on-failure ../samples/sync-metadata.json + + - name: Test h5bench SYNC amrex + run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + + cd build-sync + export PATH=$(pwd):$PATH + ./h5bench --debug --abort-on-failure ../samples/sync-amrex.json + + - name: Test h5bench SYNC openpmd + run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + + cd build-sync + export PATH=$(pwd):$PATH + ./h5bench --debug --abort-on-failure ../samples/sync-openpmd.json + + - name: Test h5bench SYNC e3sm 
+ run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + + cd build-sync + export PATH=$(pwd):$PATH + ./h5bench --debug --abort-on-failure ../samples/sync-e3sm.json + + - name: Test h5bench ASYNC write/read + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-async + export PATH=$(pwd):$PATH + + python3 ../samples/update.py ../samples/async-write-read-contig-1d-small.json + + ./h5bench --debug --abort-on-failure ../samples/async-write-read-contig-1d-small.json + + - name: Test h5bench ASYNC write 1D contiguous (memory) strided (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-async + export PATH=$(pwd):$PATH + + python3 ../samples/update.py ../samples/async-write-1d-contig-strided.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-strided.json + + - name: Test h5bench ASYNC write 1D contiguous (memory) contiguous (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-async + export PATH=$(pwd):$PATH + + python3 ../samples/update.py ../samples/async-write-1d-contig-contig.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig.json + + - name: Test h5bench ASYNC write 1D contiguous (memory) interleaved (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-async + export PATH=$(pwd):$PATH + + python3 ../samples/update.py ../samples/async-write-1d-contig-interleaved.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-interleaved.json + + - name: Test h5bench ASYNC write 1D interleaved (memory) contiguous (file) + run: | + export 
HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-async + export PATH=$(pwd):$PATH + + python3 ../samples/update.py ../samples/async-write-1d-interleaved-contig.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-interleaved-contig.json + + - name: Test h5bench ASYNC write 1D interleaved (memory) interleaved (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-async + export PATH=$(pwd):$PATH + + python3 ../samples/update.py ../samples/async-write-1d-interleaved-interleaved.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-interleaved-interleaved.json + + - name: Test h5bench ASYNC write 2D contiguous (memory) contiguous (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-async + export PATH=$(pwd):$PATH + + python3 ../samples/update.py ../samples/async-write-2d-contig-contig.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-contig.json + + - name: Test h5bench ASYNC write 2D contiguous (memory) interleaved (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-async + export PATH=$(pwd):$PATH + + python3 ../samples/update.py ../samples/async-write-2d-contig-interleaved.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-interleaved.json + + - name: Test h5bench ASYNC write 2D interleaved (memory) contiguous (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-async + export PATH=$(pwd):$PATH + + python3 ../samples/update.py ../samples/async-write-2d-interleaved-contig.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-interleaved-contig.json + + - name: 
Test h5bench ASYNC write 2D interleaved (memory) interleaved (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-async + export PATH=$(pwd):$PATH + + python3 ../samples/update.py ../samples/async-write-2d-interleaved-interleaved.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-interleaved-interleaved.json + + - name: Test h5bench ASYNC write 3D contiguous (memory) contiguous (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-async + export PATH=$(pwd):$PATH + + python3 ../samples/update.py ../samples/async-write-3d-contig-contig.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-3d-contig-contig.json + + - name: Test h5bench ASYNC read 1D contiguous (memory) contiguous (file) full + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-async + export PATH=$(pwd):$PATH + + python3 ../samples/update.py ../samples/async-write-1d-contig-contig-read-full.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig-read-full.json + + - name: Test h5bench ASYNC read 1D contiguous (memory) contiguous (file) partial + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-async + export PATH=$(pwd):$PATH + + python3 ../samples/update.py ../samples/async-write-1d-contig-contig-read-partial.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig-read-partial.json + + - name: Test h5bench ASYNC read 1D contiguous (memory) contiguous (file) strided + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-async + export PATH=$(pwd):$PATH + + python3 ../samples/update.py 
../samples/async-write-1d-contig-contig-read-strided.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig-read-strided.json + + - name: Test h5bench ASYNC read 2D contiguous (memory) contiguous (file) full + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-async + export PATH=$(pwd):$PATH + + python3 ../samples/update.py ../samples/async-write-2d-contig-contig-read-full.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-contig-read-full.json + + - name: Test h5bench ASYNC read 3D contiguous (memory) contiguous (file) full + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-async + export PATH=$(pwd):$PATH + + python3 ../samples/update.py ../samples/async-write-2d-contig-contig-read-full.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-contig-read-full.json + + - name: Test h5bench ASYNC amrex + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-async + export PATH=$(pwd):$PATH + + python3 ../samples/update.py ../samples/async-amrex.json + + ./h5bench --debug --abort-on-failure ../samples/async-amrex.json + + - name: Test h5bench CACHE write 1D contiguous (memory) strided (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + export HDF5_VOL_DIR=/opt/vol-cache + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184\n" > cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-contig-strided.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-strided.json + + - 
name: Test h5bench CACHE write 1D contiguous (memory) contiguous (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184\n" > cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-contig-contig.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig.json + + - name: Test h5bench CACHE write 1D contiguous (memory) interleaved (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184\n" > cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-contig-interleaved.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-interleaved.json + + - name: Test h5bench CACHE write 1D interleaved (memory) contiguous (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184\n" > cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-interleaved-contig.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-interleaved-contig.json + + - name: Test h5bench CACHE write 1D interleaved (memory) interleaved (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + 
export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184\n" > cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-interleaved-interleaved.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-interleaved-interleaved.json + + - name: Test h5bench CACHE write 2D contiguous (memory) contiguous (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184\n" > cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-2d-contig-contig.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-contig.json + + - name: Test h5bench CACHE write 2D contiguous (memory) interleaved (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184\n" > cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-2d-contig-interleaved.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-interleaved.json + + - name: Test h5bench CACHE write 2D interleaved (memory) contiguous (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: 
SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184\n" > cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-2d-interleaved-contig.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-interleaved-contig.json + + - name: Test h5bench CACHE write 2D interleaved (memory) interleaved (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184\n" > cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-2d-interleaved-interleaved.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-interleaved-interleaved.json + + - name: Test h5bench CACHE write 3D contiguous (memory) contiguous (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184\n" > cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-3d-contig-contig.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-3d-contig-contig.json + + - name: Test h5bench CACHE read 1D contiguous (memory) contiguous (file) full + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184\n" > 
cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-contig-contig-read-full.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig-read-full.json + + - name: Test h5bench CACHE read 1D contiguous (memory) contiguous (file) partial + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184\n" > cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-contig-contig-read-partial.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig-read-partial.json + + - name: Test h5bench CACHE read 1D contiguous (memory) contiguous (file) strided + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184\n" > cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-contig-contig-read-strided.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig-read-strided.json + + - name: Test h5bench CACHE read 2D contiguous (memory) contiguous (file) full + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184\n" > cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py 
../samples/async-write-2d-contig-contig-read-full.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-contig-read-full.json + + - name: Test h5bench CACHE read 3D contiguous (memory) contiguous (file) full + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184\n" > cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-2d-contig-contig-read-full.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-contig-read-full.json + + - name: Upload artifact + if: always() + uses: actions/upload-artifact@v2 + with: + name: test + path: build*/storage/**/std* + retention-days: 1 diff --git a/.github/workflows/h5bench-hdf5-develop.yml b/.github/workflows/h5bench-hdf5-develop.yml index 58ed2f20..6c23df3a 100644 --- a/.github/workflows/h5bench-hdf5-develop.yml +++ b/.github/workflows/h5bench-hdf5-develop.yml @@ -8,7 +8,7 @@ on: jobs: h5bench: runs-on: ubuntu-latest - timeout-minutes: 60 + timeout-minutes: 90 env: OMPI_ALLOW_RUN_AS_ROOT: 1 OMPI_ALLOW_RUN_AS_ROOT_CONFIRM: 1 @@ -30,6 +30,8 @@ jobs: # VOL-ASYNC git clone --recursive https://github.com/hpc-io/vol-async.git /opt/vol-async + # VOL-CACHE + git clone --recursive https://github.com/hpc-io/vol-cache.git /opt/vol-cache - name: Build HDF5 develop run: | export HDF5_DIR=/opt/hdf5 @@ -69,7 +71,7 @@ jobs: mkdir build cd build - cmake .. -DCMAKE_INSTALL_PREFIX=$ASYNC_DIR -DCMAKE_PREFIX_PATH=$HDF5_DIR + cmake .. 
-DCMAKE_INSTALL_PREFIX=$ASYNC_DIR -DCMAKE_PREFIX_PATH=$HDF5_DIR -DCMAKE_C_FLAGS="-fPIC" make make install @@ -90,6 +92,27 @@ jobs: ctest + - name: Build VOL-CACHE + run: | + export HDF5_ROOT=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + export HDF5_VOL_DIR=/opt/vol-cache + + [ -e $HDF5_VOL_DIR ] || mkdir $HDF5_VOL_DIR + [ -e $HDF5_VOL_DIR/bin ] || mkdir $HDF5_VOL_DIR/bin + [ -e $HDF5_VOL_DIR/lib ] || mkdir $HDF5_VOL_DIR/lib + [ -e $HDF5_VOL_DIR/include ] || mkdir $HDF5_VOL_DIR/include + + cp -r $ASYNC_DIR/lib/*.so $ASYNC_DIR/lib/*.a $HDF5_VOL_DIR/lib/ + cp -r $ASYNC_DIR/include/*.h $HDF5_VOL_DIR/include/ + + export LD_LIBRARY_PATH=$HDF5_VOL_DIR/lib:$HDF5_ROOT/lib:$ABT_DIR/lib:$LD_LIBRARY_PATH + + cd $HDF5_VOL_DIR/src + + make all + - name: Build h5bench SYNC run: | export HDF5_DIR=/opt/hdf5 @@ -107,8 +130,6 @@ jobs: - name: Build h5bench ASYNC run: | - current="$PWD" - export HDF5_HOME=/opt/hdf5 export ABT_HOME=/opt/argobots export ASYNC_HOME=/opt/vol-async @@ -125,6 +146,21 @@ jobs: make -j 2 sudo make install + - name: Build h5bench CACHE + run: | + export HDF5_ROOT=/opt/hdf5 + export HDF5_HOME=/opt/hdf5 + export HDF5_VOL_DIR=/opt/vol-cache + + mkdir build-cache + cd build-cache + + cmake .. 
\ + -DWITH_CACHE_VOL:BOOL=ON \ + -DWITH_ASYNC_VOL:BOOL=ON \ + -DCMAKE_C_FLAGS="-I$HDF5_VOL_DIR/include -L$HDF5_VOL_DIR/lib" + make -j 2 + + - name: Test h5bench SYNC write/read + run: | + export HDF5_HOME=/opt/hdf5 @@ -491,8 +527,6 @@ jobs: - name: Test h5bench ASYNC write 2D contiguous (memory) interleaved (file) run: | - current="$PWD" - export HDF5_DIR=/opt/hdf5 export ABT_DIR=/opt/argobots export ASYNC_DIR=/opt/vol-async @@ -611,6 +645,248 @@ jobs: ./h5bench --debug --abort-on-failure ../samples/async-amrex.json + + - name: Test h5bench CACHE write 1D contiguous (memory) strided (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + export HDF5_VOL_DIR=/opt/vol-cache + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184\n" > cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-contig-strided.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-strided.json + + - name: Test h5bench CACHE write 1D contiguous (memory) contiguous (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184\n" > cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-contig-contig.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig.json + + - name: Test h5bench CACHE write 1D contiguous (memory) interleaved (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf 
"HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184\n" > cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-contig-interleaved.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-interleaved.json + + - name: Test h5bench CACHE write 1D interleaved (memory) contiguous (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184\n" > cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-interleaved-contig.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-interleaved-contig.json + + - name: Test h5bench CACHE write 1D interleaved (memory) interleaved (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184\n" > cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-interleaved-interleaved.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-interleaved-interleaved.json + + - name: Test h5bench CACHE write 2D contiguous (memory) contiguous (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 
128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184\n" > cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-2d-contig-contig.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-contig.json + + - name: Test h5bench CACHE write 2D contiguous (memory) interleaved (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184\n" > cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-2d-contig-interleaved.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-interleaved.json + + - name: Test h5bench CACHE write 2D interleaved (memory) contiguous (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184\n" > cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-2d-interleaved-contig.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-interleaved-contig.json + + - name: Test h5bench CACHE write 2D interleaved (memory) interleaved (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184\n" > cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py 
../samples/async-write-2d-interleaved-interleaved.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-interleaved-interleaved.json + + - name: Test h5bench CACHE write 3D contiguous (memory) contiguous (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-3d-contig-contig.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-3d-contig-contig.json + + - name: Test h5bench CACHE read 1D contiguous (memory) contiguous (file) full + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-contig-contig-read-full.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig-read-full.json + + - name: Test h5bench CACHE read 1D contiguous (memory) contiguous (file) partial + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-contig-contig-read-partial.json + + ./h5bench --debug --abort-on-failure --validate-mode 
../samples/async-write-1d-contig-contig-read-partial.json + + - name: Test h5bench CACHE read 1D contiguous (memory) contiguous (file) strided + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-1d-contig-contig-read-strided.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig-read-strided.json + + - name: Test h5bench CACHE read 2D contiguous (memory) contiguous (file) full + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-2d-contig-contig-read-full.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-contig-read-full.json + + - name: Test h5bench CACHE read 3D contiguous (memory) contiguous (file) full + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd build-cache + + printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" >& cache_1.cfg + + [ -e SSD ] || mkdir SSD + + python3 ../samples/update.py ../samples/async-write-3d-contig-contig-read-full.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-3d-contig-contig-read-full.json + - name: Upload artifact + if: 
always() uses: actions/upload-artifact@v2 diff --git a/CMakeLists.txt b/CMakeLists.txt index f7e2fbc0..5df9d0f7 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -92,6 +92,24 @@ endif() message(STATUS "HDF5 VOL ASYNC: ${WITH_ASYNC_VOL}") +# VOL CACHE Dependency ####################################################### +# + +set(CACHE_HOME $ENV{CACHE_HOME}) +option(WITH_CACHE_VOL "Enable HDF5 VOL CACHE connector" OFF) + +if (WITH_CACHE_VOL) + if(${HDF5_VERSION} VERSION_GREATER_EQUAL "1.13.1") + add_definitions(-DUSE_CACHE_VOL) + include_directories(${CACHE_HOME}) + link_directories(${CACHE_HOME}) + else() + message(SEND_ERROR "VOL CACHE requires HDF5 1.13.1 or newer.") + endif() +endif() + +message(STATUS "HDF5 VOL CACHE: ${WITH_CACHE_VOL}") + # h5bench Utility ############################################################# # @@ -105,6 +123,9 @@ add_library(h5bench_util ${h5bench_util_src}) if(WITH_ASYNC_VOL) target_link_libraries(h5bench_util asynchdf5 h5async) endif() +if(WITH_CACHE_VOL) + target_link_libraries(h5bench_util cache_new_h5api) +endif() # h5bench WRITE ############################################################### # diff --git a/commons/h5bench_util.c b/commons/h5bench_util.c index 17957448..d0694b02 100644 --- a/commons/h5bench_util.c +++ b/commons/h5bench_util.c @@ -69,11 +69,13 @@ h5bench_sleep(duration sleep_time) void async_sleep(hid_t es_id, duration sleep_time) { +#ifndef USE_CACHE_VOL #ifdef USE_ASYNC_VOL size_t num_in_progress; hbool_t op_failed; H5ESwait(es_id, 0, &num_in_progress, &op_failed); +#endif #endif h5bench_sleep(sleep_time); } @@ -646,6 +648,15 @@ _set_params(char *key, char *val_in, bench_params *params_in_out, int do_write) return 0; char *val = _parse_val(val_in); + has_vol_async = has_vol_connector(); + + if (has_vol_async) { + (*params_in_out).asyncMode = MODE_ASYNC; + } + else { + (*params_in_out).asyncMode = MODE_SYNC; + } + if (strcmp(key, "IO_OPERATION") == 0) { if (strcmp(val, "READ") == 0) { params_in_out->io_op = 
IO_READ; @@ -909,20 +920,23 @@ _set_params(char *key, char *val_in, bench_params *params_in_out, int do_write) else (*params_in_out).subfiling = 0; } + else if (strcmp(key, "MODE") == 0) { + if (strcmp(val_in, "SYNC") == 0) { + params_in_out->asyncMode = MODE_SYNC; + } + else if (strcmp(val_in, "ASYNC") == 0) { + params_in_out->asyncMode = MODE_ASYNC; + } + else { + printf("Unknown MODE: %s\n", val_in); + return -1; + } + } else { printf("Unknown Parameter: %s\n", key); return -1; } - has_vol_async = has_vol_connector(); - - if (has_vol_async) { - (*params_in_out).asyncMode = MODE_ASYNC; - } - else { - (*params_in_out).asyncMode = MODE_SYNC; - } - if ((*params_in_out).useCSV) (*params_in_out).csv_fs = csv_init(params_in_out->csv_path, params_in_out->env_meta_path); diff --git a/docs/source/buildinstructions.rst b/docs/source/buildinstructions.rst index 056252f0..fe8fe6e3 100644 --- a/docs/source/buildinstructions.rst +++ b/docs/source/buildinstructions.rst @@ -114,6 +114,33 @@ h5bench will automatically set the environment variables required to run the asy # MacOS export DYLD_LIBRARY_PATH="$HDF5_HOME/lib:$ASYNC_HOME" + + +Build to run with Cache VOL +----------------------------------- + +To run h5bench with Cache VOL, you need the develop branches of HDF5, Async VOL and Cache VOL. Please create a folder (HDF5_VOL_DIR) with the following structure. Please build Async VOL and Cache VOL first and copy the header files and library files to the folder, HDF5_VOL_DIR. + + HDF5_VOL_DIR: + ./include - contains header files for Cache VOL and Async VOL. + ./lib - contains Cache VOL and Async VOL libraries such as libh5cache_ext.so, libcache_new_h5api.a, libasynchdf5.a, libh5async.so + +.. code-block:: bash + mkdir build + cd build + cmake .. 
-DWITH_CACHE_VOL:BOOL=ON -DCMAKE_C_FLAGS="-I$HDF5_VOL_DIR/include -L$HDF5_VOL_DIR/lib -g" + make + +Please also set the following environment variable: + + export HDF5_HOME="$YOUR_HDF5_DEVELOP_BRANCH_BUILD/hdf5" + export HDF5_VOL_CONNECTOR="cache_ext config=config.cfg;under_vol=0;under_info={}" + export HDF5_PLUGIN_PATH="$HDF5_VOL_DIR/lib" + export DYLD_LIBRARY_PATH="$HDF5_HOME/lib:$HDF5_PLUGIN_PATH" + +On Linux platform, replace DYLD_LIBRARY_PATH with LD_LIBRARY_PATH. Please follow instruction from https://vol-cache.readthedocs.io/en/latest/gettingstarted.html#set-environment-variables to set up the configuration for Cache VOL. + + ----------------------------------- Build with Spack ----------------------------------- diff --git a/docs/source/index.rst b/docs/source/index.rst index 853d4d9c..0f63602b 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -24,6 +24,19 @@ h5bench is a suite of parallel I/O benchmarks or kernels representing I/O patter These are the benchmarks and kernels currently available in h5bench: +<<<<<<< HEAD +==================== =========================== ==================== ======================== ======================== +**Benchmark** **Name** **SYNC** **ASYNC VOL** **CACHE VOL** +==================== =========================== ==================== ======================== ======================== +h5bench write ``h5bench_write`` |:white_check_mark:| |:white_check_mark:| |:white_check_mark:| +h5bench read ``h5bench_read`` |:white_check_mark:| |:white_check_mark:| |:white_large_square:| +Metadata Stress ``h5bench_hdf5_iotest`` |:white_check_mark:| |:white_large_square:| |:white_large_square:| +AMReX ``h5bench_amrex`` |:white_check_mark:| |:white_check_mark:| |:white_large_square:| +Exerciser ``h5bench_exerciser`` |:white_check_mark:| |:white_large_square:| |:white_large_square:| +OpenPMD (write) ``h5bench_openpmd_write`` |:white_check_mark:| |:white_large_square:| |:white_large_square:| +OpenPMD (read) 
``h5bench_openpmd_read`` |:white_check_mark:| |:white_large_square:| |:white_large_square:| +==================== =========================== ==================== ======================== ======================= +======= ==================== =========================== ==================== ======================== **Benchmark** **Name** **SYNC** **ASYNC VOL** ==================== =========================== ==================== ======================== @@ -36,6 +49,7 @@ OpenPMD (write) ``h5bench_openpmd_write`` |:white_check_mark:| |:white_la OpenPMD (read) ``h5bench_openpmd_read`` |:white_check_mark:| |:white_large_square:| E3SM-IO ``h5bench_e3sm`` |:white_check_mark:| |:white_large_square:| ==================== =========================== ==================== ======================== +>>>>>>> f21f1e0709454a7a30598f58068ffa788a523da3 .. toctree:: :maxdepth: 2 diff --git a/docs/source/running.rst b/docs/source/running.rst index 052fc017..b7a4e3e0 100644 --- a/docs/source/running.rst +++ b/docs/source/running.rst @@ -83,7 +83,16 @@ Because some benchmarks inside h5bench do not have support for VOL connectors ye "connector": "async under_vol=0;under_info={}" } -You should provide the absolute path for all the libraries required by the VOL connector using the ``library`` property, the ``path`` of the VOL connector, and the configuration in ``connector``. The provided example depicts how to configure the HDF5 VOL async connector. +You should provide the absolute path for all the libraries required by the VOL connector using the ``library`` property, the ``path`` of the VOL connector, and the configuration in ``connector``. The provided example depicts how to configure the HDF5 VOL async connector. For Cache connector, + +.. 
code-block:: + "vol": { + "library": "/hdf5-vol-dir/lib:/argobots/install/lib:/hdf5-install/install:", + "path": "/hdf5-vol-dir/lib", + "connector": "cache_ext config=cache.cfg;under_vol=512;under_info={under_vol=0;under_info={}}" + } + +One also has to provide the configuration file for the Cache storage: cache.cfg. Directory ^^^^^^^^^ diff --git a/docs/source/vpic.rst b/docs/source/vpic.rst index b9a1b2ae..d975a5dd 100644 --- a/docs/source/vpic.rst +++ b/docs/source/vpic.rst @@ -220,7 +220,7 @@ Known Issues .. warning:: - In Cori/NERSC or similar platforms that use Cray-MPICH library, if you encouter a failed assertion regarding support for ``MPI_THREAD_MULTIPLE`` you should define the following environment variable: + In Cori/NERSC and Theta/ALCF, or similar platforms that use Cray-MPICH library, if you encouter a failed assertion regarding support for ``MPI_THREAD_MULTIPLE`` you should define the following environment variable: .. code-block:: bash @@ -228,8 +228,10 @@ Known Issues .. warning:: - If you're trying to run the benchmark with the HDF5 VOL ASYNC connector in MacOS and are getting segmentation fault (from ``ABT_thread_create``), please try to set the following environment variable: + If you're trying to run the benchmark with the HDF5 VOL ASYNC in MacOS (or any other platforms) and are getting segmentation fault (from ``ABT_thread_create``), please try to set the following environment variable: .. code-block:: bash export ABT_THREAD_STACKSIZE=100000 + + If you run the benchmark with the HDF5 VOL CACHE connector, we set this value automatically inside the VOL connector. 
diff --git a/h5bench_patterns/h5bench_write.c b/h5bench_patterns/h5bench_write.c index 04b0e5a1..ad8030c9 100644 --- a/h5bench_patterns/h5bench_write.c +++ b/h5bench_patterns/h5bench_write.c @@ -48,10 +48,16 @@ #include #include "../commons/h5bench_util.h" #include "../commons/async_adaptor.h" + +#ifdef USE_CACHE_VOL +#include "cache_new_h5api.h" +#endif + #ifdef HAVE_SUBFILING #include "H5FDsubfiling.h" #include "H5FDioc.h" #endif + #define DIM_MAX 3 herr_t ierr; @@ -474,7 +480,6 @@ data_write_contig_contig_MD_array(time_step *ts, hid_t loc, hid_t *dset_ids, hid ts->es_meta_create); unsigned t2 = get_time_usec(); - ierr = H5Dwrite_async(dset_ids[0], H5T_NATIVE_FLOAT, memspace, filespace, plist_id, data_in->x, ts->es_data); ierr = @@ -757,6 +762,9 @@ _run_benchmark_write(bench_params params, hid_t file_id, hid_t fapl, hid_t files unsigned long metadata_time_imp = 0, data_time_imp = 0; unsigned long meta_time1 = 0, meta_time2 = 0, meta_time3 = 0, meta_time4 = 0, meta_time5 = 0; for (int ts_index = 0; ts_index < timestep_cnt; ts_index++) { +#ifdef USE_CACHE_VOL + H5Fcache_async_close_wait(file_id); +#endif meta_time1 = 0, meta_time2 = 0, meta_time3 = 0, meta_time4 = 0, meta_time5 = 0; time_step *ts = &(MEM_MONITOR->time_steps[ts_index]); MEM_MONITOR->mem_used += ts->mem_size; @@ -780,7 +788,9 @@ _run_benchmark_write(bench_params params, hid_t file_id, hid_t fapl, hid_t files if (MY_RANK == 0) printf("Writing %s ... 
\n", grp_name); - +#ifdef USE_CACHE_VOL + H5Fcache_async_op_pause(file_id); +#endif switch (pattern) { case CONTIG_CONTIG_1D: case CONTIG_CONTIG_2D: @@ -816,7 +826,9 @@ _run_benchmark_write(bench_params params, hid_t file_id, hid_t fapl, hid_t files default: break; } - +#ifdef USE_CACHE_VOL + H5Fcache_async_op_start(file_id); +#endif ts->status = TS_DELAY; if (params.cnt_time_step_delay == 0) { @@ -833,7 +845,6 @@ _run_benchmark_write(bench_params params, hid_t file_id, hid_t fapl, hid_t files t4 = get_time_usec(); meta_time5 += (t4 - t3); } - if (ts_index != timestep_cnt - 1) { // no sleep after the last ts if (params.compute_time.time_num >= 0) { if (MY_RANK == 0) @@ -842,7 +853,7 @@ _run_benchmark_write(bench_params params, hid_t file_id, hid_t fapl, hid_t files } } - *metadata_time_total += (meta_time1 + meta_time2 + meta_time3 + meta_time4); + *metadata_time_total += (meta_time1 + meta_time2 + meta_time3 + meta_time4 + meta_time5); *data_time_total += (data_time_exp + data_time_imp); } // end for timestep_cnt @@ -1011,6 +1022,14 @@ main(int argc, char *argv[]) if (params.subfiling) subfiling = 1; +#if H5_VERSION_GE(1, 13, 1) + if (H5VLis_connector_registered_by_name("cache_ext")) { + if (MY_RANK == 0) { + printf("Using 'cache_ext' VOL connector\n"); + } + } +#endif + #if H5_VERSION_GE(1, 13, 0) if (H5VLis_connector_registered_by_name("async")) { if (MY_RANK == 0) { @@ -1078,6 +1097,9 @@ main(int argc, char *argv[]) else { file_id = H5Fcreate_async(output_file, H5F_ACC_TRUNC, H5P_DEFAULT, fapl, 0); } +#ifdef USE_CACHE_VOL + H5Fcache_async_close_set(file_id); +#endif unsigned long tfopen_end = get_time_usec(); if (MY_RANK == 0) diff --git a/samples/async-write-read-contig-1d-small.json b/samples/async-write-read-contig-1d-small.json index b3f2aef5..96450733 100644 --- a/samples/async-write-read-contig-1d-small.json +++ b/samples/async-write-read-contig-1d-small.json @@ -2,7 +2,7 @@ "mpi": { "command": "mpirun", "ranks": "4", - "configuration": 
"--allow-run-as-root --oversubscribe" + "configuration": "--allow-run-as-root --np 2 --oversubscribe" }, "vol": { "library": "/vol-async/src:/hdf5-async-vol-register-install/lib:/argobots/install/lib:/hdf5-install/install:", diff --git a/samples/cache-write-1d-contig-contig.json b/samples/cache-write-1d-contig-contig.json new file mode 100644 index 00000000..f6f8f7c5 --- /dev/null +++ b/samples/cache-write-1d-contig-contig.json @@ -0,0 +1,37 @@ +{ + "mpi": { + "command": "mpirun", + "ranks": "4", + "configuration": "--allow-run-as-root --np 2 --oversubscribe" + }, + "vol": { + "library": "/vol/lib:/argobots/lib:/hdf5/lib", + "path": [ + "vol/lib" + ], + "connector": "cache_ext config=cache_1.cfg;under_vol=512;under_info={under_vol=0;under_info={}}" + }, + "file-system": {}, + "directory": "storage", + "benchmarks": [ + { + "benchmark": "write", + "file": "test.h5", + "configuration": { + "MEM_PATTERN": "CONTIG", + "FILE_PATTERN": "CONTIG", + "TIMESTEPS": "5", + "DELAYED_CLOSE_TIMESTEPS": "0", + "COLLECTIVE_DATA": "YES", + "COLLECTIVE_METADATA": "YES", + "EMULATED_COMPUTE_TIME_PER_TIMESTEP": "1 s", + "NUM_DIMS": "1", + "DIM_1": "4194304", + "DIM_2": "1", + "DIM_3": "1", + "CSV_FILE": "output.csv", + "MODE": "SYNC" + } + } + ] +} diff --git a/samples/cache-write-2d-contig-contig.json b/samples/cache-write-2d-contig-contig.json new file mode 100644 index 00000000..7d80a656 --- /dev/null +++ b/samples/cache-write-2d-contig-contig.json @@ -0,0 +1,37 @@ +{ + "mpi": { + "command": "mpirun", + "ranks": "4", + "configuration": "--allow-run-as-root --np 2 --oversubscribe" + }, + "vol": { + "library": "/vol/lib:/argobots/lib:/hdf5/lib", + "path": [ + "vol/lib" + ], + "connector": "cache_ext config=cache_1.cfg;under_vol=512;under_info={under_vol=0;under_info={}}" + }, + "file-system": {}, + "directory": "storage", + "benchmarks": [ + { + "benchmark": "write", + "file": "test.h5", + "configuration": { + "MEM_PATTERN": "CONTIG", + "FILE_PATTERN": "CONTIG", + "TIMESTEPS": "5", + 
"DELAYED_CLOSE_TIMESTEPS": "0", + "COLLECTIVE_DATA": "YES", + "COLLECTIVE_METADATA": "YES", + "EMULATED_COMPUTE_TIME_PER_TIMESTEP": "1 s", + "NUM_DIMS": "2", + "DIM_1": "1024", + "DIM_2": "1024", + "DIM_3": "1", + "CSV_FILE": "output.csv", + "MODE": "SYNC" + } + } + ] +} diff --git a/samples/cache-write-3d-contig-contig.json b/samples/cache-write-3d-contig-contig.json new file mode 100644 index 00000000..617bebe0 --- /dev/null +++ b/samples/cache-write-3d-contig-contig.json @@ -0,0 +1,37 @@ +{ + "mpi": { + "command": "mpirun", + "ranks": "4", + "configuration": "--allow-run-as-root --np 2 --oversubscribe" + }, + "vol": { + "library": "/vol/lib:/argobots/lib:/hdf5/lib", + "path": [ + "vol/lib" + ], + "connector": "cache_ext config=cache_1.cfg;under_vol=512;under_info={under_vol=0;under_info={}}" + }, + "file-system": {}, + "directory": "storage", + "benchmarks": [ + { + "benchmark": "write", + "file": "test.h5", + "configuration": { + "MEM_PATTERN": "CONTIG", + "FILE_PATTERN": "CONTIG", + "TIMESTEPS": "5", + "DELAYED_CLOSE_TIMESTEPS": "0", + "COLLECTIVE_DATA": "YES", + "COLLECTIVE_METADATA": "YES", + "EMULATED_COMPUTE_TIME_PER_TIMESTEP": "1 s", + "NUM_DIMS": "3", + "DIM_1": "64", + "DIM_2": "64", + "DIM_3": "64", + "CSV_FILE": "output.csv", + "MODE": "SYNC" + } + } + ] +} diff --git a/samples/update.py b/samples/update.py index c46cfb87..5a0d93b4 100644 --- a/samples/update.py +++ b/samples/update.py @@ -18,6 +18,7 @@ HDF5_DIR = os.getenv('HDF5_DIR') ABT_DIR = os.getenv('ABT_DIR') ASYNC_DIR = os.getenv('ASYNC_DIR') +VOL_DIR = os.getenv('HDF5_VOL_DIR') if HDF5_DIR is None: print('HDF5_DIR enviroment variable is not set!') @@ -33,20 +34,31 @@ with open(ARGS.setup, 'r') as f: data = json.load(f, object_pairs_hook=collections.OrderedDict) + +if VOL_DIR is not None: + ''' Multiple VOL connectors ''' + data['vol']['library'] = '{}:{}:{}'.format( + '/'.join([VOL_DIR, 'lib']), + '/'.join([ABT_DIR, 'lib']), + '/'.join([HDF5_DIR, 'lib']) + ) -data['vol']['library'] = 
'{}:{}:{}'.format( - '/'.join([ASYNC_DIR, 'lib']), - '/'.join([ABT_DIR, 'lib']), - '/'.join([HDF5_DIR, 'lib']) -) + data['vol']['preload'] = '{}:{}:{}'.format( + '/'.join([ASYNC_DIR, 'lib', 'libh5async.so']), + '/'.join([ABT_DIR, 'lib', 'libabt.so']), + '/'.join([HDF5_DIR, 'lib', 'libhdf5.so']) + ) -data['vol']['preload'] = '{}:{}:{}'.format( - '/'.join([ASYNC_DIR, 'lib', 'libh5async.so']), - '/'.join([ABT_DIR, 'lib', 'libabt.so']), - '/'.join([HDF5_DIR, 'lib', 'libhdf5.so']) -) + data['vol']['path'] = '/'.join([VOL_DIR, 'lib']) +else: + ''' Single VOL connector ''' + data['vol']['library'] = '{}:{}:{}'.format( + '/'.join([ASYNC_DIR, 'lib']), + '/'.join([ABT_DIR, 'lib']), + '/'.join([HDF5_DIR, 'lib']) + ) -data['vol']['path'] = '/'.join([ASYNC_DIR, 'lib']) + data['vol']['path'] = '/'.join([ASYNC_DIR, 'lib']) with open(ARGS.setup, 'w') as f: - json.dump(data, f, indent=4, sort_keys=False) \ No newline at end of file + json.dump(data, f, indent=4, sort_keys=False) diff --git a/src/h5bench.py b/src/h5bench.py index 38f35666..825cfaac 100755 --- a/src/h5bench.py +++ b/src/h5bench.py @@ -55,7 +55,8 @@ def check_parallel(self): """Check for parallel overwrite command.""" mpi = [ 'mpirun', 'mpiexec', - 'srun' + 'srun', + 'aprun' ] # Get user defined shell @@ -238,6 +239,8 @@ def prepare_parallel(self, mpi): self.mpi = '{} -np {}'.format(mpi['command'], mpi['ranks']) elif mpi['command'] == 'srun': self.mpi = '{} --cpu_bind=cores -n {}'.format(mpi['command'], mpi['ranks']) + elif mpi['command'] == 'aprun': + self.mpi = '{} -n {} -N {} '.format(mpi['command'], mpi['ranks'], mpi['ppn']) else: self.logger.warning('Unknown MPI launcher selected!') @@ -267,6 +270,10 @@ def prepare_vol(self, vol): self.vol_environment['HDF5_PLUGIN_PATH'] = vol['path'] if 'preload' in vol: self.vol_environment['LD_PRELOAD'] += vol['preload'] + if 'cache_write' in vol: + self.vol_environment['HDF5_CACHE_WR'] = vol['cache_write'] + if 'cache_read' in vol: + self.vol_environment['HDF5_CACHE_RD'] = 
vol['cache_read'] self.vol_environment['ABT_THREAD_STACKSIZE'] = '100000'