Skip to content

Commit

Permalink
Add a benchmark for wasi-nn ONNX models.
Browse files Browse the repository at this point in the history
  • Loading branch information
jianjunz committed May 14, 2024
1 parent 3adb762 commit f1940e9
Show file tree
Hide file tree
Showing 5 changed files with 124 additions and 106 deletions.
143 changes: 45 additions & 98 deletions benchmarks/image-classification/rust-benchmark/Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

10 changes: 9 additions & 1 deletion benchmarks/image-classification/rust-benchmark/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,15 @@ publish = false

[dependencies]
image = { version = "0.23.14", default-features = false, features = ["jpeg"] }
wasi-nn = "0.2.1"
wasi-nn = "0.5.0"
sightglass-api = "0.1"

[workspace]

[[bin]]
name = "image-classification-benchmark-onnx"
path = "src/onnx.rs"

[[bin]]
name = "image-classification-benchmark-openvino"
path = "src/openvino.rs"
55 changes: 55 additions & 0 deletions benchmarks/image-classification/rust-benchmark/src/onnx.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
use image::io::Reader;
use image::DynamicImage;
use sightglass_api as bench;
use std::convert::TryInto;
use std::fs;
use wasi_nn;
mod imagenet_classes;

/// Entry point for the ONNX image-classification benchmark.
///
/// Loads a pre-decoded RGB image tensor and a MobileNet ONNX model from the
/// current directory, runs a single inference between the sightglass
/// `start`/`end` markers (so only `compute()` is measured), then sorts the
/// resulting class probabilities.
pub fn main() {
    // The image is already raw tensor bytes (produced by setup.sh), so no
    // JPEG decoding happens anywhere near the measured region.
    let tensor_data = fs::read("./kitten.rgb").unwrap();

    // Load the ONNX model from a file and target the CPU.
    let graph =
        wasi_nn::GraphBuilder::new(wasi_nn::GraphEncoding::Onnx, wasi_nn::ExecutionTarget::CPU)
            .build_from_files(["./mobilenet.onnx"])
            .unwrap();

    let mut context = graph.init_execution_context().unwrap();
    // f32 input of shape [1, 3, 224, 224] — presumably NCHW, MobileNet's
    // expected layout; TODO(review) confirm against the model's input spec.
    context
        .set_input(0, wasi_nn::TensorType::F32, &[1, 3, 224, 224], &tensor_data)
        .unwrap();

    bench::start();

    // Execute the inference; this is the only work inside the measured region.
    context.compute().unwrap();

    bench::end();

    // Retrieve the output: 1000 class probabilities.
    let mut output_buffer = vec![0f32; 1000];
    context.get_output(0, &mut output_buffer[..]).unwrap();

    // Sort the scores so the output is actually consumed; the benchmark does
    // not print the classification, so the underscore prefix silences the
    // unused-variable warning the original `result` binding produced.
    let _result = sort_results(&output_buffer);
}

// Sort the buffer of probabilities. The graph places the match probability for each class at the
// index for that class (e.g. the probability of class 42 is placed at buffer[42]). Here we convert
// to a wrapping InferenceResult and sort the results in descending order of probability.
//
// NOTE: the 1000-class ONNX mobilenetv2 output starts at class 0, so the buffer index *is* the
// class id. The `.skip(1)` carried over from the 1001-class OpenVINO model (which prepends a
// background class) would off-by-one every class id here, and has been removed.
fn sort_results(buffer: &[f32]) -> Vec<InferenceResult> {
    let mut results: Vec<InferenceResult> = buffer
        .iter()
        .enumerate()
        .map(|(c, p)| InferenceResult(c, *p))
        .collect();
    // `total_cmp` provides a total order over f32, so this cannot panic the
    // way `partial_cmp(..).unwrap()` would if a score were NaN.
    results.sort_by(|a, b| b.1.total_cmp(&a.1));
    results
}

// A wrapper for class ID and match probabilities.
#[derive(Debug, PartialEq)]
struct InferenceResult(usize, f32);
22 changes: 15 additions & 7 deletions benchmarks/image-classification/setup.sh
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,18 @@
# cargo run -- benchmark benchmarks/image-classification/image-classification-benchmark.wasm --engine-flags="--wasi nn" --engine engines/wasmtime/libengine.so

# Resolve the directory containing this script so all downloads land next to it.
WASI_NN_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
FILENAME=l_openvino_toolkit_ubuntu20_2022.2.0.7713.af16ea1d79a_x86_64
MODEL=https://github.com/intel/openvino-rs/raw/main/crates/openvino/tests/fixtures/mobilenet
# NOTE(review): these guards test -d (directory) on regular files, so `! -d`
# is always true even when the file exists and wget runs every time
# (-O clobbers regardless of -nc); -f was presumably intended.
[ ! -d ${WASI_NN_DIR}/mobilenet.xml ] && wget -nc ${MODEL}/mobilenet.xml -O ${WASI_NN_DIR}/mobilenet.xml
[ ! -d ${WASI_NN_DIR}/mobilenet.bin ] && wget -nc -q --no-check-certificate ${MODEL}/mobilenet.bin -O ${WASI_NN_DIR}/mobilenet.bin
# NOTE(review): the next two lines are byte-identical; the second wget is redundant.
[ ! -d ${WASI_NN_DIR}/openvino ] && wget -nc -q --no-check-certificate https://storage.openvinotoolkit.org/repositories/openvino/packages/2022.2/linux/${FILENAME}.tgz -O ${WASI_NN_DIR}/${FILENAME}.tgz
[ ! -d ${WASI_NN_DIR}/openvino ] && wget -nc -q --no-check-certificate https://storage.openvinotoolkit.org/repositories/openvino/packages/2022.2/linux/${FILENAME}.tgz -O ${WASI_NN_DIR}/${FILENAME}.tgz
# Unpack the toolkit and rename it to a stable `openvino` directory.
[ ! -d ${WASI_NN_DIR}/openvino ] && tar -C ${WASI_NN_DIR} -zxf ${WASI_NN_DIR}/${FILENAME}.tgz && mv ${WASI_NN_DIR}/${FILENAME} ${WASI_NN_DIR}/openvino || echo "OpenVINO is already there, skipping..."

# OpenVINO
# Fetch the OpenVINO mobilenet IR model files and the OpenVINO runtime archive.
OPENVINO_FILENAME=l_openvino_toolkit_ubuntu20_2022.2.0.7713.af16ea1d79a_x86_64
OPENVINO_MODEL=https://github.com/intel/openvino-rs/raw/main/crates/openvino/tests/fixtures/mobilenet
# Guard with -f (regular file), not -d: `! -d` is always true for a plain
# file, so the old guard never skipped a re-download of an existing model
# (wget -O clobbers even with -nc).
[ ! -f ${WASI_NN_DIR}/mobilenet.xml ] && wget -nc ${OPENVINO_MODEL}/mobilenet.xml -O ${WASI_NN_DIR}/mobilenet.xml
[ ! -f ${WASI_NN_DIR}/mobilenet.bin ] && wget -nc -q --no-check-certificate ${OPENVINO_MODEL}/mobilenet.bin -O ${WASI_NN_DIR}/mobilenet.bin
# Download the toolkit archive once (the duplicated wget line was removed),
# then unpack and rename it to a stable `openvino` directory.
[ ! -d ${WASI_NN_DIR}/openvino ] && wget -nc -q --no-check-certificate https://storage.openvinotoolkit.org/repositories/openvino/packages/2022.2/linux/${OPENVINO_FILENAME}.tgz -O ${WASI_NN_DIR}/${OPENVINO_FILENAME}.tgz
[ ! -d ${WASI_NN_DIR}/openvino ] && tar -C ${WASI_NN_DIR} -zxf ${WASI_NN_DIR}/${OPENVINO_FILENAME}.tgz && mv ${WASI_NN_DIR}/${OPENVINO_FILENAME} ${WASI_NN_DIR}/openvino || echo "OpenVINO is already there, skipping..."

# ONNX
# Fetch the pinned mobilenetv2 ONNX model and a pre-decoded RGB test image.
ONNX_MODEL=https://github.com/onnx/models/raw/bec48b6a70e5e9042c0badbaafefe4454e072d08/validated/vision/classification/mobilenet/model/mobilenetv2-7.onnx?download=
[ ! -f ${WASI_NN_DIR}/mobilenet.onnx ] && wget -nc ${ONNX_MODEL} -O ${WASI_NN_DIR}/mobilenet.onnx
ONNX_IMAGE_RGB=https://github.com/bytecodealliance/wasmtime/raw/v20.0.2/crates/wasi-nn/tests/fixtures/kitten.rgb
[ ! -f ${WASI_NN_DIR}/kitten.rgb ] && wget -nc ${ONNX_IMAGE_RGB} -O ${WASI_NN_DIR}/kitten.rgb

0 comments on commit f1940e9

Please sign in to comment.