Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Support data type for object detection #236

Merged
merged 2 commits into the base branch on
May 10, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 15 additions & 0 deletions object_detection/index.html
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,21 @@
</div>
</div>
</div>
<div class="row mb-2 align-items-center">
  <div class="col-1 col-md-1">
    <span class="nowrap">Data Type</span>
  </div>
  <div class="col-md-auto">
    <!-- Bootstrap button-group toggle; main.js reads the checked radio's id
         (float32 / float16) to pick the model variant. -->
    <div class="btn-group-toggle" data-toggle="buttons" id="dataTypeBtns">
      <!-- 'active' must be a class, not a bare attribute, for Bootstrap to
           render the default selection. -->
      <label class="btn btn-outline-info active" id="float32Label">
        <!-- name="dataType" keeps this radio group independent of the layout
             radios; reusing name="layout" would make the two groups mutually
             exclusive (checking one unchecks the other). -->
        <input type="radio" name="dataType" id="float32" autocomplete="off" checked>Float32
      </label>
      <label class="btn btn-outline-info" id="float16Label">
        <input type="radio" name="dataType" id="float16" autocomplete="off">Float16
      </label>
    </div>
  </div>
</div>
<div class="row align-items-center">
<div class="col-1 col-md-1">
<span>Model</span>
Expand Down
67 changes: 60 additions & 7 deletions object_detection/main.js
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,9 @@ const imgElement = document.getElementById('feedElement');
imgElement.src = './images/test.jpg';
const camElement = document.getElementById('feedMediaElement');
let modelName = '';
let modelId = '';
let layout = 'nhwc';
let dataType = 'float32';
let instanceType = modelName + layout;
let rafReq;
let isFirstTimeLoad = true;
Expand All @@ -33,6 +35,19 @@ let lastBackend = '';
let stopRender = true;
let isRendering = false;
// Selectors disabled while a model is loading / inference is running.
const disabledSelectors = ['#tabs > li', '.btn'];
// Every object-detection model id offered in the UI.
const modelIds = ['ssdmobilenetv1', 'tinyyolov2'];
// Models available per device type and data type: cpu is float32-only,
// gpu supports both float32 and float16, npu only the float16
// SSD MobileNet V1 variant.
const modelList = {
  cpu: {
    float32: modelIds,
  },
  gpu: {
    float32: modelIds,
    float16: modelIds,
  },
  npu: {
    float16: ['ssdmobilenetv1'],
  },
};

async function fetchLabels(url) {
const response = await fetch(url);
Expand All @@ -53,18 +68,56 @@ $('#backendBtns .btn').on('change', async (e) => {
if (inputType === 'camera') {
await stopCamRender();
}
layout = utils.getDefaultLayout($(e.target).attr('id'));
await main();
const backendId = $(e.target).attr('id');
layout = utils.getDefaultLayout(backendId);
[backend, deviceType] = backendId.split('_');
// Only show the supported models for each deviceType. Now fp16 nchw models
// are only supported on gpu/npu.
if (backendId == 'webnn_gpu') {
ui.handleBtnUI('#float16Label', false);
ui.handleBtnUI('#float32Label', false);
utils.displayAvailableModels(modelList, modelIds, deviceType, dataType);
} else if (backendId == 'webnn_npu') {
ui.handleBtnUI('#float16Label', false);
ui.handleBtnUI('#float32Label', true);
$('#float16').click();
utils.displayAvailableModels(modelList, modelIds, deviceType, 'float16');
} else {
ui.handleBtnUI('#float16Label', true);
ui.handleBtnUI('#float32Label', false);
$('#float32').click();
utils.displayAvailableModels(modelList, modelIds, deviceType, 'float32');
}

// Uncheck selected model
if (modelId != '') {
$(`#${modelId}`).parent().removeClass('active');
}
});

// When a model button is selected: stop any live camera loop, derive the
// concrete model variant name (fp16 variants carry an 'fp16' suffix), and
// rerun inference.
$('#modelBtns .btn').on('change', async (e) => {
  if (inputType === 'camera') {
    await stopCamRender();
  }
  modelId = $(e.target).attr('id');
  // Map the UI model id to the model variant for the current data type.
  // (The former duplicate `modelName = $(e.target).attr('id');` dead store
  // is removed; strict equality replaces `==`.)
  modelName = dataType === 'float16' ? `${modelId}fp16` : modelId;

  await main();
});

// When the data type (float32 / float16) changes: refresh the list of
// models available for the current device type and clear any previously
// selected model so a stale variant is not kept checked.
$('#dataTypeBtns .btn').on('change', async (e) => {
  dataType = $(e.target).attr('id');
  utils.displayAvailableModels(modelList, modelIds, deviceType, dataType);
  // Uncheck the previously selected model button, if any
  // (strict `!==` replaces the loose `!=` comparison).
  if (modelId !== '') {
    $(`#${modelId}`).parent().removeClass('active');
  }
});

// Click trigger to do inference with <img> element
$('#img').click(async () => {
if (inputType === 'camera') {
Expand Down Expand Up @@ -146,7 +199,7 @@ async function drawOutput(inputElement, outputs, labels) {
$('#inferenceresult').show();

// Draw output for SSD Mobilenet V1 model
if (modelName === 'ssdmobilenetv1') {
if (modelName.includes('ssdmobilenetv1')) {
const anchors = SsdDecoder.generateAnchors({});
SsdDecoder.decodeOutputBoxTensor({}, outputs.boxes, anchors);
let [totalDetections, boxesList, scoresList, classesList] =
Expand Down Expand Up @@ -181,8 +234,10 @@ function showPerfResult(medianComputeTime = undefined) {
function constructNetObject(type) {
const netObject = {
'tinyyolov2nchw': new TinyYoloV2Nchw(),
'tinyyolov2fp16nchw': new TinyYoloV2Nchw('float16'),
'tinyyolov2nhwc': new TinyYoloV2Nhwc(),
'ssdmobilenetv1nchw': new SsdMobilenetV1Nchw(),
'ssdmobilenetv1fp16nchw': new SsdMobilenetV1Nchw('float16'),
'ssdmobilenetv1nhwc': new SsdMobilenetV1Nhwc(),
};

Expand All @@ -192,8 +247,6 @@ function constructNetObject(type) {
async function main() {
try {
if (modelName === '') return;
[backend, deviceType] =
$('input[name="backend"]:checked').attr('id').split('_');
ui.handleClick(disabledSelectors, true);
if (isFirstTimeLoad) $('#hint').hide();
let start;
Expand All @@ -218,7 +271,7 @@ async function main() {
netInstance = constructNetObject(instanceType);
inputOptions = netInstance.inputOptions;
labels = await fetchLabels(inputOptions.labelUrl);
if (modelName === 'tinyyolov2') {
if (modelName.includes('tinyyolov2')) {
outputs = {
'output': new Float32Array(
utils.sizeOfShape(netInstance.outputDimensions)),
Expand Down
7 changes: 2 additions & 5 deletions object_detection/ssd_mobilenetv1_nchw.js
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,10 @@ import {buildConstantByNpy, computePadding2DForAutoPad, weightsOrigin} from '../

// SSD MobileNet V1 model with 'nchw' layout, trained on the COCO dataset.
export class SsdMobilenetV1Nchw {
constructor() {
constructor(dataType = 'float32') {
this.context_ = null;
this.deviceType_ = null;
this.targetDataType_ = 'float32';
this.targetDataType_ = dataType;
this.model_ = null;
this.builder_ = null;
this.graph_ = null;
Expand Down Expand Up @@ -86,9 +86,6 @@ ${nameArray[1]}_BatchNorm_batchnorm`;
async load(contextOptions) {
this.context_ = await navigator.ml.createContext(contextOptions);
this.deviceType_ = contextOptions.deviceType;
if (this.deviceType_ == 'gpu' || this.deviceType_ == 'npu') {
this.targetDataType_ = 'float16';
}
this.builder_ = new MLGraphBuilder(this.context_);
let input = this.builder_.input('input', {
dataType: 'float32',
Expand Down
7 changes: 2 additions & 5 deletions object_detection/tiny_yolov2_nchw.js
Original file line number Diff line number Diff line change
Expand Up @@ -4,12 +4,12 @@ import {buildConstantByNpy, computePadding2DForAutoPad, weightsOrigin} from '../

// Tiny Yolo V2 model with 'nchw' layout, trained on the Pascal VOC dataset.
export class TinyYoloV2Nchw {
constructor() {
constructor(dataType = 'float32') {
this.context_ = null;
this.builder_ = null;
this.graph_ = null;
this.deviceType_ = null;
this.targetDataType_ = 'float32';
this.targetDataType_ = dataType;
this.weightsUrl_ = weightsOrigin() +
'/test-data/models/tiny_yolov2_nchw/weights/';
this.inputOptions = {
Expand Down Expand Up @@ -60,9 +60,6 @@ export class TinyYoloV2Nchw {
async load(contextOptions) {
this.context_ = await navigator.ml.createContext(contextOptions);
this.deviceType_ = contextOptions.deviceType;
if (this.deviceType_ == 'gpu' || this.deviceType_ == 'npu') {
this.targetDataType_ = 'float16';
}
this.builder_ = new MLGraphBuilder(this.context_);
let image = this.builder_.input('input', {
dataType: 'float32',
Expand Down
2 changes: 1 addition & 1 deletion test-data
Submodule test-data updated 50 files
+1 βˆ’1 models/tiny_yolov2_nchw/README.md
+ βˆ’ models/tiny_yolov2_nchw/weights/BatchNormalization_B.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/BatchNormalization_mean.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/BatchNormalization_mean1.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/BatchNormalization_mean2.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/BatchNormalization_mean3.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/BatchNormalization_mean4.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/BatchNormalization_mean5.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/BatchNormalization_mean6.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/BatchNormalization_mean7.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/BatchNormalization_scale.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/BatchNormalization_scale1.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/BatchNormalization_scale2.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/BatchNormalization_scale3.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/BatchNormalization_scale4.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/BatchNormalization_scale5.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/BatchNormalization_scale6.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/BatchNormalization_scale7.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/BatchNormalization_variance.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/BatchNormalization_variance1.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/BatchNormalization_variance2.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/BatchNormalization_variance3.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/BatchNormalization_variance4.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/BatchNormalization_variance5.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/BatchNormalization_variance6.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/BatchNormalization_variance7.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/ConvBnFusion_BN_B_BatchNormalization_B.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/ConvBnFusion_BN_B_BatchNormalization_B1.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/ConvBnFusion_BN_B_BatchNormalization_B2.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/ConvBnFusion_BN_B_BatchNormalization_B3.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/ConvBnFusion_BN_B_BatchNormalization_B4.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/ConvBnFusion_BN_B_BatchNormalization_B5.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/ConvBnFusion_BN_B_BatchNormalization_B6.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/ConvBnFusion_BN_B_BatchNormalization_B7.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/ConvBnFusion_W_convolution1_W.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/ConvBnFusion_W_convolution2_W.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/ConvBnFusion_W_convolution3_W.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/ConvBnFusion_W_convolution4_W.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/ConvBnFusion_W_convolution5_W.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/ConvBnFusion_W_convolution6_W.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/ConvBnFusion_W_convolution7_W.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/ConvBnFusion_W_convolution_W.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/convolution1_W.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/convolution2_W.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/convolution3_W.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/convolution4_W.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/convolution5_W.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/convolution8_B.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/convolution8_W.npy
+ βˆ’ models/tiny_yolov2_nchw/weights/convolution_W.npy