Commit

Merge branch 'nmetulev/wcrapis2' of https://github.com/microsoft/ai-dev-gallery into nmetulev/wcrapis2

nmetulev committed Feb 13, 2025
2 parents 4e6acda + 61924b7 commit 6d87b2b
Showing 2 changed files with 24 additions and 37 deletions.
@@ -57,8 +57,7 @@ private void Page_Loaded()
// </exclude>
protected override async Task LoadModelAsync(SampleNavigationParameters sampleParams)
{
var hardwareAccelerator = sampleParams.HardwareAccelerator;
await InitModel(sampleParams.ModelPath, hardwareAccelerator);
await InitModel(sampleParams.ModelPath, sampleParams.HardwareAccelerator);
sampleParams.NotifyCompletion();

await DetectPose(Path.Join(Windows.ApplicationModel.Package.Current.InstalledLocation.Path, "Assets", "pose_default.png"));
@@ -111,7 +110,6 @@ private async void UploadButton_Click(object sender, RoutedEventArgs e)
var file = await picker.PickSingleFileAsync();
if (file != null)
{
// Call function to run inference and classify image
UploadButton.Focus(FocusState.Programmatic);
await DetectPose(file.Path);
}
@@ -127,37 +125,29 @@ private async Task DetectPose(string filePath)
Loader.IsActive = true;
Loader.Visibility = Visibility.Visible;
UploadButton.Visibility = Visibility.Collapsed;

DefaultImage.Source = new BitmapImage(new Uri(filePath));
NarratorHelper.AnnounceImageChanged(DefaultImage, "Image changed: new upload."); // <exclude-line>

using Bitmap image = new(filePath);

var originalImageWidth = image.Width;
var originalImageHeight = image.Height;
using Bitmap originalImage = new(filePath);

int modelInputWidth = 256;
int modelInputHeight = 192;

// Resize Bitmap
using Bitmap resizedImage = BitmapFunctions.ResizeBitmap(image, modelInputWidth, modelInputHeight);
using Bitmap resizedImage = BitmapFunctions.ResizeBitmap(originalImage, modelInputWidth, modelInputHeight);

var predictions = await Task.Run(() =>
{
// Preprocessing
Tensor<float> input = new DenseTensor<float>([1, 3, modelInputWidth, modelInputHeight]);
input = BitmapFunctions.PreprocessBitmapWithStdDev(resizedImage, input);

var inputMetadataName = _inferenceSession!.InputNames[0];

// Setup inputs
var inputs = new List<NamedOnnxValue>
var onnxInputs = new List<NamedOnnxValue>
{
NamedOnnxValue.CreateFromTensor(inputMetadataName, input)
};

// Run inference
using IDisposableReadOnlyCollection<DisposableNamedOnnxValue> results = _inferenceSession!.Run(inputs);
using IDisposableReadOnlyCollection<DisposableNamedOnnxValue> results = _inferenceSession!.Run(onnxInputs);
var heatmaps = results[0].AsTensor<float>();

var outputName = _inferenceSession!.OutputNames[0];
Expand All @@ -166,12 +156,11 @@ private async Task DetectPose(string filePath)
float outputWidth = outputDimensions[2];
float outputHeight = outputDimensions[3];

List<(float X, float Y)> keypointCoordinates = PoseHelper.PostProcessResults(heatmaps, originalImageWidth, originalImageHeight, outputWidth, outputHeight);
List<(float X, float Y)> keypointCoordinates = PoseHelper.PostProcessResults(heatmaps, originalImage.Width, originalImage.Height, outputWidth, outputHeight);
return keypointCoordinates;
});

// Render predictions and create output bitmap
using Bitmap output = PoseHelper.RenderPredictions(image, predictions, .02f);
using Bitmap output = PoseHelper.RenderPredictions(originalImage, predictions, .02f);
BitmapImage outputImage = BitmapFunctions.ConvertBitmapToBitmapImage(output);
NarratorHelper.AnnounceImageChanged(DefaultImage, "Image changed: key points rendered."); // <exclude-line>

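Note: the hunk above hands the raw heatmaps to PoseHelper.PostProcessResults, whose body is not part of this diff. The sketch below shows the usual heatmap decoding such a helper performs (per-joint arg-max, then scaling back to the original image size). The class and method names are hypothetical, and the indexing order is assumed to mirror how the sample reads outputDimensions[2] and outputDimensions[3] above, so treat it as an illustration rather than the repository's implementation.

```csharp
// Illustrative only: decode per-joint heatmaps into (X, Y) points in original-image space.
// Assumes heatmaps is indexed as [batch, joint, outputWidth, outputHeight]; the real
// PoseHelper.PostProcessResults in the repository may differ.
using System.Collections.Generic;
using Microsoft.ML.OnnxRuntime.Tensors;

internal static class HeatmapDecodeSketch
{
    public static List<(float X, float Y)> Decode(
        Tensor<float> heatmaps,
        float originalWidth,
        float originalHeight,
        float outputWidth,
        float outputHeight)
    {
        var keypoints = new List<(float X, float Y)>();
        int jointCount = heatmaps.Dimensions[1];

        for (int joint = 0; joint < jointCount; joint++)
        {
            // Arg-max over this joint's heatmap.
            float best = float.MinValue;
            int bestX = 0, bestY = 0;
            for (int x = 0; x < (int)outputWidth; x++)
            {
                for (int y = 0; y < (int)outputHeight; y++)
                {
                    float value = heatmaps[0, joint, x, y];
                    if (value > best)
                    {
                        best = value;
                        bestX = x;
                        bestY = y;
                    }
                }
            }

            // Scale from heatmap resolution back to the original image resolution.
            keypoints.Add((bestX * originalWidth / outputWidth, bestY * originalHeight / outputHeight));
        }

        return keypoints;
    }
}
```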
36 changes: 17 additions & 19 deletions AIDevGallery/Samples/SharedCode/PoseHelper.cs
@@ -49,33 +49,31 @@ internal class PoseHelper
return keypointCoordinates;
}

public static Bitmap RenderPredictions(Bitmap originalImage, List<(float X, float Y)> keypoints, float markerRatio, Bitmap? baseImage = null)
public static Bitmap RenderPredictions(Bitmap image, List<(float X, float Y)> keypoints, float markerRatio, Bitmap? baseImage = null)
{
Bitmap outputImage = new(originalImage);

using (Graphics g = Graphics.FromImage(outputImage))
using (Graphics g = Graphics.FromImage(image))
{
// If reference is multipose, use base image not cropped image for scaling
// If reference is one person pose, use original image as base image isn't used.
var imageValue = baseImage != null ? baseImage.Width + baseImage.Height : originalImage.Width + originalImage.Height;
int markerSize = (int)(imageValue * markerRatio / 2);
var averageOfWidthAndHeight = baseImage != null ? baseImage.Width + baseImage.Height : image.Width + image.Height;
int markerSize = (int)(averageOfWidthAndHeight * markerRatio / 2);
Brush brush = Brushes.Red;

using Pen linePen = new(Color.Blue, markerSize / 2);

List<(int StartIdx, int EndIdx)> connections =
[
(5, 6), // Left shoulder to right shoulder
(5, 7), // Left shoulder to left elbow
(7, 9), // Left elbow to left wrist
(6, 8), // Right shoulder to right elbow
(8, 10), // Right elbow to right wrist
(11, 12), // Left hip to right hip
(5, 11), // Left shoulder to left hip
(6, 12), // Right shoulder to right hip
(11, 13), // Left hip to left knee
(13, 15), // Left knee to left ankle
(12, 14), // Right hip to right knee
(14, 16) // Right knee to right ankle
(5, 7), // Left shoulder to left elbow
(7, 9), // Left elbow to left wrist
(6, 8), // Right shoulder to right elbow
(8, 10), // Right elbow to right wrist
(11, 12), // Left hip to right hip
(5, 11), // Left shoulder to left hip
(6, 12), // Right shoulder to right hip
(11, 13), // Left hip to left knee
(13, 15), // Left knee to left ankle
(12, 14), // Right hip to right knee
(14, 16) // Right knee to right ankle
];

foreach (var (startIdx, endIdx) in connections)
Expand All @@ -92,6 +90,6 @@ public static Bitmap RenderPredictions(Bitmap originalImage, List<(float X, floa
}
}

return outputImage;
return image;
}
}
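Note: the foreach body that does the actual drawing sits below the visible part of the hunk above. As a rough, self-contained sketch of how the connection lines and keypoint markers are typically rendered with System.Drawing (the helper name and exact drawing calls are assumptions, not the gallery's code):

```csharp
// Illustrative only: draw skeleton connections and keypoint markers on a bitmap.
// Assumes keypoints are already in the bitmap's pixel coordinates.
using System.Collections.Generic;
using System.Drawing;

internal static class PoseDrawingSketch
{
    public static void DrawSkeleton(
        Bitmap image,
        List<(float X, float Y)> keypoints,
        List<(int StartIdx, int EndIdx)> connections,
        int markerSize)
    {
        using Graphics g = Graphics.FromImage(image);
        using Pen linePen = new(Color.Blue, markerSize / 2f);

        // One line per skeleton connection.
        foreach (var (startIdx, endIdx) in connections)
        {
            var (x1, y1) = keypoints[startIdx];
            var (x2, y2) = keypoints[endIdx];
            g.DrawLine(linePen, x1, y1, x2, y2);
        }

        // A filled circle on every detected keypoint.
        foreach (var (x, y) in keypoints)
        {
            g.FillEllipse(Brushes.Red, x - (markerSize / 2f), y - (markerSize / 2f), markerSize, markerSize);
        }
    }
}
```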
