I am trying to process webcam image to detect faces on it with Microsoft Azure Face API. Since the API accepts image url or stream, I need to get Stream objects of the MediaFrameReader, but I am unsuccessful in it.
I hope someone can solve this problem to me.
In many tries I got various kind of errors, for example:
Exception thrown at 0x7B8ADBAE (coreclr.dll) in FaceAPI.exe: 0xC0000005: Access violation reading location 0x18670000.
Even BadRequest exception happened sometimes:
Microsoft.Azure.CognitiveServices.Vision.Face.Models.APIErrorException: 'Operation returned an invalid status code 'BadRequest''
You can see my code below:
using Microsoft.Azure.CognitiveServices.Vision.Face.Models;
using System;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices.WindowsRuntime;
using System.Threading.Tasks;
using Windows.Media.Capture;
using Windows.Media.Capture.Frames;
using Windows.Storage;
using Windows.Storage.AccessCache;
using Windows.Storage.Pickers;
using Windows.UI.Popups;
using Windows.UI.Xaml;
using Windows.UI.Xaml.Controls;
using Windows.UI.Xaml.Media;
namespace FaceAPI
{
public sealed partial class MainPage : Page
{
public FaceAPI Api { get; set; }
private MediaCapture mediaCapture;
private MediaFrameReader mediaFrameReader;
public MainPage()
{
this.InitializeComponent();
this.Loaded += MainPage_Loaded;
}
private async void MainPage_Loaded(object sender, RoutedEventArgs e)
{
Api = new FaceAPI();
await Setup();
}
private void btnDetect_Click(object sender, RoutedEventArgs e)
{
SolidColorBrush fill_Brush = rectRecord.Fill as SolidColorBrush;
if (fill_Brush.Color.R > 0)
{
rectRecord.Fill = new SolidColorBrush(Windows.UI.Colors.Black);
StopDetection();
}
else
{
rectRecord.Fill = new SolidColorBrush(Windows.UI.Colors.Red);
StartDetection();
}
}
private async void btnSetFolder_Click(object sender, RoutedEventArgs e)
{
FolderPicker picker = new FolderPicker();
picker.FileTypeFilter.Add("*");
picker.ViewMode = PickerViewMode.Thumbnail;
picker.SuggestedStartLocation = PickerLocationId.Desktop;
picker.CommitButtonText = "This Folder";
StorageFolder objPicked_Folder = await picker.PickSingleFolderAsync();
if (objPicked_Folder != null)
{
string sPath = objPicked_Folder.Path;
StorageApplicationPermissions.FutureAccessList.Add(objPicked_Folder, "metadata");
tbxFolder.Text = sPath;
Windows.Storage.ApplicationData.Current.LocalSettings.Values["Folder_to_Save"] = sPath;
}
}
private async void btnOpenFolder_Click(object sender, RoutedEventArgs e)
{
String sPath = ApplicationData.Current.LocalSettings.Values["Folder_to_Save"] as string;
StorageFolder folder = await StorageFolder.GetFolderFromPathAsync(sPath);
await Windows.System.Launcher.LaunchFolderAsync(folder);
}
private async Task Setup()
{
await InitCapture();
InitCamera();
string folder = ApplicationData.Current.LocalSettings.Values["Folder_to_Save"] as string;
tbxFolder.Text = folder;
}
private async Task InitCapture()
{
mediaCapture = new MediaCapture();
(App.Current as App).MediaCapture = mediaCapture;
MediaCaptureInitializationSettings mediaCaptureInitSettings = new MediaCaptureInitializationSettings()
{
StreamingCaptureMode = StreamingCaptureMode.Video
};
await mediaCapture.InitializeAsync(mediaCaptureInitSettings);
mediaCapture.Failed += MediaCaptureOnFailed;
mediaCapture.RecordLimitationExceeded += MediaCaptureOnRecordLimitationExceeded;
}
private async void StartDetection()
{
var frameSourceGroups = await MediaFrameSourceGroup.FindAllAsync();
MediaFrameSourceGroup selectedGroup = null;
MediaFrameSourceInfo colorSourceInfo = null;
foreach (var sourceGroup in frameSourceGroups)
{
foreach (var sourceInfo in sourceGroup.SourceInfos)
{
if (sourceInfo.MediaStreamType == MediaStreamType.VideoRecord
&& sourceInfo.SourceKind == MediaFrameSourceKind.Color)
{
colorSourceInfo = sourceInfo;
break;
}
}
if (colorSourceInfo != null)
{
selectedGroup = sourceGroup;
break;
}
}
var allGroups = await MediaFrameSourceGroup.FindAllAsync();
var eligibleGroups = allGroups.Select(g => new
{
Group = g,
SourceInfos = new MediaFrameSourceInfo[]
{
g.SourceInfos.FirstOrDefault(info => info.SourceKind == MediaFrameSourceKind.Color)
}
}).Where(g => g.SourceInfos.Any(info => info != null)).ToList();
if (eligibleGroups.Count == 0)
{
System.Diagnostics.Debug.WriteLine("No source group with color, depth or infrared found.");
return;
}
var selectedGroupIndex = 0;
selectedGroup = eligibleGroups[selectedGroupIndex].Group;
colorSourceInfo = eligibleGroups[selectedGroupIndex].SourceInfos[0];
var colorFrameSource = mediaCapture.FrameSources[colorSourceInfo.Id];
var preferredFormat = colorFrameSource.SupportedFormats.Where(format =>
{
return format.VideoFormat.Width >= 1080
&& format.Subtype == "NV12";
}).FirstOrDefault();
if (preferredFormat == null)
{
return;
}
await colorFrameSource.SetFormatAsync(preferredFormat);
mediaFrameReader = await mediaCapture.CreateFrameReaderAsync(colorFrameSource, "NV12");
mediaFrameReader.AcquisitionMode = MediaFrameReaderAcquisitionMode.Buffered;
mediaFrameReader.FrameArrived += CheckFrameForFaces;
await mediaFrameReader.StartAsync();
}
private async void CheckFrameForFaces(MediaFrameReader sender, MediaFrameArrivedEventArgs args)
{
var mediaFrame = sender.TryAcquireLatestFrame();
Stream image = mediaFrame?.VideoMediaFrame.FrameReference.BufferMediaFrame.Buffer.AsStream();
if (image != null)
{
foreach (DetectedFace face in await Api.DetectFaces(image))
{
//This is a test line so that I can put breakpoint to somewhere to be sure that the API processed the image
var age = face.FaceAttributes.Age;
}
}
}
private async void StopDetection()
{
await mediaFrameReader.StopAsync();
mediaFrameReader.FrameArrived -= CheckFrameForFaces;
mediaFrameReader.Dispose();
}
private async void InitCamera()
{
ctlCamera.Source = mediaCapture;
await mediaCapture.StartPreviewAsync();
}
private async void MediaCaptureOnFailed(MediaCapture sender, MediaCaptureFailedEventArgs erroreventargs)
{
await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, async () =>
{
var warningMessage = new MessageDialog(String.Format("The video capture failed: {0}", erroreventargs.Message), "Capture Failed");
await warningMessage.ShowAsync();
});
}
private async void MediaCaptureOnRecordLimitationExceeded(MediaCapture sender)
{
await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, async () =>
{
await mediaCapture.StopRecordAsync();
var warningMessage = new MessageDialog(String.Format("The video capture has exceeded its maximum length: {0}", "Capture Halted"));
await warningMessage.ShowAsync();
});
}
private async void FlSysMessagebox(string Text, string Title)
{
MessageDialog dialog = new MessageDialog(Text, Title);
await dialog.ShowAsync();
}
private async Task UpdateStatus(string status)
{
await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () => Status.Text = status);
}
}
}
I can assure you that the Face API endpoint works, I tested it with image urls, but I want it to work with Stream to be able to process webcam image.
I tried a differnt way as well:
I tested if I can save the canvas image to a file using FileSavePicker and it worked, so I suppose I get a valid pixel buffer from this code:
await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, async () => {
RenderTargetBitmap renderTargetBitmap = new RenderTargetBitmap();
await renderTargetBitmap.RenderAsync(canvas);
var pixelBuffer = await renderTargetBitmap.GetPixelsAsync();
Stream img = pixelBuffer.AsStream();
foreach (DetectedFace face in await Api.DetectFaces(img))
{
var age = face.FaceAttributes.Age;
}
});
But the Stream that I got by calling AsStream() on pixelBuffer don't work with the Face API.
User contributions licensed under CC BY-SA 3.0