in Kiosk/Controls/CameraControl.xaml.cs [385:456]
/// <summary>
/// Redraws the face-tracking overlay: clears the visualization canvas, then draws one
/// border per detected face (scaled from camera-frame pixel coordinates to canvas
/// coordinates), annotated with any cached real-time data and optional debug info.
/// </summary>
/// <param name="framePixelSize">Size of the camera frame, in pixels, that the face boxes are relative to.</param>
/// <param name="detectedFaces">Faces reported by the local face tracker; may be null.</param>
private void ShowFaceTrackingVisualization(Windows.Foundation.Size framePixelSize, IEnumerable<Windows.Media.FaceAnalysis.DetectedFace> detectedFaces)
{
    this.FaceTrackingVisualizationCanvas.Children.Clear();

    double actualWidth = this.FaceTrackingVisualizationCanvas.ActualWidth;
    double actualHeight = this.FaceTrackingVisualizationCanvas.ActualHeight;

    // Nothing to draw when the camera is not streaming, there are no faces, or the
    // canvas has no measurable size yet. The frame-size check is new: a zero frame
    // dimension would make the scale factors 0 and the coordinate divisions below
    // produce Infinity, whose unchecked cast to uint is unspecified.
    if (captureManager.CameraStreamState != Windows.Media.Devices.CameraStreamState.Streaming ||
        detectedFaces == null || actualWidth == 0 || actualHeight == 0 ||
        framePixelSize.Width == 0 || framePixelSize.Height == 0)
    {
        return;
    }

    // Frame-pixel -> canvas-coordinate scale factors (face boxes are divided by these).
    double widthScale = framePixelSize.Width / actualWidth;
    double heightScale = framePixelSize.Height / actualHeight;

    foreach (Windows.Media.FaceAnalysis.DetectedFace face in detectedFaces)
    {
        RealTimeFaceIdentificationBorder faceBorder = new RealTimeFaceIdentificationBorder();
        this.FaceTrackingVisualizationCanvas.Children.Add(faceBorder);
        faceBorder.ShowFaceRectangle((uint)(face.FaceBox.X / widthScale), (uint)(face.FaceBox.Y / heightScale), (uint)(face.FaceBox.Width / widthScale), (uint)(face.FaceBox.Height / heightScale));

        if (this.realTimeDataProvider != null)
        {
            this.ShowRealTimeDataForFace(faceBorder, face);
        }

        if (SettingsHelper.Instance.ShowDebugInfo)
        {
            this.ShowDebugInfoForFace(face, widthScale, heightScale);
        }
    }
}

// Annotates a face border with whatever cached real-time data (face attributes,
// identified person, similar persisted face) the provider has for this face box.
// Falls through gracefully when only a subset of the data is available.
private void ShowRealTimeDataForFace(RealTimeFaceIdentificationBorder faceBorder, Windows.Media.FaceAnalysis.DetectedFace face)
{
    Microsoft.Azure.CognitiveServices.Vision.Face.Models.DetectedFace detectedFace = this.realTimeDataProvider.GetLastFaceAttributesForFace(face.FaceBox);
    IdentifiedPerson identifiedPerson = this.realTimeDataProvider.GetLastIdentifiedPersonForFace(face.FaceBox);
    SimilarFace similarPersistedFace = this.realTimeDataProvider.GetLastSimilarPersistedFaceForFace(face.FaceBox);

    // Short 4-char tag derived from the persisted face id ("N" = 32 hex digits, no dashes),
    // used as a compact visual marker for the face.
    string uniqueId = similarPersistedFace?.PersistedFaceId.GetValueOrDefault().ToString("N").Substring(0, 4);

    if (detectedFace != null && detectedFace.FaceAttributes != null)
    {
        if (identifiedPerson != null && identifiedPerson.Person != null)
        {
            // age, gender and id available
            faceBorder.ShowIdentificationData(detectedFace.FaceAttributes.Age.GetValueOrDefault(),
                detectedFace.FaceAttributes.Gender?.ToString(), (uint)Math.Round(identifiedPerson.Confidence * 100), identifiedPerson.Person.Name, uniqueId: uniqueId);
        }
        else
        {
            // only age and gender available
            faceBorder.ShowIdentificationData(detectedFace.FaceAttributes.Age.GetValueOrDefault(),
                detectedFace.FaceAttributes.Gender?.ToString(), 0, null, uniqueId: uniqueId);
        }

        faceBorder.ShowRealTimeEmotionData(detectedFace.FaceAttributes.Emotion);
    }
    else if (identifiedPerson != null && identifiedPerson.Person != null)
    {
        // only id available
        faceBorder.ShowIdentificationData(0, null, (uint)Math.Round(identifiedPerson.Confidence * 100), identifiedPerson.Person.Name, uniqueId: uniqueId);
    }
    else if (uniqueId != null)
    {
        // only unique id available
        faceBorder.ShowIdentificationData(0, null, 0, null, uniqueId: uniqueId);
    }
}

// Overlays a debug TextBlock at the face's canvas position showing what fraction of
// the video frame's height the face box covers.
private void ShowDebugInfoForFace(Windows.Media.FaceAnalysis.DetectedFace face, double widthScale, double heightScale)
{
    this.FaceTrackingVisualizationCanvas.Children.Add(new TextBlock
    {
        Text = string.Format("Coverage: {0:0}%", 100 * ((double)face.FaceBox.Height / this.videoProperties.Height)),
        Margin = new Thickness((uint)(face.FaceBox.X / widthScale), (uint)(face.FaceBox.Y / heightScale), 0, 0)
    });
}