in code_examples/dotnet_examples/image/net-image-orientation-bounding-box.cs [12:74]
/// <summary>
/// Loads a local image, sends it to Amazon Rekognition DetectFaces, and prints
/// the original image dimensions plus each detected face's bounding box
/// (with orientation correction) and estimated age range.
/// Failures to load the file or call the service are reported to the console.
/// </summary>
public static void Example()
{
    String photo = "photo.jpg";
    AmazonRekognitionClient rekognitionClient = new AmazonRekognitionClient();
    Image image = new Image();
    try
    {
        // File.ReadAllBytes reads the whole file reliably; a single
        // FileStream.Read call may return fewer bytes than requested,
        // silently truncating the image payload.
        image.Bytes = new MemoryStream(File.ReadAllBytes(photo));
    }
    catch (Exception e)
    {
        // Include the exception message so the failure cause is diagnosable.
        Console.WriteLine("Failed to load file " + photo + ": " + e.Message);
        return;
    }
    int height;
    int width;
    // Used to extract original photo width/height
    using (System.Drawing.Bitmap imageBitmap = new System.Drawing.Bitmap(photo))
    {
        height = imageBitmap.Height;
        width = imageBitmap.Width;
    }
    Console.WriteLine("Image Information:");
    Console.WriteLine(photo);
    Console.WriteLine("Image Height: " + height);
    Console.WriteLine("Image Width: " + width);
    try
    {
        DetectFacesRequest detectFacesRequest = new DetectFacesRequest()
        {
            Image = image,
            // "ALL" requests the full attribute set (age range, emotions, etc.).
            Attributes = new List<String>() { "ALL" }
        };
        DetectFacesResponse detectFacesResponse = rekognitionClient.DetectFaces(detectFacesRequest);
        foreach (FaceDetail face in detectFacesResponse.FaceDetails)
        {
            Console.WriteLine("Face:");
            ShowBoundingBoxPositions(height, width,
                face.BoundingBox, detectFacesResponse.OrientationCorrection);
            // Arguments ordered to match the labels: top, left, width, height.
            // (The original passed Left for the "top" slot and Top for "left".)
            Console.WriteLine("BoundingBox: top={0} left={1} width={2} height={3}", face.BoundingBox.Top,
                face.BoundingBox.Left, face.BoundingBox.Width, face.BoundingBox.Height);
            Console.WriteLine("The detected face is estimated to be between " +
                face.AgeRange.Low + " and " + face.AgeRange.High + " years old.");
            Console.WriteLine();
        }
    }
    catch (Exception e)
    {
        Console.WriteLine(e.Message);
    }
}