in Shared/HoloLensForCV/SensorFrameReceiver.cpp [93:187]
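//
// Receives a single sensor frame whose stream header has already been read:
// loads the expected number of image bytes from the data reader, wraps them
// in a SoftwareBitmap of the pixel format implied by the sensor type, and
// stamps the resulting SensorFrame with the header's timestamp.
//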
Concurrency::task<SensorFrame^> SensorFrameReceiver::ReceiveSensorFrameAsync(
    SensorFrameStreamHeader^ header)
{
    return concurrency::create_task(
        _reader->LoadAsync(
            header->ImageHeight * header->RowStride)).
        then([this, header](concurrency::task<unsigned int> frameBytesLoadedTaskResult)
    {
        //
        // Make sure that we have received exactly the number of bytes we have
        // asked for. Doing so will also implicitly check for the possible exceptions
        // that could have been thrown in the async call chain.
        //
        const size_t frameBytesLoaded =
            frameBytesLoadedTaskResult.get();

        if (header->ImageHeight * header->RowStride != frameBytesLoaded)
        {
#if DBG_ENABLE_ERROR_LOGGING
            dbg::trace(
                L"SensorFrameReceiver::ReceiveAsync: expected image frame data of %u bytes, got %u bytes",
                header->ImageHeight * header->RowStride,
                static_cast<unsigned int>(frameBytesLoaded));
#endif /* DBG_ENABLE_ERROR_LOGGING */

            throw ref new Platform::FailureException();
        }
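
        //
        // ReadBuffer copies the bytes just loaded into the DataReader out into
        // an IBuffer that the SoftwareBitmap can be created from.
        //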
        Windows::Storage::Streams::IBuffer^ frameAsBuffer =
            _reader->ReadBuffer(
                static_cast<uint32_t>(frameBytesLoaded));

        Windows::Graphics::Imaging::BitmapPixelFormat pixelFormat;
        uint32_t packedImageWidthMultiplier = 1;
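
        //
        // Map the sensor type to the pixel format the frame was transmitted in.
        // The visible light camera frames arrive packed: the header's ImageWidth
        // is a quarter of the actual Gray8 image width, so it is scaled back up
        // by four when the bitmap is created below.
        //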
        switch (header->FrameType)
        {
        case SensorType::PhotoVideo:
            pixelFormat = Windows::Graphics::Imaging::BitmapPixelFormat::Bgra8;
            break;

        case SensorType::ShortThrowToFDepth:
        case SensorType::LongThrowToFDepth:
            pixelFormat = Windows::Graphics::Imaging::BitmapPixelFormat::Gray16;
            break;

        case SensorType::ShortThrowToFReflectivity:
        case SensorType::LongThrowToFReflectivity:
            pixelFormat = Windows::Graphics::Imaging::BitmapPixelFormat::Gray8;
            break;

        case SensorType::VisibleLightLeftLeft:
        case SensorType::VisibleLightLeftFront:
        case SensorType::VisibleLightRightFront:
        case SensorType::VisibleLightRightRight:
            pixelFormat = Windows::Graphics::Imaging::BitmapPixelFormat::Gray8;
            packedImageWidthMultiplier = 4;
            break;

        default:
#if DBG_ENABLE_ERROR_LOGGING
            dbg::trace(
                L"SensorFrameReceiver::ReceiveAsync: unrecognized sensor type %i",
                header->FrameType);
#endif /* DBG_ENABLE_ERROR_LOGGING */

            throw ref new Platform::FailureException();
        }
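
        //
        // CreateCopyFromBuffer copies the raw frame bytes into a new
        // SoftwareBitmap with the pixel format selected above; the alpha
        // channel, where present, is ignored.
        //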
        Windows::Graphics::Imaging::SoftwareBitmap^ frameAsSoftwareBitmap =
            Windows::Graphics::Imaging::SoftwareBitmap::CreateCopyFromBuffer(
                frameAsBuffer,
                pixelFormat,
                header->ImageWidth * packedImageWidthMultiplier,
                header->ImageHeight,
                Windows::Graphics::Imaging::BitmapAlphaMode::Ignore);

        //
        // Timestamps on the wire are encoded as universal time
        //
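        // (DateTime::UniversalTime counts 100-nanosecond intervals since
        // January 1, 1601 UTC, so the received value is interpreted in
        // that unit.)
        //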
        Windows::Foundation::DateTime frameTimestamp;
        frameTimestamp.UniversalTime = header->Timestamp;

        SensorFrame^ sensorFrame =
            ref new SensorFrame(
                header->FrameType,
                frameTimestamp,
                frameAsSoftwareBitmap);

        // TODO: add support for sending and receiving camera intrinsics and extrinsics

        return sensorFrame;
    });
}