in Shared/Holographic/AppMainBase.cpp [173:339]
bool AppMainBase::Render(
Windows::Graphics::Holographic::HolographicFrame^ holographicFrame)
{
// Don't try to render anything before the first Update.
if (_timer.GetFrameCount() == 0)
{
return false;
}
//
// Take care of any tasks that are not specific to an individual holographic
// camera. This includes anything that doesn't need the final view or projection
// matrix, such as lighting maps.
//
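// OnPreRender and OnRender below are hooks for the derived application class
// to perform its per-frame work.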
OnPreRender();
// Lock the set of holographic camera resources, then draw to each camera
// in this frame.
return _deviceResources->UseHolographicCameraResources<bool>(
[this, holographicFrame](
std::map<UINT32, std::unique_ptr<Graphics::CameraResources>>& cameraResourceMap)
{
// Up-to-date frame predictions enhance the effectiveness of image stabilization and
// allow more accurate positioning of holograms.
holographicFrame->UpdateCurrentPrediction();
HolographicFramePrediction^ prediction =
holographicFrame->CurrentPrediction;
bool atLeastOneCameraRendered = false;
for (auto cameraPose : prediction->CameraPoses)
{
// This represents the device-based resources for a HolographicCamera.
Graphics::CameraResources* pCameraResources =
cameraResourceMap[
cameraPose->HolographicCamera->Id].get();
// Get the device context.
const auto context =
_deviceResources->GetD3DDeviceContext();
const auto depthStencilView =
pCameraResources->GetDepthStencilView();
// Set render targets to the current holographic camera.
{
ID3D11RenderTargetView *const targets[1] =
{
pCameraResources->GetBackBufferRenderTargetView()
};
context->OMSetRenderTargets(
1 /* NumViews */,
targets,
depthStencilView);
// Clear the back buffer and depth stencil view.
context->ClearRenderTargetView(
targets[0],
DirectX::Colors::Transparent);
context->ClearDepthStencilView(
depthStencilView,
D3D11_CLEAR_DEPTH | D3D11_CLEAR_STENCIL,
1.0f /* Depth */,
0 /* Stencil */);
}
//
// Notes regarding holographic content:
// * For drawing, remember that you have the potential to fill twice as many pixels
// in a stereoscopic render target as compared to a non-stereoscopic render target
// of the same resolution. Avoid unnecessary or repeated writes to the same pixel,
// and only draw holograms that the user can see.
// * To help occlude hologram geometry, you can create a depth map using geometry
// data obtained via the surface mapping APIs. You can use this depth map to avoid
// rendering holograms that are intended to be hidden behind tables, walls,
// monitors, and so on.
// * Black pixels will appear transparent to the user wearing the device, but you
// should still use alpha blending to draw semitransparent holograms. You should
// also clear the screen to Transparent as shown above.
//
// The view and projection matrices for each holographic camera will change
// every frame. This function refreshes the data in the constant buffer for
// the holographic camera indicated by cameraPose.
pCameraResources->UpdateViewProjectionBuffer(
_deviceResources,
cameraPose,
_spatialPerception->GetOriginFrameOfReference()->CoordinateSystem);
// Attach the view/projection constant buffer for this camera to the graphics pipeline.
const bool cameraActive =
pCameraResources->AttachViewProjectionBuffer(
_deviceResources);
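// AttachViewProjectionBuffer reports whether a valid view transform was available
// for this camera this frame; when it was not (for example, while positional
// tracking is lost), the camera is treated as inactive below.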
//
// Only render world-locked content when positional tracking is active.
//
if (cameraActive)
{
OnRender();
}
//
// Unbind the render target and depth-stencil views from the pipeline
//
{
ID3D11RenderTargetView* nullViews[D3D11_SIMULTANEOUS_RENDER_TARGET_COUNT] = { nullptr };
context->OMSetRenderTargets(ARRAYSIZE(nullViews), nullViews, nullptr);
}
//
// When positional tracking is active, give the system hints it can use to
// stabilize the image for this camera.
//
if (cameraActive)
{
auto spRenderingParameters = holographicFrame->GetRenderingParameters(
cameraPose);
if (_hasFocusPoint)
{
//
// SetFocusPoint informs the system about a specific point in your scene to
// prioritize for image stabilization. The focus point is set independently
// for each holographic camera.
// You should set the focus point near the content that the user is looking at.
// In this example, we put the focus point at the center of the sample hologram,
// since that is the only hologram available for the user to focus on.
// You can also set the relative velocity and facing of that content; the sample
// hologram is at a fixed point so we only need to indicate its position.
//
spRenderingParameters->SetFocusPoint(
_spatialPerception->GetOriginFrameOfReference()->CoordinateSystem,
_optionalFocusPoint);
}
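// The alternative path below is compiled out. When enabled, it hands the camera's
// depth buffer to the system so image stabilization can be derived from the
// actual scene depth instead of a single focus point.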
#if 0
else
{
//
// Make use of the depth buffer to optimize image stabilization.
//
Microsoft::WRL::ComPtr<ID3D11Texture2D> depthStencil(
pCameraResources->GetDepthStencil());
Microsoft::WRL::ComPtr<IDXGIResource1> depthStencilResource;
ASSERT_SUCCEEDED(depthStencil.As(&depthStencilResource));
Microsoft::WRL::ComPtr<IDXGISurface2> depthDxgiSurface;
ASSERT_SUCCEEDED(depthStencilResource->CreateSubresourceSurface(0, &depthDxgiSurface));
auto d3dSurface = Windows::Graphics::DirectX::Direct3D11::CreateDirect3DSurface(
depthDxgiSurface.Get());
spRenderingParameters->CommitDirect3D11DepthBuffer(
d3dSurface);
}
#endif
}
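// The frame counts as rendered once at least one camera pose has been processed,
// even if world-locked content was skipped for an inactive camera.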
atLeastOneCameraRendered = true;
}
return atLeastOneCameraRendered;
});
}