private void CaptureThreadProc()

in Sources/Kinect/Microsoft.Psi.AzureKinect.x64/AzureKinectCore.cs [247:423]


        private void CaptureThreadProc()
        {
            if (this.configuration.ColorResolution == ColorResolution.Off &&
                this.configuration.DepthMode == DepthMode.Off)
            {
                return;
            }

            var colorImageFormat = PixelFormat.BGRA_32bpp;
            var infraredImageFormat = PixelFormat.Gray_16bpp;

            var calibrationPosted = false;

            // Track capture count and elapsed time for periodic frame rate reporting.
            Stopwatch sw = new Stopwatch();
            int frameCount = 0;
            sw.Start();

            while (this.device != null && !this.shutdown)
            {
                if (this.configuration.OutputCalibration && !calibrationPosted)
                {
                    // Compute and post the device's calibration object.
                    var currentTime = this.pipeline.GetCurrentTime();
                    var calibration = this.device.GetCalibration();

                    if (calibration != null)
                    {
                        this.AzureKinectSensorCalibration.Post(calibration, currentTime);

                        var colorExtrinsics = calibration.ColorCameraCalibration.Extrinsics;
                        var colorIntrinsics = calibration.ColorCameraCalibration.Intrinsics;
                        var depthIntrinsics = calibration.DepthCameraCalibration.Intrinsics;

                        if (colorIntrinsics.Type == CalibrationModelType.Rational6KT || depthIntrinsics.Type == CalibrationModelType.Rational6KT)
                        {
                            throw new Exception("Calibration output not permitted for deprecated internal Azure Kinect cameras. Only Brown_Conrady calibration supported.");
                        }
                        else if (colorIntrinsics.Type != CalibrationModelType.BrownConrady || depthIntrinsics.Type != CalibrationModelType.BrownConrady)
                        {
                            throw new Exception("Calibration output only supported for Brown_Conrady model.");
                        }
                        else
                        {
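                            // Intrinsics.Parameters follows the Azure Kinect SDK's Brown-Conrady layout:
                            // [0..3] = cx, cy, fx, fy; [4..9] = radial terms k1..k6; [12..13] = tangential terms p2, p1.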
                            Matrix<double> colorCameraMatrix = Matrix<double>.Build.Dense(3, 3);
                            colorCameraMatrix[0, 0] = colorIntrinsics.Parameters[2];
                            colorCameraMatrix[1, 1] = colorIntrinsics.Parameters[3];
                            colorCameraMatrix[0, 2] = colorIntrinsics.Parameters[0];
                            colorCameraMatrix[1, 2] = colorIntrinsics.Parameters[1];
                            colorCameraMatrix[2, 2] = 1;
                            Matrix<double> depthCameraMatrix = Matrix<double>.Build.Dense(3, 3);
                            depthCameraMatrix[0, 0] = depthIntrinsics.Parameters[2];
                            depthCameraMatrix[1, 1] = depthIntrinsics.Parameters[3];
                            depthCameraMatrix[0, 2] = depthIntrinsics.Parameters[0];
                            depthCameraMatrix[1, 2] = depthIntrinsics.Parameters[1];
                            depthCameraMatrix[2, 2] = 1;
                            Matrix<double> depthToColorMatrix = Matrix<double>.Build.Dense(4, 4);
                            for (int i = 0; i < 3; i++)
                            {
                                for (int j = 0; j < 3; j++)
                                {
                                    // The AzureKinect SDK assumes that vectors are row vectors, while the MathNet SDK assumes
                                    // column vectors, so we need to flip them here.
                                    depthToColorMatrix[i, j] = colorExtrinsics.Rotation[(j * 3) + i];
                                }
                            }

                            depthToColorMatrix[3, 0] = colorExtrinsics.Translation[0];
                            depthToColorMatrix[3, 1] = colorExtrinsics.Translation[1];
                            depthToColorMatrix[3, 2] = colorExtrinsics.Translation[2];
                            depthToColorMatrix[3, 3] = 1.0;
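                            // The Azure Kinect SDK expresses the extrinsics translation in millimeters; sandwiching with
                            // these scale matrices re-expresses the transform in meters, and the final transpose converts
                            // from the SDK's row-vector convention to the column-vector convention used by MathNet below.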
                            var metersToMillimeters = Matrix<double>.Build.Dense(4, 4);
                            metersToMillimeters[0, 0] = 1000.0;
                            metersToMillimeters[1, 1] = 1000.0;
                            metersToMillimeters[2, 2] = 1000.0;
                            metersToMillimeters[3, 3] = 1.0;
                            var millimetersToMeters = Matrix<double>.Build.Dense(4, 4);
                            millimetersToMeters[0, 0] = 1.0 / 1000.0;
                            millimetersToMeters[1, 1] = 1.0 / 1000.0;
                            millimetersToMeters[2, 2] = 1.0 / 1000.0;
                            millimetersToMeters[3, 3] = 1.0;
                            depthToColorMatrix = (metersToMillimeters * depthToColorMatrix * millimetersToMeters).Transpose();

                            double[] colorRadialDistortion = new double[6]
                            {
                                colorIntrinsics.Parameters[4],
                                colorIntrinsics.Parameters[5],
                                colorIntrinsics.Parameters[6],
                                colorIntrinsics.Parameters[7],
                                colorIntrinsics.Parameters[8],
                                colorIntrinsics.Parameters[9],
                            };
                            double[] colorTangentialDistortion = new double[2] { colorIntrinsics.Parameters[13], colorIntrinsics.Parameters[12] };
                            double[] depthRadialDistortion = new double[6]
                            {
                                depthIntrinsics.Parameters[4],
                                depthIntrinsics.Parameters[5],
                                depthIntrinsics.Parameters[6],
                                depthIntrinsics.Parameters[7],
                                depthIntrinsics.Parameters[8],
                                depthIntrinsics.Parameters[9],
                            };
                            double[] depthTangentialDistortion = new double[2] { depthIntrinsics.Parameters[13], depthIntrinsics.Parameters[12] };

                            // Azure Kinect uses a basis under the hood that assumes Forward=Z, Right=X, Down=Y.
                            var kinectBasis = new CoordinateSystem(default, UnitVector3D.ZAxis, UnitVector3D.XAxis.Negate(), UnitVector3D.YAxis.Negate());
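                            // The kinectBasis sandwich below (kinectBasis.Invert() * depthToColorMatrix * kinectBasis)
                            // re-expresses the depth-to-color transform in the MathNet.Spatial basis that \psi uses
                            // (Forward=X, Left=Y, Up=Z).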

                            var cameraCalibration = new DepthDeviceCalibrationInfo(
                                calibration.ColorCameraCalibration.ResolutionWidth,
                                calibration.ColorCameraCalibration.ResolutionHeight,
                                colorCameraMatrix,
                                colorRadialDistortion,
                                colorTangentialDistortion,
                                kinectBasis.Invert() * depthToColorMatrix * kinectBasis,
                                calibration.DepthCameraCalibration.ResolutionWidth,
                                calibration.DepthCameraCalibration.ResolutionHeight,
                                depthCameraMatrix,
                                depthRadialDistortion,
                                depthTangentialDistortion,
                                CoordinateSystem.CreateIdentity(4));

                            this.DepthDeviceCalibrationInfo.Post(cameraCalibration, currentTime);
                        }

                        calibrationPosted = true;
                    }
                }

                // Block waiting for the next capture, up to the configured device capture timeout.
                using var capture = this.device.GetCapture(this.configuration.DeviceCaptureTimeout);
                if (capture != null)
                {
                    var currentTime = this.pipeline.GetCurrentTime();

                    if (this.configuration.OutputColor && capture.Color != null)
                    {
                        using var sharedColorImage = ImagePool.GetOrCreate(this.colorImageWidth, this.colorImageHeight, colorImageFormat);
                        sharedColorImage.Resource.CopyFrom(capture.Color.Memory.ToArray());
                        this.ColorImage.Post(sharedColorImage, currentTime);
                    }

                    // Hold references to the infrared and depth images so the paired (depth, IR) stream can also be posted.
                    Shared<Image> sharedIRImage = null;
                    Shared<DepthImage> sharedDepthImage = null;

                    if (this.configuration.OutputInfrared && capture.IR != null)
                    {
                        sharedIRImage = ImagePool.GetOrCreate(this.depthImageWidth, this.depthImageHeight, infraredImageFormat);
                        sharedIRImage.Resource.CopyFrom(capture.IR.Memory.ToArray());
                        this.InfraredImage.Post(sharedIRImage, currentTime);
                    }

                    if (this.configuration.OutputDepth && capture.Depth != null)
                    {
                        sharedDepthImage = DepthImagePool.GetOrCreate(this.depthImageWidth, this.depthImageHeight);
                        sharedDepthImage.Resource.CopyFrom(capture.Depth.Memory.ToArray());
                        this.DepthImage.Post(sharedDepthImage, currentTime);

                        if (sharedIRImage != null)
                        {
                            this.DepthAndIRImages.Post((sharedDepthImage, sharedIRImage), currentTime);
                        }
                    }

                    sharedIRImage?.Dispose();
                    sharedDepthImage?.Dispose();

                    this.Temperature.Post(capture.Temperature, currentTime);

                    ++frameCount;
                    if (sw.Elapsed > this.configuration.FrameRateReportingFrequency)
                    {
                        this.FrameRate.Post((double)frameCount / sw.Elapsed.TotalSeconds, currentTime);
                        frameCount = 0;
                        sw.Restart();
                    }
                }
            }
        }
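
For context, here is a minimal sketch of how the streams posted by this capture loop might be consumed from a \psi pipeline. It assumes the public AzureKinectSensor component and its AzureKinectSensorConfiguration (from the Microsoft.Psi.AzureKinect package) surface the same emitters and configuration flags referenced in the method above; treat it as an illustration rather than the component's documented API.

using System;
using Microsoft.Psi;
using Microsoft.Psi.AzureKinect;

class AzureKinectCaptureExample
{
    static void Main()
    {
        using var pipeline = Pipeline.Create();

        // Enable the outputs that CaptureThreadProc posts: color, depth, and the derived calibration.
        var sensor = new AzureKinectSensor(pipeline, new AzureKinectSensorConfiguration
        {
            OutputColor = true,
            OutputDepth = true,
            OutputCalibration = true,
        });

        // DepthDeviceCalibrationInfo is posted once, the first time the loop sees a valid calibration.
        sensor.DepthDeviceCalibrationInfo.Do(_ => Console.WriteLine("Received depth device calibration."));

        // FrameRate is posted at the interval set by FrameRateReportingFrequency.
        sensor.FrameRate.Do(fps => Console.WriteLine($"Capture rate: {fps:F1} fps"));

        pipeline.RunAsync();
        Console.ReadLine();
    }
}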