in Sources/Calibration/Test.Psi.Calibration/DistortionTests.cs [58:243]
public void TestDistortion()
{
    // Create a checkerboard image
    bool useColor = false; // false => use the Azure Kinect depth camera model below
    bool reverseDirection = true;
    int width = useColor ? 1280 : 640;
    int height = useColor ? 720 : 576;
    var img = new Microsoft.Psi.Imaging.Image(width, height, Microsoft.Psi.Imaging.PixelFormat.BGR_24bpp);
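
    // Fill the image with 20x20-pixel checkerboard squares. The two square
    // types carry opposing horizontal and vertical color gradients, so a
    // warping error shows up as a color mismatch rather than only as a
    // geometric shift.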
    unsafe
    {
        byte* row = (byte*)img.ImageData.ToPointer();
        for (int i = 0; i < img.Height; i++)
        {
            byte* col = row;
            for (int j = 0; j < img.Width; j++)
            {
                if ((i / 20 + j / 20) % 2 == 0)
                {
                    col[0] = (byte)(255.0f * (float)j / (float)img.Width);
                    col[1] = (byte)(255.0f * (1.0f - (float)j / (float)img.Width));
                    col[2] = 0;
                }
                else
                {
                    col[0] = 0;
                    col[1] = (byte)(255.0f * (float)i / (float)img.Height);
                    col[2] = (byte)(255.0f * (1.0f - (float)i / (float)img.Height));
                }

                col += img.BitsPerPixel / 8;
            }

            row += img.Stride;
        }
#if DUMP_IMAGES
        SaveColorToBMP("checkerboard.bmp", img);
#endif // DUMP_IMAGES
    }

    double[] colorAzureDistortionCoefficients = new double[6]
    {
        0.609246314,
        -2.84837151,
        1.63566089,
        0.483219713,
        -2.66301942,
        1.55776918,
    };
    double[] colorAzureTangentialCoefficients = new double[2]
    {
        -0.000216085638,
        0.000744335062,
    };
    double[] colorIntrinsics = new double[4]
    {
        638.904968, // cx
        350.822327, // cy
        607.090698, // fx
        607.030762, // fy
    };
    double[] depthIntrinsics = new double[4]
    {
        326.131775, // cx
        324.755524, // cy
        504.679749, // fx
        504.865875, // fy
    };
    double[] depthAzureDistortionCoefficients = new double[6]
    {
        0.228193134,
        -0.0650567561,
        -0.000764187891,
        0.568694472,
        -0.0599768497,
        -0.0119919786,
    };
    double[] depthAzureTangentialCoefficients = new double[2]
    {
        -9.04210319e-05,
        -9.16166828e-05,
    };
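
    // The six radial (k1..k6) and two tangential (p1, p2) coefficients above
    // follow the rational Brown-Conrady distortion model that the Azure
    // Kinect SDK reports. For a normalized image point (x, y) with
    // r^2 = x^2 + y^2, that model distorts as:
    //
    //   s   = (1 + k1*r^2 + k2*r^4 + k3*r^6) / (1 + k4*r^2 + k5*r^4 + k6*r^6)
    //   x_d = x*s + 2*p1*x*y + p2*(r^2 + 2*x^2)
    //   y_d = y*s + p1*(r^2 + 2*y^2) + 2*p2*x*y
    //
    // Whether CameraIntrinsics implements exactly this form is an assumption
    // here; the coefficient ordering (k1..k6, then p1, p2) matches the
    // arrays above.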

    // Select the distortion coefficients for the camera model under test
    var distortionCoefficients = useColor ? colorAzureDistortionCoefficients : depthAzureDistortionCoefficients;
    var tangentialCoefficients = useColor ? colorAzureTangentialCoefficients : depthAzureTangentialCoefficients;

    // Build the camera intrinsics, then distort the checkerboard image
    var distortedImage = new Microsoft.Psi.Imaging.Image(img.Width, img.Height, img.PixelFormat);
    double[] colorArray = new double[9] { colorIntrinsics[2], 0.0, 0.0, 0.0, colorIntrinsics[3], 0.0, colorIntrinsics[0], colorIntrinsics[1], 1.0 };
    double[] depthArray = new double[9] { depthIntrinsics[2], 0.0, 0.0, 0.0, depthIntrinsics[3], 0.0, depthIntrinsics[0], depthIntrinsics[1], 1.0 };
    var intrinsicMat = CreateMatrix.Dense<double>(3, 3, useColor ? colorArray : depthArray);
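
    // Note: MathNet's CreateMatrix.Dense(rows, columns, storage) reads the
    // storage array in column-major order, so the nine values above yield
    // the standard pinhole intrinsics matrix:
    //
    //   | fx  0  cx |
    //   |  0  fy cy |
    //   |  0  0   1 |
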
    var ci = new CameraIntrinsics(
        img.Width,
        img.Height,
        intrinsicMat,
        Vector<double>.Build.DenseOfArray(distortionCoefficients),
        Vector<double>.Build.DenseOfArray(tangentialCoefficients),
        reverseDirection);
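
    // Distortion pass: synthesize the distorted image by inverse mapping.
    // For each destination pixel we transform its coordinate and sample the
    // source image, which avoids the holes that forward mapping would leave.
    // Calling UndistortPoint here (rather than DistortPoint) is deliberate:
    // with reverseDirection set, the closed-form model is assumed to run in
    // the opposite direction, so UndistortPoint supplies the source location
    // for each distorted output pixel.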
    unsafe
    {
        byte* dstrow = (byte*)distortedImage.ImageData.ToPointer();
        for (int i = 0; i < distortedImage.Height; i++)
        {
            byte* dstcol = dstrow;
            for (int j = 0; j < distortedImage.Width; j++)
            {
                // Convert the pixel to normalized camera coordinates
                var pixelCoord = new Point2D(
                    (j - ci.PrincipalPoint.X) / ci.FocalLengthXY.X,
                    (i - ci.PrincipalPoint.Y) / ci.FocalLengthXY.Y);
                bool converged = ci.UndistortPoint(pixelCoord, out Point2D undistortedPoint);

                // Map back to pixel coordinates and sample the source image
                int px = (int)(undistortedPoint.X * ci.FocalLengthXY.X + ci.PrincipalPoint.X);
                int py = (int)(undistortedPoint.Y * ci.FocalLengthXY.Y + ci.PrincipalPoint.Y);
                if (converged && px >= 0 && px < img.Width && py >= 0 && py < img.Height)
                {
                    byte* src = (byte*)img.ImageData.ToPointer() + py * img.Stride + px * img.BitsPerPixel / 8;
                    dstcol[0] = src[0];
                    dstcol[1] = src[1];
                    dstcol[2] = src[2];
                }

                dstcol += distortedImage.BitsPerPixel / 8;
            }

            dstrow += distortedImage.Stride;
        }
#if DUMP_IMAGES
        SaveColorToBMP("distorted.bmp", distortedImage);
#endif // DUMP_IMAGES
    }

    // Finally, round-trip each pixel coordinate (distort, then undistort)
    // and compare the resampled pixel against the original to measure error
    var undistortedImage = new Microsoft.Psi.Imaging.Image(img.Width, img.Height, img.PixelFormat);
    unsafe
    {
        double err = 0.0;
        int numPts = 0;
        byte* dstrow = (byte*)undistortedImage.ImageData.ToPointer();
        for (int i = 0; i < undistortedImage.Height; i++)
        {
            byte* dstcol = dstrow;
            for (int j = 0; j < undistortedImage.Width; j++)
            {
                var pixelCoord = new Point2D(
                    (j - ci.PrincipalPoint.X) / ci.FocalLengthXY.X,
                    (i - ci.PrincipalPoint.Y) / ci.FocalLengthXY.Y);
                ci.DistortPoint(pixelCoord, out Point2D distortedPixelCoord);
                bool converged = ci.UndistortPoint(distortedPixelCoord, out Point2D undistortedPixelCoord);
                int px = (int)(undistortedPixelCoord.X * ci.FocalLengthXY.X + ci.PrincipalPoint.X);
                int py = (int)(undistortedPixelCoord.Y * ci.FocalLengthXY.Y + ci.PrincipalPoint.Y);
                if (converged && px >= 0 && px < img.Width && py >= 0 && py < img.Height)
                {
                    byte* src = (byte*)img.ImageData.ToPointer() + py * img.Stride + px * img.BitsPerPixel / 8;
                    dstcol[0] = src[0];
                    dstcol[1] = src[1];
                    dstcol[2] = src[2];

                    // Accumulate the squared BGR difference between the
                    // round-tripped sample and the pixel it should match
                    byte* src2 = (byte*)img.ImageData.ToPointer() + i * img.Stride + j * img.BitsPerPixel / 8;
                    double db = src2[0] - (double)src[0];
                    double dg = src2[1] - (double)src[1];
                    double dr = src2[2] - (double)src[2];
                    err += db * db + dg * dg + dr * dr;
                    numPts++;
                }

                dstcol += undistortedImage.BitsPerPixel / 8;
            }

            dstrow += undistortedImage.Stride;
        }

        double rmse = Math.Sqrt(err / numPts);
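
        // The RMSE is over BGR channel differences (0..255 per channel). The
        // loose threshold of 100 tolerates nearest-neighbor resampling error
        // at checkerboard edges, where a sub-pixel coordinate error flips
        // the sampled color entirely.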
        if (rmse > 100)
        {
            throw new AssertFailedException("Distort/Undistort returned incorrect results.");
        }
#if DUMP_IMAGES
        SaveColorToBMP("undistorted.bmp", undistortedImage);
#endif // DUMP_IMAGES
    }
}