Can anyone shed some light on what is happening here?
I'm using OpenCV to map the X,Y in a video to the X,Y,Z in the real world. Somehow, CalibrateCamera appears to impact the subsequent call to ProjectPoint through (maybe?) the CameraMatrix variable.
In the code below, I send fixed values to ProjectPoint before calling CalibrateCamera and again after calling CalibrateCamera. Somehow, the values returned by ProjectPoint are changing despite sending what I believe to be the same values to ProjectPoint.
Can anyone explain what is happening here?
var objPts1 = new MCvPoint3D32f[] { new MCvPoint3D32f(9, 18, 0), new MCvPoint3D32f(9, 12, 0), new MCvPoint3D32f(9, 9, 0), new MCvPoint3D32f(9, 6, 0), new MCvPoint3D32f(9, 0, 0), new MCvPoint3D32f(0, 0, 0), new MCvPoint3D32f(0, 6, 0), new MCvPoint3D32f(0, 9, 0), new MCvPoint3D32f(0, 12, 0), new MCvPoint3D32f(0, 18, 0), new MCvPoint3D32f(9, 9, 2.24F), new MCvPoint3D32f(0, 9, 2.24F), new MCvPoint3D32f(4.5F, 0, 0), new MCvPoint3D32f(4.5F, 18, 0), }; var objPts = new MCvPoint3D32f[1][]; objPts[0] = objPts1; PointF[] imgPts_fixed = new PointF[] { new PointF(1182, 376), new PointF(1245, 484), new PointF(1287, 545), new PointF(1335, 624), new PointF(1475, 851), new PointF(120, 847), new PointF(249, 632), new PointF(297, 551), new PointF(337, 488), new PointF(400, 378), new PointF(1298, 249), new PointF(290, 249), new PointF(830, 839), new PointF(832, 384) }; var imgPts = new PointF[1][]; imgPts[0] = imgPts_fixed; Size camSize = new Size(1600, 1080); MCvTermCriteria termCriteria = new MCvTermCriteria(10000); try { CourtPoint_XYZ courtPoint_XYZ = new CourtPoint_XYZ(9, 18, 0); MCvPoint3D32f[] objPt = courtPoint_XYZ.GetMCvPoint(); Matrix<float> rv = new Matrix<float>(new float[,] { { 1.749f }, { -0.104f }, { 0.115f } }); Matrix<float> tv = new Matrix<float>(new float[,] { { -7.911f }, { 1.512f }, { 24.189f } }); Matrix<float> dm = new Matrix<float>(new float[,] { { 2.305f, -20.791f, -0.014f, -0.208f, 93.641f } }); Matrix<float> CMat = new Matrix<float>(3, 3); CMat.Data[0, 0] = 3000; CMat.Data[0, 1] = 0; CMat.Data[0, 2] = (float)(camSize.Width / 2.0); CMat.Data[1, 0] = 0; CMat.Data[1, 1] = 5000; CMat.Data[1, 2] = (float)(camSize.Height / 2.0); CMat.Data[2, 0] = 0; CMat.Data[2, 1] = 0; CMat.Data[2, 2] = 1; Matrix<float> CMat1 = new Matrix<float>(3, 3); CMat1.Data[0, 0] = 3000; CMat1.Data[0, 1] = 0; CMat1.Data[0, 2] = (float)(camSize.Width / 2.0); CMat1.Data[1, 0] = 0; CMat1.Data[1, 1] = 5000; CMat1.Data[1, 2] = (float)(camSize.Height / 2.0); CMat1.Data[2, 0] = 0; CMat1.Data[2, 
1] = 0; CMat1.Data[2, 2] = 1; PointF[] pointFromCM = CvInvoke.ProjectPoints(objPt, rv, tv, CMat1, dm); PointF[] pointFromCamMat = CvInvoke.ProjectPoints(objPt, rv, tv, CMat, dm); // These two are equal // pointFromCM = {X = 697.6779 Y = 328.845825} // pointFromCamMat = {X = 697.6779 Y = 328.845825} DistMat = new Mat(); CalibType flags0 = CalibType.UseIntrinsicGuess; Error = CvInvoke.CalibrateCamera(objPts, imgPts, camSize, CMat, DistMat, flags0, termCriteria, out rVect, out tVect); CourtPoint_XYZ xyz = new CourtPoint_XYZ(0, 18, 0); courtPoint_XYZ = new CourtPoint_XYZ(9, 18, 0); objPt = courtPoint_XYZ.GetMCvPoint(); rv = new Matrix<float>(new float[,] { { 1.749f }, { -0.104f }, { 0.115f } }); tv = new Matrix<float>(new float[,] { { -7.911f }, { 1.512f }, { 24.189f } }); dm = new Matrix<float>(new float[,] { { 2.305f, -20.791f, -0.014f, -0.208f, 93.641f } }); // I'm resetting all these values to show that CMat should be the same as it was prior to the call of CalibrateCamera CMat.Data[0, 0] = 3000; CMat.Data[0, 1] = 0; CMat.Data[0, 2] = (float)(camSize.Width / 2.0); CMat.Data[1, 0] = 0; CMat.Data[1, 1] = 5000; CMat.Data[1, 2] = (float)(camSize.Height / 2.0); CMat.Data[2, 0] = 0; CMat.Data[2, 1] = 0; CMat.Data[2, 2] = 1; PointF[] pointFromCM1 = CvInvoke.ProjectPoints(objPt, rv, tv, CMat1, dm); PointF[] pointFromCamMat1 = CvInvoke.ProjectPoints(objPt, rv, tv, CMat, dm); // pointFromCM1 = {X = 697.6779 Y = 328.845825} // pointFromCamMat1 = {X = 868.6621 Y = 243.203537} int m = 1; } catch (Exception ex) { MessageBox.Show(ex.Message); return; }
I first noticed this with live points and the code I included is simply to provide what should be a static environment. All the data in the CameraMatrix appears to stay the same (which is why I am manually resetting them to be sure).
When I call ProjectPoint the second time with the CamMat that was used in CalibrateCamera, the point returned is different.
Also of interest is that this only appears to happen in the C# implementation and not the Python implementation. Additionally, the changed point (after calling CalibrateCamera) appears to be the more accurate point (I'm not 100% sure of this)