I have been trying to calibrate my camera for quite a while using OpenCV's calibrateCamera() function, following the same procedure as the OpenCV sample program. I first load ten 9 x 6 chessboard images and find the chessboard corners in each one. If the corners are found, their pixel locations are stored in imagePoints (of type vector<vector<Point2f> >). After doing this for all images, runCalibrationAndSave is executed; inside it, runCalibration fills objectPoints (of type vector<vector<Point3f> >) with the corners' real-world coordinates.

Up to this point the code works fine: the chessboard corners are found accurately and imagePoints is filled. But when calibrateCamera() is called, an OpenCV assertion fails with the following error:
OpenCV Error:
Assertion failed (nimages > 0 && nimages == (int)imagePoints1.total() && (!imgPtMat2 || nimages == (int)imagePoints2.total())) in collectCalibrationData, file /............/modules/calib3d/src/calibration.cpp, line 3164
I did some research on this problem and found that it usually occurs when the objectPoints and imagePoints vectors do not have equal lengths or are not properly filled. In my case, however, I have checked in the debugger that both vectors are properly filled and have equal lengths. For reference, I have attached the code below; it runs fine right up to the calibrateCamera() call, where the assertion fails.
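To make this concrete, here is roughly the check I mean, a minimal sketch placed just before the calibrateCamera() call, using the variable names from runCalibration() in the code below (the failing assertion essentially compares the two outer sizes):

// Debugging sketch: print the outer vector sizes (what the failing assertion compares)
cout << "objectPoints.size() = " << objectPoints.size()
     << ", imagePoints.size() = " << imagePoints.size() << endl;
// ...and the number of points stored for each view
for(size_t k = 0; k < imagePoints.size(); k++)
    cout << "view " << k << ": " << imagePoints[k].size() << " image points" << endl;
for(size_t k = 0; k < objectPoints.size(); k++)
    cout << "view " << k << ": " << objectPoints[k].size() << " object points" << endl;

Both outer sizes come out equal and non-zero, and every inner vector is filled.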
#include <iostream>
#include <sstream>
#include <time.h>
#include <stdio.h>
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/calib3d/calib3d.hpp>
#include <opencv2/highgui/highgui.hpp>
using namespace cv;
using namespace std;
class Settings
{
public:
    Size boardSize;    // number of inner corners per row and column (9 x 6 here)
    float squareSize;  // side length of one chessboard square
};
bool runCalibrationAndSave(Settings& s, Size imageSize, Mat& cameraMatrix, Mat& distCoeffs,
                           vector<vector<Point2f> > imagePoints);

int main()
{
    Settings s;
    s.boardSize.width  = 9;
    s.boardSize.height = 6;
    s.squareSize = 50;

    Mat cameraMatrix, distCoeffs;
    Size imageSize;
    char filename[512];
    vector<vector<Point2f> > imagePoints;
    for(int counter = 0; counter < 10; counter++)
    {
        sprintf(filename, "chessboard%d.jpg", counter);
        IplImage* img = cvLoadImage(filename);
        cv::Mat m = cv::cvarrToMat(img);   // wrap the IplImage in a Mat header

        Mat pointBuf = Mat::zeros(54, 2, CV_32FC1);   // 9 x 6 = 54 corners
        vector<Point2f> pointBuf_vec;
        bool found = findChessboardCorners(m, s.boardSize, pointBuf,
            CV_CALIB_CB_ADAPTIVE_THRESH | CV_CALIB_CB_FAST_CHECK | CV_CALIB_CB_NORMALIZE_IMAGE);
        if(found)
        {
            cout << "check" << endl;
            Mat viewGray;
            cvtColor(m, viewGray, CV_BGR2GRAY);
            // refine the detected corners to sub-pixel accuracy
            cornerSubPix(viewGray, pointBuf, Size(11, 11), Size(-1, -1),
                TermCriteria(CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 30, 0.1));
            drawChessboardCorners(m, s.boardSize, Mat(pointBuf), found);

            // copy the 54 detected corners from the Mat into a vector<Point2f>
            pointBuf_vec.clear();
            for(int i = 0; i < 54; i++)
            {
                Point2f temp;
                temp.x = pointBuf.at<float>(i, 0);
                temp.y = pointBuf.at<float>(i, 1);
                pointBuf_vec.push_back(temp);
            }
            imagePoints.push_back(pointBuf_vec);
        }
        imshow("Example1", m);
        cvWaitKey();
        imageSize = m.size();
    }
    runCalibrationAndSave(s, imageSize, cameraMatrix, distCoeffs, imagePoints);
    return 0;
}
static void calcBoardCornerPositions(Size boardSize, float squareSize, vector<Point3f>& corners)
{
    // real-world coordinates of the chessboard corners, with Z = 0 (planar target)
    corners.clear();
    for( int i = 0; i < boardSize.height; i++ )
        for( int j = 0; j < boardSize.width; j++ )
            corners.push_back(Point3f(float( j*squareSize ), float( i*squareSize ), 0));
}
static bool runCalibration( Settings& s, Size& imageSize, Mat& cameraMatrix, Mat& distCoeffs,
                            vector<vector<Point2f> > imagePoints, vector<Mat>& rvecs, vector<Mat>& tvecs,
                            vector<float>& reprojErrs, double& totalAvgErr)
{
    cameraMatrix = Mat::eye(3, 3, CV_64F);
    // if( s.flag & CV_CALIB_FIX_ASPECT_RATIO )
    //     cameraMatrix.at<double>(0,0) = 1.0;
    distCoeffs = Mat::zeros(8, 1, CV_64F);

    // build one vector of real-world corner coordinates and replicate it once per view
    vector<vector<Point3f> > objectPoints;
    vector<Point3f> object_pointBuf_vec;
    calcBoardCornerPositions(s.boardSize, s.squareSize, object_pointBuf_vec);
    for(size_t k = 0; k < imagePoints.size(); k++)
        objectPoints.push_back(object_pointBuf_vec);
    // objectPoints.resize(imagePoints.size(), objectPoints[0]);

    // Find intrinsic and extrinsic camera parameters
    double rms = calibrateCamera(objectPoints, imagePoints, imageSize, cameraMatrix,
                                 distCoeffs, rvecs, tvecs, /*s.flag|*/ CV_CALIB_FIX_K4 | CV_CALIB_FIX_K5);
    cout << "Re-projection error reported by calibrateCamera: " << rms << endl;

    bool ok = checkRange(cameraMatrix) && checkRange(distCoeffs);
    // totalAvgErr = computeReprojectionErrors(objectPoints, imagePoints,
    //     rvecs, tvecs, cameraMatrix, distCoeffs, reprojErrs);
    return ok;
}
bool runCalibrationAndSave(Settings& s, Size imageSize, Mat& cameraMatrix, Mat& distCoeffs, vector<vector<Point2f> > imagePoints)
{
    vector<Mat> rvecs, tvecs;
    vector<float> reprojErrs;
    double totalAvgErr = 0;

    bool ok = runCalibration(s, imageSize, cameraMatrix, distCoeffs, imagePoints, rvecs, tvecs,
                             reprojErrs, totalAvgErr);
    cout << (ok ? "Calibration succeeded" : "Calibration failed")
         << ". avg re-projection error = " << totalAvgErr << endl;
    return ok;
}
I am using Visual C++ and OpenCV 2.4.9. Please help me figure out the problem. This is the first time I am asking a question on SO, so please let me know if I have made any mistakes in how I asked it. Thanks in advance for your help.
Comments:

Use vector::size() to get the number of elements in a vector and check if the two have consistent sizes. – BConic

IplImage* img = cvLoadImage(filename); cv::Mat& m = cv::cvarrToMat(img); // why don't you just use imread()? – Samer
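Regarding Samer's comment: a minimal sketch of how the loading step could look with imread() instead of the old C API (an alternative for the same loop as above, not the code that produced the error):

// Alternative image loading using the C++ API, as suggested in the comments
Mat m = imread(filename, CV_LOAD_IMAGE_COLOR);   // loads directly into a cv::Mat
if(m.empty())
{
    cout << "could not load " << filename << endl;
    continue;   // skip this view
}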