我正在阅读 Mastering OpenCV 一书第 4 章的“运动结构”(Structure from Motion)。我目前正在尝试使用基础矩阵(Fundamental Matrix)计算本质矩阵(Essential Matrix)。下面这条语句
Mat_<double> E = K.t() * F * K;
会导致异常,我无法弄清楚原因。我猜测是矩阵的数据类型不匹配。
#include <opencv2/features2d/features2d.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc_c.h>
#include <opencv2/nonfree/features2d.hpp>
#include <opencv/cv.h>
#include <iostream>
#include <vector>
using namespace cv;
using namespace std;
int main(int argc, const char** argv)
{
vector<Mat> images;
vector<vector<KeyPoint>> imgKeyPts;
cv::Ptr<cv::FeatureDetector> detector;
cv::Ptr<cv::DescriptorExtractor> extractor;
std::vector<cv::Mat> descriptors;
char* imgs[] = {
{"TestBranches/branches1.jpg"},
{"TestBranches/branches2.jpg"},
};
//Images are already in CV_8UC3 format.
//No to convert them explicitly for now
for(int i = 0 ; i < 2; i++)
{
Mat im = imread(imgs[i]);
Mat gray = cv::Mat();
cvtColor(im, gray, CV_BGR2GRAY);
images.push_back(gray);
}
/* Load Camera's Calibrated Properties */
cv::Mat camMatrix;
cv::Mat distortionCamera, distcoeff_32f;
cv::Mat K, K_32f;
cv::Mat Kinv; //TODO: Kinv was originally cv::Mat_<double> this data type was not useful when doing invert operation because it was throwing exceptions.
CvMat *intrinsic = (CvMat*)cvLoad( "Intrinsics.xml" );
CvMat *distortion = (CvMat*)cvLoad( "Distortion.xml" );
camMatrix = Mat(intrinsic);
distortionCamera = Mat(distortion);
cout<<"K = " <<camMatrix<<std::endl;
cout<<"Distort = "<<distortionCamera<<std::endl;
K = camMatrix;
invert(K, Kinv);
distortionCamera.convertTo(distcoeff_32f,CV_32FC1);
K.convertTo(K_32f, CV_32FC1);
cout<<"Kinv = " <<Kinv<<std::endl;
/* Extract Image Features and Match with another view */
detector = FeatureDetector::create("PyramidFAST");
extractor = DescriptorExtractor::create("ORB");
detector->detect(images, imgKeyPts);
extractor->compute(images, imgKeyPts,descriptors);
BFMatcher matcher(NORM_HAMMING,true);
std::vector< DMatch > matches;
const Mat& descriptors_1 = descriptors[0];
const Mat& descriptors_2 = descriptors[1];
/* Store correspondences in a matrix */
matcher.match( descriptors_1, descriptors_2, matches);
Mat img_matches, img_1, img_2;
drawKeypoints(images[0], imgKeyPts[0], img_1);
drawKeypoints(images[1], imgKeyPts[1], img_2);
drawMatches( images[0], imgKeyPts[0], images[1], imgKeyPts[1],
matches, img_matches, Scalar::all(-1), Scalar::all(-1),
vector<char>(), DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS );
//Outputting point correspondences.
imwrite("output.jpg", img_matches);
imwrite("img1.jpg", img_1);
imwrite("img2.jpg", img_2);
vector <Point2f> imgPts1, imgPts2;
for( unsigned int i = 0; i < matches.size(); i++)
{
//queryidx -> left image
imgPts1.push_back(imgKeyPts[0][matches[i].queryIdx].pt);
//trainidx -> right image
imgPts2.push_back(imgKeyPts[1][matches[i].trainIdx].pt);
}
Mat F = findFundamentalMat(imgPts1, imgPts2, FM_RANSAC, 0.1, 0.99);
cout<<"F type " << CV_MAT_TYPE(F.type())<<endl;
cout<<"K type " << K.type()<<endl;
cout<<"K inv type " << Kinv.type()<<endl;
cout<<"K t type "<< (K.t()).type()<<endl;
Mat_<double> E = K.t() * F * K;
cin.get();
return 0;
}