1

我正在为一个机器人项目尝试从图像中获取真实世界的 xy 坐标。在互联网上做了大量研究后,我在 SO 网站上找到了一段据说运行良好的代码。我只是替换成了自己的相机参数和四个参考点的坐标。代码如下:{

#include "opencv2/opencv.hpp"
#include <stdio.h>
#include <iostream>
#include <sstream>
#include <math.h>

using namespace cv;
using namespace std;

// 2D pixel locations of the four reference points (filled in main()).
vector<Point2f> imagePoints;
// Matching 3D world coordinates in millimeters; all lie on the Z = 0 plane.
vector<Point3f> objectPoints;

int main()
{
imagePoints.push_back(Point2f(150.,180.));
imagePoints.push_back(Point2f(30.,365.));
imagePoints.push_back(Point2f(604.,365.));
imagePoints.push_back(Point2f(465.,180.));

objectPoints.push_back(Point3f(0., 0., 0.)); //distance in millimeters
objectPoints.push_back(Point3f(0.,320.,0.));
objectPoints.push_back(Point3f(320.,320.,0.));
objectPoints.push_back(Point3f(320.,0.,0.));

Mat cameraMatrix =(Mat_<double>(3,3) <<6.5409089313379638e+02, 0., 320., 0., 6.5409089313379638e+02, 240., 0., 0.,
    1.);

Mat distCoeffs =(Mat_<double>(5,1) <<8.1771217385200712e-02, -2.5852666755450442e+00, 0., 0.,
    1.0382795831648963e+01);

//Mat rvec(1,3,DataType::<double>::type);
Mat rvec(Mat_<double>(1,3));
Mat tvec(Mat_<double>(1,3));
Mat rotationMatrix((Mat_<double>(3,3)));

solvePnP(objectPoints, imagePoints, cameraMatrix, distCoeffs, rvec, tvec);
Rodrigues(rvec,rotationMatrix);
Mat ones(Mat_<double>(3,1));
Mat uvPoint = ones; //u,v,1
uvPoint.at<double>(0,0) = 150.; 
uvPoint.at<double>(1,0) = 180.;
Mat tempMat, tempMat2;
double s;
tempMat = rotationMatrix.inv() * cameraMatrix.inv() * uvPoint;
tempMat2 = rotationMatrix.inv() * tvec;

s = 0 + tempMat2.at<double>(2,0); 
s /= tempMat.at<double>(2,0);
cout << "P = " << rotationMatrix.inv() * (s * cameraMatrix.inv() * uvPoint - tvec) << endl;
return 0;
}

问题是:当我用已知点 (150,180) 测试时,它本应输出原点 (0,0,0),但我得到的却是 P = [340.5995534946562; 730.955008122653; -5.684341886080801e-14]。恳请大家帮忙分析这个问题,我必须解决它才能搭建我的系统。

4

0 回答 0