13

Given a set of 2D points, how can I apply the inverse of undistortPoints?

I have the camera intrinsics and distCoeffs, and I would like (for example) to create a square and distort it the way the camera would see it through its lens.

I found a "distort" patch here: http://code.opencv.org/issues/1387, but it seems to be useful only for whole images, and I want to work on sparse points.


6 Answers

9

This question is rather old, but since I ended up here from a Google search without finding a neat answer, I decided to answer it anyway.

There is a function called projectPoints that does exactly this. The C version is used internally by OpenCV when estimating camera parameters with functions such as calibrateCamera and stereoCalibrate.

Edit:
To use 2D points as input, we can set all z-coordinates to 1 with convertPointsToHomogeneous and call projectPoints with zero rotation and zero translation.

cv::Mat points2d = ...;
cv::Mat points3d;
cv::Mat distorted_points2d;
convertPointsToHomogeneous(points2d, points3d);
projectPoints(points3d, cv::Vec3f(0,0,0), cv::Vec3f(0,0,0), camera_matrix, dist_coeffs, distorted_points2d);
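
For completeness, here is a self-contained sketch of the same idea. The intrinsics, distortion coefficients and square corners below are made-up placeholder values, and the input points are assumed to be ideal (normalized) image coordinates, i.e. what cv::undistortPoints returns with its default arguments:

#include <iostream>
#include <vector>
#include <opencv2/opencv.hpp>

int main()
{
    // Hypothetical intrinsics and distortion coefficients, substitute your own.
    cv::Mat camera_matrix = (cv::Mat_<double>(3, 3) <<
        1000.0,    0.0, 320.0,
           0.0, 1000.0, 240.0,
           0.0,    0.0,   1.0);
    cv::Mat dist_coeffs = (cv::Mat_<double>(1, 5) << 0.1, -0.05, 0.001, 0.001, 0.0);

    // Corners of a square in ideal (undistorted, normalized) coordinates.
    std::vector<cv::Point2f> points2d;
    points2d.push_back(cv::Point2f(-0.1f, -0.1f));
    points2d.push_back(cv::Point2f( 0.1f, -0.1f));
    points2d.push_back(cv::Point2f( 0.1f,  0.1f));
    points2d.push_back(cv::Point2f(-0.1f,  0.1f));

    // Lift the points onto the z = 1 plane ...
    std::vector<cv::Point3f> points3d;
    cv::convertPointsToHomogeneous(points2d, points3d);

    // ... and project them with zero rotation/translation, so that only the
    // intrinsics and the distortion model are applied.
    std::vector<cv::Point2f> distorted_points2d;
    cv::projectPoints(points3d, cv::Vec3f(0, 0, 0), cv::Vec3f(0, 0, 0),
                      camera_matrix, dist_coeffs, distorted_points2d);

    for (size_t i = 0; i < distorted_points2d.size(); ++i)
        std::cout << distorted_points2d[i] << std::endl;
    return 0;
}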
Answered 2013-12-26T12:04:52.103
5

A simple solution is to use initUndistortRectifyMap to obtain a map from undistorted coordinates to distorted coordinates:

cv::Mat K = ...; // 3x3 intrinsic parameters
cv::Mat D = ...; // 4x1 or similar distortion parameters
int W = 640; // image width
int H = 480; // image height

cv::Mat mapx, mapy;
cv::initUndistortRectifyMap(K, D, cv::Mat(), K, cv::Size(W, H), 
  CV_32F, mapx, mapy);

float distorted_x = mapx.at<float>(y, x);
float distorted_y = mapy.at<float>(y, x);

I have edited to clarify that the code is correct:

Quoting the documentation of initUndistortRectifyMap:

For each pixel (u, v) in the destination (corrected and rectified) image, the function computes the corresponding coordinates in the source image (that is, in the original image from the camera).

map_x(u,v) = x''f_x + c_x

map_y(u,v) = y''f_y + c_y
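
So for sparse points you can simply look the distorted position up in the maps, as long as the undistorted points land on integer pixel locations inside the image. A minimal sketch of that lookup follows; the helper name is mine, and K, D, W, H are assumed to be the same kind of hypothetical values as in the snippet above:

#include <vector>
#include <opencv2/opencv.hpp>

// Look up the distorted position of each (integer) undistorted pixel in the maps.
std::vector<cv::Point2f> distortViaMaps(const std::vector<cv::Point2i>& undistorted,
                                        const cv::Mat& K, const cv::Mat& D,
                                        int W, int H)
{
    cv::Mat mapx, mapy;
    cv::initUndistortRectifyMap(K, D, cv::Mat(), K, cv::Size(W, H),
                                CV_32F, mapx, mapy);

    std::vector<cv::Point2f> distorted;
    for (size_t i = 0; i < undistorted.size(); ++i)
    {
        const cv::Point2i& p = undistorted[i];
        // The maps are indexed as (row, column), i.e. (y, x).
        distorted.push_back(cv::Point2f(mapx.at<float>(p.y, p.x),
                                        mapy.at<float>(p.y, p.x)));
    }
    return distorted;
}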

Answered 2013-12-26T12:27:36.543
2

undistortPoints is simply the reverse of projectPoints.

In my case I wanted to do the following:

Undistort points:

int undisortPoints(const vector<cv::Point2f> &uv, vector<cv::Point2f> &xy, const cv::Mat &M, const cv::Mat &d)
{
    // Passing M again as the last argument (the new camera matrix) keeps the
    // result in pixel coordinates instead of normalized coordinates.
    cv::undistortPoints(uv, xy, M, d, cv::Mat(), M);
    return 0;
}

This undistorts the points to coordinates very similar to the original image coordinates, but without distortion. This is the default behavior of the cv::undistort() function.

Re-distort points:

int distortPoints(const vector<cv::Point2f> &xy, vector<cv::Point2f> &uv, const cv::Mat &M, const cv::Mat &d)
{
    vector<cv::Point2f> xy2;
    vector<cv::Point3f> xyz;
    // Remove the intrinsics (with no distortion coefficients) to get normalized coordinates ...
    cv::undistortPoints(xy, xy2, M, cv::Mat());
    // ... lift them onto the z = 1 plane ...
    for (cv::Point2f p : xy2) xyz.push_back(cv::Point3f(p.x, p.y, 1));
    // ... and re-project them with the full camera model (intrinsics + distortion).
    cv::Mat rvec = cv::Mat::zeros(3, 1, CV_64FC1);
    cv::Mat tvec = cv::Mat::zeros(3, 1, CV_64FC1);
    cv::projectPoints(xyz, rvec, tvec, M, d, uv);
    return 0;
}

The little trick here is to first project the points onto the z = 1 plane using a linear camera model (no distortion). After that, you project them with the original camera model, including the distortion coefficients.
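
A possible round trip with these two helpers might look like the following; the intrinsics and points are placeholder values, and the calls assume the two functions are defined as above:

// Hypothetical intrinsics and distortion coefficients.
cv::Mat M = (cv::Mat_<double>(3, 3) << 1000, 0, 320, 0, 1000, 240, 0, 0, 1);
cv::Mat d = (cv::Mat_<double>(1, 5) << 0.1, -0.05, 0.001, 0.001, 0.0);

std::vector<cv::Point2f> uv, xy, uv2;
uv.push_back(cv::Point2f(100.f, 80.f));
uv.push_back(cv::Point2f(320.f, 240.f));
uv.push_back(cv::Point2f(500.f, 400.f));

undisortPoints(uv, xy, M, d);   // distorted pixel coords -> undistorted pixel coords
distortPoints(xy, uv2, M, d);   // undistorted pixel coords -> re-distorted pixel coords
// uv2 should now match uv up to numerical error.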

I found these useful, and I hope they work for you as well.

Answered 2016-01-26T14:47:46.583
1

I had exactly the same need. Here is a possible solution:

void MyDistortPoints(const std::vector<cv::Point2d> & src, std::vector<cv::Point2d> & dst, 
                     const cv::Mat & cameraMatrix, const cv::Mat & distorsionMatrix)
{
  dst.clear();
  double fx = cameraMatrix.at<double>(0,0);
  double fy = cameraMatrix.at<double>(1,1);
  double ux = cameraMatrix.at<double>(0,2);
  double uy = cameraMatrix.at<double>(1,2);

  double k1 = distorsionMatrix.at<double>(0, 0);
  double k2 = distorsionMatrix.at<double>(0, 1);
  double p1 = distorsionMatrix.at<double>(0, 2);
  double p2 = distorsionMatrix.at<double>(0, 3);
  double k3 = distorsionMatrix.at<double>(0, 4);
  //BOOST_FOREACH(const cv::Point2d &p, src)
  for (unsigned int i = 0; i < src.size(); i++)
  {
    const cv::Point2d &p = src[i];
    double x = p.x;
    double y = p.y;
    double xCorrected, yCorrected;
    //Step 1 : apply the distortion model to the ideal (normalized) coordinates
    {     
      double r2 = x*x + y*y;
      //radial distorsion
      xCorrected = x * (1. + k1 * r2 + k2 * r2 * r2 + k3 * r2 * r2 * r2);
      yCorrected = y * (1. + k1 * r2 + k2 * r2 * r2 + k3 * r2 * r2 * r2);

      //tangential distorsion
      //The "Learning OpenCV" book is wrong here !!!
      //False equations from the "Learning OpenCv" book
      //xCorrected = xCorrected + (2. * p1 * y + p2 * (r2 + 2. * x * x)); 
      //yCorrected = yCorrected + (p1 * (r2 + 2. * y * y) + 2. * p2 * x);
      //Correct formulae found at : http://www.vision.caltech.edu/bouguetj/calib_doc/htmls/parameters.html
      xCorrected = xCorrected + (2. * p1 * x * y + p2 * (r2 + 2. * x * x));
      yCorrected = yCorrected + (p1 * (r2 + 2. * y * y) + 2. * p2 * x * y);
    }
    //Step 2 : normalized coordinates => pixel coordinates
    {
      xCorrected = xCorrected * fx + ux;
      yCorrected = yCorrected * fy + uy;
    }
    dst.push_back(cv::Point2d(xCorrected, yCorrected));
  }


}

void MyDistortPoints(const std::vector<cv::Point2d> & src, std::vector<cv::Point2d> & dst, 
                     const cv::Matx33d & cameraMatrix, const cv::Matx<double, 1, 5> & distorsionMatrix)
{
  cv::Mat cameraMatrix2(cameraMatrix);
  cv::Mat distorsionMatrix2(distorsionMatrix);
  return MyDistortPoints(src, dst, cameraMatrix2, distorsionMatrix2);
}

void TestDistort()
{
  cv::Matx33d cameraMatrix = 0.;
  {
    //cameraMatrix Init
    double fx = 1000., fy = 950.;
    double ux = 324., uy = 249.;
    cameraMatrix(0, 0) = fx;
    cameraMatrix(1, 1) = fy;
    cameraMatrix(0, 2) = ux;
    cameraMatrix(1, 2) = uy;
    cameraMatrix(2, 2) = 1.;
  }


  cv::Matx<double, 1, 5> distorsionMatrix;
  {
    //distorsion Init
    const double k1 = 0.5, k2 = -0.5, k3 = 0.000005, p1 = 0.07, p2 = -0.05;

    distorsionMatrix(0, 0) = k1;
    distorsionMatrix(0, 1) = k2;
    distorsionMatrix(0, 2) = p1;
    distorsionMatrix(0, 3) = p2;
    distorsionMatrix(0, 4) = k3;
  }


  std::vector<cv::Point2d> distortedPoints;
  std::vector<cv::Point2d> undistortedPoints;
  std::vector<cv::Point2d> redistortedPoints;
  distortedPoints.push_back(cv::Point2d(324., 249.)); // equal to the optical center
  distortedPoints.push_back(cv::Point2d(340., 200));
  distortedPoints.push_back(cv::Point2d(785., 345.));
  distortedPoints.push_back(cv::Point2d(0., 0.));
  cv::undistortPoints(distortedPoints, undistortedPoints, cameraMatrix, distorsionMatrix);  
  MyDistortPoints(undistortedPoints, redistortedPoints, cameraMatrix, distorsionMatrix);
  cv::undistortPoints(redistortedPoints, undistortedPoints, cameraMatrix, distorsionMatrix);  

  //Poor man's unit test ensuring we have an accuracy that is better than 0.001 pixel
  for (unsigned int i = 0; i < undistortedPoints.size(); i++)
  {
    cv::Point2d dist = redistortedPoints[i] - distortedPoints[i];
    double norm = sqrt(dist.dot(dist));
    std::cout << "norm = " << norm << std::endl;
    assert(norm < 1E-3);
  }
}
Answered 2012-12-08T15:19:23.397
1

For those who are still searching, here is a simple Python function that distorts points back:

import cv2
import numpy as np

def distortPoints(undistortedPoints, k, d):
    # Note: this uses the fisheye distortion model (cv2.fisheye.distortPoints).
    undistorted = np.float32(undistortedPoints[:, np.newaxis, :])

    # fisheye.distortPoints expects normalized coordinates, so remove the
    # intrinsics first by multiplying each point with K^-1.
    kInv = np.linalg.inv(k)
    for i in range(len(undistorted)):
        srcv = np.array([undistorted[i][0][0], undistorted[i][0][1], 1])
        dstv = kInv.dot(srcv)
        undistorted[i][0][0] = dstv[0]
        undistorted[i][0][1] = dstv[1]

    distorted = cv2.fisheye.distortPoints(undistorted, k, d)
    return distorted

Example:

undistorted = np.array([(639.64, 362.09), (234, 567)])
distorted = distortPoints(undistorted, camK, camD)
print(distorted)
Answered 2020-07-06T17:21:33.503
0

Here is main.cpp. It is self-contained and needs nothing besides OpenCV. I don't remember where I found it; it works and I use it in my project. The program consumes a set of standard chessboard images and generates a json/xml file with all of the camera's distortion parameters.

#include <iostream>
#include <sstream>
#include <time.h>
#include <stdio.h>

#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/calib3d/calib3d.hpp>
#include <opencv2/highgui/highgui.hpp>

#ifndef _CRT_SECURE_NO_WARNINGS
# define _CRT_SECURE_NO_WARNINGS
#endif

using namespace cv;
using namespace std;

static void help()
{
        cout <<  "This is a camera calibration sample." << endl
        <<  "Usage: calibration configurationFile"  << endl
        <<  "Near the sample file you'll find the configuration file, which has detailed help of "
        "how to edit it.  It may be any OpenCV supported file format XML/YAML." << endl;
}
class Settings
{
public:
        Settings() : goodInput(false) {}
        enum Pattern { NOT_EXISTING, CHESSBOARD, CIRCLES_GRID, ASYMMETRIC_CIRCLES_GRID };
        enum InputType {INVALID, CAMERA, VIDEO_FILE, IMAGE_LIST};

        void write(FileStorage& fs) const                        //Write serialization for this class
        {
                fs << "{" << "BoardSize_Width"  << boardSize.width
                << "BoardSize_Height" << boardSize.height
                << "Square_Size"         << squareSize
                << "Calibrate_Pattern" << patternToUse
                << "Calibrate_NrOfFrameToUse" << nrFrames
                << "Calibrate_FixAspectRatio" << aspectRatio
                << "Calibrate_AssumeZeroTangentialDistortion" << calibZeroTangentDist
                << "Calibrate_FixPrincipalPointAtTheCenter" << calibFixPrincipalPoint

                << "Write_DetectedFeaturePoints" << bwritePoints
                << "Write_extrinsicParameters"   << bwriteExtrinsics
                << "Write_outputFileName"  << outputFileName

                << "Show_UndistortedImage" << showUndistorsed

                << "Input_FlipAroundHorizontalAxis" << flipVertical
                << "Input_Delay" << delay
                << "Input" << input
                << "}";
        }
        void read(const FileNode& node)                          //Read serialization for this class
        {
                node["BoardSize_Width" ] >> boardSize.width;
                node["BoardSize_Height"] >> boardSize.height;
                node["Calibrate_Pattern"] >> patternToUse;
                node["Square_Size"]  >> squareSize;
                node["Calibrate_NrOfFrameToUse"] >> nrFrames;
                node["Calibrate_FixAspectRatio"] >> aspectRatio;
                node["Write_DetectedFeaturePoints"] >> bwritePoints;
                node["Write_extrinsicParameters"] >> bwriteExtrinsics;
                node["Write_outputFileName"] >> outputFileName;
                node["Calibrate_AssumeZeroTangentialDistortion"] >> calibZeroTangentDist;
                node["Calibrate_FixPrincipalPointAtTheCenter"] >> calibFixPrincipalPoint;
                node["Input_FlipAroundHorizontalAxis"] >> flipVertical;
                node["Show_UndistortedImage"] >> showUndistorsed;
                node["Input"] >> input;
                node["Input_Delay"] >> delay;
                interprate();
        }
        void interprate()
        {
                goodInput = true;
                if (boardSize.width <= 0 || boardSize.height <= 0)
                {
                        cerr << "Invalid Board size: " << boardSize.width << " " << boardSize.height << endl;
                        goodInput = false;
                }
                if (squareSize <= 10e-6)
                {
                        cerr << "Invalid square size " << squareSize << endl;
                        goodInput = false;
                }
                if (nrFrames <= 0)
                {
                        cerr << "Invalid number of frames " << nrFrames << endl;
                        goodInput = false;
                }

                if (input.empty())      // Check for valid input
                        inputType = INVALID;
                else
                {
                        if (input[0] >= '0' && input[0] <= '9')
                        {
                                stringstream ss(input);
                                ss >> cameraID;
                                inputType = CAMERA;
                        }
                        else
                        {
                                if (readStringList(input, imageList))
                                {
                                        inputType = IMAGE_LIST;
                                        nrFrames = (nrFrames < (int)imageList.size()) ? nrFrames : (int)imageList.size();
                                }
                                else
                                        inputType = VIDEO_FILE;
                        }
                        if (inputType == CAMERA)
                                inputCapture.open(cameraID);
                        if (inputType == VIDEO_FILE)
                                inputCapture.open(input);
                        if (inputType != IMAGE_LIST && !inputCapture.isOpened())
                                inputType = INVALID;
                }
                if (inputType == INVALID)
                {
                        cerr << " Inexistent input: " << input << endl;
                        goodInput = false;
                }

                flag = 0;
                if(calibFixPrincipalPoint) flag |= CV_CALIB_FIX_PRINCIPAL_POINT;
                if(calibZeroTangentDist)   flag |= CV_CALIB_ZERO_TANGENT_DIST;
                if(aspectRatio)            flag |= CV_CALIB_FIX_ASPECT_RATIO;

                calibrationPattern = NOT_EXISTING;
                if (!patternToUse.compare("CHESSBOARD")) calibrationPattern = CHESSBOARD;
                if (!patternToUse.compare("CIRCLES_GRID")) calibrationPattern = CIRCLES_GRID;
                if (!patternToUse.compare("ASYMMETRIC_CIRCLES_GRID")) calibrationPattern = ASYMMETRIC_CIRCLES_GRID;
                if (calibrationPattern == NOT_EXISTING)
                {
                        cerr << " Inexistent camera calibration mode: " << patternToUse << endl;
                        goodInput = false;
                }
                atImageList = 0;

        }
        Mat nextImage()
        {
                Mat result;
                if( inputCapture.isOpened() )
                {
                        Mat view0;
                        inputCapture >> view0;
                        view0.copyTo(result);
                }
                else if( atImageList < (int)imageList.size() )
                        result = imread(imageList[atImageList++], CV_LOAD_IMAGE_COLOR);

                return result;
        }

        static bool readStringList( const string& filename, vector<string>& l )
        {
                l.clear();
                FileStorage fs(filename, FileStorage::READ);
                if( !fs.isOpened() )
                        return false;
                FileNode n = fs.getFirstTopLevelNode();
                if( n.type() != FileNode::SEQ )
                        return false;
                FileNodeIterator it = n.begin(), it_end = n.end();
                for( ; it != it_end; ++it )
                        l.push_back((string)*it);
                return true;
        }
public:
        Size boardSize;            // The size of the board -> Number of items by width and height
        Pattern calibrationPattern;// One of the Chessboard, circles, or asymmetric circle pattern
        float squareSize;          // The size of a square in your defined unit (point, millimeter,etc).
        int nrFrames;              // The number of frames to use from the input for calibration
        float aspectRatio;         // The aspect ratio
        int delay;                 // In case of a video input
        bool bwritePoints;         //  Write detected feature points
        bool bwriteExtrinsics;     // Write extrinsic parameters
        bool calibZeroTangentDist; // Assume zero tangential distortion
        bool calibFixPrincipalPoint;// Fix the principal point at the center
        bool flipVertical;          // Flip the captured images around the horizontal axis
        string outputFileName;      // The name of the file where to write
        bool showUndistorsed;       // Show undistorted images after calibration
        string input;               // The input ->



        int cameraID;
        vector<string> imageList;
        int atImageList;
        VideoCapture inputCapture;
        InputType inputType;
        bool goodInput;
        int flag;

private:
        string patternToUse;


};

static void read(const FileNode& node, Settings& x, const Settings& default_value = Settings())
{
        if(node.empty())
                x = default_value;
        else
                x.read(node);
}

enum { DETECTION = 0, CAPTURING = 1, CALIBRATED = 2 };

bool runCalibrationAndSave(Settings& s, Size imageSize, Mat&  cameraMatrix, Mat& distCoeffs,
                           vector<vector<Point2f> > imagePoints );

int main(int argc, char* argv[])
{
//        help();
        Settings s;
        const string inputSettingsFile = argc > 1 ? argv[1] : "default.xml";
        FileStorage fs(inputSettingsFile, FileStorage::READ); // Read the settings
        if (!fs.isOpened())
        {
                cout << "Could not open the configuration file: \"" << inputSettingsFile << "\"" << endl;
                return -1;
        }
        fs["Settings"] >> s;
        fs.release();                                         // close Settings file

        if (!s.goodInput)
        {
                cout << "Invalid input detected. Application stopping. " << endl;
                return -1;
        }

        vector<vector<Point2f> > imagePoints;
        Mat cameraMatrix, distCoeffs;
        Size imageSize;
        int mode = s.inputType == Settings::IMAGE_LIST ? CAPTURING : DETECTION;
        clock_t prevTimestamp = 0;
        const Scalar RED(0,0,255), GREEN(0,255,0);
        const char ESC_KEY = 27;

        for(int i = 0;;++i)
        {
                Mat view;
                bool blinkOutput = false;

                view = s.nextImage();

                //-----  If no more image, or got enough, then stop calibration and show result -------------
                if( mode == CAPTURING && imagePoints.size() >= (unsigned)s.nrFrames )
                {
                        if( runCalibrationAndSave(s, imageSize,  cameraMatrix, distCoeffs, imagePoints))
                                mode = CALIBRATED;
                        else
                                mode = DETECTION;
                }
                if(view.empty())          // If no more images then run calibration, save and stop loop.
                {
                        if( imagePoints.size() > 0 )
                                runCalibrationAndSave(s, imageSize,  cameraMatrix, distCoeffs, imagePoints);
                        break;
                }


                imageSize = view.size();  // Format input image.
                if( s.flipVertical )    flip( view, view, 0 );

                vector<Point2f> pointBuf;

                bool found;
                switch( s.calibrationPattern ) // Find feature points on the input format
                {
                        case Settings::CHESSBOARD:
                                found = findChessboardCorners( view, s.boardSize, pointBuf,
                                                              CV_CALIB_CB_ADAPTIVE_THRESH | CV_CALIB_CB_FAST_CHECK | CV_CALIB_CB_NORMALIZE_IMAGE);
                                break;
                        case Settings::CIRCLES_GRID:
                                found = findCirclesGrid( view, s.boardSize, pointBuf );
                                break;
                        case Settings::ASYMMETRIC_CIRCLES_GRID:
                                found = findCirclesGrid( view, s.boardSize, pointBuf, CALIB_CB_ASYMMETRIC_GRID );
                                break;
                        default:
                                found = false;
                                break;
                }

                if ( found)                // If done with success,
                {
                        // improve the found corners' coordinate accuracy for chessboard
                        if( s.calibrationPattern == Settings::CHESSBOARD)
                        {
                                Mat viewGray;
                                cvtColor(view, viewGray, COLOR_BGR2GRAY);
                                cornerSubPix( viewGray, pointBuf, Size(11,11),
                                             Size(-1,-1), TermCriteria( CV_TERMCRIT_EPS+CV_TERMCRIT_ITER, 30, 0.1 ));
                        }

                        if( mode == CAPTURING &&  // For camera only take new samples after delay time
                           (!s.inputCapture.isOpened() || clock() - prevTimestamp > s.delay*1e-3*CLOCKS_PER_SEC) )
                        {
                                imagePoints.push_back(pointBuf);
                                prevTimestamp = clock();
                                blinkOutput = s.inputCapture.isOpened();
                        }

                        // Draw the corners.
                        drawChessboardCorners( view, s.boardSize, Mat(pointBuf), found );
                }

                //----------------------------- Output Text ------------------------------------------------
                string msg = (mode == CAPTURING) ? "100/100" :
                mode == CALIBRATED ? "Calibrated" : "Press 'g' to start";
                int baseLine = 0;
                Size textSize = getTextSize(msg, 1, 1, 1, &baseLine);
                Point textOrigin(view.cols - 2*textSize.width - 10, view.rows - 2*baseLine - 10);

                if( mode == CAPTURING )
                {
                        if(s.showUndistorsed)
                                msg = format( "%d/%d Undist", (int)imagePoints.size(), s.nrFrames );
                        else
                                msg = format( "%d/%d", (int)imagePoints.size(), s.nrFrames );
                }

                putText( view, msg, textOrigin, 1, 1, mode == CALIBRATED ?  GREEN : RED);

                if( blinkOutput )
                        bitwise_not(view, view);

                //------------------------- Video capture  output  undistorted ------------------------------
                if( mode == CALIBRATED && s.showUndistorsed )
                {
                        Mat temp = view.clone();
                        undistort(temp, view, cameraMatrix, distCoeffs);
                }

                //------------------------------ Show image and check for input commands -------------------
                imshow("Image View", view);
                char key = (char)waitKey(s.inputCapture.isOpened() ? 50 : s.delay);

                if( key  == ESC_KEY )
                        break;

                if( key == 'u' && mode == CALIBRATED )
                        s.showUndistorsed = !s.showUndistorsed;

                if( s.inputCapture.isOpened() && key == 'g' )
                {
                        mode = CAPTURING;
                        imagePoints.clear();
                }
        }

        // -----------------------Show the undistorted image for the image list ------------------------
        if( s.inputType == Settings::IMAGE_LIST && s.showUndistorsed )
        {
                Mat view, rview, map1, map2;
                initUndistortRectifyMap(cameraMatrix, distCoeffs, Mat(),
                                        getOptimalNewCameraMatrix(cameraMatrix, distCoeffs, imageSize, 1, imageSize, 0),
                                        imageSize, CV_16SC2, map1, map2);

                for(int i = 0; i < (int)s.imageList.size(); i++ )
                {
                        view = imread(s.imageList[i], 1);
                        if(view.empty())
                                continue;
                        remap(view, rview, map1, map2, INTER_LINEAR);
                        imshow("Image View", rview);
                        char c = (char)waitKey();
                        if( c  == ESC_KEY || c == 'q' || c == 'Q' )
                                break;
                }
        }


        return 0;
}

static double computeReprojectionErrors( const vector<vector<Point3f> >& objectPoints,
                                        const vector<vector<Point2f> >& imagePoints,
                                        const vector<Mat>& rvecs, const vector<Mat>& tvecs,
                                        const Mat& cameraMatrix , const Mat& distCoeffs,
                                        vector<float>& perViewErrors)
{
        vector<Point2f> imagePoints2;
        int i, totalPoints = 0;
        double totalErr = 0, err;
        perViewErrors.resize(objectPoints.size());

        for( i = 0; i < (int)objectPoints.size(); ++i )
        {
                projectPoints( Mat(objectPoints[i]), rvecs[i], tvecs[i], cameraMatrix,
                              distCoeffs, imagePoints2);
                err = norm(Mat(imagePoints[i]), Mat(imagePoints2), CV_L2);

                int n = (int)objectPoints[i].size();
                perViewErrors[i] = (float) std::sqrt(err*err/n);
                totalErr        += err*err;
                totalPoints     += n;
        }

        return std::sqrt(totalErr/totalPoints);
}

static void calcBoardCornerPositions(Size boardSize, float squareSize, vector<Point3f>& corners,
                                     Settings::Pattern patternType /*= Settings::CHESSBOARD*/)
{
        corners.clear();

        switch(patternType)
        {
                case Settings::CHESSBOARD:
                case Settings::CIRCLES_GRID:
                        for( int i = 0; i < boardSize.height; ++i )
                                for( int j = 0; j < boardSize.width; ++j )
                                        corners.push_back(Point3f(float( j*squareSize ), float( i*squareSize ), 0));
                        break;

                case Settings::ASYMMETRIC_CIRCLES_GRID:
                        for( int i = 0; i < boardSize.height; i++ )
                                for( int j = 0; j < boardSize.width; j++ )
                                        corners.push_back(Point3f(float((2*j + i % 2)*squareSize), float(i*squareSize), 0));
                        break;
                default:
                        break;
        }
}

static bool runCalibration( Settings& s, Size& imageSize, Mat& cameraMatrix, Mat& distCoeffs,
                           vector<vector<Point2f> > imagePoints, vector<Mat>& rvecs, vector<Mat>& tvecs,
                           vector<float>& reprojErrs,  double& totalAvgErr)
{

        cameraMatrix = Mat::eye(3, 3, CV_64F);
        if( s.flag & CV_CALIB_FIX_ASPECT_RATIO )
                cameraMatrix.at<double>(0,0) = 1.0;

        distCoeffs = Mat::zeros(8, 1, CV_64F);

        vector<vector<Point3f> > objectPoints(1);
        calcBoardCornerPositions(s.boardSize, s.squareSize, objectPoints[0], s.calibrationPattern);

        objectPoints.resize(imagePoints.size(),objectPoints[0]);

        //Find intrinsic and extrinsic camera parameters
        double rms = calibrateCamera(objectPoints, imagePoints, imageSize, cameraMatrix,
                                     distCoeffs, rvecs, tvecs, s.flag|CV_CALIB_FIX_K4|CV_CALIB_FIX_K5);

        cout << "Re-projection error reported by calibrateCamera: "<< rms << endl;

        bool ok = checkRange(cameraMatrix) && checkRange(distCoeffs);

        totalAvgErr = computeReprojectionErrors(objectPoints, imagePoints,
                                                rvecs, tvecs, cameraMatrix, distCoeffs, reprojErrs);

        return ok;
}

// Print camera parameters to the output file
static void saveCameraParams( Settings& s, Size& imageSize, Mat& cameraMatrix, Mat& distCoeffs,
                             const vector<Mat>& rvecs, const vector<Mat>& tvecs,
                             const vector<float>& reprojErrs, const vector<vector<Point2f> >& imagePoints,
                             double totalAvgErr )
{
        FileStorage fs( s.outputFileName, FileStorage::WRITE );

        time_t tm;
        time( &tm );
        struct tm *t2 = localtime( &tm );
        char buf[1024];
        strftime( buf, sizeof(buf)-1, "%c", t2 );

        fs << "calibration_Time" << buf;

        if( !rvecs.empty() || !reprojErrs.empty() )
                fs << "nrOfFrames" << (int)std::max(rvecs.size(), reprojErrs.size());
        fs << "image_Width" << imageSize.width;
        fs << "image_Height" << imageSize.height;
        fs << "board_Width" << s.boardSize.width;
        fs << "board_Height" << s.boardSize.height;
        fs << "square_Size" << s.squareSize;

        if( s.flag & CV_CALIB_FIX_ASPECT_RATIO )
                fs << "FixAspectRatio" << s.aspectRatio;

        if( s.flag )
        {
                sprintf( buf, "flags: %s%s%s%s",
                        s.flag & CV_CALIB_USE_INTRINSIC_GUESS ? " +use_intrinsic_guess" : "",
                        s.flag & CV_CALIB_FIX_ASPECT_RATIO ? " +fix_aspectRatio" : "",
                        s.flag & CV_CALIB_FIX_PRINCIPAL_POINT ? " +fix_principal_point" : "",
                        s.flag & CV_CALIB_ZERO_TANGENT_DIST ? " +zero_tangent_dist" : "" );
                cvWriteComment( *fs, buf, 0 );

        }

        fs << "flagValue" << s.flag;

        fs << "Camera_Matrix" << cameraMatrix;
        fs << "Distortion_Coefficients" << distCoeffs;

        fs << "Avg_Reprojection_Error" << totalAvgErr;
        if( !reprojErrs.empty() )
                fs << "Per_View_Reprojection_Errors" << Mat(reprojErrs);

        if( !rvecs.empty() && !tvecs.empty() )
        {
                CV_Assert(rvecs[0].type() == tvecs[0].type());
                Mat bigmat((int)rvecs.size(), 6, rvecs[0].type());
                for( int i = 0; i < (int)rvecs.size(); i++ )
                {
                        Mat r = bigmat(Range(i, i+1), Range(0,3));
                        Mat t = bigmat(Range(i, i+1), Range(3,6));

                        CV_Assert(rvecs[i].rows == 3 && rvecs[i].cols == 1);
                        CV_Assert(tvecs[i].rows == 3 && tvecs[i].cols == 1);
                        //*.t() is MatExpr (not Mat) so we can use assignment operator
                        r = rvecs[i].t();
                        t = tvecs[i].t();
                }
                cvWriteComment( *fs, "a set of 6-tuples (rotation vector + translation vector) for each view", 0 );
                fs << "Extrinsic_Parameters" << bigmat;
        }

        if( !imagePoints.empty() )
        {
                Mat imagePtMat((int)imagePoints.size(), (int)imagePoints[0].size(), CV_32FC2);
                for( int i = 0; i < (int)imagePoints.size(); i++ )
                {
                        Mat r = imagePtMat.row(i).reshape(2, imagePtMat.cols);
                        Mat imgpti(imagePoints[i]);
                        imgpti.copyTo(r);
                }
                fs << "Image_points" << imagePtMat;
        }
}

bool runCalibrationAndSave(Settings& s, Size imageSize, Mat&  cameraMatrix, Mat& distCoeffs,vector<vector<Point2f> > imagePoints )
{
        vector<Mat> rvecs, tvecs;
        vector<float> reprojErrs;
        double totalAvgErr = 0;

        bool ok = runCalibration(s,imageSize, cameraMatrix, distCoeffs, imagePoints, rvecs, tvecs,
                                 reprojErrs, totalAvgErr);
        cout << (ok ? "Calibration succeeded" : "Calibration failed")
        << ". avg re projection error = "  << totalAvgErr ;

        if( ok )
                saveCameraParams( s, imageSize, cameraMatrix, distCoeffs, rvecs ,tvecs, reprojErrs,
                                 imagePoints, totalAvgErr);
        return ok;
}
Answered 2019-08-03T07:03:14.600