
I want to compute the perspective transform (the matrix for the warpPerspective function) starting from the rotation angle and the distance to the object.

How do I do that?

I found some code somewhere on OE. A sample program follows:

#include <opencv2/objdetect/objdetect.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>

#include <iostream>
#include <math.h>

using namespace std;
using namespace cv;

Mat frame;

int alpha_int;
int dist_int;
int f_int;

double w;
double h; 
double alpha; 
double dist; 
double f;

void redraw() {

    alpha = (double)alpha_int/1000.;
    //dist = 1./(dist_int+1);
    //dist = dist_int+1;
    dist = dist_int-50;
    f = f_int+1;

    cout << "alpha = " << alpha << endl;
    cout << "dist = " << dist << endl;
    cout << "f = " << f << endl;

    // Projection 2D -> 3D matrix
    Mat A1 = (Mat_<double>(4,3) <<
        1,              0,              -w/2,
        0,              1,              -h/2,
        0,              0,              1,
        0,              0,              1);

    // Rotation matrix around the X axis
    Mat R = (Mat_<double>(4, 4) <<
        1,              0,              0,              0,
        0,              cos(alpha),     -sin(alpha),    0,
        0,              sin(alpha),     cos(alpha),     0,
        0,              0,              0,              1);

    // Translation matrix on the Z axis 
    Mat T = (Mat_<double>(4, 4) <<
        1,              0,              0,              0,
        0,              1,              0,              0,
        0,              0,              1,              dist,
        0,              0,              0,              1);

    // Camera intrinsics matrix 3D -> 2D
    Mat A2 = (Mat_<double>(3,4) <<
        f,              0,              w/2,            0,
        0,              f,              h/2,            0,
        0,              0,              1,              0);

    Mat m = A2 * (T * (R * A1));

    cout << "R=" << endl << R << endl;
    cout << "A1=" << endl << A1 << endl;
    cout << "R*A1=" << endl << (R*A1) << endl;
    cout << "T=" << endl << T << endl;
    cout << "T * (R * A1)=" << endl << (T * (R * A1)) << endl;
    cout << "A2=" << endl << A2 << endl;
    cout << "A2 * (T * (R * A1))=" << endl << (A2 * (T * (R * A1))) << endl;
    cout << "m=" << endl << m << endl;

    Mat frame1;


    warpPerspective( frame, frame1, m, frame.size(), INTER_CUBIC | WARP_INVERSE_MAP);

    imshow("Frame", frame);
    imshow("Frame1", frame1);
}

void callback(int, void* ) {
    redraw();
}

int main() {


    frame = imread("FruitSample_small.png", CV_LOAD_IMAGE_COLOR);
    imshow("Frame", frame);

    w = frame.size().width;
    h = frame.size().height; 

    createTrackbar("alpha", "Frame", &alpha_int, 100, &callback);
    dist_int = 50;
    createTrackbar("dist", "Frame", &dist_int, 100, &callback);
    createTrackbar("f", "Frame", &f_int, 100, &callback);

    redraw();

    waitKey(-1);
}

But unfortunately, this transform does something strange:

[image: the distorted warp output]

Why? What is that other half of the image above when alpha > 0? And how do I rotate around the other axes? Why does dist behave so strangely?


3 Answers


I had enough time to think about the math and the code. I did this a year or two ago. I even typeset it in nice LaTeX.

I intentionally designed my solution so that no matter what rotation angles are provided, the entire input image is contained, centered, within the output frame, which is otherwise black.

The arguments to my warpImage function are the rotation angles for all 3 axes, a scale factor, and the vertical field-of-view angle. The function outputs the warp matrix, the output image, and the corners of the source image within the output image.

The math (for the code, see below)

[image: page 1 of the typeset math]

The LaTeX source code is here.
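Since the typeset pages are not reproduced here, the following is a sketch of the key relations, read back out of the code below (my own notation, not necessarily the original derivation). With image size W x H, scale s, and vertical field of view fovy:

% Quantities computed in warpMatrix (reconstructed from the code below)
d = \sqrt{W^2 + H^2}                        % image diagonal
S = \frac{s\,d}{\cos(fovy/2)}               % output side length
h = \frac{d}{2\sin(fovy/2)}                 % camera distance along -Z
n = h - \tfrac{d}{2}, \qquad f = h + \tfrac{d}{2}   % near and far planes
F = P\,T\,R_{\phi}\,R_{\theta}\,R_{\gamma}  % combined 4x4 transform

P = \begin{pmatrix}
    \cot(fovy/2) & 0 & 0 & 0 \\
    0 & \cot(fovy/2) & 0 & 0 \\
    0 & 0 & -\frac{f+n}{f-n} & -\frac{2fn}{f-n} \\
    0 & 0 & -1 & 0
\end{pmatrix}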

The code (for the math, look above)

Here is a test application that warps frames from the camera:

#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <math.h>


using namespace cv;
using namespace std;


static double rad2Deg(double rad){return rad*(180/M_PI);}//Convert radians to degrees
static double deg2Rad(double deg){return deg*(M_PI/180);}//Convert degrees to radians




void warpMatrix(Size   sz,
                double theta,
                double phi,
                double gamma,
                double scale,
                double fovy,
                Mat&   M,
                vector<Point2f>* corners){
    double st=sin(deg2Rad(theta));
    double ct=cos(deg2Rad(theta));
    double sp=sin(deg2Rad(phi));
    double cp=cos(deg2Rad(phi));
    double sg=sin(deg2Rad(gamma));
    double cg=cos(deg2Rad(gamma));

    double halfFovy=fovy*0.5;
    double d=hypot(sz.width,sz.height);
    double sideLength=scale*d/cos(deg2Rad(halfFovy));
    double h=d/(2.0*sin(deg2Rad(halfFovy)));
    double n=h-(d/2.0);
    double f=h+(d/2.0);

    Mat F=Mat(4,4,CV_64FC1);//Allocate 4x4 transformation matrix F
    Mat Rtheta=Mat::eye(4,4,CV_64FC1);//Allocate 4x4 rotation matrix around Z-axis by theta degrees
    Mat Rphi=Mat::eye(4,4,CV_64FC1);//Allocate 4x4 rotation matrix around X-axis by phi degrees
    Mat Rgamma=Mat::eye(4,4,CV_64FC1);//Allocate 4x4 rotation matrix around Y-axis by gamma degrees

    Mat T=Mat::eye(4,4,CV_64FC1);//Allocate 4x4 translation matrix along Z-axis by -h units
    Mat P=Mat::zeros(4,4,CV_64FC1);//Allocate 4x4 projection matrix

    //Rtheta
    Rtheta.at<double>(0,0)=Rtheta.at<double>(1,1)=ct;
    Rtheta.at<double>(0,1)=-st;Rtheta.at<double>(1,0)=st;
    //Rphi
    Rphi.at<double>(1,1)=Rphi.at<double>(2,2)=cp;
    Rphi.at<double>(1,2)=-sp;Rphi.at<double>(2,1)=sp;
    //Rgamma
    Rgamma.at<double>(0,0)=Rgamma.at<double>(2,2)=cg;
    Rgamma.at<double>(0,2)=-sg;Rgamma.at<double>(2,0)=sg;

    //T
    T.at<double>(2,3)=-h;
    //P
    P.at<double>(0,0)=P.at<double>(1,1)=1.0/tan(deg2Rad(halfFovy));
    P.at<double>(2,2)=-(f+n)/(f-n);
    P.at<double>(2,3)=-(2.0*f*n)/(f-n);
    P.at<double>(3,2)=-1.0;
    //Compose transformations
    F=P*T*Rphi*Rtheta*Rgamma;//Matrix-multiply to produce master matrix

    //Transform 4x4 points
    double ptsIn [4*3];
    double ptsOut[4*3];
    double halfW=sz.width/2, halfH=sz.height/2;

    ptsIn[0]=-halfW;ptsIn[ 1]= halfH;
    ptsIn[3]= halfW;ptsIn[ 4]= halfH;
    ptsIn[6]= halfW;ptsIn[ 7]=-halfH;
    ptsIn[9]=-halfW;ptsIn[10]=-halfH;
    ptsIn[2]=ptsIn[5]=ptsIn[8]=ptsIn[11]=0;//Set Z component to zero for all 4 points

    Mat ptsInMat(1,4,CV_64FC3,ptsIn);
    Mat ptsOutMat(1,4,CV_64FC3,ptsOut);

    perspectiveTransform(ptsInMat,ptsOutMat,F);//Transform points

    //Get 3x3 transform and warp image
    Point2f ptsInPt2f[4];
    Point2f ptsOutPt2f[4];

    for(int i=0;i<4;i++){
        Point2f ptIn (ptsIn [i*3+0], ptsIn [i*3+1]);
        Point2f ptOut(ptsOut[i*3+0], ptsOut[i*3+1]);
        ptsInPt2f[i]  = ptIn+Point2f(halfW,halfH);
        ptsOutPt2f[i] = (ptOut+Point2f(1,1))*(sideLength*0.5);
    }

    M=getPerspectiveTransform(ptsInPt2f,ptsOutPt2f);

    //Load corners vector
    if(corners){
        corners->clear();
        corners->push_back(ptsOutPt2f[0]);//Push Top Left corner
        corners->push_back(ptsOutPt2f[1]);//Push Top Right corner
        corners->push_back(ptsOutPt2f[2]);//Push Bottom Right corner
        corners->push_back(ptsOutPt2f[3]);//Push Bottom Left corner
    }
}

void warpImage(const Mat &src,
               double    theta,
               double    phi,
               double    gamma,
               double    scale,
               double    fovy,
               Mat&      dst,
               Mat&      M,
               vector<Point2f> &corners){
    double halfFovy=fovy*0.5;
    double d=hypot(src.cols,src.rows);
    double sideLength=scale*d/cos(deg2Rad(halfFovy));

    warpMatrix(src.size(),theta,phi,gamma, scale,fovy,M,&corners);//Compute warp matrix
    warpPerspective(src,dst,M,Size(sideLength,sideLength));//Do actual image warp
}


int main(void){
    int c = 0;
    Mat m, disp, warp;
    vector<Point2f> corners;
    VideoCapture cap(0);

    while(c != 033 && cap.isOpened()){//Loop until ESC (octal 033) is pressed or the camera closes
        cap >> m;
        warpImage(m, 5, 50, 0, 1, 30, disp, warp, corners);
        imshow("Disp", disp);
        c = waitKey(1);
    }
}
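Not part of the original answer, but as a usage sketch: the corners returned by warpImage can be used, for example, to outline the warped source quad in the output. This fragment assumes it runs inside the loop above, right after warpImage has filled disp and corners:

        // Sketch only: draw the outline of the warped source image using
        // the corners returned by warpImage (TL, TR, BR, BL order).
        for(size_t i = 0; i < corners.size(); i++){
            line(disp, corners[i], corners[(i + 1) % corners.size()],
                 Scalar(0, 255, 0), 2);//Green outline, 2 px wide
        }
        imshow("Disp", disp);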
Answered 2013-11-20T06:59:36.350

I ran into a similar problem. It turns out that after back-projecting into 3D coordinates, since we don't have the camera's intrinsic parameters, we use some guessed values, or even 1, as in your matrix A1. During the rotation this can cause the image to end up on the "wrong side" of the image plane, with negative depth values (z < 0). After projection, when you divide your coordinates by z, you get something strange, like what you showed.

So the solution turns out to be scaling the focal length to something relative to your image size.

Say your image has size (H, W); set the focal length to, for example, H/3. In that case your A1 becomes:

[H/3,   0, W/2]
[  0, H/3, H/2]
[  0,   0,   1]
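Applied to the question's code, this suggestion amounts to something like the following sketch (my reading, using the question's variable names; there the focal length lives in A2, so that is where the scaled value goes):

    // Sketch only: pick the focal length relative to the image size,
    // per this answer's suggestion, instead of taking it from the trackbar.
    f = h/3.;                               // h = image height
    Mat A2 = (Mat_<double>(3,4) <<
        f,              0,              w/2,            0,
        0,              f,              h/2,            0,
        0,              0,              1,              0);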
Answered 2017-02-02T13:28:56.267

Here is my Java conversion of the popular C++ sample-code answer above. I have no way to vouch for the correctness of this Java code, the original C++ code, or the underlying equations, nor do I know what the several comments about adjusting the equations mean; I can only say it appears to work. It is for experimental purposes, playing with image processing with high-school students.

package app;

import java.util.Arrays;
import java.util.stream.Collectors;

import org.opencv.highgui.HighGui;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Size;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.core.Point;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint2f;
import org.opencv.imgproc.Imgproc;
import org.opencv.videoio.VideoCapture;

public class App {
    static {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME); // Load the native library.
    }

    static void warpMatrix(Size   sz,
                    double theta,
                    double phi,
                    double gamma,
                    double scale,
                    double fovy,
                    Mat   M,
                    MatOfPoint2f corners) {

        double st=Math.sin(Math.toRadians(theta));
        double ct=Math.cos(Math.toRadians(theta));
        double sp=Math.sin(Math.toRadians(phi));
        double cp=Math.cos(Math.toRadians(phi));
        double sg=Math.sin(Math.toRadians(gamma));
        double cg=Math.cos(Math.toRadians(gamma));

        double halfFovy=fovy*0.5;
        double d=Math.hypot(sz.width,sz.height);
        double sideLength=scale*d/Math.cos(Math.toRadians(halfFovy));
        double h=d/(2.0*Math.sin(Math.toRadians(halfFovy)));
        double n=h-(d/2.0);
        double f=h+(d/2.0);

        Mat F=new Mat(4,4, CvType.CV_64FC1);//Allocate 4x4 transformation matrix F
        Mat Rtheta=Mat.eye(4,4, CvType.CV_64FC1);//Allocate 4x4 rotation matrix around Z-axis by theta degrees
        Mat Rphi=Mat.eye(4,4, CvType.CV_64FC1);//Allocate 4x4 rotation matrix around X-axis by phi degrees
        Mat Rgamma=Mat.eye(4,4, CvType.CV_64FC1);//Allocate 4x4 rotation matrix around Y-axis by gamma degrees

        Mat T=Mat.eye(4,4, CvType.CV_64FC1);//Allocate 4x4 translation matrix along Z-axis by -h units
        Mat P=Mat.zeros(4,4, CvType.CV_64FC1);//Allocate 4x4 projection matrix
                                                // zeros instead of eye as in github manisoftwartist/perspectiveproj

        //Rtheta Z
        Rtheta.put(0,0, ct);
        Rtheta.put(1,1, ct);
        Rtheta.put(0,1, -st);
        Rtheta.put(1,0, st);
        //Rphi X
        Rphi.put(1,1, cp);
        Rphi.put(2,2, cp);
        Rphi.put(1,2, -sp);
        Rphi.put(2,1, sp);
        //Rgamma Y
        Rgamma.put(0,0, cg);
        Rgamma.put(2,2, cg);
        Rgamma.put(0,2, -sg); // sign reversed? Math different convention than computer graphics according to Wikipedia
        Rgamma.put(2,0, sg);
        //T
        T.put(2,3, -h);
        //P Perspective Matrix (see also in computer vision a camera matrix or (camera) projection matrix is a 3x4 matrix which describes the mapping of a pinhole camera from 3D points in the world to 2D points in an image.)
        P.put(0,0, 1.0/Math.tan(Math.toRadians(halfFovy)));
        P.put(1,1, 1.0/Math.tan(Math.toRadians(halfFovy)));
        P.put(2,2, -(f+n)/(f-n));
        P.put(2,3, -(2.0*f*n)/(f-n));
        P.put(3,2, -1.0);
        System.out.println("P " + P.dump());
        System.out.println("T " + T.dump());
        System.out.println("Rphi " + Rphi.dump());
        System.out.println("Rtheta " + Rtheta.dump());
        System.out.println("Rgamma " + Rgamma.dump());
        //Compose transformations
        //F=P*T*Rphi*Rtheta*Rgamma;//Matrix-multiply to produce master matrix
        //gemm(Mat src1, Mat src2, double alpha, Mat src3, double beta, Mat dst)
        //dst = alpha*src1.t()*src2 + beta*src3.t(); // w or w/o the .t() transpose
        // D=α∗AB+β∗C
        Mat F1 = new Mat();
        Mat F2 = new Mat();
        Mat F3 = new Mat();
        Core.gemm(P, T, 1, new Mat(), 0, F1);
        Core.gemm(F1, Rphi, 1, new Mat(), 0, F2);
        Core.gemm(F2, Rtheta, 1, new Mat(), 0, F3);
        Core.gemm(F3, Rgamma, 1, new Mat(), 0, F);
        P.release();
        T.release();
        Rphi.release();
        Rtheta.release();
        Rgamma.release();
        F1.release();
        F2.release();
        F3.release();

        //Transform 4x4 points
        double[] ptsIn = new double[4*3];
        double[] ptsOut = new double[4*3];
        double halfW=sz.width/2, halfH=sz.height/2;

        ptsIn[0]=-halfW;ptsIn[ 1]= halfH;
        ptsIn[3]= halfW;ptsIn[ 4]= halfH;
        ptsIn[6]= halfW;ptsIn[ 7]=-halfH;
        ptsIn[9]=-halfW;ptsIn[10]=-halfH;
        ptsIn[2]=ptsIn[5]=ptsIn[8]=ptsIn[11]=0;//Set Z component to zero for all 4 points

        Mat ptsInMat = new Mat(1,4,CvType.CV_64FC3);
        ptsInMat.put(0,0, ptsIn);

        Mat ptsOutMat = new Mat(1,4,CvType.CV_64FC3);

        System.out.println("ptsInMat " + ptsInMat + " " + ptsInMat.dump());
        System.out.println("F " + F + " " + F.dump());
        Core.perspectiveTransform(ptsInMat, ptsOutMat, F);//Transform points
        System.out.println("ptsOutMat " + ptsOutMat + " " + ptsOutMat.dump());
        ptsInMat.release();
        F.release();
        ptsOutMat.get(0, 0, ptsOut);
        ptsOutMat.release();
        System.out.println(toString(ptsOut));
        System.out.println(halfW + " " + halfH);

        //Get 3x3 transform and warp image
        Point[] ptsInPt2f = new Point[4];
        Point[] ptsOutPt2f = new Point[4];
        for(int i=0;i<4;i++){
            ptsInPt2f[i] = new Point(0, 0);
            ptsOutPt2f[i] = new Point(0, 0);
            System.out.println(i);
            System.out.println("points " + ptsIn [i*3+0] + " " + ptsIn [i*3+1]);
            Point ptIn = new Point(ptsIn [i*3+0], ptsIn [i*3+1]);
            Point ptOut = new Point(ptsOut[i*3+0], ptsOut[i*3+1]);

            ptsInPt2f[i].x  = ptIn.x+halfW;
            ptsInPt2f[i].y  = ptIn.y+halfH;

            ptsOutPt2f[i].x = (ptOut.x+1) * sideLength*0.5;
            ptsOutPt2f[i].y = (ptOut.y+1) * sideLength*0.5;
           System.out.println("ptsOutPt2f " + ptsOutPt2f[i]);
        }  

        Mat ptsInPt2fTemp =  Mat.zeros(4,1,CvType.CV_32FC2);
        ptsInPt2fTemp.put(0, 0,
            ptsInPt2f[0].x,ptsInPt2f[0].y,
            ptsInPt2f[1].x,ptsInPt2f[1].y,
            ptsInPt2f[2].x,ptsInPt2f[2].y,
            ptsInPt2f[3].x,ptsInPt2f[3].y);

        Mat ptsOutPt2fTemp = Mat.zeros(4,1,CvType.CV_32FC2);
        ptsOutPt2fTemp.put(0, 0,
            ptsOutPt2f[0].x,ptsOutPt2f[0].y,
            ptsOutPt2f[1].x,ptsOutPt2f[1].y,
            ptsOutPt2f[2].x,ptsOutPt2f[2].y,
            ptsOutPt2f[3].x,ptsOutPt2f[3].y);

        System.out.println("ptsInPt2fTemp " + ptsInPt2fTemp.dump());
        System.out.println("ptsOutPt2fTemp " + ptsOutPt2fTemp.dump());
        Mat warp=Imgproc.getPerspectiveTransform(ptsInPt2fTemp, ptsOutPt2fTemp);
        warp.copyTo(M);
        ptsInPt2fTemp.release();
        warp.release();

        //Load corners vector
        if(corners != null)
        {
            corners.put(0,0, ptsOutPt2f[0].x, ptsOutPt2f[0].y//Push Top Left corner
            , ptsOutPt2f[1].x, ptsOutPt2f[1].y//Push Top Right corner
            , ptsOutPt2f[2].x, ptsOutPt2f[2].y//Push Bottom Right corner
            , ptsOutPt2f[3].x, ptsOutPt2f[3].y);//Push Bottom Left corner
        }
        ptsOutPt2fTemp.release();
        System.out.println("corners " + corners + " " + corners.dump());
    }

    static void warpImage(Mat src,
                   double    theta,
                   double    phi,
                   double    gamma,
                   double    scale,
                   double    fovy,
                   Mat      dst,
                   Mat      M,
                   MatOfPoint2f corners){
        double halfFovy=fovy*0.5;
        double d=Math.hypot(src.cols(),src.rows());
        double sideLength=scale*d/Math.cos(Math.toRadians(halfFovy));
        System.out.println("d " + d + ", sideLength " + sideLength);
        warpMatrix(src.size(), theta, phi, gamma, scale, fovy, M, corners);//Compute warp matrix
        System.out.println("M " + M + " " + M.dump());
        Imgproc.warpPerspective(src, dst, M, new Size(sideLength,sideLength));//Do actual image warp
    }

    public static void main(String[] args)
    {
        int c = 0;
        Mat m = new Mat();
        Mat disp = new Mat();
        Mat warp = new Mat();
        MatOfPoint2f corners = new MatOfPoint2f(new Point(0,0),new Point(0,0),new Point(0,0),new Point(0,0));

        String filename = "lena.jpg";
        m = Imgcodecs.imread(filename, Imgcodecs.IMREAD_COLOR);
        if (m.empty()) {
            System.out.println("Error opening image");
            System.exit(-1);
        }

        double scale = 1.;
        double fovy = 53.;
        double halfFovy=fovy*0.5;

        VideoCapture cap;
        cap = new VideoCapture();
        cap.open(0);
        cap.read(m);
        warpImage(m, 5, 50, 0, 1, 30, disp, warp, corners); // fovy = rad2deg(arctan2(640,480)) = 53 ??

        while(true) {
            cap.read(m);
            double d=Math.hypot(m.cols(),m.rows());
            double sideLength=scale*d/Math.cos(Math.toRadians(halfFovy));
            Imgproc.warpPerspective(m, disp, warp, new Size(sideLength,sideLength));//Do actual image warp
            HighGui.imshow("Disp", disp);
            HighGui.imshow("Orig", m);
            c = HighGui.waitKey(25);
            if (c != -1) break;
        }

        m.release();
        disp.release();
        warp.release();
        corners.release();
        System.exit(0);
    }

    static String toString(double[] array) {
        return Arrays.stream(array)
                .mapToObj(i -> String.format("%5.2f", i))
                .collect(Collectors.joining(", ", "[", "]"));
                //.collect(Collectors.joining("|", "|", "|"));
            }
}

Answered 2020-06-11T19:33:07.020