
I want to implement a simple application (a modification of sample2) that shows SIFT, SURF, BRIEF and ORB in action, so the user can easily compare their rotation/scale invariance and speed. But I have run into a failure I cannot resolve, so I am turning to you. Whenever I try to use SIFT or SURF, I always get an exception on the line where I try to match: matcherBruteForce.match(descriptorFrame, matches);

I have a similar AR application that works with these settings, so I cannot figure out where I went wrong. I tried setting the variable "matcherBruteForce" to BRUTEFORCE, BRUTEFORCE_L1, BRUTEFORCE_SL2 and even BRUTEFORCE_HAMMING, but I always get the same exception:

SIFT:

CvException [org.opencv.core.CvException: /home/andreyk/OpenCV2/trunk/opencv_2.3.1.b2/modules/features2d/include/opencv2/features2d/features2d.hpp:2455: error: (-215) DataType<ValueType>::type == matcher.trainDescCollection[iIdx].type() || matcher.trainDescCollection[iIdx].empty() in function static void cv::BruteForceMatcher<Distance>::commonKnnMatchImpl(cv::BruteForceMatcher<Distance>&, const cv::Mat&, std::vector<std::vector<cv::DMatch> >&, int, const std::vector<cv::Mat>&, bool) [with Distance = cv::SL2<float>]
]

SURF:

CvException [org.opencv.core.CvException: /home/andreyk/OpenCV2/trunk/opencv_2.3.1.b2/modules/features2d/include/opencv2/features2d/features2d.hpp:2455: error: (-215) DataType<ValueType>::type == matcher.trainDescCollection[iIdx].type() || matcher.trainDescCollection[iIdx].empty() in function static void cv::BruteForceMatcher<Distance>::commonKnnMatchImpl(cv::BruteForceMatcher<Distance>&, const cv::Mat&, std::vector<std::vector<cv::DMatch> >&, int, const std::vector<cv::Mat>&, bool) [with Distance = cv::SL2<float>]
]

Any help is appreciated.

The whole class:

package sk.bolyos.opencv;

import java.util.Vector;

import org.opencv.features2d.DMatch;
import org.opencv.features2d.DescriptorExtractor;
import org.opencv.features2d.DescriptorMatcher;
import org.opencv.features2d.FeatureDetector;
import org.opencv.features2d.Features2d;
import org.opencv.features2d.KeyPoint;
import org.opencv.highgui.VideoCapture;
import org.opencv.android.Utils;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import org.opencv.highgui.Highgui;

import sk.bolyos.svk.*;

import android.content.Context;
import android.graphics.Bitmap;
import android.util.Log;
import android.view.SurfaceHolder;




public class MyView extends CvViewBase {

    private static final int BOUNDARY = 35;

    private Mat mRgba;
    private Mat mGray;
    private Mat mIntermediateMat;
    private Mat mLogoMilka1,mLogoMilka2,mLogoMilka3,mLogoMilka4;
    ///////////////////DETECTORS
    FeatureDetector siftDetector = FeatureDetector.create(FeatureDetector.SIFT);
    FeatureDetector surfDetector = FeatureDetector.create(FeatureDetector.SURF);
    FeatureDetector fastDetector = FeatureDetector.create(FeatureDetector.FAST);
    FeatureDetector orbDetector = FeatureDetector.create(FeatureDetector.ORB);
    ///////////////////DESCRIPTORS
    DescriptorExtractor siftDescriptor = DescriptorExtractor.create(DescriptorExtractor.SIFT);
    DescriptorExtractor surfDescriptor = DescriptorExtractor.create(DescriptorExtractor.SURF);
    DescriptorExtractor briefDescriptor = DescriptorExtractor.create(DescriptorExtractor.BRIEF);
    DescriptorExtractor orbDescriptor = DescriptorExtractor.create(DescriptorExtractor.ORB);
    ///////////////////DATABASE
    Vector<KeyPoint> vectorMilka1 = new Vector<KeyPoint>();
    Vector<KeyPoint> vectorMilka2 = new Vector<KeyPoint>();
    Vector<KeyPoint> vectorMilka3 = new Vector<KeyPoint>();
    Vector<KeyPoint> vectorMilka4 = new Vector<KeyPoint>();
    Mat descriptorMilka1 = new Mat();
    Mat descriptorMilka2 = new Mat();
    Mat descriptorMilka3 = new Mat(); 
    Mat descriptorMilka4 = new Mat();
    ///////////////////VIDEO
    Vector<KeyPoint> vectorFrame = new Vector<KeyPoint>();
    Mat descriptorFrame = new Mat();

    DescriptorMatcher matcherHamming = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
    DescriptorMatcher matcherBruteForce = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_SL2);
    Vector<DMatch> matches = new Vector<DMatch>();
    Vector<Mat> siftDescriptors = new Vector<Mat>();
    Vector<Mat> surfDescriptors = new Vector<Mat>();
    Vector<Mat> briefDescriptors = new Vector<Mat>();
    Vector<Mat> orbDescriptors = new Vector<Mat>();

    public MyView(Context context) {
        super(context);
        // TODO Auto-generated constructor stub
        try{
            /*
            if (mLogoMilka1 == null){
                mLogoMilka1 = new Mat();
                mLogoMilka1 = Utils.loadResource(getContext(), R.drawable.milkalogo);
                fillDB(mLogoMilka1,vectorMilka1,descriptorMilka1);
            }
            if (mLogoMilka2 == null){
                mLogoMilka2 = new Mat();
                mLogoMilka2 = Utils.loadResource(getContext(), R.drawable.milkalogom);
                fillDB(mLogoMilka2,vectorMilka2,descriptorMilka2);
            }
            if (mLogoMilka3 == null){
                mLogoMilka3 = new Mat();
                mLogoMilka3 = Utils.loadResource(getContext(), R.drawable.milkalogol);
                fillDB(mLogoMilka3,vectorMilka3,descriptorMilka3);
            }*/
            if (mLogoMilka4 == null){
                mLogoMilka4 = new Mat();
                mLogoMilka4 = Utils.loadResource(getContext(), R.drawable.milkalogolc);
                fillDB(mLogoMilka4,vectorMilka4,descriptorMilka4);
            }

        }catch(Exception e){
            Log.e( "SVK APPLICATION", "in MyView constructor "+e.toString());
        }
    }

    public void fillDB(Mat mLogo,Vector<KeyPoint> vector,Mat descriptor){

      //SIFT 
        siftDetector.detect( mLogo, vector );
        siftDescriptor.compute(mLogo, vector, descriptor);
        siftDescriptors.add(descriptor);
      //SURF 
        surfDetector.detect( mLogo, vector );
        surfDescriptor.compute(mLogo, vector, descriptor);
        surfDescriptors.add(descriptor);
      //FAST+BRIEF 
        fastDetector.detect( mLogo, vector );
        briefDescriptor.compute(mLogo, vector, descriptor);
        briefDescriptors.add(descriptor);
      //ORB 
        orbDetector.detect( mLogo, vector );
        orbDescriptor.compute(mLogo, vector, descriptor);
        orbDescriptors.add(descriptor);

    }


    @Override
    public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
        super.surfaceChanged(_holder, format, width, height);

        synchronized (this) {
            // initialize Mats before usage
            mGray = new Mat();
            mRgba = new Mat();
            mIntermediateMat = new Mat();
            matches = new Vector<DMatch>();
            vectorFrame = new Vector<KeyPoint>();
            descriptorFrame = new Mat(); 
        }
    }

    @Override
    protected Bitmap processFrame(VideoCapture capture) {
        // TODO Auto-generated method stub
        switch (SVKApplikaciaActivity.viewMode) {
        case SVKApplikaciaActivity.VIEW_MODE_SIFT:
            //TODO SIFT
            try{
                //matcherBruteForce = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE);
                //matcherBruteForce.clear();
                matcherBruteForce.add(siftDescriptors);
                matcherBruteForce.train();// proba

                capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
                Imgproc.resize(mGray, mGray,new Size(480,320)); 
                siftDetector.detect( mGray, vectorFrame );
                siftDescriptor.compute(mGray, vectorFrame, descriptorFrame);

                matcherBruteForce.match(descriptorFrame, matches);  
                Vector<DMatch> matchesXXX = new Vector<DMatch>();
                for (DMatch t : matches)
                    if(t.distance<BOUNDARY)
                        matchesXXX.add(t);
                Mat nGray = new Mat();
                Mat nLogo = new Mat();
                Mat nRgba = new Mat();
                Imgproc.cvtColor(mGray, nGray, Imgproc.COLOR_RGBA2RGB, 3);
                Imgproc.cvtColor(mLogoMilka4, nLogo, Imgproc.COLOR_RGBA2BGR, 3);
                Features2d.drawMatches(nGray, vectorFrame, nLogo, vectorMilka4, matchesXXX, nRgba);
                Imgproc.cvtColor(nRgba, mRgba, Imgproc.COLOR_RGB2RGBA, 4);
            }catch(Exception e){
                Log.e( "SVK APPLICATION","in SIFT "+ e.toString());
            }
            break;
        case SVKApplikaciaActivity.VIEW_MODE_SURF:
            //TODO SURF
            try{
                //matcherBruteForce = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE);
                //matcherBruteForce.clear();
                matcherBruteForce.add(surfDescriptors);
                matcherBruteForce.train();// proba

                capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
                Imgproc.resize(mGray, mGray,new Size(480,320)); 
                surfDetector.detect( mGray, vectorFrame );
                surfDescriptor.compute(mGray, vectorFrame, descriptorFrame);

                matcherBruteForce.match(descriptorFrame, matches);  
                Vector<DMatch> matchesXXX = new Vector<DMatch>();
                for (DMatch t : matches)
                    if(t.distance<BOUNDARY)
                        matchesXXX.add(t);
                Mat nGray = new Mat();
                Mat nLogo = new Mat();
                Mat nRgba = new Mat();
                Imgproc.cvtColor(mGray, nGray, Imgproc.COLOR_RGBA2RGB, 3);
                Imgproc.cvtColor(mLogoMilka4, nLogo, Imgproc.COLOR_RGBA2BGR, 3);
                Features2d.drawMatches(nGray, vectorFrame, nLogo, vectorMilka4, matchesXXX, nRgba);
                Imgproc.cvtColor(nRgba, mRgba, Imgproc.COLOR_RGB2RGBA, 4);
            }catch(Exception e){
                Log.e( "SVK APPLICATION","in Surf "+ e.toString());
            }
            break;
        case SVKApplikaciaActivity.VIEW_MODE_BRIEF:
            //TODO BRIEF
            try{
                matcherHamming = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
                matcherHamming.add(briefDescriptors);
                matcherHamming.train();// proba

                capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
                Imgproc.resize(mGray, mGray,new Size(480,320)); 
                fastDetector.detect( mGray, vectorFrame );
                briefDescriptor.compute(mGray, vectorFrame, descriptorFrame);

                matcherHamming.match(descriptorFrame, matches); 
                Vector<DMatch> matchesXXX = new Vector<DMatch>();
                for (DMatch t : matches)
                    if(t.distance<BOUNDARY)
                        matchesXXX.add(t);
                Mat nGray = new Mat();
                Mat nLogo = new Mat();
                Mat nRgba = new Mat();
                Imgproc.cvtColor(mGray, nGray, Imgproc.COLOR_RGBA2RGB, 3);
                Imgproc.cvtColor(mLogoMilka4, nLogo, Imgproc.COLOR_RGBA2BGR, 3);
                Features2d.drawMatches(nGray, vectorFrame, nLogo, vectorMilka4, matchesXXX, nRgba);
                Imgproc.cvtColor(nRgba, mRgba, Imgproc.COLOR_RGB2RGBA, 4);
            }catch(Exception e){
                Log.e( "SVK APPLICATION","in Brief "+ e.toString());
            }
            break;
        case SVKApplikaciaActivity.VIEW_MODE_ORB:
            //TODO ORB
            try{
                matcherHamming = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
                matcherHamming.add(orbDescriptors);
                matcherHamming.train();// proba

                capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
                Imgproc.resize(mGray, mGray,new Size(480,320)); 
                orbDetector.detect( mGray, vectorFrame );
                orbDescriptor.compute(mGray, vectorFrame, descriptorFrame);

                matcherHamming.match(descriptorFrame, matches); 
                Vector<DMatch> matchesXXX = new Vector<DMatch>();
                for (DMatch t : matches)
                    if(t.distance<BOUNDARY)
                        matchesXXX.add(t);
                Mat nGray = new Mat();
                Mat nLogo = new Mat();
                Mat nRgba = new Mat();
                Imgproc.cvtColor(mGray, nGray, Imgproc.COLOR_RGBA2RGB, 3);
                Imgproc.cvtColor(mLogoMilka4, nLogo, Imgproc.COLOR_RGBA2BGR, 3);
                Features2d.drawMatches(nGray, vectorFrame, nLogo, vectorMilka4, matchesXXX, nRgba);
                Imgproc.cvtColor(nRgba, mRgba, Imgproc.COLOR_RGB2RGBA, 4);
                }catch(Exception e){
                    Log.e( "SVK APPLICATION","in ORB "+ e.toString());
                }
            break;  
        case SVKApplikaciaActivity.VIEW_MODE_AR:
            //TODO AR
            break;    

        }

        Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888);

        if (Utils.matToBitmap(mRgba, bmp))
            return bmp;

        bmp.recycle();

        return null;
    }

    @Override
    public void run() {
        super.run();

        synchronized (this) {
            // Explicitly deallocate Mats
            if (mRgba != null)
                mRgba.release();
            if (mGray != null)
                mGray.release();
            if (mIntermediateMat != null)
                mIntermediateMat.release();

            mRgba = null;
            mGray = null;
            mIntermediateMat = null;
        }
    }

}

2 Answers


I think I know the problem. The matcher you are using cannot be applied to SIFT and SURF descriptors. If you want to use a DescriptorMatcher with SIFT or SURF, you have to set it to

DescriptorMatcher matcherBruteForce=DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_SL2);

Since SURF and SIFT only produce float-based descriptors, the matcher will return this error if you set the DescriptorMatcher to HAMMING and pass them to it.

Note that in your code you have two DescriptorMatchers, one set to BRUTEFORCE_SL2 and one set to HAMMING. Make sure you pass the right one, i.e. BRUTEFORCE_SL2, to SIFT or SURF.
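
For illustration, here is a minimal sketch (reusing the fields and the add/train/match pattern from the class above; the helper name is hypothetical) that picks the brute-force matcher from the descriptor Mat type instead of hard-coding it:

    // Hypothetical helper: choose the brute-force matcher that fits the descriptor type.
    // Requires: import org.opencv.core.CvType;
    static DescriptorMatcher matcherFor(Mat trainDescriptors) {
        if (trainDescriptors.type() == CvType.CV_32F) {
            // SIFT/SURF produce float (CV_32F) descriptors -> L2-style distance
            return DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_SL2);
        }
        // BRIEF/ORB produce uchar (CV_8U) descriptors -> Hamming distance
        return DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING);
    }

    // Usage in the SIFT branch, same pattern as in the question:
    DescriptorMatcher matcher = matcherFor(descriptorMilka4);
    matcher.add(siftDescriptors);   // only float descriptors go into this matcher
    matcher.train();
    matcher.match(descriptorFrame, matches);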

However, it is better to use a FLANN-based matcher with SIFT or SURF, because they extract far more keypoints than ORB and FLANN is suited to large numbers of keypoints. Read more about it here http://computer-vision-talks.com/2011/07/comparison-of-the-opencvs-feature-detection-algorithms-ii/

and here http://opencv.willowgarage.com/documentation/cpp/flann_fast_approximate_nearest_neighbor_search.html
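
A minimal sketch of the FLANN route, assuming the same Java bindings as above (FLANN here works with float descriptors such as SIFT/SURF):

    // Hypothetical sketch: FLANN-based matching for the float SIFT descriptors.
    DescriptorMatcher flannMatcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
    flannMatcher.add(siftDescriptors);   // CV_32F train descriptors from the logo images
    flannMatcher.train();                // builds the FLANN index
    flannMatcher.match(descriptorFrame, matches);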

Update: it is possible to match uchar descriptors with L2 or L1 distance. If you set the DescriptorMatcher to BRUTEFORCE it may also work for ORB (although with poor results).

Answered 2012-04-10T12:47:00.020

Are you sure the size of your vectorFrame is not zero? I think I had the same problem. Your problem is in the detection step: I think it returns an empty vectorFrame when the color code of your image is wrong.

Just put Log.e( "SVK APPLICATION","vectorFrame size = "+ vectorFrame.size()); somewhere.
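
A minimal sketch of such a guard, assuming the fields from the question's class (log the keypoint count and skip the frame if detection or description produced nothing):

    // Hypothetical guard around the SIFT branch: do not match on empty results.
    siftDetector.detect(mGray, vectorFrame);
    Log.e("SVK APPLICATION", "vectorFrame size = " + vectorFrame.size());
    if (vectorFrame.isEmpty())
        return null;                     // nothing detected in this frame
    siftDescriptor.compute(mGray, vectorFrame, descriptorFrame);
    if (!descriptorFrame.empty())
        matcherBruteForce.match(descriptorFrame, matches);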

Answered 2012-04-04T13:17:31.850