1

我正在使用android “opencv samples - face detection”来检测人脸。它运作良好。现在我想在“面部检测”的基础上加入“眼睛检测”。可能吗?如果是的话,谁能给我一些检测眼睛的示例代码。我已经在互联网上进行了搜索,但没有找到任何示例。

4

4 回答 4

1

您可以使用以下代码检测眼睛——它会根据检测到的人脸数量,为每张人脸估算出左右眼的位置。

//代码...

/**
 * Detects up to MAX_FACES faces in {@code mBitmap} using
 * {@link android.media.FaceDetector} and derives an approximate left/right
 * eye location for each face from the reported eye midpoint and inter-eye
 * distance. Sets {@code MainActivity.clicked} when at least one face is found.
 */
public void setFace() {
    FaceDetector fd;
    // FaceDetector requires an RGB_565 bitmap.
    mFaceBitmap = mBitmap.copy(Bitmap.Config.RGB_565, true);
    mFaceWidth = mFaceBitmap.getWidth();
    mFaceHeight = mFaceBitmap.getHeight();
    FaceDetector.Face[] faces = new FaceDetector.Face[MAX_FACES];
    PointF eyesCenter = new PointF();
    float eyesDist = 0.0f;
    int[] fpx = null;
    int[] fpy = null;

    try {
        Log.e("setFace(): ", "try");
        fd = new FaceDetector(mFaceWidth, mFaceHeight, MAX_FACES);
        count = fd.findFaces(mFaceBitmap, faces);
    } catch (Exception e) {
        // Surface the failure instead of silently returning with no context.
        Log.e("setFace(): ", "findFaces failed: " + e);
        return;
    }

    // check if we detect any faces
    if (count > 0) {
        // BUG FIX: each face produces TWO eye points (left + right), so the
        // arrays must hold count * 2 entries and be indexed 2*i / 2*i + 1.
        // The original "count * 1" sizing overflowed on the right-eye write
        // (fpx[1*i + 1]) and the ArrayIndexOutOfBoundsException was silently
        // swallowed by the empty catch below.
        fpx = new int[count * 2];
        fpy = new int[count * 2];

        for (int i = 0; i < count; i++) {
            try {
                Log.e("setFace(): ", "loop");
                faces[i].getMidPoint(eyesCenter);
                eyesDist = faces[i].eyesDistance();

                // set up left eye location: half the inter-eye distance to
                // the left of the midpoint, at the midpoint's height.
                fpx[2 * i] = (int) (eyesCenter.x - eyesDist / 2);
                fpy[2 * i] = (int) eyesCenter.y;

                // set up right eye location: mirrored to the right.
                fpx[2 * i + 1] = (int) (eyesCenter.x + eyesDist / 2);
                fpy[2 * i + 1] = (int) eyesCenter.y;
            } catch (Exception e) {
                // Log per-face failures rather than swallowing them silently;
                // continue with the remaining faces.
                Log.e("setFace(): ", "face " + i + " failed: " + e);
            }
        }
        MainActivity.clicked = true;
    }
}
于 2012-09-03T05:19:47.330 回答
0

我认为这是可能的。您需要加载另一个级联(嵌套级联)并在人脸级联检测到的ROI中使用detectMultiScale。看一下 c 示例中的 facedetect.cpp 示例。

于 2012-09-05T07:29:04.357 回答
0

是的,你可以这样做。既有通用的眼睛检测器,也有专门针对左眼/右眼的检测器可用。

您可以使用与面部相同的 Haar 级联代码,只需替换眼睛检测器即可。

在实践中,我发现最好同时尝试两者 - 从通用眼睛检测器开始,然后如果在合理的位置没有发现任何东西,则继续使用左眼和右眼检测器。

于 2013-03-11T18:58:54.917 回答
0

您可以通过此代码

/**
 * Constructs the detection view: copies the face and eye cascade resources
 * to app-private storage (the native OpenCV loader needs real file paths),
 * then loads a face {@link CascadeClassifier}, right- and left-eye
 * classifiers, and the native detection-based tracker.
 */
public FdView(Context context) {
    super(context);
    this.context = context;

    try {
        // ---- face cascade ---------------------------------------------
        mCascadeFile = copyRawToFile(R.raw.lbpcascade_frontalface,
                "cascade", "lbpcascade_frontalface.xml");

        // ---- right-eye cascade ----------------------------------------
        // NOTE(review): this copies haarcascade_lefteye_2splits for the
        // RIGHT eye as well — confirm whether haarcascade_righteye_2splits
        // was intended here. Behavior kept as in the original.
        File cascadeFileER = copyRawToFile(R.raw.haarcascade_lefteye_2splits,
                "cascadeER", "haarcascade_eye_right.xml");

        // ---- left-eye cascade -----------------------------------------
        File cascadeFileEL = copyRawToFile(R.raw.haarcascade_lefteye_2splits,
                "cascadeEL", "haarcascade_eye_left.xml");

        mJavaDetector = new CascadeClassifier(
                mCascadeFile.getAbsolutePath());
        mCascadeER = new CascadeClassifier(cascadeFileER.getAbsolutePath());
        // BUG FIX: the left-eye classifier was loaded from cascadeFileER,
        // so both eye detectors used the right-eye file and cascadeFileEL
        // was never read. Load it from cascadeFileEL instead.
        mCascadeEL = new CascadeClassifier(cascadeFileEL.getAbsolutePath());
        if (mJavaDetector.empty() || mCascadeER.empty()
                || mCascadeEL.empty()) {
            Log.e(TAG, "Failed to load cascade classifier");
            mJavaDetector = null;
            mCascadeER = null;
            mCascadeEL = null;
        } else {
            Log.i(TAG,
                    "Loaded cascade classifier from "
                            + mCascadeFile.getAbsolutePath());
        }

        mNativeDetector = new DetectionBasedTracker(
                mCascadeFile.getAbsolutePath(), 0);

        // Best-effort cleanup of the temporary copies once the classifiers
        // are loaded (directory deletes fail while still non-empty, exactly
        // as in the original code).
        mCascadeFile.getParentFile().delete();
        cascadeFileER.delete();
        cascadeFileER.getParentFile().delete();
        cascadeFileEL.delete();
        cascadeFileEL.getParentFile().delete();

    } catch (IOException e) {
        e.printStackTrace();
        Log.e(TAG, "Failed to load cascade. Exception thrown: " + e);
    }
}

/**
 * Copies a raw resource into an app-private file and returns that file.
 *
 * @param rawResId raw resource id of the cascade XML
 * @param dirName  name of the app-private directory to create/use
 * @param fileName name of the destination file inside that directory
 * @return the destination file, fully written
 * @throws IOException if the resource cannot be read or the file written
 */
private File copyRawToFile(int rawResId, String dirName, String fileName)
        throws IOException {
    InputStream is = context.getResources().openRawResource(rawResId);
    File dir = context.getDir(dirName, Context.MODE_PRIVATE);
    File outFile = new File(dir, fileName);
    FileOutputStream os = new FileOutputStream(outFile);
    try {
        byte[] buffer = new byte[4096];
        int bytesRead;
        while ((bytesRead = is.read(buffer)) != -1) {
            os.write(buffer, 0, bytesRead);
        }
    } finally {
        // Close both streams even if the copy fails part-way.
        is.close();
        os.close();
    }
    return outFile;
}

@Override
public void surfaceCreated(SurfaceHolder holder) {
    // Allocate the frame buffers under the lock so processFrame() never
    // observes them half-initialized.
    synchronized (this) {
        mRgba = new Mat();
        mGray = new Mat();
    }

    super.surfaceCreated(holder);
}

@Override
protected Bitmap processFrame(VideoCapture capture) {
    // Grab the current camera frame in both color (for drawing) and
    // grayscale (for detection).
    capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
    capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);

    // Lazily derive the minimum face size in pixels from the frame height
    // (mRelativeFaceSize is a fraction of the frame), once per session.
    if (mAbsoluteFaceSize == 0) {
        int height = mGray.rows();
        if (Math.round(height * mRelativeFaceSize) > 0)

        {
            mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
        }
        mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
    }

    MatOfRect faces = new MatOfRect();

    if (mDetectorType == JAVA_DETECTOR) {
        // Java path: OpenCV CascadeClassifier on the grayscale frame.
        // scaleFactor=1.1, minNeighbors=2, flags=2, min size = mAbsoluteFaceSize.
        if (mJavaDetector != null)
            mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2, 2,
                    new Size(mAbsoluteFaceSize, mAbsoluteFaceSize),
                    new Size());

        /*
         * if (mZoomCorner == null || mZoomWindow == null)
         * CreateAuxiliaryMats();
         */

        Rect[] facesArray = faces.toArray();

        for (int i = 0; i < facesArray.length; i++) {

            Rect r = facesArray[i];

            // Draw the face rectangle on both the gray and color frames.
            Core.rectangle(mGray, r.tl(), r.br(),new Scalar(255, 0, 255, 0), 3);

            Core.rectangle(mRgba, r.tl(), r.br(),new Scalar(46, 139, 87, 5), 3);

            //eyearea = new Rect(r.x + r.width / 8,(int) (r.y + (r.height / 4.5)), r.width - 10 * r.width/ 8, (int) (r.height / 3.0));

        //  Core.rectangle(mRgba, eyearea.tl(), eyearea.br(), new Scalar(255, 0, 0, 255), 3);

            // Split the upper part of the face rectangle into two eye search
            // regions: each is (width - 2*width/16)/2 wide, starting at
            // ~1/4.5 of the face height and spanning ~1/3 of it.
            // NOTE(review): the rectangle built first (starting at the left
            // edge) is named eyearea_right — presumably it is the subject's
            // right eye as seen in the image; confirm against get_template.
            Rect eyearea_right = new Rect(r.x + r.width / 16,
                    (int) (r.y + (r.height / 4.5)),
                    (r.width - 2 * r.width / 16) / 2,(int) (r.height / 3.0));

            Rect eyearea_left = new Rect(r.x + r.width / 16+ (r.width - 2 * r.width / 16) / 2,(int) (r.y + (r.height / 4.5)),(r.width - 2 * r.width / 16) / 2,(int) (r.height / 3.0));

            // outer recTangles
            //Core.rectangle(mRgba, eyearea_left.tl(), eyearea_left.br(),new Scalar(0, 255, 255, 255), 3);

            //Core.rectangle(mRgba, eyearea_right.tl(), eyearea_right.br(),new Scalar(0, 255, 255, 255), 3);

            // Sample eye templates from each region using the respective eye
            // cascade (get_template is defined elsewhere in this class).
            teplateR = get_template(mCascadeER, eyearea_right, 3);

            teplateL = get_template(mCascadeEL, eyearea_left, 3);

            // Counts frames used to (re)learn the eye templates; the
            // template-matching branch below is currently disabled.
            learn_frames++;

            /*
             * match_value = match_eye(eyearea_right, teplateR,
             * FdActivity.method);
             * 
             * match_value = match_eye(eyearea_left, teplateL,
             * FdActivity.method); ; }
             */

            /************************************************************************************************/                  

            /*
             * Imgproc.resize(mRgba.submat(eyearea_left), mZoomWindow2,
             * mZoomWindow2.size());
             * Imgproc.resize(mRgba.submat(eyearea_right), mZoomWindow,
             * mZoomWindow.size());
             */

        }
    } else if (mDetectorType == NATIVE_DETECTOR) {
        // Native path: detection-based tracker (faces only, no eye regions).
        if (mNativeDetector != null)
            mNativeDetector.detect(mGray, faces);
    } else {
        Log.e(TAG, "Detection method is not selected!");
    }

    // Draw all detected faces on the color frame in the standard color.
    Rect[] facesArray = faces.toArray();
    for (int i = 0; i < facesArray.length; i++)
        Core.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(),
                FACE_RECT_COLOR, 3);

    // Convert the annotated frame to a Bitmap for display; on failure the
    // bitmap is recycled and null is returned.
    Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(),
            Bitmap.Config.ARGB_8888);

    try {
        Utils.matToBitmap(mRgba, bmp);
    } catch (Exception e) {
        Log.e(TAG,
                "Utils.matToBitmap() throws an exception: "
                        + e.getMessage());
        bmp.recycle();
        bmp = null;
    }

    return bmp;
}

希望这会帮助你。

于 2013-12-09T06:51:52.387 回答