
There isn't much information about the camera2 face detection mechanism. I used Google's Camera2 sample: https://github.com/android/camera-samples

I set the face detection mode to FULL:

mPreviewRequestBuilder.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE,
                                    CameraMetadata.STATISTICS_FACE_DETECT_MODE_FULL);

I also checked

STATISTICS_INFO_MAX_FACE_COUNT and STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES:

int maxCount = characteristics.get(
        CameraCharacteristics.STATISTICS_INFO_MAX_FACE_COUNT);
int[] modes = characteristics.get(
        CameraCharacteristics.STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES);

Output: maxCount: 5, modes: [0, 2]
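Since the available modes vary per device, a defensive option (just a sketch, not from the sample) is to pick the highest mode the device reports instead of hard-coding FULL:

// Sketch: use the best face-detect mode the device actually advertises,
// falling back to OFF when the array is missing or empty.
int[] availableModes = characteristics.get(
        CameraCharacteristics.STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES);
int faceDetectMode = CameraMetadata.STATISTICS_FACE_DETECT_MODE_OFF;
if (availableModes != null) {
    for (int m : availableModes) {
        faceDetectMode = Math.max(faceDetectMode, m); // OFF(0) < SIMPLE(1) < FULL(2)
    }
}
mPreviewRequestBuilder.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE, faceDetectMode);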

My capture callback:

 private CameraCaptureSession.CaptureCallback mCaptureCallback
 = new CameraCaptureSession.CaptureCallback() {

    private void process(CaptureResult result) {
                Integer mode = result.get(CaptureResult.STATISTICS_FACE_DETECT_MODE);
                Face [] faces = result.get(CaptureResult.STATISTICS_FACES);
                if(faces != null && mode != null)
                    Log.e("tag", "faces : " + faces.length + " , mode : " + mode ); 
    }

    @Override
    public void onCaptureProgressed(CameraCaptureSession session, CaptureRequest request,
                                    CaptureResult partialResult) {
        process(partialResult);
    }

    @Override
    public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
                                   TotalCaptureResult result) {
        process(result);
    }
};

Output: faces : 0 , mode : 2

 public static final int STATISTICS_FACE_DETECT_MODE_FULL = 2;

The faces array length is always 0. It looks like it isn't detecting faces properly, or I'm missing something.

I know about the FaceDetector approach. I just want to check how it works with the new camera2 Face.


5 Answers


You can take a look at Google's ML Kit:

Detect faces with ML Kit on Android

There is also a related sample application: vision-quickstart

Or, if you are using camera2, there is a working example: Camera2Vision
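For reference, a minimal sketch of the ML Kit call (assuming the com.google.mlkit:face-detection dependency and a Bitmap named bitmap; these names are illustrative, not taken from the linked samples):

// Sketch: run ML Kit face detection on a Bitmap.
FaceDetectorOptions options = new FaceDetectorOptions.Builder()
        .setPerformanceMode(FaceDetectorOptions.PERFORMANCE_MODE_FAST)
        .build();
FaceDetector detector = FaceDetection.getClient(options);
InputImage image = InputImage.fromBitmap(bitmap, /* rotationDegrees */ 0);
detector.process(image)
        .addOnSuccessListener(faces -> Log.d("tag", "faces: " + faces.size()))
        .addOnFailureListener(e -> Log.e("tag", "detection failed", e));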

Answered 2021-10-12T13:20:15.517

My attempt was on Android 5.0 (API 21). After updating to 5.1 (API 22), it started working without any code changes.

Answered 2015-05-20T20:38:02.133

I think your phone may not work with Google's face detection. Are you sure it uses HAL3 and can use the camera2 API?
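One quick thing to verify (a small sketch, assuming characteristics comes from CameraManager.getCameraCharacteristics(cameraId)) is the supported hardware level; LEGACY devices run camera2 on top of the old HAL and often leave the face statistics empty:

// Sketch: LEGACY means the camera2 API is emulated on top of the old HAL.
Integer level = characteristics.get(
        CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
if (level != null && level == CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
    Log.w("tag", "LEGACY camera2 device - face statistics may not be populated");
}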

For example, in my code I'm using face detection without any problem:

 private CameraCaptureSession.CaptureCallback mPhotoCaptureCallback
            = new CameraCaptureSession.CaptureCallback() {
//more code...
  private void process(CaptureResult result) {
            switch (mState) {
                case STATE_PREVIEW: {
                    checkFaces(result.get(CaptureResult.STATISTICS_FACES));
                   //more code....
                    break;
                }
//more code...
}

Here is the checkFaces method:

 private void checkFaces(Face[] faces) {
    if (faces != null) {
        CameraUtil.CustomFace[] mMappedCustomFaces;
        mMappedCustomFaces = computeFacesFromCameraCoordinates(faces);
        if (faces != null && faces.length > 0) {
            mHandler.sendEmptyMessage(SHOW_FACES_MSG);
            mLastTimeRenderingFaces = System.currentTimeMillis();
        }
    } else {
        if (System.currentTimeMillis() > (mLastTimeRenderingFaces + 100)) {
            mHandler.sendEmptyMessage(HIDE_FACES_MSG);
        }
    }
}

My custom Face class:

     //    public static class CustomFace extends Camera.CustomFace{
public static class CustomFace {
    private int score = 0;
    private Rect rect = null;

    public CustomFace(Rect rect, int score) {
        this.score = score;
        this.rect = rect;
    }

    public int getScore() {
        return score;
    }

    public Rect getBounds() {
        return rect;
    }
}

Finally, with this approach you can draw the faces correctly (you could use the default Android Face, but the rectangles don't work well at 4:3 or 16:9 sizes, or when the phone is rotated):

  public static RectF rectToRectF(Rect r) {
    return new RectF(r.left, r.top, r.right, r.bottom);
}

     private CameraFaceUtil.CustomFace[] computeFacesFromCameraCoordinates(Face[] faces) {
        CameraFaceUtil.CustomFace[] mappedFacesList = new CameraFaceUtil.CustomFace[faces.length];

        mCameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);

        float toStandardAspectRatio = ((float) mPreviewRect.bottom / (float) mPreviewRect.right) / AutoFitTextureView.RATIO_STANDARD;
//
        for (int i = 0; i < faces.length; i++) {

            RectF mappedRect = new RectF();
            Log.i(TAG, "[computeFacesFromCameraCoordinates] toStandardAspectRatio: " + toStandardAspectRatio);
            Log.i(TAG, "[computeFacesFromCameraCoordinates] preview rect: " + mPreviewRect);
            Log.i(TAG, "[computeFacesFromCameraCoordinates] raw rect: " + faces[i].getBounds());

            mCameraToPreviewMatrix.mapRect(mappedRect, CameraUtil.rectToRectF(faces[i].getBounds()));

            Log.i(TAG, "[computeFacesFromCameraCoordinates] mapped rect: " + mappedRect);

            Rect auxRect = new Rect(CameraUtil.rectFToRect(mappedRect));


            Log.i(TAG, "[computeFacesFromCameraCoordinates] aux rect: " + auxRect);

            int cameraSensorOrientation = mCameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
            Log.i(TAG, "[computeFacesFromCameraCoordinates] cameraSensorOrientation: " + cameraSensorOrientation);
            switch (cameraSensorOrientation) {
                case 90:
                    mappedRect.top = auxRect.left;
                    mappedRect.bottom = auxRect.right;
                    mappedRect.left = (mPreviewRect.right - auxRect.bottom);
                    mappedRect.right = (mPreviewRect.right - auxRect.top);
                    break;

                case 180:
                    mappedRect.top = (mPreviewRect.bottom - auxRect.bottom) * toStandardAspectRatio;
                    mappedRect.bottom = (mPreviewRect.bottom - auxRect.top) * toStandardAspectRatio;
                    mappedRect.left = (mPreviewRect.right - auxRect.right) * toStandardAspectRatio;
                    mappedRect.right = (mPreviewRect.right - auxRect.left) * toStandardAspectRatio;
                    break;

                case 270:
                    mappedRect.top = (mPreviewRect.bottom - auxRect.right) * toStandardAspectRatio;
                    mappedRect.bottom = (mPreviewRect.bottom - auxRect.left) * toStandardAspectRatio;
                    mappedRect.left = auxRect.top;
                    mappedRect.right = auxRect.bottom;
                    break;
            }

            Log.i(TAG, "[computeFacesFromCameraCoordinates] rotated by camera driver orientation rect without scale: "
                    + mappedRect + ",  with score: " + faces[i].getScore());

            float topOffset = mappedRect.top;
            float leftOffset = mappedRect.left;

            mappedRect.top = mappedRect.top * toStandardAspectRatio;
            mappedRect.bottom = mappedRect.bottom * toStandardAspectRatio;
            mappedRect.left = mappedRect.left * toStandardAspectRatio;
            mappedRect.right = mappedRect.right * toStandardAspectRatio;


            Log.i(TAG, "[computeFacesFromCameraCoordinates] rotated by camera driver orientation rect with scale: "
                    + mappedRect + ",  with score: " + faces[i].getScore());

            topOffset = mappedRect.top - topOffset;
            leftOffset = mappedRect.left - leftOffset;

            mappedRect.top -= topOffset /*- (mMirror ? mPreviewRect.height() : 0)*/;
            mappedRect.bottom -= topOffset /* - (mMirror ? mPreviewRect.height() : 0)*/;
            mappedRect.left -= leftOffset;
            mappedRect.right -= leftOffset;

            Log.i(TAG, "[computeFacesFromCameraCoordinates] rotated by camera driver orientation rect with offset: "
                    + mappedRect + " topOffset " + topOffset + " leftOffset " + leftOffset);

            // set the new values to the mapping array to get rendered
            mappedFacesList[i] = new CameraFaceUtil.CustomFace(CameraUtil.rectFToRect(mappedRect), faces[i].getScore());
        }

        return mappedFacesList;

    }
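CameraUtil.rectFToRect isn't shown above; a plausible counterpart of rectToRectF (an assumption, not the actual helper) would be:

 public static Rect rectFToRect(RectF r) {
    // Assumed helper: round the float coordinates back into an integer Rect.
    Rect result = new Rect();
    r.round(result);
    return result;
}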

What I'm doing is drawing the faces based on the screen ratio and size. Feel free to ask if you need anything else about the camera2 API.
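mCameraToPreviewMatrix isn't shown above either; a minimal sketch of one way to build such a sensor-to-preview mapping (assumed, with previewWidth/previewHeight as placeholders) is:

// Sketch (assumption): map sensor active-array coordinates into preview coordinates.
Rect activeArray = mCameraCharacteristics.get(
        CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
Matrix cameraToPreviewMatrix = new Matrix();
cameraToPreviewMatrix.setRectToRect(
        new RectF(activeArray),                        // source: sensor coordinates
        new RectF(0, 0, previewWidth, previewHeight),  // destination: preview coordinates
        Matrix.ScaleToFit.FILL);
// For a front camera you would also mirror horizontally, and rotate by
// SENSOR_ORIENTATION so the rectangles line up with the display.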

Answered 2016-06-30T07:47:21.170

I found that you can only process the result to show the face count inside the STATE_PREVIEW case. Change from

private CameraCaptureSession.CaptureCallback mCaptureCallback
        = new CameraCaptureSession.CaptureCallback() {

    private void process(CaptureResult result) {
        Integer mode = result.get(CaptureResult.STATISTICS_FACE_DETECT_MODE);
        Face[] faces = result.get(CaptureResult.STATISTICS_FACES);
        if(faces != null && mode != null) {
            Log.e("tag", "faces : " + faces.length + " , mode : " + mode);
        }

        switch (mState) {
            case STATE_PREVIEW: {
                // We have nothing to do when the camera preview is working normally.
                break;
            }
...

to

private CameraCaptureSession.CaptureCallback mCaptureCallback
        = new CameraCaptureSession.CaptureCallback() {

    private void process(CaptureResult result) {


        switch (mState) {
            case STATE_PREVIEW: {
              Face[] faces = result.get(CaptureResult.STATISTICS_FACES);
              if (faces != null && faces.length > 0) {
                  Log.e("tag", "faces : " + faces.length);
              }
                break;
            }

Please try this and see if it works.

Answered 2017-07-27T08:27:22.050

Are you sure your phone is on API level 21? STATISTICS_FACES wasn't added until that API level; earlier phones use a separate field.

You should try running dumpsys. It gives a lot of information, including the latest CaptureResult as part of the full state dump. I like piping it through grep for the string 'face':

adb shell dumpsys media.camera | grep 'face' -A1 
Answered 2021-02-13T15:05:13.267