FacePreview on Android with native libraries

Android ships with its own face detection, so we don't need OpenCV to find faces in still pictures or in the live camera preview.
In this example I used android.media.FaceDetector to find faces in a Bitmap that I had to build from the raw preview data. The biggest problem with this approach is the conversion: FaceDetector only accepts RGB_565 bitmaps, while most Android cameras deliver their preview frames in a YUV format. I tested this on my Huawei Honor 2, where the preview arrives as NV21. If your device reports a different preview format, feel free to adapt the code, post your conversion function in the comments, or ask for help if needed.
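If you would rather not write the YUV math yourself, a possible alternative (not what the listing below does) is to let the platform convert the frame through android.graphics.YuvImage, at the cost of a JPEG round trip. A minimal sketch, assuming the preview really is NV21 and that data, width and height come from the preview callback (it also needs imports for java.io.ByteArrayOutputStream, android.graphics.YuvImage and android.graphics.BitmapFactory):

// Sketch: convert one NV21 preview frame to a Bitmap via a JPEG round trip.
ByteArrayOutputStream out = new ByteArrayOutputStream();
YuvImage yuv = new YuvImage(data, ImageFormat.NV21, width, height, null);
yuv.compressToJpeg(new Rect(0, 0, width, height), 80, out); // 80 = JPEG quality
byte[] jpeg = out.toByteArray();
Bitmap preview = BitmapFactory.decodeByteArray(jpeg, 0, jpeg.length);

This is short, but compressing and decoding a JPEG on every frame is usually too slow for a live preview, which is why the code below does the conversion by hand.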
The second problem is speed. I had to subsample the image to make face detection fast enough: at HD resolution (1280×720) one frame takes about 1.5 seconds, while at 320×180 it takes only about 80 ms. That is roughly what you would expect, since a 4× subsample in each dimension means 16× fewer pixels to process.
That is pretty much it, so here is the code:

/**
 *  MainActivity.java
 *  Filip Bednárik <drndos@drndos.sk>
 *  @ 22.5.2013 18:03
 */
package sk.drndos.android.facedetectionandroid;

import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.Bitmap.Config;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.ImageFormat;
import android.graphics.Paint;
import android.graphics.PointF;
import android.graphics.Rect;
import android.hardware.Camera;
import android.media.FaceDetector;
import android.media.FaceDetector.Face;
import android.os.Bundle;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.widget.FrameLayout;
import java.io.IOException;
import java.util.List;

public class MainActivity extends Activity {

    private FrameLayout layout;
    private FaceView faceView;
    private Preview mPreview;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        // Hide the window title.
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        super.onCreate(savedInstanceState);

        getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);

        // Create our Preview view and set it as the content of our activity.
        layout = new FrameLayout(this);
        faceView = new FaceView(this);
        mPreview = new Preview(this, faceView);
        layout.addView(mPreview);
        layout.addView(faceView);
        setContentView(layout);
    }
}

// ----------------------------------------------------------------------
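// Preview owns the Camera: it opens the camera when the surface is created,
// picks a preview size, registers the frame callback with a reusable buffer,
// and releases the camera when the surface is destroyed.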
class Preview extends SurfaceView implements SurfaceHolder.Callback {

    SurfaceHolder mHolder;
    Camera mCamera;
    Camera.PreviewCallback previewCallback;

    Preview(MainActivity context, Camera.PreviewCallback previewCallback) {
        super(context);
        this.previewCallback = previewCallback;
        // Install a SurfaceHolder.Callback so we get notified when the
        // underlying surface is created and destroyed.
        mHolder = getHolder();
        mHolder.addCallback(this);
        // Deprecated (and ignored) since API 11, but needed on pre-3.0 devices.
        mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    }

    public void surfaceCreated(SurfaceHolder holder) {
        // The Surface has been created, acquire the camera and tell it where
        // to draw.
        mCamera = Camera.open();
        try {
            mCamera.setPreviewDisplay(holder);
        } catch (IOException exception) {
            mCamera.release();
            mCamera = null;
            // TODO: add more exception handling logic here
        }
    }

    public void surfaceDestroyed(SurfaceHolder holder) {
        // Surface will be destroyed when we return, so stop the preview.
        // Because the Camera object is not a shared resource, it's very
        // important to release it when the activity is paused.
        mCamera.stopPreview();
        mCamera.release();
        mCamera = null;
    }

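    // Pick the supported preview size whose aspect ratio is closest to the
    // surface's and whose height is nearest the surface height; if nothing
    // matches the aspect ratio, fall back to the nearest height.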
    private Camera.Size getOptimalPreviewSize(List<Camera.Size> sizes, int w, int h) {
        final double ASPECT_TOLERANCE = 0.05;
        double targetRatio = (double) w / h;
        if (sizes == null) {
            return null;
        }

        Camera.Size optimalSize = null;
        double minDiff = Double.MAX_VALUE;

        int targetHeight = h;

        // Try to find a size that matches both the aspect ratio and the height
        for (Camera.Size size : sizes) {
            double ratio = (double) size.width / size.height;
            if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) {
                continue;
            }
            if (Math.abs(size.height - targetHeight) < minDiff) {
                optimalSize = size;
                minDiff = Math.abs(size.height - targetHeight);
            }
        }

        // Cannot find a size matching the aspect ratio, so ignore the requirement
        if (optimalSize == null) {
            minDiff = Double.MAX_VALUE;
            for (Camera.Size size : sizes) {
                if (Math.abs(size.height - targetHeight) < minDiff) {
                    optimalSize = size;
                    minDiff = Math.abs(size.height - targetHeight);
                }
            }
        }
        return optimalSize;
    }

    public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
        // Now that the size is known, set up the camera parameters and begin
        // the preview.
        Camera.Parameters parameters = mCamera.getParameters();

        List<Camera.Size> sizes = parameters.getSupportedPreviewSizes();
        for (Camera.Size size : sizes) {
            Log.w("FACEPREVIEW", "Supported width: " + size.width + " height: " + size.height);
        }
        Camera.Size optimalSize = getOptimalPreviewSize(sizes, w, h);
        parameters.setPreviewSize(optimalSize.width, optimalSize.height);

        mCamera.setParameters(parameters);
        if (previewCallback != null) {
            mCamera.setPreviewCallbackWithBuffer(previewCallback);
            Camera.Size size = parameters.getPreviewSize();
            byte[] data = new byte[size.width * size.height
                    * ImageFormat.getBitsPerPixel(parameters.getPreviewFormat()) / 8];
            mCamera.addCallbackBuffer(data);
        }
        mCamera.startPreview();
    }
}

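// FaceView receives each preview frame, converts the NV21 data into a small
// RGB_565 bitmap, runs android.media.FaceDetector on it, and draws the
// detected face rectangle (scaled back up by SUBSAMPLE) over the preview.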
class FaceView extends View implements Camera.PreviewCallback {

    public String TAG = "FACEPREVIEW";
    Rect r = null;
    public static final int SUBSAMPLE = 4;

    // Standard BT.601 YUV -> RGB conversion with clamping to [0, 255];
    // u and v are expected to already have 128 subtracted.
    private static int convertYUVtoRGB(int y, int u, int v) {
        int r, g, b;
        r = y + (int) (1.402f * v);
        g = y - (int) (0.344f * u + 0.714f * v);
        b = y + (int) (1.772f * u);
        r = r > 255 ? 255 : r < 0 ? 0 : r;
        g = g > 255 ? 255 : g < 0 ? 0 : g;
        b = b > 255 ? 255 : b < 0 ? 0 : b;
        // Pack as an ARGB_8888 color int: alpha, red, green, blue.
        return 0xff000000 | (r << 16) | (g << 8) | b;
    }

    public FaceView(MainActivity context) {
        super(context);
    }

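    // Called for every preview frame with the raw NV21 bytes: convert to a
    // downscaled bitmap, run face detection, request a redraw, and hand the
    // buffer back to the camera.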
    public void onPreviewFrame(byte[] data, Camera camera) {
        try {
            long time = System.currentTimeMillis();
            Log.w(TAG, "Starting conversion");
            Camera.Size size = camera.getParameters().getPreviewSize();
            Log.w(TAG, "Width: " + size.width + " Height: " + size.height + " Datalength: " + data.length);
            Log.w(TAG, "Decoding NV21 frame");
            //Bitmap bitmap = BitmapFactory.decodeByteArray(data, 0, data.length);
            Bitmap bitmap = Bitmap.createBitmap(convertIt(data, size.width, size.height), size.width, size.height, Config.ARGB_8888);

            bitmap = Bitmap.createScaledBitmap(bitmap, (int)((float)size.width/(float)SUBSAMPLE), (int)((float)size.height/(float)SUBSAMPLE), false);
            Bitmap bitmap2 = bitmap.copy(Config.RGB_565, true);

            Log.w(TAG, "Converting took " + (System.currentTimeMillis() - time) + " ms");
            time = System.currentTimeMillis();
            r = findFace(bitmap2);
            postInvalidate();
            Log.w(TAG, "Finding face took " + (System.currentTimeMillis() - time) + " ms");
            camera.addCallbackBuffer(data);
        } catch (RuntimeException e) {
            Log.w(TAG, e);
            // The camera has probably just been released, ignore.
        }

    }

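    // Decode an NV21 frame (a full-resolution Y plane followed by interleaved
    // V/U samples at quarter resolution) into an ARGB_8888 pixel array.
    // Each loop iteration handles a 2x2 block of pixels sharing one chroma pair.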
    public static int[] convertIt(byte[] data, int width, int height) {
        int size = width * height;
        int offset = size;
        int[] pixels = new int[size];
        int u, v, y1, y2, y3, y4;
        for (int i = 0, k = 0; i < size; i += 2, k += 2) {
            y1 = data[i] & 0xff;
            y2 = data[i + 1] & 0xff;
            y3 = data[width + i] & 0xff;
            y4 = data[width + i + 1] & 0xff;

            // NV21 interleaves chroma as V first, then U.
            v = data[offset + k] & 0xff;
            u = data[offset + k + 1] & 0xff;
            u = u - 128;
            v = v - 128;

            pixels[i] = convertYUVtoRGB(y1, u, v);
            pixels[i + 1] = convertYUVtoRGB(y2, u, v);
            pixels[width + i] = convertYUVtoRGB(y3, u, v);
            pixels[width + i + 1] = convertYUVtoRGB(y4, u, v);

            if (i != 0 && (i + 2) % width == 0) {
                i += width;
            }
        }
        return pixels;
    }

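    // Draw the label text and, if a face was found, its bounding rectangle
    // (findFace already returns it in preview coordinates).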
    @Override
    protected void onDraw(Canvas canvas) {
        Paint paint = new Paint();
        paint.setColor(Color.RED);
        paint.setTextSize(20);
        String s = "FacePreview - This side up.";
        float textWidth = paint.measureText(s);
        canvas.drawText(s, (getWidth() - textWidth) / 2, 20, paint);

        if (r != null) {
            paint.setStrokeWidth(2);
            paint.setStyle(Paint.Style.STROKE);
            int x = r.centerX(), y = r.centerY(), w = r.width(), h = r.height();
            x = x - (w / 2);
            y = y - (h / 2);
            canvas.drawRect(x, y, (x + w), (y + h), paint);
            r = null;
        }
    }

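    // Run FaceDetector on the downscaled RGB_565 bitmap and return the face's
    // bounding box scaled back up to preview coordinates, or null if no face
    // was found.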
    public Rect findFace(Bitmap bmp) {
        // Ask for 1 face
        Face[] faces = new FaceDetector.Face[1];
        FaceDetector detector = new FaceDetector(bmp.getWidth(), bmp.getHeight(), 1);
        int count = detector.findFaces(bmp, faces);

        Face face = null;

        if (count > 0) {
            face = faces[0];

            PointF midEyes = new PointF();
            face.getMidPoint(midEyes);
            Log.w(TAG,
                    "Found face. Confidence: " + face.confidence() + ". Eye Distance: " + face.eyesDistance() + " Pose: ("
                    + face.pose(FaceDetector.Face.EULER_X) + "," + face.pose(FaceDetector.Face.EULER_Y) + ","
                    + face.pose(FaceDetector.Face.EULER_Z) + "). Eye Midpoint: (" + midEyes.x + "," + midEyes.y + ")");

            float eyedist = face.eyesDistance();
            PointF lt = new PointF(midEyes.x - eyedist * 2.0f, midEyes.y - eyedist * 2.5f);
            // Create rectangle around face.  Create a box based on the eyes and add some padding.
            // The ratio of head height to width is generally 9/5 but that makes the rect a bit too tall.
            return new Rect(
                    Math.max((int) (lt.x*SUBSAMPLE), 0),
                    Math.max((int) (lt.y*SUBSAMPLE), 0),
                    Math.min((int) (lt.x + eyedist * 4.0f)*SUBSAMPLE, bmp.getWidth()*SUBSAMPLE),
                    Math.min((int) (lt.y + eyedist * 5.5f)*SUBSAMPLE, bmp.getHeight()*SUBSAMPLE));
        } else {
            Log.w(TAG, "No face found :(");
        }

        return null;
    }
}
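One thing the listing does not show: the camera permission. Without a <uses-permission android:name="android.permission.CAMERA" /> entry in AndroidManifest.xml, Camera.open() will fail, so make sure it is declared before trying this out.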
