Face Recognition on Android Based on an RTMP Video Stream (Part 2)

In Part 1 we used FFmpeg to decode the RTMP stream into NV21 data. This part covers how to render that NV21 data with OpenGL, how to run face detection on it, and how to draw the face rectangles.

I. Rendering the Data with OpenGL

Rendering with OpenGL requires a vertex shader and a fragment shader. The helper class GLUtil below defines a normal, a grayscale and an emboss effect for reference. Beyond that, the textures have to be created according to the frame width and height. Finally, the raw frame is passed in, its Y, U and V planes are extracted and uploaded, and the quad is drawn. The custom RtmpGLSurfaceView below shows the whole flow.

public class RtmpGLSurfaceView extends GLSurfaceView {
    private static final String TAG = "RtmpGLSurfaceView";
    // Width/height of the source video frame
    private int frameWidth, frameHeight;
    private boolean isMirror;
    private int rotateDegree = 0;
    // Whether frame data has been received; avoids the green background shown before the first frame arrives (y, u, v all zero)
    private boolean dataInput = false;
    // Corner radius
    private int radius = 0;

    private ByteBuffer yBuf = null, uBuf = null, vBuf = null;
    // Texture ids
    private int[] yTexture = new int[1];
    private int[] uTexture = new int[1];
    private int[] vTexture = new int[1];

    // YUV planes
    private byte[] yArray;
    private byte[] uArray;
    private byte[] vArray;

    private static final int FLOAT_SIZE_BYTES = 4;
    // Fragment shader used for the preview effect
    private String fragmentShaderCode = GLUtil.FRAG_SHADER_NORMAL;

    private FloatBuffer squareVertices = null;
    private FloatBuffer coordVertices = null;
    private boolean rendererReady = false;
    float[] coordVertice = null;

    public RtmpGLSurfaceView(Context context) {
        this(context, null);
    }

    public RtmpGLSurfaceView(Context context, AttributeSet attrs) {
        super(context, attrs);
        setEGLContextClientVersion(2);
        // Attach the renderer to the GLSurfaceView
        setRenderer(new YUVRenderer());
        // Only redraw the view when new frame data arrives
        setRenderMode(RENDERMODE_WHEN_DIRTY);
        setOutlineProvider(new ViewOutlineProvider() {
            @Override
            public void getOutline(View view, Outline outline) {
                Rect rect = new Rect(0, 0, view.getMeasuredWidth(), view.getMeasuredHeight());
                outline.setRoundRect(rect, radius);
            }
        });
        setClipToOutline(true);
    }

    public void turnRound() {
        invalidateOutline();
    }

    public int getRadius() {
        return radius;
    }

    public void setRadius(int radius) {
        this.radius = radius;
    }

    /**
     * Set a different fragment shader to get a different preview effect
     *
     * @param fragmentShaderCode fragment shader source code
     */
    public void setFragmentShaderCode(String fragmentShaderCode) {
        this.fragmentShaderCode = fragmentShaderCode;
    }

    public void init(boolean isMirror, int rotateDegree, int frameWidth, int frameHeight) {
        if (this.frameWidth == frameWidth
                && this.frameHeight == frameHeight
                && this.rotateDegree == rotateDegree
                && this.isMirror == isMirror) {
            return;
        }
        dataInput = false;
        this.frameWidth = frameWidth;
        this.frameHeight = frameHeight;
        this.rotateDegree = rotateDegree;
        this.isMirror = isMirror;
        yArray = new byte[this.frameWidth * this.frameHeight];
        uArray = new byte[this.frameWidth * this.frameHeight / 4];
        vArray = new byte[this.frameWidth * this.frameHeight / 4];

        int yFrameSize = this.frameHeight * this.frameWidth;
        int uvFrameSize = yFrameSize >> 2;
        yBuf = ByteBuffer.allocateDirect(yFrameSize);
        yBuf.order(ByteOrder.nativeOrder()).position(0);

        uBuf = ByteBuffer.allocateDirect(uvFrameSize);
        uBuf.order(ByteOrder.nativeOrder()).position(0);

        vBuf = ByteBuffer.allocateDirect(uvFrameSize);
        vBuf.order(ByteOrder.nativeOrder()).position(0);
        // Vertex coordinates
        squareVertices = ByteBuffer
                .allocateDirect(GLUtil.SQUARE_VERTICES.length * FLOAT_SIZE_BYTES)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer();
        squareVertices.put(GLUtil.SQUARE_VERTICES).position(0);
        // Texture coordinates
        if (isMirror) {
            switch (rotateDegree) {
                case 0:
                    coordVertice = GLUtil.MIRROR_COORD_VERTICES;
                    break;
                case 90:
                    coordVertice = GLUtil.ROTATE_90_MIRROR_COORD_VERTICES;
                    break;
                case 180:
                    coordVertice = GLUtil.ROTATE_180_MIRROR_COORD_VERTICES;
                    break;
                case 270:
                    coordVertice = GLUtil.ROTATE_270_MIRROR_COORD_VERTICES;
                    break;
                default:
                    break;
            }
        } else {
            switch (rotateDegree) {
                case 0:
                    coordVertice = GLUtil.COORD_VERTICES;
                    break;
                case 90:
                    coordVertice = GLUtil.ROTATE_90_COORD_VERTICES;
                    break;
                case 180:
                    coordVertice = GLUtil.ROTATE_180_COORD_VERTICES;
                    break;
                case 270:
                    coordVertice = GLUtil.ROTATE_270_COORD_VERTICES;
                    break;
                default:
                    break;
            }
        }
        coordVertices = ByteBuffer.allocateDirect(coordVertice.length * FLOAT_SIZE_BYTES).order(ByteOrder.nativeOrder()).asFloatBuffer();
        coordVertices.put(coordVertice).position(0);
    }

    /**
     * Create the OpenGL program and bind the GLSL variables
     *
     * @param fragmentShaderCode fragment shader source code
     */
    private void createGLProgram(String fragmentShaderCode) {
        int programHandleMain = GLUtil.createShaderProgram(fragmentShaderCode);
        if (programHandleMain != -1) {
            // Use the shader program
            GLES20.glUseProgram(programHandleMain);
            // Get the vertex shader attributes
            int glPosition = GLES20.glGetAttribLocation(programHandleMain, "attr_position");
            int textureCoord = GLES20.glGetAttribLocation(programHandleMain, "attr_tc");

            // Get the fragment shader uniforms
            int ySampler = GLES20.glGetUniformLocation(programHandleMain, "ySampler");
            int uSampler = GLES20.glGetUniformLocation(programHandleMain, "uSampler");
            int vSampler = GLES20.glGetUniformLocation(programHandleMain, "vSampler");

            // Assign values to the uniforms
            /**
             * GLES20.GL_TEXTURE0 is bound to ySampler
             * GLES20.GL_TEXTURE1 is bound to uSampler
             * GLES20.GL_TEXTURE2 is bound to vSampler
             *
             * In other words, the second argument of glUniform1i is the texture unit index
             */
            GLES20.glUniform1i(ySampler, 0);
            GLES20.glUniform1i(uSampler, 1);
            GLES20.glUniform1i(vSampler, 2);

            GLES20.glEnableVertexAttribArray(glPosition);
            GLES20.glEnableVertexAttribArray(textureCoord);

            /**
             * Feed the vertex shader data
             */
            squareVertices.position(0);
            GLES20.glVertexAttribPointer(glPosition, GLUtil.COUNT_PER_SQUARE_VERTICE, GLES20.GL_FLOAT, false, 8, squareVertices);
            coordVertices.position(0);
            GLES20.glVertexAttribPointer(textureCoord, GLUtil.COUNT_PER_COORD_VERTICES, GLES20.GL_FLOAT, false, 8, coordVertices);
        }
    }

    public class YUVRenderer implements Renderer {
        private void initRenderer() {
            rendererReady = false;
            createGLProgram(fragmentShaderCode);

            // Enable texturing
            GLES20.glEnable(GLES20.GL_TEXTURE_2D);
            // Create the textures
            createTexture(frameWidth, frameHeight, GLES20.GL_LUMINANCE, yTexture);
            createTexture(frameWidth / 2, frameHeight / 2, GLES20.GL_LUMINANCE, uTexture);
            createTexture(frameWidth / 2, frameHeight / 2, GLES20.GL_LUMINANCE, vTexture);

            rendererReady = true;
        }

        @Override
        public void onSurfaceCreated(GL10 unused, EGLConfig config) {
            initRenderer();
        }

        // Create a texture with the given width, height and format
        private void createTexture(int width, int height, int format, int[] textureId) {
            // Generate the texture
            GLES20.glGenTextures(1, textureId, 0);
            // Bind the texture
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId[0]);
            /**
             * {@link GLES20#GL_TEXTURE_WRAP_S} is the wrap mode in the horizontal direction
             * {@link GLES20#GL_TEXTURE_WRAP_T} is the wrap mode in the vertical direction
             *
             *  {@link GLES20#GL_REPEAT}: repeat
             *  {@link GLES20#GL_MIRRORED_REPEAT}: mirrored repeat
             *  {@link GLES20#GL_CLAMP_TO_EDGE}: clamp to the edge
             *
             * For example, with {@link GLES20#GL_REPEAT}:
             *
             *             squareVertices           coordVertices
             *             -1.0f, -1.0f,            1.0f, 1.0f,
             *             1.0f, -1.0f,             1.0f, 0.0f,         ->          same as a TextureView preview
             *             -1.0f, 1.0f,             0.0f, 1.0f,
             *             1.0f, 1.0f               0.0f, 0.0f
             *
             *             squareVertices           coordVertices
             *             -1.0f, -1.0f,            2.0f, 2.0f,
             *             1.0f, -1.0f,             2.0f, 0.0f,         ->          compared with the TextureView preview, the image is tiled into 4 identical copies (bottom-left, bottom-right, top-left, top-right)
             *             -1.0f, 1.0f,             0.0f, 2.0f,
             *             1.0f, 1.0f               0.0f, 0.0f
             */
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_REPEAT);
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_REPEAT);
            /**
             * {@link GLES20#GL_TEXTURE_MIN_FILTER} applies when the texture is rendered smaller than its original size
             * {@link GLES20#GL_TEXTURE_MAG_FILTER} applies when the texture is rendered larger than its original size
             *
             *  {@link GLES20#GL_NEAREST}: use the color of the texel closest to the coordinate as the pixel color
             *  {@link GLES20#GL_LINEAR}: take the closest texels and compute a weighted average as the pixel color
             */
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
            GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, format, width, height, 0, format, GLES20.GL_UNSIGNED_BYTE, null);
        }

        @Override
        public void onDrawFrame(GL10 gl) {
            // Activate, bind and upload data for each texture in turn
            if (dataInput) {
                //y
                GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
                GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yTexture[0]);
                GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D,
                        0,
                        0,
                        0,
                        frameWidth,
                        frameHeight,
                        GLES20.GL_LUMINANCE,
                        GLES20.GL_UNSIGNED_BYTE,
                        yBuf);

                //u
                GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
                GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, uTexture[0]);
                GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D,
                        0,
                        0,
                        0,
                        frameWidth >> 1,
                        frameHeight >> 1,
                        GLES20.GL_LUMINANCE,
                        GLES20.GL_UNSIGNED_BYTE,
                        uBuf);

                //v
                GLES20.glActiveTexture(GLES20.GL_TEXTURE2);
                GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, vTexture[0]);
                GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D,
                        0,
                        0,
                        0,
                        frameWidth >> 1,
                        frameHeight >> 1,
                        GLES20.GL_LUMINANCE,
                        GLES20.GL_UNSIGNED_BYTE,
                        vBuf);
                // Draw once all planes have been uploaded
                GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
            }
        }

        @Override
        public void onSurfaceChanged(GL10 unused, int width, int height) {
            GLES20.glViewport(0, 0, width, height);
        }
    }

    /**
     * Pass in an NV21 frame to refresh the display
     *
     * @param data NV21 data
     */
    public void refreshFrameNV21(byte[] data) {
        if (rendererReady) {
            yBuf.clear();
            uBuf.clear();
            vBuf.clear();
            putNV21(data, frameWidth, frameHeight);
            dataInput = true;
            requestRender();
        }
    }

    /**
     * Pass in a YV12 frame to refresh the display
     *
     * @param data YV12 data
     */
    public void refreshFrameYV12(byte[] data) {
        if (rendererReady) {
            yBuf.clear();
            uBuf.clear();
            vBuf.clear();
            putYV12(data, frameWidth, frameHeight);
            dataInput = true;
            requestRender();
        }
    }

    /**
     * Extract the Y, U and V planes from NV21 data
     *
     * @param src    NV21 frame data
     * @param width  width
     * @param height height
     */
    private void putNV21(byte[] src, int width, int height) {

        int ySize = width * height;
        int frameSize = ySize * 3 / 2;

        // Copy the Y plane
        System.arraycopy(src, 0, yArray, 0, ySize);

        int k = 0;

        // De-interleave the VU plane into separate V and U arrays
        int index = ySize;
        while (index < frameSize) {
            vArray[k] = src[index++];
            uArray[k++] = src[index++];
        }
        yBuf.put(yArray).position(0);
        uBuf.put(uArray).position(0);
        vBuf.put(vArray).position(0);
    }

    /**
     * Extract the Y, U and V planes from YV12 data
     *
     * @param src    YV12 frame data
     * @param width  width
     * @param height height
     */
    private void putYV12(byte[] src, int width, int height) {

        int ySize = width * height;

        // Copy the Y plane
        System.arraycopy(src, 0, yArray, 0, ySize);

        // YV12 is fully planar: the V plane follows the Y plane, then the U plane
        System.arraycopy(src, ySize, vArray, 0, ySize / 4);
        System.arraycopy(src, ySize + ySize / 4, uArray, 0, ySize / 4);
        yBuf.put(yArray).position(0);
        uBuf.put(uArray).position(0);
        vBuf.put(vArray).position(0);
    }
}

GLUtil holds the vertex shader and fragment shader definitions and the initialization code, for reference.

public class GLUtil {
    private static final String TAG = "GLUtil";

    /**
     * Vertex shader
     */
    private static String VERTEX_SHADER =
            "    attribute vec4 attr_position;\n" +
                    "    attribute vec2 attr_tc;\n" +
                    "    varying vec2 tc;\n" +
                    "    void main() {\n" +
                    "        gl_Position = attr_position;\n" +
                    "        tc = attr_tc;\n" +
                    "    }";

    /**
     * Fragment shader, normal effect
     */
    public static String FRAG_SHADER_NORMAL =
            "precision mediump float;\n" +
            "    varying vec2 tc;\n" +
                    "    uniform sampler2D ySampler;\n" +
                    "    uniform sampler2D uSampler;\n" +
                    "    uniform sampler2D vSampler;\n" +
                    "    const mat3 convertMat = mat3(1.0, 1.0, 1.0, 0, -0.344, 1.77, 1.403, -0.714,0);\n" +
                    "    void main()\n" +
                    "    {\n" +
                    "        vec3 yuv;\n" +
                    "        yuv.x = texture2D(ySampler, tc).r;\n" +
                    "        yuv.y = texture2D(uSampler, tc).r - 0.5;\n" +
                    "        yuv.z = texture2D(vSampler, tc).r - 0.5;\n" +
                    "        gl_FragColor = vec4(convertMat * yuv, 1.0);\n" +
                    "    }";
    /**
     * Fragment shader, grayscale effect. U and V data are not needed (the Java code can be changed accordingly: only the Y plane of the NV21 data has to be copied)
     */
    public static String FRAG_SHADER_GRAY =
            "precision mediump float;\n" +
            "    varying vec2 tc;\n" +
                    "    uniform sampler2D ySampler;\n" +
                    "    void main()\n" +
                    "    {\n" +
                    "        vec3 yuv;\n" +
                    "        yuv.x = texture2D(ySampler, tc).r;\n" +
                    "        gl_FragColor = vec4(vec3(yuv.x), 1.0);\n" +
                    "    }";
    /**
     * Fragment shader, emboss effect. U and V data are not needed (the Java code can be changed accordingly: only the Y plane of the NV21 data has to be copied)
     */
    public static String FRAG_SHADER_SCULPTURE =
            "precision mediump float;\n" +
                    "varying vec2 tc;\n" +
                    "    uniform sampler2D ySampler;\n" +
                    "    const vec2 texSize = vec2(100.0, 100.0);\n" +
                    "    const vec4 sculptureColor = vec4(0.5, 0.5, 0.5, 1.0);\n" +
                    "\n" +
                    "void main()\n" +
                    "{\n" +
                    "    vec2 upLeftCoord = vec2(tc.x-1.0/texSize.x, tc.y-1.0/texSize.y);\n" +
                    "    vec4 curColor = texture2D(ySampler, tc);\n" +
                    "    vec4 upLeftColor = texture2D(ySampler, upLeftCoord);\n" +
                    "    vec4 delColor = curColor - upLeftColor;\n" +
                    "    gl_FragColor = vec4(vec3(delColor), 0.0) + sculptureColor;\n" +
                    "}";

    // Every 2 values of SQUARE_VERTICES form one vertex
    static final int COUNT_PER_SQUARE_VERTICE = 2;
    // Every 2 values of COORD_VERTICES form one texture coordinate
    static final int COUNT_PER_COORD_VERTICES = 2;

    /**
     * Vertices of the display quad
     */
    static final float[] SQUARE_VERTICES = {
            -1.0f, -1.0f,
            1.0f, -1.0f,
            -1.0f, 1.0f,
            1.0f, 1.0f
    };
    /**
     * Original orientation
     * 0,1***********1,1
     * *             *
     * *             *
     * *             *
     * *             *
     * *             *
     * 0,0***********1,0
     */
    static final float[] COORD_VERTICES = {
            0.0f, 1.0f,
            1.0f, 1.0f,
            0.0f, 0.0f,
            1.0f, 0.0f
    };

    /**
     * Rotated 90° counterclockwise
     * 1,1***********1,0
     * *             *
     * *             *
     * *             *
     * *             *
     * *             *
     * 0,1***********0,0
     */
    static final float[] ROTATE_90_COORD_VERTICES = {
            1.0f, 1.0f,
            1.0f, 0.0f,
            0.0f, 1.0f,
            0.0f, 0.0f
    };

    /**
     * Rotated 180° counterclockwise
     * 0,1***********1,1
     * *             *
     * *             *
     * *             *
     * *             *
     * *             *
     * 0,0***********1,0
     */
    static final float[] ROTATE_180_COORD_VERTICES = {
            1.0f, 0.0f,
            0.0f, 0.0f,
            1.0f, 1.0f,
            0.0f, 1.0f
    };

    /**
     * Rotated 270° counterclockwise
     * 0,1***********1,1
     * *             *
     * *             *
     * *             *
     * *             *
     * *             *
     * 0,0***********1,0
     */
    static final float[] ROTATE_270_COORD_VERTICES = {
            0.0f, 0.0f,
            0.0f, 1.0f,
            1.0f, 0.0f,
            1.0f, 1.0f
    };

    /**
     * Mirrored
     * 1,1***********0,1
     * *             *
     * *             *
     * *             *
     * *             *
     * *             *
     * 1,0***********0,0
     */
    static final float[] MIRROR_COORD_VERTICES = {
            1.0f, 1.0f,
            0.0f, 1.0f,
            1.0f, 0.0f,
            0.0f, 0.0f
    };

    /**
     * Mirrored and rotated 90° counterclockwise
     * 0,1***********0,0
     * *             *
     * *             *
     * *             *
     * *             *
     * *             *
     * 1,1***********1,0
     */
    static final float[] ROTATE_90_MIRROR_COORD_VERTICES = {
            0.0f, 1.0f,
            0.0f, 0.0f,
            1.0f, 1.0f,
            1.0f, 0.0f
    };
    /**
     * Mirrored and rotated 180° counterclockwise
     * 1,0***********0,0
     * *             *
     * *             *
     * *             *
     * *             *
     * *             *
     * 1,1***********0,1
     */
    static final float[] ROTATE_180_MIRROR_COORD_VERTICES = {
            1.0f, 0.0f,
            0.0f, 0.0f,
            1.0f, 1.0f,
            0.0f, 1.0f
    };
    /**
     * Mirrored and rotated 270° counterclockwise
     * 1,0***********1,1
     * *             *
     * *             *
     * *             *
     * *             *
     * *             *
     * 0,0***********0,1
     */
    static final float[] ROTATE_270_MIRROR_COORD_VERTICES = {
            1.0f, 0.0f,
            1.0f, 1.0f,
            0.0f, 0.0f,
            0.0f, 1.0f
    };

    /**
     * Create and link the OpenGL program
     *
     * @param fragmentShaderCode fragment shader source code
     * @return handle of the OpenGL program object
     */
    static int createShaderProgram(String fragmentShaderCode) {
        int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, VERTEX_SHADER);
        int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);

        // Create an empty OpenGL ES program
        int mProgram = GLES20.glCreateProgram();
        // Attach the vertex shader to the program
        GLES20.glAttachShader(mProgram, vertexShader);
        // Attach the fragment shader to the program
        GLES20.glAttachShader(mProgram, fragmentShader);
        // Link the OpenGL ES program
        GLES20.glLinkProgram(mProgram);

        // Check the link status
        IntBuffer linked = IntBuffer.allocate(1);
        GLES20.glGetProgramiv(mProgram, GLES20.GL_LINK_STATUS, linked);
        if (linked.get(0) == 0) {
            return -1;
        }
        return mProgram;
    }

    /**
     * Load a shader
     *
     * @param type       shader type, either a fragment shader {@link GLES20#GL_FRAGMENT_SHADER} or a vertex shader {@link GLES20#GL_VERTEX_SHADER}
     * @param shaderCode shader source code
     * @return handle of the shader object
     */
    private static int loadShader(int type, String shaderCode) {
        // Create an empty shader
        int shader = GLES20.glCreateShader(type);
        // Load the shader source
        GLES20.glShaderSource(shader, shaderCode);
        // Compile the shader
        GLES20.glCompileShader(shader);

        // Check whether compilation succeeded
        IntBuffer compiled = IntBuffer.allocate(1);
        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled);
        if (compiled.get(0) == 0) {
            return 0;
        }
        return shader;
    }
}
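
For reference, the convertMat constant in FRAG_SHADER_NORMAL is a column-major BT.601-style YUV-to-RGB matrix. With U and V shifted by 0.5 in the shader, the fragment color it produces works out to:

    R = Y + 1.403 * (V - 0.5)
    G = Y - 0.344 * (U - 0.5) - 0.714 * (V - 0.5)
    B = Y + 1.770 * (U - 0.5)

which is why the NV21/YV12 planes can be uploaded as three single-channel LUMINANCE textures and only combined in the final shading step.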

Using the GLSurfaceView is straightforward: set the shader effect, set the callback, initialize it, and then feed it the NV21 frames to draw. Here is an example for reference.

// Set the shader effect
surfaceView.setFragmentShaderCode(GLUtil.FRAG_SHADER_NORMAL);
// Set the SurfaceHolder callback
surfaceView.getHolder().addCallback(this);
// Initialize the GLSurfaceView
surfaceView.init(false, 0, frameWidth, frameHeight);
// Render the NV21 data
surfaceView.refreshFrameNV21(data);
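
If the decoder from Part 1 delivers frames through a callback, the wiring is just two calls per frame. The callback signature below is an assumption modeled on the RtmpPlayer onFrameAvailable callback mentioned in the next section; adapt it to the actual interface:

// Assumed callback signature from the Part 1 RtmpPlayer; only the two calls matter.
@Override
public void onFrameAvailable(byte[] data, int width, int height) {
    // init() returns immediately if the size and orientation are unchanged,
    // so it is safe to call it for every frame
    surfaceView.init(false, 0, width, height);
    // Upload the NV21 frame and trigger a render
    surfaceView.refreshFrameNV21(data);
}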

Result:

II. Face Tracking and Drawing the Face Rect

With the steps above, the RTMP stream is being rendered. Only the last step remains: running face detection and drawing the face rects. Face detection relies on a face recognition engine, and using it breaks down into the following steps:

  • Activating the engine
  • Initializing the engine
  • Face tracking
  • Drawing the face rect

1. Activating the engine

Log in to the ArcSoft developer center, apply for the Android version of the engine, and obtain the engine package and the activation key.

Add the permissions to the AndroidManifest; on Android 6.0 (API 23) and above, the dangerous READ_PHONE_STATE permission must also be requested at runtime.

<uses-permission android:name="android.permission.READ_PHONE_STATE" />
<uses-permission android:name="android.permission.INTERNET" />

// Activation API
FaceEngine.activeOnline(this, Constants.APP_ID, Constants.SDK_KEY);
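
A minimal sketch of checking the activation result follows. It assumes the SDK's ErrorInfo constants; MERR_ASF_ALREADY_ACTIVATED is treated as success because a device only needs to be activated once. Activation goes over the network, so it is best run off the main thread:

int activeCode = FaceEngine.activeOnline(this, Constants.APP_ID, Constants.SDK_KEY);
if (activeCode == ErrorInfo.MOK || activeCode == ErrorInfo.MERR_ASF_ALREADY_ACTIVATED) {
    Log.i(TAG, "activeOnline success: " + activeCode);
} else {
    // Activation failed: check the network, APP_ID / SDK_KEY and the READ_PHONE_STATE permission
    Log.e(TAG, "activeOnline failed: " + activeCode);
}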

2. Initializing the engine

For initialization we choose the VIDEO detection mode and all-orientation detection (a single, known orientation performs better). The value 16 is the ratio of the image's long edge to the smallest detectable face: the larger it is, the smaller the faces that can be detected, up to a maximum of 32 (for example, with 16 and a 1920-pixel long edge, faces down to roughly 1920 / 16 = 120 pixels can be detected). We allow at most 5 faces, and since we only need detection, only the face detection feature is loaded.

faceEngine = new FaceEngine();
// Init API
int code = faceEngine.init(this, DetectMode.ASF_DETECT_MODE_VIDEO, DetectFaceOrientPriority.ASF_OP_ALL_OUT,
        16, 5, FaceEngine.ASF_FACE_DETECT);
if (code != ErrorInfo.MOK) {
    // Report the error if initialization failed
    Toast.makeText(MainActivity.this, "faceEngineInit Error", Toast.LENGTH_LONG).show();
}
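
When the engine is no longer needed, it should be released; a minimal sketch, assuming the SDK's unInit() counterpart of init():

@Override
protected void onDestroy() {
    super.onDestroy();
    if (faceEngine != null) {
        // Release the native resources held by the engine
        faceEngine.unInit();
        faceEngine = null;
    }
}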

3. Face tracking

In the RtmpPlayer onFrameAvailable callback mentioned above we receive the decoded NV21 data; feeding it straight into the engine gives us the face information.

// Face detection API
int code = faceEngine.detectFaces(data, frameWidth, frameHeight, FaceEngine.CP_PAF_NV21, faceInfos);
if (code != ErrorInfo.MOK) {
    // Detection failed; log the error code
    Log.i(TAG, "onFrameAvailable: detect Error " + code);
    return;
}
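
The snippet above assumes a faceInfos output list; a minimal declaration looks like the following (FaceInfo comes from the same SDK, and in VIDEO mode the faceId it carries is intended to stay stable for the same face across frames, which is what makes this tracking rather than one-shot detection):

// Created once per frame (or cleared and reused); the engine fills in one entry per detected face
List<FaceInfo> faceInfos = new ArrayList<>();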

4. Drawing the face rect

Drawing the face rect is a bit more involved: the detected face info has to be mapped and drawn onto a canvas. This article reuses the DrawHelper and FaceRectView from the ArcFace demo to do it.

DrawHelper

public class DrawHelper {
    private int previewWidth, previewHeight, canvasWidth, canvasHeight, cameraDisplayOrientation, cameraId;
    private boolean isMirror;
    private boolean mirrorHorizontal = false, mirrorVertical = false;

    /**
     * Create a draw helper and set the drawing-related parameters
     *
     * @param previewWidth             preview width
     * @param previewHeight            preview height
     * @param canvasWidth              width of the view being drawn on
     * @param canvasHeight             height of the view being drawn on
     * @param cameraDisplayOrientation display rotation
     * @param cameraId                 camera ID
     * @param isMirror                 whether the preview is mirrored horizontally (set true to compensate if the camera preview is mirrored)
     * @param mirrorHorizontal         extra horizontal mirroring, for compatibility with some devices
     * @param mirrorVertical           extra vertical mirroring, for compatibility with some devices
     */
    public DrawHelper(int previewWidth, int previewHeight, int canvasWidth,
                      int canvasHeight, int cameraDisplayOrientation, int cameraId,
                      boolean isMirror, boolean mirrorHorizontal, boolean mirrorVertical) {
        this.previewWidth = previewWidth;
        this.previewHeight = previewHeight;
        this.canvasWidth = canvasWidth;
        this.canvasHeight = canvasHeight;
        this.cameraDisplayOrientation = cameraDisplayOrientation;
        this.cameraId = cameraId;
        this.isMirror = isMirror;
        this.mirrorHorizontal = mirrorHorizontal;
        this.mirrorVertical = mirrorVertical;
    }

    public void draw(FaceRectView faceRectView, List<DrawInfo> drawInfoList) {
        if (faceRectView == null) {
            return;
        }
        faceRectView.clearFaceInfo();
        if (drawInfoList == null || drawInfoList.size() == 0) {
            return;
        }
        faceRectView.addFaceInfo(drawInfoList);
    }

    /**
     * Adjust the face rect for drawing
     *
     * @param ftRect face rect returned by face tracking (FT)
     * @return the adjusted rect to be drawn on the view
     */
    public Rect adjustRect(Rect ftRect) {
        int previewWidth = this.previewWidth;
        int previewHeight = this.previewHeight;
        int canvasWidth = this.canvasWidth;
        int canvasHeight = this.canvasHeight;
        int cameraDisplayOrientation = this.cameraDisplayOrientation;
        int cameraId = this.cameraId;
        boolean isMirror = this.isMirror;
        boolean mirrorHorizontal = this.mirrorHorizontal;
        boolean mirrorVertical = this.mirrorVertical;

        if (ftRect == null) {
            return null;
        }

        Rect rect = new Rect(ftRect);
        float horizontalRatio;
        float verticalRatio;
        if (cameraDisplayOrientation % 180 == 0) {
            horizontalRatio = (float) canvasWidth / (float) previewWidth;
            verticalRatio = (float) canvasHeight / (float) previewHeight;
        } else {
            horizontalRatio = (float) canvasHeight / (float) previewWidth;
            verticalRatio = (float) canvasWidth / (float) previewHeight;
        }
        rect.left *= horizontalRatio;
        rect.right *= horizontalRatio;
        rect.top *= verticalRatio;
        rect.bottom *= verticalRatio;

        Rect newRect = new Rect();
        switch (cameraDisplayOrientation) {
            case 0:
                if (cameraId == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                    newRect.left = canvasWidth - rect.right;
                    newRect.right = canvasWidth - rect.left;
                } else {
                    newRect.left = rect.left;
                    newRect.right = rect.right;
                }
                newRect.top = rect.top;
                newRect.bottom = rect.bottom;
                break;
            case 90:
                newRect.right = canvasWidth - rect.top;
                newRect.left = canvasWidth - rect.bottom;
                if (cameraId == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                    newRect.top = canvasHeight - rect.right;
                    newRect.bottom = canvasHeight - rect.left;
                } else {
                    newRect.top = rect.left;
                    newRect.bottom = rect.right;
                }
                break;
            case 180:
                newRect.top = canvasHeight - rect.bottom;
                newRect.bottom = canvasHeight - rect.top;
                if (cameraId == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                    newRect.left = rect.left;
                    newRect.right = rect.right;
                } else {
                    newRect.left = canvasWidth - rect.right;
                    newRect.right = canvasWidth - rect.left;
                }
                break;
            case 270:
                newRect.left = rect.top;
                newRect.right = rect.bottom;
                if (cameraId == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                    newRect.top = rect.left;
                    newRect.bottom = rect.right;
                } else {
                    newRect.top = canvasHeight - rect.right;
                    newRect.bottom = canvasHeight - rect.left;
                }
                break;
            default:
                break;
        }

        /**
         * isMirror mirrorHorizontal finalIsMirrorHorizontal
         * true         true                false
         * false        false               false
         * true         false               true
         * false        true                true
         *
         * XOR
         */
        if (isMirror ^ mirrorHorizontal) {
            int left = newRect.left;
            int right = newRect.right;
            newRect.left = canvasWidth - right;
            newRect.right = canvasWidth - left;
        }
        if (mirrorVertical) {
            int top = newRect.top;
            int bottom = newRect.bottom;
            newRect.top = canvasHeight - bottom;
            newRect.bottom = canvasHeight - top;
        }
        return newRect;
    }

    /**
     * Draw the info onto the view; if {@link DrawInfo#getName()} is not null, {@link DrawInfo#getName()} is drawn
     *
     * @param canvas            canvas of the view to draw on
     * @param drawInfo          information to draw
     * @param faceRectThickness stroke width of the face rect
     * @param paint             paint
     */
    public static void drawFaceRect(Canvas canvas, DrawInfo drawInfo, int faceRectThickness, Paint paint) {
        if (canvas == null || drawInfo == null) {
            return;
        }
        paint.setStyle(Paint.Style.STROKE);
        paint.setStrokeWidth(faceRectThickness);
        paint.setColor(drawInfo.getColor());
        paint.setAntiAlias(true);

        Path mPath = new Path();
        // Top left
        Rect rect = drawInfo.getRect();
        mPath.moveTo(rect.left, rect.top + rect.height() / 4);
        mPath.lineTo(rect.left, rect.top);
        mPath.lineTo(rect.left + rect.width() / 4, rect.top);
        // Top right
        mPath.moveTo(rect.right - rect.width() / 4, rect.top);
        mPath.lineTo(rect.right, rect.top);
        mPath.lineTo(rect.right, rect.top + rect.height() / 4);
        // Bottom right
        mPath.moveTo(rect.right, rect.bottom - rect.height() / 4);
        mPath.lineTo(rect.right, rect.bottom);
        mPath.lineTo(rect.right - rect.width() / 4, rect.bottom);
        // Bottom left
        mPath.moveTo(rect.left + rect.width() / 4, rect.bottom);
        mPath.lineTo(rect.left, rect.bottom);
        mPath.lineTo(rect.left, rect.bottom - rect.height() / 4);
        canvas.drawPath(mPath, paint);

        // Draw the text with the thinnest stroke to avoid blurry text on some low-resolution devices
        paint.setStrokeWidth(1);

        if (drawInfo.getName() == null) {
            paint.setStyle(Paint.Style.FILL_AND_STROKE);
            paint.setTextSize(rect.width() / 8);

            String str = (drawInfo.getSex() == GenderInfo.MALE ? "MALE" : (drawInfo.getSex() == GenderInfo.FEMALE ? "FEMALE" : "UNKNOWN"))
                    + ","
                    + (drawInfo.getAge() == AgeInfo.UNKNOWN_AGE ? "UNKNWON" : drawInfo.getAge())
                    + ","
                    + (drawInfo.getLiveness() == LivenessInfo.ALIVE ? "ALIVE" : (drawInfo.getLiveness() == LivenessInfo.NOT_ALIVE ? "NOT_ALIVE" : "UNKNOWN"));
            canvas.drawText(str, rect.left, rect.top - 10, paint);
        } else {
            paint.setStyle(Paint.Style.FILL_AND_STROKE);
            paint.setTextSize(rect.width() / 8);
            canvas.drawText(drawInfo.getName(), rect.left, rect.top - 10, paint);
        }
    }

    public void setPreviewWidth(int previewWidth) {
        this.previewWidth = previewWidth;
    }

    public void setPreviewHeight(int previewHeight) {
        this.previewHeight = previewHeight;
    }

    public void setCanvasWidth(int canvasWidth) {
        this.canvasWidth = canvasWidth;
    }

    public void setCanvasHeight(int canvasHeight) {
        this.canvasHeight = canvasHeight;
    }

    public void setCameraDisplayOrientation(int cameraDisplayOrientation) {
        this.cameraDisplayOrientation = cameraDisplayOrientation;
    }

    public void setCameraId(int cameraId) {
        this.cameraId = cameraId;
    }

    public void setMirror(boolean mirror) {
        isMirror = mirror;
    }

    public int getPreviewWidth() {
        return previewWidth;
    }

    public int getPreviewHeight() {
        return previewHeight;
    }

    public int getCanvasWidth() {
        return canvasWidth;
    }

    public int getCanvasHeight() {
        return canvasHeight;
    }

    public int getCameraDisplayOrientation() {
        return cameraDisplayOrientation;
    }

    public int getCameraId() {
        return cameraId;
    }

    public boolean isMirror() {
        return isMirror;
    }

    public boolean isMirrorHorizontal() {
        return mirrorHorizontal;
    }

    public void setMirrorHorizontal(boolean mirrorHorizontal) {
        this.mirrorHorizontal = mirrorHorizontal;
    }

    public boolean isMirrorVertical() {
        return mirrorVertical;
    }

    public void setMirrorVertical(boolean mirrorVertical) {
        this.mirrorVertical = mirrorVertical;
    }
}

FaceRectView

public class FaceRectView extends View {
    private CopyOnWriteArrayList<DrawInfo> drawInfoList = new CopyOnWriteArrayList<>();

    // Paint, reused across draws
    private Paint paint;

    // Default stroke width of the face rect
    private static final int DEFAULT_FACE_RECT_THICKNESS = 6;

    public FaceRectView(Context context) {
        this(context, null);
    }

    public FaceRectView(Context context, @Nullable AttributeSet attrs) {
        super(context, attrs);
        paint = new Paint();
    }

    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);
        if (drawInfoList != null && drawInfoList.size() > 0) {
            for (int i = 0; i < drawInfoList.size(); i++) {
                DrawHelper.drawFaceRect(canvas, drawInfoList.get(i), DEFAULT_FACE_RECT_THICKNESS, paint);
            }
        }
    }

    public void clearFaceInfo() {
        drawInfoList.clear();
        postInvalidate();
    }

    public void addFaceInfo(DrawInfo faceInfo) {
        drawInfoList.add(faceInfo);
        postInvalidate();
    }

    public void addFaceInfo(List<DrawInfo> faceInfoList) {
        drawInfoList.addAll(faceInfoList);
        postInvalidate();
    }
}

All we need to do is overlay a FaceRectView on top of the existing GLSurfaceView and draw the rects onto the FaceRectView; with the two layers stacked we get the effect we want. A usage example follows, and the view layering itself is sketched after it.

/**
 * DrawHelper initialization
 *
 * @param previewWidth             preview width
 * @param previewHeight            preview height
 * @param canvasWidth              width of the view being drawn on
 * @param canvasHeight             height of the view being drawn on
 * @param cameraDisplayOrientation display rotation
 * @param cameraId                 camera ID
 * @param isMirror                 whether the preview is mirrored horizontally (set true to compensate if the camera preview is mirrored)
 * @param mirrorHorizontal         extra horizontal mirroring, for compatibility with some devices
 * @param mirrorVertical           extra vertical mirroring, for compatibility with some devices
 */
drawHelper = new DrawHelper(width, height, faceRectView.getWidth(), faceRectView.getHeight(), 0, 0, false, false, false);

// After the face info has been obtained
List<DrawInfo> drawInfoList = new ArrayList<>();
for (int i = 0; i < faceInfos.size(); i++) {
    drawInfoList.add(new DrawInfo(drawHelper.adjustRect(faceInfos.get(i).getRect()),
            GenderInfo.UNKNOWN, AgeInfo.UNKNOWN_AGE, LivenessInfo.UNKNOWN, Color.YELLOW,
            String.valueOf(faceInfos.get(i).getFaceId())));
}
drawHelper.draw(faceRectView, drawInfoList);
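
For the view layering itself, here is a plain-Java sketch (a layout XML works just as well). A FrameLayout draws its children in order, and since the GLSurfaceView renders to its own surface behind the window, the transparent FaceRectView added on top always shows over the video. The container id is hypothetical:

FrameLayout previewLayout = findViewById(R.id.preview_layout); // hypothetical container in the layout
RtmpGLSurfaceView surfaceView = new RtmpGLSurfaceView(this);
FaceRectView faceRectView = new FaceRectView(this);
// Video layer at the bottom
previewLayout.addView(surfaceView, new FrameLayout.LayoutParams(
        ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT));
// Transparent face-rect layer on top
previewLayout.addView(faceRectView, new FrameLayout.LayoutParams(
        ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT));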

Result:


III. Appendix

Below are the demo code and the link to Part 1 for reference. If this helped you, feel free to star the demo.

Face Recognition on Android Based on an RTMP Video Stream (Part 1)

RtmpPlayerDemo project code (including rendering and face-rect drawing)

