研究了一段时间Android的surface系统,一直执着地认为所有在surface或者屏幕上显示的画面,必须要转换成RGB才能显示,yuv数据也要通过颜色空间转换成RGB才能显示。可最近在研究stagefright视频显示时发现,根本找不到omx解码后的yuv是怎么转换成RGB的代码,yuv数据在render之后就找不到去向了,可画面确确实实的显示出来了,这从此颠覆了yuv必须要转换成RGB才能显示的真理了。
稍微看一下AwesomePlayer的代码,不难发现,视频的每一帧是通过调用了SoftwareRenderer来渲染显示的,我也尝试利用SoftwareRenderer来直接render yuv数据显示,竟然成功了,这是一个很大的突破,比如以后摄像头采集到的yuv,可以直接丢yuv数据到surface显示,无需耗时耗效率的yuv转RGB了。
代码原创,贴出来与大家分享:Android 4.4平台 (其中yuv数据的地址可以从这里下载点击打开链接,放到/mnt/sdcard目录)
// Demo: render one raw YV12 frame directly onto an Android surface via
// stagefright's SoftwareRenderer — no explicit YUV->RGB conversion in app
// code (the conversion/overlay happens below the render() call).
// Target platform: Android 4.4.
#include <include/SoftwareRenderer.h>

#include <cutils/memory.h>
#include <unistd.h>
#include <utils/Log.h>
#include <binder/IPCThreadState.h>
#include <binder/ProcessState.h>
#include <binder/IServiceManager.h>
#include <gui/Surface.h>
#include <gui/SurfaceComposerClient.h>
#include <gui/ISurfaceComposer.h>
#include <ui/DisplayInfo.h>
#include <android/native_window.h>
#include <media/stagefright/MetaData.h>

using namespace android;

// Read exactly `size` bytes of raw YV12 data from `path` into pYUVData.
// Returns true on success; false if the file cannot be opened or is
// shorter than `size` bytes.
bool getYV12Data(const char *path, unsigned char *pYUVData, int size) {
    FILE *fp = fopen(path, "rb");
    if (fp == NULL) {
        printf("read %s fail !!!!!!!!!!!!!!!!!!!\n", path);
        return false;
    }
    // Fix: the original ignored fread's result, so a truncated file would
    // silently render garbage. Read byte-wise so the count is exact.
    size_t got = fread(pYUVData, 1, size, fp);
    fclose(fp);
    if (got != (size_t)size) {
        printf("short read from %s: got %zu of %d bytes\n", path, got, size);
        return false;
    }
    return true;
}

int main(void) {
    // Start the binder thread pool so we can talk to SurfaceFlinger.
    sp<ProcessState> proc(ProcessState::self());
    ProcessState::self()->startThreadPool();

    // Connect to SurfaceFlinger.
    sp<SurfaceComposerClient> client = new SurfaceComposerClient();
    sp<IBinder> dtoken(SurfaceComposerClient::getBuiltInDisplay(
            ISurfaceComposer::eDisplayIdMain));

    // Query the main display's geometry/density.
    DisplayInfo dinfo;
    status_t status = SurfaceComposerClient::getDisplayInfo(dtoken, &dinfo);
    // Fix: check the status BEFORE printing — the original printed dinfo
    // fields first, which reads uninitialized memory on failure.
    if (status)
        return -1;
    printf("w=%d,h=%d,xdpi=%f,ydpi=%f,fps=%f,ds=%f\n",
           dinfo.w, dinfo.h, dinfo.xdpi, dinfo.ydpi, dinfo.fps, dinfo.density);

    // Create a full-screen RGBA surface (some builds may reject dinfo.w/h;
    // fixed values work too).
    sp<SurfaceControl> surfaceControl = client->createSurface(
            String8("showYUV"), dinfo.w, dinfo.h, PIXEL_FORMAT_RGBA_8888, 0);
    // Fix: the original never checked the surface was actually created.
    if (surfaceControl == NULL || !surfaceControl->isValid()) {
        printf("createSurface failed\n");
        return -1;
    }

    /************************ get yuv data from file ************************/
    printf("[%s][%d]\n", __FILE__, __LINE__);
    const int width = 320;
    const int height = 240;
    // YV12 frame size: full-res Y plane + quarter-res V and U planes.
    const int size = width * height * 3 / 2;
    unsigned char *data = new unsigned char[size];
    // Fix: string literal must bind to const char* (ill-formed in C++11
    // as non-const).
    const char *path = "/mnt/sdcard/yuv_320_240.yuv";
    // Fix: bail out on read failure instead of rendering uninitialized data.
    if (!getYV12Data(path, data, size)) {
        delete[] data;
        return -1;
    }

    /************************ configure the surface *************************/
    SurfaceComposerClient::openGlobalTransaction();
    surfaceControl->setLayer(100000);       // Z order: put it on top
    surfaceControl->setPosition(100, 100);  // offset from top-left (0,0)
    surfaceControl->setSize(width, height); // on-screen video size
    SurfaceComposerClient::closeGlobalTransaction();

    sp<Surface> surface = surfaceControl->getSurface();
    printf("[%s][%d]\n", __FILE__, __LINE__);

    /************************ describe the frame ****************************/
    sp<MetaData> meta = new MetaData;
    meta->setInt32(kKeyWidth, width);
    meta->setInt32(kKeyHeight, height);
    /* Supported color formats:
     *   OMX_COLOR_FormatYUV420Planar
     *   OMX_TI_COLOR_FormatYUV420PackedSemiPlanar
     *   HAL_PIXEL_FORMAT_YV12
     * anything else appears to fall back to OMX_COLOR_Format16bitRGB565. */
    meta->setInt32(kKeyColorFormat, HAL_PIXEL_FORMAT_YV12);
    // Crop rect is optional but, if set, must be correct. The coordinates
    // are inclusive, hence width-1 / height-1.
    meta->setRect(kKeyCropRect,
                  0,           // left
                  0,           // top
                  width - 1,   // right
                  height - 1); // bottom
    printf("[%s][%d]\n", __FILE__, __LINE__);

    // NOTE(review): sr is intentionally never deleted — its destructor
    // would disconnect the native window while the frame is on screen.
    SoftwareRenderer *sr = new SoftwareRenderer(surface, meta);
    printf("[%s][%d]\n", __FILE__, __LINE__);
    sr->render(data, size, NULL); // the key call: queues the frame for display
    delete[] data;
    printf("[%s][%d]\n", __FILE__, __LINE__);

    // Block here so the frame stays visible; without this the process
    // exits and the surface vanishes immediately.
    IPCThreadState::self()->joinThreadPool();
    IPCThreadState::self()->stopProcess();
    return 0;
}
# Build showYUV: a small test executable that pushes a raw YV12 frame
# to the screen through stagefright's SoftwareRenderer.
LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)

LOCAL_MODULE      := showYUV
LOCAL_MODULE_TAGS := tests

LOCAL_SRC_FILES := main.cpp

# OpenMAX color-format enums plus libstagefright internals
# (SoftwareRenderer.h is not an exported public header).
LOCAL_C_INCLUDES := \
    frameworks/native/include/media/openmax \
    frameworks/av/media/libstagefright

# Color conversion lives in a static helper library inside stagefright.
LOCAL_STATIC_LIBRARIES := libstagefright_color_conversion

LOCAL_SHARED_LIBRARIES := \
    libcutils \
    libutils \
    libbinder \
    libui \
    libgui \
    libstagefright \
    libstagefright_foundation

include $(BUILD_EXECUTABLE)