SDL2 Source Code Analysis: How OpenGL ES Rendering Works on Windows


Updated on November 4, 2018.
Updated on November 21, 2018.

A simple player built with ffmpeg + SDL2

ffmpeg and SDL are very powerful; with them a simple player can be put together in a couple hundred lines of code (the source below came from somewhere on the net, and I can no longer find the original author):

#include <stdio.h>
#define __STDC_CONSTANT_MACROS


#define SDL_MAIN_HANDLED 1

 
#ifdef _WIN32
//Windows
//extern "C"
//{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "SDL2/SDL.h"
//};
#else
//Linux...
#ifdef __cplusplus
extern "C"
{
#endif
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <SDL2/SDL.h>
#ifdef __cplusplus
};
#endif
#endif
 
 
//Full Screen
#define SHOW_FULLSCREEN 0
//Output YUV420P 
#define OUTPUT_YUV420P 0

int main(int argc, char *argv[]) {
    AVFormatContext *pFormatCtx = NULL;
    int videoStream;
    unsigned i;
    AVCodecContext *pCodecCtxOrig = NULL;
    AVCodecContext *pCodecCtx = NULL;
    AVCodec *pCodec = NULL;
    AVFrame *pFrame = NULL;
    AVPacket packet;
    int frameFinished;
    struct SwsContext *sws_ctx = NULL;
    SDL_Event event;
    SDL_Window *screen;
    SDL_Renderer *renderer;
    SDL_Texture *texture;
    Uint8 *yPlane, *uPlane, *vPlane;
    size_t yPlaneSz, uvPlaneSz;
    int uvPitch;

    if (argc < 2) {
        fprintf(stderr, "Usage: test <file>\n");
        exit(1);
    }
    printf("it is a test\n");
    // Register all formats and codecs
    av_register_all();
    
    //SDL_SetHint(SDL_HINT_RENDER_DRIVER, "opengles2");

    if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {
        fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError());
        exit(1);
    }

    // Open video file
    if (avformat_open_input(&pFormatCtx,argv[1], NULL, NULL) != 0)
        return -1; // Couldn't open file

    // Retrieve stream information
    if (avformat_find_stream_info(pFormatCtx, NULL) < 0)
        return -1; // Couldn't find stream information

    // Dump information about file onto standard error
    av_dump_format(pFormatCtx, 0, argv[1], 0);

    // Find the first video stream
    videoStream = -1;
    for (i = 0; i < pFormatCtx->nb_streams; i++)
        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStream = i;
            break;
        }
    if (videoStream == -1)
        return -1; // Didn't find a video stream

    // Get a pointer to the codec context for the video stream
    pCodecCtxOrig = pFormatCtx->streams[videoStream]->codec;
    // Find the decoder for the video stream
    pCodec = avcodec_find_decoder(pCodecCtxOrig->codec_id);
    if (pCodec == NULL) {
        fprintf(stderr, "Unsupported codec!\n");
        return -1; // Codec not found
    }

    // Copy context
    pCodecCtx = avcodec_alloc_context3(pCodec);
    if (avcodec_copy_context(pCodecCtx, pCodecCtxOrig) != 0) {
        fprintf(stderr, "Couldn't copy codec context");
        return -1; // Error copying codec context
    }

    // Open codec
    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0)
        return -1; // Could not open codec

    // Allocate video frame
    pFrame = av_frame_alloc();

    // Make a screen to put our video
    screen = SDL_CreateWindow(
            "FFmpeg Tutorial",
            SDL_WINDOWPOS_UNDEFINED,
            SDL_WINDOWPOS_UNDEFINED,
            pCodecCtx->width,
            pCodecCtx->height,
            0
        );

    if (!screen) {
        fprintf(stderr, "SDL: could not create window - exiting\n");
        exit(1);
    }

    renderer = SDL_CreateRenderer(screen, -1, 0);
    if (!renderer) {
        fprintf(stderr, "SDL: could not create renderer - exiting\n");
        exit(1);
    }
    
    SDL_RendererInfo rendererInfo;
    SDL_GetRendererInfo(renderer, &rendererInfo);

    printf("Renderinfo++++++++++++++++++++++++++:%s--%s\n",SDL_GetCurrentVideoDriver(),rendererInfo.name);

    // Allocate a place to put our YUV image on that screen
    texture = SDL_CreateTexture(
            renderer,
            SDL_PIXELFORMAT_YV12,
            SDL_TEXTUREACCESS_STREAMING,
            pCodecCtx->width,
            pCodecCtx->height
        );
    if (!texture) {
        fprintf(stderr, "SDL: could not create texture - exiting\n");
        exit(1);
    }

    // initialize SWS context for software scaling
    sws_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
            pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height,
            AV_PIX_FMT_YUV420P,
            SWS_BILINEAR,
            NULL,
            NULL,
            NULL);

    // set up YV12 pixel array (12 bits per pixel)
    yPlaneSz = pCodecCtx->width * pCodecCtx->height;
    uvPlaneSz = pCodecCtx->width * pCodecCtx->height / 4;
    yPlane = (Uint8*)malloc(yPlaneSz);
    uPlane = (Uint8*)malloc(uvPlaneSz);
    vPlane = (Uint8*)malloc(uvPlaneSz);
    if (!yPlane || !uPlane || !vPlane) {
        fprintf(stderr, "Could not allocate pixel buffers - exiting\n");
        exit(1);
    }

    uvPitch = pCodecCtx->width / 2;
    while (av_read_frame(pFormatCtx, &packet) >= 0) {
        // Is this a packet from the video stream?
        if (packet.stream_index == videoStream) {
            // Decode video frame
            avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);

            // Did we get a video frame?
            if (frameFinished) {
                AVPicture pict;
                pict.data[0] = yPlane;
                pict.data[1] = uPlane;
                pict.data[2] = vPlane;
                pict.linesize[0] = pCodecCtx->width;
                pict.linesize[1] = uvPitch;
                pict.linesize[2] = uvPitch;

                // Convert the image into YUV format that SDL uses
                sws_scale(sws_ctx, (uint8_t const * const *) pFrame->data,
                        pFrame->linesize, 0, pCodecCtx->height, pict.data,
                        pict.linesize);

                SDL_UpdateYUVTexture(
                        texture,
                        NULL,
                        yPlane,
                        pCodecCtx->width,
                        uPlane,
                        uvPitch,
                        vPlane,
                        uvPitch
                    );

                SDL_RenderClear(renderer);
                SDL_RenderCopy(renderer, texture, NULL, NULL);
                SDL_RenderPresent(renderer);

            }
        }

        // Free the packet that was allocated by av_read_frame
        av_free_packet(&packet);
        SDL_PollEvent(&event);
        switch (event.type) {
        case SDL_QUIT:
            SDL_DestroyTexture(texture);
            SDL_DestroyRenderer(renderer);
            SDL_DestroyWindow(screen);
            SDL_Quit();
            exit(0);
            break;
        default:
            break;
        }

    }

    // Free the YUV frame
    av_frame_free(&pFrame);
    free(yPlane);
    free(uPlane);
    free(vPlane);

    // Close the codec
    avcodec_close(pCodecCtx);
    avcodec_close(pCodecCtxOrig);

    // Close the video file
    avformat_close_input(&pFormatCtx);

    return 0;
}

ffmpeg does the decoding and SDL does the rendering. Here I focus on how SDL renders video on the Windows platform. An earlier article already covered setting up a gcc build environment on Windows; with that in place, the following command builds the executable:

 $gcc simpleplayer.c -L/local/bin -L/bin -I/local/include -I/include -lavformat -lavcodec -lsdl2 -lavutil -lswscale

When building on Windows, the following macro definition is crucial:

 #define SDL_MAIN_HANDLED 1

Because SDL already defines its own main function, you must add the macro above if you do not want to use SDL's main; without it the build fails with:

 c:/mingw/bin/../lib/gcc/mingw32/6.3.0/../../../libmingw32.a(main.o):(.text.startup+0xa0): undefined reference to `WinMain@16'       
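When SDL_MAIN_HANDLED is defined, SDL's documentation also recommends calling SDL_SetMainReady() before SDL_Init(), since SDL can no longer rely on its own main to do that setup. A minimal sketch (not part of the player above):

#include <stdio.h>
#define SDL_MAIN_HANDLED 1
#include "SDL2/SDL.h"

int main(int argc, char *argv[]) {
    (void)argc; (void)argv;
    SDL_SetMainReady();                   /* we provide main() ourselves */
    if (SDL_Init(SDL_INIT_VIDEO) != 0) {  /* non-zero return means failure */
        fprintf(stderr, "SDL_Init failed: %s\n", SDL_GetError());
        return 1;
    }
    SDL_Quit();
    return 0;
}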

The build produces a.exe; run the following command to play a video:

$ ./a.exe ed_1024_512kb.mp4

How to select OpenGL ES rendering

The code above prints the rendering backend while the video plays:

 
 Renderinfo++++++++++++++++++++++++++:windows--direct3d

Of the two calls, the first (SDL_GetCurrentVideoDriver) printed windows and the second (the renderer info name) printed direct3d.
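To see every render driver compiled into the SDL build (direct3d, opengl, opengles2, software, ...), the drivers can be enumerated; a small sketch using standard SDL2 calls:

/* Sketch: print all render drivers this SDL build offers. */
static void list_render_drivers(void)
{
    int i, n = SDL_GetNumRenderDrivers();
    for (i = 0; i < n; i++) {
        SDL_RendererInfo info;
        if (SDL_GetRenderDriverInfo(i, &info) == 0) {
            printf("render driver %d: %s\n", i, info.name);
        }
    }
}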
The render driver can be specified explicitly with the following call:

    SDL_SetHint(SDL_HINT_RENDER_DRIVER, "opengles2");

After this change playback still succeeds, but the printed log does not change. To see how Windows supports OpenGL ES, let's look at how the README in the SDL2 source tree describes it:

================================================================================
OpenGL ES 2.x support
================================================================================

SDL has support for OpenGL ES 2.x under Windows via two alternative 
implementations. 
The most straightforward method consists in running your app in a system with 
a graphic card paired with a relatively recent (as of November of 2013) driver 
which supports the WGL_EXT_create_context_es2_profile extension. Vendors known 
to ship said extension on Windows currently include nVidia and Intel.

The other method involves using the ANGLE library (https://code.google.com/p/angleproject/)
If an OpenGL ES 2.x context is requested and no WGL_EXT_create_context_es2_profile
extension is found, SDL will try to load the libEGL.dll library provided by
ANGLE.
To obtain the ANGLE binaries, you can either compile from source from
https://chromium.googlesource.com/angle/angle or copy the relevant binaries from
a recent Chrome/Chromium install for Windows. The files you need are:
    
    * libEGL.dll
    * libGLESv2.dll
    * d3dcompiler_46.dll (supports Windows Vista or later, better shader compiler)
    or...
    * d3dcompiler_43.dll (supports Windows XP or later)
    
If you compile ANGLE from source, you can configure it so it does not need the
d3dcompiler_* DLL at all (for details on this, see their documentation). 
However, by default SDL will try to preload the d3dcompiler_46.dll to
comply with ANGLE's requirements. If you wish SDL to preload d3dcompiler_43.dll (to
support Windows XP) or to skip this step at all, you can use the 
SDL_HINT_VIDEO_WIN_D3DCOMPILER hint (see SDL_hints.h for more details).

There are two approaches:

  • The most straightforward one is a graphics driver that supports OpenGL ES; vendors currently known to ship this are NVIDIA and Intel.
  • The second one is Google's ANGLE library. So what is ANGLE?

        What is ANGLE?
        ANGLE allows Windows users to seamlessly run OpenGL ES content by efficiently translating OpenGL ES API into DirectX 11 API calls.
     ANGLE translates OpenGL ES API calls into DirectX 11 API calls, which is how we end up rendering video through OpenGL ES; DirectX is Microsoft's graphics API built specifically for Windows.
    

Let's look at the second approach.

ANGLE

Download the ANGLE source from GitHub and build the DLLs mentioned above. Note that I did not use Google's upstream ANGLE (Google's hosting needs a proxy to reach from here); I used Microsoft's fork instead:


git clone https://github.com/Microsoft/angle.git

Following its documentation, build it with Visual Studio to get the following three DLLs:

  libEGL.dll
  libGLESv2.dll
  d3dcompiler_47.dll   

Copy these three DLLs into the same directory as the simple player and run the player again:

    
 Renderinfo++++++++++++++++++++++++++:windows--opengles2  

This time the player is clearly using OpenGL ES 2.

Note: d3dcompiler_47.dll turned out to be unnecessary. The Windows README says that if you build ANGLE from source you can configure it so that no d3dcompiler_* DLL is needed at all. I did not change any settings; I just ran the script to generate the .sln file, opened it, and built, which produced the three DLLs. I did not look into this further.
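If you do want to control which d3dcompiler DLL SDL preloads, or skip the preload entirely, the README points to the SDL_HINT_VIDEO_WIN_D3DCOMPILER hint; a hedged usage sketch:

/* Sketch: tell SDL not to preload any d3dcompiler DLL. SDL_hints.h also
   accepts "d3dcompiler_46.dll" and "d3dcompiler_43.dll" as values. */
SDL_SetHint(SDL_HINT_VIDEO_WIN_D3DCOMPILER, "none");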

I also found that ANGLE only supports GL ES 2.0. Unfortunately I had not worked out this logic beforehand, so I ended up studying the OpenGL ES (1.x) renderer code in SDL2, which presumably differs from the OpenGL ES 2 renderer; I will look into that when I have time.

Source download

Download the SDL 2.0 source code (pick the source code package from the download page).

Source code analysis

When reading the SDL source I only followed the code paths I considered useful, so this is more of a quick tour than an exhaustive study and not every detail is analyzed. Below I walk through the relevant SDL source following the simple player's call sequence from beginning to end. SDL's rendering API functions all start with the SDL_ prefix; the variables and functions used by the simple player are examined one by one:

A few variables

SDL_Event event;        // receives events such as the quit event
SDL_Window *screen;     // the player window
SDL_Renderer *renderer; // the rendering object
SDL_Texture *texture;   // the texture object

SDL_Init

It initializes the individual subsystems:

SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER);

It eventually calls the following function:

SDL2/src/SDL.c
int
SDL_InitSubSystem(Uint32 flags)
{
           ...
        /* Initialize the video subsystem */
        if ((flags & SDL_INIT_VIDEO)){
    #if !SDL_VIDEO_DISABLED
            if (SDL_PrivateShouldInitSubsystem(SDL_INIT_VIDEO)) {
                if (SDL_VideoInit(NULL) < 0) {
                    return (-1);
                }
            }
            SDL_PrivateSubsystemRefCountIncr(SDL_INIT_VIDEO);
    #else
            return SDL_SetError("SDL not built with video support");
    #endif
        }
    .....        
}

We only look at the initialization of the video subsystem:

SDL2/src/video/SDL_video.c
/*
 * Initialize the video and event subsystems -- determine native pixel format
 */
int
SDL_VideoInit(const char *driver_name)
{
     ...
     /* Select the proper video driver */
    index = 0;
    video = NULL;
    if (driver_name == NULL) {
        driver_name = SDL_getenv("SDL_VIDEODRIVER");
    }
    if (driver_name != NULL) {
        for (i = 0; bootstrap[i]; ++i) {
            if (SDL_strncasecmp(bootstrap[i]->name, driver_name, SDL_strlen(driver_name)) == 0) {
                if (bootstrap[i]->available()) {
                    video = bootstrap[i]->create(index);
                    break;
                }
            }
        }
    } else {
        for (i = 0; bootstrap[i]; ++i) {
            if (bootstrap[i]->available()) {
                video = bootstrap[i]->create(index);
                if (video != NULL) {
                    break;
                }
            }
        }
    }
    ...
}

Let's focus on how the video driver is selected. The caller passes NULL for driver_name, so the function tries to read it with SDL_getenv("SDL_VIDEODRIVER"); on my Windows machine no environment variable with that name exists. Execution therefore falls through to the else branch, which walks the driver array from start to end and uses the first entry whose available() check succeeds (a sketch of forcing a specific driver via that environment variable follows right after the array). Here is the definition of the driver array:

SDL2/src/video/SDL_video.c
 /* Available video drivers */
static VideoBootStrap *bootstrap[] = {
#if SDL_VIDEO_DRIVER_COCOA//mac
    &COCOA_bootstrap,
#endif
#if SDL_VIDEO_DRIVER_X11
    &X11_bootstrap,
#endif
#if SDL_VIDEO_DRIVER_MIR
    &MIR_bootstrap,
#endif
#if SDL_VIDEO_DRIVER_WAYLAND
    &Wayland_bootstrap,
#endif
#if SDL_VIDEO_DRIVER_VIVANTE
    &VIVANTE_bootstrap,
#endif
#if SDL_VIDEO_DRIVER_DIRECTFB
    &DirectFB_bootstrap,
#endif
#if SDL_VIDEO_DRIVER_WINDOWS//windows
    &WINDOWS_bootstrap,
#endif
#if SDL_VIDEO_DRIVER_WINRT//windows RT
    &WINRT_bootstrap,
#endif
#if SDL_VIDEO_DRIVER_HAIKU
    &HAIKU_bootstrap,
#endif
#if SDL_VIDEO_DRIVER_PANDORA
    &PND_bootstrap,
#endif
#if SDL_VIDEO_DRIVER_UIKIT //iphone
    &UIKIT_bootstrap,
#endif
#if SDL_VIDEO_DRIVER_ANDROID
    &Android_bootstrap,
#endif
#if SDL_VIDEO_DRIVER_PSP
    &PSP_bootstrap,
#endif
#if SDL_VIDEO_DRIVER_KMSDRM
    &KMSDRM_bootstrap,
#endif
#if SDL_VIDEO_DRIVER_RPI
    &RPI_bootstrap,
#endif
#if SDL_VIDEO_DRIVER_NACL
    &NACL_bootstrap,
#endif
#if SDL_VIDEO_DRIVER_EMSCRIPTEN
    &Emscripten_bootstrap,
#endif
#if SDL_VIDEO_DRIVER_QNX
    &QNX_bootstrap,
#endif
#if SDL_VIDEO_DRIVER_DUMMY
    &DUMMY_bootstrap,
#endif
    NULL
};
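Since driver_name is read from the SDL_VIDEODRIVER environment variable when one is set, a particular backend can be forced without recompiling. A hedged sketch (the value has to match a bootstrap entry's name, e.g. "windows" for WINDOWS_bootstrap above):

/* Sketch: force the "windows" video driver before initializing SDL. */
SDL_setenv("SDL_VIDEODRIVER", "windows", 1);
if (SDL_Init(SDL_INIT_VIDEO) != 0) {
    fprintf(stderr, "SDL_Init failed: %s\n", SDL_GetError());
}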

Whether each element appears in this array is decided by a macro. SDL_VIDEO_DRIVER_WINDOWS is defined in SDL_config_windows.h, and that header is pulled in by SDL_config.h:

SDL2/include/SDL_config.h
/* Add any platform that doesn't build using the configure system. */
#if defined(__WIN32__)
#include "SDL_config_windows.h"
#elif defined(__WINRT__)
#include "SDL_config_winrt.h"
#elif defined(__MACOSX__)
#include "SDL_config_macosx.h"
#elif defined(__IPHONEOS__)
#include "SDL_config_iphoneos.h"
#elif defined(__ANDROID__)
#include "SDL_config_android.h"
#elif defined(__PSP__)
#include "SDL_config_psp.h"
#else
/* This is a minimal configuration just to get SDL running on new platforms */
#include "SDL_config_minimal.h"
#endif /* platform config */

Each platform thus includes its own configuration header; Win32 includes SDL_config_windows.h, and the corresponding driver is selected from it. Of the macros listed ahead of SDL_VIDEO_DRIVER_WINDOWS I am only familiar with the Mac one, so for now I simply assume that none of them gets defined on Windows.


video = bootstrap[i]->create(index);

Once the Windows driver is selected, its create function is called; create points to the following function:


SDL2/src/video/windows/SDL_windowsvideo.c
static SDL_VideoDevice * WIN_CreateDevice(int devindex)
{
     ....
     /* Set the function pointers */
    device->VideoInit = WIN_VideoInit;
    device->VideoQuit = WIN_VideoQuit;
    device->GetDisplayBounds = WIN_GetDisplayBounds;
    device->GetDisplayUsableBounds = WIN_GetDisplayUsableBounds;
    device->GetDisplayDPI = WIN_GetDisplayDPI;
    device->GetDisplayModes = WIN_GetDisplayModes;
    device->SetDisplayMode = WIN_SetDisplayMode;
    device->PumpEvents = WIN_PumpEvents;

    device->CreateSDLWindow = WIN_CreateWindow;
    device->CreateSDLWindowFrom = WIN_CreateWindowFrom;
    device->SetWindowTitle = WIN_SetWindowTitle;
    device->SetWindowIcon = WIN_SetWindowIcon;
    device->SetWindowPosition = WIN_SetWindowPosition;
    device->SetWindowSize = WIN_SetWindowSize;
    device->GetWindowBordersSize = WIN_GetWindowBordersSize;
    device->SetWindowOpacity = WIN_SetWindowOpacity;
    device->ShowWindow = WIN_ShowWindow;
    device->HideWindow = WIN_HideWindow;
    device->RaiseWindow = WIN_RaiseWindow;
    device->MaximizeWindow = WIN_MaximizeWindow;
    device->MinimizeWindow = WIN_MinimizeWindow;
    device->RestoreWindow = WIN_RestoreWindow;
    device->SetWindowBordered = WIN_SetWindowBordered;
    device->SetWindowResizable = WIN_SetWindowResizable;
    device->SetWindowFullscreen = WIN_SetWindowFullscreen;
    device->SetWindowGammaRamp = WIN_SetWindowGammaRamp;
    device->GetWindowGammaRamp = WIN_GetWindowGammaRamp;
    device->SetWindowGrab = WIN_SetWindowGrab;
    device->DestroyWindow = WIN_DestroyWindow;
    device->GetWindowWMInfo = WIN_GetWindowWMInfo;
    device->CreateWindowFramebuffer = WIN_CreateWindowFramebuffer;
    device->UpdateWindowFramebuffer = WIN_UpdateWindowFramebuffer;
    device->DestroyWindowFramebuffer = WIN_DestroyWindowFramebuffer;
    device->OnWindowEnter = WIN_OnWindowEnter;
    device->SetWindowHitTest = WIN_SetWindowHitTest;

    device->shape_driver.CreateShaper = Win32_CreateShaper;
    device->shape_driver.SetWindowShape = Win32_SetWindowShape;
    device->shape_driver.ResizeWindowShape = Win32_ResizeWindowShape;

#if SDL_VIDEO_OPENGL_WGL
    device->GL_LoadLibrary = WIN_GL_LoadLibrary;
    device->GL_GetProcAddress = WIN_GL_GetProcAddress;
    device->GL_UnloadLibrary = WIN_GL_UnloadLibrary;
    device->GL_CreateContext = WIN_GL_CreateContext;
    device->GL_MakeCurrent = WIN_GL_MakeCurrent;
    device->GL_SetSwapInterval = WIN_GL_SetSwapInterval;
    device->GL_GetSwapInterval = WIN_GL_GetSwapInterval;
    device->GL_SwapWindow = WIN_GL_SwapWindow;
    device->GL_DeleteContext = WIN_GL_DeleteContext;
#elif SDL_VIDEO_OPENGL_EGL        
    /* Use EGL based functions */
    device->GL_LoadLibrary = WIN_GLES_LoadLibrary;
    device->GL_GetProcAddress = WIN_GLES_GetProcAddress;
    device->GL_UnloadLibrary = WIN_GLES_UnloadLibrary;
    device->GL_CreateContext = WIN_GLES_CreateContext;
    device->GL_MakeCurrent = WIN_GLES_MakeCurrent;
    device->GL_SetSwapInterval = WIN_GLES_SetSwapInterval;
    device->GL_GetSwapInterval = WIN_GLES_GetSwapInterval;
    device->GL_SwapWindow = WIN_GLES_SwapWindow;
    device->GL_DeleteContext = WIN_GLES_DeleteContext;
#endif
#if SDL_VIDEO_VULKAN
    device->Vulkan_LoadLibrary = WIN_Vulkan_LoadLibrary;
    device->Vulkan_UnloadLibrary = WIN_Vulkan_UnloadLibrary;
    device->Vulkan_GetInstanceExtensions = WIN_Vulkan_GetInstanceExtensions;
    device->Vulkan_CreateSurface = WIN_Vulkan_CreateSurface;
#endif

    device->StartTextInput = WIN_StartTextInput;
    device->StopTextInput = WIN_StopTextInput;
    device->SetTextInputRect = WIN_SetTextInputRect;

    device->SetClipboardText = WIN_SetClipboardText;
    device->GetClipboardText = WIN_GetClipboardText;
    device->HasClipboardText = WIN_HasClipboardText;

    device->free = WIN_DeleteDevice;
    ...
}    

This is where we get what we are after: the function assigns the platform-specific implementation functions, and nearly every platform-specific function used in the rest of the flow originates here. Note that SDL_config_windows.h defines several OpenGL-related macros:

/* Enable OpenGL support */
#ifndef SDL_VIDEO_OPENGL
#define SDL_VIDEO_OPENGL    1
#endif
#ifndef SDL_VIDEO_OPENGL_WGL
#define SDL_VIDEO_OPENGL_WGL    1
#endif
#ifndef SDL_VIDEO_RENDER_OGL
#define SDL_VIDEO_RENDER_OGL    1
#endif
#ifndef SDL_VIDEO_RENDER_OGL_ES2
#define SDL_VIDEO_RENDER_OGL_ES2    1
#endif
#ifndef SDL_VIDEO_OPENGL_ES2
#define SDL_VIDEO_OPENGL_ES2    1
#endif
#ifndef SDL_VIDEO_OPENGL_EGL
#define SDL_VIDEO_OPENGL_EGL    1
#endif

WIN_CreateDevice checks SDL_VIDEO_OPENGL_WGL first, so the program ends up with the WGL functions rather than the EGL ones (see any short introduction to EGL for background). This article will not analyze WGL in detail; the goal is GL ES, so we keep following the simple player's flow, but wherever WGL would be used we examine the EGL path instead.
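For reference, an application can also steer SDL toward the GLES/EGL path by requesting an ES context before creating the window; a hedged sketch using standard SDL2 attributes (the simple player does not do this itself, the opengles2 render driver requests these attributes internally):

/* Sketch: explicitly request an OpenGL ES 2.0 context for a window. */
SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_ES);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 2);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 0);
SDL_Window *win = SDL_CreateWindow("ES window",
        SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED,
        640, 480, SDL_WINDOW_OPENGL);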

SDL_CreateWindow

This function creates the window that displays the video; below is the platform-related part:


SDL2/src/video/SDL_video.c
if (_this->CreateSDLWindow && _this->CreateSDLWindow(_this, window) < 0) {
    SDL_DestroyWindow(window);
    return NULL;
}

CreateSDLWindow was assigned in the WIN_CreateDevice function shown earlier and points to the following function:

SDL2-2.0.8/src/video/windows/SDL_windowswindow.c
int
WIN_CreateWindow(_THIS, SDL_Window * window)
{
    ...
    hwnd = CreateWindow(SDL_Appname, TEXT(""), style, x, y, w, h, parent, NULL,
                 SDL_Instance, NULL); /* CreateWindow is a Windows API call that creates a native window */
    ...
                /* The rest of this macro mess is for OpenGL or OpenGL ES windows */
    #if SDL_VIDEO_OPENGL_ES2
        if (_this->gl_config.profile_mask == SDL_GL_CONTEXT_PROFILE_ES
    #if SDL_VIDEO_OPENGL_WGL
            && (!_this->gl_data || WIN_GL_UseEGL(_this))
    #endif /* SDL_VIDEO_OPENGL_WGL */
        ) {
    #if SDL_VIDEO_OPENGL_EGL
            if (WIN_GLES_SetupWindow(_this, window) < 0) {
                WIN_DestroyWindow(_this, window);
                return -1;
            }
            return 0;
    #else
            return SDL_SetError("Could not create GLES window surface (EGL support not configured)");
    #endif /* SDL_VIDEO_OPENGL_EGL */ 
        }
    #endif /* SDL_VIDEO_OPENGL_ES2 */
    
    #if SDL_VIDEO_OPENGL_WGL
        if (WIN_GL_SetupWindow(_this, window) < 0) {
            WIN_DestroyWindow(_this, window);
            return -1;
        }
    #else
        return SDL_SetError("Could not create GL window (WGL support not configured)");
    #endif
}

Let's look at the WIN_GLES_SetupWindow function:

    
SDL2-2.0.8/src/video/windows/SDL_windowsopengles.c
int
WIN_GLES_SetupWindow(_THIS, SDL_Window * window)
{

    /* The current context is lost in here; save it and reset it. */
    SDL_WindowData *windowdata = (SDL_WindowData *) window->driverdata;
    SDL_Window *current_win = SDL_GL_GetCurrentWindow();
    SDL_GLContext current_ctx = SDL_GL_GetCurrentContext();


    if (_this->egl_data == NULL) {
        if (SDL_EGL_LoadLibrary(_this, NULL, EGL_DEFAULT_DISPLAY, 0) < 0) {
            SDL_EGL_UnloadLibrary(_this);
            return -1;
        }
    }
  
    /* Create the GLES window surface */
    windowdata->egl_surface = SDL_EGL_CreateSurface(_this, (NativeWindowType)windowdata->hwnd);

    if (windowdata->egl_surface == EGL_NO_SURFACE) {
        return SDL_SetError("Could not create GLES window surface");
    }

    return WIN_GLES_MakeCurrent(_this, current_win, current_ctx);    
}

If egl_data is NULL, the EGL dynamic library is loaded; as the comments in the source note, the OpenGL ES library must be loaded as well for things to work, so it is loaded before libEGL.dll.

SDL2-2.0.8/src/video/SDL_egl.c
/* EGL AND OpenGL ES support via ANGLE */
#define DEFAULT_EGL "libEGL.dll"
#define DEFAULT_OGL_ES2 "libGLESv2.dll"
int
SDL_EGL_LoadLibrary(_THIS, const char *egl_path, NativeDisplayType native_display, EGLenum platform)
{
    ...
    /* load the GL ES library */
    path = DEFAULT_OGL_ES2;
    egl_dll_handle = SDL_LoadObject(path);     
    ...
    /* load the EGL library */
    if (path == NULL) {
        path = DEFAULT_EGL;
    }
    dll_handle = SDL_LoadObject(path);
    ...
    /* load the EGL entry points */
    /* Load new function pointers */
    LOAD_FUNC(eglGetDisplay);
    LOAD_FUNC(eglInitialize);
    LOAD_FUNC(eglTerminate);
    LOAD_FUNC(eglGetProcAddress);
    LOAD_FUNC(eglChooseConfig);
    LOAD_FUNC(eglGetConfigAttrib);
    LOAD_FUNC(eglCreateContext);
    LOAD_FUNC(eglDestroyContext);
    LOAD_FUNC(eglCreatePbufferSurface);
    LOAD_FUNC(eglCreateWindowSurface);
    LOAD_FUNC(eglDestroySurface);
    LOAD_FUNC(eglMakeCurrent);
    LOAD_FUNC(eglSwapBuffers);
    LOAD_FUNC(eglSwapInterval);
    LOAD_FUNC(eglWaitNative);
    LOAD_FUNC(eglWaitGL);
    LOAD_FUNC(eglBindAPI);
    LOAD_FUNC(eglQueryString);
    LOAD_FUNC(eglGetError);
    ...
    /* Try the implementation-specific eglGetDisplay even if eglGetPlatformDisplay fails */
    /* 1. obtain the EGLDisplay */
    if (_this->egl_data->egl_display == EGL_NO_DISPLAY) {
        _this->egl_data->egl_display = _this->egl_data->eglGetDisplay(native_display);
    }
    if (_this->egl_data->egl_display == EGL_NO_DISPLAY) {
        return SDL_SetError("Could not get EGL display");
    }
    /* 2. initialize the connection to the EGLDisplay */
    if (_this->egl_data->eglInitialize(_this->egl_data->egl_display, NULL, NULL) != EGL_TRUE) {
        return SDL_SetError("Could not initialize EGL");
    }
    ...
}

Of the EGL drawing steps listed in introductions to EGL, SDL_EGL_LoadLibrary performs steps 1 and 2.
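Outside of SDL, those two steps look roughly like this against the raw EGL API (a minimal sketch; error reporting trimmed):

#include <EGL/egl.h>

/* Sketch of EGL steps 1 and 2. */
static EGLDisplay init_egl_display(void)
{
    EGLint major = 0, minor = 0;
    /* 1. obtain the display connection */
    EGLDisplay dpy = eglGetDisplay(EGL_DEFAULT_DISPLAY);
    if (dpy == EGL_NO_DISPLAY) {
        return EGL_NO_DISPLAY;
    }
    /* 2. initialize the connection; major/minor receive the EGL version */
    if (eglInitialize(dpy, &major, &minor) != EGL_TRUE) {
        return EGL_NO_DISPLAY;
    }
    return dpy;
}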

Next, the surface is created:

EGLSurface *
SDL_EGL_CreateSurface(_THIS, NativeWindowType nw) 
{
    /* max 2 values plus terminator. */
    EGLint attribs[3];
    int attr = 0;
    
    EGLSurface * surface;
    /* 3. choose an EGLConfig */
    if (SDL_EGL_ChooseConfig(_this) != 0) {
        return EGL_NO_SURFACE;
    }
    
#if SDL_VIDEO_DRIVER_ANDROID
    {
        /* Android docs recommend doing this!
         * Ref: http://developer.android.com/reference/android/app/NativeActivity.html 
         */
        EGLint format;
        _this->egl_data->eglGetConfigAttrib(_this->egl_data->egl_display,
                                            _this->egl_data->egl_config, 
                                            EGL_NATIVE_VISUAL_ID, &format);

        ANativeWindow_setBuffersGeometry(nw, 0, 0, format);
    }
#endif    
    if (_this->gl_config.framebuffer_srgb_capable) {
#ifdef EGL_KHR_gl_colorspace
        if (SDL_EGL_HasExtension(_this, SDL_EGL_DISPLAY_EXTENSION, "EGL_KHR_gl_colorspace")) {
            attribs[attr++] = EGL_GL_COLORSPACE_KHR;
            attribs[attr++] = EGL_GL_COLORSPACE_SRGB_KHR;
        } else
#endif
        {
            SDL_SetError("EGL implementation does not support sRGB system framebuffers");
            return EGL_NO_SURFACE;
        }
    }
    
    attribs[attr++] = EGL_NONE;
    /* 5. create the EGLSurface */
    surface = _this->egl_data->eglCreateWindowSurface(
            _this->egl_data->egl_display,
            _this->egl_data->egl_config,
            nw, &attribs[0]);
    if (surface == EGL_NO_SURFACE) {
        SDL_EGL_SetError("unable to create an EGL window surface", "eglCreateWindowSurface");
    }
    return surface;
}

It first obtains an EGLConfig and then creates the EGLSurface.
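For comparison, choosing a config with the raw EGL API looks roughly like this (a sketch; the attribute list is my assumption, SDL builds its own list from gl_config):

#include <EGL/egl.h>

/* Sketch of EGL step 3: pick a window-renderable, ES2-capable RGB config. */
static EGLConfig choose_config(EGLDisplay dpy)
{
    const EGLint attribs[] = {
        EGL_SURFACE_TYPE,    EGL_WINDOW_BIT,
        EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
        EGL_RED_SIZE, 8, EGL_GREEN_SIZE, 8, EGL_BLUE_SIZE, 8,
        EGL_NONE
    };
    EGLConfig cfg = NULL;
    EGLint count = 0;
    if (eglChooseConfig(dpy, attribs, &cfg, 1, &count) != EGL_TRUE || count == 0) {
        return NULL; /* no matching config */
    }
    return cfg;
}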

SDL_CreateRenderer

The driver we end up with is the OpenGL ES render driver:

#if SDL_VIDEO_RENDER_OGL_ES
    &GLES_RenderDriver,
#endif

So the creation function is the one from the OpenGL ES driver, namely GLES_CreateRenderer:



SDL_RenderDriver GLES_RenderDriver = {
    GLES_CreateRenderer,
    {
     "opengles",
     (SDL_RENDERER_ACCELERATED | SDL_RENDERER_PRESENTVSYNC),
     1,
     {SDL_PIXELFORMAT_ABGR8888},
     0,
     0}
};

Let's look at what this function does:

SDL_Renderer *
GLES_CreateRenderer(SDL_Window * window, Uint32 flags)
{

    SDL_Renderer *renderer;
    ...
    renderer->WindowEvent = GLES_WindowEvent;
    renderer->GetOutputSize = GLES_GetOutputSize;
    renderer->SupportsBlendMode = GLES_SupportsBlendMode;
    renderer->CreateTexture = GLES_CreateTexture;
    renderer->UpdateTexture = GLES_UpdateTexture;
    renderer->LockTexture = GLES_LockTexture;
    renderer->UnlockTexture = GLES_UnlockTexture;
    renderer->SetRenderTarget = GLES_SetRenderTarget;
    renderer->UpdateViewport = GLES_UpdateViewport;
    renderer->UpdateClipRect = GLES_UpdateClipRect;
    renderer->RenderClear = GLES_RenderClear;
    renderer->RenderDrawPoints = GLES_RenderDrawPoints;
    renderer->RenderDrawLines = GLES_RenderDrawLines;
    renderer->RenderFillRects = GLES_RenderFillRects;
    renderer->RenderCopy = GLES_RenderCopy;
    renderer->RenderCopyEx = GLES_RenderCopyEx;
    renderer->RenderReadPixels = GLES_RenderReadPixels;
    renderer->RenderPresent = GLES_RenderPresent;
    renderer->DestroyTexture = GLES_DestroyTexture;
    renderer->DestroyRenderer = GLES_DestroyRenderer;
    renderer->GL_BindTexture = GLES_BindTexture;
    renderer->GL_UnbindTexture = GLES_UnbindTexture;
    renderer->info = GLES_RenderDriver.info;
    renderer->info.flags = SDL_RENDERER_ACCELERATED;
    renderer->driverdata = data;
    renderer->window = window;

    data->context = SDL_GL_CreateContext(window);
    if (!data->context) {
        GLES_DestroyRenderer(renderer);
        goto error;
    }
    if (SDL_GL_MakeCurrent(window, data->context) < 0) {
        GLES_DestroyRenderer(renderer);
        goto error;
    }
    ...
}

This function assigns the rendering functions used by GL ES and performs steps 4 and 6 of the EGL drawing procedure mentioned above. Let's look at SDL_GL_CreateContext in detail:

SDL_GLContext
SDL_GL_CreateContext(SDL_Window * window)
{
    SDL_GLContext ctx = NULL;
    CHECK_WINDOW_MAGIC(window, NULL);

    if (!(window->flags & SDL_WINDOW_OPENGL)) {
        SDL_SetError("The specified window isn't an OpenGL window");
        return NULL;
    }

    ctx = _this->GL_CreateContext(_this, window);

    /* Creating a context is assumed to make it current in the SDL driver. */
    if (ctx) {
        _this->current_glwin = window;
        _this->current_glctx = ctx;
        SDL_TLSSet(_this->current_glwin_tls, window, NULL);
        SDL_TLSSet(_this->current_glctx_tls, ctx, NULL);
    }
    return ctx;
}

The GL_CreateContext here is the Windows GL ES variant assigned earlier by WIN_CreateDevice:

SDL_GLContext
WIN_GLES_CreateContext(_THIS, SDL_Window * window)
{
    SDL_GLContext context;
    SDL_WindowData *data = (SDL_WindowData *)window->driverdata;

#if SDL_VIDEO_OPENGL_WGL
    if (_this->gl_config.profile_mask != SDL_GL_CONTEXT_PROFILE_ES) {
        /* Switch to WGL based functions */
        WIN_GLES_UnloadLibrary(_this);
        _this->GL_LoadLibrary = WIN_GL_LoadLibrary;
        _this->GL_GetProcAddress = WIN_GL_GetProcAddress;
        _this->GL_UnloadLibrary = WIN_GL_UnloadLibrary;
        _this->GL_CreateContext = WIN_GL_CreateContext;
        _this->GL_MakeCurrent = WIN_GL_MakeCurrent;
        _this->GL_SetSwapInterval = WIN_GL_SetSwapInterval;
        _this->GL_GetSwapInterval = WIN_GL_GetSwapInterval;
        _this->GL_SwapWindow = WIN_GL_SwapWindow;
        _this->GL_DeleteContext = WIN_GL_DeleteContext;

        if (WIN_GL_LoadLibrary(_this, NULL) != 0) {
            return NULL;
        }

        return WIN_GL_CreateContext(_this, window);
    }
#endif

    context = SDL_EGL_CreateContext(_this, data->egl_surface);
    return context;
}

We are not using WGL, so the call finally lands in SDL_EGL_CreateContext; steps 4 and 6 of the EGL drawing procedure mentioned above actually happen in this function:

SDL_GLContext
    SDL_EGL_CreateContext(_THIS, EGLSurface egl_surface)
{
    ...

    egl_context = _this->egl_data->eglCreateContext(_this->egl_data->egl_display,
                                      _this->egl_data->egl_config,
                                      share_context, attribs);

    if (egl_context == EGL_NO_CONTEXT) {
        SDL_EGL_SetError("Could not create EGL context", "eglCreateContext");
        return NULL;
    }

    _this->egl_data->egl_swapinterval = 0;

    if (SDL_EGL_MakeCurrent(_this, egl_surface, egl_context) < 0) {
        /* Save the SDL error set by SDL_EGL_MakeCurrent */
        char errorText[1024];
        SDL_strlcpy(errorText, SDL_GetError(), SDL_arraysize(errorText));

        /* Delete the context, which may alter the value returned by SDL_GetError() */
        SDL_EGL_DeleteContext(_this, egl_context);

        /* Restore the SDL error */
        SDL_SetError("%s", errorText);

        return NULL;
    }
    ...
}

SDL_CreateTexture

Starting with this function we are at step 7 of the EGL drawing procedure: drawing with the gl* functions. I will skip the layer-by-layer walkthrough; the texture creation function that ultimately gets called is GLES_CreateTexture. Let's look at the OpenGL calls, the functions starting with gl:

static int
GLES_CreateTexture(SDL_Renderer * renderer, SDL_Texture * texture)
{
    GLES_RenderData *renderdata = (GLES_RenderData *) renderer->driverdata;
    ...

    renderdata->glGetError();
    renderdata->glEnable(GL_TEXTURE_2D);//we need to enable texturing before drawing the scene
    /*
    Any nonzero unsigned integer may be used as a texture name. To avoid
    accidentally reusing names, consistently use glGenTextures() to provide
    unused texture names.
    */
    renderdata->glGenTextures(1, &data->texture);
    result = renderdata->glGetError();
    if (result != GL_NO_ERROR) {
        SDL_free(data);
        return GLES_SetError("glGenTextures()", result);
    }

    data->type = GL_TEXTURE_2D;
    /* no NPOV textures allowed in OpenGL ES (yet) */
    texture_w = power_of_2(texture->w);
    texture_h = power_of_2(texture->h);
    data->texw = (GLfloat) texture->w / texture_w;
    data->texh = (GLfloat) texture->h / texture_h;

    data->format = format;
    data->formattype = type;
    scaleMode = GetScaleQuality();
    /*
    glBindTexture(), both creates and uses texture objects.
    When a texture name is initially bound (used with glBindTexture()), a new
    texture object is created with default values for the texture image and texture properties
    */
    renderdata->glBindTexture(data->type, data->texture);
    /*
    Sets various parameters that control how a texture is treated as it’s applied
    to a fragment or stored in a texture object
    */
    renderdata->glTexParameteri(data->type, GL_TEXTURE_MIN_FILTER, scaleMode);
    renderdata->glTexParameteri(data->type, GL_TEXTURE_MAG_FILTER, scaleMode);
    renderdata->glTexParameteri(data->type, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    renderdata->glTexParameteri(data->type, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    /*
    
    Defines a two-dimensional texture, or a one-dimensional texture array.
    */
    renderdata->glTexImage2D(data->type, 0, internalFormat, texture_w,
                             texture_h, 0, format, type, NULL);
    /*
    Disable it if texture rendering operations are finished.
    */
    renderdata->glDisable(GL_TEXTURE_2D);

    result = renderdata->glGetError();
    if (result != GL_NO_ERROR) {
        SDL_free(data);
        return GLES_SetError("glTexImage2D()", result);
    }
    
    texture->driverdata = data;
    return 0;
}    

With that, the texture has been created successfully.
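The listing rounds the texture size up to a power of two because OpenGL ES 1.x does not allow non-power-of-two (NPOT) textures; a sketch of such a helper (SDL's internal power_of_2 may be written differently):

/* Sketch: round a value up to the next power of two, e.g. 300 -> 512. */
static int power_of_2(int input)
{
    int value = 1;
    while (value < input) {
        value <<= 1;
    }
    return value;
}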

SDL_UpdateYUVTexture

Everything is now in place; with the OpenGL texture created, only the rendering itself is left. The player uploads frames with SDL_UpdateYUVTexture; again we go straight to the OpenGL ES implementation, GLES_UpdateTexture:

static int
GLES_UpdateTexture(SDL_Renderer * renderer, SDL_Texture * texture,
                   const SDL_Rect * rect, const void *pixels, int pitch)
{
    GLES_RenderData *renderdata = (GLES_RenderData *) renderer->driverdata;
    
    ...
    /* Reformat the texture data into a tightly packed array */
    srcPitch = rect->w * SDL_BYTESPERPIXEL(texture->format);
    src = (Uint8 *)pixels;
    if (pitch != srcPitch) {
        blob = (Uint8 *)SDL_malloc(srcPitch * rect->h);
        if (!blob) {
            return SDL_OutOfMemory();
        }
        src = blob;
        for (y = 0; y < rect->h; ++y) {
            SDL_memcpy(src, pixels, srcPitch);
            src += srcPitch;
            pixels = (Uint8 *)pixels + pitch;
        }
        src = blob;
    }

    /* Create a texture subimage with the supplied data */
    renderdata->glGetError();
    //enable texturing before drawing the scene
    renderdata->glEnable(data->type);
    /*
    When binding to a previously created texture object, that texture object becomes active. 
    */
    renderdata->glBindTexture(data->type, data->texture);
    /*
    it describes how the bitmap data is stored in computer memory
    */
    renderdata->glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
    /*
    Defines a two-dimensional texture image that replaces all or part of
    a contiguous subregion (in 2D, it’s simply a rectangle) of the current,
    existing two-dimensional texture image.
    */
    renderdata->glTexSubImage2D(data->type,
                    0,
                    rect->x,
                    rect->y,
                    rect->w,
                    rect->h,
                    data->format,
                    data->formattype,
                    src);
    renderdata->glDisable(data->type);
    SDL_free(blob);

    if (renderdata->glGetError() != GL_NO_ERROR) {
        return SDL_SetError("Failed to update texture");
    }
    return 0;
}    

pixels is the pixel data to be displayed, and pitch is the length of one row of that data in bytes.
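In other words, pitch is bytes per row rather than pixels per row; a small illustrative sketch (the 1024 width is just an example value):

int width = 1024;
int y_pitch    = width * 1;   /* Y plane, 1 byte per pixel   -> 1024 bytes per row */
int rgba_pitch = width * 4;   /* RGBA8888, 4 bytes per pixel -> 4096 bytes per row */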

SDL_RenderClear

Again we only look at the final GL ES call. The two functions to understand are glClear and glClearColor; the explanation from the official documentation is quoted in the comments. glClear clears the window, and glClearColor sets the color the window is cleared to.

static int
GLES_RenderClear(SDL_Renderer * renderer)
{
    GLES_RenderData *data = (GLES_RenderData *) renderer->driverdata;

    GLES_ActivateRenderer(renderer);

    /*
    glClearColor() establishes what color the window will be cleared to, and
    glClear() actually clears the window. Once the clearing color is set, the
    window is cleared to that color whenever glClear() is called. This clearing
    color can be changed with another call to glClearColor().
    */

    data->glClearColor((GLfloat) renderer->r * inv255f,
                 (GLfloat) renderer->g * inv255f,
                 (GLfloat) renderer->b * inv255f,
                 (GLfloat) renderer->a * inv255f);
    
    if (renderer->clipping_enabled) {
        data->glDisable(GL_SCISSOR_TEST);
    }

    data->glClear(GL_COLOR_BUFFER_BIT);

    if (renderer->clipping_enabled) {
        data->glEnable(GL_SCISSOR_TEST);
    }

    return 0;
}
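At the application level the clear color is the renderer's current draw color, so in the simple player one would typically set it before clearing; a hedged usage sketch:

/* Sketch: clear to opaque black before copying the frame texture. */
SDL_SetRenderDrawColor(renderer, 0, 0, 0, 255);
SDL_RenderClear(renderer);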

SDL_RenderCopy

The actual drawing of the image happens in this function:

static int
GLES_RenderCopy(SDL_Renderer * renderer, SDL_Texture * texture,
                const SDL_Rect * srcrect, const SDL_FRect * dstrect)
{
    GLES_RenderData *data = (GLES_RenderData *) renderer->driverdata;
    GLES_TextureData *texturedata = (GLES_TextureData *) texture->driverdata;
    GLfloat minx, miny, maxx, maxy;
    GLfloat minu, maxu, minv, maxv;
    GLfloat vertices[8];
    GLfloat texCoords[8];

    GLES_ActivateRenderer(renderer);

    data->glEnable(GL_TEXTURE_2D);

    data->glBindTexture(texturedata->type, texturedata->texture);

    if (texture->modMode) {
        GLES_SetColor(data, texture->r, texture->g, texture->b, texture->a);
    } else {
        GLES_SetColor(data, 255, 255, 255, 255);
    }

    GLES_SetBlendMode(data, texture->blendMode);

    /* enable or disable texture coordinates */
    GLES_SetTexCoords(data, SDL_TRUE);

    minx = dstrect->x;
    miny = dstrect->y;
    maxx = dstrect->x + dstrect->w;
    maxy = dstrect->y + dstrect->h;

    minu = (GLfloat) srcrect->x / texture->w;
    minu *= texturedata->texw;
    maxu = (GLfloat) (srcrect->x + srcrect->w) / texture->w;
    maxu *= texturedata->texw;
    minv = (GLfloat) srcrect->y / texture->h;
    minv *= texturedata->texh;
    maxv = (GLfloat) (srcrect->y + srcrect->h) / texture->h;
    maxv *= texturedata->texh;

    vertices[0] = minx;
    vertices[1] = miny;
    vertices[2] = maxx;
    vertices[3] = miny;
    vertices[4] = minx;
    vertices[5] = maxy;
    vertices[6] = maxx;
    vertices[7] = maxy;

    texCoords[0] = minu;
    texCoords[1] = minv;
    texCoords[2] = maxu;
    texCoords[3] = minv;
    texCoords[4] = minu;
    texCoords[5] = maxv;
    texCoords[6] = maxu;
    texCoords[7] = maxv;

    /* tell OpenGL where the vertex coordinate data lives */
    data->glVertexPointer(2, GL_FLOAT, 0, vertices);
    /* tell OpenGL where the texture coordinate data lives */
    data->glTexCoordPointer(2, GL_FLOAT, 0, texCoords);
    /* draw the data */
    data->glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

    data->glDisable(GL_TEXTURE_2D);

    return 0;
}
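A quick worked example of the coordinate arithmetic above (the 300x200 frame size is just an illustrative assumption): GLES_CreateTexture would pad such a frame to a 512x256 texture, so texw = 300/512 ≈ 0.586 and texh = 200/256 ≈ 0.781. Copying the whole frame (srcrect covering all 300x200 pixels) then gives minu = 0, maxu = (300/300) x 0.586 = 0.586, minv = 0 and maxv = 0.781, so only the region of the padded texture that actually holds pixels is sampled, and the four vertices map it onto dstrect as a two-triangle GL_TRIANGLE_STRIP.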

SDL_RenderPresent

As the name implies, this function presents the image. It performs step 8 of the EGL drawing procedure, eglSwapBuffers(), which swaps the front and back buffers so the frame goes to the display. Here is the GL ES RenderPresent function:

static void
GLES_RenderPresent(SDL_Renderer * renderer)
{
    GLES_ActivateRenderer(renderer);

    SDL_GL_SwapWindow(renderer->window);
}

After activating the renderer it performs the swap; the Windows GL ES implementation is in the following function:

WIN_GLES_SwapWindow

I searched the source for this function for quite a while and could not find it; it turns out to be generated by a macro... well hidden indeed.

SDL_EGL_SwapWindow_impl(WIN)


#define SDL_EGL_SwapWindow_impl(BACKEND) int \
BACKEND ## _GLES_SwapWindow(_THIS, SDL_Window * window) \
{\
    return SDL_EGL_SwapBuffers(_this, ((SDL_WindowData *) window->driverdata)->egl_surface);\
}
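Expanding SDL_EGL_SwapWindow_impl(WIN) by hand (the ## operator pastes the WIN prefix onto the function name) gives roughly:

int WIN_GLES_SwapWindow(_THIS, SDL_Window * window)
{
    return SDL_EGL_SwapBuffers(_this, ((SDL_WindowData *) window->driverdata)->egl_surface);
}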

It ultimately calls EGL's eglSwapBuffers function:

int
SDL_EGL_SwapBuffers(_THIS, EGLSurface egl_surface)
{
    if (!_this->egl_data->eglSwapBuffers(_this->egl_data->egl_display, egl_surface)) {
        return SDL_EGL_SetError("unable to show color buffer in an OS-native window", "eglSwapBuffers");
    }
    return 0;
}

SDL_PollEvent

As the name suggests, this function polls for all kinds of events; I will not dig into its source.

What follows next is the teardown and cleanup.

SDL_DestroyTexture

Destroying the texture is done by calling glDeleteTextures.

static void
GLES_DestroyTexture(SDL_Renderer * renderer, SDL_Texture * texture)
{
    GLES_RenderData *renderdata = (GLES_RenderData *) renderer->driverdata;

    GLES_TextureData *data = (GLES_TextureData *) texture->driverdata;

    GLES_ActivateRenderer(renderer);

    if (!data) {
        return;
    }
    if (data->texture) {
        renderdata->glDeleteTextures(1, &data->texture);
    }
    SDL_free(data->pixels);
    SDL_free(data);
    texture->driverdata = NULL;
}

SDL_DestroyRenderer

The renderer is destroyed with the following function. It uses glDeleteFramebuffersOES, which I did not study closely; it is not in the core OpenGL documentation because it comes from the OES_framebuffer_object extension for OpenGL ES 1.x, and as the name suggests it deletes the framebuffer objects.

static void
GLES_DestroyRenderer(SDL_Renderer * renderer)
{
    GLES_RenderData *data = (GLES_RenderData *) renderer->driverdata;

    if (data) {
        if (data->context) {
            while (data->framebuffers) {
               GLES_FBOList *nextnode = data->framebuffers->next;
               data->glDeleteFramebuffersOES(1, &data->framebuffers->FBO);
               SDL_free(data->framebuffers);
               data->framebuffers = nextnode;
            }
            SDL_GL_DeleteContext(data->context);
        }
        SDL_free(data);
    }
    SDL_free(renderer);
}

Then the context is deleted:

void
WIN_GLES_DeleteContext(_THIS, SDL_GLContext context)
{
    SDL_EGL_DeleteContext(_this, context);
    WIN_GLES_UnloadLibrary(_this);
}

SDL_EGL_DeleteContext calls EGL's destruction function eglDestroyContext, step 11 of the EGL drawing procedure.

void
SDL_EGL_DeleteContext(_THIS, SDL_GLContext context)
{
    EGLContext egl_context = (EGLContext) context;

    /* Clean up GLES and EGL */
    if (!_this->egl_data) {
        return;
    }
    
    if (egl_context != NULL && egl_context != EGL_NO_CONTEXT) {
        SDL_EGL_MakeCurrent(_this, NULL, NULL);
        _this->egl_data->eglDestroyContext(_this->egl_data->egl_display, egl_context);
    }
        
}

The WIN_GLES_UnloadLibrary function is in fact SDL_EGL_UnloadLibrary:

#define WIN_GLES_UnloadLibrary SDL_EGL_UnloadLibrary

void
SDL_EGL_UnloadLibrary(_THIS)
{
    if (_this->egl_data) {
        if (_this->egl_data->egl_display) {
            _this->egl_data->eglTerminate(_this->egl_data->egl_display);
            _this->egl_data->egl_display = NULL;
        }

        if (_this->egl_data->dll_handle) {
            SDL_UnloadObject(_this->egl_data->dll_handle);
            _this->egl_data->dll_handle = NULL;
        }
        if (_this->egl_data->egl_dll_handle) {
            SDL_UnloadObject(_this->egl_data->egl_dll_handle);
            _this->egl_data->egl_dll_handle = NULL;
        }
        
        SDL_free(_this->egl_data);
        _this->egl_data = NULL;
    }
}

It calls EGL's termination function to end the connection to the EGLDisplay, which is step 12 of the EGL drawing procedure, and then unloads the related DLLs.

SDL_DestroyWindow

The window is destroyed by calling the Windows API function DestroyWindow:

 void
WIN_DestroyWindow(_THIS, SDL_Window * window)
{
    SDL_WindowData *data = (SDL_WindowData *) window->driverdata;

    if (data) {
        ReleaseDC(data->hwnd, data->hdc);
        RemoveProp(data->hwnd, TEXT("SDL_WindowData"));
        if (data->created) {
            DestroyWindow(data->hwnd);
            if (data->parent) {
                DestroyWindow(data->parent);
            }
        } else {
            /* Restore any original event handler... */
            if (data->wndproc != NULL) {
#ifdef GWLP_WNDPROC
                SetWindowLongPtr(data->hwnd, GWLP_WNDPROC,
                                 (LONG_PTR) data->wndproc);
#else
                SetWindowLong(data->hwnd, GWL_WNDPROC,
                              (LONG_PTR) data->wndproc);
#endif
            }
        }
        SDL_free(data);
    }
    window->driverdata = NULL;
}

SDL_Quit

We only look at the video subsystem's quit function:

void
WIN_VideoQuit(_THIS)
{
    WIN_QuitModes(_this);
    WIN_QuitKeyboard(_this);
    WIN_QuitMouse(_this);
}

References

Notes on the OpenGL SuperBible: vertex arrays
