SDL2原始碼分析之OpenGL ES在windows上的渲染過程
SDL2原始碼分析之OpenGL ES在windows上的渲染過程
ffmpeg + SDL2實現的簡易播放器
ffmpeg和SDL非常強大,通過使用ffmpeg和SDL可以用100多行程式碼就實現一個簡易的播放器(來自網上的原始碼,找不到出處了):
#include <stdio.h> #define __STDC_CONSTANT_MACROS #define SDL_MAIN_HANDLED 1 #ifdef _WIN32 //Windows //extern "C" //{ #include "libavcodec/avcodec.h" #include "libavformat/avformat.h" #include "libswscale/swscale.h" #include "SDL2/SDL.h" //}; #else //Linux... #ifdef __cplusplus extern "C" { #endif #include <libavcodec/avcodec.h> #include <libavformat/avformat.h> #include <libswscale/swscale.h> #include <SDL2/SDL.h> #ifdef __cplusplus }; #endif #endif //Full Screen #define SHOW_FULLSCREEN 0 //Output YUV420P #define OUTPUT_YUV420P 0 int main(int argc, char *argv[]) { AVFormatContext *pFormatCtx = NULL; int videoStream; unsigned i; AVCodecContext *pCodecCtxOrig = NULL; AVCodecContext *pCodecCtx = NULL; AVCodec *pCodec = NULL; AVFrame *pFrame = NULL; AVPacket packet; int frameFinished; struct SwsContext *sws_ctx = NULL; SDL_Event event; SDL_Window *screen; SDL_Renderer *renderer; SDL_Texture *texture; Uint8 *yPlane, *uPlane, *vPlane; size_t yPlaneSz, uvPlaneSz; int uvPitch; if (argc < 2) { fprintf(stderr, "Usage: test <file>\n"); exit(1); } printf("it is a test\n"); // Register all formats and codecs av_register_all(); if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) { fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError()); exit(1); } // Open video file if (avformat_open_input(&pFormatCtx,argv[1], NULL, NULL) != 0) return -1; // Couldn't open file // Retrieve stream information if (avformat_find_stream_info(pFormatCtx, NULL) < 0) return -1; // Couldn't find stream information // Dump information about file onto standard error av_dump_format(pFormatCtx, 0, argv[1], 0); // Find the first video stream videoStream = -1; for (i = 0; i < pFormatCtx->nb_streams; i++) if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) { videoStream = i; break; } if (videoStream == -1) return -1; // Didn't find a video stream // Get a pointer to the codec context for the video stream pCodecCtxOrig = pFormatCtx->streams[videoStream]->codec; // Find 
the decoder for the video stream pCodec = avcodec_find_decoder(pCodecCtxOrig->codec_id); if (pCodec == NULL) { fprintf(stderr, "Unsupported codec!\n"); return -1; // Codec not found } // Copy context pCodecCtx = avcodec_alloc_context3(pCodec); if (avcodec_copy_context(pCodecCtx, pCodecCtxOrig) != 0) { fprintf(stderr, "Couldn't copy codec context"); return -1; // Error copying codec context } // Open codec if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) return -1; // Could not open codec // Allocate video frame pFrame = av_frame_alloc(); // Make a screen to put our video screen = SDL_CreateWindow( "FFmpeg Tutorial", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, pCodecCtx->width, pCodecCtx->height, 0 ); if (!screen) { fprintf(stderr, "SDL: could not create window - exiting\n"); exit(1); } renderer = SDL_CreateRenderer(screen, -1, 0); if (!renderer) { fprintf(stderr, "SDL: could not create renderer - exiting\n"); exit(1); } // Allocate a place to put our YUV image on that screen texture = SDL_CreateTexture( renderer, SDL_PIXELFORMAT_YV12, SDL_TEXTUREACCESS_STREAMING, pCodecCtx->width, pCodecCtx->height ); if (!texture) { fprintf(stderr, "SDL: could not create texture - exiting\n"); exit(1); } // initialize SWS context for software scaling sws_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P, SWS_BILINEAR, NULL, NULL, NULL); // set up YV12 pixel array (12 bits per pixel) yPlaneSz = pCodecCtx->width * pCodecCtx->height; uvPlaneSz = pCodecCtx->width * pCodecCtx->height / 4; yPlane = (Uint8*)malloc(yPlaneSz); uPlane = (Uint8*)malloc(uvPlaneSz); vPlane = (Uint8*)malloc(uvPlaneSz); if (!yPlane || !uPlane || !vPlane) { fprintf(stderr, "Could not allocate pixel buffers - exiting\n"); exit(1); } uvPitch = pCodecCtx->width / 2; while (av_read_frame(pFormatCtx, &packet) >= 0) { // Is this a packet from the video stream? 
if (packet.stream_index == videoStream) { // Decode video frame avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet); // Did we get a video frame? if (frameFinished) { AVPicture pict; pict.data[0] = yPlane; pict.data[1] = uPlane; pict.data[2] = vPlane; pict.linesize[0] = pCodecCtx->width; pict.linesize[1] = uvPitch; pict.linesize[2] = uvPitch; // Convert the image into YUV format that SDL uses sws_scale(sws_ctx, (uint8_t const * const *) pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pict.data, pict.linesize); SDL_UpdateYUVTexture( texture, NULL, yPlane, pCodecCtx->width, uPlane, uvPitch, vPlane, uvPitch ); SDL_RenderClear(renderer); SDL_RenderCopy(renderer, texture, NULL, NULL); SDL_RenderPresent(renderer); } } // Free the packet that was allocated by av_read_frame av_free_packet(&packet); SDL_PollEvent(&event); switch (event.type) { case SDL_QUIT: SDL_DestroyTexture(texture); SDL_DestroyRenderer(renderer); SDL_DestroyWindow(screen); SDL_Quit(); exit(0); break; default: break; } } // Free the YUV frame av_frame_free(&pFrame); free(yPlane); free(uPlane); free(vPlane); // Close the codec avcodec_close(pCodecCtx); avcodec_close(pCodecCtxOrig); // Close the video file avformat_close_input(&pFormatCtx); return 0; }
ffmpeg用來解碼,使用SDL進行渲染。筆者著重對如何在windows平臺上使用SDL進行視訊渲染進行一下分析。前面的文章已經介紹了如何在windows平臺上搭建gcc編譯環境,然後執行如下命令編譯出可執行程式:
$gcc simpleplayer.c -L/local/bin -L/bin -I/local/include -I/include -lavformat -lavcodec -lsdl2 -lavutil -lswscale
windows平臺下編譯下面這句巨集定義很關鍵:
#define SDL_MAIN_HANDLED 1
因為SDL中已經定義了main函式,不使用SDL中的main就需要加上面的巨集定義,如果不加這句話的話會報錯:
c:/mingw/bin/../lib/gcc/mingw32/6.3.0/../../../libmingw32.a(main.o):(.text.startup+0xa0): undefined reference to `WinMain@16'
最終生成a.exe,執行如下命令進行視訊播放:
$ ./a.exe ed_1024_512kb.mp4
原始碼下載
SDL2.0原始碼 ,選擇上面的source code下載下來。
原始碼分析
作者看SDL原始碼時是按照認為對自己有用的程式碼邏輯有目的的進行分析和理解,因此像是走馬觀花,不會對每個細節都分析的很清楚,下面按照簡易播放器的呼叫過程從頭到尾把相關的SDL原始碼走一遍。SDL的渲染函式都是以SDL開頭的,下面對簡易播放器中的這些變數和函式進行分析:
幾個變數
SDL_Event event;//儲存渲染結束事件 SDL_Window *screen;//播放器視窗 SDL_Renderer *renderer;//渲染物件 SDL_Texture *texture;//紋理物件
SDL_Init
用於初始化各個子系統:
SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER);
最終會呼叫下面的函式:
int SDL_InitSubSystem(Uint32 flags) { ... /* Initialize the video subsystem */ if ((flags & SDL_INIT_VIDEO)){ #if !SDL_VIDEO_DISABLED if (SDL_PrivateShouldInitSubsystem(SDL_INIT_VIDEO)) { if (SDL_VideoInit(NULL) < 0) { return (-1); } } SDL_PrivateSubsystemRefCountIncr(SDL_INIT_VIDEO); #else return SDL_SetError("SDL not built with video support"); #endif } ..... }
我們只看一下視訊子系統的初始化:
/* * Initialize the video and event subsystems -- determine native pixel format */ int SDL_VideoInit(const char *driver_name) { ... /* Select the proper video driver */ index = 0; video = NULL; if (driver_name == NULL) { driver_name = SDL_getenv("SDL_VIDEODRIVER"); } if (driver_name != NULL) { for (i = 0; bootstrap[i]; ++i) { if (SDL_strncasecmp(bootstrap[i]->name, driver_name, SDL_strlen(driver_name)) == 0) { if (bootstrap[i]->available()) { video = bootstrap[i]->create(index); break; } } } } else { for (i = 0; bootstrap[i]; ++i) { if (bootstrap[i]->available()) { video = bootstrap[i]->create(index); if (video != NULL) { break; } } } } ... }
著重看一下視訊驅動的選擇,呼叫此函式的引數傳進來的為NULL,進來之後會通過呼叫SDL_getenv獲取driver_name,但是看了一下windows的環境變數中沒有叫做SDL_VIDEODRIVER的。因此會接著往下走,else裡面會把驅動陣列從頭到尾遍歷一遍,只要找到一個available的元素,即使用此驅動。看一下驅動陣列的定義:
/* Available video drivers.
 * SDL_VideoInit() walks this list in order and uses the first entry whose
 * available() probe succeeds; which entries are compiled in is decided by
 * the SDL_VIDEO_DRIVER_* macros from the per-platform SDL_config_*.h. */
static VideoBootStrap *bootstrap[] = {
#if SDL_VIDEO_DRIVER_COCOA      /* macOS */
    &COCOA_bootstrap,
#endif
#if SDL_VIDEO_DRIVER_X11
    &X11_bootstrap,
#endif
#if SDL_VIDEO_DRIVER_MIR
    &MIR_bootstrap,
#endif
#if SDL_VIDEO_DRIVER_WAYLAND
    &Wayland_bootstrap,
#endif
#if SDL_VIDEO_DRIVER_VIVANTE
    &VIVANTE_bootstrap,
#endif
#if SDL_VIDEO_DRIVER_DIRECTFB
    &DirectFB_bootstrap,
#endif
#if SDL_VIDEO_DRIVER_WINDOWS    /* Windows */
    &WINDOWS_bootstrap,
#endif
#if SDL_VIDEO_DRIVER_WINRT      /* Windows RT */
    &WINRT_bootstrap,
#endif
#if SDL_VIDEO_DRIVER_HAIKU
    &HAIKU_bootstrap,
#endif
#if SDL_VIDEO_DRIVER_PANDORA
    &PND_bootstrap,
#endif
#if SDL_VIDEO_DRIVER_UIKIT      /* iPhone/iOS */
    &UIKIT_bootstrap,
#endif
#if SDL_VIDEO_DRIVER_ANDROID
    &Android_bootstrap,
#endif
#if SDL_VIDEO_DRIVER_PSP
    &PSP_bootstrap,
#endif
#if SDL_VIDEO_DRIVER_KMSDRM
    &KMSDRM_bootstrap,
#endif
#if SDL_VIDEO_DRIVER_RPI
    &RPI_bootstrap,
#endif
#if SDL_VIDEO_DRIVER_NACL
    &NACL_bootstrap,
#endif
#if SDL_VIDEO_DRIVER_EMSCRIPTEN
    &Emscripten_bootstrap,
#endif
#if SDL_VIDEO_DRIVER_QNX
    &QNX_bootstrap,
#endif
#if SDL_VIDEO_DRIVER_DUMMY
    &DUMMY_bootstrap,
#endif
    NULL
};
此陣列中的元素由巨集定義來決定其加入與否,SDL_VIDEO_DRIVER_WINDOWS 是在SDL_config_windows.h中被定義的,此標頭檔案在SDL_config.h中被引用:
/* Add any platform that doesn't build using the configure system. */ #if defined(__WIN32__) #include "SDL_config_windows.h" #elif defined(__WINRT__) #include "SDL_config_winrt.h" #elif defined(__MACOSX__) #include "SDL_config_macosx.h" #elif defined(__IPHONEOS__) #include "SDL_config_iphoneos.h" #elif defined(__ANDROID__) #include "SDL_config_android.h" #elif defined(__PSP__) #include "SDL_config_psp.h" #else /* This is a minimal configuration just to get SDL running on new platforms */ #include "SDL_config_minimal.h" #endif /* platform config */
可見不同平臺中會引用對應的標頭檔案,win32會引用SDL_config_windows.h,然後會選擇對應的驅動程式,在SDL_VIDEO_DRIVER_WINDOWS 前面的幾個巨集定義除了MAC之外,對其餘的都不熟悉,暫且認定,這些巨集定義在windows中不會被定義。
video = bootstrap[i]->create(index);
選擇windows驅動之後,呼叫create函式進行建立,create函式指向的是如下函式:
static SDL_VideoDevice * WIN_CreateDevice(int devindex) { .... /* Set the function pointers */ device->VideoInit = WIN_VideoInit; device->VideoQuit = WIN_VideoQuit; device->GetDisplayBounds = WIN_GetDisplayBounds; device->GetDisplayUsableBounds = WIN_GetDisplayUsableBounds; device->GetDisplayDPI = WIN_GetDisplayDPI; device->GetDisplayModes = WIN_GetDisplayModes; device->SetDisplayMode = WIN_SetDisplayMode; device->PumpEvents = WIN_PumpEvents; device->CreateSDLWindow = WIN_CreateWindow; device->CreateSDLWindowFrom = WIN_CreateWindowFrom; device->SetWindowTitle = WIN_SetWindowTitle; device->SetWindowIcon = WIN_SetWindowIcon; device->SetWindowPosition = WIN_SetWindowPosition; device->SetWindowSize = WIN_SetWindowSize; device->GetWindowBordersSize = WIN_GetWindowBordersSize; device->SetWindowOpacity = WIN_SetWindowOpacity; device->ShowWindow = WIN_ShowWindow; device->HideWindow = WIN_HideWindow; device->RaiseWindow = WIN_RaiseWindow; device->MaximizeWindow = WIN_MaximizeWindow; device->MinimizeWindow = WIN_MinimizeWindow; device->RestoreWindow = WIN_RestoreWindow; device->SetWindowBordered = WIN_SetWindowBordered; device->SetWindowResizable = WIN_SetWindowResizable; device->SetWindowFullscreen = WIN_SetWindowFullscreen; device->SetWindowGammaRamp = WIN_SetWindowGammaRamp; device->GetWindowGammaRamp = WIN_GetWindowGammaRamp; device->SetWindowGrab = WIN_SetWindowGrab; device->DestroyWindow = WIN_DestroyWindow; device->GetWindowWMInfo = WIN_GetWindowWMInfo; device->CreateWindowFramebuffer = WIN_CreateWindowFramebuffer; device->UpdateWindowFramebuffer = WIN_UpdateWindowFramebuffer; device->DestroyWindowFramebuffer = WIN_DestroyWindowFramebuffer; device->OnWindowEnter = WIN_OnWindowEnter; device->SetWindowHitTest = WIN_SetWindowHitTest; device->shape_driver.CreateShaper = Win32_CreateShaper; device->shape_driver.SetWindowShape = Win32_SetWindowShape; device->shape_driver.ResizeWindowShape = Win32_ResizeWindowShape; #if SDL_VIDEO_OPENGL_WGL device->GL_LoadLibrary = 
WIN_GL_LoadLibrary; device->GL_GetProcAddress = WIN_GL_GetProcAddress; device->GL_UnloadLibrary = WIN_GL_UnloadLibrary; device->GL_CreateContext = WIN_GL_CreateContext; device->GL_MakeCurrent = WIN_GL_MakeCurrent; device->GL_SetSwapInterval = WIN_GL_SetSwapInterval; device->GL_GetSwapInterval = WIN_GL_GetSwapInterval; device->GL_SwapWindow = WIN_GL_SwapWindow; device->GL_DeleteContext = WIN_GL_DeleteContext; #elif SDL_VIDEO_OPENGL_EGL /* Use EGL based functions */ device->GL_LoadLibrary = WIN_GLES_LoadLibrary; device->GL_GetProcAddress = WIN_GLES_GetProcAddress; device->GL_UnloadLibrary = WIN_GLES_UnloadLibrary; device->GL_CreateContext = WIN_GLES_CreateContext; device->GL_MakeCurrent = WIN_GLES_MakeCurrent; device->GL_SetSwapInterval = WIN_GLES_SetSwapInterval; device->GL_GetSwapInterval = WIN_GLES_GetSwapInterval; device->GL_SwapWindow = WIN_GLES_SwapWindow; device->GL_DeleteContext = WIN_GLES_DeleteContext; #endif #if SDL_VIDEO_VULKAN device->Vulkan_LoadLibrary = WIN_Vulkan_LoadLibrary; device->Vulkan_UnloadLibrary = WIN_Vulkan_UnloadLibrary; device->Vulkan_GetInstanceExtensions = WIN_Vulkan_GetInstanceExtensions; device->Vulkan_CreateSurface = WIN_Vulkan_CreateSurface; #endif device->StartTextInput = WIN_StartTextInput; device->StopTextInput = WIN_StopTextInput; device->SetTextInputRect = WIN_SetTextInputRect; device->SetClipboardText = WIN_SetClipboardText; device->GetClipboardText = WIN_GetClipboardText; device->HasClipboardText = WIN_HasClipboardText; device->free = WIN_DeleteDevice; ... }
最終得到了我們想要的,此函式裡面指定了特定平臺下需要的功能函式。在接下來的流程中,基本所有的特定平臺的功能函式都來源於此。可以發現,在SDL_config_windows.h中,定義了多個和OpenGL相關的巨集定義:
/* Enable OpenGL support.
 * NOTE: the original transcription fused each macro name with its value
 * ("#define SDL_VIDEO_OPENGL1"), which would define the wrong macro with
 * no value; SDL_config_windows.h actually defines each one to 1. */
#ifndef SDL_VIDEO_OPENGL
#define SDL_VIDEO_OPENGL 1
#endif
#ifndef SDL_VIDEO_OPENGL_WGL
#define SDL_VIDEO_OPENGL_WGL 1
#endif
#ifndef SDL_VIDEO_RENDER_OGL
#define SDL_VIDEO_RENDER_OGL 1
#endif
#ifndef SDL_VIDEO_RENDER_OGL_ES2
#define SDL_VIDEO_RENDER_OGL_ES2 1
#endif
#ifndef SDL_VIDEO_OPENGL_ES2
#define SDL_VIDEO_OPENGL_ES2 1
#endif
#ifndef SDL_VIDEO_OPENGL_EGL
#define SDL_VIDEO_OPENGL_EGL 1
#endif
而WIN_CreateDevice中會優先判斷SDL_VIDEO_OPENGL_WGL,因此程式會使用WGL而不是EGL(關於EGL的簡單介紹)。本篇文章不會去詳細分析WGL,而是要分析GL ES,可以按照簡易播放器的流程繼續往下走,但是到使用WGL的地方會去分析EGL。
SDL_CreateWindow
此函式用於建立顯示視訊的窗體,下面是平臺相關的程式碼:
if (_this->CreateSDLWindow && _this->CreateSDLWindow(_this, window) < 0) { SDL_DestroyWindow(window); return NULL; }
CreateSDLWindow來自前面提到過的WIN_CreateDevice函式,指向的是如下函式:
int WIN_CreateWindow(_THIS, SDL_Window * window) { ... hwnd = CreateWindow(SDL_Appname, TEXT(""), style, x, y, w, h, parent, NULL, SDL_Instance, NULL);//此函式為windows API,建立一個windows視窗 ... /* The rest of this macro mess is for OpenGL or OpenGL ES windows */ #if SDL_VIDEO_OPENGL_ES2 if (_this->gl_config.profile_mask == SDL_GL_CONTEXT_PROFILE_ES #if SDL_VIDEO_OPENGL_WGL && (!_this->gl_data || WIN_GL_UseEGL(_this)) #endif /* SDL_VIDEO_OPENGL_WGL */ ) { #if SDL_VIDEO_OPENGL_EGL if (WIN_GLES_SetupWindow(_this, window) < 0) { WIN_DestroyWindow(_this, window); return -1; } return 0; #else return SDL_SetError("Could not create GLES window surface (EGL support not configured)"); #endif /* SDL_VIDEO_OPENGL_EGL */ } #endif /* SDL_VIDEO_OPENGL_ES2 */ #if SDL_VIDEO_OPENGL_WGL if (WIN_GL_SetupWindow(_this, window) < 0) { WIN_DestroyWindow(_this, window); return -1; } #else return SDL_SetError("Could not create GL window (WGL support not configured)"); #endif }
看一下WIN_GLES_SetupWindow 函式:
/* Set up an EGL window surface for a Win32 window.
 * Lazily loads the EGL library on first use, creates a surface for the
 * window's HWND, then restores the previously current window/context.
 * Returns 0 on success or -1 (via SDL_SetError) on failure.
 * Fix: the quoted snippet declared the parameter as the nonexistent type
 * "SDL_Windows"; the real SDL source uses SDL_Window. */
int
WIN_GLES_SetupWindow(_THIS, SDL_Window * window)
{
    /* The current context is lost in here; save it and reset it. */
    SDL_WindowData *windowdata = (SDL_WindowData *) window->driverdata;
    SDL_Window *current_win = SDL_GL_GetCurrentWindow();
    SDL_GLContext current_ctx = SDL_GL_GetCurrentContext();

    /* First window: load libEGL (and the GLES client library). */
    if (_this->egl_data == NULL) {
        if (SDL_EGL_LoadLibrary(_this, NULL, EGL_DEFAULT_DISPLAY, 0) < 0) {
            SDL_EGL_UnloadLibrary(_this);
            return -1;
        }
    }

    /* Create the GLES window surface */
    windowdata->egl_surface = SDL_EGL_CreateSurface(_this, (NativeWindowType)windowdata->hwnd);

    if (windowdata->egl_surface == EGL_NO_SURFACE) {
        return SDL_SetError("Could not create GLES window surface");
    }

    return WIN_GLES_MakeCurrent(_this, current_win, current_ctx);
}
如果egl資料為空,則進行EGL動態庫的載入,載入libEGL.dll之前,由註釋得知需要載入OpenGL庫才能正常工作。
/* EGL AND OpenGL ES support via ANGLE */ #define DEFAULT_EGL "libEGL.dll" #define DEFAULT_OGL_ES2 "libGLESv2.dll" int SDL_EGL_LoadLibrary(_THIS, const char *egl_path, NativeDisplayType native_display, EGLenum platform) { ... //載入GL ES path = DEFAULT_OGL_ES2; egl_dll_handle = SDL_LoadObject(path); ... //載入EGL if (path == NULL) { path = DEFAULT_EGL; } dll_handle = SDL_LoadObject(path); ... //載入EGL相關函式 /* Load new function pointers */ LOAD_FUNC(eglGetDisplay); LOAD_FUNC(eglInitialize); LOAD_FUNC(eglTerminate); LOAD_FUNC(eglGetProcAddress); LOAD_FUNC(eglChooseConfig); LOAD_FUNC(eglGetConfigAttrib); LOAD_FUNC(eglCreateContext); LOAD_FUNC(eglDestroyContext); LOAD_FUNC(eglCreatePbufferSurface); LOAD_FUNC(eglCreateWindowSurface); LOAD_FUNC(eglDestroySurface); LOAD_FUNC(eglMakeCurrent); LOAD_FUNC(eglSwapBuffers); LOAD_FUNC(eglSwapInterval); LOAD_FUNC(eglWaitNative); LOAD_FUNC(eglWaitGL); LOAD_FUNC(eglBindAPI); LOAD_FUNC(eglQueryString); LOAD_FUNC(eglGetError); ... /* Try the implementation-specific eglGetDisplay even if eglGetPlatformDisplay fails */ //1.獲取 EGL Display 物件 if (_this->egl_data->egl_display == EGL_NO_DISPLAY) { _this->egl_data->egl_display = _this->egl_data->eglGetDisplay(native_display); } if (_this->egl_data->egl_display == EGL_NO_DISPLAY) { return SDL_SetError("Could not get EGL display"); } //2.初始化與 EGLDisplay 之間的連線 if (_this->egl_data->eglInitialize(_this->egl_data->egl_display, NULL, NULL) != EGL_TRUE) { return SDL_SetError("Could not initialize EGL"); } ... }
看關於EGL的介紹中使用EGL繪圖的步驟, SDL_EGL_LoadLibrary中執行了1,2兩步。
接下來建立surface:
/* Create an EGL window surface for the given native window handle.
 * Chooses an EGLConfig first (step 3 of the EGL setup sequence), optionally
 * requests an sRGB colorspace, then calls eglCreateWindowSurface() (step 5).
 * Returns EGL_NO_SURFACE on failure. */
EGLSurface *
SDL_EGL_CreateSurface(_THIS, NativeWindowType nw)
{
    /* max 2 values plus terminator. */
    EGLint attribs[3];
    int attr = 0;

    EGLSurface * surface;

    /* Step 3: obtain a matching EGLConfig. */
    if (SDL_EGL_ChooseConfig(_this) != 0) {
        return EGL_NO_SURFACE;
    }

#if SDL_VIDEO_DRIVER_ANDROID
    {
        /* Android docs recommend doing this!
         * Ref: http://developer.android.com/reference/android/app/NativeActivity.html
         */
        EGLint format;
        _this->egl_data->eglGetConfigAttrib(_this->egl_data->egl_display,
                                            _this->egl_data->egl_config,
                                            EGL_NATIVE_VISUAL_ID, &format);

        ANativeWindow_setBuffersGeometry(nw, 0, 0, format);
    }
#endif
    if (_this->gl_config.framebuffer_srgb_capable) {
#ifdef EGL_KHR_gl_colorspace
        /* sRGB framebuffers need the EGL_KHR_gl_colorspace extension. */
        if (SDL_EGL_HasExtension(_this, SDL_EGL_DISPLAY_EXTENSION, "EGL_KHR_gl_colorspace")) {
            attribs[attr++] = EGL_GL_COLORSPACE_KHR;
            attribs[attr++] = EGL_GL_COLORSPACE_SRGB_KHR;
        } else
#endif
        {
            SDL_SetError("EGL implementation does not support sRGB system framebuffers");
            return EGL_NO_SURFACE;
        }
    }

    attribs[attr++] = EGL_NONE;

    /* Step 5: create the EGLSurface instance for the native window. */
    surface = _this->egl_data->eglCreateWindowSurface(
            _this->egl_data->egl_display,
            _this->egl_data->egl_config,
            nw, &attribs[0]);
    if (surface == EGL_NO_SURFACE) {
        SDL_EGL_SetError("unable to create an EGL window surface", "eglCreateWindowSurface");
    }
    return surface;
}
首先獲取EGLConfig 物件,然後建立EGLSurface
SDL_CreateRenderer
我們選擇的是OpenGL ES的driver:
#if SDL_VIDEO_RENDER_OGL_ES &GLES_RenderDriver, #endif
因此Render函式為OpenGL ES的driver中的Render函式,也就是GLES_CreateRenderer:
/* Render driver descriptor for the OpenGL ES 1.x backend: the creation
 * entry point plus capability info (name, flags, one supported texture
 * format, and no max texture width/height restrictions). */
SDL_RenderDriver GLES_RenderDriver = {
    GLES_CreateRenderer,
    {
        "opengles",
        (SDL_RENDERER_ACCELERATED | SDL_RENDERER_PRESENTVSYNC),
        1,
        {SDL_PIXELFORMAT_ABGR8888},
        0,
        0
    }
};
看一下Render函式的內容:
SDL_Renderer * GLES_CreateRenderer(SDL_Window * window, Uint32 flags) { SDL_Renderer *renderer; ... renderer->WindowEvent = GLES_WindowEvent; renderer->GetOutputSize = GLES_GetOutputSize; renderer->SupportsBlendMode = GLES_SupportsBlendMode; renderer->CreateTexture = GLES_CreateTexture; renderer->UpdateTexture = GLES_UpdateTexture; renderer->LockTexture = GLES_LockTexture; renderer->UnlockTexture = GLES_UnlockTexture; renderer->SetRenderTarget = GLES_SetRenderTarget; renderer->UpdateViewport = GLES_UpdateViewport; renderer->UpdateClipRect = GLES_UpdateClipRect; renderer->RenderClear = GLES_RenderClear; renderer->RenderDrawPoints = GLES_RenderDrawPoints; renderer->RenderDrawLines = GLES_RenderDrawLines; renderer->RenderFillRects = GLES_RenderFillRects; renderer->RenderCopy = GLES_RenderCopy; renderer->RenderCopyEx = GLES_RenderCopyEx; renderer->RenderReadPixels = GLES_RenderReadPixels; renderer->RenderPresent = GLES_RenderPresent; renderer->DestroyTexture = GLES_DestroyTexture; renderer->DestroyRenderer = GLES_DestroyRenderer; renderer->GL_BindTexture = GLES_BindTexture; renderer->GL_UnbindTexture = GLES_UnbindTexture; renderer->info = GLES_RenderDriver.info; renderer->info.flags = SDL_RENDERER_ACCELERATED; renderer->driverdata = data; renderer->window = window; data->context = SDL_GL_CreateContext(window); if (!data->context) { GLES_DestroyRenderer(renderer); goto error; } if (SDL_GL_MakeCurrent(window, data->context) < 0) { GLES_DestroyRenderer(renderer); goto error; } ... }
此函式指定了GL ES使用的一些渲染函式,執行了上面提到的EGL渲染步驟的4,6步。看一下SDL_GL_CreateContext函式的具體內容:
/* Create an OpenGL (ES) context for an SDL_WINDOW_OPENGL window.
 * Delegates to the video driver's GL_CreateContext hook and records the
 * resulting window/context pair as current (including in thread-local
 * storage). Returns NULL on failure. */
SDL_GLContext
SDL_GL_CreateContext(SDL_Window * window)
{
    SDL_GLContext ctx = NULL;

    CHECK_WINDOW_MAGIC(window, NULL);

    if (!(window->flags & SDL_WINDOW_OPENGL)) {
        SDL_SetError("The specified window isn't an OpenGL window");
        return NULL;
    }

    /* Driver hook — on Windows with GLES this is WIN_GLES_CreateContext(). */
    ctx = _this->GL_CreateContext(_this, window);

    /* Creating a context is assumed to make it current in the SDL driver. */
    if (ctx) {
        _this->current_glwin = window;
        _this->current_glctx = ctx;
        SDL_TLSSet(_this->current_glwin_tls, window, NULL);
        SDL_TLSSet(_this->current_glctx_tls, ctx, NULL);
    }
    return ctx;
}
其中的GL_CreateContext函式為Windows平臺下的GL ES版本的函式,由前面的WIN_CreateDevice指定的:
/* Create a GL context for a Win32 window while the EGL hooks are active.
 * If the requested profile is NOT an ES profile, the device was wired to
 * EGL needlessly: swap every GL_* hook back to its WGL counterpart and
 * create a WGL context instead; otherwise create an EGL context bound to
 * the window's previously created EGL surface. */
SDL_GLContext
WIN_GLES_CreateContext(_THIS, SDL_Window * window)
{
    SDL_GLContext context;
    SDL_WindowData *data = (SDL_WindowData *)window->driverdata;

#if SDL_VIDEO_OPENGL_WGL
    if (_this->gl_config.profile_mask != SDL_GL_CONTEXT_PROFILE_ES) {
        /* Switch to WGL based functions */
        WIN_GLES_UnloadLibrary(_this);
        _this->GL_LoadLibrary = WIN_GL_LoadLibrary;
        _this->GL_GetProcAddress = WIN_GL_GetProcAddress;
        _this->GL_UnloadLibrary = WIN_GL_UnloadLibrary;
        _this->GL_CreateContext = WIN_GL_CreateContext;
        _this->GL_MakeCurrent = WIN_GL_MakeCurrent;
        _this->GL_SetSwapInterval = WIN_GL_SetSwapInterval;
        _this->GL_GetSwapInterval = WIN_GL_GetSwapInterval;
        _this->GL_SwapWindow = WIN_GL_SwapWindow;
        _this->GL_DeleteContext = WIN_GL_DeleteContext;

        if (WIN_GL_LoadLibrary(_this, NULL) != 0) {
            return NULL;
        }

        return WIN_GL_CreateContext(_this, window);
    }
#endif
    context = SDL_EGL_CreateContext(_this, data->egl_surface);
    return context;
}
我們沒有使用WGL,因此最終呼叫的是SDL_EGL_CreateContext,上面提到的EGL渲染過程的第4和第6步驟最終在此函式中進行:
SDL_GLContext SDL_EGL_CreateContext(_THIS, EGLSurface egl_surface) { ... egl_context = _this->egl_data->eglCreateContext(_this->egl_data->egl_display, _this->egl_data->egl_config, share_context, attribs); if (egl_context == EGL_NO_CONTEXT) { SDL_EGL_SetError("Could not create EGL context", "eglCreateContext"); return NULL; } _this->egl_data->egl_swapinterval = 0; if (SDL_EGL_MakeCurrent(_this, egl_surface, egl_context) < 0) { /* Save the SDL error set by SDL_EGL_MakeCurrent */ char errorText[1024]; SDL_strlcpy(errorText, SDL_GetError(), SDL_arraysize(errorText)); /* Delete the context, which may alter the value returned by SDL_GetError() */ SDL_EGL_DeleteContext(_this, egl_context); /* Restore the SDL error */ SDL_SetError("%s", errorText); return NULL; } ... }
SDL_CreateTexture
從此函式開始就是EGL繪圖步驟的第七步驟,使用相關的gl*函式進行繪製。從表至裡的分析就不進行了,最終呼叫的建立texture函式為GLES_CreateTexture,我們看一下gl開頭的openGL相關函式:
static int GLES_CreateTexture(SDL_Renderer * renderer, SDL_Texture * texture) { GLES_RenderData *renderdata = (GLES_RenderData *) renderer->driverdata; ... renderdata->glGetError(); renderdata->glEnable(GL_TEXTURE_2D);//we need to enable texturing before drawing the scene /* Any nonzero unsigned integer may be used as a texture name. To avoid accidentally reusing names, consistently use glGenTextures() to provide unused texture names. */ renderdata->glGenTextures(1, &data->texture); result = renderdata->glGetError(); if (result != GL_NO_ERROR) { SDL_free(data); return GLES_SetError("glGenTextures()", result); } data->type = GL_TEXTURE_2D; /* no NPOV textures allowed in OpenGL ES (yet) */ texture_w = power_of_2(texture->w); texture_h = power_of_2(texture->h); data->texw = (GLfloat) texture->w / texture_w; data->texh = (GLfloat) texture->h / texture_h; data->format = format; data->formattype = type; scaleMode = GetScaleQuality(); /* glBindTexture(), both creates and uses texture objects. When a texture name is initially bound (used with glBindTexture()), a new texture object is created with default values for the texture image and texture properties */ renderdata->glBindTexture(data->type, data->texture); /* Sets various parameters that control how a texture is treated as it’s applied to a fragment or stored in a texture object */ renderdata->glTexParameteri(data->type, GL_TEXTURE_MIN_FILTER, scaleMode); renderdata->glTexParameteri(data->type, GL_TEXTURE_MAG_FILTER, scaleMode); renderdata->glTexParameteri(data->type, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); renderdata->glTexParameteri(data->type, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); /* Defines a two-dimensional texture, or a one-dimensional texture array. */ renderdata->glTexImage2D(data->type, 0, internalFormat, texture_w, texture_h, 0, format, type, NULL); /* Disable it if texture rendering operations are finished. 
*/ renderdata->glDisable(GL_TEXTURE_2D); result = renderdata->glGetError(); if (result != GL_NO_ERROR) { SDL_free(data); return GLES_SetError("glTexImage2D()", result); } texture->driverdata = data; return 0; }
最後成功建立了texture。
SDL_UpdateYUVTexture
萬事俱備,只欠東風,OpenGL texture建立完成後,就只等渲染了。SDL中的渲染函式為SDL_UpdateYUVTexture,我們還是直接看Open GL ES版本的渲染函式GLES_UpdateTexture:
static int GLES_UpdateTexture(SDL_Renderer * renderer, SDL_Texture * texture, const SDL_Rect * rect, const void *pixels, int pitch) { GLES_RenderData *renderdata = (GLES_RenderData *) renderer->driverdata; ... /* Reformat the texture data into a tightly packed array */ srcPitch = rect->w * SDL_BYTESPERPIXEL(texture->format); src = (Uint8 *)pixels; if (pitch != srcPitch) { blob = (Uint8 *)SDL_malloc(srcPitch * rect->h); if (!blob) { return SDL_OutOfMemory(); } src = blob; for (y = 0; y < rect->h; ++y) { SDL_memcpy(src, pixels, srcPitch); src += srcPitch; pixels = (Uint8 *)pixels + pitch; } src = blob; } /* Create a texture subimage with the supplied data */ renderdata->glGetError(); //enable texturing before drawing the scene renderdata->glEnable(data->type); /* When binding to a previously created texture object, that texture object becomes active. */ renderdata->glBindTexture(data->type, data->texture); /* it describes how the bitmap data is stored in computer memory */ renderdata->glPixelStorei(GL_UNPACK_ALIGNMENT, 1); /* Defines a two-dimensional texture image that replaces all or part of a contiguous subregion (in 2D, it’s simply a rectangle) of the current, existing two-dimensional texture image. */ renderdata->glTexSubImage2D(data->type, 0, rect->x, rect->y, rect->w, rect->h, data->format, data->formattype, src); renderdata->glDisable(data->type); SDL_free(blob); if (renderdata->glGetError() != GL_NO_ERROR) { return SDL_SetError("Failed to update texture"); } return 0; }
pixels為需要顯示的畫素資料,pitch為畫素間距。
SDL_RenderClear
還是隻看GL ES的最終呼叫,主要理解glClear和glClearColor這兩個函式,來自官方文件的解釋寫在註釋中了,glClear函式用來清除視窗,glClearColor用於設定清理視窗之後顯示的顏色。
/* Clear the whole render target to the renderer's current draw color.
 * glClearColor() establishes what color the window will be cleared to, and
 * glClear() actually clears it; the scissor test is temporarily disabled
 * so the clear covers the full target even when clipping is enabled. */
static int
GLES_RenderClear(SDL_Renderer * renderer)
{
    GLES_RenderData *data = (GLES_RenderData *) renderer->driverdata;

    GLES_ActivateRenderer(renderer);

    /* Draw color is stored as 0-255 bytes; GL wants 0.0-1.0 floats. */
    data->glClearColor((GLfloat) renderer->r * inv255f,
                       (GLfloat) renderer->g * inv255f,
                       (GLfloat) renderer->b * inv255f,
                       (GLfloat) renderer->a * inv255f);

    if (renderer->clipping_enabled) {
        data->glDisable(GL_SCISSOR_TEST);
    }

    data->glClear(GL_COLOR_BUFFER_BIT);

    if (renderer->clipping_enabled) {
        data->glEnable(GL_SCISSOR_TEST);
    }

    return 0;
}
SDL_RenderCopy
影象的繪製在這個函式中進行:
/* Draw a texture: map srcrect (in texture space) onto dstrect (in render
 * target space) as a two-triangle quad (GL_TRIANGLE_STRIP) with
 * per-vertex texture coordinates. */
static int
GLES_RenderCopy(SDL_Renderer * renderer, SDL_Texture * texture,
                const SDL_Rect * srcrect, const SDL_FRect * dstrect)
{
    GLES_RenderData *data = (GLES_RenderData *) renderer->driverdata;
    GLES_TextureData *texturedata = (GLES_TextureData *) texture->driverdata;
    GLfloat minx, miny, maxx, maxy;
    GLfloat minu, maxu, minv, maxv;
    GLfloat vertices[8];
    GLfloat texCoords[8];

    GLES_ActivateRenderer(renderer);

    data->glEnable(GL_TEXTURE_2D);

    data->glBindTexture(texturedata->type, texturedata->texture);

    /* Apply color/alpha modulation if the texture requests it. */
    if (texture->modMode) {
        GLES_SetColor(data, texture->r, texture->g, texture->b, texture->a);
    } else {
        GLES_SetColor(data, 255, 255, 255, 255);
    }

    GLES_SetBlendMode(data, texture->blendMode);

    /* Enable (or disable) the texture-coordinate vertex array. */
    GLES_SetTexCoords(data, SDL_TRUE);

    /* Destination rectangle corners in render-target coordinates. */
    minx = dstrect->x;
    miny = dstrect->y;
    maxx = dstrect->x + dstrect->w;
    maxy = dstrect->y + dstrect->h;

    /* Source rectangle as normalized texture coordinates; texw/texh
       compensate for the power-of-two padding of the GL texture. */
    minu = (GLfloat) srcrect->x / texture->w;
    minu *= texturedata->texw;
    maxu = (GLfloat) (srcrect->x + srcrect->w) / texture->w;
    maxu *= texturedata->texw;
    minv = (GLfloat) srcrect->y / texture->h;
    minv *= texturedata->texh;
    maxv = (GLfloat) (srcrect->y + srcrect->h) / texture->h;
    maxv *= texturedata->texh;

    vertices[0] = minx;
    vertices[1] = miny;
    vertices[2] = maxx;
    vertices[3] = miny;
    vertices[4] = minx;
    vertices[5] = maxy;
    vertices[6] = maxx;
    vertices[7] = maxy;

    texCoords[0] = minu;
    texCoords[1] = minv;
    texCoords[2] = maxu;
    texCoords[3] = minv;
    texCoords[4] = minu;
    texCoords[5] = maxv;
    texCoords[6] = maxu;
    texCoords[7] = maxv;

    /* Tell OpenGL where the vertex coordinate data lives... */
    data->glVertexPointer(2, GL_FLOAT, 0, vertices);
    /* ...and where the texture coordinate data lives... */
    data->glTexCoordPointer(2, GL_FLOAT, 0, texCoords);
    /* ...then draw the quad. */
    data->glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

    data->glDisable(GL_TEXTURE_2D);

    return 0;
}
SDL_RenderPresent
顧名思義,此函式用於顯示影象,他執行了EGL繪圖步驟的第8步:eglSwapBuffer(),切換 front buffer 和 back buffer 送顯,看一下GL ES的RenderPresent函式:
/* Present the rendered frame by swapping the window's buffers
 * (SDL_GL_SwapWindow dispatches to eglSwapBuffers on this path). */
static void
GLES_RenderPresent(SDL_Renderer * renderer)
{
    GLES_ActivateRenderer(renderer);
    SDL_GL_SwapWindow(renderer->window);
}
啟用Render之後就進行Swap,windows GL ES中的實現是在如下函式中:
WIN_GLES_SwapWindow
此函式在原始碼中搜索了半天沒有搜到,原來是用了巨集定義展開生成的,太隱蔽了。
SDL_EGL_SwapWindow_impl(WIN) #define SDL_EGL_SwapWindow_impl(BACKEND) int \ BACKEND ## _GLES_SwapWindow(_THIS, SDL_Window * window) \ {\ return SDL_EGL_SwapBuffers(_this, ((SDL_WindowData *) window->driverdata)->egl_surface);\ }
最終呼叫了EGL的eglSwapBuffers函式:
/* Swap the given EGL surface's front/back buffers via eglSwapBuffers().
 * Returns 0 on success, or a negative SDL error code on failure. */
int
SDL_EGL_SwapBuffers(_THIS, EGLSurface egl_surface)
{
    if (!_this->egl_data->eglSwapBuffers(_this->egl_data->egl_display, egl_surface)) {
        return SDL_EGL_SetError("unable to show color buffer in an OS-native window", "eglSwapBuffers");
    }
    return 0;
}
SDL_PollEvent
此函式一看便知是用於偵聽各種事件的,原始碼不再細究。
接下來就是一些銷燬釋放的過程了。
SDL_DestroyTexture
銷燬texture通過呼叫glDeleteTextures來實現。
/* Destroy a GLES texture: delete the GL texture object with
 * glDeleteTextures() and free the driver-side bookkeeping (including any
 * staging pixel buffer). Safe to call when driverdata is already NULL. */
static void
GLES_DestroyTexture(SDL_Renderer * renderer, SDL_Texture * texture)
{
    GLES_RenderData *renderdata = (GLES_RenderData *) renderer->driverdata;
    GLES_TextureData *data = (GLES_TextureData *) texture->driverdata;

    GLES_ActivateRenderer(renderer);

    if (!data) {
        return;
    }
    if (data->texture) {
        renderdata->glDeleteTextures(1, &data->texture);
    }
    SDL_free(data->pixels);
    SDL_free(data);
    texture->driverdata = NULL;
}
SDL_DestroyRenderer
渲染物件的銷燬使用如下函式,上面出現過glDeleteFramebuffersOES類似的函式,沒有仔細研究,搜尋了openGL的官方文件,竟然沒有任何說明,看字面意思就是刪除相關快取的。
/* Tear down the GLES renderer: walk the framebuffer list deleting every
 * FBO, delete the GL context, then free the driver data and the renderer
 * struct itself. */
static void
GLES_DestroyRenderer(SDL_Renderer * renderer)
{
    GLES_RenderData *data = (GLES_RenderData *) renderer->driverdata;

    if (data) {
        if (data->context) {
            while (data->framebuffers) {
                GLES_FBOList *nextnode = data->framebuffers->next;
                data->glDeleteFramebuffersOES(1, &data->framebuffers->FBO);
                SDL_free(data->framebuffers);
                data->framebuffers = nextnode;
            }
            SDL_GL_DeleteContext(data->context);
        }
        SDL_free(data);
    }
    SDL_free(renderer);
}
然後刪除Context:
/* Delete a GL context on the Windows/EGL path, then unload the EGL and
 * GLES client libraries. */
void
WIN_GLES_DeleteContext(_THIS, SDL_GLContext context)
{
    SDL_EGL_DeleteContext(_this, context);
    WIN_GLES_UnloadLibrary(_this);
}
在SDL_EGL_DeleteContext中呼叫EGL銷燬函式eglDestroyContext,EGL繪圖的第11步。
/* Destroy an EGL context: unbind any current surface/context first, then
 * call eglDestroyContext(). A no-op when EGL was never loaded or the
 * context handle is NULL/EGL_NO_CONTEXT. */
void
SDL_EGL_DeleteContext(_THIS, SDL_GLContext context)
{
    EGLContext egl_context = (EGLContext) context;

    /* Clean up GLES and EGL */
    if (!_this->egl_data) {
        return;
    }

    if (egl_context != NULL && egl_context != EGL_NO_CONTEXT) {
        SDL_EGL_MakeCurrent(_this, NULL, NULL);
        _this->egl_data->eglDestroyContext(_this->egl_data->egl_display, egl_context);
    }
}
WIN_GLES_UnloadLibrary 函式實際上為SDL_EGL_UnloadLibrary函式:
#define WIN_GLES_UnloadLibrary SDL_EGL_UnloadLibrary void SDL_EGL_UnloadLibrary(_THIS) { if (_this->egl_data) { if (_this->egl_data->egl_display) { _this->egl_data->eglTerminate(_this->egl_data->egl_display); _this->egl_data->egl_display = NULL; } if (_this->egl_data->dll_handle) { SDL_UnloadObject(_this->egl_data->dll_handle); _this->egl_data->dll_handle = NULL; } if (_this->egl_data->egl_dll_handle) { SDL_UnloadObject(_this->egl_data->egl_dll_handle); _this->egl_data->egl_dll_handle = NULL; } SDL_free(_this->egl_data); _this->egl_data = NULL; } }
呼叫EGL終止函式,終止了與EGLDisplay之間的連線,也就是EGL繪圖的第12步,然後解除安裝相關的DLL。
SDL_DestroyWindow
呼叫windows函式 DestroyWindow銷燬窗體:
/* Destroy a Win32 window: release its device context, drop the
 * SDL_WindowData window property, and either DestroyWindow() the HWNDs
 * SDL created or — for foreign windows — restore the original window
 * procedure instead of destroying them. */
void
WIN_DestroyWindow(_THIS, SDL_Window * window)
{
    SDL_WindowData *data = (SDL_WindowData *) window->driverdata;

    if (data) {
        ReleaseDC(data->hwnd, data->hdc);
        RemoveProp(data->hwnd, TEXT("SDL_WindowData"));
        if (data->created) {
            DestroyWindow(data->hwnd);
            if (data->parent) {
                DestroyWindow(data->parent);
            }
        } else {
            /* Restore any original event handler... */
            if (data->wndproc != NULL) {
#ifdef GWLP_WNDPROC
                SetWindowLongPtr(data->hwnd, GWLP_WNDPROC,
                                 (LONG_PTR) data->wndproc);
#else
                SetWindowLong(data->hwnd, GWL_WNDPROC,
                              (LONG_PTR) data->wndproc);
#endif
            }
        }
        SDL_free(data);
    }
    window->driverdata = NULL;
}
SDL_Quit
只看視訊系統的Quit函式:
/* Shut down the Windows video subsystem: display modes, keyboard, mouse. */
void
WIN_VideoQuit(_THIS)
{
    WIN_QuitModes(_this);
    WIN_QuitKeyboard(_this);
    WIN_QuitMouse(_this);
}