2013-02-08 3 views
1

ffmpeg 및 OpenGL + SDL을 사용하여 비디오 파일을 재생하려고 합니다. 재생 속도가 매우 느리고 깜박입니다. 이 코드는 여러 블로그/사이트에서 모아 붙인 것이라 실제로 어떤 일이 일어나는지 잘 모르겠습니다. 코드가 길어서 유감이지만 이것도 최소화한 버전입니다. 제 실제 코드는 창 모드에서도 잘 재생되지 않는데, 어떻게든 아래 버전은 창 모드에서는 부드럽게 재생됩니다. 그러나 전체 화면에서는 느립니다. (제목: OpenGL + ffmpeg — 전체 화면 모드에서 느림)

#ifndef INT64_C 
#define INT64_C(c) (int64_t)(c) 
#define UINT64_C(c) (uint64_t)(c) 
#endif 

#include <stdio.h>   /* fprintf — used by av_init() error reporting */
#include <stdlib.h>  /* exit */

extern "C" { 
#include <libavcodec/avcodec.h> 
#include <libavformat/avformat.h> 
#include <libswscale/swscale.h> 
} 
#include <SDL.h> 
#include <GL/gl.h> 

int fullscreen = 1, videoStream = -1, frameFinished=0; 
const PixelFormat CONV_FORMAT = PIX_FMT_RGB24; 
const char *fname = "moviesample.mp4"; 
AVFormatContext *pFormatCtx = NULL; 
AVCodecContext *pCodecCtx = NULL; 
AVCodec   *pCodec = NULL; 
AVFrame   *pFrame = 0, *pFrameRGB = 0; 
AVPacket  packet; 
AVDictionary *optionsDict = NULL; 
struct SwsContext *sws_ctx = NULL; 
GLuint texture_video; 

void av_init(); 
void draw_frame(); 

int main(int argc, const char **argv) { 
    SDL_Event event; 

    av_init(); 

    uint16_t width = fullscreen ? 1600 : pCodecCtx->width; 
    uint16_t height = fullscreen ? 900 : pCodecCtx->height; 

    SDL_Init(SDL_INIT_EVERYTHING); 
    SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1); 
    SDL_SetVideoMode(width, height, 32, 
     SDL_OPENGL | SDL_HWPALETTE | SDL_HWSURFACE | SDL_HWACCEL | 
     (fullscreen ? SDL_FULLSCREEN : 0) 
    ); 

    glEnable(GL_TEXTURE_2D); 
    glClearColor(0.0f, 0.4f, 0.4f, 0.0f); 
    glViewport(0, 0, width, height); 
    glMatrixMode(GL_PROJECTION); 
    glLoadIdentity(); 
    glMatrixMode(GL_MODELVIEW); 
    glLoadIdentity(); 
    glShadeModel(GL_SMOOTH); 
    glGenTextures(1, &texture_video); 
    glBindTexture(GL_TEXTURE_2D, texture_video); 
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, pCodecCtx->width, pCodecCtx->height, 
     0, GL_RGB, GL_UNSIGNED_BYTE, pFrameRGB->data[0]); 
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); 
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); 

    sws_ctx = sws_getCachedContext(sws_ctx, pCodecCtx->width, pCodecCtx->height, 
     pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, CONV_FORMAT, 
     SWS_BICUBIC, NULL, NULL, NULL); 

    while (1) { 

    draw_frame(); 

    SDL_GL_SwapBuffers(); 

    SDL_PollEvent(&event); 

    switch(event.type) { 
     case SDL_QUIT: 
     SDL_Quit(); 
     exit(0); 
     break; 
     case SDL_KEYDOWN: 
     if (event.key.keysym.sym == SDLK_ESCAPE) { 
      SDL_Quit(); 
      exit(0); 
     } 
     break; 
     default: 
     break; 
    } 
    } 
    return 0; 
} 

void draw_frame() { 
    if (av_read_frame(pFormatCtx, &packet)>=0) { 
    if(packet.stream_index==videoStream) { 
     avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet); 
     if(frameFinished) { 
     sws_scale (sws_ctx, (uint8_t const * const *)pFrame->data, 
      pFrame->linesize, 0, pCodecCtx->height, pFrameRGB->data, 
      pFrameRGB->linesize); 
     glBindTexture(GL_TEXTURE_2D, texture_video); 
     glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); 
     glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); 
     glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, pCodecCtx->width, 
      pCodecCtx->height, GL_RGB, GL_UNSIGNED_BYTE, pFrameRGB->data[0]); 
     } 

     glClear(GL_COLOR_BUFFER_BIT); 
     glScalef(1.0f, -1.0f, 1.0f); 
     glBegin(GL_QUADS); 
     glTexCoord2f(0.0f, 0.0f); 
     glVertex3f(-1.0f, -1.0f, 0.0f); 
     glTexCoord2f(0.0f, 1.0f); 
     glVertex3f(-1.0f, 1.0f, 0.0f); 
     glTexCoord2f(1.0f, 1.0f); 
     glVertex3f(1.0f, 1.0f, 0.0f); 
     glTexCoord2f(1.0f, 0.0f); 
     glVertex3f(1.0f, -1.0f, 0.0f); 
     glEnd(); 
     glScalef(1.0f, -1.0f, 1.0f); 

    } 
    av_free_packet(&packet); 
    } else { 
    av_seek_frame(pFormatCtx, videoStream, 0, AVSEEK_FLAG_FRAME); 
    } 

} 

void av_init() { 
    av_register_all(); 
    avformat_open_input(&pFormatCtx, fname, NULL, NULL); 
    avformat_find_stream_info(pFormatCtx, NULL); 
    for(uint8_t i=0; i<pFormatCtx->nb_streams; i++) 
    if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO) { 
     videoStream=i; 
     break; 
    } 
    pCodecCtx = pFormatCtx->streams[videoStream]->codec; 
    pCodec = avcodec_find_decoder(pCodecCtx->codec_id); 
    avcodec_open2(pCodecCtx, pCodec, &optionsDict); 
    pFrame = avcodec_alloc_frame(); 
    pFrameRGB = avcodec_alloc_frame(); 
    int bytes = avpicture_get_size(CONV_FORMAT, pCodecCtx->width, 
    pCodecCtx->height);   
    uint8_t *video_buffer = (uint8_t*)av_malloc(bytes * sizeof(uint8_t)); 
    avpicture_fill((AVPicture *)pFrameRGB, video_buffer, CONV_FORMAT, 
     pCodecCtx->width, pCodecCtx->height); 
} 
+0

당신의 glBegin/glEnd 호출은 루프를 돌 때마다 똑같은 매개 변수를 사용합니다. 즉, 매 프레임마다 버텍스 데이터를 CPU에서 GPU로 업로드하는 오버헤드가 생깁니다. GPU에 버텍스 버퍼 두 개(위치용 하나, 텍스처 좌표용 하나)를 만들어 두면 매 프레임 낭비되는 복사가 없어집니다. –

답변

4

아마도 VSYNC가 켜져 있어서 SDL_GL_SwapBuffers()가 매 프레임 약 16ms씩 블록되고 있을 것입니다.

창 모드에서 같은 효과를 재현해 보려면 메인 while(1) 루프 끝 부분에 SDL_Delay(16)을 추가해 보십시오. 그리고 메인 루프당 한 번만 펌핑하며 프레임이 나오기를 바라는 대신, libav가 다음 프레임을 내놓을 때까지 펌핑하도록 하십시오.

다음과 같이 draw_frame()을 다시 작성하면 됩니다:

// g++ main.cpp `pkg-config sdl gl libswscale libavcodec libavformat --libs --cflags` && SDL_VIDEO_FULLSCREEN_HEAD=0 ./a.out 
#ifndef INT64_C 
#define INT64_C(c) (int64_t)(c) 
#define UINT64_C(c) (uint64_t)(c) 
#endif 

extern "C" { 
#include <libavcodec/avcodec.h> 
#include <libavformat/avformat.h> 
#include <libswscale/swscale.h> 
} 
#include <SDL.h> 
#include <GL/gl.h> 

int fullscreen = 1, videoStream = -1, frameFinished=0; 
const PixelFormat CONV_FORMAT = PIX_FMT_RGB24; 
const char *fname = "/home/genpfault/vid.mpg"; 
AVFormatContext *pFormatCtx = NULL; 
AVCodecContext *pCodecCtx = NULL; 
AVCodec   *pCodec = NULL; 
AVFrame   *pFrame = 0, *pFrameRGB = 0; 
AVPacket  packet; 
AVDictionary *optionsDict = NULL; 
struct SwsContext *sws_ctx = NULL; 
GLuint texture_video; 

void av_init(); 
void next_frame(); 

int main(int argc, const char **argv) { 
    SDL_Event event; 

    av_init(); 

    uint16_t width = fullscreen ? 1920 : pCodecCtx->width; 
    uint16_t height = fullscreen ? 1200 : pCodecCtx->height; 

    SDL_Init(SDL_INIT_EVERYTHING); 
    SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1); 
    SDL_SetVideoMode(width, height, 32, 
     SDL_OPENGL | 
     (fullscreen ? SDL_FULLSCREEN : 0) 
    ); 

    glEnable(GL_TEXTURE_2D); 
    glClearColor(0.0f, 0.4f, 0.4f, 0.0f); 
    glViewport(0, 0, width, height); 
    glMatrixMode(GL_PROJECTION); 
    glLoadIdentity(); 
    glMatrixMode(GL_MODELVIEW); 
    glLoadIdentity(); 
    glShadeModel(GL_SMOOTH); 
    glGenTextures(1, &texture_video); 
    glBindTexture(GL_TEXTURE_2D, texture_video); 
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, pCodecCtx->width, pCodecCtx->height, 
     0, GL_RGB, GL_UNSIGNED_BYTE, pFrameRGB->data[0]); 
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); 
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); 

    sws_ctx = sws_getCachedContext(sws_ctx, pCodecCtx->width, pCodecCtx->height, 
     pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, CONV_FORMAT, 
     SWS_BICUBIC, NULL, NULL, NULL); 

    while (1) { 

    while(SDL_PollEvent(&event)) 
    { 
     switch(event.type) { 
      case SDL_QUIT: 
      SDL_Quit(); 
      exit(0); 
      break; 
      case SDL_KEYDOWN: 
      if (event.key.keysym.sym == SDLK_ESCAPE) { 
       SDL_Quit(); 
       exit(0); 
      } 
      break; 
      default: 
      break; 
     } 
    } 

    next_frame(); 

    glClear(GL_COLOR_BUFFER_BIT); 
    glBindTexture(GL_TEXTURE_2D, texture_video); 
    glScalef(1.0f, -1.0f, 1.0f); 
    glBegin(GL_QUADS); 
    glTexCoord2f(0.0f, 0.0f); 
    glVertex3f(-1.0f, -1.0f, 0.0f); 
    glTexCoord2f(0.0f, 1.0f); 
    glVertex3f(-1.0f, 1.0f, 0.0f); 
    glTexCoord2f(1.0f, 1.0f); 
    glVertex3f(1.0f, 1.0f, 0.0f); 
    glTexCoord2f(1.0f, 0.0f); 
    glVertex3f(1.0f, -1.0f, 0.0f); 
    glEnd(); 
    glScalef(1.0f, -1.0f, 1.0f); 

    SDL_GL_SwapBuffers(); 
    } 
    return 0; 
} 

void next_frame() 
{ 
    while(true) 
    { 
     if(av_read_frame(pFormatCtx, &packet) >= 0) 
     { 
      if(packet.stream_index == videoStream) 
      { 
       avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet); 
       if(frameFinished) 
       { 
        sws_scale (sws_ctx, (uint8_t const * const *)pFrame->data, 
        pFrame->linesize, 0, pCodecCtx->height, pFrameRGB->data, 
        pFrameRGB->linesize); 
        glBindTexture(GL_TEXTURE_2D, texture_video); 
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); 
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); 
        glPixelStorei(GL_UNPACK_ALIGNMENT, 1); 
        glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, pCodecCtx->width, 
        pCodecCtx->height, GL_RGB, GL_UNSIGNED_BYTE, pFrameRGB->data[0]); 
        break; 
       } 
      } 
      av_free_packet(&packet); 
     } 
     else 
     { 
      av_seek_frame(pFormatCtx, videoStream, 0, AVSEEK_FLAG_FRAME); 
     }   
    } 
} 

void av_init() { 
    av_register_all(); 
    avformat_open_input(&pFormatCtx, fname, NULL, NULL); 
    avformat_find_stream_info(pFormatCtx, NULL); 
    for(uint8_t i=0; i<pFormatCtx->nb_streams; i++) 
    if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO) { 
     videoStream=i; 
     break; 
    } 
    pCodecCtx = pFormatCtx->streams[videoStream]->codec; 
    pCodec = avcodec_find_decoder(pCodecCtx->codec_id); 
    avcodec_open2(pCodecCtx, pCodec, &optionsDict); 
    pFrame = avcodec_alloc_frame(); 
    pFrameRGB = avcodec_alloc_frame(); 
    int bytes = avpicture_get_size(CONV_FORMAT, pCodecCtx->width, 
    pCodecCtx->height);   
    uint8_t *video_buffer = (uint8_t*)av_malloc(bytes * sizeof(uint8_t)); 
    avpicture_fill((AVPicture *)pFrameRGB, video_buffer, CONV_FORMAT, 
     pCodecCtx->width, pCodecCtx->height); 
} 
+0

감사합니다! 작동합니다 :) 그런데 stream이 videoStream이 아니거나 frameFinished가 true인 경우 'next_frame'에 패킷 누수가 있지 않습니까? 이 조건들에서는 'av_free_packet' 호출을 건너뜁니다. – fusha

+0

어, 있을 수 있겠네요 :) 저는 'libav' 쪽은 잘 아는 사람이 아닙니다. – genpfault

관련 문제