FFmpeg in Practice (2): Playing YUV with SDL, and Building a Simple Player with FFmpeg


First, let's play raw YUV data with SDL.

Some basic concepts first.

Basic SDL functions:
[Initialization]
* SDL_Init(): initialize SDL.
* SDL_CreateWindow(): create a window.
* SDL_CreateRenderer(): create a renderer bound to the window.
* SDL_CreateTexture(): create a texture.

[Render loop]
* SDL_UpdateTexture(): upload pixel data to the texture.
* SDL_RenderCopy(): copy the texture to the renderer.
* SDL_RenderPresent(): present the rendered frame.
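To see how these calls fit together before the real player below, here is a minimal skeleton. This is my own sketch, not part of the original project: the <SDL2/SDL.h> include path, the window title and the 640x360 size are placeholder assumptions, and the texture is never filled with real YUV data.

#include <stdio.h>
#include <SDL2/SDL.h>

int main(int argc, char* argv[]) {
    // [Initialization]: SDL_Init -> window -> renderer -> texture
    if (SDL_Init(SDL_INIT_VIDEO)) {
        printf("Could not initialize SDL - %s\n", SDL_GetError());
        return -1;
    }
    SDL_Window*   win = SDL_CreateWindow("sdl-skeleton", SDL_WINDOWPOS_UNDEFINED,
                                         SDL_WINDOWPOS_UNDEFINED, 640, 360, 0);
    SDL_Renderer* ren = SDL_CreateRenderer(win, -1, 0);
    SDL_Texture*  tex = SDL_CreateTexture(ren, SDL_PIXELFORMAT_IYUV,
                                          SDL_TEXTUREACCESS_STREAMING, 640, 360);

    // [Render loop]: per frame, upload pixels -> copy texture -> present
    for (int i = 0; i < 100; i++) {
        // SDL_UpdateTexture(tex, NULL, frame_buffer, 640);  // frame_buffer would hold one YUV frame
        SDL_RenderClear(ren);
        SDL_RenderCopy(ren, tex, NULL, NULL);
        SDL_RenderPresent(ren);
        SDL_Delay(40);
    }

    SDL_DestroyTexture(tex);
    SDL_DestroyRenderer(ren);
    SDL_DestroyWindow(win);
    SDL_Quit();
    return 0;
}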

The fseek function in C
int fseek( FILE *stream, long offset, int origin );
The first parameter, stream, is the file pointer.
The second parameter, offset, is the offset: a positive value seeks forward, a negative value seeks backward.
The third parameter, origin, sets where in the file the offset is measured from; possible values are SEEK_SET, SEEK_CUR and SEEK_END:
SEEK_SET: beginning of the file
SEEK_CUR: current position
SEEK_END: end of the file
SEEK_SET, SEEK_CUR and SEEK_END are defined as 0, 1 and 2, respectively.
In short:
fseek(fp,100L,0); moves fp to 100 bytes past the beginning of the file;
fseek(fp,100L,1); moves fp forward 100 bytes from its current position;
fseek(fp,-100L,2); moves fp back to 100 bytes before the end of the file.
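In the player below, fseek is exactly what rewinds the YUV file to the start once the end is reached. Here is a small standalone sketch of that pattern; it only assumes the same output.yuv file that the player opens.

#include <stdio.h>

int main(void) {
    FILE* fp = fopen("output.yuv", "rb");    // the YUV file used by the player below
    if (fp == NULL) return -1;

    fseek(fp, 0, SEEK_END);                  // SEEK_END: jump to the end of the file...
    long size = ftell(fp);                   // ...so ftell() reports the total size
    printf("file size: %ld bytes\n", size);

    fseek(fp, 0, SEEK_SET);                  // SEEK_SET: rewind to the beginning,
                                             // as the player does when it loops
    fclose(fp);
    return 0;
}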

Here is what it looks like when running (screenshot omitted):

Now the source code; the comments are fairly detailed.

#include <stdio.h>

extern "C"
{
#include "sdl/SDL.h"
};

const int bpp=12;

int screen_w=500,screen_h=500;
const int pixel_w=1920,pixel_h=1080;

unsigned char buffer[pixel_w*pixel_h*bpp/8];

//Refresh Event
#define REFRESH_EVENT  (SDL_USEREVENT + 1)
#define BREAK_EVENT  (SDL_USEREVENT + 2)

int thread_exit=0;

int refresh_video(void *opaque){
    thread_exit=0;
    while (!thread_exit) {
        SDL_Event event;
        event.type = REFRESH_EVENT;
        SDL_PushEvent(&event); // push the refresh event into the queue
        SDL_Delay(40);
    }
    thread_exit=0;
    //Break
    SDL_Event event;   // declare an event
    event.type = BREAK_EVENT;
    SDL_PushEvent(&event);
    return 0;
}

int main(int argc, char* argv[])
{
    if(SDL_Init(SDL_INIT_VIDEO)) {  // initialize the SDL video subsystem
        printf( "Could not initialize SDL - %s\n", SDL_GetError());
        return -1;
    }

    SDL_Window *screen;
    // SDL 2.0: create the window
    screen = SDL_CreateWindow("WS Video Play SDL2", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED,
        screen_w, screen_h, SDL_WINDOW_OPENGL|SDL_WINDOW_RESIZABLE);
    if(!screen) {
        printf("SDL: could not create window - exiting:%s\n",SDL_GetError());
        return -1;
    }

    // create a renderer bound to the window
    SDL_Renderer* sdlRenderer = SDL_CreateRenderer(screen, -1, 0);

    Uint32 pixformat=0;
    //IYUV: Y + U + V  (3 planes)
    //YV12: Y + V + U  (3 planes)
    pixformat= SDL_PIXELFORMAT_IYUV;

    // create the texture
    SDL_Texture* sdlTexture = SDL_CreateTexture(sdlRenderer, pixformat, SDL_TEXTUREACCESS_STREAMING, pixel_w, pixel_h);

    FILE *fp=NULL;
    // the YUV file decoded in the previous post
    fp=fopen("output.yuv","rb+");
    if(fp==NULL){
        printf("cannot open this file\n");
        return -1;
    }

    // display rectangle inside the window
    SDL_Rect sdlRect;

    SDL_Thread *refresh_thread = SDL_CreateThread(refresh_video,NULL,NULL); // create the refresh thread

    SDL_Event event;
    while(1){
        // wait for (listen to) events
        SDL_WaitEvent(&event);
        if(event.type==REFRESH_EVENT){
            if (fread(buffer, 1, pixel_w*pixel_h*bpp/8, fp) != pixel_w*pixel_h*bpp/8){
                // Loop: rewind to the beginning of the file
                fseek(fp, 0, SEEK_SET);
                fread(buffer, 1, pixel_w*pixel_h*bpp/8, fp);
            }

            // upload the frame data to the texture
            SDL_UpdateTexture( sdlTexture, NULL, buffer, pixel_w);

            //FIX: if the window has been resized
            sdlRect.x = 0;
            sdlRect.y = 0;
            sdlRect.w = screen_w;
            sdlRect.h = screen_h;

            SDL_RenderClear( sdlRenderer );
            // copy the texture to the renderer
            SDL_RenderCopy( sdlRenderer, sdlTexture, NULL, &sdlRect);
            // present
            SDL_RenderPresent( sdlRenderer );
        }else if(event.type==SDL_WINDOWEVENT){
            // SDL_WINDOWEVENT: the window was resized (the player window is resizable)
            SDL_GetWindowSize(screen,&screen_w,&screen_h);
        }else if(event.type==SDL_QUIT){
            thread_exit=1;
        }else if(event.type==BREAK_EVENT){
            break;
        }
    }
    fclose(fp);
    SDL_Quit(); // shut down SDL
    return 0;
}
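One detail in this code worth spelling out: with bpp=12, the buffer holds exactly one YUV420p frame, since that format stores 12 bits per pixel (a full-resolution Y plane plus quarter-resolution U and V planes), so each REFRESH_EVENT reads and displays one frame. A quick standalone check of the arithmetic for the hard-coded 1920x1080 resolution:

#include <stdio.h>

int main(void) {
    const int pixel_w = 1920, pixel_h = 1080, bpp = 12;
    // YUV420p: full-resolution Y plane + quarter-resolution U and V planes
    long y_bytes  = (long)pixel_w * pixel_h;
    long uv_bytes = (long)(pixel_w / 2) * (pixel_h / 2) * 2;
    printf("Y + U + V     = %ld bytes\n", y_bytes + uv_bytes);                  // 3110400
    printf("w * h * bpp/8 = %ld bytes\n", (long)pixel_w * pixel_h * bpp / 8);   // 3110400
    return 0;
}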

Copy the YUV file decoded in the previous post into the project root directory, then build and run.
For environment setup and how to build and run, see part (1) of this series:
http://blog.csdn.net/king1425/article/details/71160339

Next, building on the previous post, we decode an MP4 to YUV and render it with SDL. That gives us a simple video player.

Copy an MP4 file into the project root directory and name it ws.mp4, then build and run the source below.
Here is the result (screenshot omitted):

#include <stdio.h>

#define __STDC_CONSTANT_MACROS

#ifdef _WIN32
//Windows
extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "SDL2/SDL.h"
};
#else
//Linux...
#ifdef __cplusplus
extern "C"
{
#endif
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <SDL2/SDL.h>
#ifdef __cplusplus
};
#endif
#endif

//Refresh Event
#define SFM_REFRESH_EVENT  (SDL_USEREVENT + 1)
#define SFM_BREAK_EVENT  (SDL_USEREVENT + 2)

int thread_exit=0;
int thread_pause=0;

int sfp_refresh_thread(void *opaque){
    thread_exit=0;
    thread_pause=0;

    while (!thread_exit) {
        if(!thread_pause){
            SDL_Event event;
            event.type = SFM_REFRESH_EVENT;
            SDL_PushEvent(&event);
        }
        SDL_Delay(40);
    }
    thread_exit=0;
    thread_pause=0;
    //Break
    SDL_Event event;
    event.type = SFM_BREAK_EVENT;
    SDL_PushEvent(&event);

    return 0;
}

int main(int argc, char* argv[])
{
    AVFormatContext *pFormatCtx;
    int             i, videoindex;
    AVCodecContext  *pCodecCtx;
    AVCodec         *pCodec;
    AVFrame *pFrame,*pFrameYUV;
    uint8_t *out_buffer;
    AVPacket *packet;
    int ret, got_picture;

    //------------SDL----------------
    int screen_w,screen_h;
    SDL_Window *screen;
    SDL_Renderer* sdlRenderer;
    SDL_Texture* sdlTexture;
    SDL_Rect sdlRect;
    SDL_Thread *video_tid;
    SDL_Event event;

    struct SwsContext *img_convert_ctx;

    char filepath[]="ws.mp4";

    av_register_all();
    avformat_network_init();
    pFormatCtx = avformat_alloc_context();

    if(avformat_open_input(&pFormatCtx,filepath,NULL,NULL)!=0){
        printf("Couldn't open input stream.\n");
        return -1;
    }
    if(avformat_find_stream_info(pFormatCtx,NULL)<0){
        printf("Couldn't find stream information.\n");
        return -1;
    }
    videoindex=-1;
    for(i=0; i<pFormatCtx->nb_streams; i++)
        if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO){
            videoindex=i;
            break;
        }
    if(videoindex==-1){
        printf("Didn't find a video stream.\n");
        return -1;
    }

    pCodecCtx=pFormatCtx->streams[videoindex]->codec;
    pCodec=avcodec_find_decoder(pCodecCtx->codec_id);
    if(pCodec==NULL){
        printf("Codec not found.\n");
        return -1;
    }
    if(avcodec_open2(pCodecCtx, pCodec,NULL)<0){
        printf("Could not open codec.\n");
        return -1;
    }
    pFrame=av_frame_alloc();
    pFrameYUV=av_frame_alloc();
    out_buffer=(uint8_t *)av_malloc(avpicture_get_size(PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));
    avpicture_fill((AVPicture *)pFrameYUV, out_buffer, PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);

    //Output Info-----------------------------
    printf("---------------- File Information ---------------\n");
    av_dump_format(pFormatCtx,0,filepath,0);
    printf("-------------------------------------------------\n");

    img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
        pCodecCtx->width, pCodecCtx->height, PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);

    if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {
        printf( "Could not initialize SDL - %s\n", SDL_GetError());
        return -1;
    }
    //SDL 2.0 Support for multiple windows
    screen_w = pCodecCtx->width;
    screen_h = pCodecCtx->height;
    screen = SDL_CreateWindow("WS ffmpeg player", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED,
        500, 500, SDL_WINDOW_OPENGL);

    if(!screen) {
        printf("SDL: could not create window - exiting:%s\n",SDL_GetError());
        return -1;
    }
    sdlRenderer = SDL_CreateRenderer(screen, -1, 0);
    //IYUV: Y + U + V  (3 planes)
    //YV12: Y + V + U  (3 planes)
    sdlTexture = SDL_CreateTexture(sdlRenderer, SDL_PIXELFORMAT_IYUV, SDL_TEXTUREACCESS_STREAMING, pCodecCtx->width, pCodecCtx->height);

    sdlRect.x=0;
    sdlRect.y=0;
    sdlRect.w=screen_w;
    sdlRect.h=screen_h;

    packet=(AVPacket *)av_malloc(sizeof(AVPacket));

    video_tid = SDL_CreateThread(sfp_refresh_thread,NULL,NULL);
    //------------SDL End------------
    //Event Loop

    for (;;) {
        //Wait
        SDL_WaitEvent(&event);
        if(event.type==SFM_REFRESH_EVENT){
            //------------------------------
            if(av_read_frame(pFormatCtx, packet)>=0){
                if(packet->stream_index==videoindex){
                    ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
                    if(ret < 0){
                        printf("Decode Error.\n");
                        return -1;
                    }
                    if(got_picture){
                        sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
                        //SDL---------------------------
                        SDL_UpdateTexture( sdlTexture, NULL, pFrameYUV->data[0], pFrameYUV->linesize[0] );
                        SDL_RenderClear( sdlRenderer );
                        //SDL_RenderCopy( sdlRenderer, sdlTexture, &sdlRect, &sdlRect );
                        SDL_RenderCopy( sdlRenderer, sdlTexture, NULL, NULL);
                        SDL_RenderPresent( sdlRenderer );
                        //SDL End-----------------------
                    }
                }
                av_free_packet(packet);
            }else{
                //Exit Thread
                thread_exit=1;
            }
        }else if(event.type==SDL_KEYDOWN){
            //Pause
            if(event.key.keysym.sym==SDLK_SPACE)
                thread_pause=!thread_pause;
        }else if(event.type==SDL_QUIT){
            thread_exit=1;
        }else if(event.type==SFM_BREAK_EVENT){
            break;
        }
    }

    sws_freeContext(img_convert_ctx);

    SDL_Quit();
    //--------------
    av_frame_free(&pFrameYUV);
    av_frame_free(&pFrame);
    avcodec_close(pCodecCtx);
    avformat_close_input(&pFormatCtx);

    return 0;
}