Decoding and Playing Video with FFmpeg + GDI

Full source code
FFmpeg is a cross-platform multimedia framework; it includes libraries such as libavformat and libavcodec for demuxing and decoding media streams.
This article uses FFmpeg 3.1.5 to decode video. Most existing tutorials are quite old: APIs such as avcodec_decode_video2 are deprecated and have been replaced by avcodec_send_packet and avcodec_receive_frame.
API reference: FFmpeg Documentation

Before using FFmpeg, download the dev and shared builds from the official site. Add the headers and .lib files from the dev build to your project, then copy the DLLs from the shared build into your program's directory.
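If you prefer linking through pragmas instead of project settings, the list looks roughly like this (a minimal sketch; the .lib names are those shipped in the typical Windows dev package, adjust to your own layout):

// Link the FFmpeg import libraries from the dev package
// (library names assumed from the standard Windows build)
#pragma comment(lib, "avformat.lib")
#pragma comment(lib, "avcodec.lib")
#pragma comment(lib, "avutil.lib")
#pragma comment(lib, "swscale.lib")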

Steps to decode a video with FFmpeg:
  1. Call av_register_all at program startup to register the demuxers and decoders
  2. Open the video file with avformat_open_input and read the stream info with avformat_find_stream_info
  3. Find the index of the stream to decode (the first video stream is used here)
  4. Find the decoder with avcodec_find_decoder and initialize it with avcodec_open2
  5. Create a color converter with sws_getContext to obtain RGB data
  6. Read packets from the stream with av_read_frame
  7. Decode each packet with avcodec_send_packet and avcodec_receive_frame, then convert the pixel format to RGB with sws_scale (a minimal sketch of this loop follows the list)
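
Before the full class below, here is a minimal sketch of the new decode loop in isolation. It assumes an already-opened AVFormatContext fmtCtx, an AVCodecContext codecCtx and a video stream index videoIndex (all hypothetical names); note that a single packet may yield zero or several frames:

// Minimal sketch of the send/receive decode loop (FFmpeg 3.1+ API)
AVPacket packet;
AVFrame* frame = av_frame_alloc();
while (av_read_frame(fmtCtx, &packet) >= 0)
{
    if (packet.stream_index == videoIndex)
    {
        avcodec_send_packet(codecCtx, &packet);              // feed one packet to the decoder
        while (avcodec_receive_frame(codecCtx, frame) == 0)  // drain every frame it produced
        {
            // frame->data / frame->linesize now hold one decoded picture
        }
    }
    av_packet_unref(&packet);  // release the packet read by av_read_frame
}
av_frame_free(&frame);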

Decoder.h:
#pragma once

extern "C"
{
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libswscale/swscale.h>
};

#include <thread>
#include <memory>
#include <functional>

class CDecoder
{
public:
    CDecoder(LPCSTR fileName);
    virtual ~CDecoder();

    virtual void Run();
    virtual void Pause();
    virtual void Stop();
    virtual void GetVideoSize(SIZE& size);
    // Set the callback invoked when a frame is ready to present
    virtual void SetOnPresent(std::function<void(BYTE*)>&& onPresent);

protected:
    AVFormatContext* m_formatCtx = NULL;
    int m_videoStreamIndex = -1;
    AVCodecContext* m_codecCtx = NULL;
    AVCodec* m_codec = NULL;
    SwsContext* m_swsCtx = NULL;

    enum DecodeState { DECODE_RUN, DECODE_PAUSE, DECODE_STOP };
    DecodeState m_decodeState = DECODE_STOP;
    std::unique_ptr<std::thread> m_decodeThread;

    std::unique_ptr<BYTE[]> m_imgData;
    SIZE m_videoSize;
    // Called when a frame is ready to present
    std::function<void(BYTE*)> m_onPresent;

    void DecodeThread();
};

Decoder.cpp:
#include "stdafx.h"#include "Decoder.h"CDecoder::CDecoder(LPCSTR fileName){int ret;// 容器ret = avformat_open_input(&m_formatCtx, fileName, NULL, NULL);ret = avformat_find_stream_info(m_formatCtx, NULL);for (UINT i = 0; i < m_formatCtx->nb_streams; i++){if (m_formatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO){m_videoStreamIndex = i;break;}}// 解码器AVCodecParameters* codecpar = m_formatCtx->streams[m_videoStreamIndex]->codecpar;m_codec = avcodec_find_decoder(codecpar->codec_id);m_codecCtx = avcodec_alloc_context3(m_codec);ret = avcodec_open2(m_codecCtx, m_codec, NULL);m_videoSize.cx = codecpar->width;m_videoSize.cy = codecpar->height;// 颜色转换器m_swsCtx = sws_getContext(m_videoSize.cx, m_videoSize.cy, (AVPixelFormat)codecpar->format, m_videoSize.cx,m_videoSize.cy, AV_PIX_FMT_BGRA, SWS_BICUBIC, NULL, NULL, NULL);m_imgData.reset(new BYTE[m_videoSize.cx * m_videoSize.cy * 4]);}CDecoder::~CDecoder(){// 停止解码线程m_decodeState = DECODE_STOP;if (m_decodeThread != nullptr)m_decodeThread->join();sws_freeContext(m_swsCtx);avcodec_free_context(&m_codecCtx);avformat_close_input(&m_formatCtx);}void CDecoder::Run(){TRACE(_T("Run\n"));if (m_decodeState == DECODE_RUN)return;// 启动解码线程if (m_decodeThread != nullptr)m_decodeThread->join();m_decodeState = DECODE_RUN;m_decodeThread.reset(new std::thread(&CDecoder::DecodeThread, this));}void CDecoder::Pause(){TRACE(_T("Pause\n"));if (m_decodeState != DECODE_RUN)return;// 停止解码线程m_decodeState = DECODE_PAUSE;if (m_decodeThread != nullptr){m_decodeThread->join();m_decodeThread = nullptr;}}void CDecoder::Stop(){TRACE(_T("Stop\n"));if (m_decodeState == DECODE_STOP)return;// 停止解码线程m_decodeState = DECODE_STOP;if (m_decodeThread != nullptr){m_decodeThread->join();m_decodeThread = nullptr;}av_seek_frame(m_formatCtx, m_videoStreamIndex, 0, 0);}void CDecoder::GetVideoSize(SIZE& size){size = m_videoSize;}void CDecoder::SetOnPresent(std::function<void(BYTE*)>&& onPresent){m_onPresent = std::move(onPresent);}void CDecoder::DecodeThread(){AVFrame *frame = av_frame_alloc();AVPacket packet;while (m_decodeState == DECODE_RUN && av_read_frame(m_formatCtx, &packet) >= 0){if (packet.stream_index != m_videoStreamIndex)continue;// 解码avcodec_send_packet(m_codecCtx, &packet);if (avcodec_receive_frame(m_codecCtx, frame) != 0)continue;// 转换颜色格式BYTE* pImgData = m_imgData.get();int stride = m_codecCtx->width * 4;sws_scale(m_swsCtx, frame->data, frame->linesize, 0, m_codecCtx->height, &pImgData, &stride);// 呈现if (!m_onPresent._Empty())m_onPresent(m_imgData.get());// 粗略的视频同步Sleep(DWORD(((float)m_codecCtx->time_base.num / (float)m_codecCtx->time_base.den) * 1000));}if (m_decodeState == DECODE_RUN)m_decodeState = DECODE_STOP;av_packet_unref(&packet);av_frame_free(&frame);TRACE(_T("DecodeThread结束\n"));}

Usage:
m_decoder = new CDecoder("E:\\pump it.avi");
m_decoder->SetOnPresent(std::bind(&CFFmpegGDIDlg::OnPresent, this, std::placeholders::_1));
m_decoder->GetVideoSize(m_videoSize);
m_dc.Create(m_videoSize.cx, m_videoSize.cy, 32);
m_decoder->Run();

// ......

void CFFmpegGDIDlg::OnPresent(BYTE* data)
{
    m_dcLock.Lock();
    // RGB bitmaps are stored bottom-up; data is not
    for (int y = 0; y < m_videoSize.cy; y++)
    {
        memcpy(m_dc.GetPixelAddress(0, y), data, m_videoSize.cx * 4);
        data += m_videoSize.cx * 4;
    }
    m_dcLock.Unlock();
    Invalidate(FALSE);
}
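
The dialog still needs to draw the image onto the window; m_dc looks like an ATL CImage here (it has Create and GetPixelAddress). Assuming that, one possible OnPaint sketch is:

// Hypothetical OnPaint for the dialog above: blit the image that OnPresent filled
void CFFmpegGDIDlg::OnPaint()
{
    CPaintDC dc(this);                     // device context for painting
    m_dcLock.Lock();
    m_dc.BitBlt(dc.GetSafeHdc(), 0, 0);    // draw the frame at the top-left corner
    m_dcLock.Unlock();
}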

The video timing here is only approximate, so playback may run slower than the actual frame rate. For a more accurate approach, see this article: Tutorial 05: Synching Video.
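As a middle ground between the fixed Sleep above and full A/V sync, the delay can at least be derived from each frame's timestamp and the stream's time_base instead of the codec context. A rough sketch (m_lastPts is a hypothetical member holding the previous frame's time; error handling omitted):

// Rough per-frame timing from the stream time base (sketch, not the tutorial's full sync)
AVStream* stream = m_formatCtx->streams[m_videoStreamIndex];
double pts = frame->best_effort_timestamp * av_q2d(stream->time_base);  // seconds since start
double delay = pts - m_lastPts;
m_lastPts = pts;
if (delay > 0 && delay < 1.0)
    Sleep(DWORD(delay * 1000));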