通过live555实现H264 RTSP直播(Windows版)
来源:互联网 发布:法国兴业银行 知乎 编辑:程序博客网 时间:2024/05/22 15:55
为何标明“Windows版”,因为firehood大神已经实现了linux版:通过live555实现H264 RTSP直播
相关文章:
【1】Win7(Windows 7)下用VS2013(Visual Studio 2013)编译live555
【2】RTSP协议分析
【3】windows命名管道
一.基础
live555的学习基本上都是从E:\live555\testProgs中的testOnDemandRTSPServer.cpp示例开始的,这个例子实现了一个最简单的RTSP服务器。文件名中的“OnDemand”意思是:依指令行事,也就是说只有当客户端通过URL主动访问并发送相关指令时,该RTSP服务器才会将文件流化并推送到客户端。这个例子是基于RTP单播的,关于单播可以参考:Qt调用jrtplib实现单播、多播和广播
通过testOnDemandRTSPServer.cpp可以学习一个RTSP服务器的搭建步骤。这里新建一个名为h264LiveMediaServer的Win32控制台工程,新建并添加h264LiveMediaServer.cpp,然后将testOnDemandRTSPServer.cpp拷贝到h264LiveMediaServer.cpp,接着做少量修改,只保留与H.264会话相关的部分,如下所示:
- #include "liveMedia.hh"
- #include "BasicUsageEnvironment.hh"
-
// Global live555 usage environment, shared by main() and the subsessions.
UsageEnvironment* env;

// False: each client gets its own input source (fine for file playback).
Boolean reuseFirstSource = False;

// Prints the stream's name, backing file, and the RTSP URL to play it.
static void announceStream(RTSPServer* rtspServer, ServerMediaSession* sms,
    char const* streamName, char const* inputFileName);
-
int main(int argc, char** argv)
{
    // Set up the live555 task scheduler and the usage environment.
    TaskScheduler* scheduler = BasicTaskScheduler::createNew();
    env = BasicUsageEnvironment::createNew(*scheduler);

    // NULL: no access control, any client may request the stream.
    UserAuthenticationDatabase* authDB = NULL;

    // Create the RTSP server, listening on port 8554.
    RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554, authDB);
    if (rtspServer == NULL)
    {
        *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
        exit(1);
    }

    // Description text returned in the DESCRIBE response (SDP).
    char const* descriptionString
        = "Session streamed by \"h264LiveMediaServer\"";

    // Stream name: becomes the last path component of the RTSP URL.
    char const* streamName = "h264ESVideoTest";

    // Local H.264 elementary-stream file to serve.
    char const* inputFileName = "480320.264";

    // One ServerMediaSession per stream; it may hold several subsessions.
    ServerMediaSession* sms= ServerMediaSession::createNew(*env, streamName, streamName,descriptionString);

    // File-based H.264 subsession supplied by live555 itself.
    sms->addSubsession(H264VideoFileServerMediaSubsession::createNew(*env, inputFileName, reuseFirstSource));

    rtspServer->addServerMediaSession(sms);

    announceStream(rtspServer, sms, streamName, inputFileName);

    // Optionally tunnel RTSP over HTTP (helps clients behind firewalls);
    // try port 80 first, then the common alternates.
    if (rtspServer->setUpTunnelingOverHTTP(80) || rtspServer->setUpTunnelingOverHTTP(8000) || rtspServer->setUpTunnelingOverHTTP(8080))
    {
        *env << "\n(We use port " << rtspServer->httpServerPortNum() << " for optional RTSP-over-HTTP tunneling.)\n";
    }
    else
    {
        *env << "\n(RTSP-over-HTTP tunneling is not available.)\n";
    }

    // Enter the live555 event loop; this call does not return.
    env->taskScheduler().doEventLoop();

    return 0;
}
-
- static void announceStream(RTSPServer* rtspServer, ServerMediaSession* sms,
- char const* streamName, char const* inputFileName) {
- char* url = rtspServer->rtspURL(sms);
- UsageEnvironment& env = rtspServer->envir();
- env << "\n\"" << streamName << "\" stream, from the file \""
- << inputFileName << "\"\n";
- env << "Play this stream using the URL \"" << url << "\"\n";
- delete[] url;
- }
如何测试可参考【1】,测试结果如下所示:
二.实现
在通过live555实现H264 RTSP直播中,博主是通过FIFO队列实现的,FIFO队列实际上是Linux下的命名管道,而Windows下也有命名管道,因此在Windows中的流程图如下所示:
关于Windows命名管道详见【3】。
这里不使用命名管道来实现,而是直接读取本地H264文件,分解成StartCode+NALU内存块,然后拷贝到Live555 Server。这样一来,就很容易改成命名管道的形式,命名管道的客户端只需读取本地H264文件,分解成StartCode(0x000001或0x00000001)+NALU内存块,并写入管道,命名管道服务器端(在Live555 Server中)读取管道数据,并拷贝到Live555 Server。
通过“基础”中的分析可以得出,想实现自定义服务器,需要将sms->addSubsession(H264VideoFileServerMediaSubsession::createNew(*env, inputFileName,reuseFirstSource))中的H264VideoFileServerMediaSubsession替换成自己的子会话。H264VideoFileServerMediaSubsession类在其createNewStreamSource(unsigned /*clientSessionId*/, unsigned& estBitrate)函数中调用了ByteStreamFileSource::createNew(envir(), fFileName),而frame的获取正是在ByteStreamFileSource类中的doGetNextFrame()函数中实现的。因此,这里需要继承H264VideoFileServerMediaSubsession和ByteStreamFileSource类,并重写其中的createNewStreamSource和doGetNextFrame函数。
代码如下所示:
h264LiveFramedSource.hh
- #ifndef _H264LIVEFRAMEDSOURCE_HH
- #define _H264LIVEFRAMEDSOURCE_HH
-
-
- #include <ByteStreamFileSource.hh>
-
-
// A live555 source that delivers H.264 StartCode+NALU chunks read from a
// local file (a stand-in for a named-pipe / live-encoder feed).
class H264LiveFramedSource : public ByteStreamFileSource
{
public:
    // Factory method following the live555 createNew() convention.
    static H264LiveFramedSource* createNew(UsageEnvironment& env, unsigned preferredFrameSize = 0, unsigned playTimePerFrame = 0);

protected:
    // Protected: instances must be created through createNew().
    H264LiveFramedSource(UsageEnvironment& env, unsigned preferredFrameSize, unsigned playTimePerFrame);
    ~H264LiveFramedSource();

private:
    // Called by live555 whenever the downstream object asks for a frame.
    virtual void doGetNextFrame();
};
-
- #endif
h264LiveFramedSource.cpp
- #include "h264LiveFramedSource.hh"
- #include "GroupsockHelper.hh"
- #include "spsdecode.h"
-
// Return 1 if buf begins with `zeros_in_startcode` zero bytes followed by
// 0x01 (an H.264 Annex-B start code), 0 otherwise. Reads
// zeros_in_startcode + 1 bytes from buf.
int findStartCode(unsigned char *buf, int zeros_in_startcode)
{
    int info;
    int i;

    info = 1;
    for (i = 0; i < zeros_in_startcode; i++)
        if (buf[i] != 0)
            info = 0;

    if (buf[i] != 1)
        info = 0;
    return info;
}

// Read one StartCode+NALU from `inpf` into `buf` and return its length in
// bytes (start code included). The file position is rewound past the next
// start code so the following call picks it up. At EOF the remaining bytes
// are returned (may be <= 0 when nothing is left).
int getNextNalu(FILE* inpf, unsigned char* buf)
{
    int pos = 0;
    int startCodeFound = 0;
    int info2 = 0;
    int info3 = 0;

    // Consume the leading zeros (and the 0x01) of this NALU's start code.
    while (!feof(inpf) && (buf[pos++] = fgetc(inpf)) == 0);

    // Scan forward until the NEXT start code -- or EOF -- is seen.
    while (!startCodeFound)
    {
        if (feof(inpf))
        {
            return pos - 1;   // last NALU of the file: exclude the EOF byte
        }
        buf[pos++] = fgetc(inpf);
        // Guard the look-behind: without it, &buf[pos - 4] reads before the
        // start of the buffer on malformed input (original code's bug).
        info3 = (pos >= 4) ? findStartCode(&buf[pos - 4], 3) : 0;
        if (info3 != 1)
            info2 = (pos >= 3) ? findStartCode(&buf[pos - 3], 2) : 0;
        startCodeFound = (info2 == 1 || info3 == 1);
    }

    // Un-read the next NALU's start code so the next call finds it.
    if (info3)
    {
        fseek(inpf, -4, SEEK_CUR);   // 4-byte start code 0x00000001
        return pos - 4;
    }
    fseek(inpf, -3, SEEK_CUR);       // 3-byte start code 0x000001
    return pos - 3;                  // always returns (original could fall off the end)
}
-
FILE * inpf;            // handle of the local H.264 elementary-stream file
unsigned char* inBuf;   // buffer holding the current StartCode+NALU
int inLen;              // number of valid bytes in inBuf
int nFrameRate;         // frame rate taken from the SPS (fallback: 25 fps)
- H264LiveFramedSource::H264LiveFramedSource(UsageEnvironment& env, unsigned preferredFrameSize, unsigned playTimePerFrame)
- : ByteStreamFileSource(env, 0, preferredFrameSize, playTimePerFrame)
- {
- char *fname = "480320.264";
- inpf = NULL;
- inpf = fopen(fname, "rb");
- inBuf = (unsigned char*)calloc(1024 * 100, sizeof(char));
- inLen = 0;
- inLen = getNextNalu(inpf, inBuf);
-
- unsigned int nSpsLen = inLen - 4;
- unsigned char *pSps = (unsigned char*)malloc(nSpsLen);
- memcpy(pSps, inBuf + 4, nSpsLen);
-
-
- int width = 0, height = 0, fps = 0;
-
- h264_decode_sps(pSps, nSpsLen, width, height, fps);
-
- nFrameRate = 0;
- if (fps)
- nFrameRate = fps;
- else
- nFrameRate = 25;
- }
-
- H264LiveFramedSource* H264LiveFramedSource::createNew(UsageEnvironment& env, unsigned preferredFrameSize, unsigned playTimePerFrame)
- {
- H264LiveFramedSource* newSource = new H264LiveFramedSource(env, preferredFrameSize, playTimePerFrame);
- return newSource;
- }
-
- H264LiveFramedSource::~H264LiveFramedSource()
- {
- free(inBuf);
- fclose(inpf);
- }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- void H264LiveFramedSource::doGetNextFrame()
- {
- fFrameSize = inLen;
- if (fFrameSize > fMaxSize)
- {
- fNumTruncatedBytes = fFrameSize - fMaxSize;
- fFrameSize = fMaxSize;
- }
- else
- {
- fNumTruncatedBytes = 0;
- }
- memmove(fTo, inBuf, fFrameSize);
-
- inLen = 0;
- inLen = getNextNalu(inpf, inBuf);
- gettimeofday(&fPresentationTime, NULL);
- fDurationInMicroseconds = 1000000 / nFrameRate;
-
- nextTask() = envir().taskScheduler().scheduleDelayedTask(0, (TaskFunc*)FramedSource::afterGetting, this);
- }
h264LiveVideoServerMediaSubssion.hh
- #ifndef _H264LIVEVIDEOSERVERMEDIASUBSSION_HH
- #define _H264LIVEVIDEOSERVERMEDIASUBSSION_HH
- #include "H264VideoFileServerMediaSubsession.hh"
-
// An on-demand subsession that plugs H264LiveFramedSource into the server
// in place of the file-based ByteStreamFileSource.
class H264LiveVideoServerMediaSubssion : public H264VideoFileServerMediaSubsession {

public:
    // Factory method following the live555 createNew() convention.
    static H264LiveVideoServerMediaSubssion* createNew(UsageEnvironment& env, Boolean reuseFirstSource);

protected:
    // Protected: instances must be created through createNew().
    H264LiveVideoServerMediaSubssion(UsageEnvironment& env, Boolean reuseFirstSource);
    ~H264LiveVideoServerMediaSubssion();

protected:
    // Overridden to hand live555 our live source (wrapped in a framer).
    FramedSource* createNewStreamSource(unsigned clientSessionId, unsigned& estBitrate);
};
-
- #endif
h264LiveVideoServerMediaSubssion.cpp
- #include "h264LiveVideoServerMediaSubssion.hh"
- #include "h264LiveFramedSource.hh"
- #include "H264VideoStreamFramer.hh"
-
- H264LiveVideoServerMediaSubssion* H264LiveVideoServerMediaSubssion::createNew(UsageEnvironment& env, Boolean reuseFirstSource)
- {
- return new H264LiveVideoServerMediaSubssion(env, reuseFirstSource);
- }
-
// Pass 0 (NULL) as the base class's file name: the data does not come from
// a file, so the inherited file-handling path is never used.
H264LiveVideoServerMediaSubssion::H264LiveVideoServerMediaSubssion(UsageEnvironment& env, Boolean reuseFirstSource)
    : H264VideoFileServerMediaSubsession(env, 0, reuseFirstSource)
{
}

H264LiveVideoServerMediaSubssion::~H264LiveVideoServerMediaSubssion()
{
}
-
- FramedSource* H264LiveVideoServerMediaSubssion::createNewStreamSource(unsigned clientSessionId, unsigned& estBitrate)
- {
-
- estBitrate = 1000;
-
- H264LiveFramedSource* liveSource = H264LiveFramedSource::createNew(envir());
- if (liveSource == NULL)
- {
- return NULL;
- }
-
-
- return H264VideoStreamFramer::createNew(envir(), liveSource);
- }
还需在h264LiveMediaServer.cpp中做相应的修改
- #include "liveMedia.hh"
- #include "BasicUsageEnvironment.hh"
- #include "h264LiveVideoServerMediaSubssion.hh"
-
// Global live555 usage environment, shared by main() and the subsession.
UsageEnvironment* env;

// True: all clients share one source, so everyone sees the same live feed.
Boolean reuseFirstSource = True;

// Prints the stream's name and the RTSP URL clients should use.
static void announceStream(RTSPServer* rtspServer, ServerMediaSession* sms, char const* streamName);
-
int main(int argc, char** argv)
{
    // Set up the live555 task scheduler and the usage environment.
    TaskScheduler* scheduler = BasicTaskScheduler::createNew();
    env = BasicUsageEnvironment::createNew(*scheduler);
    // NULL: no access control, any client may request the stream.
    UserAuthenticationDatabase* authDB = NULL;

    // Create the RTSP server, listening on port 8554.
    RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554, authDB);
    if (rtspServer == NULL)
    {
        *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
        exit(1);
    }

    // Description text returned in the DESCRIBE response (SDP).
    char const* descriptionString = "Session streamed by \"h264LiveMediaServer\"";

    // Stream name: becomes the last path component of the RTSP URL.
    char const* streamName = "h264ESVideoTest";

    ServerMediaSession* sms= ServerMediaSession::createNew(*env, streamName, streamName ,descriptionString);

    // The key change versus the demo: use our live subsession instead of
    // H264VideoFileServerMediaSubsession.
    sms->addSubsession(H264LiveVideoServerMediaSubssion::createNew(*env, reuseFirstSource));

    rtspServer->addServerMediaSession(sms);

    announceStream(rtspServer, sms, streamName);

    // Enter the live555 event loop; this call does not return.
    env->taskScheduler().doEventLoop();

    return 0;
}
-
- static void announceStream(RTSPServer* rtspServer, ServerMediaSession* sms,char const* streamName)
- {
- char* url = rtspServer->rtspURL(sms);
- UsageEnvironment& env = rtspServer->envir();
- env << "\n\"" << streamName << "\" stream\"\n";
- env << "Play this stream using the URL \"" << url << "\"\n";
- delete[] url;
- }
关于spsdecode.h,详见: H.264(H264)解码SPS获取分辨率和帧率
三.测试
参考链接:http://blog.csdn.net/firehood_/article/details/16844397
注意:链接 live555 的四个库时,如果出现 Winsock 相关符号无法解析的链接错误,请在项目中右键 → 链接器 → 输入 → 附加依赖项 中添加 ws2_32.lib。