android多媒体框架之流媒体具体流程篇3----base on jellybean(十三)

来源:互联网 发布:股票价格统计软件 编辑:程序博客网 时间:2024/05/23 10:20
转载

上一篇我们讲到了从web server 中获取了sessiondescription,并解析出了media server的路径和一些基本的媒体信息。下面我们开始讲述如何跟mediaserver建立连接并控制服务器端和客户端以达到播放,暂停,停止的目的。

首先跟media server建立连接 SETUP:

具体的格式如下(UDP):

C->A(audio): SETUP rtsp://audio.com/twister/audio.en RTSP/1.0

CSeq: 1

Transport: RTP/AVP/UDP;unicast
;client_port=3056-3057

具体到代码的话,我们看myHandler.h中的setupTrack函数:

   void setupTrack(size_t index) {

        sp<APacketSource> source =

            new APacketSource(mSessionDesc,index);

……………………….

        AString url;

        CHECK(mSessionDesc->findAttribute(index,"a=control", &url));

 

        AString trackURL;

        CHECK(MakeURL(mBaseURL.c_str(),url.c_str(), &trackURL));----检查session description中取出media serverURL是否正确

        …………

 

        AString request= "SETUP ";

       request.append(trackURL);

        request.append("RTSP/1.0\r\n");------拼接request字符

 

选择TCP连接还是ARTP连接,

        if (mTryTCPInterleaving) {

            size_t interleaveIndex = 2 *(mTracks.size() - 1);

            info->mUsingInterleavedTCP =true;

            info->mRTPSocket =interleaveIndex;

            info->mRTCPSocket =interleaveIndex + 1;

 

           request.append("Transport: RTP/AVP/TCP;interleaved=");

           request.append(interleaveIndex);

           request.append("-");

           request.append(interleaveIndex + 1);

        } else {

            unsigned rtpPort;

            ARTPConnection::MakePortPair(

                    &info->mRTPSocket,&info->mRTCPSocket, &rtpPort);

 

            if (mUIDValid) {

               HTTPBase::RegisterSocketUserTag(info->mRTPSocket, mUID,

                                               (uint32_t)*(uint32_t*) "RTP_");

               HTTPBase::RegisterSocketUserTag(info->mRTCPSocket, mUID,

                                                (uint32_t)*(uint32_t*)"RTP_");

            }

 

            request.append("Transport:RTP/AVP/UDP;unicast;client_port=");

           request.append(rtpPort);

           request.append("-");

            request.append(rtpPort+ 1);

        }

 

        request.append("\r\n");

 

        if (index > 1) {

            request.append("Session:");

            request.append(mSessionID);

            request.append("\r\n");

        }

 

        request.append("\r\n");

 

        sp<AMessage> reply = newAMessage('setu', id());

        reply->setSize("index",index);

       reply->setSize("track-index", mTracks.size() - 1);

        mConn->sendRequest(request.c_str(),reply);-----发送给服务器端,等待回复,返回的Amessage是“setu

}

   

 

假设收到服务端的连接成功的消息,我们看看myHandler.h中onMessageReceived对应的”setu”如何处理,按道理应该回复回来的信息如下(UDP):

A->C: RTSP/1.0 200 OK

CSeq: 1

Session: 12345678

Transport: RTP/AVP/UDP;unicast
;client_port=3056-3057
;server_port=5000-5001

 

 

virtualvoid onMessageReceived(const sp<AMessage> &msg) {

……

    case 'setu':

            {

                ……………………….

                int32_t result;

               CHECK(msg->findInt32("result", &result));

 

                ALOGI("SETUP(%d) completedwith result %d (%s)",

                     index, result,strerror(-result));

 

                if (result == OK) {

                    CHECK(track != NULL);

 

                    sp<RefBase> obj;

                    CHECK(msg->findObject("response",&obj));

                    sp<ARTSPResponse>response =

                       static_cast<ARTSPResponse *>(obj.get());

 

                    if(response->mStatusCode != 200) {

                        result = UNKNOWN_ERROR;

                    } else {

                       ssize_t i = response->mHeaders.indexOfKey("session");-------查找session id

                        CHECK_GE(i, 0);

 

                       mSessionID = response->mHeaders.valueAt(i);

 

………………………..

 

                        i =mSessionID.find(";");

                        if (i >= 0) {

                            // Remove options,i.e. ";timeout=90"

                            mSessionID.erase(i,mSessionID.size() - i);

                        }

 

                        i = response->mHeaders.indexOfKey("server");---server

                        if (i >= 0) {

                            AString server =response->mHeaders.valueAt(i);

                            if(server.startsWith("XenonStreamer")

                                    ||server.startsWith("XTream")) {

                                ALOGI("Usefake timestamps");

                                mUseSR = false;

                            }

                        }

 

                        sp<AMessage>notify = new AMessage('accu', id());

                       notify->setSize("track-index", trackIndex);

 

                        i =response->mHeaders.indexOfKey("transport");---transport

                        CHECK_GE(i, 0);

 

                        if(track->mRTPSocket != -1 && track->mRTCPSocket != -1) {

                            if(!track->mUsingInterleavedTCP) {

                                AStringtransport = response->mHeaders.valueAt(i);

 

 

……………….

                ++index;

                if (result == OK &&index < mSessionDesc->countTracks()) {

                    setupTrack(index);----一般有两条track,先是audio track然后是videotrack

                } else if(mSetupTracksSuccessful) {

建立完成后就可以“PLAY”了

                    ++mKeepAliveGeneration;

                    postKeepAlive();

 

                    AStringrequest = "PLAY ";---------发送”PLAY”请求给服务器端

                   request.append(mControlURL);

                   request.append(" RTSP/1.0\r\n");

 

                   request.append("Session: ");

                   request.append(mSessionID);

                    request.append("\r\n");

 

                   request.append("\r\n");

 

                   sp<AMessage> reply = new AMessage('play', id());

                   mConn->sendRequest(request.c_str(), reply);

                } else {

                    sp<AMessage> reply = newAMessage('disc', id());

                   mConn->disconnect(reply);

                }

                break;

            }

 

完成“SETUP”阶段就可以“PLAY”了,发送给服务器端的格式如下:

C->V: PLAY rtsp://video.com/twister/video RTSP/1.0

CSeq: 2

Session: 23456789

Range: smpte=0:10:00-

代码在myHandler.h中onMessageReceived对应的”setu”。

下面我们分析下服务器端返回后客户端如何处理“PLAY”。还是在myHandler.h中onMessageReceived函数:

 

            case 'play':

            {

                ………..

 

                if (result == OK) {

                    sp<RefBase> obj;

                   CHECK(msg->findObject("response", &obj));

                    sp<ARTSPResponse>response =

                        static_cast<ARTSPResponse*>(obj.get());

 

                    if(response->mStatusCode != 200) {

                        result = UNKNOWN_ERROR;

                    } else {

                        parsePlayResponse(response);---解析response回来的数据

 

………………

                }

 

                if (result != OK) {

                    sp<AMessage> reply =new AMessage('disc', id());

                   mConn->disconnect(reply);

                }

 

                break;

            }

response回来的格式一般如下:

V->C: RTSP/1.0 200 OK

CSeq: 2

Session: 23456789

Range: smpte=0:10:00-0:20:00    （播放从10分钟到20分钟时间段的视频）

RTP-Info: url=rtsp://video.com/twister/video
;seq=12312232;rtptime=78712811

 

 

voidparsePlayResponse(const sp<ARTSPResponse> &response) {

        if (mTracks.size() == 0) {

            ALOGV("parsePlayResponse: latepackets ignored.");

            return;

        }

 

        mPlayResponseReceived = true;

 

        ssize_t i =response->mHeaders.indexOfKey("range");

…………

        AString range = response->mHeaders.valueAt(i);

………………

 

        i =response->mHeaders.indexOfKey("rtp-info");

        CHECK_GE(i, 0);

 

        AString rtpInfo =response->mHeaders.valueAt(i);

        List<AString> streamInfos;

        SplitString(rtpInfo, ",",&streamInfos);

 

        int n = 1;

        for (List<AString>::iterator it =streamInfos.begin();

             it != streamInfos.end(); ++it) {

            (*it).trim();

            ALOGV("streamInfo[%d] =%s", n, (*it).c_str());

 

            CHECK(GetAttribute((*it).c_str(),"url", &val));

 

            size_t trackIndex = 0;

            while (trackIndex <mTracks.size()) {

                size_t startpos = 0;

                if(mTracks.editItemAt(trackIndex).mURL.size() >= val.size()) {

                    startpos =mTracks.editItemAt(trackIndex).mURL.size() - val.size();

                }

                // Use AString::find in orderto allow the url in the RTP-Info to be a

                // truncated variant (example:"url=trackID=1") of the complete SETUP url

                if(mTracks.editItemAt(trackIndex).mURL.find(val.c_str(), startpos) == -1) {

                    ++trackIndex;

                } else {

                    // Found track

                    break;

                }

            }

            CHECK_LT(trackIndex,mTracks.size());

 

            char *end;

            unsigned long seq = 0;

            if (GetAttribute((*it).c_str(),"seq", &val)) {

                seq = strtoul(val.c_str(),&end, 10);

            } else {

               CHECK(GetAttribute((*it).c_str(), "rtptime", &val));

            }

 

            TrackInfo *info = &mTracks.editItemAt(trackIndex);

            info->mFirstSeqNumInSegment =seq;

            info->mNewSegment = true;

 

            uint32_t rtpTime = 0;

            if (GetAttribute((*it).c_str(),"rtptime", &val)) {

                rtpTime = strtoul(val.c_str(),&end, 10);

                mReceivedRTPTime = true;

                ALOGV("track #%d:rtpTime=%u <=> npt=%.2f", n, rtpTime, npt1);

            } else {

                ALOGV("no rtptime in playresponse: track #%d: rtpTime=%u <=> npt=%.2f", n,

                        rtpTime, npt1);

               CHECK(GetAttribute((*it).c_str(), "seq", &val));

            }

 

            info->mRTPAnchor = rtpTime;

            mLastMediaTimeUs = (int64_t)(npt1 *1E6);

            mMediaAnchorUs = mLastMediaTimeUs;

 

            // Removing packets with old RTPtimestamps

            while (!info->mPackets.empty()){

                sp<ABuffer> accessUnit =*info->mPackets.begin();

                uint32_t firstRtpTime;

               CHECK(accessUnit->meta()->findInt32("rtp-time", (int32_t*)&firstRtpTime));

                if (firstRtpTime == rtpTime) {

                    break;

                }

               info->mPackets.erase(info->mPackets.begin());

            }

            ++n;

        }

   

 

至此video source 和audiosource就可以通过RTP不断的往客户端发送,客户端拿到这些数据就可以通过相应的解码器解析播放了。

我们的流媒体播放流程也讲得差不多了,如何关闭两端的流程就由大伙自己去看了。但是大家要注意一点有时候一些服务在关闭的时候没有发回“TEARDOWN”的response。
阅读全文
0 0
原创粉丝点击