Welcome to the CSDN-markdown editor


I was previously on an old version of WebRTC and have now updated to the latest one. Version: 1.1.20621 / 1.1.19878 (the difference between the two should be small; updating through CocoaPods is just too much hassle).

The message-exchange (signaling) parts still use pseudocode. Don't worry, it is all commented out, so you won't be greeted by a screen full of red errors.

Updating

Still our old friend CocoaPods. Note that the pod now only supports iOS 9.0 and above.

    source 'https://github.com/CocoaPods/Specs.git'

    target 'xxx' do
        platform :ios, '9.0'
        pod 'GoogleWebRTC'
    end


pod install

What has changed

How RTCIceServer is created

url

The TURN server address, passed as an array so that multiple servers can be given. This was covered in the first article.

    RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:url
                                                           username:@"u1"
                                                         credential:@"p1"];
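
For context, a minimal sketch of how the `url` array and the server list later handed to RTCConfiguration might be assembled (the server addresses and credentials here are placeholders, not from the original article):

```objc
// Hypothetical STUN/TURN endpoints -- replace them with your own servers.
NSArray<NSString *> *url = @[ @"stun:stun.l.google.com:19302",
                              @"turn:turn.example.com:3478" ];

RTCIceServer *server = [[RTCIceServer alloc] initWithURLStrings:url
                                                       username:@"u1"
                                                     credential:@"p1"];

// Collected into the array that is later assigned to config.iceServers.
NSArray<RTCIceServer *> *stunServerArray = @[ server ];
```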

RTCPeerConnection

  1. How it is created

    // RTCConfiguration has a lot of configuration options, but I have not used any of them.
    // To be honest I have not looked into them yet; I will fill this in when I have time.
    RTCConfiguration *config = [[RTCConfiguration alloc] init];
    config.iceServers = stunServerArray;
    RTCPeerConnection *connection =
        [factory peerConnectionWithConfiguration:config
                                     constraints:[self peerConnectionConstraints]
                                        delegate:self];
    // [self peerConnectionConstraints] is the same as before.
  2. Delegate methods
    All of the delegate method names have changed, but put them next to the old ones and you can see only a few letters differ; a few of the renamed methods are sketched below.
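
As a reference, here are a few of the renamed RTCPeerConnectionDelegate methods as they appear in the GoogleWebRTC Objective-C headers of roughly this release (check your local headers if the names have drifted):

```objc
// ICE connection state changed.
- (void)peerConnection:(RTCPeerConnection *)peerConnection
    didChangeIceConnectionState:(RTCIceConnectionState)newState;

// A local ICE candidate was gathered.
- (void)peerConnection:(RTCPeerConnection *)peerConnection
    didGenerateIceCandidate:(RTCIceCandidate *)candidate;

// The remote side added a media stream.
- (void)peerConnection:(RTCPeerConnection *)peerConnection
          didAddStream:(RTCMediaStream *)stream;

// The remote side opened a data channel.
- (void)peerConnection:(RTCPeerConnection *)peerConnection
    didOpenDataChannel:(RTCDataChannel *)dataChannel;
```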

RTCRtpSender and RTCMediaStream

RTCMediaStream has not been removed, but my feeling is that RTCRtpSender is meant to replace it (the stream-based path still works; see the sketch just below).
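
For comparison, a minimal sketch of the stream-based alternative, assuming the factory and the audio/video tracks have already been created and using the kARDMediaStreamId constant defined below (method names taken from the GoogleWebRTC Objective-C headers; verify against your version):

```objc
// Bundle the tracks into an RTCMediaStream and hand the whole stream
// to the peer connection, as in the old API.
RTCMediaStream *stream = [_factory mediaStreamWithStreamId:kARDMediaStreamId];
[stream addAudioTrack:audioTrack];
[stream addVideoTrack:videoTrack];
[_peerConnection addStream:stream];
```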

static NSString *const kARDMediaStreamId = @"ARDAMSa0";
  1. Creating the AudioSender

    - (RTCRtpSender *)createAudioSender {
        RTCMediaConstraints *constraints = [self defaultMediaAudioConstraints];
        RTCAudioSource *source = [_factory audioSourceWithConstraints:constraints];
        RTCAudioTrack *audioTrack = [_factory audioTrackWithSource:source trackId:kARDAudioTrackId];
        RTCRtpSender *sender =
            [_peerConnection senderWithKind:kRTCMediaStreamTrackKindAudio
                                   streamId:kARDMediaStreamId];
        if (audioTrack) {
            sender.track = audioTrack;
        }
        return sender;
    }

    // MARK: kRTCMediaConstraintsLevelControl -- literally "level control".
    // Left as true for now; I have not been able to work out what it actually does.
    - (RTCMediaConstraints *)defaultMediaAudioConstraints {
        RTCMediaConstraints *constraints = [[RTCMediaConstraints alloc]
            initWithMandatoryConstraints:@{ kRTCMediaConstraintsLevelControl : kRTCMediaConstraintsValueTrue }
                     optionalConstraints:nil];
        return constraints;
    }
  2. Creating the VideoSender

    - (RTCRtpSender *)createVideoSender {
        RTCRtpSender *sender = [_peerConnection senderWithKind:kRTCMediaStreamTrackKindVideo
                                                      streamId:kARDMediaStreamId];
        RTCVideoTrack *videoTrack = [self createVideoTrackWithDirecion:AVCaptureDevicePositionFront];
        if (videoTrack) {
            // Same delegate as before: hand the video track back to the UI for rendering.
            if (self.delegate && [self.delegate respondsToSelector:@selector(dspersonWebRTCdidReceiveLocalVideoTrack:)]) {
                [self.delegate dspersonWebRTCdidReceiveLocalVideoTrack:videoTrack];
            }
            sender.track = videoTrack;
        }
        return sender;
    }

    // Defaults to the front camera (my own requirement; customise as needed).
    - (RTCVideoTrack *)createVideoTrackWithDirecion:(AVCaptureDevicePosition)position {
        RTCVideoTrack *localVideoTrack = nil;
    #if !TARGET_IPHONE_SIMULATOR && TARGET_OS_IPHONE
        // Updated method
        localVideoTrack = [_factory videoTrackWithSource:self.source
                                                 trackId:kARDVideoTrackId];
    #endif
        return localVideoTrack;
    }

    At this point the audio and video tracks have been added to _peerConnection, which serves the same purpose as the mediaStream did before; a short usage sketch follows.
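
A minimal usage sketch of the two helpers above during call setup (the audioSender/videoSender property names are my own, for illustration):

```objc
// Attach local audio and video to the already-created peer connection.
self.audioSender = [self createAudioSender];
self.videoSender = [self createVideoSender];

// Keeping the senders around also makes muting easy later, e.g.:
// self.audioSender.track.isEnabled = NO;
```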

The RTCPair class has been removed

Previously you needed this class whenever you built media constraints (RTCMediaConstraints); now they are created from plain dictionaries, as in the sketch below.
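
For example, a constraint that used to be expressed through RTCPair objects is now just a key/value pair in a dictionary (the old RTCPair initializer in the comment is quoted from memory, so treat it as approximate):

```objc
// Old style (roughly): [[RTCPair alloc] initWithKey:@"OfferToReceiveAudio" value:@"true"]
// New style: constraints are plain dictionaries.
RTCMediaConstraints *constraints = [[RTCMediaConstraints alloc]
    initWithMandatoryConstraints:@{ @"OfferToReceiveAudio" : @"true" }
             optionalConstraints:nil];
```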

RTCSessionDescriptionDelegate has been removed

When this delegate was removed my heart sank (silently wondering how much code would have to be rewritten...), but luckily Google is fairly considerate: the delegate is gone, yet the old code does not have to be deleted and can still be recycled.

SDP creation now goes through block-based methods on RTCPeerConnection

  1. Constraints for the Offer and the Answer

    - (RTCMediaConstraints *)defaultOfferConstraints {
        NSDictionary *mandatoryConstraints = @{
            @"OfferToReceiveAudio" : @"true",
            @"OfferToReceiveVideo" : @"true"
        };
        RTCMediaConstraints *constraints = [[RTCMediaConstraints alloc]
            initWithMandatoryConstraints:mandatoryConstraints
                     optionalConstraints:nil];
        return constraints;
    }
  2. Creating the Offer

    [_peerConnection offerForConstraints:[self defaultOfferConstraints]
                       completionHandler:^(RTCSessionDescription * _Nullable sdp,
                                           NSError * _Nullable error) {
        // This is the old delegate method, recycled.
        [self peerConnection:manager.peerConnection didCreateSessionDescription:sdp error:error];
    }];

Recycled code

  1. The old delegate for creating the local SDP

    - (void)peerConnection:(RTCPeerConnection *)peerConnection
        didCreateSessionDescription:(RTCSessionDescription *)sdp
                              error:(NSError *)error {
        // dispatch_async_on_main_queue(^{ // a main-queue block; dispatch to the main thread yourself
        if (error) {
            if (DSpersonKit.isENABLED_DEBUG) {
                [MBProgressHUD showErrorMessage:error.description];
            }
            // DSpersonKitLog(@"\nError while sending the local SDP\n%@", error);
            // On an error here I close the connection directly and tell the other side:
            // [[DSWebRTCManager shareInstance] closeAndRemoveWithP2P:self receive:false];
            return;
        }
        // DSpersonKitLog(@"Sending local SDP");
        // @weakify(self); // weak
        [peerConnection setLocalDescription:sdp completionHandler:^(NSError *_Nullable error) {
            // @strongify(self); // strong
            [self peerConnection:self.peerConnection didSetSessionDescriptionWithError:error];
        }];
        // The resulting sdp needs to be sent to the other side here:
        // [_webSocket send:sdp];
        // });
    }
  2. The old delegate for setting the remote SDP

    - (void)peerConnection:(RTCPeerConnection *)peerConnection
        didSetSessionDescriptionWithError:(NSError *)error {
        // dispatch_async_on_main_queue(^{
        if (error) {
            if (DSpersonKit.isENABLED_DEBUG) {
                [MBProgressHUD showErrorMessage:error.description];
            }
            // DSpersonKitLog(@"\nError while handling the remote SDP\n%@", error);
            // [[DSWebRTCManager shareInstance] closeAndRemoveWithP2P:self receive:false];
            // Same handling as in 1 (the old local-SDP delegate).
            return;
        }
        // If we are answering a remote offer, we need to create the answer and a local description.
        if (!_isInititor && !_peerConnection.localDescription) {
            // DSpersonKitLog(@"Received an Offer from the remote side; creating the local Answer");
            // This must be called after setRemoteDescription, otherwise it fails.
            // @weakify(self);
            [_peerConnection answerForConstraints:[self defaultOfferConstraints]
                                completionHandler:^(RTCSessionDescription * _Nullable sdp,
                                                    NSError * _Nullable error) {
                // @strongify(self);
                [self peerConnection:self.peerConnection
                    didCreateSessionDescription:sdp
                                          error:error];
            }];
        }
        // });
    }
  3. Applying the offer received from the server

    Set the SDP obtained from the signaling server locally.

    [self setLocalDescription:sdp completionHandler:^(NSError * _Nullable error) {
        [self peerConnection:self.peerConnection didSetSessionDescriptionWithError:error];
    }];
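
Note that for an SDP that arrives from the remote peer you would normally call setRemoteDescription: rather than setLocalDescription:; a minimal sketch of that path (remoteSdp is a placeholder name):

```objc
// Apply the remote peer's offer; only after this succeeds can an answer be created.
[_peerConnection setRemoteDescription:remoteSdp
                    completionHandler:^(NSError * _Nullable error) {
    [self peerConnection:self.peerConnection didSetSessionDescriptionWithError:error];
}];
```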

RTCICECandidate changes

  1. Renamed to RTCIceCandidate
  2. The delegate method name has changed, but it works the same way as before

    - (void)peerConnection:(RTCPeerConnection *)peerConnection
        didGenerateIceCandidate:(RTCIceCandidate *)candidate {
        if (!_peerConnection) {
            // DSpersonKitLog(@"An error occurred or a Bye message was received and the P2P connection is already closed; stop sending ICE candidates")
            return;
        }
        // dispatch_run_in_async(^{ // candidates are generated many times, so remember to send them asynchronously
        //     [_webSocket send:candidate];
        // });
    }
    /// This delegate method is new. In my tests it was never called; presumably the timing was not right.
    - (void)peerConnection:(nonnull RTCPeerConnection *)peerConnection
        didRemoveIceCandidates:(nonnull NSArray<RTCIceCandidate *> *)candidates;
  3. The state type has been renamed to RTCIceConnectionState; see the sketch after this list.
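
A short sketch of handling the renamed state in the new delegate callback (the enum cases below are the ones in the headers of this release; double-check against your version):

```objc
- (void)peerConnection:(RTCPeerConnection *)peerConnection
    didChangeIceConnectionState:(RTCIceConnectionState)newState {
    switch (newState) {
        case RTCIceConnectionStateConnected:
        case RTCIceConnectionStateCompleted:
            // Media can flow now.
            break;
        case RTCIceConnectionStateFailed:
        case RTCIceConnectionStateDisconnected:
        case RTCIceConnectionStateClosed:
            // Tear the call down or try to reconnect.
            break;
        default:
            break;
    }
}
```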

RTCDataChannel changes

  1. The delegate method names have changed (again, you can tell at a glance what they mean)
  2. The state property is now readyState and the enum names have changed (not listed one by one here); see the sketch below.
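
A hedged sketch of sending data only once the channel is open, using the renamed state names (verify the RTCDataChannelState cases against your headers):

```objc
// Only send once the channel reports it is open.
if (_dataChannel.readyState == RTCDataChannelStateOpen) {
    RTCDataBuffer *buffer =
        [[RTCDataBuffer alloc] initWithData:[@"hello" dataUsingEncoding:NSUTF8StringEncoding]
                                   isBinary:NO];
    [_dataChannel sendData:buffer];
}
```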

Errors

SDP error

Error Domain=org.webrtc.RTCPeerConnection Code=-1 "SessionDescription is NULL." UserInfo={NSLocalizedDescription=SessionDescription is NULL.}

This error is caused by a bad SDP: the SDP is not empty, but its format may be incorrect. Check it carefully; a correct SDP looks like this:

```
v=0                                           // beginning
o=- 2225376018456128074 2 IN IP4 127.0.0.1
.......                                       (lines omitted)
a=ssrc:325890970 label:ARDAMSv0               // end
```
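
One cheap defensive check (my own suggestion, not from the original article) is to make sure the string actually starts with v=0 before wrapping it in an RTCSessionDescription:

```objc
// Reject payloads that clearly are not SDP before handing them to WebRTC.
if (![sdpString hasPrefix:@"v=0"]) {
    NSLog(@"Received a malformed SDP:\n%@", sdpString);
    return;
}
RTCSessionDescription *description =
    [[RTCSessionDescription alloc] initWithType:RTCSdpTypeOffer sdp:sdpString];
```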

RTCDataChannel issue

This one is still unresolved and the cause is unclear; see the end of the first article for details.

Switching Google's VP8/VP9 encoding to H264

Only the SDP needs to be post-processed; a usage sketch follows the method below.

    #warning sdpString: description.sdp is used here; the demo uses description.description
    + (RTCSessionDescription *)descriptionForDescription:(RTCSessionDescription *)description
                                     preferredVideoCodec:(NSString *)codec {
        NSString *sdpString = description.sdp;
        NSString *lineSeparator = @"\n";
        NSString *mLineSeparator = @" ";
        // Copied from PeerConnectionClient.java.
        // TODO(tkchin): Move this to a shared C++ file.
        NSMutableArray *lines =
            [NSMutableArray arrayWithArray:
                [sdpString componentsSeparatedByString:lineSeparator]];
        // Find the line starting with "m=video".
        NSInteger mLineIndex = -1;
        for (NSInteger i = 0; i < lines.count; ++i) {
            if ([lines[i] hasPrefix:@"m=video"]) {
                mLineIndex = i;
                break;
            }
        }
        if (mLineIndex == -1) {
            // RTCLog(@"No m=video line, so can't prefer %@", codec);
            return description;
        }
        // An array with all payload types with name |codec|. The payload types are
        // integers in the range 96-127, but they are stored as strings here.
        NSMutableArray *codecPayloadTypes = [[NSMutableArray alloc] init];
        // a=rtpmap:<payload type> <encoding name>/<clock rate>
        // [/<encoding parameters>]
        NSString *pattern =
            [NSString stringWithFormat:@"^a=rtpmap:(\\d+) %@(/\\d+)+[\r]?$", codec];
        NSRegularExpression *regex =
            [NSRegularExpression regularExpressionWithPattern:pattern
                                                      options:0
                                                        error:nil];
        for (NSString *line in lines) {
            NSTextCheckingResult *codecMatches =
                [regex firstMatchInString:line
                                  options:0
                                    range:NSMakeRange(0, line.length)];
            if (codecMatches) {
                [codecPayloadTypes
                    addObject:[line substringWithRange:[codecMatches rangeAtIndex:1]]];
            }
        }
        if ([codecPayloadTypes count] == 0) {
            // RTCLog(@"No payload types with name %@", codec);
            return description;
        }
        NSArray *origMLineParts =
            [lines[mLineIndex] componentsSeparatedByString:mLineSeparator];
        // The format of the m-line should be: m=<media> <port> <proto> <fmt> ...
        const int kHeaderLength = 3;
        if (origMLineParts.count <= kHeaderLength) {
            // RTCLogWarning(@"Wrong SDP media description format: %@", lines[mLineIndex]);
            return description;
        }
        // Split the line into header and payloadTypes.
        NSRange headerRange = NSMakeRange(0, kHeaderLength);
        NSRange payloadRange =
            NSMakeRange(kHeaderLength, origMLineParts.count - kHeaderLength);
        NSArray *header = [origMLineParts subarrayWithRange:headerRange];
        NSMutableArray *payloadTypes = [NSMutableArray
            arrayWithArray:[origMLineParts subarrayWithRange:payloadRange]];
        // Reconstruct the line with |codecPayloadTypes| moved to the beginning of the
        // payload types.
        NSMutableArray *newMLineParts = [NSMutableArray arrayWithCapacity:origMLineParts.count];
        [newMLineParts addObjectsFromArray:header];
        [newMLineParts addObjectsFromArray:codecPayloadTypes];
        [payloadTypes removeObjectsInArray:codecPayloadTypes];
        [newMLineParts addObjectsFromArray:payloadTypes];
        NSString *newMLine = [newMLineParts componentsJoinedByString:mLineSeparator];
        [lines replaceObjectAtIndex:mLineIndex
                         withObject:newMLine];
        NSString *mangledSdpString = [lines componentsJoinedByString:lineSeparator];
        return [[RTCSessionDescription alloc] initWithType:description.type
                                                       sdp:mangledSdpString];
    }
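
A minimal usage sketch of the method above (my own illustration), rewriting a freshly created offer to prefer H264 before it is set locally and sent out:

```objc
RTCSessionDescription *h264Sdp =
    [[self class] descriptionForDescription:sdp preferredVideoCodec:@"H264"];
[_peerConnection setLocalDescription:h264Sdp
                   completionHandler:^(NSError * _Nullable error) {
    [self peerConnection:self.peerConnection didSetSessionDescriptionWithError:error];
}];
```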

Original article
