[jitsi-dev] iOS webrtc


#1

Hello,

with that last change to my sdp, there was a real flurry of activity. up to the point of a crash in
- (void)peerConnection:(RTCPeerConnection *)peerConnection
           addedStream:(RTCMediaStream *)stream {
    
    NSLog(@"PCO onAddStream.");
    dispatch_async(dispatch_get_main_queue(), ^(void) {
  NSAssert([stream.audioTracks count] >= 1,
    @"Expected at least 1 audio stream");
        
  NSAssert([stream.videoTracks count] >= 1,
    @"Expected at least 1 video stream");

        if ([stream.videoTracks count] > 0) {
            [[self videoView] renderVideoTrackInterface:[stream.videoTracks objectAtIndex:0]];
            //[[self delegate] whiteboardConnection:self renderRemoteVideo:[stream.videoTracks objectAtIndex:0]];
        }
    });
}

i commented the assertions out just to see and i did get the completion callback of my session description so i think that is ok now!

any ideas on why i am getting this call with stream counts of 0? I am going over the order of operations for creating and initializing things before i even start trying to understand that.

from what i understand now,

the way the original code worked was
  
  when it got 'onOpen'
  RTCPair *audio =
  [[RTCPair alloc] initWithKey:@"OfferToReceiveAudio" value:@"true"];
  //** video added
  RTCPair *video =
  [[RTCPair alloc] initWithKey:@"OfferToReceiveVideo" value:@"true"];
  NSArray *mandatory = @[ audio , video ];
    
  RTCMediaConstraints *constraints =
  [[RTCMediaConstraints alloc] initWithMandatoryConstraints:mandatory
                                          optionalConstraints:nil];
  [self.peerConnection createOfferWithDelegate:self constraints:constraints];
  
  onMessage would be ice candidates then and SDP
    when ice candidates all received it would
    
      RTCMediaConstraints *_constraints = [[RTCMediaConstraints alloc] initWithMandatoryConstraints:@[[[RTCPair alloc] initWithKey:@"OfferToReceiveAudio" value:@"true"], [[RTCPair alloc] initWithKey:@"OfferToReceiveVideo" value:@"true"]] optionalConstraints:@[[[RTCPair alloc] initWithKey:@"internalSctpDataChannels" value:@"true"], [[RTCPair alloc] initWithKey:@"DtlsSrtpKeyAgreement" value:@"true"]]];
    
  self.queuedRemoteCandidates = [NSMutableArray array];
  self.peerConnectionFactory = [[RTCPeerConnectionFactory alloc] init];
  self.pcObserver = [[PCObserver alloc] initWithDelegate:self];
  self.peerConnection =
  [self.peerConnectionFactory peerConnectionWithICEServers:servers
                                                 constraints:_constraints
                                                    delegate:self.pcObserver];
  RTCMediaStream *lms = [self.peerConnectionFactory mediaStreamWithLabel:@"ARDAMS"];
  NSLog(@"Adding Audio and Video devices ...");
  [lms addAudioTrack:[self.peerConnectionFactory audioTrackWithID:@"ARDAMSa0"]];
    
  //** http://code.google.com/p/webrtc/issues/detail?id=2246
    
  NSString *cameraID = nil;
    //** back camera
    //AVCaptureDevice *captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    //cameraID = [captureDevice localizedName];
    
    //** front camera
  for (AVCaptureDevice *captureDevice in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] ) {
    if (!cameraID || captureDevice.position == AVCaptureDevicePositionFront) {
      cameraID = [captureDevice localizedName];
    }
  }
    
  RTCVideoCapturer *capturer = [RTCVideoCapturer capturerWithDeviceName:cameraID];
  RTCVideoSource *videoSource = [self.peerConnectionFactory videoSourceWithCapturer:capturer constraints:nil];
  [self setLocalVideoTrack:[self.peerConnectionFactory videoTrackWithID:@"ARDAMSv0" source:videoSource]];
  if ([self localVideoTrack]) {
    [lms addVideoTrack:[self localVideoTrack]];
  }
    
  //** this adds the local camera video feed to the view as a preview
  //[self.viewController.videoView renderVideoTrackInterface:[self localVideoTrack]];
  // [[self localVideoTrack] addRenderer:self.viewController.videoRenderer];
    
  //** pass the videoView to the observer, for later rendering
  self.pcObserver.videoView = self.viewController.videoView;
    
  //** add stream
  [self.peerConnection addStream:lms constraints:_constraints];
    
  [(APPRTCAppDelegate*)[UIApplication sharedApplication].delegate displayLogMessage:@"onICEServers - add local stream."];

  NSLog(@"Adding Audio and Video devices ... DONE");

  then on either 'offer' or 'answer' it does
    NSString *sdpString = [objects objectForKey:@"sdp"];
  RTCSessionDescription *sdp = [[RTCSessionDescription alloc]
                                      initWithType:value sdp:[APPRTCJitsiDelegate preferISAC:sdpString]];
  [self.peerConnection setRemoteDescriptionWithDelegate:self sessionDescription:sdp];
  [(APPRTCAppDelegate*)[UIApplication sharedApplication].delegate displayLogMessage:@"PC - setRemoteDescription."];

  then the session delegates pretty much took over.

    should i be following the same order?

    should i be doing a createOffer?
  
Peter Mycue
pmycue@us.ibm.com
pmycue@gmail.com
704-626-9772


#2

I think jitsi meet sends you separate _streams_ for audio and video -- so you should get one stream with >0 audio tracks and another with >0 video tracks.

···

Am 06.11.2014 um 14:33 schrieb Peter Mycue:

Hello,

with that last change to my sdp, there was a real flurry of activity.
up to the point of a crash in
- (void)peerConnection:(RTCPeerConnection *)peerConnection
            addedStream:(RTCMediaStream *)stream {

NSLog(@"PCO onAddStream.");
dispatch_async(dispatch_get_main_queue(), ^(void) {
NSAssert([stream.audioTracks count] >= 1,
@"Expected at least 1 audio stream");

NSAssert([stream.videoTracks count] >= 1,
@"Expected at least 1 video stream");

         if ([stream.videoTracks count] > 0) {
             [[self videoView]
renderVideoTrackInterface:[stream.videoTracks objectAtIndex:0]];
//[[self delegate] whiteboardConnection:self
renderRemoteVideo:[stream.videoTracks objectAtIndex:0]];
         }
     });
}

i commented the assertions out just to see and i did get the completion
callback of my session description so i think that is ok now!

any ideas on why i am getting this call with stream counts of 0? I am
going over the order of operations for creating and initializing things
before i even start trying to understand that.