Date: Fri, 29 Mar 2024 11:39:11 +0100 (CET) Message-ID: <2029841303.47459.1711708751986@docs.flashphoner.com> Subject: Exported From Confluence MIME-Version: 1.0 Content-Type: multipart/related; boundary="----=_Part_47458_627620255.1711708751986" ------=_Part_47458_627620255.1711708751986 Content-Type: text/html; charset=UTF-8 Content-Transfer-Encoding: quoted-printable Content-Location: file:///C:/exported.html
Since iOS SDK build 2.6.32 it is possible to receive stream publisher events while playing the stream. This can be used to detect if audio or video is muted by the stream publisher: when the publisher uses the muteAudio()/muteVideo() method, a special event is sent to all the subscribers. To receive this event while playing a stream, define the function FPWCSApi2Stream.onStreamEvent in an Objective C application or WCSStream.onStreamEvent in a Swift application.
Objective C example code
[_remoteStream onStreamEvent:^(FPWCSApi2StreamEvent *streamEvent){
    NSLog(@"No remote stream, %@", streamEvent.type);
    if ([streamEvent.type isEqual:[FPWCSApi2Model streamEventTypeToString:kFPWCSStreamEventTypeAudioMuted]]) {
        [_remoteControl onAudioMute:true];
    }
    if ([streamEvent.type isEqual:[FPWCSApi2Model streamEventTypeToString:kFPWCSStreamEventTypeAudioUnmuted]]) {
        [_remoteControl onAudioMute:false];
    }
    if ([streamEvent.type isEqual:[FPWCSApi2Model streamEventTypeToString:kFPWCSStreamEventTypeVideoMuted]]) {
        [_remoteControl onVideoMute:true];
    }
    if ([streamEvent.type isEqual:[FPWCSApi2Model streamEventTypeToString:kFPWCSStreamEventTypeVideoUnmuted]]) {
        [_remoteControl onVideoMute:false];
    }
}];
Swift example code
playStream?.onStreamEvent({streamEvent in
    if (streamEvent?.type == FPWCSApi2Model.streamEventType(toString: .fpwcsStreamEventTypeAudioMuted)) {
        self.remoteViewController?.onAudioMute(true);
    }
    if (streamEvent?.type == FPWCSApi2Model.streamEventType(toString: .fpwcsStreamEventTypeAudioUnmuted)) {
        self.remoteViewController?.onAudioMute(false);
    }
    if (streamEvent?.type == FPWCSApi2Model.streamEventType(toString: .fpwcsStreamEventTypeVideoMuted)) {
        self.remoteViewController?.onVideoMute(true);
    }
    if (streamEvent?.type == FPWCSApi2Model.streamEventType(toString: .fpwcsStreamEventTypeVideoUnmuted)) {
        self.remoteViewController?.onVideoMute(false);
    }
});
When a subscriber connects to a stream to play, this stream status can be received in the StreamStatusPlaying event handler using the Stream.getAudioState() and Stream.getVideoState() methods.
Objective C example code
[_remoteStream on:kFPWCSStreamStatusPlaying callback:^(FPWCSApi2Stream *stream){
    [self changeStreamStatus:stream];
    [self onStarted];
    _useLoudSpeaker.control.userInteractionEnabled = YES;
    [_remoteControl onAudioMute:[stream getAudioState].muted];
    [_remoteControl onVideoMute:[stream getVideoState].muted];
}];
Swift example code
@IBAction func playPressed(_ sender: Any) {
    changeViewState(playButton, false)
    if (playButton.title(for: .normal) == "PLAY") {
        let options = FPWCSApi2StreamOptions()
        options.name = playName.text;
        options.display = remoteDisplay.videoView;
        options.constraints = remoteMediaConstrains;
        options.transport = tcpTransport.isOn ? kFPWCSTransport.fpwcsTransportTCP : kFPWCSTransport.fpwcsTransportUDP;
        do {
            playStream = try session!.createStream(options)
        } catch {
            print(error);
        }
        playStream?.on(.fpwcsStreamStatusPlaying, {rStream in
            self.changeStreamStatus(rStream!)
            self.onPlaying(rStream!);
        });
        ...
    }
}
...
fileprivate func onPlaying(_ stream: FPWCSApi2Stream) {
    playButton.setTitle("STOP", for: .normal)
    changeViewState(loudSpeaker, true)
    changeViewState(playButton, true)
    self.remoteViewController!.onAudioMute(stream.getAudioState()?.muted ?? false)
    self.remoteViewController!.onVideoMute(stream.getVideoState()?.muted ?? false)
}