diff --git a/WebRTC-Sample-App/VideoViewController.swift b/WebRTC-Sample-App/VideoViewController.swift
index c3e9bfd..30859b5 100644
--- a/WebRTC-Sample-App/VideoViewController.swift
+++ b/WebRTC-Sample-App/VideoViewController.swift
@@ -259,6 +259,15 @@ extension VideoViewController: AntMediaClientDelegate {
     func localStreamStarted(streamId: String) {
         print("Local stream added")
         self.fullVideoView.isHidden = false
+
+        let localVideoTrack: RTCVideoTrack? = self.client?.getLocalVideoTrack();
+
+        print("local video trackId: \(String(describing: localVideoTrack?.trackId))");
+
+        let localAudioTrack: RTCAudioTrack? = self.client?.getLocalAudioTrack();
+
+        print("local audio trackId: \(String(describing: localAudioTrack?.trackId))");
+
     }
diff --git a/WebRTCiOSSDK/api/AntMediaClient.swift b/WebRTCiOSSDK/api/AntMediaClient.swift
index 95ae1fa..f05171f 100644
--- a/WebRTCiOSSDK/api/AntMediaClient.swift
+++ b/WebRTCiOSSDK/api/AntMediaClient.swift
@@ -508,9 +508,14 @@ open class AntMediaClient: NSObject, AntMediaClientProtocol {
         if (self.webRTCClientMap[id] == nil) {
             AntMediaClient.printf("Has wsClient? (start) : \(String(describing: self.webRTCClientMap[id]))")
-            self.webRTCClientMap[id] = WebRTCClient.init(remoteVideoView: remoteView, localVideoView: localView, delegate: self, mode: mode != .unspecified ? mode : self.mode , cameraPosition: self.cameraPosition, targetWidth: self.targetWidth, targetHeight: self.targetHeight, videoEnabled: self.videoEnable, enableDataChannel: self.enableDataChannel, useExternalCameraSource: self.useExternalCameraSource, externalAudio: self.externalAudioEnabled, externalVideoCapture: self.externalVideoCapture, cameraSourceFPS: self.cameraSourceFPS, streamId:id,
+            self.webRTCClientMap[id] = WebRTCClient.init(remoteVideoView: remoteView, localVideoView: localView, delegate: self, cameraPosition: self.cameraPosition, targetWidth: self.targetWidth, targetHeight: self.targetHeight, videoEnabled: self.videoEnable, enableDataChannel: self.enableDataChannel, useExternalCameraSource: self.useExternalCameraSource, externalAudio: self.externalAudioEnabled, externalVideoCapture: self.externalVideoCapture, cameraSourceFPS: self.cameraSourceFPS, streamId:id,
                 degradationPreference: self.degradationPreference);
+
+            if (self.mode != .play) {
+                self.webRTCClientMap[id]?.addLocalMediaStream();
+            }
+
             self.webRTCClientMap[id]?.setToken(token)
             AntMediaClient.rtcAudioSession.add(self);
@@ -637,6 +642,14 @@ open class AntMediaClient: NSObject, AntMediaClientProtocol {
         self.sendAudioTrackStatusNotification(enabled:enableTrack);
     }
+    open func getLocalAudioTrack() -> RTCAudioTrack? {
+        return self.webRTCClientMap[self.publisherStreamId ?? (self.p2pStreamId ?? "")]?.getLocalAudioTrack();
+    }
+
+    open func getLocalVideoTrack() -> RTCVideoTrack? {
+        return self.webRTCClientMap[self.publisherStreamId ?? (self.p2pStreamId ?? "")]?.getLocalVideoTrack();
+    }
+
     func sendNotification(eventType:String, streamId: String = "") {
         let notification = [
             EVENT_TYPE: eventType,
diff --git a/WebRTCiOSSDK/api/AntMediaClientProtocol.swift b/WebRTCiOSSDK/api/AntMediaClientProtocol.swift
index 3b3b6cc..866c7a5 100644
--- a/WebRTCiOSSDK/api/AntMediaClientProtocol.swift
+++ b/WebRTCiOSSDK/api/AntMediaClientProtocol.swift
@@ -356,6 +356,15 @@ public protocol AntMediaClientProtocol {
      */
     func enableTrack(trackId:String, enabled:Bool)
+    /**
+     Get the local video track, which is the local camera or an external source such as the screen
+     */
+    func getLocalVideoTrack() -> RTCVideoTrack?;
+
+    /**
+     Get the local audio track, which is captured from the local microphone
+     */
+    func getLocalAudioTrack() -> RTCAudioTrack?;
     /**
      Call this method to join a conference room
      - Parameters
diff --git a/WebRTCiOSSDK/api/webrtc/WebRTCClient.swift b/WebRTCiOSSDK/api/webrtc/WebRTCClient.swift
index a866a1c..83a21d2 100644
--- a/WebRTCiOSSDK/api/webrtc/WebRTCClient.swift
+++ b/WebRTCiOSSDK/api/webrtc/WebRTCClient.swift
@@ -85,16 +85,16 @@ class WebRTCClient: NSObject {
         self.peerConnection = WebRTCClient.factory.peerConnection(with: configuration, constraints: defaultConstraint, delegate: self)
     }
-    public convenience init(remoteVideoView: RTCVideoRenderer?, localVideoView: RTCVideoRenderer?, delegate: WebRTCClientDelegate, mode: AntMediaClientMode, cameraPosition: AVCaptureDevice.Position, targetWidth: Int, targetHeight: Int, streamId: String) {
+    public convenience init(remoteVideoView: RTCVideoRenderer?, localVideoView: RTCVideoRenderer?, delegate: WebRTCClientDelegate, cameraPosition: AVCaptureDevice.Position, targetWidth: Int, targetHeight: Int, streamId: String) {
         self.init(remoteVideoView: remoteVideoView, localVideoView: localVideoView, delegate: delegate,
-                  mode: mode, cameraPosition: cameraPosition, targetWidth: targetWidth, targetHeight: targetHeight, videoEnabled: true, enableDataChannel:false, streamId: streamId)
+                  cameraPosition: cameraPosition, targetWidth: targetWidth, targetHeight: targetHeight, videoEnabled: true, enableDataChannel:false, streamId: streamId)
     }
-    public convenience init(remoteVideoView: RTCVideoRenderer?, localVideoView: RTCVideoRenderer?, delegate: WebRTCClientDelegate, mode: AntMediaClientMode, cameraPosition: AVCaptureDevice.Position, targetWidth: Int, targetHeight: Int, videoEnabled: Bool, enableDataChannel: Bool, streamId: String) {
+    public convenience init(remoteVideoView: RTCVideoRenderer?, localVideoView: RTCVideoRenderer?, delegate: WebRTCClientDelegate, cameraPosition: AVCaptureDevice.Position, targetWidth: Int, targetHeight: Int, videoEnabled: Bool, enableDataChannel: Bool, streamId: String) {
         self.init(remoteVideoView: remoteVideoView, localVideoView: localVideoView, delegate: delegate,
-                  mode: mode, cameraPosition: cameraPosition, targetWidth: targetWidth, targetHeight: targetHeight, videoEnabled: true, enableDataChannel:false, useExternalCameraSource: false, streamId: streamId)
+                  cameraPosition: cameraPosition, targetWidth: targetWidth, targetHeight: targetHeight, videoEnabled: true, enableDataChannel:false, useExternalCameraSource: false, streamId: streamId)
     }
-    public convenience init(remoteVideoView: RTCVideoRenderer?, localVideoView: RTCVideoRenderer?, delegate: WebRTCClientDelegate, mode: AntMediaClientMode, cameraPosition: AVCaptureDevice.Position, targetWidth: Int, targetHeight: Int, videoEnabled: Bool, enableDataChannel: Bool, useExternalCameraSource: Bool, externalAudio: Bool = false, externalVideoCapture: Bool = false, cameraSourceFPS: Int = 30, streamId: String,
+    public convenience init(remoteVideoView: RTCVideoRenderer?, localVideoView: RTCVideoRenderer?, delegate: WebRTCClientDelegate, cameraPosition: AVCaptureDevice.Position, targetWidth: Int, targetHeight: Int, videoEnabled: Bool, enableDataChannel: Bool, useExternalCameraSource: Bool, externalAudio: Bool = false, externalVideoCapture: Bool = false, cameraSourceFPS: Int = 30, streamId: String,
                             degradationPreference: RTCDegradationPreference = RTCDegradationPreference.maintainResolution) {
         self.init(remoteVideoView: remoteVideoView, localVideoView: localVideoView, delegate: delegate, externalAudio: externalAudio)
         self.cameraPosition = cameraPosition
@@ -107,12 +107,10 @@ class WebRTCClient: NSObject {
         self.cameraSourceFPS = cameraSourceFPS;
         self.streamId = streamId;
         self.degradationPreference = degradationPreference
-
-        if (mode != .play) {
-            self.addLocalMediaStream()
-        }
     }
+
+
     public func externalVideoCapture(externalVideoCapture: Bool) {
         self.externalVideoCapture = externalVideoCapture;
     }
@@ -412,7 +410,7 @@ class WebRTCClient: NSObject {
     }
-    private func addLocalMediaStream() -> Bool {
+    public func addLocalMediaStream() -> Bool {
         AntMediaClient.printf("Add local media streams")
@@ -444,6 +442,16 @@ class WebRTCClient: NSObject {
         return true
     }
+    public func getLocalVideoTrack() -> RTCVideoTrack {
+        return self.localVideoTrack;
+    }
+
+    public func getLocalAudioTrack() -> RTCAudioTrack {
+        return self.localAudioTrack;
+    }
+
+
+
     public func setDegradationPreference(degradationPreference:RTCDegradationPreference) {
         self.degradationPreference = degradationPreference
     }
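
The new getLocalVideoTrack() / getLocalAudioTrack() accessors resolve against the publisher stream id (falling back to the P2P stream id), so they return nil for a play-only session or before the local media stream has been added. A minimal usage sketch from the application side, assuming the framework module is named WebRTCiOSSDK and relying on the trackId / isEnabled properties that RTCVideoTrack and RTCAudioTrack inherit from RTCMediaStreamTrack; the helper name is hypothetical:

    import WebRTC
    import WebRTCiOSSDK

    // Hypothetical helper, not part of the SDK: logs and mutes/unmutes the
    // local tracks exposed by the new getters. Both getters return nil until
    // the client has created its local media stream.
    func setLocalMediaEnabled(_ client: AntMediaClient, enabled: Bool) {
        if let videoTrack = client.getLocalVideoTrack() {
            print("local video trackId: \(videoTrack.trackId)")
            videoTrack.isEnabled = enabled   // inherited from RTCMediaStreamTrack
        }
        if let audioTrack = client.getLocalAudioTrack() {
            print("local audio trackId: \(audioTrack.trackId)")
            audioTrack.isEnabled = enabled
        }
    }

Setting isEnabled to false makes WebRTC send silence or black frames for that track without renegotiating the peer connection, which is the main reason an application would want direct access to the raw local tracks.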
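Because mode is no longer an initializer parameter, WebRTCClient does not add the local media stream during init; AntMediaClient.start now calls the newly public addLocalMediaStream() right after constructing the client whenever the mode is not .play. Any other code inside the SDK that builds a WebRTCClient directly has to follow the same order. A hedged sketch of that flow, using the remaining convenience initializer; the helper name, renderers, dimensions and stream id are placeholders, not part of this change:

    import AVFoundation
    import WebRTC

    // Hypothetical, SDK-internal helper illustrating the new call order:
    // construct the client first, then add the local stream when publishing.
    func makePublishingClient(remoteRenderer: RTCVideoRenderer,
                              localRenderer: RTCVideoRenderer,
                              delegate: WebRTCClientDelegate) -> WebRTCClient {
        let client = WebRTCClient(remoteVideoView: remoteRenderer,
                                  localVideoView: localRenderer,
                                  delegate: delegate,
                                  cameraPosition: .front,
                                  targetWidth: 1280,
                                  targetHeight: 720,
                                  videoEnabled: true,
                                  enableDataChannel: false,
                                  streamId: "stream1")

        // Play-only clients skip this call; publishers and P2P peers need it.
        _ = client.addLocalMediaStream()
        return client
    }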