diff --git a/SRTHaishinKit/SRTStream.swift b/SRTHaishinKit/SRTStream.swift index 5a1ff658c..4b529883c 100644 --- a/SRTHaishinKit/SRTStream.swift +++ b/SRTHaishinKit/SRTStream.swift @@ -4,7 +4,7 @@ import HaishinKit import libsrt /// An object that provides the interface to control a one-way channel over a SRTConnection. -public class SRTStream: NetStream { +public final class SRTStream: NetStream { private enum ReadyState: UInt8 { case initialized = 0 case open = 1 @@ -19,7 +19,6 @@ public class SRTStream: NetStream { private var action: (() -> Void)? private var keyValueObservations: [NSKeyValueObservation] = [] private weak var connection: SRTConnection? - private lazy var audioEngine: AVAudioEngine = .init() private lazy var writer: TSWriter = { var writer = TSWriter() @@ -56,6 +55,13 @@ public class SRTStream: NetStream { mixer.startDecoding() readyState = .playing case .publish: + writer.expectedMedias.removeAll() + if videoInputFormat != nil { + writer.expectedMedias.insert(.video) + } + if audioInputFormat != nil { + writer.expectedMedias.insert(.audio) + } mixer.startEncoding(writer) mixer.startRunning() writer.startRunning() @@ -90,46 +96,6 @@ public class SRTStream: NetStream { keyValueObservations.removeAll() } - /** - Prepare the stream to process media of the given type - - - parameters: - - type: An AVMediaType you will be sending via an appendSampleBuffer call - - As with appendSampleBuffer only video and audio types are supported - */ - public func attachRawMedia(_ type: AVMediaType) { - writer.expectedMedias.insert(type) - } - - /** - Remove a media type that was added via attachRawMedia - - - parameters: - - type: An AVMediaType that was added via an attachRawMedia call - */ - public func detachRawMedia(_ type: AVMediaType) { - writer.expectedMedias.remove(type) - } - - override public func attachCamera(_ camera: AVCaptureDevice?, onError: ((any Error) -> Void)? 
= nil) { - if camera == nil { - writer.expectedMedias.remove(.video) - } else { - writer.expectedMedias.insert(.video) - } - super.attachCamera(camera, onError: onError) - } - - override public func attachAudio(_ audio: AVCaptureDevice?, automaticallyConfiguresApplicationAudioSession: Bool = true, onError: ((any Error) -> Void)? = nil) { - if audio == nil { - writer.expectedMedias.remove(.audio) - } else { - writer.expectedMedias.insert(.audio) - } - super.attachAudio(audio, automaticallyConfiguresApplicationAudioSession: automaticallyConfiguresApplicationAudioSession, onError: onError) - } - /// Sends streaming audio, vidoe and data message from client. public func publish(_ name: String? = "") { lockQueue.async { diff --git a/Sources/Net/NetStream.swift b/Sources/Net/NetStream.swift index 82cd9c6fe..7606c4feb 100644 --- a/Sources/Net/NetStream.swift +++ b/Sources/Net/NetStream.swift @@ -50,9 +50,6 @@ open class NetStream: NSObject { } } - /// Specifies the delegate.. - public weak var delegate: (any NetStreamDelegate)? - /// Specifies the audio monitoring enabled or not. public var isMonitoringEnabled: Bool { get { @@ -186,6 +183,16 @@ open class NetStream: NSObject { } } + /// The video input format. + public var videoInputFormat: CMVideoFormatDescription? { + return mixer.videoIO.inputFormat + } + + /// The audio input format. + public var audioInputFormat: AVAudioFormat? { + return mixer.audioIO.inputFormat + } + /// The number of frames per second being displayed. @objc public internal(set) dynamic var currentFPS: UInt16 = 0 @@ -199,6 +206,9 @@ open class NetStream: NSObject { } } + /// Specifies the delegate. + public weak var delegate: (any NetStreamDelegate)? + /// Creates a NetStream object. override public init() { super.init() @@ -212,7 +222,7 @@ open class NetStream: NSObject { /// Attaches the primary camera object. /// - Warning: This method can't use appendSampleBuffer at the same time. 
@available(tvOS 17.0, *) - open func attachCamera(_ device: AVCaptureDevice?, onError: ((_ error: any Error) -> Void)? = nil) { + public func attachCamera(_ device: AVCaptureDevice?, onError: ((_ error: any Error) -> Void)? = nil) { lockQueue.async { do { try self.mixer.videoIO.attachCamera(device) @@ -225,7 +235,7 @@ open class NetStream: NSObject { /// Attaches the 2ndary camera object for picture in picture. /// - Warning: This method can't use appendSampleBuffer at the same time. @available(iOS 13.0, tvOS 17.0, *) - open func attachMultiCamera(_ device: AVCaptureDevice?, onError: ((_ error: any Error) -> Void)? = nil) { + public func attachMultiCamera(_ device: AVCaptureDevice?, onError: ((_ error: any Error) -> Void)? = nil) { lockQueue.async { do { try self.mixer.videoIO.attachMultiCamera(device) @@ -238,7 +248,7 @@ open class NetStream: NSObject { /// Attaches the audio capture object. /// - Warning: This method can't use appendSampleBuffer at the same time. @available(tvOS 17.0, *) - open func attachAudio(_ device: AVCaptureDevice?, automaticallyConfiguresApplicationAudioSession: Bool = false, onError: ((_ error: any Error) -> Void)? = nil) { + public func attachAudio(_ device: AVCaptureDevice?, automaticallyConfiguresApplicationAudioSession: Bool = false, onError: ((_ error: any Error) -> Void)? = nil) { lockQueue.async { do { try self.mixer.audioIO.attachAudio(device, automaticallyConfiguresApplicationAudioSession: automaticallyConfiguresApplicationAudioSession)