
Commit dbf7095
Merge pull request #1317 from shogo4405/feature/input-format
Remove SRTStream.attachRawMedia/detachRawMedia.
shogo4405 authored Oct 10, 2023
2 parents 86a8950 + 72c4713 commit dbf7095
Showing 2 changed files with 24 additions and 48 deletions.
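
In practice this is a small migration for callers that append their own sample buffers: when publish runs, the writer now derives its expected media set from videoInputFormat and audioInputFormat, so nothing has to be declared up front. A hedged before/after sketch follows; the appendSampleBuffer name comes from the doc comments in this diff, and its exact signature at this revision is an assumption.

import CoreMedia
import HaishinKit
import SRTHaishinKit

// Rough migration sketch for raw-media publishing over SRT. The
// appendSampleBuffer(_:) call and the "live" stream name are assumptions;
// attachRawMedia/detachRawMedia are the methods this commit removes.
func publishRawVideo(on stream: SRTStream, sampleBuffer: CMSampleBuffer) {
    // Before this commit the caller had to declare intent explicitly:
    //     stream.attachRawMedia(.video)
    // Afterwards, appending media is enough: publish() reads
    // videoInputFormat/audioInputFormat and fills writer.expectedMedias itself.
    stream.appendSampleBuffer(sampleBuffer)
    stream.publish("live")
}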
SRTHaishinKit/SRTStream.swift: 8 additions & 42 deletions
@@ -4,7 +4,7 @@ import HaishinKit
 import libsrt
 
 /// An object that provides the interface to control a one-way channel over a SRTConnection.
-public class SRTStream: NetStream {
+public final class SRTStream: NetStream {
     private enum ReadyState: UInt8 {
         case initialized = 0
         case open = 1
@@ -19,7 +19,6 @@ public class SRTStream: NetStream {
     private var action: (() -> Void)?
     private var keyValueObservations: [NSKeyValueObservation] = []
     private weak var connection: SRTConnection?
-    private lazy var audioEngine: AVAudioEngine = .init()
 
     private lazy var writer: TSWriter = {
         var writer = TSWriter()
@@ -56,6 +55,13 @@ public class SRTStream: NetStream {
                 mixer.startDecoding()
                 readyState = .playing
             case .publish:
+                writer.expectedMedias.removeAll()
+                if videoInputFormat != nil {
+                    writer.expectedMedias.insert(.video)
+                }
+                if audioInputFormat != nil {
+                    writer.expectedMedias.insert(.audio)
+                }
                 mixer.startEncoding(writer)
                 mixer.startRunning()
                 writer.startRunning()
@@ -90,46 +96,6 @@
         keyValueObservations.removeAll()
     }
 
-    /**
-     Prepare the stream to process media of the given type
-
-     - parameters:
-        - type: An AVMediaType you will be sending via an appendSampleBuffer call
-
-     As with appendSampleBuffer only video and audio types are supported
-     */
-    public func attachRawMedia(_ type: AVMediaType) {
-        writer.expectedMedias.insert(type)
-    }
-
-    /**
-     Remove a media type that was added via attachRawMedia
-
-     - parameters:
-        - type: An AVMediaType that was added via an attachRawMedia call
-     */
-    public func detachRawMedia(_ type: AVMediaType) {
-        writer.expectedMedias.remove(type)
-    }
-
-    override public func attachCamera(_ camera: AVCaptureDevice?, onError: ((any Error) -> Void)? = nil) {
-        if camera == nil {
-            writer.expectedMedias.remove(.video)
-        } else {
-            writer.expectedMedias.insert(.video)
-        }
-        super.attachCamera(camera, onError: onError)
-    }
-
-    override public func attachAudio(_ audio: AVCaptureDevice?, automaticallyConfiguresApplicationAudioSession: Bool = true, onError: ((any Error) -> Void)? = nil) {
-        if audio == nil {
-            writer.expectedMedias.remove(.audio)
-        } else {
-            writer.expectedMedias.insert(.audio)
-        }
-        super.attachAudio(audio, automaticallyConfiguresApplicationAudioSession: automaticallyConfiguresApplicationAudioSession, onError: onError)
-    }
-
     /// Sends streaming audio, vidoe and data message from client.
     public func publish(_ name: String? = "") {
         lockQueue.async {
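
For reference, the publish-time logic added above amounts to building a set of expected media types from whichever input formats are non-nil. A self-contained sketch of that pattern; the function and values below are simplifications, not the library's types.

import AVFoundation
import CoreMedia

// The publish-time bookkeeping from the diff above, extracted into a pure
// function for illustration. The optionals stand in for the stream's
// videoInputFormat/audioInputFormat; TSWriter itself is not modeled.
func expectedMedias(video: CMVideoFormatDescription?, audio: AVAudioFormat?) -> Set<AVMediaType> {
    var medias: Set<AVMediaType> = []
    if video != nil {
        medias.insert(.video)
    }
    if audio != nil {
        medias.insert(.audio)
    }
    return medias
}

// Audio-only input yields an audio-only expectation for the TS writer.
let audioOnly = AVAudioFormat(standardFormatWithSampleRate: 48_000, channels: 1)
assert(expectedMedias(video: nil, audio: audioOnly) == [.audio])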
Sources/Net/NetStream.swift: 16 additions & 6 deletions
@@ -50,9 +50,6 @@ open class NetStream: NSObject {
         }
     }
 
-    /// Specifies the delegate..
-    public weak var delegate: (any NetStreamDelegate)?
-
     /// Specifies the audio monitoring enabled or not.
     public var isMonitoringEnabled: Bool {
         get {
@@ -186,6 +183,16 @@ open class NetStream: NSObject {
         }
     }
 
+    /// The video input format.
+    public var videoInputFormat: CMVideoFormatDescription? {
+        return mixer.videoIO.inputFormat
+    }
+
+    /// The audio input format.
+    public var audioInputFormat: AVAudioFormat? {
+        return mixer.audioIO.inputFormat
+    }
+
     /// The number of frames per second being displayed.
     @objc public internal(set) dynamic var currentFPS: UInt16 = 0
 
@@ -199,6 +206,9 @@
         }
     }
 
+    /// Specifies the delegate..
+    public weak var delegate: (any NetStreamDelegate)?
+
     /// Creates a NetStream object.
     override public init() {
         super.init()
@@ -212,7 +222,7 @@
     /// Attaches the primary camera object.
     /// - Warning: This method can't use appendSampleBuffer at the same time.
     @available(tvOS 17.0, *)
-    open func attachCamera(_ device: AVCaptureDevice?, onError: ((_ error: any Error) -> Void)? = nil) {
+    public func attachCamera(_ device: AVCaptureDevice?, onError: ((_ error: any Error) -> Void)? = nil) {
         lockQueue.async {
             do {
                 try self.mixer.videoIO.attachCamera(device)
@@ -225,7 +235,7 @@
     /// Attaches the 2ndary camera object for picture in picture.
     /// - Warning: This method can't use appendSampleBuffer at the same time.
     @available(iOS 13.0, tvOS 17.0, *)
-    open func attachMultiCamera(_ device: AVCaptureDevice?, onError: ((_ error: any Error) -> Void)? = nil) {
+    public func attachMultiCamera(_ device: AVCaptureDevice?, onError: ((_ error: any Error) -> Void)? = nil) {
         lockQueue.async {
             do {
                 try self.mixer.videoIO.attachMultiCamera(device)
@@ -238,7 +248,7 @@
     /// Attaches the audio capture object.
     /// - Warning: This method can't use appendSampleBuffer at the same time.
     @available(tvOS 17.0, *)
-    open func attachAudio(_ device: AVCaptureDevice?, automaticallyConfiguresApplicationAudioSession: Bool = false, onError: ((_ error: any Error) -> Void)? = nil) {
+    public func attachAudio(_ device: AVCaptureDevice?, automaticallyConfiguresApplicationAudioSession: Bool = false, onError: ((_ error: any Error) -> Void)? = nil) {
         lockQueue.async {
             do {
                 try self.mixer.audioIO.attachAudio(device, automaticallyConfiguresApplicationAudioSession: automaticallyConfiguresApplicationAudioSession)
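
The two read-only properties added to NetStream above are what SRTStream now consults at publish time, and they also let callers check what is currently attached. A minimal usage sketch; only videoInputFormat and audioInputFormat come from this commit, and the helper itself is hypothetical.

import AVFoundation
import CoreMedia
import HaishinKit

// Hypothetical helper: summarize which inputs a NetStream currently has.
// Only videoInputFormat and audioInputFormat are taken from this commit;
// everything else here is illustrative.
func describeInputs(of stream: NetStream) -> String {
    var parts: [String] = []
    if let video = stream.videoInputFormat {
        let size = CMVideoFormatDescriptionGetDimensions(video)
        parts.append("video \(size.width)x\(size.height)")
    }
    if let audio = stream.audioInputFormat {
        parts.append("audio \(audio.sampleRate) Hz, \(audio.channelCount) ch")
    }
    return parts.isEmpty ? "no inputs attached yet" : parts.joined(separator: ", ")
}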
