Merge pull request #1336 from shogo4405/feature/multi-captures

Refactor IOVideoUnit#attachCamera method.
shogo4405 authored Nov 17, 2023
2 parents 17916e6 + d6bc7a4 commit f805b71
Showing 13 changed files with 581 additions and 538 deletions.
16 changes: 9 additions & 7 deletions Examples/iOS/IngestViewController.swift
@@ -44,10 +44,11 @@ final class IngestViewController: UIViewController {
pipIntentView.isUserInteractionEnabled = true
view.addSubview(pipIntentView)

// If you're using multi-camera functionality, please make sure isMultiCamSessionEnabled = true. Initialization point.
stream.isMultiCamSessionEnabled = true
if let orientation = DeviceUtil.videoOrientation(by: UIApplication.shared.statusBarOrientation) {
stream.videoOrientation = orientation
}

stream.isMonitoringEnabled = DeviceUtil.isHeadphoneConnected()
stream.audioSettings.bitRate = 64 * 1000
stream.bitrateStrategy = VideoAdaptiveNetBitRateStrategy(mamimumVideoBitrate: VideoCodecSettings.default.bitRate)
@@ -62,18 +63,19 @@ final class IngestViewController: UIViewController {
super.viewWillAppear(animated)
let back = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: currentPosition)

// If you're using multi-camera functionality, please make sure to call the attachMultiCamera method first. This is required for iOS 14 and 15, among others.
if #available(iOS 13.0, *) {
let front = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front)
stream.videoCapture(for: 1)?.isVideoMirrored = true
stream.attachMultiCamera(front)
}
stream.attachCamera(back) { error in
logger.warn(error)
}
stream.attachAudio(AVCaptureDevice.default(for: .audio), automaticallyConfiguresApplicationAudioSession: false) { error in
logger.warn(error)
}

if #available(iOS 13.0, *) {
let front = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front)
stream.videoCapture(for: 1)?.isVideoMirrored = true
stream.attachMultiCamera(front)
}

stream.addObserver(self, forKeyPath: "currentFPS", options: .new, context: nil)
(view as? (any NetStreamDrawable))?.attachStream(stream)
NotificationCenter.default.addObserver(self, selector: #selector(didInterruptionNotification(_:)), name: AVAudioSession.interruptionNotification, object: nil)
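The sample change above encodes two ordering rules: isMultiCamSessionEnabled must be set at initialization time, before any capture device is attached, and on iOS 14 and 15 attachMultiCamera has to be called before attachCamera. A condensed sketch of that setup follows, assuming stream is the sample's NetStream-based stream; the helper name configureCaptures is hypothetical and error handling is simplified relative to the example.

import AVFoundation
import HaishinKit

// Condensed sketch of the setup order used in IngestViewController.
// configureCaptures is a hypothetical helper name, not part of the example.
func configureCaptures(for stream: NetStream) {
    // Multi-cam must be opted into before any capture device is attached.
    stream.isMultiCamSessionEnabled = true

    let back = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back)

    if #available(iOS 13.0, *) {
        // On iOS 14 and 15 the secondary camera has to be attached first.
        let front = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front)
        stream.videoCapture(for: 1)?.isVideoMirrored = true
        stream.attachMultiCamera(front)
    }
    stream.attachCamera(back) { error in
        // The example logs via logger.warn; plain print keeps this sketch self-contained.
        print(error)
    }
    stream.attachAudio(AVCaptureDevice.default(for: .audio), automaticallyConfiguresApplicationAudioSession: false) { error in
        print(error)
    }
}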
4 changes: 4 additions & 0 deletions HaishinKit.xcodeproj/project.pbxproj
@@ -190,6 +190,7 @@
BC56765B2AD2AED800524F7E /* AudioCapture.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC56765A2AD2AED800524F7E /* AudioCapture.swift */; };
BC570B4828E9ACC10098A12C /* IOUnit.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC570B4728E9ACC10098A12C /* IOUnit.swift */; };
BC6692F32AC2F717009EC058 /* NetBitRateStrategyConvertible.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC6692F22AC2F717009EC058 /* NetBitRateStrategyConvertible.swift */; };
BC6889782B011AEB0026A4C2 /* IOCaptureSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC6889772B011AEB0026A4C2 /* IOCaptureSession.swift */; };
BC6FC91E29609A6800A746EE /* ShapeFactory.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC6FC91D29609A6800A746EE /* ShapeFactory.swift */; };
BC6FC9222961B3D800A746EE /* vImage_CGImageFormat+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC6FC9212961B3D800A746EE /* vImage_CGImageFormat+Extension.swift */; };
BC701F332AAD808F00C4BEFE /* SRTHaishinKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = BCCC45962AA289FA0016EFE8 /* SRTHaishinKit.framework */; };
@@ -602,6 +603,7 @@
BC56765A2AD2AED800524F7E /* AudioCapture.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioCapture.swift; sourceTree = "<group>"; };
BC570B4728E9ACC10098A12C /* IOUnit.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOUnit.swift; sourceTree = "<group>"; };
BC6692F22AC2F717009EC058 /* NetBitRateStrategyConvertible.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NetBitRateStrategyConvertible.swift; sourceTree = "<group>"; };
BC6889772B011AEB0026A4C2 /* IOCaptureSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOCaptureSession.swift; sourceTree = "<group>"; };
BC6FC91D29609A6800A746EE /* ShapeFactory.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ShapeFactory.swift; sourceTree = "<group>"; };
BC6FC9212961B3D800A746EE /* vImage_CGImageFormat+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "vImage_CGImageFormat+Extension.swift"; sourceTree = "<group>"; };
BC7C56B6299E579F00C41A9B /* AudioCodecSettings.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioCodecSettings.swift; sourceTree = "<group>"; };
@@ -1048,6 +1050,7 @@
BCFC51FD2AAB420700014428 /* IOAudioResampler.swift */,
BC5019C02A6D266B0046E02F /* IOAudioRingBuffer.swift */,
29B876891CD70AFE00FC07DA /* IOAudioUnit.swift */,
BC6889772B011AEB0026A4C2 /* IOCaptureSession.swift */,
BC1102492925147300D48035 /* IOCaptureUnit.swift */,
BC0F1FD92ACC4CC100C326FF /* IOCaptureVideoPreview.swift */,
29B8768B1CD70AFE00FC07DA /* IOMixer.swift */,
@@ -1727,6 +1730,7 @@
BC1DC4FB2A02868900E928ED /* FLVVideoFourCC.swift in Sources */,
2976A47E1D48C5C700B53EF2 /* IORecorder.swift in Sources */,
BC110257292E661E00D48035 /* MultiCamCaptureSettings.swift in Sources */,
BC6889782B011AEB0026A4C2 /* IOCaptureSession.swift in Sources */,
BC3802142AB5E7CC001AE399 /* IOAudioCaptureUnit.swift in Sources */,
29B876B21CD70B2800FC07DA /* RTMPMuxer.swift in Sources */,
2958912E1EEB8F4100CE51E1 /* FLVSoundType.swift in Sources */,
6 changes: 3 additions & 3 deletions Sources/Media/IOAudioCaptureUnit.swift
@@ -1,7 +1,7 @@
#if os(iOS) || os(tvOS) || os(macOS)
import AVFoundation
import Foundation

#if os(iOS) || os(tvOS) || os(macOS)
@available(tvOS 17.0, *)
final class IOAudioCaptureUnit: IOCaptureUnit {
typealias Output = AVCaptureAudioDataOutput
@@ -13,7 +13,7 @@ final class IOAudioCaptureUnit: IOCaptureUnit {

func attachDevice(_ device: AVCaptureDevice?, audioUnit: IOAudioUnit) throws {
setSampleBufferDelegate(nil)
detachSession(audioUnit.mixer?.session)
audioUnit.mixer?.session.detachCapture(self)
guard let device else {
self.device = nil
input = nil
@@ -23,7 +23,7 @@ final class IOAudioCaptureUnit: IOCaptureUnit {
self.device = device
input = try AVCaptureDeviceInput(device: device)
output = AVCaptureAudioDataOutput()
attachSession(audioUnit.mixer?.session)
audioUnit.mixer?.session.attachCapture(self)
setSampleBufferDelegate(audioUnit)
}

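The IOAudioCaptureUnit change flips the direction of the session plumbing: the capture unit no longer calls attachSession/detachSession on itself, it hands itself to the mixer's session via attachCapture and detachCapture, so adding and removing AVCapture inputs and outputs becomes the session wrapper's job. The real IOCaptureSession introduced by this commit is not shown in this excerpt, so the following is only a rough sketch of that shape, with a hypothetical stand-in protocol for the capture unit.

import AVFoundation

// Hypothetical stand-in for IOCaptureUnit; only the members the diff relies on
// (input and output) are modeled here.
protocol CaptureUnitSketch {
    var input: AVCaptureInput? { get }
    var output: AVCaptureOutput? { get }
}

// Rough sketch of a session wrapper exposing attachCapture/detachCapture.
final class CaptureSessionSketch {
    let session = AVCaptureSession()

    func attachCapture(_ capture: CaptureUnitSketch?) {
        guard let capture else {
            return
        }
        if let input = capture.input, session.canAddInput(input) {
            session.addInput(input)
        }
        if let output = capture.output, session.canAddOutput(output) {
            session.addOutput(output)
        }
    }

    func detachCapture(_ capture: CaptureUnitSketch?) {
        guard let capture else {
            return
        }
        if let input = capture.input {
            session.removeInput(input)
        }
        if let output = capture.output {
            session.removeOutput(output)
        }
    }
}

Centralizing the add/remove logic in one place like this is presumably what lets the audio and video units share the same attach/detach path instead of each managing the session directly.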
29 changes: 12 additions & 17 deletions Sources/Media/IOAudioUnit.swift
@@ -59,35 +59,30 @@ final class IOAudioUnit: NSObject, IOUnit {
#if os(tvOS)
private var _capture: Any?
@available(tvOS 17.0, *)
var capture: IOAudioCaptureUnit {
private var capture: IOAudioCaptureUnit {
if _capture == nil {
_capture = IOAudioCaptureUnit()
}
return _capture as! IOAudioCaptureUnit
}
#elseif os(iOS) || os(macOS)
private(set) var capture: IOAudioCaptureUnit = .init()
private var capture: IOAudioCaptureUnit = .init()
#endif

#if os(iOS) || os(macOS) || os(tvOS)
@available(tvOS 17.0, *)
func attachAudio(_ device: AVCaptureDevice?, automaticallyConfiguresApplicationAudioSession: Bool) throws {
guard let mixer else {
return
}
mixer.session.beginConfiguration()
defer {
mixer.session.commitConfiguration()
}
guard let device else {
try capture.attachDevice(nil, audioUnit: self)
inputFormat = nil
return
try mixer?.session.configuration { session in
guard let device else {
try capture.attachDevice(nil, audioUnit: self)
inputFormat = nil
return
}
try capture.attachDevice(device, audioUnit: self)
#if os(iOS)
session.automaticallyConfiguresApplicationAudioSession = automaticallyConfiguresApplicationAudioSession
#endif
}
try capture.attachDevice(device, audioUnit: self)
#if os(iOS)
mixer.session.automaticallyConfiguresApplicationAudioSession = automaticallyConfiguresApplicationAudioSession
#endif
}
#endif

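In IOAudioUnit, attachAudio no longer brackets its work with beginConfiguration/commitConfiguration by hand; the body now runs inside a throwing configuration closure on the mixer's session, moving the begin/commit bookkeeping out of the call site. The IOCaptureSession implementation is not part of this excerpt, but a helper with that call shape could look roughly like the following sketch, written here as a plain AVCaptureSession extension.

import AVFoundation

// Sketch only: mirrors the call site `try mixer?.session.configuration { session in ... }`,
// not the actual IOCaptureSession implementation.
extension AVCaptureSession {
    func configuration<T>(_ body: (AVCaptureSession) throws -> T) rethrows -> T {
        beginConfiguration()
        // defer guarantees commitConfiguration runs even if body throws.
        defer {
            commitConfiguration()
        }
        return try body(self)
    }
}

The defer inside such a helper takes over the role of the explicit defer that previously lived in attachAudio, so the guarantee that the session is always committed moves into the session type along with the rest of the bookkeeping.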
