From d8819eae635cb822d9942c61a5a387871f72077c Mon Sep 17 00:00:00 2001
From: shogo4405
Date: Thu, 28 Sep 2023 18:26:49 +0900
Subject: [PATCH] Refactor IOMixer, IOVideoUnit, IOAudioUnit.

---
 HaishinKit.xcodeproj/project.pbxproj       |   8 +-
 SRTHaishinKit/SRTStream.swift              |   2 +-
 .../CMFormatDescription+Extension.swift    |   4 +
 Sources/Media/IOAudioUnit.swift            |  40 +---
 Sources/Media/IOMixer.swift                |  62 +++--
 Sources/Media/IOVideoMixer.swift           | 117 ++++++++++
 Sources/Media/IOVideoUnit.swift            | 211 +++++++-----------
 7 files changed, 265 insertions(+), 179 deletions(-)
 create mode 100644 Sources/Media/IOVideoMixer.swift

diff --git a/HaishinKit.xcodeproj/project.pbxproj b/HaishinKit.xcodeproj/project.pbxproj
index a711f952e..8f3d86da1 100644
--- a/HaishinKit.xcodeproj/project.pbxproj
+++ b/HaishinKit.xcodeproj/project.pbxproj
@@ -168,6 +168,7 @@
 		BC3004F9296C351D00119932 /* RTMPPlaybackViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC3004F8296C351D00119932 /* RTMPPlaybackViewController.swift */; };
 		BC31DBD22A653D1600C4DEA3 /* IOAudioMonitor.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC31DBD12A653D1600C4DEA3 /* IOAudioMonitor.swift */; };
 		BC32E88829C9971100051507 /* InstanceHolder.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC32E88729C9971100051507 /* InstanceHolder.swift */; };
+		BC34836A2AC56F3A002926F1 /* IOVideoMixer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC3483692AC56F3A002926F1 /* IOVideoMixer.swift */; };
 		BC34DFF025EBB53F005F975A /* Logboard.xcframework in Frameworks */ = {isa = PBXBuildFile; fileRef = BC34DFD125EBB12C005F975A /* Logboard.xcframework */; };
 		BC34E00225EBB59C005F975A /* Logboard.xcframework in Frameworks */ = {isa = PBXBuildFile; fileRef = BC34DFD125EBB12C005F975A /* Logboard.xcframework */; };
 		BC34FA0B286CB90A00EFAF27 /* PiPHKView.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC34FA0A286CB90A00EFAF27 /* PiPHKView.swift */; };
@@ -576,6 +577,7 @@
 		BC3004F8296C351D00119932 /* RTMPPlaybackViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RTMPPlaybackViewController.swift; sourceTree = "<group>"; };
 		BC31DBD12A653D1600C4DEA3 /* IOAudioMonitor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOAudioMonitor.swift; sourceTree = "<group>"; };
 		BC32E88729C9971100051507 /* InstanceHolder.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = InstanceHolder.swift; sourceTree = "<group>"; };
+		BC3483692AC56F3A002926F1 /* IOVideoMixer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOVideoMixer.swift; sourceTree = "<group>"; };
 		BC34DFD125EBB12C005F975A /* Logboard.xcframework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.xcframework; name = Logboard.xcframework; path = Carthage/Build/Logboard.xcframework; sourceTree = "<group>"; };
 		BC34FA0A286CB90A00EFAF27 /* PiPHKView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PiPHKView.swift; sourceTree = "<group>"; };
 		BC3802112AB5E770001AE399 /* IOVideoCaptureUnit.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOVideoCaptureUnit.swift; sourceTree = "<group>"; };
@@ -1043,6 +1045,7 @@
 			children = (
 				BC9F9C7726F8C16600B01ED0 /* Choreographer.swift */,
 				299B13261D3B751400A1E8F5 /* HKView.swift */,
+				BC959EEE296EE4190067BA97 /* ImageTransform.swift */,
 				BC3802132AB5E7CC001AE399 /* IOAudioCaptureUnit.swift */,
 				BC31DBD12A653D1600C4DEA3 /* IOAudioMonitor.swift */,
 				BCFC51FD2AAB420700014428 /* IOAudioResampler.swift */,
@@ -1055,10 +1058,10 @@
 				299B131C1D35272D00A1E8F5 /* IOUIScreenCaptureUnit.swift */,
 				BC570B4728E9ACC10098A12C /* IOUnit.swift */,
 				BC3802112AB5E770001AE399 /* IOVideoCaptureUnit.swift */,
+				BC3483692AC56F3A002926F1 /* IOVideoMixer.swift */,
 				29B8768E1CD70AFE00FC07DA /* IOVideoUnit.swift */,
-				BC959EEE296EE4190067BA97 /* ImageTransform.swift */,
-				2999C3742071138F00892E55 /* MTHKView.swift */,
 				2901A4ED1D437170002BBD23 /* MediaLink.swift */,
+				2999C3742071138F00892E55 /* MTHKView.swift */,
 				BC110256292E661E00D48035 /* MultiCamCaptureSettings.swift */,
 				BC34FA0A286CB90A00EFAF27 /* PiPHKView.swift */,
 				BC3004CD296B0A1700119932 /* Shape.swift */,
@@ -1766,6 +1769,7 @@
 				295891121EEB8D7200CE51E1 /* FLVFrameType.swift in Sources */,
 				29B876961CD70AFE00FC07DA /* VideoEffect.swift in Sources */,
 				BCFC51FE2AAB420700014428 /* IOAudioResampler.swift in Sources */,
+				BC34836A2AC56F3A002926F1 /* IOVideoMixer.swift in Sources */,
 				BC1DC5062A02963600E928ED /* FLVTagType.swift in Sources */,
 				29B876691CD70AB300FC07DA /* Constants.swift in Sources */,
 				29B8766D1CD70AB300FC07DA /* DataConvertible.swift in Sources */,
diff --git a/SRTHaishinKit/SRTStream.swift b/SRTHaishinKit/SRTStream.swift
index 5d80d5483..5f8cdb959 100644
--- a/SRTHaishinKit/SRTStream.swift
+++ b/SRTHaishinKit/SRTStream.swift
@@ -213,6 +213,6 @@ extension SRTStream: TSReaderDelegate {
         guard readyState == .playing else {
             return
         }
-        mixer.appendSampleBuffer(sampleBuffer)
+        appendSampleBuffer(sampleBuffer)
     }
 }
diff --git a/Sources/Extension/CMFormatDescription+Extension.swift b/Sources/Extension/CMFormatDescription+Extension.swift
index 8040e76e2..28e3e7d88 100644
--- a/Sources/Extension/CMFormatDescription+Extension.swift
+++ b/Sources/Extension/CMFormatDescription+Extension.swift
@@ -6,6 +6,10 @@ extension CMFormatDescription {
         CMFormatDescriptionGetMediaType(self)
     }
 
+    var _mediaSubType: FourCharCode {
+        CMFormatDescriptionGetMediaSubType(self)
+    }
+
     @available(iOS, obsoleted: 13.0)
     @available(tvOS, obsoleted: 13.0)
     @available(macOS, obsoleted: 10.15)
diff --git a/Sources/Media/IOAudioUnit.swift b/Sources/Media/IOAudioUnit.swift
index 90fec9e6a..37e162d35 100644
--- a/Sources/Media/IOAudioUnit.swift
+++ b/Sources/Media/IOAudioUnit.swift
@@ -81,8 +81,12 @@
     }
 
     func appendSampleBuffer(_ sampleBuffer: CMSampleBuffer) {
-        presentationTimeStamp = sampleBuffer.presentationTimeStamp
-        resampler.appendSampleBuffer(sampleBuffer.muted(muted))
+        switch sampleBuffer.formatDescription?.audioStreamBasicDescription?.mFormatID {
+        case kAudioFormatLinearPCM:
+            resampler.appendSampleBuffer(sampleBuffer.muted(muted))
+        default:
+            codec.appendSampleBuffer(sampleBuffer)
+        }
     }
 }
 
@@ -105,7 +109,7 @@
         if let playerNode = mixer?.mediaLink.playerNode {
             mixer?.audioEngine?.attach(playerNode)
         }
-        codec.delegate = self
+        codec.delegate = mixer
         codec.startRunning()
     }
 
@@ -122,34 +126,7 @@
 extension IOAudioUnit: AVCaptureAudioDataOutputSampleBufferDelegate {
     // MARK: AVCaptureAudioDataOutputSampleBufferDelegate
     func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
-        appendSampleBuffer(sampleBuffer)
-    }
-}
-
-extension IOAudioUnit: AudioCodecDelegate {
-    // MARK: AudioConverterDelegate
-    func audioCodec(_ codec: AudioCodec, errorOccurred error: AudioCodec.Error) {
-    }
-
-    func audioCodec(_ codec: AudioCodec, didOutput audioFormat: AVAudioFormat) {
-        do {
-            mixer?.audioFormat = audioFormat
-            if let audioEngine = mixer?.audioEngine, audioEngine.isRunning == false {
-                try audioEngine.start()
-            }
-        } catch {
-            logger.error(error)
-        }
-    }
-
-    func audioCodec(_ codec: AudioCodec, didOutput audioBuffer: AVAudioBuffer, presentationTimeStamp: CMTime) {
-        guard let audioBuffer = audioBuffer as? AVAudioPCMBuffer else {
-            return
-        }
-        if let mixer {
-            mixer.delegate?.mixer(mixer, didOutput: audioBuffer, presentationTimeStamp: presentationTimeStamp)
-        }
-        mixer?.mediaLink.enqueueAudio(audioBuffer)
+        resampler.appendSampleBuffer(sampleBuffer.muted(muted))
     }
 }
 
@@ -165,6 +142,7 @@
     }
 
     func resampler(_ resampler: IOAudioResampler<IOAudioUnit>, didOutput audioBuffer: AVAudioPCMBuffer, presentationTimeStamp: CMTime) {
+        self.presentationTimeStamp = presentationTimeStamp
         if let mixer {
             mixer.delegate?.mixer(mixer, didOutput: audioBuffer, presentationTimeStamp: presentationTimeStamp)
             if mixer.recorder.isRunning.value, let sampleBuffer = audioBuffer.makeSampleBuffer(presentationTimeStamp) {
diff --git a/Sources/Media/IOMixer.swift b/Sources/Media/IOMixer.swift
index 0c958370d..2aa72ef4e 100644
--- a/Sources/Media/IOMixer.swift
+++ b/Sources/Media/IOMixer.swift
@@ -212,25 +212,6 @@ public final class IOMixer {
         IOMixer.audioEngineHolder.release(audioEngine)
     }
 
-    /// Append a CMSampleBuffer with media type.
-    public func appendSampleBuffer(_ sampleBuffer: CMSampleBuffer) {
-        switch readyState {
-        case .encoding:
-            break
-        case .decoding:
-            switch sampleBuffer.formatDescription?._mediaType {
-            case kCMMediaType_Audio:
-                audioIO.codec.appendSampleBuffer(sampleBuffer)
-            case kCMMediaType_Video:
-                videoIO.codec.appendSampleBuffer(sampleBuffer)
-            default:
-                break
-            }
-        case .standby:
-            break
-        }
-    }
-
     #if os(iOS) || os(tvOS)
     @available(tvOS 17.0, *)
     private func makeSession() -> AVCaptureSession {
@@ -438,3 +419,46 @@ extension IOMixer: Running {
     }
 #endif
 }
+
+extension IOMixer: AudioCodecDelegate {
+    // MARK: AudioCodecDelegate
+    public func audioCodec(_ codec: AudioCodec, errorOccurred error: AudioCodec.Error) {
+    }
+
+    public func audioCodec(_ codec: AudioCodec, didOutput audioFormat: AVAudioFormat) {
+        do {
+            self.audioFormat = audioFormat
+            if let audioEngine = audioEngine, audioEngine.isRunning == false {
+                try audioEngine.start()
+            }
+        } catch {
+            logger.error(error)
+        }
+    }
+
+    public func audioCodec(_ codec: AudioCodec, didOutput audioBuffer: AVAudioBuffer, presentationTimeStamp: CMTime) {
+        guard let audioBuffer = audioBuffer as? AVAudioPCMBuffer else {
+            return
+        }
+        delegate?.mixer(self, didOutput: audioBuffer, presentationTimeStamp: presentationTimeStamp)
+        mediaLink.enqueueAudio(audioBuffer)
+    }
+}
+
+extension IOMixer: VideoCodecDelegate {
+    // MARK: VideoCodecDelegate
+    public func videoCodec(_ codec: VideoCodec, didOutput formatDescription: CMFormatDescription?) {
+    }
+
+    public func videoCodec(_ codec: VideoCodec, didOutput sampleBuffer: CMSampleBuffer) {
+        mediaLink.enqueueVideo(sampleBuffer)
+    }
+
+    public func videoCodec(_ codec: VideoCodec, errorOccurred error: VideoCodec.Error) {
+        logger.trace(error)
+    }
+
+    public func videoCodecWillDropFame(_ codec: VideoCodec) -> Bool {
+        return false
+    }
+}
diff --git a/Sources/Media/IOVideoMixer.swift b/Sources/Media/IOVideoMixer.swift
new file mode 100644
index 000000000..4468a990c
--- /dev/null
+++ b/Sources/Media/IOVideoMixer.swift
@@ -0,0 +1,117 @@
+import CoreImage
+import CoreMedia
+import Foundation
+
+protocol IOVideoMixerDelegate: AnyObject {
+    func videoMixer(_ videoMixer: IOVideoMixer<Self>, didOutput imageBuffer: CVImageBuffer, presentationTimeStamp: CMTime)
+}
+
+private let kIOVideoMixer_defaultAttributes: [NSString: NSObject] = [
+    kCVPixelBufferPixelFormatTypeKey: NSNumber(value: kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange),
+    kCVPixelBufferMetalCompatibilityKey: kCFBooleanTrue
+]
+
+final class IOVideoMixer<T: IOVideoMixerDelegate> {
+    var muted = false
+    var multiCamCaptureSettings: MultiCamCaptureSettings = .default
+    weak var delegate: T?
+    var context: CIContext = .init()
+
+    private var extent = CGRect.zero {
+        didSet {
+            guard extent != oldValue else {
+                return
+            }
+            CVPixelBufferPoolCreate(nil, nil, attributes as CFDictionary?, &pixelBufferPool)
+            pixelBufferPool?.createPixelBuffer(&pixelBuffer)
+        }
+    }
+    private var attributes: [NSString: NSObject] {
+        var attributes: [NSString: NSObject] = kIOVideoMixer_defaultAttributes
+        attributes[kCVPixelBufferWidthKey] = NSNumber(value: Int(extent.width))
+        attributes[kCVPixelBufferHeightKey] = NSNumber(value: Int(extent.height))
+        return attributes
+    }
+    private var buffer: CVPixelBuffer?
+    private var pixelBuffer: CVPixelBuffer?
+    private var pixelBufferPool: CVPixelBufferPool?
+    private var multiCamSampleBuffer: CMSampleBuffer?
+    private(set) var effects: Set<VideoEffect> = []
+
+    @inline(__always)
+    func effect(_ buffer: CVImageBuffer, info: CMSampleBuffer?) -> CIImage {
+        var image = CIImage(cvPixelBuffer: buffer)
+        for effect in effects {
+            image = effect.execute(image, info: info)
+        }
+        return image
+    }
+
+    func registerEffect(_ effect: VideoEffect) -> Bool {
+        effect.ciContext = context
+        return effects.insert(effect).inserted
+    }
+
+    func unregisterEffect(_ effect: VideoEffect) -> Bool {
+        effect.ciContext = nil
+        return effects.remove(effect) != nil
+    }
+
+    func appendSampleBuffer(_ sampleBuffer: CMSampleBuffer, channel: Int, isVideoMirrored: Bool) {
+        switch channel {
+        case 0:
+            var imageBuffer: CVImageBuffer?
+            guard let buffer = sampleBuffer.imageBuffer else {
+                return
+            }
+            buffer.lockBaseAddress()
+            defer {
+                buffer.unlockBaseAddress()
+                imageBuffer?.unlockBaseAddress()
+            }
+            #if os(macOS)
+            if isVideoMirrored {
+                buffer.reflectHorizontal()
+            }
+            #endif
+            if let multiCamPixelBuffer = multiCamSampleBuffer?.imageBuffer {
+                multiCamPixelBuffer.lockBaseAddress()
+                switch multiCamCaptureSettings.mode {
+                case .pip:
+                    buffer.over(
+                        multiCamPixelBuffer,
+                        regionOfInterest: multiCamCaptureSettings.regionOfInterest,
+                        radius: multiCamCaptureSettings.cornerRadius
+                    )
+                case .splitView:
+                    buffer.split(multiCamPixelBuffer, direction: multiCamCaptureSettings.direction)
+                }
+                multiCamPixelBuffer.unlockBaseAddress()
+            }
+            if !effects.isEmpty {
+                let image = effect(buffer, info: sampleBuffer)
+                extent = image.extent
+                #if os(macOS)
+                pixelBufferPool?.createPixelBuffer(&imageBuffer)
+                #else
+                if buffer.width != Int(extent.width) || buffer.height != Int(extent.height) {
+                    pixelBufferPool?.createPixelBuffer(&imageBuffer)
+                }
+                #endif
+                imageBuffer?.lockBaseAddress()
+                context.render(image, to: imageBuffer ?? buffer)
+            }
+            if muted {
+                imageBuffer = pixelBuffer
+            }
+            delegate?.videoMixer(self, didOutput: imageBuffer ?? buffer, presentationTimeStamp: sampleBuffer.presentationTimeStamp)
+            if !muted {
+                pixelBuffer = buffer
+            }
+        case 1:
+            multiCamSampleBuffer = sampleBuffer
+        default:
+            break
+        }
+    }
+}
diff --git a/Sources/Media/IOVideoUnit.swift b/Sources/Media/IOVideoUnit.swift
index 9f7347206..1c74b80e4 100644
--- a/Sources/Media/IOVideoUnit.swift
+++ b/Sources/Media/IOVideoUnit.swift
@@ -8,19 +8,7 @@ final class IOVideoUnit: NSObject, IOUnit {
         case multiCamNotSupported
     }
 
-    static let defaultAttributes: [NSString: NSObject] = [
-        kCVPixelBufferPixelFormatTypeKey: NSNumber(value: kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange),
-        kCVPixelBufferMetalCompatibilityKey: kCFBooleanTrue
-    ]
-
-    let lockQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.VideoIOComponent.lock")
-    var context: CIContext = .init() {
-        didSet {
-            for effect in effects {
-                effect.ciContext = context
-            }
-        }
-    }
+    let lockQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.IOVideoUnit.lock")
     weak var drawable: (any NetStreamDrawable)? {
         didSet {
             #if os(iOS) || os(macOS)
@@ -35,7 +23,14 @@
         return codec
     }()
     weak var mixer: IOMixer?
-    var muted = false
+    var muted: Bool {
+        get {
+            videoMixer.muted
+        }
+        set {
+            videoMixer.muted = newValue
+        }
+    }
     var frameRate = IOMixer.defaultFrameRate {
         didSet {
             if #available(tvOS 17.0, *) {
@@ -44,6 +39,7 @@
             }
         }
     }
+    var context: CIContext = .init()
     #if !os(tvOS)
     var videoOrientation: AVCaptureVideoOrientation = .portrait {
         didSet {
@@ -103,25 +99,11 @@
     private(set) var multiCamCapture: IOVideoCaptureUnit = .init()
     #endif
     private(set) var presentationTimeStamp: CMTime = .invalid
-    private(set) var effects: Set<VideoEffect> = []
-    private var extent = CGRect.zero {
-        didSet {
-            guard extent != oldValue else {
-                return
-            }
-            CVPixelBufferPoolCreate(nil, nil, attributes as CFDictionary?, &pixelBufferPool)
-            pixelBufferPool?.createPixelBuffer(&pixelBuffer)
-        }
-    }
-    private var attributes: [NSString: NSObject] {
-        var attributes: [NSString: NSObject] = Self.defaultAttributes
-        attributes[kCVPixelBufferWidthKey] = NSNumber(value: Int(extent.width))
-        attributes[kCVPixelBufferHeightKey] = NSNumber(value: Int(extent.height))
-        return attributes
-    }
-    private var pixelBuffer: CVPixelBuffer?
-    private var pixelBufferPool: CVPixelBufferPool?
-    private var multiCamSampleBuffer: CMSampleBuffer?
+    private lazy var videoMixer: IOVideoMixer<IOVideoUnit> = {
+        var videoMixer = IOVideoMixer<IOVideoUnit>()
+        videoMixer.delegate = self
+        return videoMixer
+    }()
 
     deinit {
         if Thread.isMainThread {
@@ -214,86 +196,66 @@
         multiCamCapture.setTorchMode(torchMode)
     }
 
-    @inline(__always)
-    func effect(_ buffer: CVImageBuffer, info: CMSampleBuffer?) -> CIImage {
-        var image = CIImage(cvPixelBuffer: buffer)
-        for effect in effects {
-            image = effect.execute(image, info: info)
-        }
-        return image
-    }
-
     func registerEffect(_ effect: VideoEffect) -> Bool {
-        effect.ciContext = context
-        return effects.insert(effect).inserted
+        return videoMixer.registerEffect(effect)
     }
 
     func unregisterEffect(_ effect: VideoEffect) -> Bool {
-        effect.ciContext = nil
-        return effects.remove(effect) != nil
+        return videoMixer.unregisterEffect(effect)
     }
 
     func appendSampleBuffer(_ sampleBuffer: CMSampleBuffer) {
-        inputFormat = sampleBuffer.formatDescription
-        guard let buffer = sampleBuffer.imageBuffer else {
-            return
-        }
-        presentationTimeStamp = sampleBuffer.presentationTimeStamp
-        var imageBuffer: CVImageBuffer?
-        buffer.lockBaseAddress()
-        defer {
-            buffer.unlockBaseAddress()
-            imageBuffer?.unlockBaseAddress()
-        }
-        #if os(macOS)
-        if capture.isVideoMirrored == true {
-            buffer.reflectHorizontal()
-        }
-        #endif
-        if let multiCamPixelBuffer = multiCamSampleBuffer?.imageBuffer {
-            multiCamPixelBuffer.lockBaseAddress()
-            switch multiCamCaptureSettings.mode {
-            case .pip:
-                buffer.over(
-                    multiCamPixelBuffer,
-                    regionOfInterest: multiCamCaptureSettings.regionOfInterest,
-                    radius: multiCamCaptureSettings.cornerRadius
-                )
-            case .splitView:
-                buffer.split(multiCamPixelBuffer, direction: multiCamCaptureSettings.direction)
-            }
-            multiCamPixelBuffer.unlockBaseAddress()
-        }
-        if drawable != nil || !effects.isEmpty {
-            let image = effect(buffer, info: sampleBuffer)
-            extent = image.extent
-            if !effects.isEmpty {
-                #if os(macOS)
-                pixelBufferPool?.createPixelBuffer(&imageBuffer)
-                #else
-                if buffer.width != Int(extent.width) || buffer.height != Int(extent.height) {
-                    pixelBufferPool?.createPixelBuffer(&imageBuffer)
-                }
-                #endif
-                imageBuffer?.lockBaseAddress()
-                context.render(image, to: imageBuffer ?? buffer)
-            }
-            drawable?.enqueue(sampleBuffer)
-        }
-        if muted {
-            imageBuffer = pixelBuffer
-        }
-        codec.appendImageBuffer(
-            imageBuffer ?? buffer,
-            presentationTimeStamp: sampleBuffer.presentationTimeStamp,
-            duration: sampleBuffer.duration
-        )
-        mixer?.recorder.appendPixelBuffer(
-            imageBuffer ?? buffer,
-            withPresentationTime: sampleBuffer.presentationTimeStamp
-        )
-        if !muted {
-            pixelBuffer = buffer
+        switch sampleBuffer.formatDescription?._mediaSubType {
+        case kCVPixelFormatType_1Monochrome,
+             kCVPixelFormatType_2Indexed,
+             kCVPixelFormatType_8Indexed,
+             kCVPixelFormatType_1IndexedGray_WhiteIsZero,
+             kCVPixelFormatType_2IndexedGray_WhiteIsZero,
+             kCVPixelFormatType_4IndexedGray_WhiteIsZero,
+             kCVPixelFormatType_8IndexedGray_WhiteIsZero,
+             kCVPixelFormatType_16BE555,
+             kCVPixelFormatType_16LE555,
+             kCVPixelFormatType_16LE5551,
+             kCVPixelFormatType_16BE565,
+             kCVPixelFormatType_16LE565,
+             kCVPixelFormatType_24RGB,
+             kCVPixelFormatType_24BGR,
+             kCVPixelFormatType_32ARGB,
+             kCVPixelFormatType_32BGRA,
+             kCVPixelFormatType_32ABGR,
+             kCVPixelFormatType_32RGBA,
+             kCVPixelFormatType_64ARGB,
+             kCVPixelFormatType_48RGB,
+             kCVPixelFormatType_32AlphaGray,
+             kCVPixelFormatType_16Gray,
+             kCVPixelFormatType_30RGB,
+             kCVPixelFormatType_422YpCbCr8,
+             kCVPixelFormatType_4444YpCbCrA8,
+             kCVPixelFormatType_4444YpCbCrA8R,
+             kCVPixelFormatType_4444AYpCbCr8,
+             kCVPixelFormatType_4444AYpCbCr16,
+             kCVPixelFormatType_444YpCbCr8,
+             kCVPixelFormatType_422YpCbCr16,
+             kCVPixelFormatType_422YpCbCr10,
+             kCVPixelFormatType_444YpCbCr10,
+             kCVPixelFormatType_420YpCbCr8Planar,
+             kCVPixelFormatType_420YpCbCr8PlanarFullRange,
+             kCVPixelFormatType_422YpCbCr_4A_8BiPlanar,
+             kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
+             kCVPixelFormatType_420YpCbCr8BiPlanarFullRange,
+             kCVPixelFormatType_422YpCbCr8_yuvs,
+             kCVPixelFormatType_422YpCbCr8FullRange,
+             kCVPixelFormatType_OneComponent8,
+             kCVPixelFormatType_TwoComponent8,
+             kCVPixelFormatType_OneComponent16Half,
+             kCVPixelFormatType_OneComponent32Float,
+             kCVPixelFormatType_TwoComponent16Half,
+             kCVPixelFormatType_TwoComponent32Float,
+             kCVPixelFormatType_64RGBAHalf,
+             kCVPixelFormatType_128RGBAFloat:
+            videoMixer.appendSampleBuffer(sampleBuffer, channel: 0, isVideoMirrored: false)
+        default:
+            codec.appendSampleBuffer(sampleBuffer)
         }
     }
 }
@@ -308,21 +270,19 @@ extension IOVideoUnit: IOUnitEncoding {
     func stopEncoding() {
         codec.stopRunning()
         codec.delegate = nil
-        pixelBuffer = nil
     }
 }
 
 extension IOVideoUnit: IOUnitDecoding {
     // MARK: IOUnitDecoding
     func startDecoding() {
-        codec.delegate = self
+        codec.delegate = mixer
        codec.startRunning()
     }
 
     func stopDecoding() {
         codec.stopRunning()
         drawable?.enqueue(nil)
-        pixelBuffer = nil
     }
 }
 
@@ -331,27 +291,26 @@ extension IOVideoUnit: AVCaptureVideoDataOutputSampleBufferDelegate {
     // MARK: AVCaptureVideoDataOutputSampleBufferDelegate
     func captureOutput(_ captureOutput: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
         if capture.output == captureOutput {
-            appendSampleBuffer(sampleBuffer)
+            videoMixer.appendSampleBuffer(sampleBuffer, channel: 0, isVideoMirrored: connection.isVideoMirrored)
+            drawable?.enqueue(sampleBuffer)
         } else if multiCamCapture.output == captureOutput {
-            multiCamSampleBuffer = sampleBuffer
+            videoMixer.appendSampleBuffer(sampleBuffer, channel: 1, isVideoMirrored: connection.isVideoMirrored)
         }
     }
 }
 
-extension IOVideoUnit: VideoCodecDelegate {
-    // MARK: VideoCodecDelegate
-    func videoCodec(_ codec: VideoCodec, didOutput formatDescription: CMFormatDescription?) {
-    }
-
-    func videoCodec(_ codec: VideoCodec, didOutput sampleBuffer: CMSampleBuffer) {
-        mixer?.mediaLink.enqueueVideo(sampleBuffer)
-    }
-
-    func videoCodec(_ codec: VideoCodec, errorOccurred error: VideoCodec.Error) {
-        logger.trace(error)
-    }
-
-    func videoCodecWillDropFame(_ codec: VideoCodec) -> Bool {
-        return false
+extension IOVideoUnit: IOVideoMixerDelegate {
+    // MARK: IOVideoMixerDelegate
+    func videoMixer(_ videoMixer: IOVideoMixer<IOVideoUnit>, didOutput imageBuffer: CVImageBuffer, presentationTimeStamp: CMTime) {
+        self.presentationTimeStamp = presentationTimeStamp
+        codec.appendImageBuffer(
+            imageBuffer,
+            presentationTimeStamp: presentationTimeStamp,
+            duration: .invalid
+        )
+        mixer?.recorder.appendPixelBuffer(
+            imageBuffer,
+            withPresentationTime: presentationTimeStamp
+        )
     }
 }
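---
Reviewer note, not part of the commit: after this refactor, IOVideoUnit no longer composites frames itself. Capture output is fed to IOVideoMixer per channel (0 = main camera, 1 = multi-cam source); the mixer stashes channel 1, composites it onto channel 0 (PiP or split view, plus any registered effects), and hands the result back through IOVideoMixerDelegate, from where it goes to the encoder and recorder. Below is a minimal sketch of that wiring. It assumes module-internal access to the types added above, and `ExampleUnit` is a hypothetical stand-in for IOVideoUnit, not code from this patch.

    import CoreMedia
    import CoreVideo

    final class ExampleUnit: IOVideoMixerDelegate {
        // The mixer is generic over its delegate type, so the owner names itself.
        private lazy var videoMixer: IOVideoMixer<ExampleUnit> = {
            let videoMixer = IOVideoMixer<ExampleUnit>()
            videoMixer.delegate = self
            return videoMixer
        }()

        // Route capture output: main camera on channel 0, second camera on channel 1.
        func append(_ sampleBuffer: CMSampleBuffer, isMultiCam: Bool) {
            videoMixer.appendSampleBuffer(sampleBuffer, channel: isMultiCam ? 1 : 0, isVideoMirrored: false)
        }

        // Composited frames arrive here; IOVideoUnit forwards them to its codec and recorder.
        func videoMixer(_ videoMixer: IOVideoMixer<ExampleUnit>, didOutput imageBuffer: CVImageBuffer, presentationTimeStamp: CMTime) {
            // e.g. codec.appendImageBuffer(imageBuffer, presentationTimeStamp: presentationTimeStamp, duration: .invalid)
        }
    }

The audio path follows the same shape: linear PCM from capture goes through IOAudioResampler, while already-encoded buffers (e.g. AAC read from an SRT transport stream) go to the codec, and both codecs now report to IOMixer via AudioCodecDelegate/VideoCodecDelegate instead of back to the units.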