Skip to content

Commit

Permalink
Refactor IOMixer, IOVideoUnit, IOAudioUnit.
Browse files Browse the repository at this point in the history
  • Loading branch information
shogo4405 committed Sep 29, 2023
1 parent 7c94fbb commit d8819ea
Show file tree
Hide file tree
Showing 7 changed files with 265 additions and 179 deletions.
8 changes: 6 additions & 2 deletions HaishinKit.xcodeproj/project.pbxproj
Original file line number Diff line number Diff line change
Expand Up @@ -168,6 +168,7 @@
BC3004F9296C351D00119932 /* RTMPPlaybackViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC3004F8296C351D00119932 /* RTMPPlaybackViewController.swift */; };
BC31DBD22A653D1600C4DEA3 /* IOAudioMonitor.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC31DBD12A653D1600C4DEA3 /* IOAudioMonitor.swift */; };
BC32E88829C9971100051507 /* InstanceHolder.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC32E88729C9971100051507 /* InstanceHolder.swift */; };
BC34836A2AC56F3A002926F1 /* IOVideoMixer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC3483692AC56F3A002926F1 /* IOVideoMixer.swift */; };
BC34DFF025EBB53F005F975A /* Logboard.xcframework in Frameworks */ = {isa = PBXBuildFile; fileRef = BC34DFD125EBB12C005F975A /* Logboard.xcframework */; };
BC34E00225EBB59C005F975A /* Logboard.xcframework in Frameworks */ = {isa = PBXBuildFile; fileRef = BC34DFD125EBB12C005F975A /* Logboard.xcframework */; };
BC34FA0B286CB90A00EFAF27 /* PiPHKView.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC34FA0A286CB90A00EFAF27 /* PiPHKView.swift */; };
Expand Down Expand Up @@ -576,6 +577,7 @@
BC3004F8296C351D00119932 /* RTMPPlaybackViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RTMPPlaybackViewController.swift; sourceTree = "<group>"; };
BC31DBD12A653D1600C4DEA3 /* IOAudioMonitor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOAudioMonitor.swift; sourceTree = "<group>"; };
BC32E88729C9971100051507 /* InstanceHolder.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = InstanceHolder.swift; sourceTree = "<group>"; };
BC3483692AC56F3A002926F1 /* IOVideoMixer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOVideoMixer.swift; sourceTree = "<group>"; };
BC34DFD125EBB12C005F975A /* Logboard.xcframework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.xcframework; name = Logboard.xcframework; path = Carthage/Build/Logboard.xcframework; sourceTree = "<group>"; };
BC34FA0A286CB90A00EFAF27 /* PiPHKView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PiPHKView.swift; sourceTree = "<group>"; };
BC3802112AB5E770001AE399 /* IOVideoCaptureUnit.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOVideoCaptureUnit.swift; sourceTree = "<group>"; };
Expand Down Expand Up @@ -1043,6 +1045,7 @@
children = (
BC9F9C7726F8C16600B01ED0 /* Choreographer.swift */,
299B13261D3B751400A1E8F5 /* HKView.swift */,
BC959EEE296EE4190067BA97 /* ImageTransform.swift */,
BC3802132AB5E7CC001AE399 /* IOAudioCaptureUnit.swift */,
BC31DBD12A653D1600C4DEA3 /* IOAudioMonitor.swift */,
BCFC51FD2AAB420700014428 /* IOAudioResampler.swift */,
Expand All @@ -1055,10 +1058,10 @@
299B131C1D35272D00A1E8F5 /* IOUIScreenCaptureUnit.swift */,
BC570B4728E9ACC10098A12C /* IOUnit.swift */,
BC3802112AB5E770001AE399 /* IOVideoCaptureUnit.swift */,
BC3483692AC56F3A002926F1 /* IOVideoMixer.swift */,
29B8768E1CD70AFE00FC07DA /* IOVideoUnit.swift */,
BC959EEE296EE4190067BA97 /* ImageTransform.swift */,
2999C3742071138F00892E55 /* MTHKView.swift */,
2901A4ED1D437170002BBD23 /* MediaLink.swift */,
2999C3742071138F00892E55 /* MTHKView.swift */,
BC110256292E661E00D48035 /* MultiCamCaptureSettings.swift */,
BC34FA0A286CB90A00EFAF27 /* PiPHKView.swift */,
BC3004CD296B0A1700119932 /* Shape.swift */,
Expand Down Expand Up @@ -1766,6 +1769,7 @@
295891121EEB8D7200CE51E1 /* FLVFrameType.swift in Sources */,
29B876961CD70AFE00FC07DA /* VideoEffect.swift in Sources */,
BCFC51FE2AAB420700014428 /* IOAudioResampler.swift in Sources */,
BC34836A2AC56F3A002926F1 /* IOVideoMixer.swift in Sources */,
BC1DC5062A02963600E928ED /* FLVTagType.swift in Sources */,
29B876691CD70AB300FC07DA /* Constants.swift in Sources */,
29B8766D1CD70AB300FC07DA /* DataConvertible.swift in Sources */,
Expand Down
2 changes: 1 addition & 1 deletion SRTHaishinKit/SRTStream.swift
Original file line number Diff line number Diff line change
Expand Up @@ -213,6 +213,6 @@ extension SRTStream: TSReaderDelegate {
guard readyState == .playing else {
return
}
mixer.appendSampleBuffer(sampleBuffer)
appendSampleBuffer(sampleBuffer)
}
}
4 changes: 4 additions & 0 deletions Sources/Extension/CMFormatDescription+Extension.swift
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,10 @@ extension CMFormatDescription {
CMFormatDescriptionGetMediaType(self)
}

/// The media subtype FourCC of this format description (e.g. a codec identifier),
/// as reported by `CMFormatDescriptionGetMediaSubType`.
var _mediaSubType: FourCharCode {
    CMFormatDescriptionGetMediaSubType(self)
}

@available(iOS, obsoleted: 13.0)
@available(tvOS, obsoleted: 13.0)
@available(macOS, obsoleted: 10.15)
Expand Down
40 changes: 9 additions & 31 deletions Sources/Media/IOAudioUnit.swift
Original file line number Diff line number Diff line change
Expand Up @@ -81,8 +81,12 @@ final class IOAudioUnit: NSObject, IOUnit {
}

func appendSampleBuffer(_ sampleBuffer: CMSampleBuffer) {
presentationTimeStamp = sampleBuffer.presentationTimeStamp
resampler.appendSampleBuffer(sampleBuffer.muted(muted))
switch sampleBuffer.formatDescription?.audioStreamBasicDescription?.mFormatID {
case kAudioFormatLinearPCM:
resampler.appendSampleBuffer(sampleBuffer.muted(muted))
default:
codec.appendSampleBuffer(sampleBuffer)
}
}
}

Expand All @@ -105,7 +109,7 @@ extension IOAudioUnit: IOUnitDecoding {
if let playerNode = mixer?.mediaLink.playerNode {
mixer?.audioEngine?.attach(playerNode)
}
codec.delegate = self
codec.delegate = mixer
codec.startRunning()
}

Expand All @@ -122,34 +126,7 @@ extension IOAudioUnit: IOUnitDecoding {
extension IOAudioUnit: AVCaptureAudioDataOutputSampleBufferDelegate {
// MARK: AVCaptureAudioDataOutputSampleBufferDelegate
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
appendSampleBuffer(sampleBuffer)
}
}

extension IOAudioUnit: AudioCodecDelegate {
// MARK: AudioConverterDelegate
func audioCodec(_ codec: AudioCodec, errorOccurred error: AudioCodec.Error) {
}

func audioCodec(_ codec: AudioCodec, didOutput audioFormat: AVAudioFormat) {
do {
mixer?.audioFormat = audioFormat
if let audioEngine = mixer?.audioEngine, audioEngine.isRunning == false {
try audioEngine.start()
}
} catch {
logger.error(error)
}
}

func audioCodec(_ codec: AudioCodec, didOutput audioBuffer: AVAudioBuffer, presentationTimeStamp: CMTime) {
guard let audioBuffer = audioBuffer as? AVAudioPCMBuffer else {
return
}
if let mixer {
mixer.delegate?.mixer(mixer, didOutput: audioBuffer, presentationTimeStamp: presentationTimeStamp)
}
mixer?.mediaLink.enqueueAudio(audioBuffer)
resampler.appendSampleBuffer(sampleBuffer.muted(muted))
}
}

Expand All @@ -165,6 +142,7 @@ extension IOAudioUnit: IOAudioResamplerDelegate {
}

func resampler(_ resampler: IOAudioResampler<IOAudioUnit>, didOutput audioBuffer: AVAudioPCMBuffer, presentationTimeStamp: CMTime) {
self.presentationTimeStamp = presentationTimeStamp
if let mixer {
mixer.delegate?.mixer(mixer, didOutput: audioBuffer, presentationTimeStamp: presentationTimeStamp)
if mixer.recorder.isRunning.value, let sampleBuffer = audioBuffer.makeSampleBuffer(presentationTimeStamp) {
Expand Down
62 changes: 43 additions & 19 deletions Sources/Media/IOMixer.swift
Original file line number Diff line number Diff line change
Expand Up @@ -212,25 +212,6 @@ public final class IOMixer {
IOMixer.audioEngineHolder.release(audioEngine)
}

/// Append a CMSampleBuffer with media type.
public func appendSampleBuffer(_ sampleBuffer: CMSampleBuffer) {
switch readyState {
case .encoding:
break
case .decoding:
switch sampleBuffer.formatDescription?._mediaType {
case kCMMediaType_Audio:
audioIO.codec.appendSampleBuffer(sampleBuffer)
case kCMMediaType_Video:
videoIO.codec.appendSampleBuffer(sampleBuffer)
default:
break
}
case .standby:
break
}
}

#if os(iOS) || os(tvOS)
@available(tvOS 17.0, *)
private func makeSession() -> AVCaptureSession {
Expand Down Expand Up @@ -438,3 +419,46 @@ extension IOMixer: Running {
}
#endif
}

extension IOMixer: AudioCodecDelegate {
    // MARK: AudioCodecDelegate
    /// Codec errors are currently ignored by the mixer (intentional no-op).
    public func audioCodec(_ codec: AudioCodec, errorOccurred error: AudioCodec.Error) {
    }

    /// Records the codec's negotiated output format and lazily starts the
    /// audio engine the first time a format arrives while it is stopped.
    public func audioCodec(_ codec: AudioCodec, didOutput audioFormat: AVAudioFormat) {
        self.audioFormat = audioFormat
        guard let audioEngine, !audioEngine.isRunning else {
            return
        }
        do {
            try audioEngine.start()
        } catch {
            logger.error(error)
        }
    }

    /// Forwards a decoded PCM buffer to the delegate and queues it for playback.
    /// Non-PCM buffers are silently dropped.
    public func audioCodec(_ codec: AudioCodec, didOutput audioBuffer: AVAudioBuffer, presentationTimeStamp: CMTime) {
        guard let pcmBuffer = audioBuffer as? AVAudioPCMBuffer else {
            return
        }
        delegate?.mixer(self, didOutput: pcmBuffer, presentationTimeStamp: presentationTimeStamp)
        mediaLink.enqueueAudio(pcmBuffer)
    }
}

extension IOMixer: VideoCodecDelegate {
    // MARK: VideoCodecDelegate
    /// Format changes require no handling on the playback path (intentional no-op).
    public func videoCodec(_ codec: VideoCodec, didOutput formatDescription: CMFormatDescription?) {
    }

    /// Hands a decoded video frame to the media link for scheduled display.
    public func videoCodec(_ codec: VideoCodec, didOutput sampleBuffer: CMSampleBuffer) {
        mediaLink.enqueueVideo(sampleBuffer)
    }

    /// Traces codec errors without interrupting the session.
    public func videoCodec(_ codec: VideoCodec, errorOccurred error: VideoCodec.Error) {
        logger.trace(error)
    }

    /// The mixer never asks the codec to drop frames.
    /// ("Fame" spelling mirrors the delegate protocol's method name.)
    public func videoCodecWillDropFame(_ codec: VideoCodec) -> Bool {
        false
    }
}
117 changes: 117 additions & 0 deletions Sources/Media/IOVideoMixer.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,117 @@
import CoreImage
import CoreMedia
import Foundation

/// The interface an `IOVideoMixer` uses to deliver composited video frames.
protocol IOVideoMixerDelegate: AnyObject {
    /// Called with the mixed image buffer produced for each primary-channel sample.
    func videoMixer(_ videoMixer: IOVideoMixer<Self>, didOutput imageBuffer: CVImageBuffer, presentationTimeStamp: CMTime)
}

/// Default pixel-buffer-pool attributes: 4:2:0 bi-planar video-range format,
/// flagged Metal-compatible. Width/height are appended per-instance from `extent`.
private let kIOVideoMixer_defaultAttributes: [NSString: NSObject] = [
    kCVPixelBufferPixelFormatTypeKey: NSNumber(value: kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange),
    kCVPixelBufferMetalCompatibilityKey: kCFBooleanTrue
]

/// Composites multi-camera video input, applies registered `VideoEffect`s, and
/// forwards the resulting image buffer to its delegate.
///
/// Channel 0 is the primary camera feed; channel 1 is an optional second feed
/// that is overlaid (picture-in-picture) or tiled (split view) according to
/// `multiCamCaptureSettings`.
final class IOVideoMixer<T: IOVideoMixerDelegate> {
    /// While true, the mixer keeps re-emitting the last delivered frame instead of new input.
    var muted = false
    /// Layout settings (pip / split view) applied when a channel-1 sample is present.
    var multiCamCaptureSettings: MultiCamCaptureSettings = .default
    weak var delegate: T?
    /// CIContext shared with registered effects and used to render the effect chain.
    var context: CIContext = .init()

    /// Extent of the effect-chain output image. Changing it rebuilds the
    /// pixel buffer pool and pre-creates one buffer from the new pool.
    private var extent = CGRect.zero {
        didSet {
            guard extent != oldValue else {
                return
            }
            CVPixelBufferPoolCreate(nil, nil, attributes as CFDictionary?, &pixelBufferPool)
            pixelBufferPool?.createPixelBuffer(&pixelBuffer)
        }
    }
    /// Pool attributes: the shared defaults plus width/height from the current `extent`.
    private var attributes: [NSString: NSObject] {
        var attributes: [NSString: NSObject] = kIOVideoMixer_defaultAttributes
        attributes[kCVPixelBufferWidthKey] = NSNumber(value: Int(extent.width))
        attributes[kCVPixelBufferHeightKey] = NSNumber(value: Int(extent.height))
        return attributes
    }
    // Most recently emitted frame; re-sent to the delegate while `muted` is true.
    private var pixelBuffer: CVPixelBuffer?
    private var pixelBufferPool: CVPixelBufferPool?
    // Latest channel-1 sample, composited onto the next channel-0 frame.
    private var multiCamSampleBuffer: CMSampleBuffer?
    private(set) var effects: Set<VideoEffect> = []

    /// Runs every registered effect over `buffer`. Iteration order is
    /// unspecified because `effects` is a `Set`.
    @inline(__always)
    func effect(_ buffer: CVImageBuffer, info: CMSampleBuffer?) -> CIImage {
        var image = CIImage(cvPixelBuffer: buffer)
        for effect in effects {
            image = effect.execute(image, info: info)
        }
        return image
    }

    /// Registers an effect, binding it to this mixer's CIContext.
    /// - Returns: false when the effect was already registered.
    func registerEffect(_ effect: VideoEffect) -> Bool {
        effect.ciContext = context
        return effects.insert(effect).inserted
    }

    /// Unregisters an effect and releases its CIContext binding.
    /// - Returns: false when the effect was not registered.
    func unregisterEffect(_ effect: VideoEffect) -> Bool {
        effect.ciContext = nil
        return effects.remove(effect) != nil
    }

    /// Entry point: mixes a frame and notifies the delegate.
    /// - Parameters:
    ///   - sampleBuffer: The incoming video sample.
    ///   - channel: 0 for the primary feed (triggers output); 1 for the
    ///     secondary feed (stored for compositing). Other values are ignored.
    ///   - isVideoMirrored: On macOS, horizontally reflects the frame in place.
    func appendSampleBuffer(_ sampleBuffer: CMSampleBuffer, channel: Int, isVideoMirrored: Bool) {
        switch channel {
        case 0:
            var imageBuffer: CVImageBuffer?
            guard let buffer = sampleBuffer.imageBuffer else {
                return
            }
            buffer.lockBaseAddress()
            defer {
                buffer.unlockBaseAddress()
                // NOTE(review): if `muted` is true after an effect render,
                // `imageBuffer` has been reassigned to `pixelBuffer` below, so
                // the pool buffer locked during rendering is never unlocked
                // here — confirm this is intended.
                imageBuffer?.unlockBaseAddress()
            }
            #if os(macOS)
            if isVideoMirrored {
                buffer.reflectHorizontal()
            }
            #endif
            // Composite the latest secondary frame according to the configured layout.
            if let multiCamPixelBuffer = multiCamSampleBuffer?.imageBuffer {
                multiCamPixelBuffer.lockBaseAddress()
                switch multiCamCaptureSettings.mode {
                case .pip:
                    buffer.over(
                        multiCamPixelBuffer,
                        regionOfInterest: multiCamCaptureSettings.regionOfInterest,
                        radius: multiCamCaptureSettings.cornerRadius
                    )
                case .splitView:
                    buffer.split(multiCamPixelBuffer, direction: multiCamCaptureSettings.direction)
                }
                multiCamPixelBuffer.unlockBaseAddress()
            }
            if !effects.isEmpty {
                let image = effect(buffer, info: sampleBuffer)
                extent = image.extent
                #if os(macOS)
                pixelBufferPool?.createPixelBuffer(&imageBuffer)
                #else
                // Only allocate a separate destination when the effect output
                // size differs from the source buffer.
                if buffer.width != Int(extent.width) || buffer.height != Int(extent.height) {
                    pixelBufferPool?.createPixelBuffer(&imageBuffer)
                }
                #endif
                imageBuffer?.lockBaseAddress()
                context.render(image, to: imageBuffer ?? buffer)
            }
            if muted {
                // Freeze-frame: replace the new frame with the last emitted one.
                imageBuffer = pixelBuffer
            }
            delegate?.videoMixer(self, didOutput: imageBuffer ?? buffer, presentationTimeStamp: sampleBuffer.presentationTimeStamp)
            if !muted {
                pixelBuffer = buffer
            }
        case 1:
            multiCamSampleBuffer = sampleBuffer
        default:
            break
        }
    }
}
Loading

0 comments on commit d8819ea

Please sign in to comment.