diff --git a/Examples/iOS/IngestViewController.swift b/Examples/iOS/IngestViewController.swift
index a343d4157..7cbeeb336 100644
--- a/Examples/iOS/IngestViewController.swift
+++ b/Examples/iOS/IngestViewController.swift
@@ -32,6 +32,7 @@ final class IngestViewController: UIViewController {
         audioCapture.delegate = self
         return audioCapture
     }()
+    private let audioPlayer = AudioFilePlayer()
 
     override func viewDidLoad() {
         super.viewDidLoad()
@@ -55,6 +56,12 @@ final class IngestViewController: UIViewController {
         videoBitrateSlider?.value = Float(VideoCodecSettings.default.bitRate) / 1000
         audioBitrateSlider?.value = Float(AudioCodecSettings.default.bitRate) / 1000
 
+        audioPlayer.delegate = self
+
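+        // Demo-only delay: gives the capture session and stream time to come
+        // up before the file's audio starts feeding mixer channel 1.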
+        DispatchQueue.main.asyncAfter(deadline: .now() + 10) {
+            self.audioPlayer.play()
+        }
+
         NotificationCenter.default.addObserver(self, selector: #selector(on(_:)), name: UIDevice.orientationDidChangeNotification, object: nil)
     }
 
@@ -370,3 +377,9 @@ extension IngestViewController: UIPickerViewDataSource {
         return AVAudioSession.sharedInstance().preferredInput?.dataSources?[row].dataSourceName ?? ""
     }
 }
+
+extension IngestViewController: AudioFilePlayerDelegate {
+    func didReceiveAudioSample(_ buffer: AVAudioPCMBuffer, time: AVAudioTime) {
+        stream.append(buffer, channel: 1, when: time)
+    }
+}
diff --git a/Examples/iOS/Screencast/SampleHandler.swift b/Examples/iOS/Screencast/SampleHandler.swift
index 4a9c9551b..8c3c4af70 100644
--- a/Examples/iOS/Screencast/SampleHandler.swift
+++ b/Examples/iOS/Screencast/SampleHandler.swift
@@ -18,8 +18,6 @@ open class SampleHandler: RPBroadcastSampleHandler {
         RTMPStream(connection: rtmpConnection)
     }()
 
-    private var isMirophoneOn = false
-
     deinit {
         rtmpConnection.removeEventListener(.ioError, selector: #selector(rtmpErrorHandler), observer: self)
         rtmpConnection.removeEventListener(.rtmpStatus, selector: #selector(rtmpStatusEvent), observer: self)
@@ -47,13 +45,12 @@ open class SampleHandler: RPBroadcastSampleHandler {
             }
             rtmpStream.append(sampleBuffer)
         case .audioMic:
-            isMirophoneOn = true
             if CMSampleBufferDataIsReady(sampleBuffer) {
-                rtmpStream.append(sampleBuffer)
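+                // Mic audio now feeds mixer channel 0 rather than suppressing
+                // app audio, which feeds channel 1 below.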
"" lfView?.attachStream(stream) + audioPlayer.delegate = self } override func viewDidAppear() { super.viewDidAppear() - // TODO: Implement output sample rate sync of resamplers in the audio mixer - stream.audioSettings = .init(sampleRate: 48000) stream.attachAudio(DeviceUtil.device(withLocalizedName: audioPopUpButton.titleOfSelectedItem!, mediaType: .audio)) stream.attachCamera(DeviceUtil.device(withLocalizedName: cameraPopUpButton.titleOfSelectedItem!, mediaType: .video), channel: 0) var devices = AVCaptureDevice.devices(for: .video) @@ -43,6 +43,11 @@ final class CameraIngestViewController: NSViewController { if let device = devices.first { stream.attachCamera(device, channel: 1) } + + DispatchQueue.main.asyncAfter(deadline: .now() + 5) { + self.stream.isMonitoringEnabled = true + self.audioPlayer.play() + } } @IBAction private func publishOrStop(_ sender: NSButton) { @@ -76,3 +81,9 @@ final class CameraIngestViewController: NSViewController { stream.attachCamera(device, channel: 0) } } + +extension CameraIngestViewController: AudioFilePlayerDelegate { + func didReceiveAudioSample(_ buffer: AVAudioPCMBuffer, time: AVAudioTime) { + stream.append(buffer, channel: 1, when: time) + } +} diff --git a/Examples/macOS/audio.wav b/Examples/macOS/audio.wav new file mode 100644 index 000000000..0f4f83c20 Binary files /dev/null and b/Examples/macOS/audio.wav differ diff --git a/HaishinKit.xcodeproj/project.pbxproj b/HaishinKit.xcodeproj/project.pbxproj index a055342d6..a82cd9b87 100644 --- a/HaishinKit.xcodeproj/project.pbxproj +++ b/HaishinKit.xcodeproj/project.pbxproj @@ -130,7 +130,11 @@ 2EC97B7227880FF400D8BE32 /* OnTapGestureView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2EC97B6E27880FF400D8BE32 /* OnTapGestureView.swift */; }; 2EC97B7327880FF400D8BE32 /* Views.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2EC97B6F27880FF400D8BE32 /* Views.swift */; }; 2EC97B7427880FF400D8BE32 /* MTHKSwiftUiView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2EC97B7027880FF400D8BE32 /* MTHKSwiftUiView.swift */; }; + B327845D2B928BBD00F9A7F1 /* audio.wav in Resources */ = {isa = PBXBuildFile; fileRef = B3D687842B8573D800E6A28E /* audio.wav */; }; + B327845E2B928BBD00F9A7F1 /* AudioFilePlayer.swift in Sources */ = {isa = PBXBuildFile; fileRef = B3D687862B85743200E6A28E /* AudioFilePlayer.swift */; }; B3D687822B80302B00E6A28E /* IOAudioMixer.swift in Sources */ = {isa = PBXBuildFile; fileRef = B3D687812B80302B00E6A28E /* IOAudioMixer.swift */; }; + B3D687852B8573D800E6A28E /* audio.wav in Resources */ = {isa = PBXBuildFile; fileRef = B3D687842B8573D800E6A28E /* audio.wav */; }; + B3D687872B85743200E6A28E /* AudioFilePlayer.swift in Sources */ = {isa = PBXBuildFile; fileRef = B3D687862B85743200E6A28E /* AudioFilePlayer.swift */; }; BC0394562AA8A384006EDE38 /* Logboard.xcframework in Frameworks */ = {isa = PBXBuildFile; fileRef = BC34DFD125EBB12C005F975A /* Logboard.xcframework */; }; BC03945F2AA8AFF5006EDE38 /* ExpressibleByIntegerLiteral+ExtensionTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC03945E2AA8AFF5006EDE38 /* ExpressibleByIntegerLiteral+ExtensionTests.swift */; }; BC04A2D42AD2D1D700C87A3E /* AVAudioTime+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC04A2D32AD2D1D700C87A3E /* AVAudioTime+Extension.swift */; }; @@ -579,6 +583,8 @@ 2EC97B6F27880FF400D8BE32 /* Views.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Views.swift; sourceTree = ""; }; 2EC97B7027880FF400D8BE32 /* MTHKSwiftUiView.swift */ = {isa 
+        audioPlayerNode.installTap(onBus: 0, bufferSize: 1024, format: format) { [weak self] (buffer, time) in
+            self?.delegate?.didReceiveAudioSample(buffer, time: time)
+        }
+
+        audioPlayerNode.play()
+    }
+}
diff --git a/Examples/macOS/CameraIngestViewController.swift b/Examples/macOS/CameraIngestViewController.swift
index 0925155ba..9ca02b380 100644
--- a/Examples/macOS/CameraIngestViewController.swift
+++ b/Examples/macOS/CameraIngestViewController.swift
@@ -22,6 +22,7 @@ final class CameraIngestViewController: NSViewController {
     private var stream: IOStream {
         return netStreamSwitcher.stream
     }
+    private let audioPlayer = AudioFilePlayer()
 
     override func viewDidLoad() {
         super.viewDidLoad()
@@ -30,12 +31,11 @@ final class CameraIngestViewController: NSViewController {
         cameraPopUpButton?.present(mediaType: .video)
         netStreamSwitcher.uri = Preference.defaultInstance.uri ?? ""
         lfView?.attachStream(stream)
+        audioPlayer.delegate = self
     }
 
     override func viewDidAppear() {
         super.viewDidAppear()
-        // TODO: Implement output sample rate sync of resamplers in the audio mixer
-        stream.audioSettings = .init(sampleRate: 48000)
         stream.attachAudio(DeviceUtil.device(withLocalizedName: audioPopUpButton.titleOfSelectedItem!, mediaType: .audio))
         stream.attachCamera(DeviceUtil.device(withLocalizedName: cameraPopUpButton.titleOfSelectedItem!, mediaType: .video), channel: 0)
         var devices = AVCaptureDevice.devices(for: .video)
@@ -43,6 +43,11 @@ final class CameraIngestViewController: NSViewController {
         if let device = devices.first {
             stream.attachCamera(device, channel: 1)
         }
+
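+        // Demo-only: after five seconds, enable local monitoring and start
+        // mixing the bundled audio.wav into channel 1 alongside the mic.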
+        DispatchQueue.main.asyncAfter(deadline: .now() + 5) {
+            self.stream.isMonitoringEnabled = true
+            self.audioPlayer.play()
+        }
     }
 
     @IBAction private func publishOrStop(_ sender: NSButton) {
@@ -76,3 +81,9 @@ final class CameraIngestViewController: NSViewController {
         stream.attachCamera(device, channel: 0)
     }
 }
+
+extension CameraIngestViewController: AudioFilePlayerDelegate {
+    func didReceiveAudioSample(_ buffer: AVAudioPCMBuffer, time: AVAudioTime) {
+        stream.append(buffer, channel: 1, when: time)
+    }
+}
diff --git a/Examples/macOS/audio.wav b/Examples/macOS/audio.wav
new file mode 100644
index 000000000..0f4f83c20
Binary files /dev/null and b/Examples/macOS/audio.wav differ
diff --git a/HaishinKit.xcodeproj/project.pbxproj b/HaishinKit.xcodeproj/project.pbxproj
index a055342d6..a82cd9b87 100644
--- a/HaishinKit.xcodeproj/project.pbxproj
+++ b/HaishinKit.xcodeproj/project.pbxproj
@@ -130,7 +130,11 @@
 		2EC97B7227880FF400D8BE32 /* OnTapGestureView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2EC97B6E27880FF400D8BE32 /* OnTapGestureView.swift */; };
 		2EC97B7327880FF400D8BE32 /* Views.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2EC97B6F27880FF400D8BE32 /* Views.swift */; };
 		2EC97B7427880FF400D8BE32 /* MTHKSwiftUiView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2EC97B7027880FF400D8BE32 /* MTHKSwiftUiView.swift */; };
+		B327845D2B928BBD00F9A7F1 /* audio.wav in Resources */ = {isa = PBXBuildFile; fileRef = B3D687842B8573D800E6A28E /* audio.wav */; };
+		B327845E2B928BBD00F9A7F1 /* AudioFilePlayer.swift in Sources */ = {isa = PBXBuildFile; fileRef = B3D687862B85743200E6A28E /* AudioFilePlayer.swift */; };
 		B3D687822B80302B00E6A28E /* IOAudioMixer.swift in Sources */ = {isa = PBXBuildFile; fileRef = B3D687812B80302B00E6A28E /* IOAudioMixer.swift */; };
+		B3D687852B8573D800E6A28E /* audio.wav in Resources */ = {isa = PBXBuildFile; fileRef = B3D687842B8573D800E6A28E /* audio.wav */; };
+		B3D687872B85743200E6A28E /* AudioFilePlayer.swift in Sources */ = {isa = PBXBuildFile; fileRef = B3D687862B85743200E6A28E /* AudioFilePlayer.swift */; };
 		BC0394562AA8A384006EDE38 /* Logboard.xcframework in Frameworks */ = {isa = PBXBuildFile; fileRef = BC34DFD125EBB12C005F975A /* Logboard.xcframework */; };
 		BC03945F2AA8AFF5006EDE38 /* ExpressibleByIntegerLiteral+ExtensionTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC03945E2AA8AFF5006EDE38 /* ExpressibleByIntegerLiteral+ExtensionTests.swift */; };
 		BC04A2D42AD2D1D700C87A3E /* AVAudioTime+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC04A2D32AD2D1D700C87A3E /* AVAudioTime+Extension.swift */; };
@@ -579,6 +583,8 @@
 		2EC97B6F27880FF400D8BE32 /* Views.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Views.swift; sourceTree = "<group>"; };
 		2EC97B7027880FF400D8BE32 /* MTHKSwiftUiView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MTHKSwiftUiView.swift; sourceTree = "<group>"; };
 		B3D687812B80302B00E6A28E /* IOAudioMixer.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = IOAudioMixer.swift; sourceTree = "<group>"; };
+		B3D687842B8573D800E6A28E /* audio.wav */ = {isa = PBXFileReference; lastKnownFileType = audio.wav; path = audio.wav; sourceTree = "<group>"; };
+		B3D687862B85743200E6A28E /* AudioFilePlayer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioFilePlayer.swift; sourceTree = "<group>"; };
 		BC03945E2AA8AFF5006EDE38 /* ExpressibleByIntegerLiteral+ExtensionTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "ExpressibleByIntegerLiteral+ExtensionTests.swift"; sourceTree = "<group>"; };
 		BC04A2D32AD2D1D700C87A3E /* AVAudioTime+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioTime+Extension.swift"; sourceTree = "<group>"; };
 		BC04A2D52AD2D95500C87A3E /* CMTime+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CMTime+Extension.swift"; sourceTree = "<group>"; };
@@ -978,6 +984,8 @@
 		296543541D62FE3E00734698 /* macOS */ = {
 			isa = PBXGroup;
 			children = (
+				B3D687842B8573D800E6A28E /* audio.wav */,
+				B3D687862B85743200E6A28E /* AudioFilePlayer.swift */,
 				BC3004FA296C3FC400119932 /* Extension */,
 				296543641D62FEB700734698 /* AppDelegate.swift */,
 				296543651D62FEB700734698 /* Assets.xcassets */,
@@ -1638,6 +1646,7 @@
 			isa = PBXResourcesBuildPhase;
 			buildActionMask = 2147483647;
 			files = (
+				B327845D2B928BBD00F9A7F1 /* audio.wav in Resources */,
 				291F4E381CF206E600F59C51 /* Icon.png in Resources */,
 				296897651CDB028C0074D5F0 /* Assets.xcassets in Resources */,
 				296897661CDB028C0074D5F0 /* LaunchScreen.storyboard in Resources */,
@@ -1650,6 +1659,7 @@
 			buildActionMask = 2147483647;
 			files = (
 				291619661E7EFB09009FB344 /* Main.storyboard in Resources */,
+				B3D687852B8573D800E6A28E /* audio.wav in Resources */,
 				2923A1F71D63005F0019FBCD /* Assets.xcassets in Resources */,
 			);
 			runOnlyForDeploymentPostprocessing = 0;
@@ -1955,6 +1965,7 @@
 				296897681CDB02940074D5F0 /* IngestViewController.swift in Sources */,
 				BCE0E33D2AD369550082C16F /* NetStreamSwitcher.swift in Sources */,
 				BCFB355524FA27EA00DC5108 /* PlaybackViewController.swift in Sources */,
+				B327845E2B928BBD00F9A7F1 /* AudioFilePlayer.swift in Sources */,
 				2955F51F1D09EBAD004CC995 /* VisualEffect.swift in Sources */,
 				295074311E462105007F15A4 /* PreferenceViewController.swift in Sources */,
 			);
@@ -1965,6 +1976,7 @@
 			buildActionMask = 2147483647;
 			files = (
 				BC3004F9296C351D00119932 /* PlaybackViewController.swift in Sources */,
+				B3D687872B85743200E6A28E /* AudioFilePlayer.swift in Sources */,
 				2923A1F81D6300650019FBCD /* AppDelegate.swift in Sources */,
 				BC3004F7296C20DF00119932 /* NSStoryboard.Name+Extension.swift in Sources */,
 				2923A1F41D6300510019FBCD /* MainWindowController.swift in Sources */,
@@ -2339,7 +2351,7 @@
 				FRAMEWORK_SEARCH_PATHS = "$(inherited)";
 				INFOPLIST_FILE = "$(SRCROOT)/Platforms/iOS/Info.plist";
 				INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks";
-				IPHONEOS_DEPLOYMENT_TARGET = 12.0;
+				IPHONEOS_DEPLOYMENT_TARGET = 13.0;
 				LD_RUNPATH_SEARCH_PATHS = (
 					"$(inherited)",
 					"@executable_path/Frameworks",
diff --git a/Sources/IO/IOAudioMixer.swift b/Sources/IO/IOAudioMixer.swift
index eb3e50d33..5d5537f18 100644
--- a/Sources/IO/IOAudioMixer.swift
+++ b/Sources/IO/IOAudioMixer.swift
@@ -42,9 +42,7 @@
     var isEnabled = true
     var settings: IOAudioResamplerSettings = .init() {
         didSet {
-            if settings.invalidate(oldValue) {
-                setupAudioEngine()
-            }
+            outputFormat = settings.mixerOutputFormat
         }
     }
     var inputFormat: AVAudioFormat? {
@@ -65,12 +63,19 @@
     private var tracks: [UInt8: Track] = [:]
     private var anchor: AVAudioTime?
     private var sampleTime: AVAudioFramePosition = kIOAudioMixer_sampleTime
+    private var sampleTime2: AVAudioFramePosition = kIOAudioMixer_sampleTime
    private var defaultResampler: IOAudioResampler<IOAudioMixer> {
         return track(for: kIOAudioMixer_defaultResamplerTag).resampler
     }
-    private var engine: AVAudioEngine?
-    private var mixer: AVAudioMixerNode? {
-        return engine?.mainMixerNode
+    private var mixerAudioUnit: AudioUnit?
+    private var outputAudioUnit: AudioUnit?
+    private let inputRenderCallback: AURenderCallback = { (inRefCon: UnsafeMutableRawPointer, _: UnsafeMutablePointer<AudioUnitRenderActionFlags>, _: UnsafePointer<AudioTimeStamp>, inBusNumber: UInt32, inNumberFrames: UInt32, ioData: UnsafeMutablePointer<AudioBufferList>?) in
+        let audioMixer = Unmanaged<IOAudioMixer>.fromOpaque(inRefCon).takeUnretainedValue()
+        let status = audioMixer.provideInput(inNumberFrames, channel: UInt8(inBusNumber), ioData: ioData)
+        guard status == noErr else {
+            fatalError()
+        }
+        return status
     }
 
     func append(_ sampleBuffer: CMSampleBuffer, channel: UInt8 = 0) {
@@ -94,40 +99,171 @@
     }
 
     private func setupAudioEngine() {
+        if let outputAudioUnit {
+            AudioOutputUnitStop(outputAudioUnit)
+            AudioUnitUninitialize(outputAudioUnit)
+            AudioComponentInstanceDispose(outputAudioUnit)
+            self.outputAudioUnit = nil
+        }
+        if let mixerAudioUnit {
+            AudioUnitUninitialize(mixerAudioUnit)
+            AudioComponentInstanceDispose(mixerAudioUnit)
+            self.mixerAudioUnit = nil
+        }
         guard isEnabled else {
             return
         }
-        engine?.stop()
         guard let outputFormat else {
             return
         }
+        logger.info("outputFormat: \(outputFormat)")
         sampleTime = kIOAudioMixer_sampleTime
-        let engine = AVAudioEngine()
-        for channel in tracks.keys {
-            var track = tracks[channel]!
-            let node = AVAudioSourceNode(format: outputFormat) { [weak self] (isSilence: UnsafeMutablePointer<ObjCBool>, timestamp: UnsafePointer<AudioTimeStamp>, frameCount: AVAudioFrameCount, outputData: UnsafeMutablePointer<AudioBufferList>) in
-                return self?.processInput(isSilence: isSilence.pointee.boolValue, timestamp: timestamp.pointee, frameCount, channel: channel, ioData: outputData) ?? noErr
+
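+        // Build the mix graph from Core Audio units: a mixer unit whose input
+        // buses pull from the per-channel ring buffers via render callbacks,
+        // connected to a generic output unit that is rendered on demand.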
+        let mixerSubType: OSType
+        #if os(macOS)
+        mixerSubType = kAudioUnitSubType_StereoMixer
+        #else
+        mixerSubType = kAudioUnitSubType_MultiChannelMixer
+        #endif
+
+        var mixerComponentDescription = AudioComponentDescription(
+            componentType: kAudioUnitType_Mixer,
+            componentSubType: mixerSubType,
+            componentManufacturer: kAudioUnitManufacturer_Apple,
+            componentFlags: 0,
+            componentFlagsMask: 0)
+
+        guard let audioComponent = AudioComponentFindNext(nil, &mixerComponentDescription) else {
+            fatalError()
+        }
+
+        var status = AudioComponentInstanceNew(audioComponent, &mixerAudioUnit)
+
+        guard status == noErr, let mixerAudioUnit else {
+            fatalError()
+        }
+
+        var streamFormat = outputFormat.streamDescription.pointee
+
+        var busCount = UInt32(tracks.keys.count)
+        status = AudioUnitSetProperty(mixerAudioUnit,
+                                      kAudioUnitProperty_ElementCount,
+                                      kAudioUnitScope_Input,
+                                      0,
+                                      &busCount,
+                                      UInt32(MemoryLayout<UInt32>.size))
+
+        guard status == noErr else {
+            fatalError()
+        }
+
+        for (input, track) in tracks {
+            track.resampler.settings = track.resampler.settings.updateMixerFormat(format: outputFormat)
+
+            status = AudioUnitSetProperty(mixerAudioUnit,
+                                          kAudioUnitProperty_StreamFormat,
+                                          kAudioUnitScope_Input,
+                                          UInt32(input),
+                                          &streamFormat,
+                                          UInt32(MemoryLayout<AudioStreamBasicDescription>.size))
+
+            guard status == noErr else {
+                fatalError()
             }
-            engine.attach(node)
-            engine.connect(node, to: engine.mainMixerNode, format: outputFormat)
-            track.update(inputNode: node)
-            tracks[channel] = track
-        }
-        do {
-            try engine.enableManualRenderingMode(.offline, format: outputFormat, maximumFrameCount: kIOAudioMixer_frameCapacity)
-            try engine.start()
-            self.engine = engine
-        } catch {
-            delegate?.audioMixer(self, errorOccurred: .failedToMix(error: error))
+
+            var callbackStruct = AURenderCallbackStruct(inputProc: inputRenderCallback,
+                                                        inputProcRefCon: Unmanaged.passUnretained(self).toOpaque())
+            status = AudioUnitSetProperty(mixerAudioUnit,
+                                          kAudioUnitProperty_SetRenderCallback,
+                                          kAudioUnitScope_Input,
+                                          UInt32(input),
+                                          &callbackStruct,
+                                          UInt32(MemoryLayout<AURenderCallbackStruct>.size))
+
+            guard status == noErr else {
+                fatalError()
+            }
+        }
+
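+        // kAudioUnitSubType_GenericOutput has no hardware attached, so the
+        // mixed frames are pulled on demand with AudioUnitRender() in
+        // processingLoop(numberOfFrames:) below.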
+        var outputDescription = AudioComponentDescription(
+            componentType: kAudioUnitType_Output,
+            componentSubType: kAudioUnitSubType_GenericOutput,
+            componentManufacturer: kAudioUnitManufacturer_Apple,
+            componentFlags: 0,
+            componentFlagsMask: 0)
+
+        guard let outputComponent = AudioComponentFindNext(nil, &outputDescription) else {
+            fatalError()
+        }
+
+        status = AudioComponentInstanceNew(outputComponent, &outputAudioUnit)
+
+        guard status == noErr, let outputAudioUnit else {
+            fatalError()
+        }
+
+        status = AudioUnitSetProperty(outputAudioUnit,
+                                      kAudioUnitProperty_StreamFormat,
+                                      kAudioUnitScope_Input,
+                                      0,
+                                      &streamFormat,
+                                      UInt32(MemoryLayout<AudioStreamBasicDescription>.size))
+
+        guard status == noErr else {
+            fatalError()
+        }
+
+        status = AudioUnitSetProperty(outputAudioUnit,
+                                      kAudioUnitProperty_StreamFormat,
+                                      kAudioUnitScope_Output,
+                                      0,
+                                      &streamFormat,
+                                      UInt32(MemoryLayout<AudioStreamBasicDescription>.size))
+
+        guard status == noErr else {
+            fatalError()
+        }
+
+        var connection = AudioUnitConnection(sourceAudioUnit: mixerAudioUnit,
+                                             sourceOutputNumber: 0,
+                                             destInputNumber: 0)
+
+        status = AudioUnitSetProperty(outputAudioUnit,
+                                      kAudioUnitProperty_MakeConnection,
+                                      kAudioUnitScope_Input,
+                                      0,
+                                      &connection,
+                                      UInt32(MemoryLayout<AudioUnitConnection>.size))
+
+        guard status == noErr else {
+            fatalError()
+        }
+
+        status = AudioUnitInitialize(mixerAudioUnit)
+
+        guard status == noErr else {
+            fatalError()
+        }
+
+        status = AudioUnitInitialize(outputAudioUnit)
+
+        guard status == noErr else {
+            fatalError()
+        }
+
+        status = AudioOutputUnitStart(outputAudioUnit)
+
+        guard status == noErr else {
+            fatalError()
         }
     }
 
-    private func processInput(isSilence: Bool, timestamp: AudioTimeStamp, _ inNumberFrames: UInt32, channel: UInt8, ioData: UnsafeMutablePointer<AudioBufferList>?) -> OSStatus {
+    private func provideInput(_ inNumberFrames: UInt32, channel: UInt8, ioData: UnsafeMutablePointer<AudioBufferList>?) -> OSStatus {
         guard let ringBuffer = track(for: channel).ringBuffer else {
             return noErr
         }
-        if isSilence {
+        // No data buffered for this channel yet: emit silence instead of
+        // stalling the mixer.
+        if ringBuffer.counts == 0 {
             guard let bufferList = UnsafeMutableAudioBufferListPointer(ioData) else {
                 return noErr
             }
@@ -136,38 +272,75 @@
             }
             return noErr
         }
-        return ringBuffer.render(inNumberFrames, ioData: ioData)
+        let status = ringBuffer.render(inNumberFrames, ioData: ioData)
+        return status
     }
 
-    @inline(__always)
-    private func mix(numberOfFrames: AVAudioFrameCount) {
-        guard let engine else {
+    private func processingLoop(numberOfFrames: AVAudioFrameCount) {
+        guard let audioUnit = outputAudioUnit, let outputFormat else {
             return
         }
-        do {
-            let buffer = AVAudioPCMBuffer(pcmFormat: engine.manualRenderingFormat, frameCapacity: engine.manualRenderingMaximumFrameCount)!
-            let status = try engine.renderOffline(numberOfFrames, to: buffer)
-            switch status {
-            case .success:
-                processMixerOutput(buffer: buffer)
-            case .cannotDoInCurrentContext, .insufficientDataFromInputNode, .error:
-                logger.trace(status)
-            @unknown default:
-                break
-            }
-        } catch {
-            delegate?.audioMixer(self, errorOccurred: .failedToMix(error: error))
-        }
+        var timeStamp = AudioTimeStamp()
+        timeStamp.mFlags = .sampleTimeValid
+        timeStamp.mSampleTime = Float64(sampleTime)
+
+        let channelCount = outputFormat.channelCount
+        let audioBufferList = AudioBufferList.allocate(maximumBuffers: Int(channelCount))
+        for i in 0..<Int(channelCount) {
+            audioBufferList[i].mNumberChannels = 1
+            audioBufferList[i].mDataByteSize = numberOfFrames * UInt32(MemoryLayout<Float32>.size)
+            audioBufferList[i].mData = nil
+        }
+
+        let status = AudioUnitRender(audioUnit,
+                                     nil,
+                                     &timeStamp,
+                                     0,
+                                     numberOfFrames,
+                                     audioBufferList.unsafeMutablePointer)
+
+        if status == noErr, let buffer = AVAudioPCMBuffer(pcmFormat: outputFormat, bufferListNoCopy: audioBufferList.unsafeMutablePointer) {
+            processMixerOutput(buffer: buffer)
+        }
+
+        sampleTime += AVAudioFramePosition(numberOfFrames)
     }
 
     private func track(for channel: UInt8) -> Track {
@@ -183,8 +356,7 @@
 
         if let format {
             logger.info("Created a track for channel \(channel); format: \(format)")
-        }
-        else {
+        } else {
             logger.info("Created a track for channel \(channel)")
         }
 
@@ -231,7 +403,7 @@ extension IOAudioMixer: IOAudioResamplerDelegate {
         track.ringBuffer?.append(audioBuffer, when: when)
 
         if resampler.tag == kIOAudioMixer_defaultResamplerTag {
-            mix(numberOfFrames: audioBuffer.frameLength)
+            processingLoop(numberOfFrames: audioBuffer.frameLength)
         }
     }
diff --git a/Sources/IO/IOAudioResampler.swift b/Sources/IO/IOAudioResampler.swift
index ec286de6f..541b29587 100644
--- a/Sources/IO/IOAudioResampler.swift
+++ b/Sources/IO/IOAudioResampler.swift
@@ -15,17 +15,29 @@ struct IOAudioResamplerSettings {
     let channels: UInt32
     let downmix: Bool
     let channelMap: [NSNumber]?
+    // Used temporarily so all resamplers have the same output format
+    let mixerOutputFormat: AVAudioFormat?
 
-    init(sampleRate: Float64 = 0, channels: UInt32 = 0, downmix: Bool = false, channelMap: [NSNumber]? = nil) {
+    init(sampleRate: Float64 = 0, channels: UInt32 = 0, downmix: Bool = false, channelMap: [NSNumber]? = nil, mixerOutputFormat: AVAudioFormat? = nil) {
         self.sampleRate = sampleRate
         self.channels = channels
         self.downmix = downmix
         self.channelMap = channelMap
+        self.mixerOutputFormat = mixerOutputFormat
+    }
+
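+    // Copies the settings with the resampler's output pinned to the mixer's
+    // shared format, so every track converges on one output format.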
+    func updateMixerFormat(format: AVAudioFormat?) -> IOAudioResamplerSettings {
+        .init(sampleRate: sampleRate,
+              channels: channels,
+              downmix: downmix,
+              channelMap: channelMap,
+              mixerOutputFormat: format)
     }
 
     func invalidate(_ oldValue: IOAudioResamplerSettings) -> Bool {
         return !(sampleRate == oldValue.sampleRate &&
-                    channels == oldValue.channels)
+                    channels == oldValue.channels &&
+                    mixerOutputFormat == oldValue.mixerOutputFormat)
     }
 
     func apply(_ converter: AVAudioConverter?, oldValue: IOAudioResamplerSettings?) {
@@ -228,7 +240,7 @@
 
     private func setUp(_ inSourceFormat: inout AudioStreamBasicDescription) {
         let inputFormat = Self.makeAudioFormat(&inSourceFormat)
-        let outputFormat = settings.makeOutputFormat(inputFormat) ?? inputFormat
+        let outputFormat = settings.mixerOutputFormat ?? (settings.makeOutputFormat(inputFormat) ?? inputFormat)
         if let inputFormat {
             inputBuffer = .init(pcmFormat: inputFormat, frameCapacity: 1024 * 4)
             ringBuffer = .init(inputFormat)
@@ -238,7 +250,7 @@
         }
         if let inputFormat, let outputFormat {
             if logger.isEnabledFor(level: .info) {
-                logger.info("inputFormat:", inputFormat, ",outputFormat:", outputFormat)
+                logger.info("tag:", tag, ", inputFormat:", inputFormat, ", outputFormat:", outputFormat)
             }
             sampleTime = kIOAudioResampler_sampleTime
             audioConverter = .init(from: inputFormat, to: outputFormat)