Skip to content

Commit

Permalink
Tests
Browse files Browse the repository at this point in the history
  • Loading branch information
hiroshihorie committed Jan 6, 2025
1 parent 3b7af6a commit 622caa9
Show file tree
Hide file tree
Showing 3 changed files with 224 additions and 16 deletions.
86 changes: 70 additions & 16 deletions Sources/LiveKit/Track/AudioManager.swift
Original file line number Diff line number Diff line change
Expand Up @@ -68,6 +68,8 @@ public class AudioManager: Loggable {
#endif

/// Invoked when the audio device module reports that the set of audio devices changed.
public typealias DeviceUpdateFunc = (_ audioManager: AudioManager) -> Void
/// Invoked right before the AVAudioEngine starts, reporting whether playout and recording will be enabled.
public typealias OnEngineWillStart = (_ audioManager: AudioManager, _ engine: AVAudioEngine, _ playoutEnabled: Bool, _ recordingEnabled: Bool) -> Void
/// Invoked right before the engine connects its input chain, exposing the input mixer node
/// so callers can attach custom source nodes (see `testManualRenderingMode` usage).
public typealias OnEngineWillConnectInput = (_ audioManager: AudioManager, _ engine: AVAudioEngine, _ inputMixerNode: AVAudioMixerNode) -> Void

#if os(iOS) || os(visionOS) || os(tvOS)

Expand Down Expand Up @@ -208,13 +210,56 @@ public class AudioManager: Loggable {

/// Callback fired whenever the audio device module reports a device-list change.
/// Setting this property (re)registers the underlying ADM callback.
public var onDeviceUpdate: DeviceUpdateFunc? {
    didSet {
        // The span previously contained both the old `setDevicesUpdatedHandler`
        // registration and the new one; only the current API is kept.
        RTC.audioDeviceModule.setDevicesDidUpdateCallback { [weak self] in
            guard let self else { return }
            self.onDeviceUpdate?(self)
        }
    }
}

/// Callback fired right before the engine's input is connected, allowing custom
/// nodes to be attached to the provided input mixer node.
public var onEngineWillConnectInput: OnEngineWillConnectInput? {
    didSet {
        RTC.audioDeviceModule.setOnEngineWillConnectInputCallback { [weak self] engine, inputMixerNode in
            if let self {
                self.onEngineWillConnectInput?(self, engine, inputMixerNode)
            }
        }
    }
}

/// Enables or disables manual (no-device) rendering on the audio device module.
/// A failed set is logged; the property then still reflects the module's actual state.
public var isManualRenderingMode: Bool {
    get { RTC.audioDeviceModule.isManualRenderingMode }
    set {
        let didApply = RTC.audioDeviceModule.setManualRenderingMode(newValue)
        guard didApply else {
            log("Failed to set manual rendering mode", .error)
            return
        }
    }
}

// MARK: Testing

/// Initializes and starts audio playout on the device module (testing helper).
/// NOTE(review): return values of the ADM calls are ignored — confirm they are status codes.
public func startPlayout() {
    RTC.audioDeviceModule.initPlayout()
    RTC.audioDeviceModule.startPlayout()
}

/// Stops audio playout on the device module (testing helper).
public func stopPlayout() {
    RTC.audioDeviceModule.stopPlayout()
}

/// Initializes audio recording on the device module without starting it (testing helper).
public func initRecording() {
    RTC.audioDeviceModule.initRecording()
}

/// Initializes and starts audio recording on the device module (testing helper).
public func startRecording() {
    // Reuse the init helper instead of duplicating the ADM call (same behavior).
    initRecording()
    RTC.audioDeviceModule.startRecording()
}

/// Stops audio recording on the device module (testing helper).
public func stopRecording() {
    RTC.audioDeviceModule.stopRecording()
}

// MARK: - Internal

enum `Type` {
Expand All @@ -224,19 +269,34 @@ public class AudioManager: Loggable {

let state = StateSync(State())

// MARK: - Private
/// Registers the engine-will-start hook which, on iOS-family platforms,
/// (re)configures the shared audio session to match the requested
/// playout/recording state before the engine starts.
init() {
    RTC.audioDeviceModule.setOnEngineWillStartCallback { [weak self] _, isPlayoutEnabled, isRecordingEnabled in
        guard let self else { return }
        self.log("OnEngineWillStart isPlayoutEnabled: \(isPlayoutEnabled), isRecordingEnabled: \(isRecordingEnabled)")

        #if os(iOS) || os(visionOS) || os(tvOS)
        self.log("Configuring audio session...")
        let session = LKRTCAudioSession.sharedInstance()
        let config = LKRTCAudioSessionConfiguration.webRTC()

        if isRecordingEnabled {
            // Two-way audio: mic + speaker routing.
            config.category = AVAudioSession.Category.playAndRecord.rawValue
            config.mode = AVAudioSession.Mode.videoChat.rawValue
            config.categoryOptions = [.defaultToSpeaker, .allowBluetooth]
        } else {
            // Playback only; mix with other apps' audio.
            config.category = AVAudioSession.Category.playback.rawValue
            config.mode = AVAudioSession.Mode.spokenAudio.rawValue
            config.categoryOptions = [.mixWithOthers]
        }

        // Errors from setConfiguration are intentionally best-effort here.
        session.lockForConfiguration()
        try? session.setConfiguration(config)
        session.unlockForConfiguration()
        #endif
    }
}

// MARK: - Private

func trackDidStart(_ type: Type) async throws {
let (newState, oldState) = state.mutate { state in
Expand All @@ -245,9 +305,6 @@ public class AudioManager: Loggable {
if type == .remote { state.remoteTracksCount += 1 }
return (state, oldState)
}
#if os(iOS) || os(visionOS) || os(tvOS)
try await _asyncConfigure(newState: newState, oldState: oldState)
#endif
}

func trackDidStop(_ type: Type) async throws {
Expand All @@ -257,9 +314,6 @@ public class AudioManager: Loggable {
if type == .remote { state.remoteTracksCount = max(state.remoteTracksCount - 1, 0) }
return (state, oldState)
}
#if os(iOS) || os(visionOS) || os(tvOS)
try await _asyncConfigure(newState: newState, oldState: oldState)
#endif
}

#if os(iOS) || os(visionOS) || os(tvOS)
Expand Down
96 changes: 96 additions & 0 deletions Tests/LiveKitTests/AudioEngineTests.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,96 @@
/*
* Copyright 2025 LiveKit
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

import AVFoundation
@testable import LiveKit
import LiveKitWebRTC
import XCTest

class AudioEngineTests: XCTestCase {
    override class func setUp() {
        LiveKitSDK.setLoggerStandardOutput()
        RTCSetMinDebugLogLevel(.info)
    }

    override func tearDown() async throws {}

    // Test if mic is authorized. Only works on device.
    func testMicAuthorized() async {
        var status = AVCaptureDevice.authorizationStatus(for: .audio)
        if status != .authorized {
            // Prompt, then re-read the status: asserting the pre-request value
            // would fail even when the user just granted access.
            let granted = await AVCaptureDevice.requestAccess(for: .audio)
            XCTAssert(granted)
            status = AVCaptureDevice.authorizationStatus(for: .audio)
        }

        XCTAssertEqual(status, .authorized)
    }

    // Test start generating local audio buffer without joining to room.
    func testPrejoinLocalAudioBuffer() async throws {
        // Set up expectation...
        let didReceiveAudioFrame = expectation(description: "Did receive audio frame")
        didReceiveAudioFrame.assertForOverFulfill = false

        // Start watching for audio frame...
        let audioFrameWatcher = AudioTrackWatcher(id: "notifier01") { _ in
            didReceiveAudioFrame.fulfill()
        }

        let localMicTrack = LocalAudioTrack.createTrack()
        // Attach audio frame watcher...
        localMicTrack.add(audioRenderer: audioFrameWatcher)

        Task.detached {
            print("Starting audio track in 3 seconds...")
            try? await Task.sleep(for: .seconds(3))
            AudioManager.shared.startRecording()
        }

        // Wait for audio frame...
        print("Waiting for first audio frame...")
        await fulfillment(of: [didReceiveAudioFrame], timeout: 10)

        // Clean up: detach watcher and stop recording so later tests start fresh.
        localMicTrack.remove(audioRenderer: audioFrameWatcher)
        AudioManager.shared.stopRecording()
    }

    // Test the manual rendering mode (no-device mode) of AVAudioEngine based AudioDeviceModule.
    // In manual rendering, no device access will be initialized such as mic and speaker.
    func testManualRenderingMode() async throws {
        // Attach sin wave generator when engine requests input node...
        // inputMixerNode will automatically convert to RTC's internal format (int16).
        // AVAudioEngine.attach() retains the node.
        AudioManager.shared.onEngineWillConnectInput = { _, engine, inputMixerNode in
            let sin = SineWaveSourceNode()
            engine.attach(sin)
            engine.connect(sin, to: inputMixerNode, format: nil)
        }

        // Set manual rendering mode...
        AudioManager.shared.isManualRenderingMode = true

        // Check if manual rendering mode is set...
        let isManualRenderingMode = AudioManager.shared.isManualRenderingMode
        print("manualRenderingMode: \(isManualRenderingMode)")
        XCTAssert(isManualRenderingMode)

        // Start rendering...
        AudioManager.shared.startRecording()

        // Render for 10 seconds...
        try? await Task.sleep(for: .seconds(10))

        // Clean up so subsequent tests are not left in recording state.
        AudioManager.shared.stopRecording()
    }
}
58 changes: 58 additions & 0 deletions Tests/LiveKitTests/Support/SinWaveSourceNode.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
/*
* Copyright 2025 LiveKit
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

import AVFAudio

/// An `AVAudioSourceNode` that synthesizes a continuous mono sine wave.
/// Used as a deterministic audio input in manual-rendering tests.
class SineWaveSourceNode: AVAudioSourceNode {
    // Stored for introspection; the render block captures its own copies.
    private let sampleRate: Double
    private let frequency: Double

    /// - Parameters:
    ///   - frequency: Tone frequency in Hz (default 440 Hz, concert A).
    ///   - sampleRate: Output sample rate in Hz (default 48 kHz).
    init(frequency: Double = 440.0, sampleRate: Double = 48000.0) {
        self.frequency = frequency
        self.sampleRate = sampleRate

        let format = AVAudioFormat(standardFormatWithSampleRate: sampleRate, channels: 1)!

        var currentPhase = 0.0
        let phaseIncrement = 2.0 * Double.pi * frequency / sampleRate

        // NOTE: the render block runs on the real-time audio thread — no I/O,
        // locks, or allocation here. (A per-callback `print` was removed for
        // this reason; it can glitch or stall rendering and floods the log.)
        let renderBlock: AVAudioSourceNodeRenderBlock = { _, _, frameCount, audioBufferList in
            let ablPointer = UnsafeMutableAudioBufferListPointer(audioBufferList)
            guard let ptr = ablPointer[0].mData?.assumingMemoryBound(to: Float.self) else {
                return kAudioUnitErr_InvalidParameter
            }

            // Generate sine wave samples
            for frame in 0 ..< Int(frameCount) {
                ptr[frame] = Float(sin(currentPhase))

                // Update the phase
                currentPhase += phaseIncrement

                // Keep phase in [0, 2π] to prevent floating point errors
                if currentPhase >= 2.0 * Double.pi {
                    currentPhase -= 2.0 * Double.pi
                }
            }

            return noErr
        }

        super.init(format: format, renderBlock: renderBlock)
    }
}

0 comments on commit 622caa9

Please sign in to comment.