Skip to content

Commit

Permalink
Make private var codec IOVideoUnit, IOAudioUnit.
Browse files Browse the repository at this point in the history
  • Loading branch information
shogo4405 committed Sep 30, 2023
1 parent 53689f0 commit 3050627
Show file tree
Hide file tree
Showing 6 changed files with 54 additions and 48 deletions.
34 changes: 29 additions & 5 deletions Sources/Media/IOAudioUnit.swift
Original file line number Diff line number Diff line change
Expand Up @@ -7,11 +7,6 @@ import SwiftPMSupport
final class IOAudioUnit: NSObject, IOUnit {
typealias FormatDescription = CMAudioFormatDescription

lazy var codec: AudioCodec = {
var codec = AudioCodec()
codec.lockQueue = lockQueue
return codec
}()
let lockQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.AudioIOComponent.lock")
var soundTransform: SoundTransform = .init() {
didSet {
Expand Down Expand Up @@ -39,7 +34,15 @@ final class IOAudioUnit: NSObject, IOUnit {
var outputFormat: FormatDescription? {
return codec.outputFormat?.formatDescription
}
/// The codec's reusable input buffer, exposed so callers (e.g. the RTMP audio
/// message path) can fill it directly now that `codec` itself is private.
var inputBuffer: AVAudioBuffer? {
    return codec.inputBuffer
}
private(set) var presentationTimeStamp: CMTime = .invalid
// The audio encoder/decoder backing this unit. Made private by this change;
// external code goes through the forwarding members (`inputBuffer`,
// `appendAudioBuffer`, `setAudioStreamBasicDescription`) instead.
private lazy var codec: AudioCodec = {
    var codec = AudioCodec()
    codec.lockQueue = lockQueue  // serialize codec work on this unit's queue
    return codec
}()
private lazy var resampler: IOAudioResampler<IOAudioUnit> = {
var resampler = IOAudioResampler<IOAudioUnit>()
resampler.delegate = self
Expand Down Expand Up @@ -90,6 +93,26 @@ final class IOAudioUnit: NSObject, IOUnit {
codec.appendSampleBuffer(sampleBuffer)
}
}

/// Forwards an audio buffer to the (now private) codec.
/// - Parameters:
///   - audioBuffer: The buffer to enqueue (typically the one obtained via `inputBuffer`).
///   - presentationTimeStamp: The buffer's presentation time.
func appendAudioBuffer(_ audioBuffer: AVAudioBuffer, presentationTimeStamp: CMTime) {
    codec.appendAudioBuffer(audioBuffer, presentationTimeStamp: presentationTimeStamp)
}

/// Builds a `CMAudioFormatDescription` from an ASBD (e.g. parsed from an
/// FLV/AAC sequence header) and stores it into `inputFormat`.
/// - Parameter audioStreamBasicDescription: Source description; `nil` is a no-op.
func setAudioStreamBasicDescription(_ audioStreamBasicDescription: AudioStreamBasicDescription?) {
    guard var audioStreamBasicDescription else {
        return
    }
    // NOTE(review): a non-noErr status is silently ignored — `inputFormat` is
    // simply left unchanged. Discard the result explicitly rather than binding
    // a never-read `status` local (which produces an unused-variable warning).
    _ = CMAudioFormatDescriptionCreate(
        allocator: kCFAllocatorDefault,
        asbd: &audioStreamBasicDescription,  // the C API requires a mutable pointer, hence `var`
        layoutSize: 0,
        layout: nil,
        magicCookieSize: 0,
        magicCookie: nil,
        extensions: nil,
        formatDescriptionOut: &inputFormat
    )
}
}

extension IOAudioUnit: IOUnitEncoding {
Expand All @@ -108,6 +131,7 @@ extension IOAudioUnit: IOUnitEncoding {
extension IOAudioUnit: IOUnitDecoding {
// MARK: IOUnitDecoding
func startDecoding() {
codec.settings.format = .pcm
if let playerNode = mixer?.mediaLink.playerNode {
mixer?.audioEngine?.attach(playerNode)
}
Expand Down
27 changes: 2 additions & 25 deletions Sources/Media/IOMixer.swift
Original file line number Diff line number Diff line change
Expand Up @@ -24,12 +24,6 @@ public final class IOMixer {
return AVAudioEngine()
}

enum ReadyState {
case standby
case encoding
case decoding
}

public var hasVideo: Bool {
get {
mediaLink.hasVideo
Expand Down Expand Up @@ -200,7 +194,6 @@ public final class IOMixer {
}
#endif

private var readyState: ReadyState = .standby
private(set) lazy var audioEngine: AVAudioEngine? = {
return IOMixer.audioEngineHolder.retain()
}()
Expand Down Expand Up @@ -247,46 +240,30 @@ public final class IOMixer {
extension IOMixer: IOUnitEncoding {
/// Starts encoding for video and audio data.
public func startEncoding(_ delegate: any AVCodecDelegate) {
guard readyState == .standby else {
return
}
readyState = .encoding
videoIO.startEncoding(delegate)
audioIO.startEncoding(delegate)
}

/// Stop encoding.
public func stopEncoding() {
guard readyState == .encoding else {
return
}
videoIO.stopEncoding()
audioIO.stopEncoding()
readyState = .standby
}
}

extension IOMixer: IOUnitDecoding {
/// Starts decoding for video and audio data.
public func startDecoding() {
guard readyState == .standby else {
return
}
audioIO.startDecoding()
videoIO.startDecoding()
mediaLink.startRunning()
readyState = .decoding
}

/// Stop decoding.
public func stopDecoding() {
guard readyState == .decoding else {
return
}
mediaLink.stopRunning()
audioIO.stopDecoding()
videoIO.stopDecoding()
readyState = .standby
}
}

Expand Down Expand Up @@ -375,8 +352,8 @@ extension IOMixer: Running {
let isMultiCamSupported = true
#endif
guard let device = error.device, let format = device.videoFormat(
width: sessionPreset.width ?? Int32(videoIO.codec.settings.videoSize.width),
height: sessionPreset.height ?? Int32(videoIO.codec.settings.videoSize.height),
width: sessionPreset.width ?? Int32(videoIO.settings.videoSize.width),
height: sessionPreset.height ?? Int32(videoIO.settings.videoSize.height),
frameRate: videoIO.frameRate,
isMultiCamSupported: isMultiCamSupported
), device.activeFormat != format else {
Expand Down
19 changes: 13 additions & 6 deletions Sources/Media/IOVideoUnit.swift
Original file line number Diff line number Diff line change
Expand Up @@ -17,11 +17,6 @@ final class IOVideoUnit: NSObject, IOUnit {
}
}
var multiCamCaptureSettings: MultiCamCaptureSettings = .default
lazy var codec: VideoCodec = {
var codec = VideoCodec()
codec.lockQueue = lockQueue
return codec
}()
weak var mixer: IOMixer?
var muted: Bool {
get {
Expand All @@ -31,7 +26,14 @@ final class IOVideoUnit: NSObject, IOUnit {
videoMixer.muted = newValue
}
}

/// The video compression settings, forwarded to the private codec so callers
/// no longer need direct access to it.
var settings: VideoCodecSettings {
    get { codec.settings }
    set { codec.settings = newValue }
}
#if os(iOS) || os(macOS) || os(tvOS)
var frameRate = IOMixer.defaultFrameRate {
didSet {
Expand Down Expand Up @@ -111,6 +113,11 @@ final class IOVideoUnit: NSObject, IOUnit {
videoMixer.delegate = self
return videoMixer
}()
// The video encoder/decoder backing this unit. Made private by this change;
// external code goes through `settings` / `appendSampleBuffer` instead.
private lazy var codec: VideoCodec = {
    var codec = VideoCodec()
    codec.lockQueue = lockQueue  // serialize codec work on this unit's queue
    return codec
}()

deinit {
if Thread.isMainThread {
Expand Down
4 changes: 2 additions & 2 deletions Sources/Net/NetStream.swift
Original file line number Diff line number Diff line change
Expand Up @@ -171,10 +171,10 @@ open class NetStream: NSObject {
/// Specifies the video compression properties.
public var videoSettings: VideoCodecSettings {
get {
mixer.videoIO.codec.settings
mixer.videoIO.settings
}
set {
mixer.videoIO.codec.settings = newValue
mixer.videoIO.settings = newValue
}
}

Expand Down
2 changes: 1 addition & 1 deletion Sources/RTMP/RTMPConnection.swift
Original file line number Diff line number Diff line change
Expand Up @@ -311,7 +311,7 @@ public class RTMPConnection: EventDispatcher {
var outputBufferSize: Int = 0
for stream in streams {
// in bytes.
outputBufferSize += (Int(stream.mixer.videoIO.codec.settings.bitRate) + stream.mixer.audioIO.codec.settings.bitRate) / 8
outputBufferSize += (Int(stream.mixer.videoIO.settings.bitRate) + stream.mixer.audioIO.settings.bitRate) / 8
}
if socket.outputBufferSize < outputBufferSize {
socket.outputBufferSize = outputBufferSize
Expand Down
16 changes: 7 additions & 9 deletions Sources/RTMP/RTMPMessage.swift
Original file line number Diff line number Diff line change
Expand Up @@ -586,15 +586,13 @@ final class RTMPAudioMessage: RTMPMessage {
switch payload[1] {
case FLVAACPacketType.seq.rawValue:
let config = AudioSpecificConfig(bytes: [UInt8](payload[codec.headerSize..<payload.count]))
stream.mixer.audioIO.codec.settings.format = .pcm
stream.mixer.audioIO.codec.inSourceFormat = config?.audioStreamBasicDescription()
stream.mixer.audioIO.setAudioStreamBasicDescription(config?.audioStreamBasicDescription())
case FLVAACPacketType.raw.rawValue:
if stream.mixer.audioIO.codec.inSourceFormat == nil {
stream.mixer.audioIO.codec.settings.format = .pcm
stream.mixer.audioIO.codec.inSourceFormat = makeAudioStreamBasicDescription()
if stream.mixer.audioIO.inputFormat == nil {
stream.mixer.audioIO.setAudioStreamBasicDescription(makeAudioStreamBasicDescription())
}
if let audioBuffer = makeAudioBuffer(stream) {
stream.mixer.audioIO.codec.appendAudioBuffer(audioBuffer, presentationTimeStamp: CMTime(seconds: stream.audioTimestamp / 1000, preferredTimescale: 1000))
stream.mixer.audioIO.appendAudioBuffer(audioBuffer, presentationTimeStamp: CMTime(seconds: stream.audioTimestamp / 1000, preferredTimescale: 1000))
}
default:
break
Expand All @@ -603,7 +601,7 @@ final class RTMPAudioMessage: RTMPMessage {

private func makeAudioBuffer(_ stream: RTMPStream) -> AVAudioBuffer? {
return payload.withUnsafeMutableBytes { (buffer: UnsafeMutableRawBufferPointer) -> AVAudioBuffer? in
guard let baseAddress = buffer.baseAddress, let buffer = stream.mixer.audioIO.codec.inputBuffer as? AVAudioCompressedBuffer else {
guard let baseAddress = buffer.baseAddress, let buffer = stream.mixer.audioIO.inputBuffer as? AVAudioCompressedBuffer else {
return nil
}
let byteCount = payload.count - codec.headerSize
Expand Down Expand Up @@ -653,7 +651,7 @@ final class RTMPVideoMessage: RTMPMessage {
makeFormatDescription(stream, format: .h264)
case FLVAVCPacketType.nal.rawValue:
if let sampleBuffer = makeSampleBuffer(stream, type: type, offset: 0) {
stream.mixer.videoIO.codec.appendSampleBuffer(sampleBuffer)
stream.mixer.videoIO.appendSampleBuffer(sampleBuffer)
}
default:
break
Expand All @@ -668,7 +666,7 @@ final class RTMPVideoMessage: RTMPMessage {
makeFormatDescription(stream, format: .hevc)
case FLVVideoPacketType.codedFrames.rawValue:
if let sampleBuffer = makeSampleBuffer(stream, type: type, offset: 3) {
stream.mixer.videoIO.codec.appendSampleBuffer(sampleBuffer)
stream.mixer.videoIO.appendSampleBuffer(sampleBuffer)
}
default:
break
Expand Down

0 comments on commit 3050627

Please sign in to comment.