diff --git a/Examples/tvOS/AppDelegate.swift b/Examples/tvOS/AppDelegate.swift
index 671d50b6d..00c3ac716 100644
--- a/Examples/tvOS/AppDelegate.swift
+++ b/Examples/tvOS/AppDelegate.swift
@@ -1,10 +1,24 @@
+import AVFAudio
+import HaishinKit
+import Logboard
import UIKit
+let logger = LBLogger.with("com.haishinkit.Exsample.iOS")
+
@UIApplicationMain
class AppDelegate: UIResponder, UIApplicationDelegate {
var window: UIWindow?
func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
- true
+ // LBLogger.with(HaishinKitIdentifier).level = .trace
+ let session = AVAudioSession.sharedInstance()
+ do {
+ // If you set the "mode" parameter, stereo capture is not possible, so it is left unspecified.
+ try session.setCategory(.playAndRecord)
+ try session.setActive(true)
+ } catch {
+ logger.error(error)
+ }
+ return true
}
}
diff --git a/Examples/tvOS/Base.lproj/Base.lproj/Main.storyboard b/Examples/tvOS/Base.lproj/Base.lproj/Main.storyboard
deleted file mode 100644
index c134ddad2..000000000
--- a/Examples/tvOS/Base.lproj/Base.lproj/Main.storyboard
+++ /dev/null
@@ -1,43 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/Examples/tvOS/Base.lproj/Main.storyboard b/Examples/tvOS/Base.lproj/Main.storyboard
new file mode 100644
index 000000000..97c46b26c
--- /dev/null
+++ b/Examples/tvOS/Base.lproj/Main.storyboard
@@ -0,0 +1,73 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/Examples/tvOS/Info.plist b/Examples/tvOS/Info.plist
index 1cb14c3a0..4ea126539 100644
--- a/Examples/tvOS/Info.plist
+++ b/Examples/tvOS/Info.plist
@@ -2,6 +2,10 @@
+ NSMicrophoneUsageDescription
+
+ NSCameraUsageDescription
+
CFBundleDevelopmentRegion
en
CFBundleDisplayName
diff --git a/Examples/tvOS/ViewController.swift b/Examples/tvOS/ViewController.swift
index 222bd12ac..d5027f569 100644
--- a/Examples/tvOS/ViewController.swift
+++ b/Examples/tvOS/ViewController.swift
@@ -1,22 +1,62 @@
+import AVFoundation
+import AVKit
import HaishinKit
import UIKit
+enum Mode {
+ case publish
+ case playback
+}
+
final class ViewController: UIViewController {
@IBOutlet private weak var lfView: MTHKView!
-
- var rtmpConnection = RTMPConnection()
- var rtmpStream: RTMPStream!
+ @IBOutlet private weak var playbackOrPublishSegment: UISegmentedControl! {
+ didSet {
+ guard AVContinuityDevicePickerViewController.isSupported else {
+ return
+ }
+ playbackOrPublishSegment.removeSegment(at: 1, animated: false)
+ }
+ }
+ private var mode: Mode = .playback {
+ didSet {
+ logger.info(mode)
+ }
+ }
+ private var connection = RTMPConnection()
+ private var stream: RTMPStream!
override func viewDidLoad() {
super.viewDidLoad()
- rtmpStream = RTMPStream(connection: rtmpConnection)
- rtmpConnection.addEventListener(.rtmpStatus, selector: #selector(rtmpStatusHandler), observer: self)
- rtmpConnection.connect(Preference.defaultInstance.uri!)
+ stream = RTMPStream(connection: connection)
+ connection.addEventListener(.rtmpStatus, selector: #selector(rtmpStatusHandler), observer: self)
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
- lfView?.attachStream(rtmpStream)
+ lfView?.attachStream(stream)
+ }
+
+ @IBAction func segmentedControl(_ sender: UISegmentedControl) {
+ switch sender.titleForSegment(at: sender.selectedSegmentIndex) {
+ case "Publish":
+ mode = .publish
+ case "Playback":
+ mode = .playback
+ default:
+ break
+ }
+ }
+
+ @IBAction func go(_ sender: UIButton) {
+ switch mode {
+ case .publish:
+ let picker = AVContinuityDevicePickerViewController()
+ picker.delegate = self
+ present(picker, animated: true)
+ case .playback:
+ connection.connect(Preference.defaultInstance.uri!)
+ }
}
@objc
@@ -31,9 +71,24 @@ final class ViewController: UIViewController {
switch code {
case RTMPConnection.Code.connectSuccess.rawValue:
- rtmpStream!.play(Preference.defaultInstance.streamName)
+ switch mode {
+ case .publish:
+ stream.publish(Preference.defaultInstance.streamName)
+ case .playback:
+ stream.play(Preference.defaultInstance.streamName)
+ }
default:
break
}
}
}
+
+extension ViewController: AVContinuityDevicePickerViewControllerDelegate {
+ // MARK: AVContinuityDevicePickerViewControllerDelegate
+ func continuityDevicePicker(_ pickerViewController: AVContinuityDevicePickerViewController, didConnect device: AVContinuityDevice) {
+ if let camera = device.videoDevices.first {
+ logger.info(camera)
+ stream.attachCamera(camera)
+ }
+ }
+}
diff --git a/HaishinKit.xcodeproj/project.pbxproj b/HaishinKit.xcodeproj/project.pbxproj
index 3b7d309a4..1ee070c57 100644
--- a/HaishinKit.xcodeproj/project.pbxproj
+++ b/HaishinKit.xcodeproj/project.pbxproj
@@ -764,9 +764,9 @@
290907CE1C3961BC00F2E80C /* Util */ = {
isa = PBXGroup;
children = (
- BC701F312AAC676C00C4BEFE /* AVAudioFormatFactory.swift */,
298BCF321DD4C44A007FF86A /* AnyUtil.swift */,
29DC17B221D0CC0600E26CED /* Atomic.swift */,
+ BC701F312AAC676C00C4BEFE /* AVAudioFormatFactory.swift */,
29B876B81CD70B3900FC07DA /* ByteArray.swift */,
29B876631CD70AB300FC07DA /* Constants.swift */,
BC0D236C26331BAB001DDA0C /* DataBuffer.swift */,
@@ -1105,7 +1105,7 @@
29D3D4FE1ED0536700DD4AA6 /* tvOS */ = {
isa = PBXGroup;
children = (
- 29D3D5011ED053C000DD4AA6 /* Base.lproj */,
+ 29D3D5021ED053C000DD4AA6 /* Main.storyboard */,
29D3D4FF1ED053C000DD4AA6 /* AppDelegate.swift */,
29D3D5001ED053C000DD4AA6 /* Assets.xcassets */,
29D3D5041ED053C000DD4AA6 /* Info.plist */,
@@ -1114,14 +1114,6 @@
path = tvOS;
sourceTree = "";
};
- 29D3D5011ED053C000DD4AA6 /* Base.lproj */ = {
- isa = PBXGroup;
- children = (
- 29D3D5021ED053C000DD4AA6 /* Main.storyboard */,
- );
- path = Base.lproj;
- sourceTree = "";
- };
29EA87D11E799EFF0043A5F8 /* Extension */ = {
isa = PBXGroup;
children = (
@@ -1490,7 +1482,6 @@
29D3D4EE1ED0531500DD4AA6 = {
CreatedOnToolsVersion = 8.3.2;
LastSwiftMigration = 1020;
- ProvisioningStyle = Automatic;
};
BCCC45952AA289FA0016EFE8 = {
CreatedOnToolsVersion = 14.3;
@@ -2499,6 +2490,8 @@
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
+ CODE_SIGN_IDENTITY = "Apple Development";
+ CODE_SIGN_STYLE = Automatic;
DEBUG_INFORMATION_FORMAT = dwarf;
DEVELOPMENT_TEAM = SUEQ2SZ2L5;
ENABLE_USER_SCRIPT_SANDBOXING = NO;
@@ -2512,6 +2505,7 @@
OTHER_SWIFT_FLAGS = "-enable-upcoming-feature ExistentialAny";
PRODUCT_BUNDLE_IDENTIFIER = "com.haishinkit.Example-tvOS";
PRODUCT_NAME = "$(TARGET_NAME)";
+ PROVISIONING_PROFILE_SPECIFIER = "";
SDKROOT = appletvos;
SUPPORTED_PLATFORMS = "appletvos appletvsimulator";
SUPPORTS_MACCATALYST = NO;
@@ -2519,7 +2513,7 @@
SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = 3;
- TVOS_DEPLOYMENT_TARGET = 12.0;
+ TVOS_DEPLOYMENT_TARGET = 17.0;
};
name = Debug;
};
@@ -2532,6 +2526,8 @@
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
+ CODE_SIGN_IDENTITY = "Apple Development";
+ CODE_SIGN_STYLE = Automatic;
DEVELOPMENT_TEAM = SUEQ2SZ2L5;
ENABLE_USER_SCRIPT_SANDBOXING = NO;
FRAMEWORK_SEARCH_PATHS = "$(inherited)";
@@ -2544,13 +2540,14 @@
OTHER_SWIFT_FLAGS = "-enable-upcoming-feature ExistentialAny";
PRODUCT_BUNDLE_IDENTIFIER = "com.haishinkit.Example-tvOS";
PRODUCT_NAME = "$(TARGET_NAME)";
+ PROVISIONING_PROFILE_SPECIFIER = "";
SDKROOT = appletvos;
SUPPORTED_PLATFORMS = "appletvos appletvsimulator";
SUPPORTS_MACCATALYST = NO;
SUPPORTS_MAC_DESIGNED_FOR_IPHONE_IPAD = NO;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = 3;
- TVOS_DEPLOYMENT_TARGET = 12.0;
+ TVOS_DEPLOYMENT_TARGET = 17.0;
};
name = Release;
};
diff --git a/Sources/Codec/VideoCodecSettings.swift b/Sources/Codec/VideoCodecSettings.swift
index cd0c0f205..3a6e99dfd 100644
--- a/Sources/Codec/VideoCodecSettings.swift
+++ b/Sources/Codec/VideoCodecSettings.swift
@@ -86,12 +86,6 @@ public struct VideoCodecSettings: Codable {
public var frameInterval: Double
/// Specifies the keyframeInterval.
public var maxKeyFrameIntervalDuration: Int32
- /// Specifies the scalingMode.
- public var scalingMode: ScalingMode
- /// Specifies the allowFrameRecording.
- public var allowFrameReordering: Bool? // swiftlint:disable:this discouraged_optional_boolean
- /// Specifies the bitRateMode.
- public var bitRateMode: BitRateMode
/// Specifies the H264 profileLevel.
public var profileLevel: String {
didSet {
@@ -102,6 +96,12 @@ public struct VideoCodecSettings: Codable {
}
}
}
+ /// Specifies the scalingMode.
+ public var scalingMode: ScalingMode
+ /// Specifies the bitRateMode.
+ public var bitRateMode: BitRateMode
+ /// Specifies the allowFrameReordering.
+ public var allowFrameReordering: Bool? // swiftlint:disable:this discouraged_optional_boolean
/// Specifies the HardwareEncoder is enabled(TRUE), or not(FALSE) for macOS.
public var isHardwareEncoderEnabled = true
@@ -110,13 +110,13 @@ public struct VideoCodecSettings: Codable {
/// Creates a new VideoCodecSettings instance.
public init(
videoSize: CGSize = .init(width: 854, height: 480),
- profileLevel: String = kVTProfileLevel_H264_Baseline_3_1 as String,
bitRate: Int = 640 * 1000,
frameInterval: Double = 0.0,
maxKeyFrameIntervalDuration: Int32 = 2,
+ profileLevel: String = kVTProfileLevel_H264_Baseline_3_1 as String,
scalingMode: ScalingMode = .trim,
bitRateMode: BitRateMode = .average,
- allowFrameReordering: Bool? = nil, // swiftlint:disable:this discouraged_optional_boolean
+ allowFrameReordering: Bool? = nil, // swiftlint:disable:this discouraged_optional_boolean
isHardwareEncoderEnabled: Bool = true
) {
self.videoSize = videoSize
diff --git a/Sources/Media/IOMixer.swift b/Sources/Media/IOMixer.swift
index fccee2711..f07c4b00f 100644
--- a/Sources/Media/IOMixer.swift
+++ b/Sources/Media/IOMixer.swift
@@ -9,8 +9,10 @@ import UIKit
protocol IOMixerDelegate: AnyObject {
func mixer(_ mixer: IOMixer, didOutput audio: AVAudioPCMBuffer, presentationTimeStamp: CMTime)
func mixer(_ mixer: IOMixer, didOutput video: CMSampleBuffer)
- #if os(iOS)
+ #if os(iOS) || os(tvOS)
+ @available(tvOS 17.0, *)
func mixer(_ mixer: IOMixer, sessionWasInterrupted session: AVCaptureSession, reason: AVCaptureSession.InterruptionReason?)
+ @available(tvOS 17.0, *)
func mixer(_ mixer: IOMixer, sessionInterruptionEnded session: AVCaptureSession)
#endif
}
@@ -316,7 +318,7 @@ extension IOMixer: Running {
@available(tvOS 17.0, *)
private func addSessionObservers(_ session: AVCaptureSession) {
NotificationCenter.default.addObserver(self, selector: #selector(sessionRuntimeError(_:)), name: .AVCaptureSessionRuntimeError, object: session)
- #if os(iOS)
+ #if os(iOS) || os(tvOS)
NotificationCenter.default.addObserver(self, selector: #selector(sessionInterruptionEnded(_:)), name: .AVCaptureSessionInterruptionEnded, object: session)
NotificationCenter.default.addObserver(self, selector: #selector(sessionWasInterrupted(_:)), name: .AVCaptureSessionWasInterrupted, object: session)
#endif
@@ -324,7 +326,7 @@ extension IOMixer: Running {
@available(tvOS 17.0, *)
private func removeSessionObservers(_ session: AVCaptureSession) {
- #if os(iOS)
+ #if os(iOS) || os(tvOS)
NotificationCenter.default.removeObserver(self, name: .AVCaptureSessionWasInterrupted, object: session)
NotificationCenter.default.removeObserver(self, name: .AVCaptureSessionInterruptionEnded, object: session)
#endif
@@ -341,7 +343,7 @@ extension IOMixer: Running {
let error = AVError(_nsError: errorValue)
switch error.code {
case .unsupportedDeviceActiveFormat:
- #if os(iOS)
+ #if os(iOS) || os(tvOS)
let isMultiCamSupported: Bool
if #available(iOS 13.0, *) {
isMultiCamSupported = session is AVCaptureMultiCamSession
@@ -371,7 +373,7 @@ extension IOMixer: Running {
} catch {
logger.warn(error)
}
- #if os(iOS)
+ #if os(iOS) || os(tvOS)
case .mediaServicesWereReset:
startCaptureSessionIfNeeded()
#endif
@@ -380,7 +382,8 @@ extension IOMixer: Running {
}
}
- #if os(iOS)
+ #if os(iOS) || os(tvOS)
+ @available(tvOS 17.0, *)
@objc
private func sessionWasInterrupted(_ notification: Notification) {
guard let session = notification.object as? AVCaptureSession else {
@@ -395,6 +398,7 @@ extension IOMixer: Running {
delegate?.mixer(self, sessionWasInterrupted: session, reason: reason)
}
+ @available(tvOS 17.0, *)
@objc
private func sessionInterruptionEnded(_ notification: Notification) {
delegate?.mixer(self, sessionInterruptionEnded: session)
diff --git a/Sources/Net/NetStream.swift b/Sources/Net/NetStream.swift
index 3e051e609..3baef5a83 100644
--- a/Sources/Net/NetStream.swift
+++ b/Sources/Net/NetStream.swift
@@ -4,7 +4,7 @@ import CoreMedia
#if canImport(ScreenCaptureKit)
import ScreenCaptureKit
#endif
-#if os(iOS)
+#if os(iOS) || os(tvOS)
import UIKit
#endif
@@ -14,10 +14,12 @@ public protocol NetStreamDelegate: AnyObject {
func stream(_ stream: NetStream, didOutput audio: AVAudioBuffer, presentationTimeStamp: CMTime)
/// Tells the receiver to playback a video packet incoming.
func stream(_ stream: NetStream, didOutput video: CMSampleBuffer)
- #if os(iOS)
+ #if os(iOS) || os(tvOS)
/// Tells the receiver to session was interrupted.
+ @available(tvOS 17.0, *)
func stream(_ stream: NetStream, sessionWasInterrupted session: AVCaptureSession, reason: AVCaptureSession.InterruptionReason?)
/// Tells the receiver to session interrupted ended.
+ @available(tvOS 17.0, *)
func stream(_ stream: NetStream, sessionInterruptionEnded session: AVCaptureSession)
#endif
/// Tells the receiver to video codec error occured.
@@ -187,7 +189,7 @@ open class NetStream: NSObject {
/// Creates a NetStream object.
override public init() {
super.init()
- #if os(iOS)
+ #if os(iOS) || os(tvOS)
NotificationCenter.default.addObserver(self, selector: #selector(didEnterBackground(_:)), name: UIApplication.didEnterBackgroundNotification, object: nil)
NotificationCenter.default.addObserver(self, selector: #selector(willEnterForeground(_:)), name: UIApplication.willEnterForegroundNotification, object: nil)
#endif
@@ -251,7 +253,7 @@ open class NetStream: NSObject {
#if os(macOS)
/// Attaches the screen input object.
- open func attachScreen(_ input: AVCaptureScreenInput?) {
+ public func attachScreen(_ input: AVCaptureScreenInput?) {
lockQueue.async {
self.mixer.videoIO.attachScreen(input)
}
@@ -300,7 +302,7 @@ open class NetStream: NSObject {
mixer.recorder.stopRunning()
}
- #if os(iOS)
+ #if os(iOS) || os(tvOS)
@objc
private func didEnterBackground(_ notification: Notification) {
// Require main thread. Otherwise the microphone cannot be used in the background.
@@ -326,11 +328,13 @@ extension NetStream: IOMixerDelegate {
delegate?.stream(self, didOutput: audio, presentationTimeStamp: presentationTimeStamp)
}
- #if os(iOS)
+ #if os(iOS) || os(tvOS)
+ @available(tvOS 17.0, *)
func mixer(_ mixer: IOMixer, sessionWasInterrupted session: AVCaptureSession, reason: AVCaptureSession.InterruptionReason?) {
delegate?.stream(self, sessionWasInterrupted: session, reason: reason)
}
+ @available(tvOS 17.0, *)
func mixer(_ mixer: IOMixer, sessionInterruptionEnded session: AVCaptureSession) {
delegate?.stream(self, sessionInterruptionEnded: session)
}