Feature Screen API.
shogo4405 committed Jun 1, 2024
1 parent d041959 commit 926157a
Showing 27 changed files with 1,420 additions and 384 deletions.
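
The bundled iOS and macOS examples below are the clearest tour of the new Screen API: screen objects (video tracks, text, images, AV assets) are added as children of stream.screen, positioned by alignment, layout margin and size, and refreshed through a ScreenDelegate callback. The following is a minimal sketch distilled from those examples — the view controller and the way stream is obtained are hypothetical, but every Screen call it makes appears in the diffs below.

    import AVFoundation
    import HaishinKit
    import UIKit

    final class ScreenAPISketchViewController: UIViewController {
        // Assumed to be wired up the same way as in the sample apps below
        // (via their stream-switcher helper); hypothetical here.
        private var stream: IOStream!
        private let textScreenObject = TextScreenObject()

        override func viewDidLoad() {
            super.viewDidLoad()

            // The screen is the compositing surface the stream renders from.
            stream.videoMixerSettings.size = .init(width: 720, height: 1280)
            stream.screen.backgroundColor = UIColor.black.cgColor

            // A second camera track rendered as a picture-in-picture overlay.
            let pip = VideoTrackScreenObject()
            pip.track = 1
            pip.cornerRadius = 16.0
            pip.horizontalAlignment = .right
            pip.layoutMargin = .init(top: 16, left: 0, bottom: 0, right: 16)
            pip.size = .init(width: 160 * 2, height: 90 * 2)

            // A text overlay, refreshed from the delegate callback below.
            textScreenObject.horizontalAlignment = .right
            textScreenObject.verticalAlignment = .bottom
            textScreenObject.layoutMargin = .init(top: 0, left: 0, bottom: 16, right: 16)

            try? stream.screen.addChild(pip)
            try? stream.screen.addChild(textScreenObject)
            stream.screen.delegate = self
        }
    }

    extension ScreenAPISketchViewController: ScreenDelegate {
        func screen(_ screen: Screen, willLayout time: CMTime) {
            // Called before each layout pass; a natural place to update dynamic overlays.
            textScreenObject.string = Date().description
        }
    }
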
2 changes: 1 addition & 1 deletion .swiftlint.yml
@@ -19,7 +19,7 @@ excluded:
opt_in_rules:
# - anyobject_protocol
- array_init
- attributes
# - attributes
# - closure_body_length
- closure_end_indentation
- closure_spacing
48 changes: 20 additions & 28 deletions Examples/iOS/IngestViewController.swift
@@ -18,7 +18,6 @@ final class IngestViewController: UIViewController {
@IBOutlet private weak var audioDevicePicker: UIPickerView!
@IBOutlet private weak var audioMonoStereoSegmentCOntrol: UISegmentedControl!

private var pipIntentView = UIView()
private var currentEffect: VideoEffect?
private var currentPosition: AVCaptureDevice.Position = .back
private var retryCount: Int = 0
@@ -32,17 +31,22 @@ final class IngestViewController: UIViewController {
audioCapture.delegate = self
return audioCapture
}()
private var videoScreenObject = VideoTrackScreenObject()

override func viewDidLoad() {
super.viewDidLoad()

netStreamSwitcher.uri = Preference.defaultInstance.uri ?? ""

pipIntentView.layer.borderWidth = 1.0
pipIntentView.layer.borderColor = UIColor.white.cgColor
pipIntentView.bounds = IOVideoMixerSettings.default.regionOfInterest
pipIntentView.isUserInteractionEnabled = true
view.addSubview(pipIntentView)
stream.videoMixerSettings.size = .init(width: 720, height: 1280)
stream.screen.backgroundColor = UIColor.white.cgColor

videoScreenObject.cornerRadius = 16.0
videoScreenObject.track = 1
videoScreenObject.horizontalAlignment = .right
videoScreenObject.layoutMargin = .init(top: 16, left: 0, bottom: 0, right: 16)
videoScreenObject.size = .init(width: 160 * 2, height: 90 * 2)
try? stream.screen.addChild(videoScreenObject)

// To use the multi-camera feature, set stream.isMultiCamSessionEnabled = true before calling attachCamera or attachAudio.
stream.isMultiCamSessionEnabled = true
@@ -95,42 +99,30 @@ final class IngestViewController: UIViewController {
NotificationCenter.default.removeObserver(self)
}

override func viewWillTransition(to size: CGSize, with coordinator: any UIViewControllerTransitionCoordinator) {
if UIDevice.current.orientation.isLandscape {
stream.videoMixerSettings.size = .init(width: 1280, height: 720)
} else {
stream.videoMixerSettings.size = .init(width: 720, height: 1280)
}
}

// swiftlint:disable:next block_based_kvo
override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey: Any]?, context: UnsafeMutableRawPointer?) {
if Thread.isMainThread {
currentFPSLabel?.text = "\(stream.currentFPS)"
}
}

override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?) {
guard let touch = touches.first else {
return
}
if touch.view == pipIntentView {
let destLocation = touch.location(in: view)
let prevLocation = touch.previousLocation(in: view)
var currentFrame = pipIntentView.frame
let deltaX = destLocation.x - prevLocation.x
let deltaY = destLocation.y - prevLocation.y
currentFrame.origin.x += deltaX
currentFrame.origin.y += deltaY
pipIntentView.frame = currentFrame
stream.videoMixerSettings = IOVideoMixerSettings(
mode: stream.videoMixerSettings.mode,
cornerRadius: 16.0,
regionOfInterest: currentFrame,
direction: .east
)
}
}

@IBAction func rotateCamera(_ sender: UIButton) {
logger.info("rotateCamera")
if stream.isMultiCamSessionEnabled {
if stream.videoMixerSettings.mainTrack == 0 {
stream.videoMixerSettings.mainTrack = 1
videoScreenObject.track = 0
} else {
stream.videoMixerSettings.mainTrack = 0
videoScreenObject.track = 1
}
} else {
let position: AVCaptureDevice.Position = currentPosition == .back ? .front : .back
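
The deleted code above dragged pipIntentView around and fed its frame into IOVideoMixerSettings.regionOfInterest; that path is gone, and the picture-in-picture overlay is now a VideoTrackScreenObject positioned by alignment and layoutMargin. If the drag interaction were still wanted, one way to approximate it would be to adjust the margin from a pan gesture. A rough, hypothetical sketch — PiPDragController, handlePan and overlayOffset are made up; only VideoTrackScreenObject, its alignment and layoutMargin come from this commit:

    import HaishinKit
    import UIKit

    // Hypothetical stand-in for the removed pipIntentView drag handling: instead of
    // feeding a dragged frame into regionOfInterest, nudge the overlay's layout margin.
    final class PiPDragController: UIViewController {
        // Mirrors the videoScreenObject property added to IngestViewController above;
        // it is assumed to have been added to stream.screen elsewhere.
        private let videoScreenObject = VideoTrackScreenObject()
        private var overlayOffset = CGPoint.zero

        override func viewDidLoad() {
            super.viewDidLoad()
            videoScreenObject.horizontalAlignment = .right
            videoScreenObject.layoutMargin = .init(top: 16, left: 0, bottom: 0, right: 16)
            view.addGestureRecognizer(UIPanGestureRecognizer(target: self, action: #selector(handlePan(_:))))
        }

        @objc private func handlePan(_ recognizer: UIPanGestureRecognizer) {
            let translation = recognizer.translation(in: view)
            // The overlay is right-aligned, so dragging left should grow the right margin.
            overlayOffset.x -= translation.x
            overlayOffset.y += translation.y
            videoScreenObject.layoutMargin = .init(
                top: max(0, 16 + overlayOffset.y),
                left: 0,
                bottom: 0,
                right: max(0, 16 + overlayOffset.x)
            )
            recognizer.setTranslation(.zero, in: view)
        }
    }
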
47 changes: 47 additions & 0 deletions Examples/macOS/CameraIngestViewController.swift
@@ -21,6 +21,7 @@ final class CameraIngestViewController: NSViewController {
private var stream: IOStream {
return netStreamSwitcher.stream
}
private var textScreenObject = TextScreenObject()

override func viewDidLoad() {
super.viewDidLoad()
@@ -33,6 +34,46 @@ final class CameraIngestViewController: NSViewController {

override func viewDidAppear() {
super.viewDidAppear()

textScreenObject.horizontalAlignment = .right
textScreenObject.verticalAlignment = .bottom
textScreenObject.layoutMargin = .init(top: 0, left: 0, bottom: 16, right: 16)

stream.screen.backgroundColor = NSColor.black.cgColor

let videoScreenObject = VideoTrackScreenObject()
videoScreenObject.cornerRadius = 32.0
videoScreenObject.track = 1
videoScreenObject.horizontalAlignment = .right
videoScreenObject.layoutMargin = .init(top: 16, left: 0, bottom: 0, right: 16)
videoScreenObject.size = .init(width: 160 * 2, height: 90 * 2)
_ = videoScreenObject.registerVideoEffect(MonochromeEffect())

let imageScreenObject = ImageScreenObject()
let imageURL = URL(fileURLWithPath: Bundle.main.path(forResource: "game_jikkyou", ofType: "png") ?? "")
if let provider = CGDataProvider(url: imageURL as CFURL) {
imageScreenObject.verticalAlignment = .bottom
imageScreenObject.layoutMargin = .init(top: 0, left: 0, bottom: 16, right: 0)
imageScreenObject.cgImage = CGImage(
pngDataProviderSource: provider,
decode: nil,
shouldInterpolate: false,
intent: .defaultIntent
)
} else {
logger.info("no image")
}

let assetScreenObject = AssetScreenObject()
assetScreenObject.size = .init(width: 180, height: 180)
assetScreenObject.layoutMargin = .init(top: 16, left: 16, bottom: 0, right: 0)
try? assetScreenObject.startReading(AVAsset(url: URL(fileURLWithPath: Bundle.main.path(forResource: "SampleVideo_360x240_5mb", ofType: "mp4") ?? "")))
try? stream.screen.addChild(assetScreenObject)
try? stream.screen.addChild(videoScreenObject)
try? stream.screen.addChild(imageScreenObject)
try? stream.screen.addChild(textScreenObject)
stream.screen.delegate = self

stream.attachAudio(DeviceUtil.device(withLocalizedName: audioPopUpButton.titleOfSelectedItem!, mediaType: .audio))

var audios = AVCaptureDevice.devices(for: .audio)
@@ -79,3 +120,9 @@ final class CameraIngestViewController: NSViewController {
stream.attachCamera(device, track: 0)
}
}

extension CameraIngestViewController: ScreenDelegate {
func screen(_ screen: Screen, willLayout time: CMTime) {
textScreenObject.string = Date().description
}
}
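
screen(_:willLayout:) appears to run once per layout pass (it receives the frame's CMTime), so the work done there should stay cheap. A small, illustrative variation on the extension above keeps the timestamp overlay but reuses a cached DateFormatter instead of building Date().description each time — TimestampOverlay and the "HH:mm:ss" format are additions for illustration, not part of this commit:

    import AVFoundation
    import Foundation
    import HaishinKit

    // Illustrative only: a ScreenDelegate that renders the timestamp overlay with a
    // cached DateFormatter; this type and the format string are not in this commit.
    final class TimestampOverlay: NSObject, ScreenDelegate {
        let textScreenObject = TextScreenObject()

        private let formatter: DateFormatter = {
            let formatter = DateFormatter()
            formatter.dateFormat = "HH:mm:ss"
            return formatter
        }()

        func screen(_ screen: Screen, willLayout time: CMTime) {
            // Reusing one formatter avoids allocating a DateFormatter on every pass.
            textScreenObject.string = formatter.string(from: Date())
        }
    }

As in viewDidAppear above, the overlay's textScreenObject would still be added with stream.screen.addChild(_:) and the TimestampOverlay instance kept alive while it is the screen's delegate.
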
Binary file added Examples/macOS/game_jikkyou.png