//
//  BroadcastScreenCapturer.swift
//  Adapted from RCTWebRTC
//
//  Created by Alex-Dan Bumbu on 06/01/2021.
//

#if os(iOS)

import Foundation
import WebRTC
import Promises
import UIKit

class BroadcastScreenCapturer: BufferCapturer {

    /// Name of the socket file used to receive frames from the broadcast upload extension.
    static let kRTCScreensharingSocketFD = "rtc_SSFD"
    /// Info.plist key naming the App Group shared with the broadcast upload extension.
    static let kAppGroupIdentifierKey = "RTCAppGroupIdentifier"
    /// Info.plist key for the broadcast upload extension identifier (not referenced in this file).
    static let kRTCScreenSharingExtension = "RTCScreenSharingExtension"

    var frameReader: SocketConnectionFrameReader?

    override func startCapture() -> Promise<Bool> {

        super.startCapture().then(on: queue) { didStart -> Promise<Bool> in

            guard didStart, self.frameReader == nil else {
                // already started
                return Promise(false)
            }

            guard let identifier = self.lookUpAppGroupIdentifier(),
                  let filePath = self.filePathForIdentifier(identifier)
            else {
                return Promise(false)
            }

            return Promise { fulfill, _ in

                let bounds = UIScreen.main.bounds
                let width = bounds.size.width
                let height = bounds.size.height
                let screenDimension = CMVideoDimensions(width: Int32(width), height: Int32(height))

                // Pre-fill dimensions so we don't have to wait for the broadcast to start
                // before reporting them; the actual screen size is a safe prediction.
                let targetDimensions = screenDimension
                    .aspectFit(size: self.options.dimensions.max)
                    .toEncodeSafeDimensions()

                defer { self.dimensions = targetDimensions }

                let frameReader = SocketConnectionFrameReader()
                guard let socketConnection = BroadcastServerSocketConnection(filePath: filePath,
                                                                             streamDelegate: frameReader)
                else {
                    fulfill(false)
                    return
                }

                frameReader.didCapture = { pixelBuffer, rotation in
                    self.capture(pixelBuffer, rotation: rotation)
                }
                frameReader.startCapture(with: socketConnection)
                self.frameReader = frameReader
                fulfill(true)
            }
        }
    }

    override func stopCapture() -> Promise<Bool> {

        super.stopCapture().then(on: queue) { didStop -> Promise<Bool> in

            guard didStop, self.frameReader != nil else {
                // already stopped
                return Promise(false)
            }

            return Promise { fulfill, _ in
                self.frameReader?.stopCapture()
                self.frameReader = nil
                fulfill(true)
            }
        }
    }

    private func lookUpAppGroupIdentifier() -> String? {
        return Bundle.main.infoDictionary?[BroadcastScreenCapturer.kAppGroupIdentifierKey] as? String
    }

    private func filePathForIdentifier(_ identifier: String) -> String? {
        guard let sharedContainer = FileManager.default.containerURL(forSecurityApplicationGroupIdentifier: identifier)
        else {
            return nil
        }

        return sharedContainer.appendingPathComponent(BroadcastScreenCapturer.kRTCScreensharingSocketFD).path
    }
}
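
// Configuration sketch (illustrative, not part of the upstream file): the host app's
// Info.plist is expected to contain an "RTCAppGroupIdentifier" entry naming an App Group
// shared with the broadcast upload extension; the "rtc_SSFD" socket file is created inside
// that group's shared container. A minimal runtime check could look like this.
func isBroadcastCaptureConfigured() -> Bool {
    guard let identifier = Bundle.main.infoDictionary?[BroadcastScreenCapturer.kAppGroupIdentifierKey] as? String else {
        return false
    }
    return FileManager.default.containerURL(forSecurityApplicationGroupIdentifier: identifier) != nil
}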

extension LocalVideoTrack {

    /// Creates a track that captures the screen via a broadcast upload extension.
    public static func createBroadcastScreenCapturerTrack(name: String = Track.screenShareVideoName,
                                                          source: VideoTrack.Source = .screenShareVideo,
                                                          options: ScreenShareCaptureOptions = ScreenShareCaptureOptions()) -> LocalVideoTrack {
        let videoSource = Engine.createVideoSource(forScreenShare: true)
        let capturer = BroadcastScreenCapturer(delegate: videoSource, options: BufferCaptureOptions(from: options))
        return LocalVideoTrack(
            name: name,
            source: source,
            capturer: capturer,
            videoSource: videoSource
        )
    }
}
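
// Usage sketch (assumption, not part of the upstream file): publishing a broadcast
// screen share track with the Promises-based SDK API of the same era. The
// `publishVideoTrack(track:)` call and its return type may differ in other SDK
// versions; treat this as illustrative only.
func publishBroadcastScreenShare(to participant: LocalParticipant) -> Promise<LocalTrackPublication> {
    let track = LocalVideoTrack.createBroadcastScreenCapturerTrack()
    return participant.publishVideoTrack(track: track)
}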
#endif