Skip to content

Commit

Permalink
Merge pull request #1633 from shogo4405/feature/migration-1618
Browse files Browse the repository at this point in the history
Take timestamps into consideration in VideoTrackScreenObject.
  • Loading branch information
shogo4405 authored Nov 26, 2024
2 parents e83507c + fdf193c commit 2957ae2
Show file tree
Hide file tree
Showing 7 changed files with 96 additions and 20 deletions.
11 changes: 8 additions & 3 deletions HaishinKit/Sources/Mixer/MediaMixer.swift
Original file line number Diff line number Diff line change
Expand Up @@ -321,8 +321,8 @@ public final actor MediaMixer {
Task { @ScreenActor in
displayLink.preferredFramesPerSecond = await Int(frameRate)
displayLink.startRunning()
for await _ in displayLink.updateFrames where displayLink.isRunning {
guard let buffer = screen.makeSampleBuffer() else {
for await updateFrame in displayLink.updateFrames where displayLink.isRunning {
guard let buffer = screen.makeSampleBuffer(updateFrame) else {
continue
}
for output in await self.outputs where await output.videoTrackId == UInt8.max {
Expand Down Expand Up @@ -358,7 +358,12 @@ extension MediaMixer: AsyncRunner {
Task {
for await inputs in videoIO.inputs where isRunning {
Task { @ScreenActor in
screen.append(inputs.0, buffer: inputs.1)
var sampleBuffer = inputs.1
screen.append(inputs.0, buffer: sampleBuffer)
if await videoMixerSettings.mainTrack == inputs.0 {
let diff = ceil((screen.targetTimestamp - sampleBuffer.presentationTimeStamp.seconds) * 10000) / 10000
screen.videoCaptureLatency = diff
}
}
for output in outputs where await output.videoTrackId == inputs.0 {
output.mixer(self, didOutput: inputs.1)
Expand Down
4 changes: 1 addition & 3 deletions HaishinKit/Sources/Mixer/VideoCaptureUnit.swift
Original file line number Diff line number Diff line change
Expand Up @@ -118,10 +118,8 @@ final class VideoCaptureUnit: CaptureUnit {
return
}
try? configuration?(capture)
try capture.attachDevice(device, session: session, videoUnit: self)
}
if device == nil {
videoMixer.reset(track)
try capture.attachDevice(device, session: session, videoUnit: self)
}
}

Expand Down
26 changes: 18 additions & 8 deletions HaishinKit/Sources/Screen/Screen.swift
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ public final class Screen: ScreenObjectContainerConvertible {
public static let size = CGSize(width: 1280, height: 720)

private static let lockFrags = CVPixelBufferLockFlags(rawValue: 0)
private static let preferredTimescale: CMTimeScale = 1000000000

/// The total of child counts.
public var childCounts: Int {
Expand Down Expand Up @@ -63,10 +64,11 @@ public final class Screen: ScreenObjectContainerConvertible {
}
#endif

var videoTrackScreenObject = VideoTrackScreenObject()
private var root: ScreenObjectContainer = .init()
var videoCaptureLatency: TimeInterval = 0.0
private(set) var renderer = ScreenRendererByCPU()
private var timeStamp: CMTime = .invalid
private(set) var targetTimestamp: TimeInterval = 0.0
private(set) var videoTrackScreenObject = VideoTrackScreenObject()
private var root: ScreenObjectContainer = .init()
private var attributes: [NSString: NSObject] {
return [
kCVPixelBufferPixelFormatTypeKey: NSNumber(value: kCVPixelFormatType_32ARGB),
Expand All @@ -81,6 +83,7 @@ public final class Screen: ScreenObjectContainerConvertible {
outputFormat = nil
}
}
private var presentationTimeStamp: CMTime = .zero

/// Creates a screen object.
public init() {
Expand Down Expand Up @@ -115,7 +118,10 @@ public final class Screen: ScreenObjectContainerConvertible {
}
}

func makeSampleBuffer() -> CMSampleBuffer? {
func makeSampleBuffer(_ updateFrame: DisplayLinkTime) -> CMSampleBuffer? {
defer {
targetTimestamp = updateFrame.targetTimestamp
}
var pixelBuffer: CVPixelBuffer?
pixelBufferPool?.createPixelBuffer(&pixelBuffer)
guard let pixelBuffer else {
Expand All @@ -134,13 +140,16 @@ public final class Screen: ScreenObjectContainerConvertible {
if let dictionary = CVBufferGetAttachments(pixelBuffer, .shouldNotPropagate) {
CVBufferSetAttachments(pixelBuffer, dictionary, .shouldPropagate)
}
let now = CMClock.hostTimeClock.time
let presentationTimeStamp = CMTime(seconds: updateFrame.timestamp - videoCaptureLatency, preferredTimescale: Self.preferredTimescale)
guard self.presentationTimeStamp <= presentationTimeStamp else {
return nil
}
self.presentationTimeStamp = presentationTimeStamp
var timingInfo = CMSampleTimingInfo(
duration: timeStamp == .invalid ? .zero : now - timeStamp,
presentationTimeStamp: now,
duration: CMTime(seconds: updateFrame.targetTimestamp - updateFrame.timestamp, preferredTimescale: Self.preferredTimescale),
presentationTimeStamp: presentationTimeStamp,
decodeTimeStamp: .invalid
)
timeStamp = now
var sampleBuffer: CMSampleBuffer?
guard CMSampleBufferCreateReadyWithImageBuffer(
allocator: kCFAllocatorDefault,
Expand All @@ -163,6 +172,7 @@ public final class Screen: ScreenObjectContainerConvertible {
defer {
try? sampleBuffer.imageBuffer?.unlockBaseAddress(Self.lockFrags)
}
renderer.presentationTimeStamp = sampleBuffer.presentationTimeStamp
renderer.setTarget(sampleBuffer.imageBuffer)
if let dimensions = sampleBuffer.formatDescription?.dimensions {
root.size = dimensions.size
Expand Down
27 changes: 21 additions & 6 deletions HaishinKit/Sources/Screen/ScreenObject.swift
Original file line number Diff line number Diff line change
Expand Up @@ -208,6 +208,7 @@ public final class ImageScreenObject: ScreenObject {

/// An object that manages offscreen rendering a video track source.
public final class VideoTrackScreenObject: ScreenObject, ChromaKeyProcessable {
static let capacity: Int = 3
public var chromaKeyColor: CGColor?

/// Specifies the track number how the displays the visual content.
Expand All @@ -230,6 +231,11 @@ public final class VideoTrackScreenObject: ScreenObject, ChromaKeyProcessable {
}
}

/// The frame rate.
public var frameRate: Int {
frameTracker.frameRate
}

override var blendMode: ScreenObject.BlendMode {
if 0.0 < cornerRadius || chromaKeyColor != nil {
return .alpha
Expand All @@ -238,19 +244,18 @@ public final class VideoTrackScreenObject: ScreenObject, ChromaKeyProcessable {
}

private var queue: TypedBlockQueue<CMSampleBuffer>?
private var effects: [any VideoEffect] = .init()
private var effects: [VideoEffect] = .init()
private var frameTracker = FrameTracker()

/// Create a screen object.
override public init() {
super.init()
horizontalAlignment = .center
do {
queue = try TypedBlockQueue(capacity: 1, handlers: .outputPTSSortedSampleBuffers)
queue = try TypedBlockQueue(capacity: Self.capacity, handlers: .outputPTSSortedSampleBuffers)
} catch {
logger.error(error)
}
Task {
horizontalAlignment = .center
}
}

/// Registers a video effect.
Expand All @@ -272,9 +277,11 @@ public final class VideoTrackScreenObject: ScreenObject, ChromaKeyProcessable {
}

override public func makeImage(_ renderer: some ScreenRenderer) -> CGImage? {
guard let sampleBuffer = queue?.dequeue(), let pixelBuffer = sampleBuffer.imageBuffer else {
guard let sampleBuffer = queue?.dequeue(renderer.presentationTimeStamp),
let pixelBuffer = sampleBuffer.imageBuffer else {
return nil
}
frameTracker.update(sampleBuffer.presentationTimeStamp)
// Resizing before applying the filter for performance optimization.
var image = CIImage(cvPixelBuffer: pixelBuffer).transformed(by: videoGravity.scale(
bounds.size,
Expand Down Expand Up @@ -307,12 +314,20 @@ public final class VideoTrackScreenObject: ScreenObject, ChromaKeyProcessable {
}
}

/// Draws this screen object, then requests another layout pass while sample
/// buffers remain queued so pending frames keep getting consumed.
/// NOTE(review): relies on `invalidateLayout()` (project helper, not visible
/// here) to schedule the next render — confirm against ScreenObject.
override public func draw(_ renderer: some ScreenRenderer) {
super.draw(renderer)
// A non-empty queue means more frames are waiting to be rendered.
if queue?.isEmpty == false {
invalidateLayout()
}
}

/// Appends a sample buffer to the playback queue (best-effort: enqueue
/// failures are ignored) and requests layout so the frame is picked up
/// on the next draw.
func enqueue(_ sampleBuffer: CMSampleBuffer) {
try? queue?.enqueue(sampleBuffer)
invalidateLayout()
}

/// Drops all queued sample buffers and clears the frame-rate tracking
/// state, then requests layout to reflect the emptied queue.
func reset() {
frameTracker.clear()
try? queue?.reset()
invalidateLayout()
}
Expand Down
5 changes: 5 additions & 0 deletions HaishinKit/Sources/Screen/ScreenRenderer.swift
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,8 @@ public protocol ScreenRenderer: AnyObject {
var backgroundColor: CGColor { get set }
/// The current screen bounds.
var bounds: CGRect { get }
/// The current presentationTimeStamp.
var presentationTimeStamp: CMTime { get }
/// Layouts a screen object.
func layout(_ screenObject: ScreenObject)
/// Draws a screen object.
Expand All @@ -25,6 +27,7 @@ final class ScreenRendererByCPU: ScreenRenderer {
static let doNotTile = vImage_Flags(kvImageDoNotTile)

var bounds: CGRect = .init(origin: .zero, size: Screen.size)
var presentationTimeStamp: CMTime = .zero

lazy var context = {
guard let deive = MTLCreateSystemDefaultDevice() else {
Expand Down Expand Up @@ -65,6 +68,7 @@ final class ScreenRendererByCPU: ScreenRenderer {
}
}
}

private var format = vImage_CGImageFormat(
bitsPerComponent: 8,
bitsPerPixel: 32,
Expand All @@ -73,6 +77,7 @@ final class ScreenRendererByCPU: ScreenRenderer {
version: 0,
decode: nil,
renderingIntent: .defaultIntent)

private var images: [ScreenObject: vImage_Buffer] = [:]
private var canvas: vImage_Buffer = .init()
private var converter: vImageConverter?
Expand Down
26 changes: 26 additions & 0 deletions HaishinKit/Sources/Util/FrameTracker.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
import CoreMedia

/// Measures the effective frame rate of a timestamped frame stream.
///
/// Feed each frame's presentation time to `update(_:)`; once at least
/// `Self.seconds` of media time has elapsed since the previous window
/// rotation, the number of frames observed in that window is published
/// as `frameRate`.
///
/// NOTE(review): because `rotated` starts at `.zero`, the very first
/// `update(_:)` with a timestamp >= 1s rotates immediately and reports
/// a rate of 1 until a full window has elapsed — confirm this is intended.
struct FrameTracker {
    /// Length of the measurement window, in media seconds.
    static let seconds = 1.0

    /// Frames counted during the most recently completed window
    /// (0 until the first window completes or after `clear()`).
    private(set) var frameRate: Int = 0
    // Frames accumulated in the current (incomplete) window.
    private var count = 0
    // Timestamp at which the current window started.
    private var rotated: CMTime = .zero

    init() {
    }

    /// Records one frame at `time` and, when a full `Self.seconds` has
    /// elapsed since the last rotation, publishes the window's count.
    mutating func update(_ time: CMTime) {
        count += 1
        if Self.seconds <= (time - rotated).seconds {
            rotated = time
            frameRate = count
            count = 0
        }
    }

    /// Resets all measurement state. Fix: also zeroes the published
    /// `frameRate`, so a reused tracker no longer reports the stale rate
    /// from before the reset.
    mutating func clear() {
        count = 0
        rotated = .zero
        frameRate = 0
    }
}
17 changes: 17 additions & 0 deletions HaishinKit/Sources/Util/TypedBlockQueue.swift
Original file line number Diff line number Diff line change
Expand Up @@ -46,3 +46,20 @@ final class TypedBlockQueue<T: AnyObject> {
try queue.reset()
}
}

extension TypedBlockQueue where T == CMSampleBuffer {
    /// Dequeues the newest sample buffer whose presentation timestamp does
    /// not exceed `presentationTimeStamp`, discarding any older buffers that
    /// precede it. Returns nil when no queued buffer is due yet.
    func dequeue(_ presentationTimeStamp: CMTime) -> CMSampleBuffer? {
        var newest: CMSampleBuffer?
        // Drain every buffer that is already due; the last one drained wins.
        while let candidate = head, candidate.presentationTimeStamp <= presentationTimeStamp {
            newest = dequeue()
        }
        return newest
    }
}

0 comments on commit 2957ae2

Please sign in to comment.