mirror of
https://github.com/yattee/yattee.git
synced 2026-04-14 11:36:56 +00:00
Yattee v2 rewrite
This commit is contained in:
1944
Yattee/Services/Player/MPV/MPVClient.swift
Normal file
1944
Yattee/Services/Player/MPV/MPVClient.swift
Normal file
File diff suppressed because it is too large
Load Diff
290
Yattee/Services/Player/MPV/MPVLogging.swift
Normal file
290
Yattee/Services/Player/MPV/MPVLogging.swift
Normal file
@@ -0,0 +1,290 @@
|
||||
//
|
||||
// MPVLogging.swift
|
||||
// Yattee
|
||||
//
|
||||
// Centralized MPV rendering diagnostic logging.
|
||||
// Logs to Console (print) AND LoggingService for persistence.
|
||||
//
|
||||
|
||||
import Foundation
|
||||
#if os(iOS) || os(tvOS)
|
||||
import OpenGLES
|
||||
#elseif os(macOS)
|
||||
import OpenGL
|
||||
#endif
|
||||
|
||||
/// Centralized MPV rendering diagnostic logging.
|
||||
/// Use this to diagnose rare rendering issues (black/green screen while audio plays).
|
||||
/// Centralized MPV rendering diagnostic logging.
/// Use this to diagnose rare rendering issues (black/green screen while audio plays).
/// Logs to Console (print) AND LoggingService for persistence.
enum MPVLogging {
    // MARK: - Setting Check

    /// Cached value of the "verboseMPVLogging" user default so hot render paths
    /// don't hit UserDefaults on every log call.
    private static var _cachedIsEnabled: Bool = false

    /// Uptime (nanoseconds) at which the cache was last refreshed.
    private static var _lastCheckTime: UInt64 = 0

    /// How long the cached setting stays valid before re-reading UserDefaults.
    private static let cacheDurationNanos: UInt64 = 1_000_000_000 // 1 second

    /// Protects `_cachedIsEnabled` / `_lastCheckTime`.
    /// Fix vs. original: the previous comment claimed "atomic operations", but
    /// plain static vars are not atomic in Swift — concurrent callers raced on
    /// both fields. A lock makes the cache genuinely safe from any thread.
    private static let stateLock = NSLock()

    /// Shared timestamp formatter.
    /// Fix vs. original: a DateFormatter was allocated on every log call, which
    /// is expensive. DateFormatter is documented as thread-safe (macOS 10.9+ /
    /// iOS 7+), so one shared instance is safe across queues.
    private static let timeFormatter: DateFormatter = {
        let formatter = DateFormatter()
        formatter.dateFormat = "HH:mm:ss.SSS"
        return formatter
    }()

    /// Check if verbose logging is enabled (cached for performance).
    /// Safe to call from any thread.
    private static func isEnabled() -> Bool {
        stateLock.lock()
        defer { stateLock.unlock() }

        let now = DispatchTime.now().uptimeNanoseconds

        // Refresh cache every second.
        if now - _lastCheckTime > cacheDurationNanos {
            _lastCheckTime = now
            // Read from UserDefaults directly for thread safety
            // (SettingsManager is @MainActor).
            _cachedIsEnabled = UserDefaults.standard.bool(forKey: "verboseMPVLogging")
        }

        return _cachedIsEnabled
    }

    // MARK: - Logging Functions

    /// Log a verbose MPV rendering diagnostic message.
    /// Only logs if verbose MPV logging is enabled in settings.
    /// Thread-safe and can be called from any queue.
    ///
    /// - Parameters:
    ///   - message: The main log message
    ///   - details: Optional additional details
    ///   - file: Source file (auto-captured)
    ///   - function: Function name (auto-captured)
    ///   - line: Line number (auto-captured)
    static func log(
        _ message: String,
        details: String? = nil,
        file: String = #file,
        function: String = #function,
        line: Int = #line
    ) {
        emit(message, details: details, emphasis: "", asWarning: false,
             file: file, function: function, line: line)
    }

    /// Log with warning level for potential issues.
    /// Same parameters and behavior as `log`, but emits at warning level with
    /// a ⚠️ marker in the console header.
    static func warn(
        _ message: String,
        details: String? = nil,
        file: String = #file,
        function: String = #function,
        line: Int = #line
    ) {
        emit(message, details: details, emphasis: "⚠️ ", asWarning: true,
             file: file, function: function, line: line)
    }

    /// Shared implementation backing `log` and `warn` (they previously
    /// duplicated this body).
    /// Fix vs. original: the warning header was missing the opening bracket
    /// around the timestamp ("⚠️ HH:mm:ss.SSS]" instead of "⚠️ [HH:mm:ss.SSS]"),
    /// producing malformed log lines.
    private static func emit(
        _ message: String,
        details: String?,
        emphasis: String,
        asWarning: Bool,
        file: String,
        function: String,
        line: Int
    ) {
        guard isEnabled() else { return }

        let timestamp = Self.timestamp()
        let threadName = Self.threadName()
        let fileName = (file as NSString).lastPathComponent

        // Log to Console immediately (thread-safe).
        print("[MPV-Verbose] \(emphasis)[\(timestamp)] [\(threadName)] \(message)")
        if let details {
            print(" \(details)")
        }
        print(" [\(fileName):\(line) \(function)]")

        // Log to LoggingService on MainActor for persistence.
        let logDetails = details.map { "\($0)\n[\(fileName):\(line) \(function)]" }
            ?? "[\(fileName):\(line) \(function)]"

        Task { @MainActor in
            LoggingService.shared.log(
                level: asWarning ? .warning : .debug,
                category: .mpv,
                message: "[MPV-Verbose] \(message)",
                details: logDetails
            )
        }
    }

    /// Log OpenGL/EAGL state for debugging context and framebuffer issues.
    ///
    /// - Parameters:
    ///   - prefix: Description of the operation (e.g., "createFramebuffer")
    ///   - framebuffer: The framebuffer ID
    ///   - renderbuffer: The renderbuffer ID
    ///   - width: Framebuffer width
    ///   - height: Framebuffer height
    ///   - contextCurrent: Whether the GL context is current
    ///   - framebufferComplete: Whether the framebuffer is complete (nil if not checked)
    static func logGLState(
        _ prefix: String,
        framebuffer: UInt32,
        renderbuffer: UInt32,
        width: Int32,
        height: Int32,
        contextCurrent: Bool,
        framebufferComplete: Bool? = nil
    ) {
        var state = "FB:\(framebuffer) RB:\(renderbuffer) \(width)x\(height) ctx:\(contextCurrent ? "✓" : "✗")"
        if let complete = framebufferComplete {
            state += " complete:\(complete ? "✓" : "✗")"
        }

        log("\(prefix): \(state)")
    }

    /// Log display link state changes.
    ///
    /// - Parameters:
    ///   - action: The action being performed (e.g., "start", "stop", "pause")
    ///   - isPaused: Current paused state
    ///   - targetFPS: Target frame rate if applicable
    ///   - reason: Optional reason for the action
    static func logDisplayLink(
        _ action: String,
        isPaused: Bool? = nil,
        targetFPS: Double? = nil,
        reason: String? = nil
    ) {
        var details: [String] = []
        if let isPaused {
            details.append("paused:\(isPaused)")
        }
        if let targetFPS {
            details.append("targetFPS:\(String(format: "%.1f", targetFPS))")
        }
        if let reason {
            details.append("reason:\(reason)")
        }

        let detailsStr = details.isEmpty ? nil : details.joined(separator: " ")
        log("DisplayLink \(action)", details: detailsStr)
    }

    /// Log view lifecycle events.
    ///
    /// - Parameters:
    ///   - event: The lifecycle event (e.g., "willMove(toSuperview:)", "didMoveToSuperview")
    ///   - hasSuperview: Whether the view has a superview after the event
    ///   - details: Additional context
    static func logViewLifecycle(
        _ event: String,
        hasSuperview: Bool,
        details: String? = nil
    ) {
        log("View \(event)", details: "hasSuperview:\(hasSuperview)" + (details.map { " \($0)" } ?? ""))
    }

    /// Log app lifecycle / scene phase transitions.
    ///
    /// - Parameters:
    ///   - event: The lifecycle event
    ///   - isPiPActive: Whether PiP is currently active
    ///   - isRendering: Whether rendering is active
    static func logAppLifecycle(
        _ event: String,
        isPiPActive: Bool? = nil,
        isRendering: Bool? = nil
    ) {
        var details: [String] = []
        if let isPiPActive {
            details.append("pip:\(isPiPActive)")
        }
        if let isRendering {
            details.append("rendering:\(isRendering)")
        }

        let detailsStr = details.isEmpty ? nil : details.joined(separator: " ")
        log("App \(event)", details: detailsStr)
    }

    /// Log rotation and fullscreen transitions.
    ///
    /// - Parameters:
    ///   - event: The transition event
    ///   - fromSize: Previous size if applicable
    ///   - toSize: Target size if applicable
    static func logTransition(
        _ event: String,
        fromSize: CGSize? = nil,
        toSize: CGSize? = nil
    ) {
        var details: [String] = []
        if let fromSize {
            details.append("from:\(Int(fromSize.width))x\(Int(fromSize.height))")
        }
        if let toSize {
            details.append("to:\(Int(toSize.width))x\(Int(toSize.height))")
        }

        let detailsStr = details.isEmpty ? nil : details.joined(separator: " ")
        log("Transition \(event)", details: detailsStr)
    }

    /// Log render operations (use sparingly to avoid log spam).
    ///
    /// - Parameters:
    ///   - event: The render event
    ///   - fbo: Framebuffer being rendered to
    ///   - width: Render width
    ///   - height: Render height
    ///   - success: Whether the operation succeeded
    static func logRender(
        _ event: String,
        fbo: Int32? = nil,
        width: Int32? = nil,
        height: Int32? = nil,
        success: Bool? = nil
    ) {
        var details: [String] = []
        if let fbo {
            details.append("fbo:\(fbo)")
        }
        if let width, let height {
            details.append("\(width)x\(height)")
        }
        if let success {
            details.append(success ? "✓" : "✗")
        }

        let detailsStr = details.isEmpty ? nil : details.joined(separator: " ")
        log("Render \(event)", details: detailsStr)
    }

    // MARK: - Private Helpers

    /// Wall-clock timestamp "HH:mm:ss.SSS" for console log headers.
    private static func timestamp() -> String {
        timeFormatter.string(from: Date())
    }

    /// Best-effort name of the calling thread: "main", the thread's name, or
    /// the current dispatch queue label as a fallback.
    private static func threadName() -> String {
        if Thread.isMainThread {
            return "main"
        }
        if let name = Thread.current.name, !name.isEmpty {
            return name
        }
        // Get queue label if available.
        let label = String(cString: __dispatch_queue_get_label(nil), encoding: .utf8) ?? "unknown"
        return label
    }
}
|
||||
293
Yattee/Services/Player/MPV/MPVOGLView.swift
Normal file
293
Yattee/Services/Player/MPV/MPVOGLView.swift
Normal file
@@ -0,0 +1,293 @@
|
||||
//
|
||||
// MPVOGLView.swift
|
||||
// Yattee
|
||||
//
|
||||
// NSView that hosts MPVOpenGLLayer and manages CADisplayLink for macOS.
|
||||
// Moves all rendering off the main thread for smooth UI during video playback.
|
||||
//
|
||||
|
||||
#if os(macOS)
|
||||
|
||||
import AppKit
|
||||
import CoreMedia
|
||||
import CoreVideo
|
||||
import Libmpv
|
||||
|
||||
// MARK: - MPVOGLView
|
||||
|
||||
/// View for MPV video rendering on macOS.
|
||||
/// Hosts an MPVOpenGLLayer and manages CADisplayLink for vsync timing.
|
||||
/// View for MPV video rendering on macOS.
/// Hosts an MPVOpenGLLayer and manages CADisplayLink for vsync timing.
final class MPVOGLView: NSView {
    // MARK: - Properties

    /// The OpenGL layer that handles rendering.
    /// Lazy because the layer's initializer takes `self` (see `commonInit`,
    /// which assigns it as the view's backing layer).
    private(set) lazy var videoLayer: MPVOpenGLLayer = {
        MPVOpenGLLayer(videoView: self)
    }()

    /// Reference to the MPV client. Weak — the client owns its own lifecycle.
    private weak var mpvClient: MPVClient?

    /// CADisplayLink for frame timing and vsync.
    /// NOTE(review): created via `NSView.displayLink(target:selector:)`, which
    /// requires macOS 14+ — confirm against the deployment target.
    private var displayLink: CADisplayLink?

    /// Whether the view has been uninitialized.
    private var isUninited = false

    /// Lock for thread-safe access to isUninited.
    private let uninitLock = NSLock()

    // MARK: - First Frame Tracking

    /// Tracks whether MPV has signaled it has a frame ready to render.
    /// Set from the main queue by the layer's draw path.
    var mpvHasFrameReady = false

    /// Callback when first frame is rendered. Forwarded to the layer, which
    /// owns the actual first-frame detection.
    var onFirstFrameRendered: (() -> Void)? {
        get { videoLayer.onFirstFrameRendered }
        set { videoLayer.onFirstFrameRendered = newValue }
    }

    // MARK: - Video Info

    /// Video frame rate from MPV (for debug overlay).
    var videoFPS: Double = 60.0

    /// Actual display link frame rate (refreshed from the screen's
    /// `maximumFramesPerSecond` in `updateDisplayRefreshRate`).
    var displayLinkActualFPS: Double = 60.0

    /// Current display link target frame rate (for debug overlay).
    /// Currently identical to the actual rate — there is no separate target.
    var displayLinkTargetFPS: Double {
        displayLinkActualFPS
    }

    // MARK: - PiP Properties (forwarded to layer)

    /// Whether to capture frames for PiP.
    var captureFramesForPiP: Bool {
        get { videoLayer.captureFramesForPiP }
        set { videoLayer.captureFramesForPiP = newValue }
    }

    /// Whether PiP is currently active.
    var isPiPActive: Bool {
        get { videoLayer.isPiPActive }
        set { videoLayer.isPiPActive = newValue }
    }

    /// Callback when a new frame is ready for PiP.
    var onFrameReady: ((CVPixelBuffer, CMTime) -> Void)? {
        get { videoLayer.onFrameReady }
        set { videoLayer.onFrameReady = newValue }
    }

    /// Video content width (actual video dimensions for letterbox cropping).
    var videoContentWidth: Int {
        get { videoLayer.videoContentWidth }
        set { videoLayer.videoContentWidth = newValue }
    }

    /// Video content height (actual video dimensions for letterbox cropping).
    var videoContentHeight: Int {
        get { videoLayer.videoContentHeight }
        set { videoLayer.videoContentHeight = newValue }
    }

    // MARK: - Initialization

    override init(frame frameRect: NSRect) {
        super.init(frame: frameRect)
        commonInit()
    }

    required init?(coder: NSCoder) {
        super.init(coder: coder)
        commonInit()
    }

    /// Convenience initializer with zero frame.
    convenience init() {
        self.init(frame: .zero)
    }

    /// Shared initialization: installs the video layer as the view's backing
    /// layer and configures scaling/resizing.
    private func commonInit() {
        // Set up layer-backed view.
        wantsLayer = true
        layer = videoLayer

        // Configure layer properties. Falls back to 2.0 (Retina) when there is
        // no main screen yet — corrected later in viewDidMoveToWindow /
        // viewDidChangeBackingProperties.
        videoLayer.contentsScale = NSScreen.main?.backingScaleFactor ?? 2.0

        // Configure view properties.
        autoresizingMask = [.width, .height]
    }

    deinit {
        // NOTE(review): deinit may run off the main thread; uninit() invalidates
        // the display link and touches the layer — confirm this is safe for the
        // callers' threading model.
        uninit()
    }

    // MARK: - Setup

    /// Set up with an MPV client: configures the layer's render context and
    /// starts the display link.
    /// - Throws: rethrows whatever `videoLayer.setup(with:)` throws.
    func setup(with client: MPVClient) throws {
        self.mpvClient = client

        // Set up the layer.
        try videoLayer.setup(with: client)

        // Start display link.
        startDisplayLink()
    }

    /// Async setup variant. Currently just wraps the synchronous setup.
    func setupAsync(with client: MPVClient) async throws {
        try setup(with: client)
    }

    // MARK: - View Lifecycle

    override var isOpaque: Bool { true }

    override func viewDidMoveToWindow() {
        super.viewDidMoveToWindow()

        if let window {
            // Recreate display link for new window.
            stopDisplayLink()
            startDisplayLink()

            // Update contents scale for new window.
            videoLayer.contentsScale = window.backingScaleFactor
        }
        // Note: when moved out of a window (window == nil) the display link is
        // intentionally left as-is.
    }

    override func viewDidChangeBackingProperties() {
        super.viewDidChangeBackingProperties()

        // Update contents scale when backing properties change
        // (e.g., moving between Retina and non-Retina displays).
        if let scale = window?.backingScaleFactor {
            videoLayer.contentsScale = scale
        }

        // Update display refresh rate.
        updateDisplayRefreshRate()
    }

    override func draw(_ dirtyRect: NSRect) {
        // No-op - the layer handles all drawing.
    }

    // MARK: - CADisplayLink Management

    /// Create and schedule the display link on the main run loop.
    /// No-op if a link already exists.
    func startDisplayLink() {
        guard displayLink == nil else { return }

        // Create display link using modern API (macOS 14+).
        displayLink = displayLink(target: self, selector: #selector(displayLinkFired(_:)))
        displayLink?.add(to: .main, forMode: .common)

        // Update refresh rate info.
        updateDisplayRefreshRate()

        LoggingService.shared.debug("MPVOGLView: display link started", category: .mpv)
    }

    /// Invalidate and release the display link.
    func stopDisplayLink() {
        displayLink?.invalidate()
        displayLink = nil

        LoggingService.shared.debug("MPVOGLView: display link stopped", category: .mpv)
    }

    /// Per-frame tick: reports the swap to MPV so it can time vsync.
    @objc private func displayLinkFired(_ sender: CADisplayLink) {
        // Check if uninited (thread-safe).
        uninitLock.lock()
        let uninited = isUninited
        uninitLock.unlock()

        guard !uninited else { return }

        // Report frame swap to MPV for vsync timing.
        mpvClient?.reportSwap()
    }

    /// Update display link for the current display.
    func updateDisplayLink() {
        // With CADisplayLink, we just need to update the refresh rate info.
        updateDisplayRefreshRate()
    }

    /// Update the cached display refresh rate from the current screen,
    /// defaulting to 60 Hz when no screen is available or the value is invalid.
    private func updateDisplayRefreshRate() {
        guard let screen = window?.screen else {
            displayLinkActualFPS = 60.0
            return
        }

        // Get refresh rate from screen.
        displayLinkActualFPS = Double(screen.maximumFramesPerSecond)
        if displayLinkActualFPS <= 0 {
            displayLinkActualFPS = 60.0
        }

        LoggingService.shared.debug("MPVOGLView: display refresh rate: \(displayLinkActualFPS) Hz", category: .mpv)
    }

    // MARK: - Public Methods

    /// Reset first frame tracking (call when loading new content).
    func resetFirstFrameTracking() {
        mpvHasFrameReady = false
        videoLayer.resetFirstFrameTracking()
    }

    /// Clear the view to black.
    func clearToBlack() {
        videoLayer.clearToBlack()
    }

    /// Pause rendering.
    func pauseRendering() {
        // For now, just stop triggering updates.
        // The layer will still respond to explicit update() calls.
    }

    /// Resume rendering by forcing one layer update.
    func resumeRendering() {
        videoLayer.update(force: true)
    }

    /// Update cached time position for PiP timestamps.
    func updateTimePosition(_ time: Double) {
        videoLayer.updateTimePosition(time)
    }

    /// Clear the main view for PiP transition (stub for now).
    func clearMainViewForPiP() {
        clearToBlack()
    }

    /// Update PiP target render size - forces recreation of PiP capture resources.
    func updatePiPTargetSize(_ size: CMVideoDimensions) {
        videoLayer.updatePiPTargetSize(size)
    }

    // MARK: - Cleanup

    /// Uninitialize the view and release resources. Idempotent — subsequent
    /// calls are no-ops thanks to the `isUninited` flag.
    func uninit() {
        uninitLock.lock()
        defer { uninitLock.unlock() }

        guard !isUninited else { return }
        isUninited = true

        stopDisplayLink()
        videoLayer.uninit()

        // Note: onFirstFrameRendered and onFrameReady are forwarded to videoLayer,
        // and videoLayer.uninit() clears them.
    }
}
|
||||
|
||||
#endif
|
||||
862
Yattee/Services/Player/MPV/MPVOpenGLLayer.swift
Normal file
862
Yattee/Services/Player/MPV/MPVOpenGLLayer.swift
Normal file
@@ -0,0 +1,862 @@
|
||||
//
|
||||
// MPVOpenGLLayer.swift
|
||||
// Yattee
|
||||
//
|
||||
// CAOpenGLLayer subclass for MPV rendering on macOS.
|
||||
// Renders on a background thread to avoid blocking the main thread.
|
||||
//
|
||||
|
||||
#if os(macOS)
|
||||
|
||||
import AppKit
|
||||
import OpenGL.GL
|
||||
import OpenGL.GL3
|
||||
import Libmpv
|
||||
import CoreMedia
|
||||
import CoreVideo
|
||||
|
||||
// MARK: - OpenGL Pixel Format Attributes
|
||||
|
||||
private let glFormatBase: [CGLPixelFormatAttribute] = [
|
||||
kCGLPFAOpenGLProfile,
|
||||
CGLPixelFormatAttribute(kCGLOGLPVersion_3_2_Core.rawValue),
|
||||
kCGLPFAAccelerated,
|
||||
kCGLPFADoubleBuffer,
|
||||
kCGLPFAAllowOfflineRenderers,
|
||||
CGLPixelFormatAttribute(0)
|
||||
]
|
||||
|
||||
private let glFormat10Bit: [CGLPixelFormatAttribute] = [
|
||||
kCGLPFAOpenGLProfile,
|
||||
CGLPixelFormatAttribute(kCGLOGLPVersion_3_2_Core.rawValue),
|
||||
kCGLPFAAccelerated,
|
||||
kCGLPFADoubleBuffer,
|
||||
kCGLPFAAllowOfflineRenderers,
|
||||
kCGLPFAColorSize,
|
||||
CGLPixelFormatAttribute(64),
|
||||
kCGLPFAColorFloat,
|
||||
CGLPixelFormatAttribute(0)
|
||||
]
|
||||
|
||||
// MARK: - MPVOpenGLLayer
|
||||
|
||||
/// OpenGL layer for MPV rendering on macOS.
|
||||
/// Renders on a background thread to avoid blocking the main thread during video playback.
|
||||
final class MPVOpenGLLayer: CAOpenGLLayer {
|
||||
// MARK: - Properties
|
||||
|
||||
/// Reference to the video view that hosts this layer.
|
||||
private weak var videoView: MPVOGLView?
|
||||
|
||||
/// Reference to the MPV client for rendering.
|
||||
private weak var mpvClient: MPVClient?
|
||||
|
||||
/// Dedicated queue for OpenGL rendering (off main thread).
|
||||
private let renderQueue = DispatchQueue(label: "stream.yattee.mpv.render", qos: .userInteractive)
|
||||
|
||||
/// CGL context for OpenGL rendering.
|
||||
private let cglContext: CGLContextObj
|
||||
|
||||
/// CGL pixel format used to create the context.
|
||||
private let cglPixelFormat: CGLPixelFormatObj
|
||||
|
||||
/// Lock to single-thread calls to `display`.
|
||||
private let displayLock = NSRecursiveLock()
|
||||
|
||||
/// Buffer depth (8 for standard, 16 for 10-bit).
|
||||
private var bufferDepth: GLint = 8
|
||||
|
||||
/// Current framebuffer object ID.
|
||||
private var fbo: GLint = 1
|
||||
|
||||
/// When `true` the frame needs to be rendered.
|
||||
private var needsFlip = false
|
||||
private let needsFlipLock = NSLock()
|
||||
|
||||
/// When `true` drawing will proceed even if mpv indicates nothing needs to be done.
|
||||
private var forceDraw = false
|
||||
private let forceDrawLock = NSLock()
|
||||
|
||||
/// Whether the layer has been set up with an MPV client.
|
||||
private var isSetup = false
|
||||
|
||||
/// Whether the layer is being cleaned up.
|
||||
private var isUninited = false
|
||||
|
||||
/// Tracks whether first frame has been rendered.
|
||||
private var hasRenderedFirstFrame = false
|
||||
|
||||
/// Callback when first frame is rendered.
|
||||
var onFirstFrameRendered: (() -> Void)?
|
||||
|
||||
// MARK: - PiP Capture Properties
|
||||
|
||||
/// Zero-copy texture cache for efficient PiP capture.
|
||||
private var textureCache: CVOpenGLTextureCache?
|
||||
|
||||
/// Framebuffer for PiP capture.
|
||||
private var pipFramebuffer: GLuint = 0
|
||||
|
||||
/// Texture from CVOpenGLTextureCache (bound to pixel buffer).
|
||||
private var pipTexture: CVOpenGLTexture?
|
||||
|
||||
/// Pixel buffer for PiP capture (IOSurface-backed for zero-copy).
|
||||
private var pipPixelBuffer: CVPixelBuffer?
|
||||
|
||||
/// Current PiP capture dimensions.
|
||||
private var pipCaptureWidth: Int = 0
|
||||
private var pipCaptureHeight: Int = 0
|
||||
|
||||
/// Offscreen render FBO for PiP mode (when layer isn't visible).
|
||||
private var pipRenderFBO: GLuint = 0
|
||||
|
||||
/// Render texture for PiP mode FBO.
|
||||
private var pipRenderTexture: GLuint = 0
|
||||
|
||||
/// Dimensions of the PiP render FBO.
|
||||
private var pipRenderWidth: Int = 0
|
||||
private var pipRenderHeight: Int = 0
|
||||
|
||||
/// Whether to capture frames for PiP.
|
||||
var captureFramesForPiP = false
|
||||
|
||||
/// Whether PiP is currently active.
|
||||
var isPiPActive = false
|
||||
|
||||
/// Callback when a frame is ready for PiP.
|
||||
var onFrameReady: ((CVPixelBuffer, CMTime) -> Void)?
|
||||
|
||||
/// Video content width (actual video, not view size) - for letterbox/pillarbox cropping.
|
||||
var videoContentWidth: Int = 0
|
||||
|
||||
/// Video content height (actual video, not view size) - for letterbox/pillarbox cropping.
|
||||
var videoContentHeight: Int = 0
|
||||
|
||||
/// Cached time position for PiP presentation timestamps.
|
||||
private var cachedTimePos: Double = 0
|
||||
|
||||
/// Frame counter for PiP logging.
|
||||
private var pipFrameCount: UInt64 = 0
|
||||
|
||||
// MARK: - Initialization
|
||||
|
||||
    /// Creates an MPVOpenGLLayer for the given video view.
    /// Builds the CGL pixel format (10-bit preferred) and OpenGL context up
    /// front, then configures layer appearance and color handling.
    init(videoView: MPVOGLView) {
        self.videoView = videoView

        // Create pixel format (try 10-bit first, fall back to 8-bit).
        let (pixelFormat, depth) = MPVOpenGLLayer.createPixelFormat()
        self.cglPixelFormat = pixelFormat
        self.bufferDepth = depth

        // Create OpenGL context.
        self.cglContext = MPVOpenGLLayer.createContext(pixelFormat: pixelFormat)

        super.init()

        // Configure layer.
        autoresizingMask = [.layerWidthSizable, .layerHeightSizable]
        backgroundColor = NSColor.black.cgColor
        isOpaque = true

        // Set color space to device RGB (sRGB) to prevent color space conversion issues.
        // Without this, macOS may apply unwanted gamma/color transformations.
        colorspace = CGColorSpaceCreateDeviceRGB()

        // Use appropriate contents format for bit depth.
        if bufferDepth > 8 {
            contentsFormat = .RGBA16Float
        }

        // Start with synchronous drawing disabled (we control updates via renderQueue).
        isAsynchronous = false

        // Capture the depth into a local so the @MainActor task doesn't touch
        // layer state from another context.
        let colorDepth = bufferDepth
        Task { @MainActor in
            LoggingService.shared.debug("MPVOpenGLLayer: initialized with \(colorDepth)-bit color", category: .mpv)
        }
    }

    /// Creates a shadow copy of the layer (called by Core Animation during scale changes).
    /// Shares the CGL context/pixel format with the source layer so the copy
    /// renders into the same GL state.
    override init(layer: Any) {
        // NOTE(review): force cast relies on Core Animation always passing a
        // layer of the same class here — standard CA contract, but a crash
        // point if ever violated.
        let previousLayer = layer as! MPVOpenGLLayer
        self.videoView = previousLayer.videoView
        self.mpvClient = previousLayer.mpvClient
        self.cglPixelFormat = previousLayer.cglPixelFormat
        self.cglContext = previousLayer.cglContext
        self.bufferDepth = previousLayer.bufferDepth
        self.isSetup = previousLayer.isSetup

        super.init(layer: layer)

        // Mirror the visual configuration of the source layer.
        autoresizingMask = previousLayer.autoresizingMask
        backgroundColor = previousLayer.backgroundColor
        isOpaque = previousLayer.isOpaque
        colorspace = previousLayer.colorspace
        contentsFormat = previousLayer.contentsFormat
        isAsynchronous = previousLayer.isAsynchronous

        Task { @MainActor in
            LoggingService.shared.debug("MPVOpenGLLayer: created shadow copy", category: .mpv)
        }
    }

    /// Not supported — this layer is only created programmatically.
    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    deinit {
        uninit()
    }
|
||||
|
||||
// MARK: - Setup
|
||||
|
||||
    /// Set up the layer with an MPV client.
    /// Creates the MPV render context against this layer's CGL context and
    /// wires the render-update callback. Idempotent: returns early (without
    /// throwing) if already set up.
    /// - Throws: `MPVRenderError.renderContextFailed` when MPV's render
    ///   context cannot be created.
    func setup(with client: MPVClient) throws {
        guard !isSetup else {
            Task { @MainActor in
                LoggingService.shared.debug("MPVOpenGLLayer: already set up", category: .mpv)
            }
            return
        }

        self.mpvClient = client

        // Make context current for render context creation.
        CGLSetCurrentContext(cglContext)

        // Create MPV render context.
        let success = client.createRenderContext(getProcAddress: macOSGetProcAddress)
        guard success else {
            Task { @MainActor in
                LoggingService.shared.error("MPVOpenGLLayer: failed to create MPV render context", category: .mpv)
            }
            throw MPVRenderError.renderContextFailed(-1)
        }

        // Store CGL context in client for locking.
        client.setOpenGLContext(cglContext)

        // Set up render update callback.
        client.onRenderUpdate = { [weak self] in
            self?.update()
        }

        // Note: We don't set onVideoFrameReady here anymore.
        // The mpvHasFrameReady flag is now set in draw() when we actually render a frame.
        // This is more accurate and avoids the issue where the render callback
        // was consuming the frame-ready flag before canDraw() could check it.

        isSetup = true
        Task { @MainActor in
            LoggingService.shared.debug("MPVOpenGLLayer: setup complete", category: .mpv)
        }
    }

    /// Clean up resources: PiP capture state and all callbacks.
    /// Idempotent — guarded by `isUninited`. Called from deinit, so it must
    /// not assume it runs on the main thread.
    func uninit() {
        guard !isUninited else { return }
        isUninited = true

        // Clean up PiP capture resources.
        destroyPiPCapture()
        onFrameReady = nil

        // Clear callbacks so MPV no longer drives this (dying) layer.
        mpvClient?.onRenderUpdate = nil
        mpvClient?.onVideoFrameReady = nil
        onFirstFrameRendered = nil

        Task { @MainActor in
            LoggingService.shared.debug("MPVOpenGLLayer: uninit complete", category: .mpv)
        }
    }
|
||||
|
||||
// MARK: - CAOpenGLLayer Overrides
|
||||
|
||||
    /// Decide whether a frame should be drawn this cycle.
    /// Returns true when a force-draw was requested (e.g., after resume) or
    /// when MPV reports a frame update is pending; false while uninited or
    /// before setup.
    override func canDraw(
        inCGLContext ctx: CGLContextObj,
        pixelFormat pf: CGLPixelFormatObj,
        forLayerTime t: CFTimeInterval,
        displayTime ts: UnsafePointer<CVTimeStamp>?
    ) -> Bool {
        guard !isUninited, isSetup else { return false }

        // Check if force draw is requested or MPV has a frame ready.
        let force = forceDrawLock.withLock { forceDraw }
        if force { return true }

        return mpvClient?.shouldRenderUpdateFrame() ?? false
    }

    /// Render one MPV frame into the FBO Core Animation has bound for us.
    /// Runs on whatever thread CA invokes it on (our display() is dispatched
    /// to renderQueue). Also drives PiP capture and first-frame notification.
    override func draw(
        inCGLContext ctx: CGLContextObj,
        pixelFormat pf: CGLPixelFormatObj,
        forLayerTime t: CFTimeInterval,
        displayTime ts: UnsafePointer<CVTimeStamp>?
    ) {
        guard !isUninited, isSetup, let mpvClient else { return }

        // Reset flags — this draw consumes any pending flip/force request.
        needsFlipLock.withLock { needsFlip = false }
        forceDrawLock.withLock { forceDraw = false }

        // Clear the buffer.
        glClear(GLbitfield(GL_COLOR_BUFFER_BIT))

        // Get current FBO binding and viewport dimensions — CA decides the
        // actual target FBO and size for this layer.
        var currentFBO: GLint = 0
        glGetIntegerv(GLenum(GL_DRAW_FRAMEBUFFER_BINDING), &currentFBO)

        var viewport: [GLint] = [0, 0, 0, 0]
        glGetIntegerv(GLenum(GL_VIEWPORT), &viewport)

        let width = viewport[2]
        let height = viewport[3]

        // A zero-sized viewport means there is nothing sane to render into.
        guard width > 0, height > 0 else { return }

        // Use the detected FBO (or fall back to the cached one when the
        // binding reads back as 0).
        if currentFBO != 0 {
            fbo = currentFBO
        }

        // Render the frame via MPV at the layer's bit depth.
        mpvClient.renderWithDepth(
            fbo: fbo,
            width: width,
            height: height,
            depth: bufferDepth
        )

        glFlush()

        // Capture frame for PiP if enabled.
        if captureFramesForPiP {
            captureFrameForPiP(viewWidth: width, viewHeight: height, mainFBO: fbo)
        }

        // Mark that we've rendered a frame (for first-frame tracking).
        // The flag is owned by the view on the main queue, so hop there.
        if let videoView, !videoView.mpvHasFrameReady {
            DispatchQueue.main.async { [weak self] in
                self?.videoView?.mpvHasFrameReady = true
            }
        }

        // Notify on first frame (once per layer lifetime; reset via
        // resetFirstFrameTracking on the view).
        if !hasRenderedFirstFrame {
            hasRenderedFirstFrame = true
            DispatchQueue.main.async { [weak self] in
                self?.onFirstFrameRendered?()
            }
        }
    }

    /// Hand CA the pixel format we created at init instead of letting it
    /// build a default one.
    override func copyCGLPixelFormat(forDisplayMask mask: UInt32) -> CGLPixelFormatObj {
        cglPixelFormat
    }

    /// Hand CA the shared GL context so MPV and the layer render into the
    /// same context.
    override func copyCGLContext(forPixelFormat pf: CGLPixelFormatObj) -> CGLContextObj {
        cglContext
    }

    /// Trigger a display update (dispatched to render queue).
    /// Serialized by `displayLock`; also handles the case where CA declines
    /// to call draw() by telling MPV to skip the frame so its queue keeps moving.
    override func display() {
        displayLock.lock()
        defer { displayLock.unlock() }

        // Remember whether this call was driven by a pending frame (needsFlip).
        let isUpdate = needsFlipLock.withLock { needsFlip }

        if Thread.isMainThread {
            super.display()
        } else {
            // When not on main thread, use explicit transaction.
            CATransaction.begin()
            super.display()
            CATransaction.commit()
        }

        // Flush any implicit transaction.
        CATransaction.flush()

        // Handle cases where canDraw/draw weren't called by AppKit but MPV has frames ready.
        // This can happen when the view is in another space or not visible.
        // We need to tell MPV to skip rendering to prevent frame buildup.
        // (draw() clears needsFlip, so it still being set means draw never ran.)
        let stillNeedsFlip = needsFlipLock.withLock { needsFlip }
        guard isUpdate && stillNeedsFlip else { return }

        // If we get here, display() was called but draw() wasn't invoked by AppKit.
        // Need to do a skip render to keep MPV's frame queue moving.
        guard let mpvClient, let renderContext = mpvClient.mpvRenderContext,
              mpvClient.shouldRenderUpdateFrame() else { return }

        // Must lock OpenGL context before calling mpv render functions.
        mpvClient.lockAndSetOpenGLContext()
        defer { mpvClient.unlockOpenGLContext() }

        // MPV_RENDER_PARAM_SKIP_RENDERING: advance MPV's frame queue without
        // producing output.
        var skip: CInt = 1
        withUnsafeMutablePointer(to: &skip) { skipPtr in
            var params: [mpv_render_param] = [
                mpv_render_param(type: MPV_RENDER_PARAM_SKIP_RENDERING, data: skipPtr),
                mpv_render_param(type: MPV_RENDER_PARAM_INVALID, data: nil)
            ]
            _ = params.withUnsafeMutableBufferPointer { paramsPtr in
                mpv_render_context_render(renderContext, paramsPtr.baseAddress)
            }
        }
    }
|
||||
|
||||
// MARK: - Public Methods
|
||||
|
||||
/// Request a render update (called when MPV signals a new frame).
///
/// - Parameter force: When `true`, sets `forceDraw` so the next draw pass
///   renders even if it normally would not (exact draw-side semantics live in
///   `draw`/`canDraw` — NOTE(review): confirm there).
///
/// Work is funneled onto `renderQueue`; `forceDraw` is set before `needsFlip`
/// so a concurrent draw cannot observe the flip flag without the force flag.
func update(force: Bool = false) {
    renderQueue.async { [weak self] in
        guard let self, !self.isUninited else { return }

        if force {
            self.forceDrawLock.withLock { self.forceDraw = true }
        }
        self.needsFlipLock.withLock { self.needsFlip = true }

        // When PiP is active, the layer may not be visible so CAOpenGLLayer.draw()
        // won't be called by Core Animation. We need to manually render and capture
        // frames for PiP.
        if self.isPiPActive && self.captureFramesForPiP {
            self.renderForPiP()
        } else {
            self.display()
        }
    }
}
|
||||
|
||||
/// Render a frame specifically for PiP capture (when main view is hidden).
///
/// Renders mpv's current frame into an offscreen FBO sized to the video
/// content (1920x1080 fallback), reports the swap for vsync pacing, then
/// captures the frame into the PiP pixel buffer. Must run on the render queue.
private func renderForPiP() {
    guard !isUninited, isSetup, let mpvClient else { return }
    guard mpvClient.shouldRenderUpdateFrame() else { return }

    // Lock and set OpenGL context (the context is shared with draw()).
    CGLLockContext(cglContext)
    CGLSetCurrentContext(cglContext)
    defer { CGLUnlockContext(cglContext) }

    // Use video dimensions for render size, or fall back to reasonable defaults
    let width = GLint(videoContentWidth > 0 ? videoContentWidth : 1920)
    let height = GLint(videoContentHeight > 0 ? videoContentHeight : 1080)

    guard width > 0, height > 0 else { return }

    // Set up offscreen render FBO if needed (no-op when size is unchanged)
    setupPiPRenderFBO(width: Int(width), height: Int(height))

    // Bail if FBO creation failed
    guard pipRenderFBO != 0 else { return }

    // Bind our render FBO
    glBindFramebuffer(GLenum(GL_FRAMEBUFFER), pipRenderFBO)
    glViewport(0, 0, width, height)

    // Render the frame to our FBO
    mpvClient.renderWithDepth(
        fbo: GLint(pipRenderFBO),
        width: width,
        height: height,
        depth: bufferDepth
    )

    glFlush()

    // Report frame swap for vsync timing - important for smooth PiP playback
    mpvClient.reportSwap()

    // Capture frame for PiP (blits from the offscreen FBO into the PiP buffer)
    captureFrameForPiP(viewWidth: width, viewHeight: height, mainFBO: GLint(pipRenderFBO))

    // Unbind FBO
    glBindFramebuffer(GLenum(GL_FRAMEBUFFER), 0)
}
|
||||
|
||||
/// Set up offscreen FBO for PiP rendering.
///
/// Creates (or recreates, on size change) an RGBA8 texture-backed framebuffer
/// used by `renderForPiP()` when the layer itself is not being drawn.
/// Must be called with the CGL context current.
///
/// - Parameters:
///   - width: Render target width in pixels.
///   - height: Render target height in pixels.
private func setupPiPRenderFBO(width: Int, height: Int) {
    // Skip if dimensions unchanged and FBO exists
    if width == pipRenderWidth && height == pipRenderHeight && pipRenderFBO != 0 {
        return
    }

    // Clean up existing FBO
    if pipRenderFBO != 0 {
        glDeleteFramebuffers(1, &pipRenderFBO)
        pipRenderFBO = 0
    }
    if pipRenderTexture != 0 {
        glDeleteTextures(1, &pipRenderTexture)
        pipRenderTexture = 0
    }

    pipRenderWidth = width
    pipRenderHeight = height

    // Create render texture (RGBA8, linear filtering)
    glGenTextures(1, &pipRenderTexture)
    glBindTexture(GLenum(GL_TEXTURE_2D), pipRenderTexture)
    glTexImage2D(
        GLenum(GL_TEXTURE_2D),
        0,
        GL_RGBA8,
        GLsizei(width),
        GLsizei(height),
        0,
        GLenum(GL_RGBA),
        GLenum(GL_UNSIGNED_BYTE),
        nil
    )
    glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_MIN_FILTER), GL_LINEAR)
    glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_MAG_FILTER), GL_LINEAR)
    glBindTexture(GLenum(GL_TEXTURE_2D), 0)

    // Create FBO and attach texture
    glGenFramebuffers(1, &pipRenderFBO)
    glBindFramebuffer(GLenum(GL_FRAMEBUFFER), pipRenderFBO)
    glFramebufferTexture2D(
        GLenum(GL_FRAMEBUFFER),
        GLenum(GL_COLOR_ATTACHMENT0),
        GLenum(GL_TEXTURE_2D),
        pipRenderTexture,
        0
    )

    // Check FBO status
    let status = glCheckFramebufferStatus(GLenum(GL_FRAMEBUFFER))
    if status != GL_FRAMEBUFFER_COMPLETE {
        Task { @MainActor in
            LoggingService.shared.warning("MPVOpenGLLayer: PiP render FBO incomplete: \(status)", category: .mpv)
        }
        // Fix: tear down the incomplete FBO instead of leaving it live.
        // Previously the broken FBO stayed non-zero, so renderForPiP()'s
        // `pipRenderFBO != 0` guard passed and mpv rendered into an unusable
        // framebuffer. This mirrors setupPiPCapture()'s incomplete-FBO handling.
        glBindFramebuffer(GLenum(GL_FRAMEBUFFER), 0)
        glDeleteFramebuffers(1, &pipRenderFBO)
        pipRenderFBO = 0
        glDeleteTextures(1, &pipRenderTexture)
        pipRenderTexture = 0
        // Reset cached size so a later call retries creation.
        pipRenderWidth = 0
        pipRenderHeight = 0
        return
    }

    glBindFramebuffer(GLenum(GL_FRAMEBUFFER), 0)

    Task { @MainActor in
        LoggingService.shared.debug("MPVOpenGLLayer: Created PiP render FBO \(width)x\(height)", category: .mpv)
    }
}
|
||||
|
||||
/// Clear the layer to black.
///
/// Dispatched to the render queue; clears the currently bound framebuffer of
/// the shared CGL context and then forces a display so the cleared frame is
/// actually shown.
func clearToBlack() {
    renderQueue.async { [weak self] in
        guard let self else { return }

        // Fix: lock the CGL context before issuing GL commands. The context
        // is shared with draw()/mpv render calls (renderForPiP() and the mpv
        // skip-render path both lock it), so unsynchronized access here could
        // race with an in-flight render.
        CGLLockContext(self.cglContext)
        CGLSetCurrentContext(self.cglContext)
        glClearColor(0.0, 0.0, 0.0, 1.0)
        glClear(GLbitfield(GL_COLOR_BUFFER_BIT))
        glFlush()
        CGLUnlockContext(self.cglContext)

        // Force a display to show the cleared frame
        self.update(force: true)
    }
}
|
||||
|
||||
/// Reset first frame tracking (call when loading new content).
/// After this, the next rendered frame fires `onFirstFrameRendered` again.
func resetFirstFrameTracking() {
    hasRenderedFirstFrame = false
}
|
||||
|
||||
// MARK: - Pixel Format and Context Creation
|
||||
|
||||
/// Create a CGL pixel format, trying 10-bit first, falling back to 8-bit.
/// - Returns: The chosen pixel format and its buffer depth
///   (16 for the 10-bit format, 8 for the base format).
private static func createPixelFormat() -> (CGLPixelFormatObj, GLint) {
    var matchCount: GLint = 0

    // Candidate formats in order of preference, each paired with the depth
    // value it implies and the message logged when it is selected.
    let candidates = [
        (glFormat10Bit, GLint(16), "MPVOpenGLLayer: created 10-bit pixel format"),
        (glFormatBase, GLint(8), "MPVOpenGLLayer: created 8-bit pixel format")
    ]

    for (attributes, depth, message) in candidates {
        var chosen: CGLPixelFormatObj?
        guard CGLChoosePixelFormat(attributes, &chosen, &matchCount) == kCGLNoError,
              let chosen else { continue }
        Task { @MainActor in
            LoggingService.shared.debug(message, category: .mpv)
        }
        return (chosen, depth)
    }

    // This should not happen on any reasonable Mac
    fatalError("MPVOpenGLLayer: failed to create any OpenGL pixel format")
}
|
||||
|
||||
/// Create a CGL context with the given pixel format.
///
/// Enables vsync and Apple's multi-threaded GL engine, makes the context
/// current, and returns it. Aborts the process if context creation fails.
private static func createContext(pixelFormat: CGLPixelFormatObj) -> CGLContextObj {
    var created: CGLContextObj?
    let status = CGLCreateContext(pixelFormat, nil, &created)

    guard status == kCGLNoError, let glContext = created else {
        fatalError("MPVOpenGLLayer: failed to create OpenGL context: \(status)")
    }

    // Enable vsync
    var swapInterval: GLint = 1
    CGLSetParameter(glContext, kCGLCPSwapInterval, &swapInterval)

    // Enable multi-threaded OpenGL engine for better performance
    CGLEnable(glContext, kCGLCEMPEngine)

    CGLSetCurrentContext(glContext)

    Task { @MainActor in
        LoggingService.shared.debug("MPVOpenGLLayer: created CGL context with vsync and multi-threaded engine", category: .mpv)
    }

    return glContext
}
|
||||
|
||||
// MARK: - PiP Capture Methods
|
||||
|
||||
/// Update the cached time position for PiP presentation timestamps.
/// - Parameter time: Current playback position in seconds; read later by
///   `captureFrameForPiP` to build each frame's `CMTime`.
func updateTimePosition(_ time: Double) {
    cachedTimePos = time
}
|
||||
|
||||
/// Update the target PiP capture size and force recreation of capture resources.
/// Called when PiP window size changes (via didTransitionToRenderSize).
///
/// - Parameter size: New render dimensions reported by AVKit. Note: only used
///   for logging here; zeroing the cached dimensions makes the next
///   `setupPiPCapture` call rebuild its resources at whatever size it is given.
func updatePiPTargetSize(_ size: CMVideoDimensions) {
    // Force recreation of capture resources at new size by resetting dimensions
    pipCaptureWidth = 0
    pipCaptureHeight = 0
    Task { @MainActor in
        LoggingService.shared.debug("MPVOpenGLLayer: Updated PiP target size to \(size.width)x\(size.height)", category: .mpv)
    }
}
|
||||
|
||||
/// Set up the texture cache and PiP framebuffer for zero-copy capture.
///
/// Builds the chain: CVOpenGLTextureCache -> IOSurface-backed CVPixelBuffer
/// -> GL texture -> FBO. Blitting into that FBO writes directly into the
/// pixel buffer handed to AVSampleBufferDisplayLayer (no CPU readback).
/// Any failure logs a warning and leaves capture disabled
/// (`pipFramebuffer == 0`). Must run with the CGL context current.
///
/// - Parameters:
///   - width: Capture width in pixels.
///   - height: Capture height in pixels.
private func setupPiPCapture(width: Int, height: Int) {
    // Skip if dimensions unchanged and resources exist
    if width == pipCaptureWidth && height == pipCaptureHeight && textureCache != nil {
        return
    }

    // Clean up existing resources
    destroyPiPCapture()

    pipCaptureWidth = width
    pipCaptureHeight = height

    // Create texture cache with our CGL context and pixel format
    var cache: CVOpenGLTextureCache?
    let cacheResult = CVOpenGLTextureCacheCreate(
        kCFAllocatorDefault,
        nil,
        cglContext,
        cglPixelFormat,
        nil,
        &cache
    )
    guard cacheResult == kCVReturnSuccess, let cache else {
        Task { @MainActor in
            LoggingService.shared.warning("MPVOpenGLLayer PiP: Failed to create texture cache: \(cacheResult)", category: .mpv)
        }
        return
    }
    textureCache = cache

    // Create pixel buffer with IOSurface backing for zero-copy
    let pixelBufferAttributes: [String: Any] = [
        kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA,
        kCVPixelBufferWidthKey as String: width,
        kCVPixelBufferHeightKey as String: height,
        // Empty dictionary opts in to IOSurface backing.
        kCVPixelBufferIOSurfacePropertiesKey as String: [:] as [String: Any],
        kCVPixelBufferOpenGLCompatibilityKey as String: true
    ]

    var pixelBuffer: CVPixelBuffer?
    let pbResult = CVPixelBufferCreate(
        kCFAllocatorDefault,
        width,
        height,
        kCVPixelFormatType_32BGRA,
        pixelBufferAttributes as CFDictionary,
        &pixelBuffer
    )
    guard pbResult == kCVReturnSuccess, let pixelBuffer else {
        Task { @MainActor in
            LoggingService.shared.warning("MPVOpenGLLayer PiP: Failed to create pixel buffer: \(pbResult)", category: .mpv)
        }
        return
    }
    pipPixelBuffer = pixelBuffer

    // Create GL texture from pixel buffer via texture cache
    var texture: CVOpenGLTexture?
    let texResult = CVOpenGLTextureCacheCreateTextureFromImage(
        kCFAllocatorDefault,
        cache,
        pixelBuffer,
        nil,
        &texture
    )
    guard texResult == kCVReturnSuccess, let texture else {
        Task { @MainActor in
            LoggingService.shared.warning("MPVOpenGLLayer PiP: Failed to create texture from pixel buffer: \(texResult)", category: .mpv)
        }
        return
    }
    pipTexture = texture

    // Get texture properties (macOS typically uses GL_TEXTURE_RECTANGLE_ARB)
    let textureTarget = CVOpenGLTextureGetTarget(texture)
    let textureName = CVOpenGLTextureGetName(texture)

    // Create FBO for PiP capture
    glGenFramebuffers(1, &pipFramebuffer)
    glBindFramebuffer(GLenum(GL_FRAMEBUFFER), pipFramebuffer)

    // Attach texture to FBO (target comes from the texture cache, not GL_TEXTURE_2D)
    glFramebufferTexture2D(
        GLenum(GL_FRAMEBUFFER),
        GLenum(GL_COLOR_ATTACHMENT0),
        textureTarget,
        textureName,
        0
    )

    // Verify FBO is complete; tear everything down on failure so callers
    // see pipFramebuffer == 0 and skip capture.
    let status = glCheckFramebufferStatus(GLenum(GL_FRAMEBUFFER))
    if status != GL_FRAMEBUFFER_COMPLETE {
        Task { @MainActor in
            LoggingService.shared.warning("MPVOpenGLLayer PiP: Framebuffer incomplete: \(status)", category: .mpv)
        }
        destroyPiPCapture()
        return
    }

    // Restore default framebuffer
    glBindFramebuffer(GLenum(GL_FRAMEBUFFER), 0)

    Task { @MainActor in
        LoggingService.shared.debug("MPVOpenGLLayer PiP: Zero-copy capture setup complete (\(width)x\(height)), textureTarget=\(textureTarget)", category: .mpv)
    }
}
|
||||
|
||||
/// Clean up PiP capture resources.
/// Deletes both the zero-copy capture FBO and the offscreen render FBO,
/// releases the CV texture/pixel buffer/cache, and zeroes cached dimensions
/// so the next setup call recreates everything. Must run with the CGL
/// context current (GL delete calls).
private func destroyPiPCapture() {
    if pipFramebuffer != 0 {
        glDeleteFramebuffers(1, &pipFramebuffer)
        pipFramebuffer = 0
    }
    if pipRenderFBO != 0 {
        glDeleteFramebuffers(1, &pipRenderFBO)
        pipRenderFBO = 0
    }
    if pipRenderTexture != 0 {
        glDeleteTextures(1, &pipRenderTexture)
        pipRenderTexture = 0
    }
    pipTexture = nil
    pipPixelBuffer = nil
    // Flush before dropping the cache so recycled textures are released.
    if let cache = textureCache {
        CVOpenGLTextureCacheFlush(cache, 0)
    }
    textureCache = nil
    pipCaptureWidth = 0
    pipCaptureHeight = 0
    pipRenderWidth = 0
    pipRenderHeight = 0
}
|
||||
|
||||
/// Capture the current framebuffer contents as a CVPixelBuffer for PiP (zero-copy).
///
/// Blits the video region of `mainFBO` (cropping letterbox/pillarbox bars)
/// into the IOSurface-backed PiP framebuffer, then delivers the shared pixel
/// buffer via `onFrameReady` on the main queue. No-op when no callback is set
/// or capture resources are unavailable.
///
/// - Parameters:
///   - viewWidth: Framebuffer width in pixels.
///   - viewHeight: Framebuffer height in pixels.
///   - mainFBO: Framebuffer to read from (screen FBO or offscreen PiP FBO).
private func captureFrameForPiP(viewWidth: GLint, viewHeight: GLint, mainFBO: GLint) {
    guard viewWidth > 0, viewHeight > 0, let callback = onFrameReady else { return }

    // Use actual video dimensions for capture (avoid capturing letterbox/pillarbox black bars)
    // If video dimensions not set, fall back to view dimensions
    let captureVideoWidth = videoContentWidth > 0 ? videoContentWidth : Int(viewWidth)
    let captureVideoHeight = videoContentHeight > 0 ? videoContentHeight : Int(viewHeight)

    // Set up or update capture resources if needed (based on video dimensions)
    setupPiPCapture(width: captureVideoWidth, height: captureVideoHeight)

    guard pipFramebuffer != 0, let pixelBuffer = pipPixelBuffer else { return }

    // Calculate the source rect in the framebuffer that contains just the video
    // (excluding letterbox/pillarbox black bars)
    let videoAspect = CGFloat(captureVideoWidth) / CGFloat(captureVideoHeight)
    let viewAspect = CGFloat(viewWidth) / CGFloat(viewHeight)

    var srcX: GLint = 0
    var srcY: GLint = 0
    var srcWidth = viewWidth
    var srcHeight = viewHeight

    if videoAspect > viewAspect {
        // Video is wider than view - letterboxed (black bars on top/bottom)
        let scaledHeight = CGFloat(viewWidth) / videoAspect
        srcY = GLint((CGFloat(viewHeight) - scaledHeight) / 2)
        srcHeight = GLint(scaledHeight)
    } else if videoAspect < viewAspect {
        // Video is narrower than view - pillarboxed (black bars on left/right)
        let scaledWidth = CGFloat(viewHeight) * videoAspect
        srcX = GLint((CGFloat(viewWidth) - scaledWidth) / 2)
        srcWidth = GLint(scaledWidth)
    }

    // Bind PiP framebuffer as draw target
    glBindFramebuffer(GLenum(GL_DRAW_FRAMEBUFFER), pipFramebuffer)
    glBindFramebuffer(GLenum(GL_READ_FRAMEBUFFER), GLenum(mainFBO))

    // Blit from main framebuffer to PiP framebuffer (with scaling and vertical flip)
    // Source: just the video area in main framebuffer (bottom-left origin)
    // Dest: PiP texture (top-left origin, so we flip Y)
    glBlitFramebuffer(
        srcX, srcY, srcX + srcWidth, srcY + srcHeight, // src rect (video area only)
        0, GLint(pipCaptureHeight), GLint(pipCaptureWidth), 0, // dst rect (flipped Y)
        GLbitfield(GL_COLOR_BUFFER_BIT),
        GLenum(GL_LINEAR)
    )

    // Restore main framebuffer
    glBindFramebuffer(GLenum(GL_FRAMEBUFFER), GLenum(mainFBO))

    // Flush to ensure texture is updated before passing to AVSampleBufferDisplayLayer
    glFlush()

    // Create presentation time from cached time position
    let presentationTime = CMTime(seconds: cachedTimePos, preferredTimescale: 90000)

    // Log periodically (first 3 frames, then every 120th)
    pipFrameCount += 1
    if pipFrameCount <= 3 || pipFrameCount % 120 == 0 {
        // Capture values to avoid capturing self in @Sendable closure
        let frameCount = pipFrameCount
        let timePos = cachedTimePos
        Task { @MainActor in
            LoggingService.shared.debug("MPVOpenGLLayer PiP: Captured frame #\(frameCount), \(captureVideoWidth)x\(captureVideoHeight), time=\(timePos)", category: .mpv)
        }
    }

    // Deliver the frame - pixel buffer is already populated via zero-copy
    DispatchQueue.main.async {
        callback(pixelBuffer, presentationTime)
    }
}
|
||||
}
|
||||
|
||||
// MARK: - OpenGL Proc Address
|
||||
|
||||
/// Get OpenGL function address for macOS.
///
/// Resolves `name` against the system OpenGL framework bundle
/// (`com.apple.opengl`). Returns `nil` when the name is absent or the
/// framework bundle cannot be found. `ctx` is unused; it exists to match
/// mpv's `get_proc_address` callback signature.
private func macOSGetProcAddress(
    _ ctx: UnsafeMutableRawPointer?,
    _ name: UnsafePointer<CChar>?
) -> UnsafeMutableRawPointer? {
    guard
        let name,
        let framework = CFBundleGetBundleWithIdentifier("com.apple.opengl" as CFString)
    else {
        return nil
    }

    return CFBundleGetFunctionPointerForName(framework, String(cString: name) as CFString)
}
|
||||
|
||||
#endif
|
||||
986
Yattee/Services/Player/MPV/MPVPiPBridge.swift
Normal file
986
Yattee/Services/Player/MPV/MPVPiPBridge.swift
Normal file
@@ -0,0 +1,986 @@
|
||||
//
|
||||
// MPVPiPBridge.swift
|
||||
// Yattee
|
||||
//
|
||||
// Native Picture-in-Picture support for MPV using AVSampleBufferDisplayLayer.
|
||||
//
|
||||
|
||||
#if os(iOS) || os(macOS)
|
||||
import AVKit
|
||||
import CoreMedia
|
||||
import CoreVideo
|
||||
import os
|
||||
|
||||
#if os(iOS)
|
||||
import UIKit
|
||||
typealias PlatformView = UIView
|
||||
typealias PlatformColor = UIColor
|
||||
#elseif os(macOS)
|
||||
import AppKit
|
||||
typealias PlatformView = NSView
|
||||
typealias PlatformColor = NSColor
|
||||
#endif
|
||||
|
||||
/// Bridges MPV video output to AVPictureInPictureController using AVSampleBufferDisplayLayer.
|
||||
/// This enables native PiP for MPV-rendered content.
|
||||
@MainActor
|
||||
final class MPVPiPBridge: NSObject {
|
||||
// MARK: - Properties
|
||||
|
||||
private let sampleBufferLayer = AVSampleBufferDisplayLayer()
|
||||
private var pipController: AVPictureInPictureController?
|
||||
private weak var mpvBackend: MPVBackend?
|
||||
|
||||
/// Whether PiP is currently active.
|
||||
var isPiPActive: Bool {
|
||||
pipController?.isPictureInPictureActive ?? false
|
||||
}
|
||||
|
||||
/// Whether PiP is possible (controller exists and is not nil).
|
||||
var isPiPPossible: Bool {
|
||||
pipController?.isPictureInPicturePossible ?? false
|
||||
}
|
||||
|
||||
/// Callback for when user wants to restore from PiP to main app.
|
||||
var onRestoreUserInterface: (() async -> Void)?
|
||||
|
||||
/// Callback for when PiP active status changes.
|
||||
var onPiPStatusChanged: ((Bool) -> Void)?
|
||||
|
||||
/// Callback for when PiP will start (for early UI updates like clearing main view)
|
||||
var onPiPWillStart: (() -> Void)?
|
||||
|
||||
/// Callback for when PiP will stop (to resume main view rendering before animation ends)
|
||||
var onPiPWillStop: (() -> Void)?
|
||||
|
||||
/// Callback for when PiP stops without restore (user clicked close button in PiP)
|
||||
var onPiPDidStopWithoutRestore: (() -> Void)?
|
||||
|
||||
/// Callback for when isPictureInPicturePossible changes
|
||||
var onPiPPossibleChanged: ((Bool) -> Void)?
|
||||
|
||||
/// Callback for when PiP render size changes (for resizing capture buffers)
|
||||
var onPiPRenderSizeChanged: ((CMVideoDimensions) -> Void)?
|
||||
|
||||
/// KVO observation for isPictureInPicturePossible
|
||||
private var pipPossibleObservation: NSKeyValueObservation?
|
||||
|
||||
/// Current PiP render size from AVPictureInPictureController
|
||||
private var currentPiPRenderSize: CMVideoDimensions?
|
||||
|
||||
/// Current video aspect ratio (width / height)
|
||||
private var videoAspectRatio: CGFloat = 16.0 / 9.0
|
||||
|
||||
/// Track whether restore was requested before PiP stopped
|
||||
private var restoreWasRequested = false
|
||||
|
||||
#if os(macOS)
|
||||
/// Timer to periodically update layer frame to match superlayer
|
||||
private var layerResizeTimer: Timer?
|
||||
/// Track if we've logged the PiP window hierarchy already
|
||||
private var hasLoggedPiPHierarchy = false
|
||||
/// Views we've hidden that need to be restored before PiP cleanup.
|
||||
/// Uses weak references to avoid retaining AVKit internal views that get deallocated
|
||||
/// when the PiP window closes, which would cause crashes in objc_release.
|
||||
private var hiddenPiPViews = NSHashTable<NSView>.weakObjects()
|
||||
#endif
|
||||
|
||||
// MARK: - Format Descriptions
|
||||
|
||||
private var currentFormatDescription: CMVideoFormatDescription?
|
||||
private var lastPresentationTime: CMTime = .zero
|
||||
|
||||
/// Timebase for controlling sample buffer display timing
|
||||
private var timebase: CMTimebase?
|
||||
|
||||
/// Cache last pixel buffer to re-enqueue during close animation
|
||||
private var lastPixelBuffer: CVPixelBuffer?
|
||||
|
||||
// MARK: - Playback State (Thread-Safe for nonisolated delegate methods)
|
||||
|
||||
/// Cached duration for PiP time range (thread-safe)
|
||||
private let _duration = OSAllocatedUnfairLock(initialState: 0.0)
|
||||
/// Cached paused state for PiP (thread-safe)
|
||||
private let _isPaused = OSAllocatedUnfairLock(initialState: false)
|
||||
|
||||
/// Update cached playback state from backend (call periodically).
///
/// - Parameters:
///   - duration: Total media duration in seconds (cached for the PiP time range).
///   - currentTime: Current playback position in seconds.
///   - isPaused: Whether playback is paused.
///
/// Duration/paused are stored behind unfair locks so the nonisolated
/// AVPictureInPicture delegate methods can read them off the main actor.
func updatePlaybackState(duration: Double, currentTime: Double, isPaused: Bool) {
    _duration.withLock { $0 = duration }
    _isPaused.withLock { $0 = isPaused }

    // Update timebase with current playback position so the sample buffer
    // layer's clock tracks the player.
    if let timebase {
        let time = CMTime(seconds: currentTime, preferredTimescale: 90000)
        CMTimebaseSetTime(timebase, time: time)
    }
}
|
||||
|
||||
// MARK: - Setup
|
||||
|
||||
/// Set up PiP with the given MPV backend and container view.
///
/// Configures the AVSampleBufferDisplayLayer (hidden during normal playback,
/// but kept in the view hierarchy because PiP requires it), attaches a host
/// timebase, creates the AVPictureInPictureController from a sample-buffer
/// content source, and wires KVO / lifecycle observers.
///
/// - Parameters:
///   - backend: The MPV backend to connect to
///   - containerView: The view to embed the sample buffer layer in
func setup(backend: MPVBackend, in containerView: PlatformView) {
    self.mpvBackend = backend

    // Configure sample buffer layer
    sampleBufferLayer.frame = containerView.bounds
    #if os(macOS)
    // On macOS, use resize to fill the entire area (ignoring aspect ratio)
    // This works around AVKit's PiP window sizing that includes title bar height
    sampleBufferLayer.videoGravity = .resize
    sampleBufferLayer.contentsGravity = .resize
    // Enable auto-resizing to fill superlayer
    sampleBufferLayer.autoresizingMask = [.layerWidthSizable, .layerHeightSizable]
    #else
    sampleBufferLayer.videoGravity = .resizeAspect
    sampleBufferLayer.contentsGravity = .resizeAspect
    #endif
    sampleBufferLayer.backgroundColor = PlatformColor.clear.cgColor

    // Set up timebase for controlling playback timing; driven by the host
    // clock, started at rate 1 from time zero.
    var timebase: CMTimebase?
    CMTimebaseCreateWithSourceClock(
        allocator: kCFAllocatorDefault,
        sourceClock: CMClockGetHostTimeClock(),
        timebaseOut: &timebase
    )
    if let timebase {
        self.timebase = timebase
        sampleBufferLayer.controlTimebase = timebase
        CMTimebaseSetRate(timebase, rate: 1.0)
        CMTimebaseSetTime(timebase, time: .zero)
    }

    // IMPORTANT: Hide the layer during normal playback so it doesn't cover
    // the OpenGL rendering. It will be shown when PiP is active.
    sampleBufferLayer.isHidden = true

    // Layer must be in view hierarchy for PiP to work, but can be hidden
    #if os(iOS)
    containerView.layer.addSublayer(sampleBufferLayer)
    #elseif os(macOS)
    // On macOS, add the layer to the container view's layer.
    // The warning about NSHostingController is unavoidable with AVSampleBufferDisplayLayer PiP,
    // but it doesn't affect functionality - the PiP window works correctly.
    containerView.wantsLayer = true
    if let layer = containerView.layer {
        // Add on top - the layer is hidden during normal playback anyway
        layer.addSublayer(sampleBufferLayer)
    }
    sampleBufferLayer.frame = containerView.bounds
    #endif

    // Create content source for sample buffer playback
    let contentSource = AVPictureInPictureController.ContentSource(
        sampleBufferDisplayLayer: sampleBufferLayer,
        playbackDelegate: self
    )

    // Create PiP controller
    pipController = AVPictureInPictureController(contentSource: contentSource)
    pipController?.delegate = self

    // Observe isPictureInPicturePossible changes via KVO
    // Note: Don't use .initial here - callbacks aren't set up yet when setup() is called.
    // Use notifyPiPPossibleState() after setting up callbacks.
    pipPossibleObservation = pipController?.observe(\.isPictureInPicturePossible, options: [.new]) { [weak self] _, change in
        let isPossible = change.newValue ?? false
        Task { @MainActor [weak self] in
            self?.onPiPPossibleChanged?(isPossible)
        }
    }

    #if os(iOS)
    // Observe app lifecycle to handle background transitions while PiP is active
    NotificationCenter.default.addObserver(
        self,
        selector: #selector(appWillResignActive),
        name: UIApplication.willResignActiveNotification,
        object: nil
    )
    NotificationCenter.default.addObserver(
        self,
        selector: #selector(appDidEnterBackground),
        name: UIApplication.didEnterBackgroundNotification,
        object: nil
    )
    #endif

    LoggingService.shared.debug("MPVPiPBridge: Setup complete", category: .mpv)
}
|
||||
|
||||
/// Manually notify current isPiPPossible state.
/// Call this after setting up onPiPPossibleChanged callback.
/// Needed because the KVO observation in setup() deliberately omits
/// `.initial` (callbacks are not wired yet at that point).
func notifyPiPPossibleState() {
    onPiPPossibleChanged?(isPiPPossible)
}
|
||||
|
||||
/// Update the video aspect ratio for proper PiP sizing.
/// Call this when video dimensions are known or change.
/// - Parameter aspectRatio: Video width divided by height (e.g., 16/9 = 1.777...)
///
/// macOS: resizes the layer bounds to match the ratio so AVKit sizes the PiP
/// window correctly. iOS: leaves bounds alone unless PiP is active (changing
/// them while inactive breaks the restore-UI positioning); during PiP it
/// recomputes a screen-based frame and flushes the renderer on large ratio
/// changes so AVKit re-reads dimensions.
func updateVideoAspectRatio(_ aspectRatio: CGFloat) {
    guard aspectRatio > 0 else { return }
    videoAspectRatio = aspectRatio

    #if os(macOS)
    // On macOS, update layer bounds to match aspect ratio
    // This helps AVKit size the PiP window correctly
    let currentBounds = sampleBufferLayer.bounds
    let newHeight = currentBounds.width / aspectRatio
    let newBounds = CGRect(x: 0, y: 0, width: currentBounds.width, height: newHeight)

    // Disable implicit animations for the resize.
    CATransaction.begin()
    CATransaction.setDisableActions(true)
    sampleBufferLayer.bounds = newBounds
    CATransaction.commit()

    LoggingService.shared.debug("MPVPiPBridge: Updated aspect ratio to \(aspectRatio), layer bounds: \(newBounds)", category: .mpv)
    #else
    // On iOS, don't modify bounds when PiP is inactive - this causes frame misalignment
    // (negative Y offset) which breaks the system's PiP restore UI positioning.
    // AVKit gets the aspect ratio from the enqueued video frames.

    // If PiP is active and video changed, calculate and update the layer frame.
    // The superlayer bounds don't update during PiP (view hierarchy hidden),
    // so we calculate the correct frame based on screen width and aspect ratio.
    if isPiPActive {
        // Detect significant aspect ratio change - if so, flush buffer to force AVKit
        // to re-read video dimensions from the new format description
        let currentBounds = sampleBufferLayer.bounds
        if currentBounds.height > 0 {
            let previousRatio = currentBounds.width / currentBounds.height
            let ratioChange = abs(aspectRatio - previousRatio) / previousRatio

            if ratioChange > 0.05 { // >5% change indicates new video
                // Flush buffer and clear format description to force AVKit to re-read dimensions
                sampleBufferLayer.sampleBufferRenderer.flush()
                currentFormatDescription = nil

                LoggingService.shared.debug("MPVPiPBridge: Flushed buffer for aspect ratio change \(previousRatio) -> \(aspectRatio)", category: .mpv)
            }
        }

        let screenWidth = UIScreen.main.bounds.width
        // Calculate height based on aspect ratio, capped to leave room for details
        let maxHeight = UIScreen.main.bounds.height * 0.6 // Leave 40% for details
        let calculatedHeight = screenWidth / aspectRatio
        let height = min(calculatedHeight, maxHeight)
        // If height was capped, shrink width to preserve the aspect ratio.
        let width = height < calculatedHeight ? height * aspectRatio : screenWidth
        let newFrame = CGRect(x: 0, y: 0, width: width, height: height)

        CATransaction.begin()
        CATransaction.setDisableActions(true)
        sampleBufferLayer.frame = newFrame
        sampleBufferLayer.bounds = CGRect(origin: .zero, size: newFrame.size)
        CATransaction.commit()
        pipController?.invalidatePlaybackState()
        LoggingService.shared.debug("MPVPiPBridge: Updated aspect ratio to \(aspectRatio), calculated layer frame during PiP: \(newFrame)", category: .mpv)
    } else {
        LoggingService.shared.debug("MPVPiPBridge: Updated aspect ratio to \(aspectRatio) (bounds unchanged on iOS)", category: .mpv)
    }
    #endif
}
|
||||
|
||||
#if os(iOS)
|
||||
/// iOS lifecycle hook: app is about to become inactive.
/// While PiP is active, re-syncs the sample buffer timebase to the player's
/// current position and pre-buffers frames, because iOS throttles/suspends
/// app code during the background transition.
@objc private func appWillResignActive() {
    guard isPiPActive, let timebase else { return }

    // Sync timebase and pre-buffer frames before iOS throttles/suspends
    if let currentTime = mpvBackend?.currentTime {
        let time = CMTime(seconds: currentTime, preferredTimescale: 90000)
        CMTimebaseSetTime(timebase, time: time)
    }
    CMTimebaseSetRate(timebase, rate: 1.0)
    preBufferFramesForBackgroundTransition()
}
|
||||
|
||||
/// iOS lifecycle hook: app entered the background.
/// Second line of defense after appWillResignActive: re-syncs the timebase
/// and pre-buffers another batch of frames to keep the PiP layer fed while
/// app code is suspended.
@objc private func appDidEnterBackground() {
    guard isPiPActive, let timebase else { return }

    // Ensure timebase is synced and running
    if let currentTime = mpvBackend?.currentTime {
        let time = CMTime(seconds: currentTime, preferredTimescale: 90000)
        CMTimebaseSetTime(timebase, time: time)
    }
    CMTimebaseSetRate(timebase, rate: 1.0)

    // Pre-buffer additional frames as secondary buffer
    preBufferFramesForBackgroundTransition()
}
|
||||
|
||||
/// Pre-buffer frames with future timestamps to bridge iOS background suspension.
/// Note: iOS suspends app code for ~300-400ms during background transition.
/// Pre-buffered frames show the same content but keep the layer fed.
///
/// Re-enqueues the most recent pixel buffer 30 times at 30fps intervals
/// (~1 second) starting from the current timebase time, then advances
/// `lastPresentationTime` past the pre-buffered range. No-op when no frame
/// has been captured yet or the timebase is missing.
private func preBufferFramesForBackgroundTransition() {
    guard let pixelBuffer = lastPixelBuffer,
          let formatDescription = currentFormatDescription,
          let timebase else { return }

    let currentTimebaseTime = CMTimebaseGetTime(timebase)
    // 1/30 s per frame.
    let frameInterval = CMTime(value: 1, timescale: 30)
    var currentPTS = currentTimebaseTime

    // Pre-enqueue 30 frames (~1 second) to bridge the iOS suspension gap
    for _ in 0..<30 {
        currentPTS = CMTimeAdd(currentPTS, frameInterval)

        var sampleTimingInfo = CMSampleTimingInfo(
            duration: frameInterval,
            presentationTimeStamp: currentPTS,
            decodeTimeStamp: .invalid
        )

        var sampleBuffer: CMSampleBuffer?
        CMSampleBufferCreateReadyWithImageBuffer(
            allocator: kCFAllocatorDefault,
            imageBuffer: pixelBuffer,
            formatDescription: formatDescription,
            sampleTiming: &sampleTimingInfo,
            sampleBufferOut: &sampleBuffer
        )

        // Skip (rather than abort) frames that failed to wrap.
        guard let sampleBuffer else { continue }

        if sampleBufferLayer.sampleBufferRenderer.status != .failed {
            sampleBufferLayer.sampleBufferRenderer.enqueue(sampleBuffer)
        }
    }

    lastPresentationTime = currentPTS
}
|
||||
#endif
|
||||
|
||||
/// Update the layer frame when container bounds change.
/// On macOS, the frame should be relative to the window's content view.
/// - Parameter frame: New frame for the sample buffer layer, in superlayer coordinates.
func updateLayerFrame(_ frame: CGRect) {
    sampleBufferLayer.frame = frame
}
|
||||
|
||||
#if os(macOS)
|
||||
/// Update the layer frame based on container view's bounds.
/// Call this on macOS when the container view's size changes.
/// - Parameter containerView: The view whose bounds the layer should fill.
func updateLayerFrame(for containerView: NSView) {
    sampleBufferLayer.frame = containerView.bounds
}
|
||||
#endif
|
||||
|
||||
/// Move the sample buffer layer to a new container view.
/// This is needed when transitioning from fullscreen to PiP,
/// as the layer must be in a visible window hierarchy.
/// - Parameter containerView: The new host view (UIView on iOS, NSView on macOS).
func moveLayer(to containerView: PlatformView) {
    // Detach from the old superlayer, resize to the new container, then re-attach.
    sampleBufferLayer.removeFromSuperlayer()
    sampleBufferLayer.frame = containerView.bounds
    #if os(iOS)
    containerView.layer.addSublayer(sampleBufferLayer)
    #elseif os(macOS)
    // NSViews are not layer-backed by default; opt in before adding a sublayer.
    containerView.wantsLayer = true
    containerView.layer?.addSublayer(sampleBufferLayer)
    #endif
}
|
||||
|
||||
/// Clean up and release resources.
/// Unregisters observers, tears down the PiP controller, detaches and flushes
/// the display layer, and drops the backend reference.
func cleanup() {
    NotificationCenter.default.removeObserver(self)
    pipPossibleObservation?.invalidate()
    pipPossibleObservation = nil
    // Drop the cached frame used for re-enqueueing during close animations.
    lastPixelBuffer = nil
    pipController?.stopPictureInPicture()
    pipController = nil
    sampleBufferLayer.removeFromSuperlayer()
    // Flush after detaching so no stale frame remains if the layer is reused.
    sampleBufferLayer.sampleBufferRenderer.flush(removingDisplayedImage: true, completionHandler: nil)
    mpvBackend = nil
}
|
||||
|
||||
/// Flush the sample buffer to clear any displayed frame.
/// Call this when stopping playback so a reused backend doesn't show a
/// stale frame from the previous video.
func flushBuffer() {
    // Reset the per-session counter used for periodic enqueue logging.
    frameCount = 0
    sampleBufferLayer.sampleBufferRenderer.flush(removingDisplayedImage: true, completionHandler: nil)
}
|
||||
|
||||
// MARK: - Frame Enqueueing

/// Number of frames enqueued so far; drives the periodic logging in
/// `enqueueFrame` and is reset to 0 by `flushBuffer()`.
private var frameCount = 0
|
||||
|
||||
/// Enqueue a video frame from MPV for display.
/// This is called by MPV's render callback when a frame is ready.
/// - Parameters:
///   - pixelBuffer: The decoded video frame as CVPixelBuffer
///   - presentationTime: The presentation timestamp for this frame
func enqueueFrame(_ pixelBuffer: CVPixelBuffer, presentationTime: CMTime) {
    frameCount += 1

    // Compute dimensions once; used for both logging and format invalidation.
    let width = CVPixelBufferGetWidth(pixelBuffer)
    let height = CVPixelBufferGetHeight(pixelBuffer)

    // Log first few frames and then periodically
    if frameCount <= 3 || frameCount % 60 == 0 {
        LoggingService.shared.debug("MPVPiPBridge: Enqueue frame #\(frameCount), size: \(width)x\(height), layer status: \(sampleBufferLayer.sampleBufferRenderer.status.rawValue)", category: .mpv)
    }

    // (Re)create the format description when absent or when dimensions changed.
    // Optional binding replaces the previous force-unwraps of
    // `currentFormatDescription!` in the dimension comparison.
    let needsNewFormatDescription: Bool
    if let existing = currentFormatDescription {
        let dimensions = CMVideoFormatDescriptionGetDimensions(existing)
        needsNewFormatDescription = dimensions.width != Int32(width) || dimensions.height != Int32(height)
    } else {
        needsNewFormatDescription = true
    }

    if needsNewFormatDescription {
        var formatDescription: CMVideoFormatDescription?
        CMVideoFormatDescriptionCreateForImageBuffer(
            allocator: kCFAllocatorDefault,
            imageBuffer: pixelBuffer,
            formatDescriptionOut: &formatDescription
        )
        currentFormatDescription = formatDescription
    }

    guard let formatDescription = currentFormatDescription else { return }

    // Create sample timing info
    var sampleTimingInfo = CMSampleTimingInfo(
        duration: CMTime(value: 1, timescale: 30), // Approximate frame duration
        presentationTimeStamp: presentationTime,
        decodeTimeStamp: .invalid
    )

    // Create sample buffer
    var sampleBuffer: CMSampleBuffer?
    CMSampleBufferCreateReadyWithImageBuffer(
        allocator: kCFAllocatorDefault,
        imageBuffer: pixelBuffer,
        formatDescription: formatDescription,
        sampleTiming: &sampleTimingInfo,
        sampleBufferOut: &sampleBuffer
    )

    guard let sampleBuffer else { return }

    // Cache the pixel buffer for re-enqueuing during close animation
    lastPixelBuffer = pixelBuffer

    // Enqueue on sample buffer layer
    if sampleBufferLayer.sampleBufferRenderer.status != .failed {
        sampleBufferLayer.sampleBufferRenderer.enqueue(sampleBuffer)
        lastPresentationTime = presentationTime
    } else {
        // Flush and retry if layer is in failed state
        sampleBufferLayer.sampleBufferRenderer.flush()
        sampleBufferLayer.sampleBufferRenderer.enqueue(sampleBuffer)
    }
}
|
||||
|
||||
/// Re-enqueue the last cached frame so the layer keeps showing content
/// (e.g. during the PiP close animation) instead of the system placeholder.
private func reenqueueLastFrame() {
    guard let pixelBuffer = lastPixelBuffer,
          let formatDescription = currentFormatDescription else { return }

    // Nudge the timestamp forward so the renderer doesn't reject a duplicate PTS.
    let step = CMTime(value: 1, timescale: 30)
    let pts = CMTimeAdd(lastPresentationTime, step)

    var timing = CMSampleTimingInfo(
        duration: step,
        presentationTimeStamp: pts,
        decodeTimeStamp: .invalid
    )

    var buffer: CMSampleBuffer?
    CMSampleBufferCreateReadyWithImageBuffer(
        allocator: kCFAllocatorDefault,
        imageBuffer: pixelBuffer,
        formatDescription: formatDescription,
        sampleTiming: &timing,
        sampleBufferOut: &buffer
    )

    guard let buffer, sampleBufferLayer.sampleBufferRenderer.status != .failed else { return }

    sampleBufferLayer.sampleBufferRenderer.enqueue(buffer)
    lastPresentationTime = pts
}
|
||||
|
||||
// MARK: - PiP Control

/// Start Picture-in-Picture.
/// No-ops (with a warning) when the controller reports PiP is not possible.
func startPiP() {
    guard let pipController, pipController.isPictureInPicturePossible else {
        LoggingService.shared.warning("MPVPiPBridge: PiP not possible", category: .mpv)
        return
    }

    #if os(iOS)
    // Update layer frame to match current superlayer bounds before starting PiP.
    // This ensures the frame is correct for the current video's player area.
    if let superlayer = sampleBufferLayer.superlayer {
        CATransaction.begin()
        CATransaction.setDisableActions(true) // suppress implicit animation while resizing
        sampleBufferLayer.frame = superlayer.bounds
        sampleBufferLayer.bounds = CGRect(origin: .zero, size: superlayer.bounds.size)
        CATransaction.commit()
        LoggingService.shared.debug("MPVPiPBridge: Updated layer frame before PiP: \(superlayer.bounds)", category: .mpv)
    }
    #endif

    // Show the layer before starting PiP - it needs to be visible for PiP to work
    sampleBufferLayer.isHidden = false

    pipController.startPictureInPicture()
    LoggingService.shared.debug("MPVPiPBridge: Starting PiP", category: .mpv)
}
|
||||
|
||||
/// Stop Picture-in-Picture.
/// The layer itself is hidden later, in the didStopPictureInPicture delegate.
func stopPiP() {
    LoggingService.shared.debug("MPVPiPBridge: Stopping PiP", category: .mpv)
    pipController?.stopPictureInPicture()
}
|
||||
|
||||
/// Toggle Picture-in-Picture: stop when active, start otherwise.
func togglePiP() {
    guard isPiPActive else {
        startPiP()
        return
    }
    stopPiP()
}
|
||||
|
||||
/// Invalidate and update the playback state in PiP window.
/// Forwards to AVKit so it re-queries paused state and the playable time range.
func invalidatePlaybackState() {
    pipController?.invalidatePlaybackState()
}
|
||||
}
|
||||
|
||||
// MARK: - AVPictureInPictureSampleBufferPlaybackDelegate

extension MPVPiPBridge: AVPictureInPictureSampleBufferPlaybackDelegate {
    /// Called by AVKit when the user toggles play/pause from the PiP window.
    /// Mirrors the request into the mpv backend and keeps the timebase rate in sync.
    nonisolated func pictureInPictureController(
        _ pictureInPictureController: AVPictureInPictureController,
        setPlaying playing: Bool
    ) {
        // Update cached state immediately for responsive UI
        _isPaused.withLock { $0 = !playing }

        Task { @MainActor in
            // Update timebase rate (1.0 = playing, 0.0 = paused)
            if let timebase {
                CMTimebaseSetRate(timebase, rate: playing ? 1.0 : 0.0)
            }

            if playing {
                mpvBackend?.play()
            } else {
                mpvBackend?.pause()
            }
            // Notify PiP system that state changed
            pipController?.invalidatePlaybackState()
        }
    }

    /// Reports the playable time range shown by the PiP scrubber.
    nonisolated func pictureInPictureControllerTimeRangeForPlayback(
        _ pictureInPictureController: AVPictureInPictureController
    ) -> CMTimeRange {
        let duration = _duration.withLock { $0 }
        // Return actual duration if known
        if duration > 0 {
            return CMTimeRange(start: .zero, duration: CMTime(seconds: duration, preferredTimescale: 90000))
        }
        // Fallback to a reasonable default until we know the actual duration
        return CMTimeRange(start: .zero, duration: CMTime(seconds: 3600, preferredTimescale: 90000))
    }

    /// Reports the cached paused state back to AVKit.
    nonisolated func pictureInPictureControllerIsPlaybackPaused(
        _ pictureInPictureController: AVPictureInPictureController
    ) -> Bool {
        _isPaused.withLock { $0 }
    }

    // Optional: Handle skip by interval (completion handler style to avoid compiler crash)
    nonisolated func pictureInPictureController(
        _ pictureInPictureController: AVPictureInPictureController,
        skipByInterval skipInterval: CMTime,
        completion completionHandler: @escaping @Sendable () -> Void
    ) {
        Task { @MainActor in
            let currentTime = mpvBackend?.currentTime ?? 0
            let newTime = currentTime + skipInterval.seconds
            // Clamp at 0 so a backward skip near the start doesn't seek negative.
            await mpvBackend?.seek(to: max(0, newTime))
            completionHandler()
        }
    }

    // Optional: Whether to prohibit background audio
    nonisolated func pictureInPictureControllerShouldProhibitBackgroundAudioPlayback(
        _ pictureInPictureController: AVPictureInPictureController
    ) -> Bool {
        false // Allow background audio
    }

    // Required: Handle render size changes
    nonisolated func pictureInPictureController(
        _ pictureInPictureController: AVPictureInPictureController,
        didTransitionToRenderSize newRenderSize: CMVideoDimensions
    ) {
        Task { @MainActor in
            // Cache and forward the new size so the render path can adapt.
            currentPiPRenderSize = newRenderSize
            onPiPRenderSizeChanged?(newRenderSize)
            LoggingService.shared.debug("MPVPiPBridge: PiP render size changed to \(newRenderSize.width)x\(newRenderSize.height)", category: .mpv)
        }
    }
}
|
||||
|
||||
// MARK: - AVPictureInPictureControllerDelegate

extension MPVPiPBridge: AVPictureInPictureControllerDelegate {
    /// Prepares state and (on macOS) the layer hierarchy just before PiP begins.
    nonisolated func pictureInPictureControllerWillStartPictureInPicture(
        _ pictureInPictureController: AVPictureInPictureController
    ) {
        Task { @MainActor in
            // Reset PiP render size - will be updated by didTransitionToRenderSize
            currentPiPRenderSize = nil
            // Reset restore flag - will be set if user clicks restore button
            restoreWasRequested = false
            // Show the sample buffer layer when PiP starts
            sampleBufferLayer.isHidden = false

            #if os(macOS)
            // Ensure our layer has no background that could cause black areas
            sampleBufferLayer.backgroundColor = nil

            // Hide other sublayers (like _NSOpenGLViewBackingLayer) that would cover our video
            if let superlayer = sampleBufferLayer.superlayer,
               let sublayers = superlayer.sublayers {
                for layer in sublayers where layer !== sampleBufferLayer {
                    layer.isHidden = true
                    LoggingService.shared.debug("MPVPiPBridge: Hiding layer \(type(of: layer)) for PiP", category: .mpv)
                }
            }

            // Clear backgrounds on parent layers that could cause black areas
            // The container view's backing layer often has a black background
            var currentLayer: CALayer? = sampleBufferLayer.superlayer
            var depth = 0
            while let layer = currentLayer {
                let layerType = String(describing: type(of: layer))
                if layer.backgroundColor != nil {
                    LoggingService.shared.debug("MPVPiPBridge: Clearing background on \(layerType) at depth \(depth)", category: .mpv)
                    layer.backgroundColor = nil
                }
                currentLayer = layer.superlayer
                depth += 1
                if depth > 5 { break } // Don't go too far up
            }
            #endif

            // Notify to clear main view immediately
            onPiPWillStart?()
            LoggingService.shared.debug("MPVPiPBridge: Will start PiP", category: .mpv)
        }
    }

    /// Reports the started state and (on macOS) begins tracking the PiP window size.
    nonisolated func pictureInPictureControllerDidStartPictureInPicture(
        _ pictureInPictureController: AVPictureInPictureController
    ) {
        Task { @MainActor in
            onPiPStatusChanged?(true)
            // Debug: Log layer frame and bounds
            LoggingService.shared.debug("MPVPiPBridge: Did start PiP - layer frame: \(sampleBufferLayer.frame), bounds: \(sampleBufferLayer.bounds), videoGravity: \(sampleBufferLayer.videoGravity.rawValue)", category: .mpv)

            #if os(macOS)
            // Start timer to update layer frame to match PiP window
            startLayerResizeTimer()
            #endif
        }
    }

    /// Rolls back the will-start preparations when PiP fails to launch.
    nonisolated func pictureInPictureController(
        _ pictureInPictureController: AVPictureInPictureController,
        failedToStartPictureInPictureWithError error: Error
    ) {
        Task { @MainActor in
            // Hide the layer again since PiP failed
            sampleBufferLayer.isHidden = true

            #if os(macOS)
            // Unhide other sublayers that we hid when trying to start PiP
            if let superlayer = sampleBufferLayer.superlayer,
               let sublayers = superlayer.sublayers {
                for layer in sublayers where layer !== sampleBufferLayer {
                    layer.isHidden = false
                }
            }
            #endif

            onPiPStatusChanged?(false)
            LoggingService.shared.logMPVError("MPVPiPBridge: Failed to start PiP", error: error)
        }
    }

    /// Keeps the layer fed during the PiP close animation and resumes the main view.
    nonisolated func pictureInPictureControllerWillStopPictureInPicture(
        _ pictureInPictureController: AVPictureInPictureController
    ) {
        Task { @MainActor in
            #if os(macOS)
            // Restore hidden views BEFORE cleanup to prevent crashes
            restoreHiddenPiPViews()
            #endif

            // Pre-enqueue multiple copies of the last frame to ensure buffer has content
            // throughout the entire close animation (typically ~0.3-0.5 seconds)
            for _ in 0..<30 {
                reenqueueLastFrame()
            }

            // Resume main view rendering before animation ends
            // Keep sampleBufferLayer visible and receiving frames during close animation
            // to avoid showing the "video is playing in picture in picture" placeholder
            onPiPWillStop?()

            LoggingService.shared.debug("MPVPiPBridge: Will stop PiP, pre-enqueued frames", category: .mpv)
        }
    }

    /// Final teardown after the PiP window closes; distinguishes the close
    /// button from the restore button via `restoreWasRequested`.
    nonisolated func pictureInPictureControllerDidStopPictureInPicture(
        _ pictureInPictureController: AVPictureInPictureController
    ) {
        Task { @MainActor in
            #if os(macOS)
            // Stop layer resize timer
            stopLayerResizeTimer()

            // Unhide other sublayers that we hid when PiP started
            if let superlayer = sampleBufferLayer.superlayer,
               let sublayers = superlayer.sublayers {
                for layer in sublayers where layer !== sampleBufferLayer {
                    layer.isHidden = false
                }
            }
            #endif

            // Hide the sample buffer layer when PiP stops
            sampleBufferLayer.isHidden = true

            // Clear cached pixel buffer
            lastPixelBuffer = nil

            onPiPStatusChanged?(false)

            // If restore wasn't requested, notify that PiP stopped without restore
            // (user clicked X button instead of restore button)
            if !restoreWasRequested {
                LoggingService.shared.debug("MPVPiPBridge: Did stop PiP without restore (close button)", category: .mpv)
                onPiPDidStopWithoutRestore?()
            } else {
                LoggingService.shared.debug("MPVPiPBridge: Did stop PiP (with restore)", category: .mpv)
            }
        }
    }

    /// User tapped the restore button: flag it, let the host restore the UI,
    /// then tell AVKit the restore completed.
    nonisolated func pictureInPictureController(
        _ pictureInPictureController: AVPictureInPictureController,
        restoreUserInterfaceForPictureInPictureStopWithCompletionHandler completionHandler: @escaping (Bool) -> Void
    ) {
        Task { @MainActor in
            // Mark that restore was requested - didStopPictureInPicture will check this
            restoreWasRequested = true
            LoggingService.shared.debug("MPVPiPBridge: Restore requested", category: .mpv)
            await onRestoreUserInterface?()
            completionHandler(true)
        }
    }
}
|
||||
|
||||
// MARK: - macOS Layer Resize Timer

#if os(macOS)
extension MPVPiPBridge {
    /// Start a timer to periodically resize the layer to match the PiP window.
    /// This is needed on macOS because AVKit doesn't automatically resize the layer.
    func startLayerResizeTimer() {
        stopLayerResizeTimer()

        // Check immediately
        updateLayerFrameToMatchPiPWindow()

        // Then check periodically
        layerResizeTimer = Timer.scheduledTimer(withTimeInterval: 0.1, repeats: true) { [weak self] _ in
            Task { @MainActor [weak self] in
                self?.updateLayerFrameToMatchPiPWindow()
            }
        }
    }

    /// Stop the layer resize timer.
    func stopLayerResizeTimer() {
        layerResizeTimer?.invalidate()
        layerResizeTimer = nil
    }

    /// Find the PiP window by enumerating all windows.
    /// Returns the first in-app window whose class name suggests PiP.
    /// The CGWindowList pass below is diagnostic only — it can log matches
    /// but cannot yield an NSWindow, so the function returns nil in that case.
    private func findPiPWindow() -> NSWindow? {
        // Get all windows in the app
        let allWindows = NSApplication.shared.windows

        for window in allWindows {
            let className = String(describing: type(of: window))
            // PiP windows on macOS are typically named PIPPanelWindow or similar
            if className.contains("PIP") || className.contains("PiP") || className.contains("Picture") {
                LoggingService.shared.debug("MPVPiPBridge: Found PiP window: \(className), frame: \(window.frame)", category: .mpv)
                return window
            }
        }

        // If no PiP window found in our app, it might be owned by AVKit framework
        // Try to find it via CGWindowListCopyWindowInfo
        let options = CGWindowListOption(arrayLiteral: .optionOnScreenOnly, .excludeDesktopElements)
        if let windowList = CGWindowListCopyWindowInfo(options, kCGNullWindowID) as? [[String: Any]] {
            for windowInfo in windowList {
                if let ownerName = windowInfo[kCGWindowOwnerName as String] as? String,
                   ownerName.contains("Picture") || ownerName.contains("PiP") {
                    LoggingService.shared.debug("MPVPiPBridge: Found PiP via CGWindowList: \(ownerName)", category: .mpv)
                }
                if let windowName = windowInfo[kCGWindowName as String] as? String {
                    if windowName.contains("Picture") || windowName.contains("PiP") {
                        // Found it, but CGWindowInfo doesn't give us NSWindow
                        if let bounds = windowInfo[kCGWindowBounds as String] as? [String: Any],
                           let width = bounds["Width"] as? CGFloat,
                           let height = bounds["Height"] as? CGFloat {
                            LoggingService.shared.debug("MPVPiPBridge: PiP window bounds from CGWindowList: \(width)x\(height)", category: .mpv)
                        }
                    }
                }
            }
        }

        return nil
    }

    /// Recursively log view hierarchy for debugging.
    /// - Parameters:
    ///   - view: Root view to describe.
    ///   - depth: Current recursion depth; drives indentation and caps at 6.
    private func logViewHierarchy(_ view: NSView, depth: Int) {
        let indent = String(repeating: "  ", count: depth)
        let viewType = String(describing: type(of: view))
        let layerInfo: String
        if let layer = view.layer {
            let bgColor = layer.backgroundColor != nil ? "has bg" : "no bg"
            let clips = layer.masksToBounds ? "clips" : "no clip"
            layerInfo = "layer: \(layer.frame), \(bgColor), \(clips)"
        } else {
            layerInfo = "no layer"
        }
        LoggingService.shared.debug("MPVPiPBridge: \(indent)[\(depth)] \(viewType) frame: \(view.frame), \(layerInfo)", category: .mpv)

        // Check sublayers
        if let layer = view.layer {
            for sublayer in layer.sublayers ?? [] {
                let sublayerType = String(describing: type(of: sublayer))
                let subBg = sublayer.backgroundColor != nil ? "HAS BG" : "no bg"
                let subClips = sublayer.masksToBounds ? "clips" : "no clip"
                LoggingService.shared.debug("MPVPiPBridge: \(indent)  -> sublayer: \(sublayerType), frame: \(sublayer.frame), \(subBg), \(subClips)", category: .mpv)
            }
        }

        // Recurse into subviews (limit depth to avoid spam)
        if depth < 6 {
            for subview in view.subviews {
                logViewHierarchy(subview, depth: depth + 1)
            }
        }
    }

    /// Fix the mispositioned AVPictureInPictureCALayerHostView that causes the black bar
    private func fixPiPLayerHostViewPosition(in pipWindow: NSWindow) {
        guard let contentView = pipWindow.contentView else { return }

        // Find the AVPictureInPictureCALayerHostView which is positioned incorrectly
        findAndFixLayerHostView(in: contentView, windowBounds: contentView.bounds)
    }

    /// Walk the PiP window's view tree, hiding the source-view host and
    /// relaxing clipping on the sample-buffer host view.
    /// - Parameter windowBounds: Carried through the recursion; not read by the
    ///   fix itself in this implementation.
    private func findAndFixLayerHostView(in view: NSView, windowBounds: CGRect) {
        let viewType = String(describing: type(of: view))

        // AVPictureInPictureCALayerHostView contains the SOURCE view content (our OpenGL view)
        // It's positioned incorrectly and shows the black background from our app
        // We want to HIDE this completely - we only need the AVSampleBufferDisplayLayerContentLayer
        if viewType.contains("AVPictureInPictureCALayerHostView") {
            if !view.isHidden {
                LoggingService.shared.debug("MPVPiPBridge: Hiding \(viewType) - it contains source view with black bg", category: .mpv)
                view.isHidden = true
                view.layer?.isHidden = true
                // Track this view so we can unhide it before cleanup
                hiddenPiPViews.add(view)
            }
        }

        // Disable clipping on content layers
        if viewType.contains("AVPictureInPictureSampleBufferDisplayLayerHostView") {
            // Disable clipping on this view and its sublayers
            view.layer?.masksToBounds = false
            for sublayer in view.layer?.sublayers ?? [] {
                sublayer.masksToBounds = false
            }
        }

        // Recurse
        for subview in view.subviews {
            findAndFixLayerHostView(in: subview, windowBounds: windowBounds)
        }
    }

    /// Restore any views we hid to prevent crashes during cleanup
    private func restoreHiddenPiPViews() {
        let views = hiddenPiPViews.allObjects
        let count = views.count
        for view in views {
            view.isHidden = false
            view.layer?.isHidden = false
        }
        hiddenPiPViews.removeAllObjects()
        LoggingService.shared.debug("MPVPiPBridge: Restored \(count) hidden PiP views", category: .mpv)
    }

    /// Update the sample buffer layer's frame to match the PiP window size.
    /// Falls back to the superlayer's bounds when no PiP window can be located.
    private func updateLayerFrameToMatchPiPWindow() {
        // Try to find the PiP window
        if let pipWindow = findPiPWindow() {
            // Get the content view bounds (excludes title bar)
            let windowSize = pipWindow.contentView?.bounds.size ?? pipWindow.frame.size

            // Log detailed PiP window view hierarchy once
            if !hasLoggedPiPHierarchy, let contentView = pipWindow.contentView {
                hasLoggedPiPHierarchy = true
                LoggingService.shared.debug("MPVPiPBridge: ===== PiP Window View Hierarchy =====", category: .mpv)
                LoggingService.shared.debug("MPVPiPBridge: Window frame: \(pipWindow.frame), contentView frame: \(contentView.frame)", category: .mpv)
                logViewHierarchy(contentView, depth: 0)
            }

            // Fix mispositioned internal AVKit views that cause the black bar
            fixPiPLayerHostViewPosition(in: pipWindow)

            let newFrame = CGRect(origin: .zero, size: windowSize)

            if sampleBufferLayer.frame.size != newFrame.size {
                LoggingService.shared.debug("MPVPiPBridge: Resizing layer to match PiP window: \(sampleBufferLayer.frame) -> \(newFrame)", category: .mpv)
                CATransaction.begin()
                CATransaction.setDisableActions(true)
                sampleBufferLayer.frame = newFrame
                CATransaction.commit()
            }
        } else {
            // Fallback: try to match superlayer
            guard let superlayer = sampleBufferLayer.superlayer else { return }
            let superBounds = superlayer.bounds
            if sampleBufferLayer.frame != superBounds {
                LoggingService.shared.debug("MPVPiPBridge: Resizing layer to match superlayer: \(sampleBufferLayer.frame) -> \(superBounds)", category: .mpv)
                CATransaction.begin()
                CATransaction.setDisableActions(true)
                sampleBufferLayer.frame = superBounds
                CATransaction.commit()
            }
        }
    }
}
#endif
|
||||
|
||||
#endif
|
||||
1220
Yattee/Services/Player/MPV/MPVRenderView.swift
Normal file
1220
Yattee/Services/Player/MPV/MPVRenderView.swift
Normal file
File diff suppressed because it is too large
Load Diff
502
Yattee/Services/Player/MPV/MPVSoftwareRenderView.swift
Normal file
502
Yattee/Services/Player/MPV/MPVSoftwareRenderView.swift
Normal file
@@ -0,0 +1,502 @@
|
||||
//
|
||||
// MPVSoftwareRenderView.swift
|
||||
// Yattee
|
||||
//
|
||||
// Software (CPU-based) rendering view for MPV in iOS/tvOS Simulator.
|
||||
// Uses MPV_RENDER_API_TYPE_SW to render to memory buffer, then displays via CGImage.
|
||||
//
|
||||
|
||||
import Foundation
|
||||
import Libmpv
|
||||
#if os(iOS)
|
||||
import UIKit
|
||||
#elseif os(tvOS)
|
||||
import UIKit
|
||||
#endif
|
||||
import CoreMedia
|
||||
|
||||
#if targetEnvironment(simulator) && (os(iOS) || os(tvOS))
|
||||
|
||||
/// Software-based MPV render view for iOS/tvOS Simulator (where OpenGL ES is not available).
|
||||
/// Renders video frames to CPU memory buffer and displays via CALayer.
|
||||
final class MPVSoftwareRenderView: UIView {
|
||||
// MARK: - Properties

// Weak reference to the client that feeds this view; assumed to own the view
// (avoids a retain cycle) — confirm against MPVClient's lifetime.
private weak var mpvClient: MPVClient?
// True once setupAsync has completed and rendering may begin.
private var isSetup = false

/// Render buffer for MPV to write pixel data
private var renderBuffer: UnsafeMutableRawPointer?
// Current buffer dimensions in pixels (bounds * contentScaleFactor).
private var renderWidth: Int = 0
private var renderHeight: Int = 0
// Bytes per row of the render buffer.
private var renderStride: Int = 0

/// Display link for frame rendering
private var displayLink: CADisplayLink?

/// Video frame rate from MPV; updating it retargets the display link.
var videoFPS: Double = 30.0 {
    didSet {
        updateDisplayLinkFrameRate()
    }
}

/// Current display link target frame rate (mirrors `videoFPS`).
var displayLinkTargetFPS: Double {
    videoFPS
}

/// Lock for thread-safe rendering
private let renderLock = NSLock()
// Guards against overlapping render passes.
private var isRendering = false

/// Tracks whether first frame has been rendered
private var hasRenderedFirstFrame = false

/// Tracks whether MPV has signaled it has a frame ready
private var mpvHasFrameReady = false

/// Generation counter to invalidate stale frame callbacks
private var frameGeneration: UInt = 0

/// Callback when first frame is rendered
var onFirstFrameRendered: (() -> Void)?

/// Callback when view is added to window (for PiP setup)
var onDidMoveToWindow: ((UIView) -> Void)?

/// Dedicated queue for rendering operations
private let renderQueue = DispatchQueue(label: "stream.yattee.mpv.software-render", qos: .userInitiated)

/// Lock for buffer recreation
private let bufferLock = NSLock()
// Set while the render buffer is being reallocated on `renderQueue`.
private var isRecreatingBuffer = false
// MARK: - Initialization
|
||||
|
||||
/// Create the view programmatically with a zero frame.
init() {
    super.init(frame: .zero)
    commonInit()
}

/// Storyboard/XIB initialization path.
required init?(coder: NSCoder) {
    super.init(coder: coder)
    commonInit()
}
|
||||
|
||||
/// Shared setup for both init paths: appearance plus app-lifecycle observers
/// that pause/resume the display link on background/foreground transitions.
private func commonInit() {
    backgroundColor = .black
    // Match the screen scale so the CPU buffer is sized at native resolution.
    contentScaleFactor = UIScreen.main.scale

    // Observe app lifecycle
    NotificationCenter.default.addObserver(
        self,
        selector: #selector(appDidEnterBackground),
        name: UIApplication.didEnterBackgroundNotification,
        object: nil
    )
    NotificationCenter.default.addObserver(
        self,
        selector: #selector(appDidBecomeActive),
        name: UIApplication.didBecomeActiveNotification,
        object: nil
    )
}
|
||||
|
||||
/// Pause software rendering while backgrounded; there is nothing to show offscreen.
@objc private func appDidEnterBackground() {
    MPVLogging.logDisplayLink("paused", isPaused: true, reason: "enterBackground")
    displayLink?.isPaused = true
}
|
||||
|
||||
/// Resume the display link on foreground and kick off an immediate render
/// so the view shows a current frame without waiting for the next tick.
@objc private func appDidBecomeActive() {
    displayLink?.isPaused = false
    MPVLogging.logDisplayLink("resumed", isPaused: false, reason: "becomeActive")

    guard isSetup else { return }
    renderQueue.async { [weak self] in
        self?.performRender()
    }
}
|
||||
|
||||
/// Tear down observers, the display link, and the CPU render buffer.
deinit {
    NotificationCenter.default.removeObserver(self)
    stopDisplayLink()

    // Free render buffer on render queue
    // NOTE(review): the sync here assumes deinit never runs on `renderQueue`;
    // confirm, otherwise this would deadlock.
    renderQueue.sync {
        freeRenderBuffer()
    }
}
|
||||
|
||||
// MARK: - View Lifecycle
|
||||
|
||||
/// Stop the display link when the view is about to leave its superview,
/// so no render ticks fire against a detached view.
override func willMove(toSuperview newSuperview: UIView?) {
    super.willMove(toSuperview: newSuperview)

    guard newSuperview == nil else { return }

    MPVLogging.logDisplayLink("stop", reason: "removedFromSuperview")
    stopDisplayLink()
}
|
||||
|
||||
/// (Re)start the display link when the view joins a superview and trigger
/// an immediate render so content appears without waiting for the next tick.
override func didMoveToSuperview() {
    super.didMoveToSuperview()

    guard superview != nil, isSetup else { return }

    if displayLink == nil {
        MPVLogging.logDisplayLink("start", reason: "addedToSuperview")
        startDisplayLink()
    }

    // Render right away instead of waiting for the display link.
    renderQueue.async { [weak self] in
        self?.performRender()
    }
}
|
||||
|
||||
/// Recreate the CPU render buffer when the view's pixel size drifts from the
/// current buffer size (e.g. rotation or split-screen resize).
override func layoutSubviews() {
    super.layoutSubviews()

    guard isSetup else { return }

    let currentSize = bounds.size
    let scale = contentScaleFactor
    let expectedWidth = Int(currentSize.width * scale)
    let expectedHeight = Int(currentSize.height * scale)

    // Check if buffer size needs update
    // (tolerates a couple of pixels of rounding slop to avoid churn)
    let bufferMismatch = abs(renderWidth - expectedWidth) > 2 || abs(renderHeight - expectedHeight) > 2

    guard bufferMismatch && expectedWidth > 0 && expectedHeight > 0 else { return }

    MPVLogging.logTransition("layoutSubviews - size mismatch (async resize)",
                             fromSize: CGSize(width: renderWidth, height: renderHeight),
                             toSize: CGSize(width: expectedWidth, height: expectedHeight))

    // Recreate buffer on background queue
    // NOTE(review): isRecreatingBuffer is set on the main thread and cleared on
    // renderQueue without taking bufferLock — confirm readers tolerate this.
    isRecreatingBuffer = true

    renderQueue.async { [weak self] in
        guard let self else { return }
        self.allocateRenderBuffer(width: expectedWidth, height: expectedHeight)
        self.isRecreatingBuffer = false
        MPVLogging.log("layoutSubviews: buffer recreation complete")
    }
}
|
||||
|
||||
override func didMoveToWindow() {
    super.didMoveToWindow()

    // Notify the owner once the view is attached to a window.
    guard window != nil else { return }
    onDidMoveToWindow?(self)
}
|
||||
|
||||
// MARK: - Setup
|
||||
|
||||
/// Set up with an MPV client (async version).
///
/// Creates the MPV software render context, wires the render/frame-ready
/// callbacks, allocates the initial pixel buffer sized to the current bounds,
/// and starts the display link.
///
/// - Parameter client: The MPV client that renders into this view's buffer.
/// - Throws: `MPVRenderError.renderContextFailed` when the software render
///   context could not be created.
func setupAsync(with client: MPVClient) async throws {
    self.mpvClient = client

    // Create MPV software render context
    let success = client.createSoftwareRenderContext()
    if !success {
        MPVLogging.warn("setupAsync: failed to create MPV software render context")
        throw MPVRenderError.renderContextFailed(-1)
    }

    // Set up render update callback
    // Hop to the main queue before touching UIKit (setNeedsDisplay).
    client.onRenderUpdate = { [weak self] in
        DispatchQueue.main.async {
            self?.setNeedsDisplay()
        }
    }

    // Set up video frame callback
    // The generation captured here lets stale callbacks from a previous
    // playback item (see resetFirstFrameTracking) be ignored on the main
    // queue. NOTE(review): frameGeneration is read on the callback's thread
    // but mutated elsewhere — confirm this cross-thread read is acceptable.
    client.onVideoFrameReady = { [weak self] in
        guard let self else { return }
        let capturedGeneration = self.frameGeneration
        DispatchQueue.main.async { [weak self] in
            guard let self, self.frameGeneration == capturedGeneration else { return }
            self.mpvHasFrameReady = true
        }
    }

    await MainActor.run {
        // Allocate initial render buffer
        // Buffer allocation itself happens on renderQueue; only the size is
        // computed here (bounds/scale must be read on the main actor).
        let scale = contentScaleFactor
        let width = Int(bounds.width * scale)
        let height = Int(bounds.height * scale)

        if width > 0 && height > 0 {
            renderQueue.async { [weak self] in
                self?.allocateRenderBuffer(width: width, height: height)
            }
        }

        startDisplayLink()
        isSetup = true
    }

    MPVLogging.log("MPVSoftwareRenderView: setup complete")
}
|
||||
|
||||
/// Update time position for frame timestamps.
///
/// Intentionally a no-op for the software renderer; the method exists so
/// callers can treat both render-view implementations interchangeably.
func updateTimePosition(_ time: Double) {}
|
||||
|
||||
// MARK: - Buffer Management
|
||||
|
||||
/// Allocate aligned render buffer for MPV to write pixels.
/// Must be called on renderQueue.
///
/// Frees any existing buffer, allocates a 64-byte-aligned buffer with the
/// stride rounded up to a 64-byte multiple (4 bytes per pixel), zeroes it,
/// updates the cached geometry, and — when setup has completed — schedules
/// an immediate render so the new buffer is filled without waiting for the
/// next display-link tick.
///
/// - Parameters:
///   - width: Target buffer width in pixels; ignored unless > 0.
///   - height: Target buffer height in pixels; ignored unless > 0.
private func allocateRenderBuffer(width: Int, height: Int) {
    guard width > 0 && height > 0 else {
        return
    }

    // Free existing buffer
    freeRenderBuffer()

    // Calculate stride (4 bytes per pixel for RGBA, aligned to 64 bytes)
    let bytesPerPixel = 4
    let minStride = width * bytesPerPixel
    let stride = ((minStride + 63) / 64) * 64 // Round up to 64-byte alignment

    // Allocate aligned buffer
    var buffer: UnsafeMutableRawPointer?
    let bufferSize = stride * height
    let alignResult = posix_memalign(&buffer, 64, bufferSize)

    guard alignResult == 0, let buffer else {
        MPVLogging.warn("allocateRenderBuffer: posix_memalign failed (\(alignResult))")
        return
    }

    // Zero out buffer so nothing is presented before the first real frame.
    memset(buffer, 0, bufferSize)

    renderBuffer = buffer
    renderWidth = width
    renderHeight = height
    renderStride = stride

    // Trigger an immediate render now that we have a buffer
    if isSetup {
        DispatchQueue.main.async { [weak self] in
            guard let self else { return }

            // Fix: check-and-set `isRendering` atomically under the lock.
            // The previous code tested the flag BEFORE locking, so a racing
            // display-link tick could also observe `false` and both paths
            // would mark a render in flight.
            self.renderLock.lock()
            let alreadyRendering = self.isRendering
            if !alreadyRendering {
                self.isRendering = true
            }
            self.renderLock.unlock()

            guard !alreadyRendering else { return }

            self.renderQueue.async { [weak self] in
                self?.performRender()
            }
        }
    }
}
|
||||
|
||||
/// Release the pixel buffer and reset the cached geometry.
/// Must be called on renderQueue.
private func freeRenderBuffer() {
    if let existing = renderBuffer {
        free(existing)
    }
    renderBuffer = nil
    renderWidth = 0
    renderHeight = 0
    renderStride = 0
}
|
||||
|
||||
// MARK: - Display Link
|
||||
|
||||
// Create and start the display link that drives per-frame rendering.
// NOTE(review): CADisplayLink retains its target, so the link keeps this view
// alive until invalidate() runs — verify every teardown path reaches
// stopDisplayLink(), otherwise the view (and its buffer) can leak.
private func startDisplayLink() {
    displayLink = CADisplayLink(target: self, selector: #selector(displayLinkFired))
    updateDisplayLinkFrameRate()
    // .common keeps the link firing during scrolling/tracking run-loop modes.
    displayLink?.add(to: .main, forMode: .common)
}
|
||||
|
||||
/// Align the display link's preferred frame rate with the video FPS,
/// clamped to the 24–60 range the frame-rate range below allows.
private func updateDisplayLinkFrameRate() {
    guard let link = displayLink else { return }

    // Match video FPS
    let clampedFPS = min(max(videoFPS, 24.0), 60.0)
    link.preferredFrameRateRange = CAFrameRateRange(
        minimum: 24,
        maximum: 60,
        preferred: Float(clampedFPS)
    )
}
|
||||
|
||||
/// Invalidate and drop the display link, halting per-frame callbacks.
private func stopDisplayLink() {
    guard let link = displayLink else { return }
    link.invalidate()
    displayLink = nil
}
|
||||
|
||||
/// Pause rendering.
///
/// Suspends display-link callbacks without invalidating the link, so
/// `resumeRendering()` can pick up again without recreating anything.
func pauseRendering() {
    displayLink?.isPaused = true
}
|
||||
|
||||
/// Resume rendering.
///
/// Un-pauses the display link and, once setup is complete, draws one frame
/// immediately instead of waiting for the next tick.
func resumeRendering() {
    guard let link = displayLink else { return }
    link.isPaused = false

    guard isSetup else { return }
    renderQueue.async { [weak self] in
        self?.performRender()
    }
}
|
||||
|
||||
/// Reset first frame tracking.
///
/// Bumping the generation invalidates any in-flight frame-ready callbacks
/// that captured the previous value, then clears both first-frame flags.
func resetFirstFrameTracking() {
    frameGeneration += 1
    mpvHasFrameReady = false
    hasRenderedFirstFrame = false
}
|
||||
|
||||
/// Clear the render view to black.
///
/// Zeroes the pixel buffer on the render queue, then drops the last
/// presented image from the layer on the main queue.
func clearToBlack() {
    // Nothing to clear before the first buffer allocation.
    guard renderBuffer != nil else { return }

    renderQueue.async { [weak self] in
        guard let self, let pixels = self.renderBuffer else { return }
        memset(pixels, 0, self.renderStride * self.renderHeight)

        DispatchQueue.main.async { [weak self] in
            self?.layer.contents = nil
        }
    }
}
|
||||
|
||||
// MARK: - Rendering
|
||||
|
||||
/// Display-link callback: schedules one render pass on the render queue,
/// skipping the tick when setup is incomplete or a render is already in flight.
@objc private func displayLinkFired() {
    guard isSetup else { return }

    // Fix: check-and-set the in-flight flag atomically under the lock.
    // The previous code tested `isRendering` in the guard BEFORE locking, so
    // two near-simultaneous callers could both observe `false` and both
    // enqueue a render pass.
    renderLock.lock()
    let alreadyRendering = isRendering
    if !alreadyRendering {
        isRendering = true
    }
    renderLock.unlock()

    guard !alreadyRendering else { return }

    renderQueue.async { [weak self] in
        self?.performRender()
    }
}
|
||||
|
||||
/// Frame counter for periodic logging
/// Incremented by performRender(), which runs on renderQueue.
private var renderFrameLogCounter: UInt64 = 0
|
||||
|
||||
// Render one frame: ask MPV to draw into the pixel buffer, convert it to a
// CGImage, and present it on the layer. Runs on renderQueue.
private func performRender() {
    // Clear the in-flight flag on every exit path, whichever guard fires.
    defer {
        renderLock.lock()
        isRendering = false
        renderLock.unlock()
    }

    guard let mpvClient else {
        renderFrameLogCounter += 1
        return
    }

    guard let buffer = renderBuffer, renderWidth > 0, renderHeight > 0 else {
        renderFrameLogCounter += 1
        return
    }

    // Skip if buffer is being recreated
    // NOTE(review): this exit (and the no-frame exit below) does not bump
    // renderFrameLogCounter, unlike the guards above — confirm intentional.
    if isRecreatingBuffer {
        return
    }

    // Render frame to buffer - returns true if a frame was actually rendered
    let didRender = mpvClient.renderSoftware(
        buffer: buffer,
        width: Int32(renderWidth),
        height: Int32(renderHeight),
        stride: renderStride
    )

    // Only update the layer if we actually rendered a frame
    guard didRender else {
        return
    }

    // Convert buffer to CGImage and update layer
    if let image = bufferToCGImage() {
        DispatchQueue.main.async { [weak self] in
            guard let self else { return }
            self.layer.contents = image
        }

        // Notify on first frame rendered
        if !hasRenderedFirstFrame {
            hasRenderedFirstFrame = true
            mpvHasFrameReady = true
            DispatchQueue.main.async { [weak self] in
                self?.onFirstFrameRendered?()
            }
        }
    }

    renderFrameLogCounter += 1
}
|
||||
|
||||
/// Convert render buffer to CGImage for display.
/// Must be called on renderQueue.
///
/// - Returns: A 32-bit-per-pixel image backed by a private copy of the
///   buffer, or `nil` when no buffer exists or image creation fails.
private func bufferToCGImage() -> CGImage? {
    guard let source = renderBuffer, renderWidth > 0, renderHeight > 0 else {
        return nil
    }

    // Snapshot the pixels so the resulting image never references memory
    // that the render queue may free or overwrite later.
    let byteCount = renderStride * renderHeight
    let snapshot = Data(bytes: source, count: byteCount)

    guard let provider = CGDataProvider(data: snapshot as CFData) else {
        return nil
    }

    // 8 bits per channel, 4 channels per pixel; alpha byte present but skipped.
    return CGImage(
        width: renderWidth,
        height: renderHeight,
        bitsPerComponent: 8,
        bitsPerPixel: 32,
        bytesPerRow: renderStride,
        space: CGColorSpaceCreateDeviceRGB(),
        bitmapInfo: CGBitmapInfo(rawValue: CGImageAlphaInfo.noneSkipLast.rawValue),
        provider: provider,
        decode: nil,
        shouldInterpolate: false,
        intent: .defaultIntent
    )
}
|
||||
|
||||
// MARK: - PiP Compatibility Stubs
|
||||
|
||||
/// These properties/methods exist for API compatibility with MPVRenderView.
/// PiP is not supported in software rendering mode.

// Never set to true by this class; callers may read it freely.
var captureFramesForPiP: Bool = false
// Always false here — the software path has no PiP session.
var isPiPActive: Bool = false
// Reported video dimensions; remain 0 in software rendering mode.
var videoContentWidth: Int = 0
var videoContentHeight: Int = 0
// Never invoked by this class; exists only to satisfy the shared interface.
var onFrameReady: ((CVPixelBuffer, CMTime) -> Void)?

// Compatibility shim: "entering PiP" just blanks the main view.
func clearMainViewForPiP() {
    clearToBlack()
}
|
||||
}
|
||||
|
||||
#endif
|
||||
Reference in New Issue
Block a user