Hello, mpv! 🎉

This commit is contained in:
Arkadiusz Fal 2022-02-16 21:23:11 +01:00
parent 9868a2ef01
commit 31a28a7cbd
74 changed files with 6191 additions and 891 deletions

2
.gitignore vendored
View File

@ -97,3 +97,5 @@ iOSInjectionProject/
# User-specific xcconfig files
Xcode-config/DEVELOPMENT_TEAM.xcconfig
# disable simulator libraries - to be removed when replaced with framework for mpv
Vendor/mpv/iOS/lib_sim

View File

@ -0,0 +1,7 @@
import Foundation
extension Comparable {
    /// Returns `self` restricted to `limits`: values below the range map to
    /// `lowerBound`, values above it map to `upperBound`, and in-range
    /// values are returned unchanged.
    func clamped(to limits: ClosedRange<Self>) -> Self {
        if self < limits.lowerBound { return limits.lowerBound }
        if self > limits.upperBound { return limits.upperBound }
        return self
    }
}

View File

@ -466,7 +466,7 @@ final class InvidiousAPI: Service, ObservableObject, VideosAPI {
return []
}
let videoAssetsURLs = streams.filter { $0["type"].stringValue.starts(with: "video/mp4") && $0["encoding"].stringValue == "h264" }
let videoAssetsURLs = streams.filter { $0["type"].stringValue.starts(with: "video/") }
return videoAssetsURLs.map {
Stream(
@ -474,7 +474,8 @@ final class InvidiousAPI: Service, ObservableObject, VideosAPI {
videoAsset: AVURLAsset(url: $0["url"].url!),
resolution: Stream.Resolution.from(resolution: $0["resolution"].stringValue),
kind: .adaptive,
encoding: $0["encoding"].stringValue
encoding: $0["encoding"].stringValue,
videoFormat: $0["type"].stringValue
)
}
}

View File

@ -481,11 +481,19 @@ final class PipedAPI: Service, ObservableObject, VideosAPI {
streams.append(Stream(hlsURL: hlsURL))
}
guard let audioStream = compatibleAudioStreams(from: content).first else {
let audioStreams = content
.dictionaryValue["audioStreams"]?
.arrayValue
.filter { $0.dictionaryValue["format"]?.stringValue == "M4A" }
.sorted {
$0.dictionaryValue["bitrate"]?.intValue ?? 0 > $1.dictionaryValue["bitrate"]?.intValue ?? 0
} ?? []
guard let audioStream = audioStreams.first else {
return streams
}
let videoStreams = compatibleVideoStream(from: content)
let videoStreams = content.dictionaryValue["videoStreams"]?.arrayValue ?? []
videoStreams.forEach { videoStream in
guard let audioAssetUrl = audioStream.dictionaryValue["url"]?.url,
@ -499,10 +507,11 @@ final class PipedAPI: Service, ObservableObject, VideosAPI {
let videoOnly = videoStream.dictionaryValue["videoOnly"]?.boolValue ?? true
let resolution = Stream.Resolution.from(resolution: videoStream.dictionaryValue["quality"]!.stringValue)
let videoFormat = videoStream.dictionaryValue["format"]?.stringValue
if videoOnly {
streams.append(
Stream(audioAsset: audioAsset, videoAsset: videoAsset, resolution: resolution, kind: .adaptive)
Stream(audioAsset: audioAsset, videoAsset: videoAsset, resolution: resolution, kind: .adaptive, videoFormat: videoFormat)
)
} else {
streams.append(
@ -521,23 +530,6 @@ final class PipedAPI: Service, ObservableObject, VideosAPI {
.compactMap(extractVideo(from:)) ?? []
}
private func compatibleAudioStreams(from content: JSON) -> [JSON] {
content
.dictionaryValue["audioStreams"]?
.arrayValue
.filter { $0.dictionaryValue["format"]?.stringValue == "M4A" }
.sorted {
$0.dictionaryValue["bitrate"]?.intValue ?? 0 > $1.dictionaryValue["bitrate"]?.intValue ?? 0
} ?? []
}
private func compatibleVideoStream(from content: JSON) -> [JSON] {
content
.dictionaryValue["videoStreams"]?
.arrayValue
.filter { $0.dictionaryValue["format"] == "MPEG_4" } ?? []
}
private func extractComment(from content: JSON) -> Comment? {
let details = content.dictionaryValue
let author = details["author"]?.stringValue ?? ""

View File

@ -23,13 +23,14 @@ extension PlayerModel {
}
func updateWatch(finished: Bool = false) {
guard let id = currentVideo?.videoID else {
guard let id = currentVideo?.videoID,
Defaults[.saveHistory]
else {
return
}
let time = player.currentTime()
let seconds = time.seconds
currentItem.playbackTime = time
let time = backend.currentTime
let seconds = time?.seconds ?? 0
let watch: Watch!
let watchFetchRequest = Watch.fetchRequest()

View File

@ -0,0 +1,613 @@
import AVFoundation
import Defaults
import Foundation
import MediaPlayer
import UIKit
/// `PlayerBackend` implementation built on AVFoundation's `AVPlayer`.
/// Plays single-asset streams (including HLS) directly, and adaptive streams
/// by building an `AVMutableComposition` from separate audio and video assets.
final class AVPlayerBackend: PlayerBackend {
    /// Asset keys loaded asynchronously before an item is inserted into the player.
    static let assetKeysToLoad = ["tracks", "playable", "duration"]

    var model: PlayerModel!
    var controls: PlayerControlsModel!
    var stream: Stream?
    var video: Video?

    /// Current playhead position reported by the underlying `AVPlayer`.
    var currentTime: CMTime? {
        avPlayer.currentTime()
    }

    var loadedVideo: Bool {
        !avPlayer.currentItem.isNil
    }

    /// Considered "loading" until the model has both an item and a valid time.
    var isLoadingVideo: Bool {
        model.currentItem == nil || model.time == nil || !model.time!.isValid
    }

    var isPlaying: Bool {
        avPlayer.timeControlStatus == .playing
    }

    var playerItemDuration: CMTime? {
        avPlayer.currentItem?.asset.duration
    }

    private(set) var avPlayer = AVPlayer()
    var controller: AppleAVPlayerViewController?

    // Asset/composition being prepared for the next player item.
    private var asset: AVURLAsset?
    private var composition = AVMutableComposition()
    // Tracks which media types (.audio/.video) finished loading into the composition.
    private var loadedCompositionAssets = [AVMediaType]()

    private var currentArtwork: MPMediaItemArtwork?
    private var frequentTimeObserver: Any?
    private var infrequentTimeObserver: Any?
    private var playerTimeControlStatusObserver: Any?
    private var statusObservation: NSKeyValueObservation?

    // Rate-limits persisting watch progress from the periodic time observers.
    private var timeObserverThrottle = Throttle(interval: 2)

    init(model: PlayerModel, controls: PlayerControlsModel?) {
        self.model = model
        self.controls = controls

        addFrequentTimeObserver()
        addInfrequentTimeObserver()
        addPlayerTimeControlStatusObserver()
    }

    /// Prefers HLS, then the highest-resolution adaptive stream, then any stream.
    func bestPlayable(_ streams: [Stream]) -> Stream? {
        streams.first { $0.kind == .hls } ??
            streams.filter { $0.kind == .adaptive }.max { $0.resolution < $1.resolution } ??
            streams.first
    }

    /// AVPlayer handles HLS/progressive streams and MP4 adaptive streams
    /// (either Piped's "MPEG_4" format label or an MP4 MIME type with h264).
    func canPlay(_ stream: Stream) -> Bool {
        stream.kind == .hls || stream.kind == .stream || stream.videoFormat == "MPEG_4" ||
            (stream.videoFormat.starts(with: "video/mp4") && stream.encoding == "h264")
    }

    /// Loads `stream` either as a single asset or as an audio+video composition.
    /// `upgrading` skips the artwork refresh (same video, different quality).
    func playStream(
        _ stream: Stream,
        of video: Video,
        preservingTime: Bool,
        upgrading: Bool
    ) {
        if let url = stream.singleAssetURL {
            model.logger.info("playing stream with one asset\(stream.kind == .hls ? " (HLS)" : ""): \(url)")
            loadSingleAsset(url, stream: stream, of: video, preservingTime: preservingTime)
        } else {
            model.logger.info("playing stream with many assets:")
            model.logger.info("composition audio asset: \(stream.audioAsset.url)")
            model.logger.info("composition video asset: \(stream.videoAsset.url)")
            loadComposition(stream, of: video, preservingTime: preservingTime)
        }

        if !upgrading {
            updateCurrentArtwork()
        }
    }

    func play() {
        guard avPlayer.timeControlStatus != .playing else {
            return
        }

        avPlayer.play()
    }

    func pause() {
        guard avPlayer.timeControlStatus != .paused else {
            return
        }

        avPlayer.pause()
    }

    func togglePlay() {
        isPlaying ? pause() : play()
    }

    func stop() {
        avPlayer.replaceCurrentItem(with: nil)
    }

    /// Seeks with a 1-second tolerance before the target and none after,
    /// so playback never resumes past the requested time.
    func seek(to time: CMTime, completionHandler: ((Bool) -> Void)?) {
        avPlayer.seek(
            to: time,
            toleranceBefore: .secondsInDefaultTimescale(1),
            toleranceAfter: .zero,
            completionHandler: completionHandler ?? { _ in }
        )
    }

    func seek(relative time: CMTime, completionHandler: ((Bool) -> Void)? = nil) {
        if let currentTime = currentTime {
            seek(to: currentTime + time, completionHandler: completionHandler)
        }
    }

    func setRate(_ rate: Float) {
        avPlayer.rate = rate
    }

    func closeItem() {
        avPlayer.replaceCurrentItem(with: nil)
    }

    // NOTE(review): both fullscreen methods invoke non-public AVPlayerView
    // selectors via string lookup — verify they still exist on OS updates.
    func enterFullScreen() {
        controller?.playerView
            .perform(NSSelectorFromString("enterFullScreenAnimated:completionHandler:"), with: false, with: nil)
    }

    func exitFullScreen() {
        controller?.playerView
            .perform(NSSelectorFromString("exitFullScreenAnimated:completionHandler:"), with: false, with: nil)
    }

    #if os(tvOS)
        /// tvOS: re-inserts the current item to leave PiP, restoring position
        /// and (after a delay) the playing state.
        func closePiP(wasPlaying: Bool) {
            let item = avPlayer.currentItem
            let time = avPlayer.currentTime()

            avPlayer.replaceCurrentItem(with: nil)

            guard !item.isNil else {
                return
            }

            avPlayer.seek(to: time)
            avPlayer.replaceCurrentItem(with: item)

            guard wasPlaying else {
                return
            }

            DispatchQueue.main.asyncAfter(deadline: .now() + 1) { [weak self] in
                self?.play()
            }
        }
    #else
        /// Other platforms: detaching and re-attaching the player to the view
        /// ends the PiP session.
        func closePiP(wasPlaying: Bool) {
            controller?.playerView.player = nil
            controller?.playerView.player = avPlayer

            guard wasPlaying else {
                return
            }

            DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) { [weak self] in
                self?.play()
            }
        }
    #endif

    // Controls are driven by the periodic time observers below, so these
    // PlayerBackend hooks are no-ops for the AVPlayer backend.
    func updateControls() {}
    func startControlsUpdates() {}
    func stopControlsUpdates() {}
    func setNeedsDrawing(_: Bool) {}

    /// Asynchronously loads a single URL asset; on success inserts the player
    /// item on the main queue, on failure surfaces the error on the model.
    private func loadSingleAsset(
        _ url: URL,
        stream: Stream,
        of video: Video,
        preservingTime: Bool = false
    ) {
        asset?.cancelLoading()
        asset = AVURLAsset(url: url)
        asset?.loadValuesAsynchronously(forKeys: Self.assetKeysToLoad) { [weak self] in
            var error: NSError?
            switch self?.asset?.statusOfValue(forKey: "duration", error: &error) {
            case .loaded:
                DispatchQueue.main.async { [weak self] in
                    self?.insertPlayerItem(stream, for: video, preservingTime: preservingTime)
                }
            case .failed:
                DispatchQueue.main.async { [weak self] in
                    self?.model.playerError = error
                }
            default:
                return
            }
        }
    }

    /// Kicks off loading of both halves of an adaptive stream; playback starts
    /// once `loadCompositionAsset` has completed for audio AND video.
    private func loadComposition(
        _ stream: Stream,
        of video: Video,
        preservingTime: Bool = false
    ) {
        loadedCompositionAssets = []
        loadCompositionAsset(stream.audioAsset, stream: stream, type: .audio, of: video, preservingTime: preservingTime, model: model)
        loadCompositionAsset(stream.videoAsset, stream: stream, type: .video, of: video, preservingTime: preservingTime, model: model)
    }

    /// Loads one media track into the shared composition. When both tracks
    /// (count == 2) are in, inserts the player item.
    private func loadCompositionAsset(
        _ asset: AVURLAsset,
        stream: Stream,
        type: AVMediaType,
        of video: Video,
        preservingTime: Bool = false,
        model: PlayerModel
    ) {
        asset.loadValuesAsynchronously(forKeys: Self.assetKeysToLoad) { [weak self] in
            guard let self = self else {
                return
            }
            model.logger.info("loading \(type.rawValue) track")

            let assetTracks = asset.tracks(withMediaType: type)

            guard let compositionTrack = self.composition.addMutableTrack(
                withMediaType: type,
                preferredTrackID: kCMPersistentTrackID_Invalid
            ) else {
                model.logger.critical("composition \(type.rawValue) addMutableTrack FAILED")
                return
            }

            guard let assetTrack = assetTracks.first else {
                model.logger.critical("asset \(type.rawValue) track FAILED")
                return
            }

            // NOTE(review): `try!` will crash on an invalid time range —
            // consider propagating the error to model.playerError instead.
            try! compositionTrack.insertTimeRange(
                CMTimeRange(start: .zero, duration: CMTime.secondsInDefaultTimescale(video.length)),
                of: assetTrack,
                at: .zero
            )

            model.logger.critical("\(type.rawValue) LOADED")

            // Ignore results that arrive after the user switched streams.
            guard model.streamSelection == stream else {
                model.logger.critical("IGNORING LOADED")
                return
            }

            self.loadedCompositionAssets.append(type)

            if self.loadedCompositionAssets.count == 2 {
                self.insertPlayerItem(stream, for: video, preservingTime: preservingTime)
            }
        }
    }

    /// Builds the AVPlayerItem from the prepared asset/composition, wires
    /// observers and metadata, then starts playback — optionally restoring the
    /// previously preserved time (quality switch / backend switch).
    private func insertPlayerItem(
        _ stream: Stream,
        for video: Video,
        preservingTime: Bool = false
    ) {
        removeItemDidPlayToEndTimeObserver()

        model.playerItem = playerItem(stream)
        guard model.playerItem != nil else {
            return
        }

        addItemDidPlayToEndTimeObserver()
        attachMetadata(to: model.playerItem!, video: video, for: stream)

        DispatchQueue.main.async {
            self.stream = stream
            self.video = video
            self.model.stream = stream
            self.composition = AVMutableComposition()
            self.asset = nil
        }

        let startPlaying = {
            #if !os(macOS)
                try? AVAudioSession.sharedInstance().setActive(true)
            #endif

            if self.isAutoplaying(self.model.playerItem!) {
                DispatchQueue.main.asyncAfter(deadline: .now() + 0.3) { [weak self] in
                    guard let self = self else {
                        return
                    }

                    // Skip a sponsor segment that starts right at the
                    // beginning, unless we are restoring a saved position.
                    if !preservingTime,
                       let segment = self.model.sponsorBlock.segments.first,
                       segment.start < 3,
                       self.model.lastSkipped.isNil
                    {
                        self.avPlayer.seek(
                            to: segment.endTime,
                            toleranceBefore: .secondsInDefaultTimescale(1),
                            toleranceAfter: .zero
                        ) { finished in
                            guard finished else {
                                return
                            }

                            self.model.lastSkipped = segment
                            self.model.play()
                        }
                    } else {
                        self.model.play()
                    }
                }
            }
        }

        let replaceItemAndSeek = {
            guard video == self.model.currentVideo else {
                return
            }
            self.avPlayer.replaceCurrentItem(with: self.model.playerItem)
            self.seekToPreservedTime { finished in
                guard finished else {
                    return
                }
                self.model.preservedTime = nil

                startPlaying()
            }
        }

        // NOTE(review): in the preservingTime branches `startPlaying()` runs
        // both immediately and again inside replaceItemAndSeek's seek
        // completion — confirm the double invocation is intentional.
        if preservingTime {
            if model.preservedTime.isNil {
                model.saveTime {
                    replaceItemAndSeek()
                    startPlaying()
                }
            } else {
                replaceItemAndSeek()
                startPlaying()
            }
        } else {
            avPlayer.replaceCurrentItem(with: model.playerItem)
            startPlaying()
        }
    }

    /// Seeks to `model.preservedTime`. NOTE(review): when no time is preserved
    /// the guard returns without invoking `completionHandler`, so callers
    /// waiting on it stall — verify that is acceptable.
    private func seekToPreservedTime(completionHandler: @escaping (Bool) -> Void = { _ in }) {
        guard let time = model.preservedTime else {
            return
        }

        avPlayer.seek(
            to: time,
            toleranceBefore: .secondsInDefaultTimescale(1),
            toleranceAfter: .zero,
            completionHandler: completionHandler
        )
    }

    /// Single-asset streams use the loaded asset; adaptive streams use the
    /// audio+video composition assembled by `loadCompositionAsset`.
    private func playerItem(_: Stream) -> AVPlayerItem? {
        if let asset = asset {
            return AVPlayerItem(asset: asset)
        } else {
            return AVPlayerItem(asset: composition)
        }
    }

    /// Attaches external (tvOS/iOS) metadata and starts status observation.
    /// NOTE(review): `Data(contentsOf:)` fetches the thumbnail synchronously
    /// on the calling thread — consider moving off the main thread.
    private func attachMetadata(to item: AVPlayerItem, video: Video, for _: Stream? = nil) {
        #if !os(macOS)
            var externalMetadata = [
                makeMetadataItem(.commonIdentifierTitle, value: video.title),
                makeMetadataItem(.quickTimeMetadataGenre, value: video.genre ?? ""),
                makeMetadataItem(.commonIdentifierDescription, value: video.description ?? "")
            ]
            if let thumbnailData = try? Data(contentsOf: video.thumbnailURL(quality: .medium)!),
               let image = UIImage(data: thumbnailData),
               let pngData = image.pngData()
            {
                let artworkItem = makeMetadataItem(.commonIdentifierArtwork, value: pngData)
                externalMetadata.append(artworkItem)
            }

            item.externalMetadata = externalMetadata
        #endif

        item.preferredForwardBufferDuration = 5

        observePlayerItemStatus(item)
    }

    #if !os(macOS)
        /// Wraps a key/value pair in an immutable `AVMetadataItem` for the
        /// external-metadata display (e.g. tvOS info panel).
        private func makeMetadataItem(_ identifier: AVMetadataIdentifier, value: Any) -> AVMetadataItem {
            let item = AVMutableMetadataItem()

            item.identifier = identifier
            item.value = value as? NSCopying & NSObjectProtocol
            item.extendedLanguageTag = "und"

            return item.copy() as! AVMetadataItem
        }
    #endif

    /// An item should auto-play only while it is still the player's current item.
    func isAutoplaying(_ item: AVPlayerItem) -> Bool {
        avPlayer.currentItem == item
    }

    /// KVO on item status: starts playback when ready, surfaces errors otherwise.
    private func observePlayerItemStatus(_ item: AVPlayerItem) {
        statusObservation?.invalidate()
        statusObservation = item.observe(\.status, options: [.old, .new]) { [weak self] playerItem, _ in
            guard let self = self else {
                return
            }

            switch playerItem.status {
            case .readyToPlay:
                if self.isAutoplaying(playerItem) {
                    self.model.play()
                }
            case .failed:
                self.model.playerError = item.error

            default:
                return
            }
        }
    }

    // NOTE(review): `playerItem` here resolves to the `playerItem(_:)` method
    // reference (there is no stored `playerItem` property on this class), so
    // the notification's object filter likely never matches an item —
    // presumably `model.playerItem` was intended; verify.
    private func addItemDidPlayToEndTimeObserver() {
        NotificationCenter.default.addObserver(
            self,
            selector: #selector(itemDidPlayToEndTime),
            name: NSNotification.Name.AVPlayerItemDidPlayToEndTime,
            object: playerItem
        )
    }

    private func removeItemDidPlayToEndTimeObserver() {
        NotificationCenter.default.removeObserver(
            self,
            name: NSNotification.Name.AVPlayerItemDidPlayToEndTime,
            object: playerItem
        )
    }

    /// Marks the finished item as watched, then either tears down (empty
    /// queue) or advances to the next queued item.
    @objc func itemDidPlayToEndTime() {
        model.prepareCurrentItemForHistory(finished: true)

        if model.queue.isEmpty {
            #if !os(macOS)
                try? AVAudioSession.sharedInstance().setActive(false)
            #endif
            model.resetQueue()
            #if os(tvOS)
                controller?.playerView.dismiss(animated: false) { [weak self] in
                    self?.controller?.dismiss(animated: true)
                }
            #else
                model.hide()
            #endif
        } else {
            model.advanceToNextItem()
        }
    }

    /// Every 0.5 s: refreshes controls, Now Playing info and sponsor-segment
    /// handling while an item is loaded.
    private func addFrequentTimeObserver() {
        let interval = CMTime.secondsInDefaultTimescale(0.5)

        frequentTimeObserver = avPlayer.addPeriodicTimeObserver(
            forInterval: interval,
            queue: .main
        ) { [weak self] _ in
            guard let self = self else {
                return
            }

            guard !self.model.currentItem.isNil else {
                return
            }

            self.controls.duration = self.playerItemDuration ?? .zero
            self.controls.currentTime = self.currentTime ?? .zero

            #if !os(tvOS)
                self.updateNowPlayingInfo()
            #endif

            if let currentTime = self.currentTime {
                self.model.handleSegments(at: currentTime)
            }
        }
    }

    /// Every 5 s (further throttled to 2 s): persists watch progress.
    private func addInfrequentTimeObserver() {
        let interval = CMTime.secondsInDefaultTimescale(5)

        infrequentTimeObserver = avPlayer.addPeriodicTimeObserver(
            forInterval: interval,
            queue: .main
        ) { [weak self] _ in
            guard let self = self else {
                return
            }

            guard !self.model.currentItem.isNil else {
                return
            }

            self.timeObserverThrottle.execute {
                self.model.updateWatch()
            }
        }
    }

    /// Mirrors play/pause state into the controls model, re-applies the chosen
    /// playback rate, and (macOS) toggles the screen saver.
    private func addPlayerTimeControlStatusObserver() {
        playerTimeControlStatusObserver = avPlayer.observe(\.timeControlStatus) { [weak self] player, _ in
            guard let self = self,
                  self.avPlayer == player
            else {
                return
            }

            DispatchQueue.main.async {
                self.controls.isPlaying = player.timeControlStatus == .playing
            }

            if player.timeControlStatus != .waitingToPlayAtSpecifiedRate {
                DispatchQueue.main.async { [weak self] in
                    self?.model.objectWillChange.send()
                }
            }

            // AVPlayer resets rate to 1 on play; restore the user's rate.
            if player.timeControlStatus == .playing, player.rate != self.model.currentRate {
                player.rate = self.model.currentRate
            }

            #if os(macOS)
                if player.timeControlStatus == .playing {
                    ScreenSaverManager.shared.disable(reason: "Yattee is playing video")
                } else {
                    ScreenSaverManager.shared.enable()
                }
            #endif

            self.timeObserverThrottle.execute {
                self.model.updateWatch()
            }
        }
    }

    /// Caches the current video's thumbnail as Now Playing artwork.
    /// NOTE(review): synchronous `Data(contentsOf:)` network fetch — verify
    /// this is never called on the main thread.
    private func updateCurrentArtwork() {
        guard let thumbnailData = try? Data(contentsOf: model.currentItem.video.thumbnailURL(quality: .medium)!) else {
            return
        }

        #if os(macOS)
            let image = NSImage(data: thumbnailData)
        #else
            let image = UIImage(data: thumbnailData)
        #endif

        if image.isNil {
            return
        }

        currentArtwork = MPMediaItemArtwork(boundsSize: image!.size) { _ in image! }
    }

    /// Publishes title/artist/position/duration to the system Now Playing center.
    fileprivate func updateNowPlayingInfo() {
        var nowPlayingInfo: [String: AnyObject] = [
            MPMediaItemPropertyTitle: model.currentItem.video.title as AnyObject,
            MPMediaItemPropertyArtist: model.currentItem.video.author as AnyObject,
            MPNowPlayingInfoPropertyIsLiveStream: model.currentItem.video.live as AnyObject,
            MPNowPlayingInfoPropertyElapsedPlaybackTime: avPlayer.currentTime().seconds as AnyObject,
            MPNowPlayingInfoPropertyPlaybackQueueCount: model.queue.count as AnyObject,
            MPMediaItemPropertyMediaType: MPMediaType.anyVideo.rawValue as AnyObject
        ]

        if !currentArtwork.isNil {
            nowPlayingInfo[MPMediaItemPropertyArtwork] = currentArtwork as AnyObject
        }

        if !model.currentItem.video.live {
            let itemDuration = model.currentItem.videoDuration ?? model.currentItem.duration
            let duration = itemDuration.isFinite ? Double(itemDuration) : nil

            if !duration.isNil {
                nowPlayingInfo[MPMediaItemPropertyPlaybackDuration] = duration as AnyObject
            }
        }

        MPNowPlayingInfoCenter.default().nowPlayingInfo = nowPlayingInfo
    }
}

View File

@ -0,0 +1,305 @@
import AVFAudio
import CoreMedia
import Foundation
import Logging
import SwiftUI
/// `PlayerBackend` implementation driving libmpv through `MPVClient`.
/// State (time, duration) is pulled from the client on a repeating timer
/// rather than pushed by observers as in the AVPlayer backend.
final class MPVBackend: PlayerBackend {
    private var logger = Logger(label: "mpv-backend")

    var model: PlayerModel!
    var controls: PlayerControlsModel!
    var stream: Stream?
    var video: Video?
    // Refreshed by getClientUpdates(); nil until the first poll.
    var currentTime: CMTime?

    var loadedVideo = false
    var isLoadingVideo = true
    // NOTE(review): the didSet pairs startClientUpdates (play) with
    // stopControlsUpdates (pause) — presumably stopClientUpdates was meant;
    // pause() does call stopClientUpdates separately, so verify intent.
    var isPlaying = true { didSet {
        if isPlaying {
            startClientUpdates()
        } else {
            stopControlsUpdates()
        }
        updateControlsIsPlaying()
    }}
    var playerItemDuration: CMTime?

    var controller: MPVViewController!
    // Back-reference is set on assignment so the client can forward mpv events.
    var client: MPVClient! { didSet { client.backend = self } }

    // Fires getClientUpdates once per second while playing.
    private var clientTimer: RepeatingTimer!

    // One-shot callback invoked on MPV_EVENT_FILE_LOADED.
    private var onFileLoaded: (() -> Void)?

    private var controlsUpdates = false
    private var timeObserverThrottle = Throttle(interval: 2)

    init(model: PlayerModel, controls: PlayerControlsModel? = nil) {
        self.model = model
        self.controls = controls

        clientTimer = .init(timeInterval: 1)
        clientTimer.eventHandler = getClientUpdates
    }

    /// Prefers the highest-resolution adaptive stream (mpv muxes audio+video
    /// itself), falling back to HLS, then to anything.
    func bestPlayable(_ streams: [Stream]) -> Stream? {
        streams.filter { $0.kind == .adaptive }.max { $0.resolution < $1.resolution } ??
            streams.first { $0.kind == .hls } ??
            streams.first
    }

    /// mpv plays anything with a known resolution except AV1.
    func canPlay(_ stream: Stream) -> Bool {
        stream.resolution != .unknown && stream.format != "AV1"
    }

    /// Loads `stream`: single-asset URLs load directly; adaptive streams load
    /// the video file and attach the audio track once the file has loaded.
    func playStream(_ stream: Stream, of video: Video, preservingTime: Bool, upgrading _: Bool) {
        let updateCurrentStream = {
            DispatchQueue.main.async { [weak self] in
                self?.stream = stream
                self?.video = video
                self?.model.stream = stream
            }
        }

        let startPlaying = {
            #if !os(macOS)
                try? AVAudioSession.sharedInstance().setActive(true)
            #endif

            DispatchQueue.main.async { [weak self] in
                guard let self = self else {
                    return
                }

                self.startClientUpdates()

                // Skip a sponsor segment at the very start, unless restoring
                // a saved position.
                if !preservingTime,
                   let segment = self.model.sponsorBlock.segments.first,
                   segment.start < 3,
                   self.model.lastSkipped.isNil
                {
                    self.seek(to: segment.endTime) { finished in
                        guard finished else {
                            return
                        }

                        self.model.lastSkipped = segment
                        self.play()
                    }
                } else {
                    self.play()
                }
            }
        }

        let replaceItem: (CMTime?) -> Void = { [weak self] time in
            guard let self = self else {
                return
            }

            self.stop()

            if let url = stream.singleAssetURL {
                self.onFileLoaded = { [weak self] in
                    self?.setIsLoadingVideo(false)

                    updateCurrentStream()
                    startPlaying()
                }

                self.client.loadFile(url, time: time) { [weak self] _ in
                    self?.setIsLoadingVideo(true)
                }
            } else {
                self.onFileLoaded = { [weak self] in
                    // Attach the separate audio track after the video loads.
                    self?.client.addAudio(stream.audioAsset.url) { _ in
                        self?.setIsLoadingVideo(false)

                        updateCurrentStream()
                        startPlaying()
                    }
                }

                self.client.loadFile(stream.videoAsset.url, time: time) { [weak self] _ in
                    self?.setIsLoadingVideo(true)
                    self?.pause()
                }
            }
        }

        if preservingTime {
            if model.preservedTime.isNil {
                model.saveTime {
                    replaceItem(self.model.preservedTime)
                }
            } else {
                replaceItem(self.model.preservedTime)
            }
        } else {
            replaceItem(nil)
        }
    }

    func play() {
        isPlaying = true
        startClientUpdates()

        client?.play()
    }

    func pause() {
        isPlaying = false
        stopClientUpdates()

        client?.pause()
    }

    func togglePlay() {
        isPlaying ? pause() : play()
    }

    func stop() {
        client?.stop()
    }

    /// Seeks and immediately refreshes cached time/controls; the completion is
    /// always reported as successful.
    func seek(to time: CMTime, completionHandler: ((Bool) -> Void)?) {
        client.seek(to: time) { [weak self] _ in
            self?.getClientUpdates()
            self?.updateControls()
            completionHandler?(true)
        }
    }

    func seek(relative time: CMTime, completionHandler: ((Bool) -> Void)? = nil) {
        client.seek(relative: time) { [weak self] _ in
            self?.getClientUpdates()
            self?.updateControls()
            completionHandler?(true)
        }
    }

    func setRate(_: Float) {
        // TODO: Implement rate change
    }

    // No-ops: mpv backend has no separate item/fullscreen/PiP handling here.
    func closeItem() {}

    func enterFullScreen() {}

    func exitFullScreen() {}

    func closePiP(wasPlaying _: Bool) {}

    /// Pushes the cached time/duration into the controls model on the main queue.
    func updateControls() {
        DispatchQueue.main.async { [weak self] in
            self?.logger.info("updating controls")
            self?.controls.currentTime = self?.currentTime ?? .zero
            self?.controls.duration = self?.playerItemDuration ?? .zero
        }
    }

    func startControlsUpdates() {
        self.logger.info("starting controls updates")
        controlsUpdates = true
    }

    func stopControlsUpdates() {
        self.logger.info("stopping controls updates")
        controlsUpdates = false
    }

    func startClientUpdates() {
        clientTimer.resume()
    }

    /// Timer tick: pulls time/duration from mpv, updates controls (when
    /// visible), handles sponsor segments and throttled watch persistence.
    private func getClientUpdates() {
        self.logger.info("getting client updates")

        currentTime = client?.currentTime
        playerItemDuration = client?.duration

        if controlsUpdates {
            updateControls()
        }

        if let currentTime = currentTime {
            model.handleSegments(at: currentTime)
        }

        self.timeObserverThrottle.execute {
            self.model.updateWatch()
        }
    }

    private func stopClientUpdates() {
        clientTimer.suspend()
    }

    private func updateControlsIsPlaying() {
        DispatchQueue.main.async { [weak self] in
            self?.controls.isPlaying = self?.isPlaying ?? false
        }
    }

    private func setIsLoadingVideo(_ value: Bool) {
        isLoadingVideo = value

        DispatchQueue.main.async { [weak self] in
            self?.controls.isLoadingVideo = value
        }
    }

    /// Dispatches raw mpv events delivered from `MPVClient.readEvents()`.
    /// Runs on the client's background queue.
    func handle(_ event: UnsafePointer<mpv_event>!) {
        logger.info("\(String(cString: mpv_event_name(event.pointee.event_id)))")

        switch event.pointee.event_id {
        case MPV_EVENT_SHUTDOWN:
            mpv_destroy(client.mpv)
            client.mpv = nil

        case MPV_EVENT_LOG_MESSAGE:
            let logmsg = UnsafeMutablePointer<mpv_event_log_message>(OpaquePointer(event.pointee.data))
            logger.info(.init(stringLiteral: "log: \(String(cString: (logmsg!.pointee.prefix)!)), "
                    + "\(String(cString: (logmsg!.pointee.level)!)), "
                    + "\(String(cString: (logmsg!.pointee.text)!))"))

        case MPV_EVENT_FILE_LOADED:
            // One-shot: consume and clear the pending load continuation.
            onFileLoaded?()
            onFileLoaded = nil

        case MPV_EVENT_END_FILE:
            break
            // DispatchQueue.main.async { [weak self] in
            // TODO: handle EOF
            //    self?.handleEndOfFile(event)
            // }

        default:
            logger.info(.init(stringLiteral: "event: \(String(cString: mpv_event_name(event.pointee.event_id)))"))
        }
    }

    /// End-of-file handling (currently unused, see MPV_EVENT_END_FILE above):
    /// records history, then tears down or advances the queue.
    func handleEndOfFile(_: UnsafePointer<mpv_event>!) {
        guard !isLoadingVideo else {
            return
        }

        model.prepareCurrentItemForHistory(finished: true)

        if model.queue.isEmpty {
            #if !os(macOS)
                try? AVAudioSession.sharedInstance().setActive(false)
            #endif
            model.resetQueue()

            model.hide()
        } else {
            model.advanceToNextItem()
        }
    }

    func setNeedsDrawing(_ needsDrawing: Bool) {
        client?.setNeedsDrawing(needsDrawing)
    }
}

View File

@ -0,0 +1,229 @@
import Foundation
import Logging
#if !os(macOS)
import CoreMedia
import Siesta
import UIKit
#endif
/// Thin Swift wrapper around the libmpv C client API: owns the mpv handle,
/// its OpenGL render context, and the background queue that drains mpv events.
final class MPVClient: ObservableObject {
    private var logger = Logger(label: "mpv-client")

    var mpv: OpaquePointer!
    var mpvGL: OpaquePointer!
    var queue: DispatchQueue!
    var glView: MPVOGLView!
    var backend: MPVBackend!

    /// Creates the mpv handle, configures it for libmpv OpenGL rendering and
    /// returns the GL view mpv draws into. Exits the process if mpv cannot be
    /// created or the render context cannot be initialized.
    func create(frame: CGRect) -> MPVOGLView {
        glView = MPVOGLView(frame: frame)

        mpv = mpv_create()
        if mpv == nil {
            print("failed creating context\n")
            exit(1)
        }

        checkError(mpv_request_log_messages(mpv, "warn"))
        checkError(mpv_initialize(mpv))
        checkError(mpv_set_option_string(mpv, "vo", "libmpv"))
        checkError(mpv_set_option_string(mpv, "hwdec", "yes"))
        // Match the display's refresh rate so mpv can resample video timing.
        checkError(mpv_set_option_string(mpv, "override-display-fps", "\(UIScreen.main.maximumFramesPerSecond)"))
        checkError(mpv_set_option_string(mpv, "video-sync", "display-resample"))

        let api = UnsafeMutableRawPointer(mutating: (MPV_RENDER_API_TYPE_OPENGL as NSString).utf8String)
        var initParams = mpv_opengl_init_params(
            get_proc_address: getProcAddress(_:_:),
            get_proc_address_ctx: nil,
            extra_exts: nil
        )

        withUnsafeMutablePointer(to: &initParams) { initParams in
            // Params array must be terminated by an empty mpv_render_param.
            var params = [
                mpv_render_param(type: MPV_RENDER_PARAM_API_TYPE, data: api),
                mpv_render_param(type: MPV_RENDER_PARAM_OPENGL_INIT_PARAMS, data: initParams),
                mpv_render_param()
            ]

            var mpvGL: OpaquePointer?
            if mpv_render_context_create(&mpvGL, mpv, &params) < 0 {
                puts("failed to initialize mpv GL context")
                exit(1)
            }

            glView.mpvGL = UnsafeMutableRawPointer(mpvGL)

            // mpv calls glUpdate with an unretained pointer to glView whenever
            // a new frame should be drawn.
            mpv_render_context_set_update_callback(
                mpvGL,
                glUpdate(_:),
                UnsafeMutableRawPointer(Unmanaged.passUnretained(glView).toOpaque())
            )
        }

        queue = DispatchQueue(label: "mpv", qos: .background)

        queue!.async {
            // wakeUp receives an unretained pointer back to self.
            mpv_set_wakeup_callback(self.mpv, wakeUp, UnsafeMutableRawPointer(Unmanaged.passUnretained(self).toOpaque()))
        }

        return glView
    }

    /// Drains all pending mpv events on the background queue and forwards
    /// them to the backend; stops when the event queue is empty.
    func readEvents() {
        queue?.async { [self] in
            while self.mpv != nil {
                let event = mpv_wait_event(self.mpv, 0)
                if event!.pointee.event_id == MPV_EVENT_NONE {
                    break
                }
                backend.handle(event)
            }
        }
    }

    /// Issues a `loadfile` command; when `time` is given, starts playback at
    /// that position using the "replace" flag plus a `start=` option.
    func loadFile(_ url: URL, time: CMTime? = nil, completionHandler: ((Int32) -> Void)? = nil) {
        var args = [url.absoluteString]

        if let time = time {
            args.append("replace")
            args.append("start=\(Int(time.seconds))")
        }

        command("loadfile", args: args, returnValueCallback: completionHandler)
    }

    /// Attaches an external audio track to the currently loaded file.
    func addAudio(_ url: URL, completionHandler: ((Int32) -> Void)? = nil) {
        command("audio-add", args: [url.absoluteString], returnValueCallback: completionHandler)
    }

    // Play/pause are expressed through mpv's "pause" flag.
    func play() {
        setFlagAsync("pause", false)
    }

    func pause() {
        setFlagAsync("pause", true)
    }

    func togglePlay() {
        command("cycle", args: ["pause"])
    }

    func stop() {
        command("stop")
    }

    var currentTime: CMTime {
        CMTime.secondsInDefaultTimescale(getDouble("time-pos"))
    }

    var duration: CMTime {
        CMTime.secondsInDefaultTimescale(getDouble("duration"))
    }

    /// Relative seek; the callback always reports success regardless of the
    /// command's actual return value.
    func seek(relative time: CMTime, completionHandler: ((Bool) -> Void)? = nil) {
        command("seek", args: [String(time.seconds)]) { _ in
            completionHandler?(true)
        }
    }

    func seek(to time: CMTime, completionHandler: ((Bool) -> Void)? = nil) {
        command("seek", args: [String(time.seconds), "absolute"]) { _ in
            completionHandler?(true)
        }
    }

    /// Resizes the GL view; requests larger than the screen are ignored.
    func setSize(_ width: Double, _ height: Double) {
        logger.info("setting player size to \(width),\(height)")
        #if !os(macOS)
            guard width <= UIScreen.main.bounds.width, height <= UIScreen.main.bounds.height else {
                logger.info("requested size is greater than screen size, ignoring")
                return
            }
        #endif

        glView?.frame = CGRect(x: 0, y: 0, width: width, height: height)
    }

    func setNeedsDrawing(_ needsDrawing: Bool) {
        logger.info("needs drawing: \(needsDrawing)")
        glView.needsDrawing = needsDrawing
    }

    /// Runs an mpv command synchronously. Arguments are copied into C strings
    /// (freed on exit); the raw return value is passed to the callback.
    func command(
        _ command: String,
        args: [String?] = [],
        checkForErrors: Bool = true,
        returnValueCallback: ((Int32) -> Void)? = nil
    ) {
        guard mpv != nil else {
            return
        }
        var cargs = makeCArgs(command, args).map { $0.flatMap { UnsafePointer<CChar>(strdup($0)) } }
        defer {
            for ptr in cargs where ptr != nil {
                free(UnsafeMutablePointer(mutating: ptr!))
            }
        }
        logger.info("\(command) -- \(args)")
        let returnValue = mpv_command(mpv, &cargs)
        if checkForErrors {
            checkError(returnValue)
        }
        if let cb = returnValueCallback {
            cb(returnValue)
        }
    }

    private func setFlagAsync(_ name: String, _ flag: Bool) {
        var data: Int = flag ? 1 : 0
        mpv_set_property_async(mpv, 0, name, MPV_FORMAT_FLAG, &data)
    }

    // NOTE(review): the mpv_get_property return value is ignored — on error
    // this silently returns 0.0; verify callers tolerate that.
    private func getDouble(_ name: String) -> Double {
        var data = Double()
        mpv_get_property(mpv, name, MPV_FORMAT_DOUBLE, &data)
        return data
    }

    /// Prepends the command name and appends the nil terminator mpv_command
    /// requires for its argv-style argument list.
    private func makeCArgs(_ command: String, _ args: [String?]) -> [String?] {
        if !args.isEmpty, args.last == nil {
            fatalError("Command do not need a nil suffix")
        }

        var strArgs = args
        strArgs.insert(command, at: 0)
        strArgs.append(nil)

        return strArgs
    }

    /// Logs (but does not throw on) negative mpv API status codes.
    func checkError(_ status: CInt) {
        if status < 0 {
            logger.error(.init(stringLiteral: "MPV API error: \(String(cString: mpv_error_string(status)))\n"))
        }
    }
}
/// mpv render-context callback: resolves an OpenGL ES symbol by name from the
/// system OpenGL ES framework bundle (CFBundle-based `dlsym` equivalent).
/// The first (context) parameter is unused.
private func getProcAddress(_: UnsafeMutableRawPointer?, _ name: UnsafePointer<Int8>?) -> UnsafeMutableRawPointer? {
    let symbolName = CFStringCreateWithCString(kCFAllocatorDefault, name, CFStringBuiltInEncodings.ASCII.rawValue)
    let addr = CFBundleGetFunctionPointerForName(CFBundleGetBundleWithIdentifier("com.apple.opengles" as CFString), symbolName)

    return addr
}
/// mpv render-update callback: `ctx` is the unretained `MPVOGLView` pointer
/// registered in `MPVClient.create(frame:)`. Schedules a redraw on the main
/// queue unless drawing is currently suppressed.
private func glUpdate(_ ctx: UnsafeMutableRawPointer?) {
    let glView = unsafeBitCast(ctx, to: MPVOGLView.self)

    guard glView.needsDrawing else {
        return
    }

    DispatchQueue.main.async {
        glView.setNeedsDisplay()
    }
}
/// mpv wakeup callback: `context` is the unretained `MPVClient` pointer
/// registered in `create(frame:)`; triggers a drain of pending mpv events.
private func wakeUp(_ context: UnsafeMutableRawPointer?) {
    let client = unsafeBitCast(context, to: MPVClient.self)
    client.readEvents()
}

View File

@ -0,0 +1,67 @@
import CoreMedia
import Defaults
import Foundation
/// Abstraction over a playback engine (AVPlayer or mpv) used by `PlayerModel`,
/// covering stream selection, transport control, seeking and controls updates.
protocol PlayerBackend {
    var model: PlayerModel! { get set }
    var controls: PlayerControlsModel! { get set }
    var stream: Stream? { get set }
    var video: Video? { get set }

    // Playback state exposed to the UI.
    var currentTime: CMTime? { get }

    var loadedVideo: Bool { get }
    var isLoadingVideo: Bool { get }

    var isPlaying: Bool { get }
    var playerItemDuration: CMTime? { get }

    /// Picks the preferred stream for this engine from the available ones.
    func bestPlayable(_ streams: [Stream]) -> Stream?
    /// Whether this engine can play the given stream at all.
    func canPlay(_ stream: Stream) -> Bool

    func playStream(
        _ stream: Stream,
        of video: Video,
        preservingTime: Bool,
        upgrading: Bool
    )

    func play()
    func pause()
    func togglePlay()

    func stop()

    func seek(to time: CMTime, completionHandler: ((Bool) -> Void)?)
    func seek(to seconds: Double, completionHandler: ((Bool) -> Void)?)
    func seek(relative time: CMTime, completionHandler: ((Bool) -> Void)?)

    func setRate(_ rate: Float)

    func closeItem()

    func enterFullScreen()
    func exitFullScreen()

    func closePiP(wasPlaying: Bool)

    // Hooks for engines that push state to the controls overlay on demand.
    func updateControls()
    func startControlsUpdates()
    func stopControlsUpdates()

    func setNeedsDrawing(_ needsDrawing: Bool)
}
extension PlayerBackend {
    // Convenience overloads giving the seek requirements a default nil
    // completion handler; the calls dispatch to the conforming type's witness.
    // NOTE(review): because these signatures match the protocol requirements,
    // they also act as default implementations — a conformer that omits
    // seek(to:completionHandler:) or seek(relative:completionHandler:) would
    // compile but recurse infinitely at runtime; verify all backends implement
    // both.
    func seek(to time: CMTime, completionHandler: ((Bool) -> Void)? = nil) {
        seek(to: time, completionHandler: completionHandler)
    }

    /// Seconds-based convenience that converts to CMTime in the app's default
    /// timescale before delegating to the CMTime-based requirement.
    func seek(to seconds: Double, completionHandler: ((Bool) -> Void)? = nil) {
        seek(to: .secondsInDefaultTimescale(seconds), completionHandler: completionHandler)
    }

    func seek(relative time: CMTime, completionHandler: ((Bool) -> Void)? = nil) {
        seek(relative: time, completionHandler: completionHandler)
    }
}

View File

@ -0,0 +1,16 @@
import Defaults
import Foundation
/// Identifies which playback engine the app uses; persisted via Defaults.
enum PlayerBackendType: String, CaseIterable, Defaults.Serializable {
    case mpv
    case appleAVPlayer

    /// User-facing name for settings/pickers.
    var label: String {
        switch self {
        case .mpv: return "MPV"
        case .appleAVPlayer: return "AVPlayer"
        }
    }
}

View File

@ -0,0 +1,114 @@
import CoreMedia
import Foundation
import SwiftUI
/// Observable state for the on-screen player controls overlay: play state,
/// time/duration display, auto-hide timer and fullscreen handling.
final class PlayerControlsModel: ObservableObject {
    @Published var isLoadingVideo = true
    @Published var isPlaying = true
    @Published var currentTime = CMTime.zero
    @Published var duration = CMTime.zero
    // Showing the overlay starts backend-driven updates and the hide timer;
    // hiding it stops both.
    @Published var presentingControls = false { didSet { handlePresentationChange() } }
    @Published var timer: Timer?
    @Published var playingFullscreen = false

    var player: PlayerModel!

    /// "current / duration" string; appends the duration without sponsor
    /// segments in parentheses when it differs.
    var playbackTime: String {
        guard let current = currentTime.seconds.formattedAsPlaybackTime(),
              let duration = duration.seconds.formattedAsPlaybackTime()
        else {
            return "--:-- / --:--"
        }

        var withoutSegments = ""
        if let withoutSegmentsDuration = playerItemDurationWithoutSponsorSegments,
           self.duration.seconds != withoutSegmentsDuration
        {
            withoutSegments = " (\(withoutSegmentsDuration.formattedAsPlaybackTime() ?? "--:--"))"
        }

        return "\(current) / \(duration)\(withoutSegments)"
    }

    /// Duration in seconds with sponsor segments subtracted, if available.
    var playerItemDurationWithoutSponsorSegments: Double? {
        guard let duration = player.playerItemDurationWithoutSponsorSegments else {
            return nil
        }

        return duration.seconds
    }

    func handlePresentationChange() {
        if presentingControls {
            DispatchQueue.main.async { [weak self] in
                self?.player.backend.startControlsUpdates()
                self?.resetTimer()
            }
        } else {
            player.backend.stopControlsUpdates()
            timer?.invalidate()
            timer = nil
        }
    }

    func show() {
        withAnimation(PlayerControls.animation) {
            player.backend.updateControls()
            presentingControls = true
        }
    }

    func hide() {
        withAnimation(PlayerControls.animation) {
            presentingControls = false
        }
    }

    func toggle() {
        withAnimation(PlayerControls.animation) {
            // Refresh displayed time/duration before revealing the overlay.
            if !presentingControls {
                player.backend.updateControls()
            }
            presentingControls.toggle()
        }
    }

    /// Toggles fullscreen to the opposite of `value` and locks/unlocks the
    /// interface orientation accordingly (already-landscape devices keep
    /// their orientation when entering fullscreen).
    func toggleFullscreen(_ value: Bool) {
        withAnimation(Animation.easeOut) {
            resetTimer()
            withAnimation(PlayerControls.animation) {
                playingFullscreen = !value
            }

            if playingFullscreen {
                guard !(UIApplication.shared.windows.first?.windowScene?.interfaceOrientation.isLandscape ?? true) else {
                    return
                }
                Orientation.lockOrientation(.landscape, andRotateTo: .landscapeRight)
            } else {
                Orientation.lockOrientation(.allButUpsideDown, andRotateTo: .portrait)
            }
        }
    }

    func reset() {
        currentTime = .zero
        duration = .zero
    }

    /// (Re)starts the 5-second auto-hide timer.
    func resetTimer() {
        removeTimer()
        // Fix: capture self weakly on the timer closure itself. Previously
        // the [weak self] list sat on the inner withAnimation closure, which
        // is only created when the timer fires — so the timer closure held a
        // strong reference to self for up to 5 seconds after release.
        timer = Timer.scheduledTimer(withTimeInterval: 5.0, repeats: false) { [weak self] _ in
            withAnimation(PlayerControls.animation) {
                self?.presentingControls = false
                self?.player.backend.stopControlsUpdates()
            }
        }
    }

    func removeTimer() {
        timer?.invalidate()
        timer = nil
    }
}

View File

@ -16,18 +16,35 @@ import SwiftyJSON
final class PlayerModel: ObservableObject {
static let availableRates: [Float] = [0.5, 0.67, 0.8, 1, 1.25, 1.5, 2]
static let assetKeysToLoad = ["tracks", "playable", "duration"]
let logger = Logger(label: "stream.yattee.app")
private(set) var player = AVPlayer()
var playerView = Player()
var controller: PlayerViewController?
var avPlayerView = AVPlayerView()
var playerItem: AVPlayerItem?
@Published var presentingPlayer = false { didSet { handlePresentationChange() } }
var mpvPlayerView = MPVPlayerView()
@Published var presentingPlayer = false { didSet { handlePresentationChange() } }
@Published var activeBackend = PlayerBackendType.mpv
var avPlayerBackend: AVPlayerBackend!
var mpvBackend: MPVBackend!
var backends: [PlayerBackend] {
[avPlayerBackend, mpvBackend]
}
var backend: PlayerBackend! {
switch activeBackend {
case .mpv:
return mpvBackend
case .appleAVPlayer:
return avPlayerBackend
}
}
@Published var playerSize: CGSize = .zero
@Published var stream: Stream?
@Published var currentRate: Float = 1.0 { didSet { player.rate = currentRate } }
@Published var currentRate: Float = 1.0 { didSet { backend.setRate(currentRate) } }
@Published var availableStreams = [Stream]() { didSet { handleAvailableStreamsChange() } }
@Published var streamSelection: Stream? { didSet { rebuildTVMenu() } }
@ -53,24 +70,15 @@ final class PlayerModel: ObservableObject {
var accounts: AccountsModel
var comments: CommentsModel
var asset: AVURLAsset?
var composition = AVMutableComposition()
var loadedCompositionAssets = [AVMediaType]()
var controls: PlayerControlsModel { didSet {
backends.forEach { backend in
var backend = backend
backend.controls = controls
}
}}
var context: NSManagedObjectContext = PersistenceController.shared.container.viewContext
private var currentArtwork: MPMediaItemArtwork?
private var frequentTimeObserver: Any?
private var infrequentTimeObserver: Any?
private var playerTimeControlStatusObserver: Any?
private var statusObservation: NSKeyValueObservation?
private var timeObserverThrottle = Throttle(interval: 2)
var playingInPictureInPicture = false
var playingFullscreen = false
@Published var playingInPictureInPicture = false
@Published var presentingErrorDetails = false
var playerError: Error? { didSet {
@ -89,13 +97,15 @@ final class PlayerModel: ObservableObject {
@Default(.closePiPAndOpenPlayerOnEnteringForeground) var closePiPAndOpenPlayerOnEnteringForeground
#endif
init(accounts: AccountsModel? = nil, comments: CommentsModel? = nil) {
init(accounts: AccountsModel? = nil, comments: CommentsModel? = nil, controls: PlayerControlsModel? = nil) {
self.accounts = accounts ?? AccountsModel()
self.comments = comments ?? CommentsModel()
self.controls = controls ?? PlayerControlsModel()
addFrequentTimeObserver()
addInfrequentTimeObserver()
addPlayerTimeControlStatusObserver()
self.avPlayerBackend = AVPlayerBackend(model: self, controls: controls)
self.mpvBackend = MPVBackend(model: self)
self.activeBackend = Defaults[.activeBackend]
}
func show() {
@ -137,11 +147,25 @@ final class PlayerModel: ObservableObject {
return false
}
return player.currentItem == nil || time == nil || !time!.isValid
return backend.isLoadingVideo
}
var isPlaying: Bool {
player.timeControlStatus == .playing
backend.isPlaying
}
var playerItemDuration: CMTime? {
backend.playerItemDuration
}
var playerItemDurationWithoutSponsorSegments: CMTime? {
(backend.playerItemDuration ?? .zero) - .secondsInDefaultTimescale(
sponsorBlock.segments.reduce(0) { $0 + $1.duration }
)
}
var videoDuration: TimeInterval? {
currentItem?.duration ?? currentVideo?.length ?? playerItemDuration?.seconds
}
var time: CMTime? {
@ -152,32 +176,16 @@ final class PlayerModel: ObservableObject {
currentVideo?.live ?? false
}
var playerItemDuration: CMTime? {
player.currentItem?.asset.duration
}
var videoDuration: TimeInterval? {
currentItem?.duration ?? currentVideo?.length ?? player.currentItem?.asset.duration.seconds
}
func togglePlay() {
isPlaying ? pause() : play()
backend.togglePlay()
}
func play() {
guard player.timeControlStatus != .playing else {
return
}
player.play()
backend.play()
}
func pause() {
guard player.timeControlStatus != .paused else {
return
}
player.pause()
backend.pause()
}
func play(_ video: Video, at time: TimeInterval? = nil, inNavigationView: Bool = false) {
@ -208,28 +216,37 @@ final class PlayerModel: ObservableObject {
self?.sponsorBlock.loadSegments(
videoID: video.videoID,
categories: Defaults[.sponsorBlockCategories]
)
) {
if Defaults[.showChannelSubscribers] {
self?.loadCurrentItemChannelDetails()
}
}
}
}
if let url = stream.singleAssetURL {
logger.info("playing stream with one asset\(stream.kind == .hls ? " (HLS)" : ""): \(url)")
loadSingleAsset(url, stream: stream, of: video, preservingTime: preservingTime)
} else {
logger.info("playing stream with many assets:")
logger.info("composition audio asset: \(stream.audioAsset.url)")
logger.info("composition video asset: \(stream.videoAsset.url)")
controls.reset()
loadComposition(stream, of: video, preservingTime: preservingTime)
backend.playStream(
stream,
of: video,
preservingTime: preservingTime,
upgrading: upgrading
)
}
func saveTime(completionHandler: @escaping () -> Void = {}) {
guard let currentTime = backend.currentTime, currentTime.seconds > 0 else {
return
}
if !upgrading {
updateCurrentArtwork()
DispatchQueue.main.async { [weak self] in
self?.preservedTime = currentTime
completionHandler()
}
}
func upgradeToStream(_ stream: Stream) {
if !self.stream.isNil, self.stream != stream {
func upgradeToStream(_ stream: Stream, force: Bool = false) {
if !self.stream.isNil, force || self.stream != stream {
playStream(stream, of: currentVideo!, preservingTime: true, upgrading: true)
}
}
@ -254,6 +271,9 @@ final class PlayerModel: ObservableObject {
}
private func handlePresentationChange() {
backend.setNeedsDrawing(presentingPlayer)
controls.hide()
if presentingPlayer, closePiPOnOpeningPlayer, playingInPictureInPicture {
DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) { [weak self] in
self?.closePiP()
@ -266,7 +286,7 @@ final class PlayerModel: ObservableObject {
}
}
if !presentingPlayer, !pauseOnHidingPlayer, isPlaying {
if !presentingPlayer, !pauseOnHidingPlayer, backend.isPlaying {
DispatchQueue.main.asyncAfter(deadline: .now() + 1) { [weak self] in
self?.play()
}
@ -281,423 +301,49 @@ final class PlayerModel: ObservableObject {
}
}
private func insertPlayerItem(
_ stream: Stream,
for video: Video,
preservingTime: Bool = false
) {
removeItemDidPlayToEndTimeObserver()
func changeActiveBackend(from: PlayerBackendType, to: PlayerBackendType) {
Defaults[.activeBackend] = to
self.activeBackend = to
playerItem = playerItem(stream)
guard playerItem != nil else {
guard var stream = stream else {
return
}
addItemDidPlayToEndTimeObserver()
attachMetadata(to: playerItem!, video: video, for: stream)
inactiveBackends().forEach { $0.pause() }
DispatchQueue.main.async { [weak self] in
guard let self = self else {
return
}
let fromBackend: PlayerBackend = from == .appleAVPlayer ? avPlayerBackend : mpvBackend
let toBackend: PlayerBackend = to == .appleAVPlayer ? avPlayerBackend : mpvBackend
self.stream = stream
self.composition = AVMutableComposition()
self.asset = nil
}
let startPlaying = {
#if !os(macOS)
try? AVAudioSession.sharedInstance().setActive(true)
#endif
if self.isAutoplaying(self.playerItem!) {
DispatchQueue.main.asyncAfter(deadline: .now() + 0.3) { [weak self] in
guard let self = self else {
return
}
if !preservingTime,
let segment = self.sponsorBlock.segments.first,
segment.start < 3,
self.lastSkipped.isNil
{
self.player.seek(
to: segment.endTime,
toleranceBefore: .secondsInDefaultTimescale(1),
toleranceAfter: .zero
) { finished in
guard finished else {
return
}
self.lastSkipped = segment
self.play()
}
} else {
self.play()
}
}
}
}
let replaceItemAndSeek = {
guard video == self.currentVideo else {
return
}
self.player.replaceCurrentItem(with: self.playerItem)
self.seekToPreservedTime { finished in
if let stream = toBackend.stream, toBackend.video == fromBackend.video {
toBackend.seek(to: fromBackend.currentTime?.seconds ?? .zero) { finished in
guard finished else {
return
}
self.preservedTime = nil
startPlaying()
}
}
if preservingTime {
if preservedTime.isNil {
saveTime {
replaceItemAndSeek()
startPlaying()
}
} else {
replaceItemAndSeek()
startPlaying()
}
} else {
player.replaceCurrentItem(with: playerItem)
startPlaying()
}
}
private func loadSingleAsset(
_ url: URL,
stream: Stream,
of video: Video,
preservingTime: Bool = false
) {
asset?.cancelLoading()
asset = AVURLAsset(url: url)
asset?.loadValuesAsynchronously(forKeys: Self.assetKeysToLoad) { [weak self] in
var error: NSError?
switch self?.asset?.statusOfValue(forKey: "duration", error: &error) {
case .loaded:
DispatchQueue.main.async { [weak self] in
self?.insertPlayerItem(stream, for: video, preservingTime: preservingTime)
}
case .failed:
DispatchQueue.main.async { [weak self] in
self?.playerError = error
}
default:
return
}
}
}
private func loadComposition(
_ stream: Stream,
of video: Video,
preservingTime: Bool = false
) {
loadedCompositionAssets = []
loadCompositionAsset(stream.audioAsset, stream: stream, type: .audio, of: video, preservingTime: preservingTime)
loadCompositionAsset(stream.videoAsset, stream: stream, type: .video, of: video, preservingTime: preservingTime)
}
private func loadCompositionAsset(
_ asset: AVURLAsset,
stream: Stream,
type: AVMediaType,
of video: Video,
preservingTime: Bool = false
) {
asset.loadValuesAsynchronously(forKeys: Self.assetKeysToLoad) { [weak self] in
guard let self = self else {
return
}
self.logger.info("loading \(type.rawValue) track")
let assetTracks = asset.tracks(withMediaType: type)
guard let compositionTrack = self.composition.addMutableTrack(
withMediaType: type,
preferredTrackID: kCMPersistentTrackID_Invalid
) else {
self.logger.critical("composition \(type.rawValue) addMutableTrack FAILED")
return
toBackend.play()
}
guard let assetTrack = assetTracks.first else {
self.logger.critical("asset \(type.rawValue) track FAILED")
return
}
self.stream = stream
streamSelection = stream
try! compositionTrack.insertTimeRange(
CMTimeRange(start: .zero, duration: CMTime.secondsInDefaultTimescale(video.length)),
of: assetTrack,
at: .zero
)
self.logger.critical("\(type.rawValue) LOADED")
guard self.streamSelection == stream else {
self.logger.critical("IGNORING LOADED")
return
}
self.loadedCompositionAssets.append(type)
if self.loadedCompositionAssets.count == 2 {
self.insertPlayerItem(stream, for: video, preservingTime: preservingTime)
}
}
}
private func playerItem(_: Stream) -> AVPlayerItem? {
if let asset = asset {
return AVPlayerItem(asset: asset)
} else {
return AVPlayerItem(asset: composition)
}
}
private func attachMetadata(to item: AVPlayerItem, video: Video, for _: Stream? = nil) {
#if !os(macOS)
var externalMetadata = [
makeMetadataItem(.commonIdentifierTitle, value: video.title),
makeMetadataItem(.quickTimeMetadataGenre, value: video.genre ?? ""),
makeMetadataItem(.commonIdentifierDescription, value: video.description ?? "")
]
if let thumbnailData = try? Data(contentsOf: video.thumbnailURL(quality: .medium)!),
let image = UIImage(data: thumbnailData),
let pngData = image.pngData()
{
let artworkItem = makeMetadataItem(.commonIdentifierArtwork, value: pngData)
externalMetadata.append(artworkItem)
}
item.externalMetadata = externalMetadata
#endif
item.preferredForwardBufferDuration = 5
observePlayerItemStatus(item)
}
private func observePlayerItemStatus(_ item: AVPlayerItem) {
statusObservation?.invalidate()
statusObservation = item.observe(\.status, options: [.old, .new]) { [weak self] playerItem, _ in
guard let self = self else {
return
}
switch playerItem.status {
case .readyToPlay:
if self.isAutoplaying(playerItem) {
self.play()
}
case .failed:
self.playerError = item.error
default:
return
}
}
}
#if !os(macOS)
private func makeMetadataItem(_ identifier: AVMetadataIdentifier, value: Any) -> AVMetadataItem {
let item = AVMutableMetadataItem()
item.identifier = identifier
item.value = value as? NSCopying & NSObjectProtocol
item.extendedLanguageTag = "und"
return item.copy() as! AVMetadataItem
}
#endif
private func addItemDidPlayToEndTimeObserver() {
NotificationCenter.default.addObserver(
self,
selector: #selector(itemDidPlayToEndTime),
name: NSNotification.Name.AVPlayerItemDidPlayToEndTime,
object: playerItem
)
}
private func removeItemDidPlayToEndTimeObserver() {
NotificationCenter.default.removeObserver(
self,
name: NSNotification.Name.AVPlayerItemDidPlayToEndTime,
object: playerItem
)
}
@objc func itemDidPlayToEndTime() {
prepareCurrentItemForHistory(finished: true)
if queue.isEmpty {
#if !os(macOS)
try? AVAudioSession.sharedInstance().setActive(false)
#endif
resetQueue()
#if os(tvOS)
controller?.playerView.dismiss(animated: false) { [weak self] in
self?.controller?.dismiss(animated: true)
}
#endif
} else {
advanceToNextItem()
}
}
private func saveTime(completionHandler: @escaping () -> Void = {}) {
let currentTime = player.currentTime()
guard currentTime.seconds > 0 else {
return
}
DispatchQueue.main.async { [weak self] in
self?.preservedTime = currentTime
completionHandler()
}
}
private func seekToPreservedTime(completionHandler: @escaping (Bool) -> Void = { _ in }) {
guard let time = preservedTime else {
return
}
player.seek(
to: time,
toleranceBefore: .secondsInDefaultTimescale(1),
toleranceAfter: .zero,
completionHandler: completionHandler
)
}
private func addFrequentTimeObserver() {
let interval = CMTime.secondsInDefaultTimescale(0.5)
frequentTimeObserver = player.addPeriodicTimeObserver(
forInterval: interval,
queue: .main
) { [weak self] _ in
guard let self = self else {
if !backend.canPlay(stream) {
guard let preferredStream = preferredStream(availableStreams) else {
return
}
guard !self.currentItem.isNil else {
return
}
stream = preferredStream
streamSelection = preferredStream
}
#if !os(tvOS)
self.updateNowPlayingInfo()
#endif
self.handleSegments(at: self.player.currentTime())
DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) { [weak self] in
self?.upgradeToStream(stream, force: true)
}
}
private func addInfrequentTimeObserver() {
let interval = CMTime.secondsInDefaultTimescale(5)
infrequentTimeObserver = player.addPeriodicTimeObserver(
forInterval: interval,
queue: .main
) { [weak self] _ in
guard let self = self else {
return
}
guard !self.currentItem.isNil else {
return
}
self.timeObserverThrottle.execute {
self.updateWatch()
}
}
}
private func addPlayerTimeControlStatusObserver() {
playerTimeControlStatusObserver = player.observe(\.timeControlStatus) { [weak self] player, _ in
guard let self = self,
self.player == player
else {
return
}
if player.timeControlStatus != .waitingToPlayAtSpecifiedRate {
self.objectWillChange.send()
}
if player.timeControlStatus == .playing, player.rate != self.currentRate {
player.rate = self.currentRate
}
#if os(macOS)
if player.timeControlStatus == .playing {
ScreenSaverManager.shared.disable(reason: "Yattee is playing video")
} else {
ScreenSaverManager.shared.enable()
}
#endif
self.timeObserverThrottle.execute {
self.updateWatch()
}
}
}
fileprivate func updateNowPlayingInfo() {
var nowPlayingInfo: [String: AnyObject] = [
MPMediaItemPropertyTitle: currentItem.video.title as AnyObject,
MPMediaItemPropertyArtist: currentItem.video.author as AnyObject,
MPNowPlayingInfoPropertyIsLiveStream: currentItem.video.live as AnyObject,
MPNowPlayingInfoPropertyElapsedPlaybackTime: player.currentTime().seconds as AnyObject,
MPNowPlayingInfoPropertyPlaybackQueueCount: queue.count as AnyObject,
MPMediaItemPropertyMediaType: MPMediaType.anyVideo.rawValue as AnyObject
]
if !currentArtwork.isNil {
nowPlayingInfo[MPMediaItemPropertyArtwork] = currentArtwork as AnyObject
}
if !currentItem.video.live {
let itemDuration = currentItem.videoDuration ?? currentItem.duration
let duration = itemDuration.isFinite ? Double(itemDuration) : nil
if !duration.isNil {
nowPlayingInfo[MPMediaItemPropertyPlaybackDuration] = duration as AnyObject
}
}
MPNowPlayingInfoCenter.default().nowPlayingInfo = nowPlayingInfo
}
private func updateCurrentArtwork() {
guard let thumbnailData = try? Data(contentsOf: currentItem.video.thumbnailURL(quality: .medium)!) else {
return
}
#if os(macOS)
let image = NSImage(data: thumbnailData)
#else
let image = UIImage(data: thumbnailData)
#endif
if image.isNil {
return
}
currentArtwork = MPMediaItemArtwork(boundsSize: image!.size) { _ in image! }
private func inactiveBackends() -> [PlayerBackend] {
[activeBackend == PlayerBackendType.mpv ? avPlayerBackend : mpvBackend]
}
func rateLabel(_ rate: Float) -> String {
@ -711,7 +357,8 @@ final class PlayerModel: ObservableObject {
func closeCurrentItem() {
prepareCurrentItemForHistory()
currentItem = nil
player.replaceCurrentItem(with: nil)
backend.closeItem()
}
func closePiP() {
@ -726,46 +373,9 @@ final class PlayerModel: ObservableObject {
show()
#endif
doClosePiP(wasPlaying: wasPlaying)
backend.closePiP(wasPlaying: wasPlaying)
}
#if os(tvOS)
private func doClosePiP(wasPlaying: Bool) {
let item = player.currentItem
let time = player.currentTime()
self.player.replaceCurrentItem(with: nil)
guard !item.isNil else {
return
}
self.player.seek(to: time)
self.player.replaceCurrentItem(with: item)
guard wasPlaying else {
return
}
DispatchQueue.main.asyncAfter(deadline: .now() + 1) { [weak self] in
self?.play()
}
}
#else
private func doClosePiP(wasPlaying: Bool) {
controller?.playerView.player = nil
controller?.playerView.player = player
guard wasPlaying else {
return
}
DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) { [weak self] in
self?.play()
}
}
#endif
func handleCurrentItemChange() {
#if os(macOS)
Windows.player.window?.title = windowTitle
@ -789,25 +399,23 @@ final class PlayerModel: ObservableObject {
}
func enterFullScreen() {
guard !playingFullscreen else {
guard !controls.playingFullscreen else {
return
}
logger.info("entering fullscreen")
controller?.playerView
.perform(NSSelectorFromString("enterFullScreenAnimated:completionHandler:"), with: false, with: nil)
backend.enterFullScreen()
}
func exitFullScreen() {
guard playingFullscreen else {
guard controls.playingFullscreen else {
return
}
logger.info("exiting fullscreen")
controller?.playerView
.perform(NSSelectorFromString("exitFullScreenAnimated:completionHandler:"), with: false, with: nil)
backend.exitFullScreen()
}
#endif
}

View File

@ -56,7 +56,7 @@ extension PlayerModel {
func playItem(_ item: PlayerQueueItem, video: Video? = nil, at time: TimeInterval? = nil) {
if !playingInPictureInPicture {
player.replaceCurrentItem(with: nil)
backend.closeItem()
}
comments.reset()
@ -92,13 +92,13 @@ extension PlayerModel {
streams = streams.filter { $0.instance.id == id }
}
streams = streams.filter { backend.canPlay($0) }
switch quality {
case .best:
return streams.first { $0.kind == .hls } ??
streams.filter { $0.kind == .stream }.max { $0.resolution < $1.resolution } ??
streams.first
return backend.bestPlayable(streams)
default:
let sorted = streams.filter { $0.kind != .hls }.sorted { $0.resolution > $1.resolution }
let sorted = streams.filter { $0.kind != .hls }.sorted { $0.resolution > $1.resolution }.sorted { $0.kind < $1.kind }
return sorted.first(where: { $0.resolution.height <= quality.value.height })
}
}
@ -117,7 +117,7 @@ extension PlayerModel {
remove(newItem)
currentItem = newItem
player.pause()
pause()
accounts.api.loadDetails(newItem) { newItem in
self.playItem(newItem, video: newItem.video, at: time)
@ -143,11 +143,7 @@ extension PlayerModel {
self.removeQueueItems()
}
player.replaceCurrentItem(with: nil)
}
func isAutoplaying(_ item: AVPlayerItem) -> Bool {
player.currentItem == item
backend.closeItem()
}
@discardableResult func enqueueVideo(
@ -162,7 +158,7 @@ extension PlayerModel {
if play {
currentItem = item
// pause playing current video as it's going to be replaced with next one
player.pause()
pause()
}
queue.insert(item, at: prepending ? 0 : queue.endIndex)

View File

@ -38,9 +38,12 @@ extension PlayerModel {
return
}
player.seek(to: segment.endTime)
lastSkipped = segment
segmentRestorationTime = time
backend.seek(to: segment.endTime)
DispatchQueue.main.async { [weak self] in
self?.lastSkipped = segment
self?.segmentRestorationTime = time
}
logger.info("SponsorBlock skipping to: \(segment.end)")
}
@ -63,13 +66,15 @@ extension PlayerModel {
}
restoredSegments.append(segment)
player.seek(to: time)
backend.seek(to: time)
resetLastSegment()
}
private func resetLastSegment() {
lastSkipped = nil
segmentRestorationTime = nil
DispatchQueue.main.async { [weak self] in
self?.lastSkipped = nil
self?.segmentRestorationTime = nil
}
}
func resetSegments() {

View File

@ -17,6 +17,10 @@ class Segment: ObservableObject, Hashable {
segment.last!
}
var duration: Double {
end - start
}
var endTime: CMTime {
CMTime(seconds: end, preferredTimescale: 1000)
}

View File

@ -5,7 +5,7 @@ import Foundation
// swiftlint:disable:next final_class
class Stream: Equatable, Hashable, Identifiable {
enum Resolution: String, CaseIterable, Comparable, Defaults.Serializable {
case hd1440p60, hd1440p, hd1080p60, hd1080p, hd720p60, hd720p, sd480p, sd360p, sd240p, sd144p, unknown
case hd2160p, hd1440p60, hd1440p, hd1080p60, hd1080p, hd720p60, hd720p, sd480p, sd360p, sd240p, sd144p, unknown
var name: String {
"\(height)p\(refreshRate != -1 ? ", \(refreshRate) fps" : "")"
@ -68,6 +68,7 @@ class Stream: Equatable, Hashable, Identifiable {
var kind: Kind!
var encoding: String!
var videoFormat: String!
init(
instance: Instance? = nil,
@ -76,7 +77,8 @@ class Stream: Equatable, Hashable, Identifiable {
hlsURL: URL? = nil,
resolution: Resolution? = nil,
kind: Kind = .hls,
encoding: String? = nil
encoding: String? = nil,
videoFormat: String? = nil
) {
self.instance = instance
self.audioAsset = audioAsset
@ -85,14 +87,35 @@ class Stream: Equatable, Hashable, Identifiable {
self.resolution = resolution
self.kind = kind
self.encoding = encoding
self.videoFormat = videoFormat
}
var quality: String {
kind == .hls ? "adaptive (HLS)" : "\(resolution.name) \(kind == .stream ? "(\(kind.rawValue))" : "")"
if resolution == .hd2160p {
return "4K (2160p)"
}
return kind == .hls ? "adaptive (HLS)" : "\(resolution.name)\(kind == .stream ? " (\(kind.rawValue))" : "")"
}
var format: String {
let lowercasedFormat = (videoFormat ?? "unknown").lowercased()
if lowercasedFormat.contains("webm") {
return "WEBM"
} else if lowercasedFormat.contains("avc1") {
return "avc1"
} else if lowercasedFormat.contains("av01") {
return "AV1"
} else if lowercasedFormat.contains("mpeg_4") || lowercasedFormat.contains("mp4") {
return "MP4"
} else {
return lowercasedFormat
}
}
var description: String {
"\(quality) - \(instance?.description ?? "")"
let formatString = format == "unknown" ? "" : " (\(format))"
return "\(quality)\(formatString) - \(instance?.description ?? "")"
}
var assets: [AVURLAsset] {

View File

@ -43,6 +43,7 @@ extension Defaults.Keys {
static let timeOnThumbnail = Key<Bool>("timeOnThumbnail", default: true)
static let roundedThumbnails = Key<Bool>("roundedThumbnails", default: true)
static let activeBackend = Key<PlayerBackendType>("activeBackend", default: .mpv)
static let quality = Key<ResolutionSetting>("quality", default: .best)
static let playerSidebar = Key<PlayerSidebarSetting>("playerSidebar", default: PlayerSidebarSetting.defaultValue)
static let playerInstanceID = Key<Instance.ID?>("playerInstance")

View File

@ -19,7 +19,7 @@ struct FavoritesView: View {
#endif
var body: some View {
PlayerControlsView {
BrowserPlayerControls {
ScrollView(.vertical, showsIndicators: false) {
if !accounts.current.isNil {
#if os(tvOS)

View File

@ -49,7 +49,7 @@ struct AppSidebarNavigation: View {
.frame(minWidth: sidebarMinWidth)
VStack {
PlayerControlsView {
BrowserPlayerControls {
HStack(alignment: .center) {
Spacer()
Image(systemName: "play.tv")

View File

@ -7,6 +7,7 @@ struct AppTabNavigation: View {
@EnvironmentObject<InstancesModel> private var instances
@EnvironmentObject<NavigationModel> private var navigation
@EnvironmentObject<PlayerModel> private var player
@EnvironmentObject<PlayerControlsModel> private var playerControls
@EnvironmentObject<PlaylistsModel> private var playlists
@EnvironmentObject<RecentsModel> private var recents
@EnvironmentObject<SearchModel> private var search
@ -95,7 +96,7 @@ struct AppTabNavigation: View {
.toolbar { toolbarContent }
}
.tabItem {
Label("Favorites", systemImage: "heart")
Label("Favorites", systemImage: "heart.fill")
.accessibility(label: Text("Favorites"))
}
.tag(TabSelection.favorites)
@ -129,7 +130,7 @@ struct AppTabNavigation: View {
.toolbar { toolbarContent }
}
.tabItem {
Label("Popular", systemImage: "arrow.up.right.circle")
Label("Popular", systemImage: "arrow.up.right.circle.fill")
.accessibility(label: Text("Popular"))
}
.tag(TabSelection.popular)
@ -141,7 +142,7 @@ struct AppTabNavigation: View {
.toolbar { toolbarContent }
}
.tabItem {
Label("Trending", systemImage: "chart.bar")
Label("Trending", systemImage: "chart.bar.fill")
.accessibility(label: Text("Trending"))
}
.tag(TabSelection.trending)
@ -187,6 +188,7 @@ struct AppTabNavigation: View {
.environmentObject(instances)
.environmentObject(navigation)
.environmentObject(player)
.environmentObject(playerControls)
.environmentObject(playlists)
.environmentObject(recents)
.environmentObject(subscriptions)

View File

@ -12,6 +12,7 @@ struct ContentView: View {
@EnvironmentObject<InstancesModel> private var instances
@EnvironmentObject<NavigationModel> private var navigation
@EnvironmentObject<PlayerModel> private var player
@EnvironmentObject<PlayerControlsModel> private var playerControls
@EnvironmentObject<PlaylistsModel> private var playlists
@EnvironmentObject<RecentsModel> private var recents
@EnvironmentObject<SearchModel> private var search
@ -135,9 +136,11 @@ struct ContentView: View {
menu.accounts = accounts
menu.navigation = navigation
menu.player = player
playerControls.player = player
player.accounts = accounts
player.comments = comments
player.controls = playerControls
if !accounts.current.isNil {
player.restoreQueue()

View File

@ -1,35 +1,25 @@
import Defaults
import SwiftUI
struct Player: UIViewControllerRepresentable {
struct AVPlayerView: UIViewControllerRepresentable {
@EnvironmentObject<CommentsModel> private var comments
@EnvironmentObject<NavigationModel> private var navigation
@EnvironmentObject<PlayerModel> private var player
@EnvironmentObject<SubscriptionsModel> private var subscriptions
var controller: PlayerViewController?
init(controller: PlayerViewController? = nil) {
self.controller = controller
}
func makeUIViewController(context _: Context) -> PlayerViewController {
if self.controller != nil {
return self.controller!
}
let controller = PlayerViewController()
func makeUIViewController(context _: Context) -> UIViewController {
let controller = AppleAVPlayerViewController()
controller.commentsModel = comments
controller.navigationModel = navigation
controller.playerModel = player
controller.subscriptionsModel = subscriptions
player.controller = controller
player.avPlayerBackend.controller = controller
return controller
}
func updateUIViewController(_: PlayerViewController, context _: Context) {
func updateUIViewController(_: UIViewController, context _: Context) {
player.rebuildTVMenu()
}
}

View File

@ -0,0 +1,207 @@
import AVKit
import Defaults
import SwiftUI
/// Hosts an `AVPlayerViewController` for the AVPlayer backend, embedding it on
/// iOS/macCatalyst and presenting it modally on tvOS, where it also attaches
/// custom info panels (comments, related, queue).
final class AppleAVPlayerViewController: UIViewController {
    // Set-once guard so repeated viewWillAppear calls don't rebuild the player view.
    // NOTE(review): never set to true within this class as shown — presumably
    // updated elsewhere or intentionally always reloading; confirm.
    var playerLoaded = false
    var commentsModel: CommentsModel!
    var navigationModel: NavigationModel!
    var playerModel: PlayerModel!
    var subscriptionsModel: SubscriptionsModel!
    var playerView = AVPlayerViewController()
    let persistenceController = PersistenceController.shared

    #if !os(tvOS)
        // Width/height of the current video bounds, clamped to at least 1.0
        // (never reports a portrait ratio); falls back to the app default
        // while bounds are zero/undefined.
        var aspectRatio: Double? {
            let ratio = Double(playerView.videoBounds.width) / Double(playerView.videoBounds.height)

            guard ratio.isFinite else {
                return VideoPlayerView.defaultAspectRatio // swiftlint:disable:this implicit_return
            }

            return [ratio, 1.0].max()!
        }
    #endif

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)

        loadPlayer()

        #if os(tvOS)
            // tvOS shows the player full screen as a modal presentation.
            if !playerView.isBeingPresented, !playerView.isBeingDismissed {
                present(playerView, animated: false)
            }
        #endif
    }

    #if os(tvOS)
        override func viewDidDisappear(_ animated: Bool) {
            super.viewDidDisappear(animated)

            // When the player UI is dismissed without the pause-on-hide setting
            // and playback is not running, resume shortly after.
            if !playerModel.presentingPlayer, !Defaults[.pauseOnHidingPlayer], !playerModel.isPlaying {
                DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) { [weak self] in
                    self?.playerModel.play()
                }
            }
        }
    #endif

    // Wires the AVPlayerViewController to the AVPlayer backend and configures
    // PiP, controls, and per-platform presentation. Guarded by `playerLoaded`.
    func loadPlayer() {
        guard !playerLoaded else {
            return
        }

        playerModel.avPlayerBackend.controller = self
        playerView.player = playerModel.avPlayerBackend.avPlayer
        playerView.allowsPictureInPicturePlayback = true
        // Custom SwiftUI controls are used instead of the system ones.
        playerView.showsPlaybackControls = false

        #if os(iOS)
            if #available(iOS 14.2, *) {
                playerView.canStartPictureInPictureAutomaticallyFromInline = true
            }
        #endif

        playerView.delegate = self

        #if os(tvOS)
            // Build the swipe-down info panels: optional Comments, then Related
            // and Queue (Queue optionally includes watch history).
            var infoViewControllers = [UIHostingController<AnyView>]()
            if CommentsModel.enabled {
                infoViewControllers.append(infoViewController([.comments], title: "Comments"))
            }

            var queueSections = [NowPlayingView.ViewSection.playingNext]
            if Defaults[.showHistoryInPlayer] {
                queueSections.append(.playedPreviously)
            }

            infoViewControllers.append(contentsOf: [
                infoViewController([.related], title: "Related"),
                infoViewController(queueSections, title: "Queue")
            ])

            playerView.customInfoViewControllers = infoViewControllers
        #else
            embedViewController()
        #endif
    }

    #if os(tvOS)
        // Wraps a NowPlayingView section list in a hosting controller for use
        // as an AVPlayerViewController custom info panel.
        func infoViewController(
            _ sections: [NowPlayingView.ViewSection],
            title: String
        ) -> UIHostingController<AnyView> {
            let controller = UIHostingController(rootView:
                AnyView(
                    NowPlayingView(sections: sections, inInfoViewController: true)
                        .frame(maxHeight: 600)
                        .environmentObject(commentsModel)
                        .environmentObject(playerModel)
                        .environmentObject(subscriptionsModel)
                        .environment(\.managedObjectContext, persistenceController.container.viewContext)
                )
            )

            controller.title = title

            return controller
        }
    #else
        // Embeds the AVPlayerViewController as a child filling this controller's view.
        func embedViewController() {
            playerView.view.frame = view.bounds

            addChild(playerView)
            view.addSubview(playerView.view)

            playerView.didMove(toParent: self)
        }
    #endif
}
// MARK: - AVPlayerViewControllerDelegate
extension AppleAVPlayerViewController: AVPlayerViewControllerDelegate {
    func playerViewControllerShouldDismiss(_: AVPlayerViewController) -> Bool {
        true
    }

    func playerViewControllerShouldAutomaticallyDismissAtPictureInPictureStart(_: AVPlayerViewController) -> Bool {
        true
    }

    func playerViewControllerWillBeginDismissalTransition(_: AVPlayerViewController) {
        // Optionally pause when the player is being dismissed.
        if Defaults[.pauseOnHidingPlayer] {
            playerModel.pause()
        }
        dismiss(animated: false)
    }

    func playerViewControllerDidEndDismissalTransition(_: AVPlayerViewController) {}

    func playerViewController(
        _: AVPlayerViewController,
        willBeginFullScreenPresentationWithAnimationCoordinator context: UIViewControllerTransitionCoordinator
    ) {
        #if os(iOS)
            // Lock to landscape when entering fullscreen; rotate explicitly
            // only if the device is not already in a landscape orientation.
            if !context.isCancelled, Defaults[.lockLandscapeWhenEnteringFullscreen] {
                Orientation.lockOrientation(.landscape, andRotateTo: UIDevice.current.orientation.isLandscape ? nil : .landscapeRight)
            }
        #endif
    }

    func playerViewController(
        _: AVPlayerViewController,
        willEndFullScreenPresentationWithAnimationCoordinator coordinator: UIViewControllerTransitionCoordinator
    ) {
        // Capture the playing state before the transition so playback can be
        // resumed alongside (and after) the dismissal animation.
        let wasPlaying = playerModel.isPlaying
        coordinator.animate(alongsideTransition: nil) { context in
            #if os(iOS)
                if wasPlaying {
                    self.playerModel.play()
                }
            #endif
            if !context.isCancelled {
                #if os(iOS)
                    self.playerModel.lockedOrientation = nil
                    if Defaults[.enterFullscreenInLandscape] {
                        Orientation.lockOrientation(.portrait, andRotateTo: .portrait)
                    }
                    // NOTE(review): on iOS a completed transition calls
                    // play() twice (once above, once here) — presumably
                    // harmless, verify.
                    if wasPlaying {
                        self.playerModel.play()
                    }
                #endif
            }
        }
    }

    func playerViewController(
        _: AVPlayerViewController,
        restoreUserInterfaceForPictureInPictureStopWithCompletionHandler completionHandler: @escaping (Bool) -> Void
    ) {
        // Re-present the player UI after PiP ends; the delay lets the PiP
        // dismissal settle before presenting again.
        DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) {
            if self.navigationModel.presentingChannel {
                self.playerModel.playerNavigationLinkActive = true
            } else {
                self.playerModel.show()
            }

            #if os(tvOS)
                if self.playerModel.playingInPictureInPicture {
                    self.present(self.playerView, animated: false) {
                        completionHandler(true)
                    }
                }
            #else
                completionHandler(true)
            #endif
        }
    }

    func playerViewControllerWillStartPictureInPicture(_: AVPlayerViewController) {
        playerModel.playingInPictureInPicture = true
        playerModel.playerNavigationLinkActive = false
    }

    func playerViewControllerWillStopPictureInPicture(_: AVPlayerViewController) {
        playerModel.playingInPictureInPicture = false
    }
}

View File

@ -0,0 +1,237 @@
import Foundation
import SwiftUI
/// Custom playback-controls overlay drawn on top of the video surface.
/// Visibility is driven by `PlayerControlsModel.presentingControls`.
struct PlayerControls: View {
    // Zero-duration ease: effectively disables implicit animation while
    // still providing an `Animation` value to pass around.
    static let animation = Animation.easeInOut(duration: 0)

    private var player: PlayerModel!

    @EnvironmentObject<PlayerControlsModel> private var model
    @Environment(\.verticalSizeClass) private var verticalSizeClass

    init(player: PlayerModel) {
        self.player = player
    }

    var body: some View {
        VStack {
            ZStack(alignment: .bottom) {
                VStack(spacing: 0) {
                    Group {
                        statusBar
                            .padding(3)
                        #if os(macOS)
                            .background(VisualEffectBlur(material: .hudWindow))
                        #elseif os(iOS)
                            .background(VisualEffectBlur(blurStyle: .systemThinMaterial))
                        #endif
                            .mask(RoundedRectangle(cornerRadius: 3))

                        buttonsBar
                            .padding(.top, 4)
                            .padding(.horizontal, 4)
                    }

                    Spacer()

                    mediumButtonsBar

                    Spacer()

                    timeline
                        .offset(y: 10)
                        .zIndex(1)

                    bottomBar
                    #if os(macOS)
                        .background(VisualEffectBlur(material: .hudWindow))
                    #elseif os(iOS)
                        .background(VisualEffectBlur(blurStyle: .systemThinMaterial))
                    #endif
                        .mask(RoundedRectangle(cornerRadius: 3))
                }
            }
            .opacity(model.presentingControls ? 1 : 0)
        }
        .background(controlsBackground)
        // Controls sit on dark translucent material; force dark scheme so
        // foreground colors stay readable regardless of system appearance.
        .environment(\.colorScheme, .dark)
    }

    /// Dimmed, gesture-capturing backdrop while controls are visible;
    /// transparent (and gesture-free) otherwise.
    var controlsBackground: some View {
        model.presentingControls ?
            AnyView(
                PlayerGestures()
                    .background(Color.black.opacity(0.5))
            ) :
            AnyView(Color.clear)
    }

    var timeline: some View {
        TimelineView(duration: durationBinding, current: currentTimeBinding, cornerRadius: 0)
    }

    // Bridges the model's CMTime values to the Double seconds the timeline
    // works with.
    var durationBinding: Binding<Double> {
        Binding<Double>(
            get: { model.duration.seconds },
            set: { value in model.duration = .secondsInDefaultTimescale(value) }
        )
    }

    var currentTimeBinding: Binding<Double> {
        Binding<Double>(
            get: { model.currentTime.seconds },
            set: { value in model.currentTime = .secondsInDefaultTimescale(value) }
        )
    }

    var statusBar: some View {
        HStack(spacing: 4) {
            hidePlayerButton
            Text(playbackStatus)

            Spacer()

            ToggleBackendButton()
            // NOTE(review): this label appears empty — possibly a separator
            // glyph lost in transfer; verify against the repository.
            Text("")

            StreamControl()
        }
        .foregroundColor(.primary)
        .padding(.trailing, 4)
        .font(.system(size: 14))
    }

    private var hidePlayerButton: some View {
        Button {
            player.hide()
        } label: {
            Image(systemName: "chevron.down.circle.fill")
        }
    }

    /// Human-readable status: "LIVE", a loading placeholder, or the wall
    /// clock time playback will finish at, given the current rate.
    private var playbackStatus: String {
        if player.live {
            return "LIVE"
        }

        guard !player.isLoadingVideo else {
            return "loading..."
        }

        let videoLengthAtRate = (player.currentVideo?.length ?? 0) / Double(player.currentRate)
        let remainingSeconds = videoLengthAtRate - (player.time?.seconds ?? 0)

        if remainingSeconds < 60 {
            return "less than a minute"
        }

        let timeFinishAt = Date().addingTimeInterval(remainingSeconds)

        return "ends at \(formattedTimeFinishAt(timeFinishAt))"
    }

    private func formattedTimeFinishAt(_ date: Date) -> String {
        // NOTE(review): DateFormatter construction is relatively expensive;
        // consider caching if this is evaluated on every render.
        let dateFormatter = DateFormatter()
        dateFormatter.dateStyle = .none
        dateFormatter.timeStyle = .short

        return dateFormatter.string(from: date)
    }

    var buttonsBar: some View {
        HStack {
            fullscreenButton
            Spacer()
            // NOTE(review): no action wired yet — appears to be a placeholder.
            button("Music Mode", systemImage: "music.note")
        }
    }

    var fullscreenButton: some View {
        button(
            "Fullscreen",
            systemImage: fullScreenLayout ? "arrow.down.right.and.arrow.up.left" : "arrow.up.left.and.arrow.down.right"
        ) {
            model.toggleFullscreen(fullScreenLayout)
        }
        // Escape leaves fullscreen; Return enters it.
        .keyboardShortcut(fullScreenLayout ? .cancelAction : .defaultAction)
    }

    /// Center cluster: seek back 10s, play/pause, seek forward 10s.
    var mediumButtonsBar: some View {
        HStack {
            button("Seek Backward", systemImage: "gobackward.10", size: 50, cornerRadius: 10) {
                player.backend.seek(relative: .secondsInDefaultTimescale(-10))
            }
            .keyboardShortcut("k")

            Spacer()

            button(
                model.isPlaying ? "Pause" : "Play",
                systemImage: model.isPlaying ? "pause.fill" : "play.fill",
                size: 50,
                cornerRadius: 10
            ) {
                player.backend.togglePlay()
            }
            .keyboardShortcut("p")
            .disabled(model.isLoadingVideo)

            Spacer()

            button("Seek Forward", systemImage: "goforward.10", size: 50, cornerRadius: 10) {
                player.backend.seek(relative: .secondsInDefaultTimescale(10))
            }
            .keyboardShortcut("l")
        }
        .font(.system(size: 30))
        .padding(.horizontal, 4)
    }

    var bottomBar: some View {
        HStack {
            Spacer()
            Text(model.playbackTime)
        }
        .font(.system(size: 15))
        .padding(.horizontal, 5)
        .padding(.vertical, 3)
        .labelStyle(.iconOnly)
        .foregroundColor(.primary)
    }

    /// Shared factory for the overlay's round-rect icon buttons.
    /// Every press also restarts the controls auto-hide timer.
    func button(
        _ label: String,
        systemImage: String = "arrow.up.left.and.arrow.down.right",
        size: Double = 30,
        cornerRadius: Double = 3,
        action: @escaping () -> Void = {}
    ) -> some View {
        Button {
            action()
            model.resetTimer()
        } label: {
            Label(label, systemImage: systemImage)
                .labelStyle(.iconOnly)
                .padding()
                .contentShape(Rectangle())
        }
        .foregroundColor(.primary)
        .frame(width: size, height: size)
        #if os(macOS)
            .background(VisualEffectBlur(material: .hudWindow))
        #elseif os(iOS)
            .background(VisualEffectBlur(blurStyle: .systemThinMaterial))
        #endif
        .mask(RoundedRectangle(cornerRadius: cornerRadius))
    }

    /// Fullscreen is either explicit or implied by a compact vertical size
    /// class (e.g. landscape iPhone).
    var fullScreenLayout: Bool {
        model.playingFullscreen || verticalSizeClass == .compact
    }
}

struct PlayerControls_Previews: PreviewProvider {
    static var previews: some View {
        PlayerControls(player: PlayerModel())
    }
}

View File

@ -0,0 +1,23 @@
import SwiftUI
/// Button that cycles playback to the next available backend (e.g. between
/// AVPlayer and mpv), labeled with the currently active backend.
struct ToggleBackendButton: View {
    @EnvironmentObject<PlayerControlsModel> private var controls
    @EnvironmentObject<PlayerModel> private var player

    var body: some View {
        Button(action: switchToNextBackend) {
            Text(player.activeBackend.label)
        }
    }

    /// Persists the current playback position, then flips to the next
    /// backend and restarts the controls auto-hide timer.
    private func switchToNextBackend() {
        player.saveTime {
            player.changeActiveBackend(from: player.activeBackend, to: player.activeBackend.next())
            controls.resetTimer()
        }
    }
}

struct ToggleBackendButton_Previews: PreviewProvider {
    static var previews: some View {
        ToggleBackendButton()
    }
}

View File

@ -0,0 +1,63 @@
import GLKit
import OpenGLES
/// GLKit-backed view that mpv renders into via its OpenGL render API.
final class MPVOGLView: GLKView {
    // Framebuffer binding currently active in GLKit; queried on each draw
    // and handed to mpv so it renders into the correct target.
    private var defaultFBO: GLint?

    // Opaque mpv render context pointer, set externally by the mpv client.
    var mpvGL: UnsafeMutableRawPointer?
    // Flag read elsewhere to gate redraws — TODO confirm where it is checked.
    var needsDrawing = true

    override init(frame: CGRect) {
        guard let context = EAGLContext(api: .openGLES2) else {
            print("Failed to initialize OpenGLES 2.0 context")
            exit(1)
        }

        super.init(frame: frame, context: context)

        contentMode = .redraw

        EAGLContext.setCurrent(context)

        drawableColorFormat = .RGBA8888
        drawableDepthFormat = .formatNone
        drawableStencilFormat = .formatNone

        defaultFBO = -1
        // Alpha is cleared to 0 below and the view is non-opaque, so content
        // behind the view shows through until mpv draws its first frame.
        isOpaque = false

        fillBlack()
    }

    func fillBlack() {
        // Note: clears to fully transparent (alpha 0), not opaque black.
        glClearColor(0, 0, 0, 0)
        glClear(UInt32(GL_COLOR_BUFFER_BIT))
    }

    override func draw(_: CGRect) {
        glGetIntegerv(UInt32(GL_FRAMEBUFFER_BINDING), &defaultFBO!)

        if mpvGL != nil {
            // Describe the FBO to mpv in drawable pixels (points × scale).
            var data = mpv_opengl_fbo(
                fbo: Int32(defaultFBO!),
                w: Int32(bounds.size.width) * Int32(contentScaleFactor),
                h: Int32(bounds.size.height) * Int32(contentScaleFactor),
                internal_format: 0
            )
            var flip: CInt = 1
            withUnsafeMutablePointer(to: &flip) { flip in
                withUnsafeMutablePointer(to: &data) { data in
                    // Zero-initialized param terminates the list for mpv.
                    var params = [
                        mpv_render_param(type: MPV_RENDER_PARAM_OPENGL_FBO, data: data),
                        mpv_render_param(type: MPV_RENDER_PARAM_FLIP_Y, data: flip),
                        mpv_render_param()
                    ]

                    mpv_render_context_render(OpaquePointer(mpvGL), &params)
                }
            }
        }
    }

    required init?(coder aDecoder: NSCoder) {
        // NOTE(review): this path skips the GL context setup performed in
        // init(frame:) — verify this view is never instantiated from a coder.
        super.init(coder: aDecoder)
    }
}

View File

@ -0,0 +1,25 @@
import UIKit
/// Hosts the mpv OpenGL view inside a plain view controller.
final class MPVViewController: UIViewController {
    /// mpv client owned by this controller; created eagerly so it exists
    /// before the view hierarchy loads.
    var client: MPVClient!
    /// OpenGL view that mpv renders into; created in `viewDidLoad`.
    var glView: MPVOGLView!

    init() {
        client = MPVClient()
        super.init(nibName: nil, bundle: nil)
    }

    required init?(coder: NSCoder) {
        // NOTE(review): `client` is not initialized on this path — verify
        // storyboard/coder instantiation is unsupported.
        super.init(coder: coder)
    }

    override func viewDidLoad() {
        // Fix: the original called `super.loadView()` here. UIKit documents
        // that `loadView()` must never be invoked directly — it is called by
        // the framework while loading the view, and calling it from
        // `viewDidLoad` can replace the already-loaded view. Call the
        // matching superclass hook first instead.
        super.viewDidLoad()

        glView = client.create(frame: view.frame)
        view.addSubview(glView)
    }
}

View File

@ -1,231 +0,0 @@
import Defaults
import Foundation
import SwiftUI
/// Status bar shown above the player: close button, playback status, rate
/// picker, loading/error indicators, and stream quality selection.
struct PlaybackBar: View {
    @Environment(\.colorScheme) private var colorScheme
    @Environment(\.presentationMode) private var presentationMode
    @Environment(\.inNavigationView) private var inNavigationView

    @EnvironmentObject<PlayerModel> private var player

    var body: some View {
        HStack {
            #if !os(macOS)
                closeButton
            #endif

            if player.currentItem != nil {
                HStack {
                    Text(playbackStatus)
                    // NOTE(review): this label appears empty — possibly a
                    // separator glyph lost in transfer; verify against repo.
                    Text("")
                    rateMenu
                }
                .font(.caption2)
                #if os(macOS)
                .padding(.leading, 4)
                #endif

                Spacer()

                HStack(spacing: 4) {
                    if !player.lastSkipped.isNil {
                        restoreLastSkippedSegmentButton
                    }
                    // Status icon: live dot, loading bolt, or a tappable
                    // error indicator that opens the error alert.
                    if player.live {
                        Image(systemName: "dot.radiowaves.left.and.right")
                    } else if player.isLoadingAvailableStreams || player.isLoadingStream {
                        Image(systemName: "bolt.horizontal.fill")
                    } else if !player.playerError.isNil {
                        Button {
                            player.presentingErrorDetails = true
                        } label: {
                            Image(systemName: "exclamationmark.circle.fill")
                                .foregroundColor(.red)
                        }
                        .buttonStyle(.plain)
                    }

                    streamControl
                        .disabled(player.isLoadingAvailableStreams)
                        .frame(alignment: .trailing)
                        .onChange(of: player.streamSelection) { selection in
                            guard !selection.isNil else {
                                return
                            }

                            player.upgradeToStream(selection!)
                        }
                    #if os(macOS)
                        .frame(maxWidth: 180)
                    #endif
                }
                // Suppress implicit animation when status/selection changes.
                .transaction { t in t.animation = .none }
                .font(.caption2)
            } else {
                Spacer()
            }
        }
        .foregroundColor(colorScheme == .dark ? .gray : .black)
        .alert(isPresented: $player.presentingErrorDetails) {
            Alert(
                title: Text("Error"),
                message: Text(player.playerError?.localizedDescription ?? "")
            )
        }
        .frame(minWidth: 0, maxWidth: .infinity, minHeight: 20)
        .padding(4)
        .background(colorScheme == .dark ? Color.black : Color.white)
    }

    private var closeButton: some View {
        Button {
            player.hide()
        } label: {
            Label(
                "Close",
                systemImage: inNavigationView ? "chevron.backward.circle.fill" : "chevron.down.circle.fill"
            )
            .labelStyle(.iconOnly)
        }
        .accessibilityLabel(Text("Close"))
        .buttonStyle(.borderless)
        .foregroundColor(.gray)
        .keyboardShortcut(.cancelAction)
    }

    /// "LIVE", a loading placeholder, or the wall-clock finish time at the
    /// current playback rate.
    private var playbackStatus: String {
        if player.live {
            return "LIVE"
        }

        guard !player.isLoadingVideo else {
            return "loading..."
        }

        guard let video = player.currentVideo,
              let time = player.time
        else {
            return ""
        }

        let videoLengthAtRate = video.length / Double(player.currentRate)
        let remainingSeconds = videoLengthAtRate - time.seconds

        if remainingSeconds < 60 {
            return "less than a minute"
        }

        let timeFinishAt = Date().addingTimeInterval(remainingSeconds)

        return "ends at \(formattedTimeFinishAt(timeFinishAt))"
    }

    private func formattedTimeFinishAt(_ date: Date) -> String {
        let dateFormatter = DateFormatter()
        dateFormatter.dateStyle = .none
        dateFormatter.timeStyle = .short

        return dateFormatter.string(from: date)
    }

    /// Rate control: inline picker on macOS, menu elsewhere.
    private var rateMenu: some View {
        #if os(macOS)
            ratePicker
                .labelsHidden()
                .frame(maxWidth: 70)
        #else
            Menu {
                ratePicker
            } label: {
                Text(player.rateLabel(player.currentRate))
            }
        #endif
    }

    private var ratePicker: some View {
        Picker("", selection: $player.currentRate) {
            ForEach(PlayerModel.availableRates, id: \.self) { rate in
                Text(player.rateLabel(rate)).tag(rate)
            }
        }
    }

    /// Undo button for the last SponsorBlock skip, labeled with the
    /// skipped segment's title.
    private var restoreLastSkippedSegmentButton: some View {
        HStack(spacing: 4) {
            Button {
                player.restoreLastSkippedSegment()
            } label: {
                HStack(spacing: 4) {
                    Image(systemName: "arrow.uturn.left.circle")
                    Text(player.lastSkipped!.title())
                }
            }
            .buttonStyle(.plain)

            // NOTE(review): this label appears empty — possibly a separator
            // glyph lost in transfer; verify against repo.
            Text("")
        }
    }

    /// Stream quality selection grouped per instance and stream kind;
    /// macOS uses a sectioned Picker, other platforms a Menu.
    private var streamControl: some View {
        #if os(macOS)
            Picker("", selection: $player.streamSelection) {
                ForEach(InstancesModel.all) { instance in
                    let instanceStreams = availableStreamsForInstance(instance)
                    if !instanceStreams.values.isEmpty {
                        let kinds = Array(instanceStreams.keys).sorted { $0 < $1 }

                        Section(header: Text(instance.longDescription)) {
                            ForEach(kinds, id: \.self) { key in
                                ForEach(instanceStreams[key] ?? []) { stream in
                                    Text(stream.quality).tag(Stream?.some(stream))
                                }

                                if kinds.count > 1 {
                                    Divider()
                                }
                            }
                        }
                    }
                }
            }
        #else
            Menu {
                ForEach(InstancesModel.all) { instance in
                    let instanceStreams = availableStreamsForInstance(instance)
                    if !instanceStreams.values.isEmpty {
                        let kinds = Array(instanceStreams.keys).sorted { $0 < $1 }

                        Picker("", selection: $player.streamSelection) {
                            ForEach(kinds, id: \.self) { key in
                                ForEach(instanceStreams[key] ?? []) { stream in
                                    Text(stream.description).tag(Stream?.some(stream))
                                }

                                if kinds.count > 1 {
                                    Divider()
                                }
                            }
                        }
                    }
                }
            } label: {
                Text(player.streamSelection?.quality ?? "")
            }
        #endif
    }

    /// Streams offered by the given instance, grouped by kind.
    /// Force-unwraps `kind` — assumes every sorted stream has one.
    private func availableStreamsForInstance(_ instance: Instance) -> [Stream.Kind: [Stream]] {
        let streams = player.availableStreamsSorted.filter { $0.instance == instance }

        return Dictionary(grouping: streams, by: \.kind!)
    }
}

struct PlaybackBar_Previews: PreviewProvider {
    static var previews: some View {
        PlaybackBar()
            .injectFixtureEnvironmentObjects()
    }
}

View File

@ -0,0 +1,55 @@
import SwiftUI
/// Invisible three-column gesture layer over the video surface.
/// A single tap on any third toggles the controls; double taps seek back,
/// toggle play/pause, or seek forward depending on the column.
struct PlayerGestures: View {
    @EnvironmentObject<PlayerModel> private var player
    @EnvironmentObject<PlayerControlsModel> private var model

    var body: some View {
        HStack(spacing: 0) {
            // Left third: double-tap seeks back 10 seconds.
            tapArea {
                player.backend.seek(relative: .secondsInDefaultTimescale(-10))
            }

            // Middle third: double-tap toggles play/pause.
            tapArea {
                player.backend.togglePlay()
            }

            // Right third: double-tap seeks forward 10 seconds.
            tapArea {
                player.backend.seek(relative: .secondsInDefaultTimescale(10))
            }
        }
    }

    /// Full-size hit area whose single tap always toggles the controls;
    /// the double-tap behavior is supplied by the caller.
    private func tapArea(doubleTapAction: @escaping () -> Void) -> some View {
        gestureRectangle
            .tapRecognizer(
                tapSensitivity: 0.2,
                singleTapAction: { model.toggle() },
                doubleTapAction: doubleTapAction
            )
    }

    /// Transparent, hit-testable rectangle filling the available space.
    var gestureRectangle: some View {
        Color.clear
            .contentShape(Rectangle())
            .frame(maxWidth: .infinity, maxHeight: .infinity)
    }
}

struct PlayerGestures_Previews: PreviewProvider {
    static var previews: some View {
        PlayerGestures()
    }
}

View File

@ -0,0 +1,79 @@
import SwiftUI
/// Stream quality selector grouped per instance and stream kind; only shows
/// streams the active backend can play. Selecting one triggers an in-place
/// stream upgrade.
struct StreamControl: View {
    @EnvironmentObject<PlayerModel> private var player

    var body: some View {
        Group {
            #if os(macOS)
                Picker("", selection: $player.streamSelection) {
                    ForEach(InstancesModel.all) { instance in
                        let instanceStreams = availableStreamsForInstance(instance)
                        if !instanceStreams.values.isEmpty {
                            let kinds = Array(instanceStreams.keys).sorted { $0 < $1 }

                            Section(header: Text(instance.longDescription)) {
                                ForEach(kinds, id: \.self) { key in
                                    ForEach(instanceStreams[key] ?? []) { stream in
                                        Text(stream.quality).tag(Stream?.some(stream))
                                    }

                                    if kinds.count > 1 {
                                        Divider()
                                    }
                                }
                            }
                        }
                    }
                }
                .disabled(player.isLoadingAvailableStreams)
            #else
                Menu {
                    ForEach(InstancesModel.all) { instance in
                        let instanceStreams = availableStreamsForInstance(instance)
                        if !instanceStreams.values.isEmpty {
                            let kinds = Array(instanceStreams.keys).sorted { $0 < $1 }

                            Picker("", selection: $player.streamSelection) {
                                ForEach(kinds, id: \.self) { key in
                                    ForEach(instanceStreams[key] ?? []) { stream in
                                        Text(stream.description).tag(Stream?.some(stream))
                                    }

                                    if kinds.count > 1 {
                                        Divider()
                                    }
                                }
                            }
                        }
                    }
                } label: {
                    Text(player.streamSelection?.quality ?? "no playable streams")
                }
                .disabled(player.isLoadingAvailableStreams)
            #endif
        }
        // Suppress implicit animation when the selection label changes.
        .transaction { t in t.animation = .none }
        .onChange(of: player.streamSelection) { selection in
            guard !selection.isNil else {
                return
            }

            player.upgradeToStream(selection!)
        }
        .frame(alignment: .trailing)
    }

    /// Streams for the given instance that the active backend can play,
    /// grouped by kind. Force-unwraps `kind` — assumes every sorted stream
    /// has one.
    private func availableStreamsForInstance(_ instance: Instance) -> [Stream.Kind: [Stream]] {
        let streams = player.availableStreamsSorted.filter { $0.instance == instance }.filter { player.backend.canPlay($0) }

        return Dictionary(grouping: streams, by: \.kind!)
    }
}

struct StreamControl_Previews: PreviewProvider {
    static var previews: some View {
        StreamControl()
    }
}

View File

@ -0,0 +1,48 @@
import SwiftUI
/// Distinguishes single from double taps on a view. A single tap fires only
/// after `tapSensitivity` seconds pass without a second tap; a double tap
/// fires immediately and cancels the pending single tap.
struct TapRecognizerViewModifier: ViewModifier {
    // Set on the first tap; cleared if a second tap arrives in time.
    @State private var singleTapPending = false

    var tapSensitivity: Double
    var singleTapAction: () -> Void
    var doubleTapAction: () -> Void

    init(tapSensitivity: Double, singleTapAction: @escaping () -> Void, doubleTapAction: @escaping () -> Void) {
        self.tapSensitivity = tapSensitivity
        self.singleTapAction = singleTapAction
        self.doubleTapAction = doubleTapAction
    }

    func body(content: Content) -> some View {
        content.gesture(
            TapGesture(count: 1)
                .onEnded {
                    singleTapPending = true
                    // Defer: only run if no double tap cancelled us meanwhile.
                    DispatchQueue.main.asyncAfter(deadline: .now() + tapSensitivity) {
                        guard singleTapPending else { return }
                        singleTapAction()
                    }
                }
                .simultaneously(
                    with: TapGesture(count: 2)
                        .onEnded {
                            singleTapPending = false
                            doubleTapAction()
                        }
                )
        )
    }
}

extension View {
    /// Attaches single/double tap recognition with the given grace period.
    func tapRecognizer(tapSensitivity: Double, singleTapAction: @escaping () -> Void, doubleTapAction: @escaping () -> Void) -> some View {
        modifier(TapRecognizerViewModifier(tapSensitivity: tapSensitivity, singleTapAction: singleTapAction, doubleTapAction: doubleTapAction))
    }
}

View File

@ -0,0 +1,193 @@
import SwiftUI
/// Seekable playback timeline with a draggable thumb, a time tooltip shown
/// while dragging, and SponsorBlock segment markers drawn over the track.
struct TimelineView: View {
    @Binding private var duration: Double
    @Binding private var current: Double

    // Measured size of the track, kept in sync via GeometryReader below.
    @State private var size = CGSize.zero
    @State private var dragging = false
    @State private var dragOffset: Double = 0
    // Value of `current` captured when the drag began.
    @State private var draggedFrom: Double = 0

    private var start: Double = 0.0
    private var height = 10.0

    var cornerRadius: Double
    var thumbTooltipWidth: Double = 100

    @EnvironmentObject<PlayerModel> private var player
    @EnvironmentObject<PlayerControlsModel> private var controls

    init(duration: Binding<Double>, current: Binding<Double>, cornerRadius: Double = 10.0) {
        _duration = duration
        _current = current
        self.cornerRadius = cornerRadius
    }

    var body: some View {
        ZStack(alignment: .leading) {
            // Full-length track.
            RoundedRectangle(cornerRadius: cornerRadius)
                .foregroundColor(.blue)
                .frame(maxHeight: height)

            // Elapsed portion.
            RoundedRectangle(cornerRadius: cornerRadius)
                .fill(
                    Color.green
                )
                .frame(maxHeight: height)
                .frame(width: current * oneUnitWidth)

            segmentsLayers

            // Draggable thumb. Starting a drag pauses the controls
            // auto-hide timer; the actual seek is issued on release.
            Circle()
                .strokeBorder(.gray, lineWidth: 1)
                .background(Circle().fill(dragging ? .gray : .white))
                .offset(x: thumbOffset)
                .foregroundColor(.red.opacity(0.6))
                .frame(maxHeight: height * 2)
                .gesture(
                    DragGesture(minimumDistance: 0)
                        .onChanged { value in
                            if !dragging {
                                controls.removeTimer()
                                draggedFrom = current
                            }

                            dragging = true

                            let drag = value.translation.width
                            let change = (drag / size.width) * units
                            let changedCurrent = current + change

                            // Ignore drags that would project outside the
                            // playable range.
                            guard changedCurrent >= start, changedCurrent <= duration else {
                                return
                            }

                            withAnimation(Animation.linear(duration: 0.2)) {
                                dragOffset = drag
                            }
                        }
                        .onEnded { _ in
                            current = projectedValue
                            player.backend.seek(to: projectedValue)

                            dragging = false
                            dragOffset = 0.0
                            draggedFrom = 0.0
                            controls.resetTimer()
                        }
                )

            // Tooltip showing the projected seek time while dragging.
            ZStack {
                RoundedRectangle(cornerRadius: cornerRadius)
                    .frame(maxWidth: thumbTooltipWidth, maxHeight: 30)

                Text(projectedValue.formattedAsPlaybackTime() ?? "--:--")
                    .foregroundColor(.black)
            }
            .animation(.linear(duration: 0.1))
            .opacity(dragging ? 1 : 0)
            .offset(x: thumbTooltipOffset, y: -(height * 2) - 7)
        }
        .background(GeometryReader { proxy in
            Color.clear
                .onAppear {
                    self.size = proxy.size
                }
                .onChange(of: proxy.size) { size in
                    self.size = size
                }
        })
        // Tap/release anywhere on the track seeks directly to that position.
        .gesture(DragGesture(minimumDistance: 0).onEnded { value in
            let target = (value.location.x / size.width) * units
            current = target
            player.backend.seek(to: target)
        })
    }

    /// Time the current drag would land on; falls back to `start` when the
    /// math is degenerate (e.g. zero-width track).
    var projectedValue: Double {
        let change = (dragOffset / size.width) * units
        let projected = draggedFrom + change

        return projected.isFinite ? projected : start
    }

    var thumbOffset: Double {
        let offset = dragging ? (draggedThumbHorizontalOffset + dragOffset) : thumbHorizontalOffset

        return offset.isFinite ? offset : thumbLeadingOffset
    }

    /// Tooltip center follows the thumb but is clamped to the track bounds.
    var thumbTooltipOffset: Double {
        let offset = (dragging ? ((current * oneUnitWidth) + dragOffset) : (current * oneUnitWidth)) - (thumbTooltipWidth / 2)

        return offset.clamped(to: minThumbTooltipOffset ... maxThumbTooltipOffset)
    }

    var minThumbTooltipOffset: Double = -10

    var maxThumbTooltipOffset: Double {
        max(minThumbTooltipOffset, (units * oneUnitWidth) - thumbTooltipWidth + 10)
    }

    /// SponsorBlock segments rendered as red overlays on the track.
    var segmentsLayers: some View {
        ForEach(player.sponsorBlock.segments, id: \.uuid) { segment in
            RoundedRectangle(cornerRadius: cornerRadius)
                .offset(x: segmentLayerHorizontalOffset(segment))
                .foregroundColor(.red)
                .frame(maxHeight: height)
                .frame(width: segmentLayerWidth(segment))
        }
    }

    func segmentLayerHorizontalOffset(_ segment: Segment) -> Double {
        segment.start * oneUnitWidth
    }

    func segmentLayerWidth(_ segment: Segment) -> Double {
        let width = segment.duration * oneUnitWidth

        return width.isFinite ? width : thumbLeadingOffset
    }

    var draggedThumbHorizontalOffset: Double {
        thumbLeadingOffset + (draggedFrom * oneUnitWidth)
    }

    var thumbHorizontalOffset: Double {
        thumbLeadingOffset + (current * oneUnitWidth)
    }

    // Offsets are measured from the ZStack's center, so the track's leading
    // edge sits at -width/2.
    var thumbLeadingOffset: Double {
        -(size.width / 2)
    }

    /// Track width per second of playback; 0 while the layout is degenerate.
    var oneUnitWidth: Double {
        let one = size.width / units

        return one.isFinite ? one : 0
    }

    var units: Double {
        duration - start
    }

    func setCurrent(_ current: Double) {
        withAnimation {
            self.current = current
        }
    }
}

struct TimelineView_Previews: PreviewProvider {
    static var previews: some View {
        VStack(spacing: 40) {
            TimelineView(duration: .constant(100), current: .constant(0))
            TimelineView(duration: .constant(100), current: .constant(1))
            TimelineView(duration: .constant(100), current: .constant(30))
            TimelineView(duration: .constant(100), current: .constant(50))
            TimelineView(duration: .constant(100), current: .constant(66))
            TimelineView(duration: .constant(100), current: .constant(90))
            TimelineView(duration: .constant(100), current: .constant(100))
        }
        .padding()
    }
}

View File

@ -6,7 +6,7 @@ struct VideoDetailsPaddingModifier: ViewModifier {
#if os(macOS)
30
#else
40
10
#endif
}

View File

@ -5,6 +5,7 @@ struct VideoPlayerSizeModifier: ViewModifier {
let geometry: GeometryProxy
let aspectRatio: Double?
let minimumHeightLeft: Double
let fullScreen: Bool
#if os(iOS)
@Environment(\.verticalSizeClass) private var verticalSizeClass
@ -13,18 +14,19 @@ struct VideoPlayerSizeModifier: ViewModifier {
init(
geometry: GeometryProxy,
aspectRatio: Double? = nil,
minimumHeightLeft: Double? = nil
minimumHeightLeft: Double? = nil,
fullScreen: Bool = false
) {
self.geometry = geometry
self.aspectRatio = aspectRatio ?? VideoPlayerView.defaultAspectRatio
self.minimumHeightLeft = minimumHeightLeft ?? VideoPlayerView.defaultMinimumHeightLeft
self.fullScreen = fullScreen
}
func body(content: Content) -> some View {
content
.frame(maxHeight: maxHeight)
.aspectRatio(usedAspectRatio, contentMode: usedAspectRatioContentMode)
.edgesIgnoringSafeArea(edgesIgnoringSafeArea)
.frame(maxHeight: fullScreen ? .infinity : maxHeight)
.aspectRatio(usedAspectRatio, contentMode: .fit)
}
var usedAspectRatio: Double {
@ -44,7 +46,7 @@ struct VideoPlayerSizeModifier: ViewModifier {
var usedAspectRatioContentMode: ContentMode {
#if os(iOS)
verticalSizeClass == .regular ? .fit : .fill
!fullScreen ? .fit : .fill
#else
.fit
#endif
@ -59,14 +61,4 @@ struct VideoPlayerSizeModifier: ViewModifier {
return [height, 0].max()!
}
var edgesIgnoringSafeArea: Edge.Set {
let empty = Edge.Set()
#if os(iOS)
return verticalSizeClass == .compact ? .all : empty
#else
return empty
#endif
}
}

View File

@ -22,7 +22,6 @@ struct VideoPlayerView: View {
@Environment(\.colorScheme) private var colorScheme
#if os(iOS)
@Environment(\.presentationMode) private var presentationMode
@Environment(\.horizontalSizeClass) private var horizontalSizeClass
@Environment(\.verticalSizeClass) private var verticalSizeClass
@ -36,6 +35,7 @@ struct VideoPlayerView: View {
#endif
@EnvironmentObject<AccountsModel> private var accounts
@EnvironmentObject<PlayerControlsModel> private var playerControls
@EnvironmentObject<PlayerModel> private var player
var body: some View {
@ -60,12 +60,15 @@ struct VideoPlayerView: View {
.onChange(of: geometry.size) { size in
self.playerSize = size
}
.onChange(of: fullScreenDetails) { value in
player.backend.setNeedsDrawing(!value)
}
#if os(iOS)
.onReceive(NotificationCenter.default.publisher(for: UIDevice.orientationDidChangeNotification)) { _ in
handleOrientationDidChangeNotification()
}
.onDisappear {
guard !player.playingFullscreen else {
guard !playerControls.playingFullscreen else {
return // swiftlint:disable:this implicit_return
}
@ -80,7 +83,6 @@ struct VideoPlayerView: View {
}
#endif
}
.navigationBarHidden(true)
#endif
}
@ -89,79 +91,117 @@ struct VideoPlayerView: View {
Group {
#if os(tvOS)
player.playerView
.ignoresSafeArea(.all, edges: .all)
#else
GeometryReader { geometry in
VStack(spacing: 0) {
#if os(iOS)
if verticalSizeClass == .regular {
if !playerControls.playingFullscreen {
#if os(macOS)
PlaybackBar()
}
#elseif os(macOS)
PlaybackBar()
#endif
#endif
}
if player.currentItem.isNil {
playerPlaceholder(geometry: geometry)
} else if player.playingInPictureInPicture {
pictureInPicturePlaceholder(geometry: geometry)
} else {
player.playerView
.modifier(
VideoPlayerSizeModifier(
geometry: geometry,
aspectRatio: player.controller?.aspectRatio
)
ZStack(alignment: .top) {
switch player.activeBackend {
case .mpv:
player.mpvPlayerView
.overlay(GeometryReader { proxy in
Color.clear
.onAppear {
player.playerSize = proxy.size
player.mpvBackend.client?.setSize(proxy.size.width, proxy.size.height)
}
.onChange(of: proxy.size) { _ in
player.playerSize = proxy.size
player.mpvBackend.client?.setSize(proxy.size.width, proxy.size.height)
}
})
case .appleAVPlayer:
player.avPlayerView
}
PlayerGestures()
PlayerControls(player: player)
}
.modifier(
VideoPlayerSizeModifier(
geometry: geometry,
aspectRatio: player.avPlayerBackend.controller?.aspectRatio,
fullScreen: playerControls.playingFullscreen
)
)
}
}
.frame(maxWidth: fullScreenLayout ? .infinity : nil, maxHeight: fullScreenLayout ? .infinity : nil)
#if os(iOS)
.onSwipeGesture(
up: {
withAnimation {
fullScreenDetails = true
}
},
down: { player.hide() }
)
.onSwipeGesture(
up: {
withAnimation {
fullScreenDetails = true
}
},
down: { player.hide() }
)
.onHover { hovering in
hovering ? playerControls.show() : playerControls.hide()
}
#endif
.background(Color.black)
.background(Color.black)
Group {
#if os(iOS)
if verticalSizeClass == .regular {
if !playerControls.playingFullscreen {
Group {
#if os(iOS)
if verticalSizeClass == .regular {
VideoDetails(sidebarQueue: sidebarQueueBinding, fullScreen: $fullScreenDetails)
}
#else
VideoDetails(sidebarQueue: sidebarQueueBinding, fullScreen: $fullScreenDetails)
}
#else
VideoDetails(sidebarQueue: sidebarQueueBinding, fullScreen: $fullScreenDetails)
#endif
#endif
}
.background(colorScheme == .dark ? Color.black : Color.white)
.modifier(VideoDetailsPaddingModifier(
geometry: geometry,
aspectRatio: player.avPlayerBackend.controller?.aspectRatio,
fullScreen: fullScreenDetails
))
}
.background(colorScheme == .dark ? Color.black : Color.white)
.modifier(VideoDetailsPaddingModifier(
geometry: geometry,
aspectRatio: player.controller?.aspectRatio,
fullScreen: fullScreenDetails
))
}
#endif
}
.background(colorScheme == .dark ? Color.black : Color.white)
.background(((colorScheme == .dark || fullScreenLayout) ? Color.black : Color.white).edgesIgnoringSafeArea(.all))
#if os(macOS)
.frame(minWidth: 650)
#endif
#if os(iOS)
if sidebarQueue {
PlayerQueueView(sidebarQueue: .constant(true), fullScreen: $fullScreenDetails)
.frame(maxWidth: 350)
}
#elseif os(macOS)
if Defaults[.playerSidebar] != .never {
PlayerQueueView(sidebarQueue: sidebarQueueBinding, fullScreen: $fullScreenDetails)
.frame(minWidth: 300)
}
#endif
if !playerControls.playingFullscreen {
#if os(iOS)
if sidebarQueue {
PlayerQueueView(sidebarQueue: .constant(true), fullScreen: $fullScreenDetails)
.frame(maxWidth: 350)
}
#elseif os(macOS)
if Defaults[.playerSidebar] != .never {
PlayerQueueView(sidebarQueue: sidebarQueueBinding, fullScreen: $fullScreenDetails)
.frame(minWidth: 300)
}
#endif
}
}
.ignoresSafeArea(.all, edges: fullScreenLayout ? .vertical : Edge.Set())
.statusBar(hidden: playerControls.playingFullscreen)
.navigationBarHidden(true)
}
var fullScreenLayout: Bool {
playerControls.playingFullscreen || verticalSizeClass == .compact
}
func playerPlaceholder(geometry: GeometryProxy) -> some View {
@ -235,7 +275,7 @@ struct VideoPlayerView: View {
private func configureOrientationUpdatesBasedOnAccelerometer() {
if UIDevice.current.orientation.isLandscape,
enterFullscreenInLandscape,
!player.playingFullscreen,
!playerControls.playingFullscreen,
!player.playingInPictureInPicture
{
DispatchQueue.main.async {

View File

@ -53,7 +53,7 @@ struct PlaylistsView: View {
}
var body: some View {
PlayerControlsView(toolbar: {
BrowserPlayerControls(toolbar: {
HStack {
HStack {
newPlaylistButton

View File

@ -0,0 +1,54 @@
import Foundation
/// Wrapper around `DispatchSourceTimer` that tracks its own suspend/resume
/// state, making unbalanced `resume`/`suspend` calls safe and preventing the
/// crash that occurs when a suspended source is cancelled in deinit.
final class RepeatingTimer {
    /// Interval between ticks; also used as the initial delay.
    let timeInterval: TimeInterval

    /// Called on every tick. May be replaced at any time.
    var eventHandler: (() -> Void)?

    private enum State {
        case suspended
        case resumed
    }

    private var state: State = .suspended

    private lazy var timer: DispatchSourceTimer = {
        let source = DispatchSource.makeTimerSource()
        source.schedule(deadline: .now() + self.timeInterval, repeating: self.timeInterval)
        // Weak capture: the source must not keep this wrapper alive.
        source.setEventHandler { [weak self] in
            self?.eventHandler?()
        }
        return source
    }()

    init(timeInterval: TimeInterval) {
        self.timeInterval = timeInterval
    }

    deinit {
        timer.setEventHandler {}
        timer.cancel()
        /*
         If the timer is suspended, calling cancel without resuming
         triggers a crash. This is documented here https://forums.developer.apple.com/thread/15902
         */
        resume()
        eventHandler = nil
    }

    /// Starts (or restarts) the timer. Calling while already running is a
    /// no-op, keeping DispatchSource's resume/suspend calls balanced.
    func resume() {
        guard state != .resumed else {
            return
        }
        state = .resumed
        timer.resume()
    }

    /// Pauses the timer. Calling while already suspended is a no-op.
    func suspend() {
        guard state != .suspended else {
            return
        }
        state = .suspended
        timer.suspend()
    }
}

View File

@ -41,7 +41,7 @@ struct SearchView: View {
}
var body: some View {
PlayerControlsView(toolbar: {
BrowserPlayerControls(toolbar: {
#if os(iOS)
if accounts.app.supportsSearchFilters {
HStack(spacing: 0) {

View File

@ -33,7 +33,7 @@ struct TrendingView: View {
}
var body: some View {
PlayerControlsView(toolbar: {
BrowserPlayerControls(toolbar: {
HStack {
if accounts.app.supportsTrendingCategories {
HStack {

View File

@ -72,7 +72,7 @@ struct VideoCell: View {
}
if !playNowContinues {
player.player.seek(to: .zero)
player.backend.seek(to: .zero)
}
player.play()
@ -255,7 +255,6 @@ struct VideoCell: View {
HStack(spacing: 8) {
if let date = video.publishedDate {
HStack(spacing: 2) {
Image(systemName: "calendar")
Text(date)
.allowsTightening(true)
}
@ -272,7 +271,6 @@ struct VideoCell: View {
Spacer()
HStack(spacing: 2) {
Image(systemName: "clock")
Text(time)
}
}

View File

@ -1,11 +1,12 @@
import Foundation
import SwiftUI
struct PlayerControlsView<Content: View, Toolbar: View>: View {
struct BrowserPlayerControls<Content: View, Toolbar: View>: View {
let content: Content
let toolbar: Toolbar?
@Environment(\.navigationStyle) private var navigationStyle
@EnvironmentObject<PlayerControlsModel> private var playerControls
@EnvironmentObject<PlayerModel> private var model
init(@ViewBuilder toolbar: @escaping () -> Toolbar? = { nil }, @ViewBuilder content: @escaping () -> Content) {
@ -84,7 +85,7 @@ struct PlayerControlsView<Content: View, Toolbar: View>: View {
ZStack(alignment: .bottom) {
HStack {
Group {
if model.isPlaying {
if playerControls.isPlaying {
Button(action: {
model.pause()
}) {
@ -96,9 +97,9 @@ struct PlayerControlsView<Content: View, Toolbar: View>: View {
}) {
Label("Play", systemImage: "play.fill")
}
.disabled(model.player.currentItem.isNil)
}
}
.disabled(playerControls.isLoadingVideo)
.font(.system(size: 30))
.frame(minWidth: 30)
@ -145,7 +146,7 @@ struct PlayerControlsView<Content: View, Toolbar: View>: View {
struct PlayerControlsView_Previews: PreviewProvider {
static var previews: some View {
PlayerControlsView {
BrowserPlayerControls {
VStack {
Spacer()
Text("Hello")

View File

@ -28,12 +28,12 @@ struct ChannelPlaylistView: View {
if inNavigationView {
content
} else {
PlayerControlsView {
BrowserPlayerControls {
content
}
}
#else
PlayerControlsView {
BrowserPlayerControls {
content
}
#endif

View File

@ -33,12 +33,12 @@ struct ChannelVideosView: View {
if inNavigationView {
content
} else {
PlayerControlsView {
BrowserPlayerControls {
content
}
}
#else
PlayerControlsView {
BrowserPlayerControls {
content
}
#endif

View File

@ -0,0 +1,33 @@
import SwiftUI
#if !os(macOS)
// SwiftUI wrapper hosting the UIKit-based mpv view controller and wiring it
// into the shared PlayerModel's mpv backend.
struct MPVPlayerView: UIViewControllerRepresentable {
@EnvironmentObject<PlayerModel> private var player
// NOTE(review): @State stores a reference type here; SwiftUI persists only
// the initially-created instance — confirm the controller is intended to
// survive view re-creation.
@State private var controller = MPVViewController()
func makeUIViewController(context _: Context) -> some UIViewController {
// Hand the backend its controller and client handle before presenting.
player.mpvBackend.controller = controller
player.mpvBackend.client = controller.client
return controller
}
func updateUIViewController(_: UIViewControllerType, context _: Context) {}
}
#else
/// SwiftUI wrapper that embeds an AppKit `VideoView` backed by the supplied
/// video layer (macOS counterpart of the UIKit variant).
struct MPVPlayerView: NSViewRepresentable {
    let layer: VideoLayer

    func makeNSView(context _: Context) -> some NSView {
        let videoView = VideoView()
        // Assign the layer first, then opt into layer hosting; this order is
        // what makes AppKit adopt the provided layer instead of its own.
        videoView.layer = layer
        videoView.wantsLayer = true
        return videoView
    }

    func updateNSView(_: NSViewType, context _: Context) {}
}
#endif

View File

@ -46,7 +46,7 @@ struct PlaylistVideosView: View {
}
var body: some View {
PlayerControlsView {
BrowserPlayerControls {
VerticalCells(items: contentItems)
.onAppear {
if !player.accounts.app.userPlaylistsEndpointIncludesVideos {

View File

@ -15,7 +15,7 @@ struct PopularView: View {
}
var body: some View {
PlayerControlsView {
BrowserPlayerControls {
VerticalCells(items: videos)
.onAppear {
resource?.addObserver(store)

View File

@ -45,7 +45,7 @@ struct ShareButton: View {
shareAction(
accounts.api.shareURL(
contentItem,
time: player.player.currentTime()
time: player.backend.currentTime
)!
)
}
@ -67,7 +67,7 @@ struct ShareButton: View {
accounts.api.shareURL(
contentItem,
frontendHost: "www.youtube.com",
time: player.player.currentTime()
time: player.backend.currentTime
)!
)
}

View File

@ -60,7 +60,7 @@ struct SignInRequiredView<Content: View>: View {
struct SignInRequiredView_Previews: PreviewProvider {
static var previews: some View {
PlayerControlsView {
BrowserPlayerControls {
SignInRequiredView(title: "Subscriptions") {
Text("Only when signed in")
}

View File

@ -15,7 +15,7 @@ struct SubscriptionsView: View {
}
var body: some View {
PlayerControlsView {
BrowserPlayerControls {
SignInRequiredView(title: "Subscriptions") {
VerticalCells(items: videos)
.onAppear {

View File

@ -24,6 +24,7 @@ struct YatteeApp: App {
@StateObject private var menu = MenuModel()
@StateObject private var navigation = NavigationModel()
@StateObject private var player = PlayerModel()
@StateObject private var playerControls = PlayerControlsModel()
@StateObject private var playlists = PlaylistsModel()
@StateObject private var recents = RecentsModel()
@StateObject private var search = SearchModel()
@ -41,6 +42,7 @@ struct YatteeApp: App {
.environmentObject(instances)
.environmentObject(navigation)
.environmentObject(player)
.environmentObject(playerControls)
.environmentObject(playlists)
.environmentObject(recents)
.environmentObject(subscriptions)
@ -101,6 +103,7 @@ struct YatteeApp: App {
.environmentObject(instances)
.environmentObject(navigation)
.environmentObject(player)
.environmentObject(playerControls)
.environmentObject(playlists)
.environmentObject(recents)
.environmentObject(subscriptions)
@ -115,6 +118,7 @@ struct YatteeApp: App {
.environmentObject(accounts)
.environmentObject(instances)
.environmentObject(player)
.environmentObject(playerControls)
.environmentObject(updater)
}
#endif

1969
Vendor/mpv/iOS/include/client.h vendored Normal file

File diff suppressed because it is too large Load Diff

386
Vendor/mpv/iOS/include/qthelper.hpp vendored Normal file
View File

@ -0,0 +1,386 @@
/* Copyright (C) 2017 the mpv developers
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#ifndef MPV_CLIENT_API_QTHELPER_H_
#define MPV_CLIENT_API_QTHELPER_H_
#include <mpv/client.h>
#if !MPV_ENABLE_DEPRECATED
#error "This helper is deprecated. Copy it into your project instead."
#else
/**
* Note: these helpers are provided for convenience for C++/Qt applications.
* This is based on the public API in client.h, and it does not encode any
* knowledge that is not known or guaranteed outside of the C client API. You
* can even copy and modify this code as you like, or implement similar things
* for other languages.
*/
#include <cstring>
#include <QVariant>
#include <QString>
#include <QList>
#include <QHash>
#include <QSharedPointer>
#include <QMetaType>
namespace mpv {
namespace qt {
// Wrapper around mpv_handle. Does refcounting under the hood.
class Handle
{
struct container {
container(mpv_handle *h) : mpv(h) {}
~container() { mpv_terminate_destroy(mpv); }
mpv_handle *mpv;
};
QSharedPointer<container> sptr;
public:
// Construct a new Handle from a raw mpv_handle with refcount 1. If the
// last Handle goes out of scope, the mpv_handle will be destroyed with
// mpv_terminate_destroy().
// Never destroy the mpv_handle manually when using this wrapper. You
// will create dangling pointers. Just let the wrapper take care of
// destroying the mpv_handle.
// Never create multiple wrappers from the same raw mpv_handle; copy the
// wrapper instead (that's what it's for).
static Handle FromRawHandle(mpv_handle *handle) {
Handle h;
h.sptr = QSharedPointer<container>(new container(handle));
return h;
}
// Return the raw handle; for use with the libmpv C API.
operator mpv_handle*() const { return sptr ? (*sptr).mpv : 0; }
};
// Recursively convert an mpv_node tree into the equivalent QVariant:
// string/flag/int64/double map to QString/bool/qlonglong/double, and
// node arrays/maps map to QVariantList/QVariantMap. Unknown or future
// formats (including MPV_FORMAT_NONE) yield an invalid QVariant().
static inline QVariant node_to_variant(const mpv_node *node)
{
switch (node->format) {
case MPV_FORMAT_STRING:
return QVariant(QString::fromUtf8(node->u.string));
case MPV_FORMAT_FLAG:
return QVariant(static_cast<bool>(node->u.flag));
case MPV_FORMAT_INT64:
return QVariant(static_cast<qlonglong>(node->u.int64));
case MPV_FORMAT_DOUBLE:
return QVariant(node->u.double_);
case MPV_FORMAT_NODE_ARRAY: {
mpv_node_list *list = node->u.list;
QVariantList qlist;
for (int n = 0; n < list->num; n++)
qlist.append(node_to_variant(&list->values[n]));
return QVariant(qlist);
}
case MPV_FORMAT_NODE_MAP: {
mpv_node_list *list = node->u.list;
QVariantMap qmap;
for (int n = 0; n < list->num; n++) {
qmap.insert(QString::fromUtf8(list->keys[n]),
node_to_variant(&list->values[n]));
}
return QVariant(qmap);
}
default: // MPV_FORMAT_NONE, unknown values (e.g. future extensions)
return QVariant();
}
}
/**
 * Builds an mpv_node tree from a QVariant so Qt values can be passed to the
 * libmpv C API. The tree is allocated in the constructor and released in the
 * destructor (RAII), so the pointer returned by node() is valid only for the
 * builder's lifetime. On conversion failure the root is left as
 * MPV_FORMAT_NONE, which libmpv treats as an empty value.
 */
struct node_builder {
node_builder(const QVariant& v) {
set(&node_, v);
}
~node_builder() {
// Safe even when set() failed: free_node() handles MPV_FORMAT_NONE.
free_node(&node_);
}
// Root of the built tree; pass to mpv_set_property()/mpv_command_node().
mpv_node *node() { return &node_; }
private:
Q_DISABLE_COPY(node_builder)
mpv_node node_;
// Allocate an array (is_map == false) or map (is_map == true) node with
// room for num zero-initialized entries.
// NOTE(review): plain operator new throws std::bad_alloc rather than
// returning NULL, so the !list/!values/!keys checks below are effectively
// dead; kept as-is to stay identical to the upstream mpv helper.
mpv_node_list *create_list(mpv_node *dst, bool is_map, int num) {
dst->format = is_map ? MPV_FORMAT_NODE_MAP : MPV_FORMAT_NODE_ARRAY;
mpv_node_list *list = new mpv_node_list();
dst->u.list = list;
if (!list)
goto err;
list->values = new mpv_node[num]();
if (!list->values)
goto err;
if (is_map) {
list->keys = new char*[num]();
if (!list->keys)
goto err;
}
return list;
err:
free_node(dst);
return NULL;
}
// Duplicate a QString as a NUL-terminated UTF-8 C string owned by the tree
// (released via delete[] in free_node()).
char *dup_qstring(const QString &s) {
QByteArray b = s.toUtf8();
char *r = new char[b.size() + 1];
if (r)
std::memcpy(r, b.data(), b.size() + 1);
return r;
}
// Exact-type check for a QVariant's stored type.
bool test_type(const QVariant &v, QMetaType::Type t) {
// The Qt docs say: "Although this function is declared as returning
// "QVariant::Type(obsolete), the return value should be interpreted
// as QMetaType::Type."
// So a cast really seems to be needed to avoid warnings (urgh).
return static_cast<int>(v.type()) == static_cast<int>(t);
}
// Recursively convert src into dst. Exact scalar types are handled first;
// anything convertible to a list/map is handled structurally; everything
// else (or any allocation failure) leaves dst as MPV_FORMAT_NONE.
void set(mpv_node *dst, const QVariant &src) {
if (test_type(src, QMetaType::QString)) {
dst->format = MPV_FORMAT_STRING;
dst->u.string = dup_qstring(src.toString());
if (!dst->u.string)
goto fail;
} else if (test_type(src, QMetaType::Bool)) {
dst->format = MPV_FORMAT_FLAG;
dst->u.flag = src.toBool() ? 1 : 0;
} else if (test_type(src, QMetaType::Int) ||
test_type(src, QMetaType::LongLong) ||
test_type(src, QMetaType::UInt) ||
test_type(src, QMetaType::ULongLong))
{
dst->format = MPV_FORMAT_INT64;
dst->u.int64 = src.toLongLong();
} else if (test_type(src, QMetaType::Double)) {
dst->format = MPV_FORMAT_DOUBLE;
dst->u.double_ = src.toDouble();
} else if (src.canConvert<QVariantList>()) {
QVariantList qlist = src.toList();
mpv_node_list *list = create_list(dst, false, qlist.size());
if (!list)
goto fail;
list->num = qlist.size();
for (int n = 0; n < qlist.size(); n++)
set(&list->values[n], qlist[n]);
} else if (src.canConvert<QVariantMap>()) {
QVariantMap qmap = src.toMap();
mpv_node_list *list = create_list(dst, true, qmap.size());
if (!list)
goto fail;
list->num = qmap.size();
for (int n = 0; n < qmap.size(); n++) {
list->keys[n] = dup_qstring(qmap.keys()[n]);
if (!list->keys[n]) {
free_node(dst);
goto fail;
}
set(&list->values[n], qmap.values()[n]);
}
} else {
goto fail;
}
return;
fail:
dst->format = MPV_FORMAT_NONE;
}
// Recursively release a tree built by set(). Resets dst to
// MPV_FORMAT_NONE, so repeated calls are harmless.
void free_node(mpv_node *dst) {
switch (dst->format) {
case MPV_FORMAT_STRING:
delete[] dst->u.string;
break;
case MPV_FORMAT_NODE_ARRAY:
case MPV_FORMAT_NODE_MAP: {
mpv_node_list *list = dst->u.list;
if (list) {
for (int n = 0; n < list->num; n++) {
if (list->keys)
delete[] list->keys[n];
if (list->values)
free_node(&list->values[n]);
}
delete[] list->keys;
delete[] list->values;
}
delete list;
break;
}
default: ;
}
dst->format = MPV_FORMAT_NONE;
}
};
/**
* RAII wrapper that calls mpv_free_node_contents() on the pointer.
*/
struct node_autofree {
mpv_node *ptr;
node_autofree(mpv_node *a_ptr) : ptr(a_ptr) {}
~node_autofree() { mpv_free_node_contents(ptr); }
};
#if MPV_ENABLE_DEPRECATED
/**
* Return the given property as mpv_node converted to QVariant, or QVariant()
* on error.
*
* @deprecated use get_property() instead
*
* @param name the property name
*/
static inline QVariant get_property_variant(mpv_handle *ctx, const QString &name)
{
mpv_node node;
if (mpv_get_property(ctx, name.toUtf8().data(), MPV_FORMAT_NODE, &node) < 0)
return QVariant();
node_autofree f(&node);
return node_to_variant(&node);
}
/**
* Set the given property as mpv_node converted from the QVariant argument.
* @deprecated use set_property() instead
*/
static inline int set_property_variant(mpv_handle *ctx, const QString &name,
const QVariant &v)
{
node_builder node(v);
return mpv_set_property(ctx, name.toUtf8().data(), MPV_FORMAT_NODE, node.node());
}
/**
* Set the given option as mpv_node converted from the QVariant argument.
*
* @deprecated use set_property() instead
*/
static inline int set_option_variant(mpv_handle *ctx, const QString &name,
const QVariant &v)
{
node_builder node(v);
return mpv_set_option(ctx, name.toUtf8().data(), MPV_FORMAT_NODE, node.node());
}
/**
* mpv_command_node() equivalent. Returns QVariant() on error (and
* unfortunately, the same on success).
*
* @deprecated use command() instead
*/
static inline QVariant command_variant(mpv_handle *ctx, const QVariant &args)
{
node_builder node(args);
mpv_node res;
if (mpv_command_node(ctx, node.node(), &res) < 0)
return QVariant();
node_autofree f(&res);
return node_to_variant(&res);
}
#endif
/**
* This is used to return error codes wrapped in QVariant for functions which
* return QVariant.
*
* You can use get_error() or is_error() to extract the error status from a
* QVariant value.
*/
struct ErrorReturn
{
/**
* enum mpv_error value (or a value outside of it if ABI was extended)
*/
int error;
ErrorReturn() : error(0) {}
explicit ErrorReturn(int err) : error(err) {}
};
/**
* Return the mpv error code packed into a QVariant, or 0 (success) if it's not
* an error value.
*
* @return error code (<0) or success (>=0)
*/
static inline int get_error(const QVariant &v)
{
if (!v.canConvert<ErrorReturn>())
return 0;
return v.value<ErrorReturn>().error;
}
/**
* Return whether the QVariant carries a mpv error code.
*/
static inline bool is_error(const QVariant &v)
{
return get_error(v) < 0;
}
/**
* Return the given property as mpv_node converted to QVariant, or QVariant()
* on error.
*
* @param name the property name
* @return the property value, or an ErrorReturn with the error code
*/
static inline QVariant get_property(mpv_handle *ctx, const QString &name)
{
mpv_node node;
int err = mpv_get_property(ctx, name.toUtf8().data(), MPV_FORMAT_NODE, &node);
if (err < 0)
return QVariant::fromValue(ErrorReturn(err));
node_autofree f(&node);
return node_to_variant(&node);
}
/**
* Set the given property as mpv_node converted from the QVariant argument.
*
* @return mpv error code (<0 on error, >= 0 on success)
*/
static inline int set_property(mpv_handle *ctx, const QString &name,
const QVariant &v)
{
node_builder node(v);
return mpv_set_property(ctx, name.toUtf8().data(), MPV_FORMAT_NODE, node.node());
}
/**
* mpv_command_node() equivalent.
*
* @param args command arguments, with args[0] being the command name as string
* @return the property value, or an ErrorReturn with the error code
*/
static inline QVariant command(mpv_handle *ctx, const QVariant &args)
{
node_builder node(args);
mpv_node res;
int err = mpv_command_node(ctx, node.node(), &res);
if (err < 0)
return QVariant::fromValue(ErrorReturn(err));
node_autofree f(&res);
return node_to_variant(&res);
}
}
}
Q_DECLARE_METATYPE(mpv::qt::ErrorReturn)
#endif /* else #if MPV_ENABLE_DEPRECATED */
#endif

626
Vendor/mpv/iOS/include/render.h vendored Normal file
View File

@ -0,0 +1,626 @@
/* Copyright (C) 2018 the mpv developers
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#ifndef MPV_CLIENT_API_RENDER_H_
#define MPV_CLIENT_API_RENDER_H_
#include "client.h"
#ifdef __cplusplus
extern "C" {
#endif
/**
* Overview
* --------
*
* This API can be used to make mpv render using supported graphic APIs (such
* as OpenGL). It can be used to handle video display.
*
* The renderer needs to be created with mpv_render_context_create() before
* you start playback (or otherwise cause a VO to be created). Then (with most
* backends) mpv_render_context_render() can be used to explicitly render the
* current video frame. Use mpv_render_context_set_update_callback() to get
* notified when there is a new frame to draw.
*
* Preferably rendering should be done in a separate thread. If you call
* normal libmpv API functions on the renderer thread, deadlocks can result
* (these are made non-fatal with timeouts, but user experience will obviously
* suffer). See "Threading" section below.
*
* You can output and embed video without this API by setting the mpv "wid"
* option to a native window handle (see "Embedding the video window" section
* in the client.h header). In general, using the render API is recommended,
* because window embedding can cause various issues, especially with GUI
* toolkits and certain platforms.
*
* Supported backends
* ------------------
*
* OpenGL: via MPV_RENDER_API_TYPE_OPENGL, see render_gl.h header.
*
* Threading
* ---------
*
* You are recommended to do rendering on a separate thread than normal libmpv
* use.
*
* The mpv_render_* functions can be called from any thread, under the
* following conditions:
* - only one of the mpv_render_* functions can be called at the same time
* (unless they belong to different mpv cores created by mpv_create())
* - never can be called from within the callbacks set with
* mpv_set_wakeup_callback() or mpv_render_context_set_update_callback()
* - if the OpenGL backend is used, for all functions the OpenGL context
* must be "current" in the calling thread, and it must be the same OpenGL
* context as the mpv_render_context was created with. Otherwise, undefined
* behavior will occur.
* - the thread does not call libmpv API functions other than the mpv_render_*
* functions, except APIs which are declared as safe (see below). Likewise,
* there must be no lock or wait dependency from the render thread to a
* thread using other libmpv functions. Basically, the situation that your
* render thread waits for a "not safe" libmpv API function to return must
* not happen. If you ignore this requirement, deadlocks can happen, which
* are made non-fatal with timeouts; then playback quality will be degraded,
* and the message
* mpv_render_context_render() not being called or stuck.
* is logged. If you set MPV_RENDER_PARAM_ADVANCED_CONTROL, you promise that
* this won't happen, and must absolutely guarantee it, or a real deadlock
* will freeze the mpv core thread forever.
*
* libmpv functions which are safe to call from a render thread are:
* - functions marked with "Safe to be called from mpv render API threads."
* - client.h functions which don't have an explicit or implicit mpv_handle
* parameter
* - mpv_render_* functions; but only for the same mpv_render_context pointer.
* If the pointer is different, mpv_render_context_free() is not safe. (The
* reason is that if MPV_RENDER_PARAM_ADVANCED_CONTROL is set, it may have
* to process still queued requests from the core, which it can do only for
* the current context, while requests for other contexts would deadlock.
* Also, it may have to wait and block for the core to terminate the video
* chain to make sure no resources are used after context destruction.)
* - if the mpv_handle parameter refers to a different mpv core than the one
* you're rendering for (very obscure, but allowed)
*
* Note about old libmpv version:
*
* Before API version 1.105 (basically in mpv 0.29.x), simply enabling
* MPV_RENDER_PARAM_ADVANCED_CONTROL could cause deadlock issues. This can
* be worked around by setting the "vd-lavc-dr" option to "no".
* In addition, you were required to call all mpv_render*() API functions
* from the same thread on which mpv_render_context_create() was originally
* run (for the same the mpv_render_context). Not honoring it led to UB
* (deadlocks, use of invalid pthread_t handles), even if you moved your GL
* context to a different thread correctly.
* These problems were addressed in API version 1.105 (mpv 0.30.0).
*
* Context and handle lifecycle
* ----------------------------
*
* Video initialization will fail if the render context was not initialized yet
* (with mpv_render_context_create()), or it will revert to a VO that creates
* its own window.
*
* Currently, there can be only 1 mpv_render_context at a time per mpv core.
*
* Calling mpv_render_context_free() while a VO is using the render context is
* active will disable video.
*
* You must free the context with mpv_render_context_free() before the mpv core
* is destroyed. If this doesn't happen, undefined behavior will result.
*/
/**
* Opaque context, returned by mpv_render_context_create().
*/
typedef struct mpv_render_context mpv_render_context;
/**
* Parameters for mpv_render_param (which is used in a few places such as
* mpv_render_context_create().
*
* Also see mpv_render_param for conventions and how to use it.
*/
typedef enum mpv_render_param_type {
/**
* Not a valid value, but also used to terminate a params array. Its value
* is always guaranteed to be 0 (even if the ABI changes in the future).
*/
MPV_RENDER_PARAM_INVALID = 0,
/**
* The render API to use. Valid for mpv_render_context_create().
*
* Type: char*
*
* Defined APIs:
*
* MPV_RENDER_API_TYPE_OPENGL:
* OpenGL desktop 2.1 or later (preferably core profile compatible to
* OpenGL 3.2), or OpenGLES 2.0 or later.
* Providing MPV_RENDER_PARAM_OPENGL_INIT_PARAMS is required.
* It is expected that an OpenGL context is valid and "current" when
* calling mpv_render_* functions (unless specified otherwise). It
* must be the same context for the same mpv_render_context.
*/
MPV_RENDER_PARAM_API_TYPE = 1,
/**
* Required parameters for initializing the OpenGL renderer. Valid for
* mpv_render_context_create().
* Type: mpv_opengl_init_params*
*/
MPV_RENDER_PARAM_OPENGL_INIT_PARAMS = 2,
/**
* Describes a GL render target. Valid for mpv_render_context_render().
* Type: mpv_opengl_fbo*
*/
MPV_RENDER_PARAM_OPENGL_FBO = 3,
/**
* Control flipped rendering. Valid for mpv_render_context_render().
* Type: int*
* If the value is set to 0, render normally. Otherwise, render it flipped,
* which is needed e.g. when rendering to an OpenGL default framebuffer
* (which has a flipped coordinate system).
*/
MPV_RENDER_PARAM_FLIP_Y = 4,
/**
* Control surface depth. Valid for mpv_render_context_render().
* Type: int*
* This implies the depth of the surface passed to the render function in
* bits per channel. If omitted or set to 0, the renderer will assume 8.
* Typically used to control dithering.
*/
MPV_RENDER_PARAM_DEPTH = 5,
/**
* ICC profile blob. Valid for mpv_render_context_set_parameter().
* Type: mpv_byte_array*
* Set an ICC profile for use with the "icc-profile-auto" option. (If the
* option is not enabled, the ICC data will not be used.)
*/
MPV_RENDER_PARAM_ICC_PROFILE = 6,
/**
* Ambient light in lux. Valid for mpv_render_context_set_parameter().
* Type: int*
* This can be used for automatic gamma correction.
*/
MPV_RENDER_PARAM_AMBIENT_LIGHT = 7,
/**
* X11 Display, sometimes used for hwdec. Valid for
* mpv_render_context_create(). The Display must stay valid for the lifetime
* of the mpv_render_context.
* Type: Display*
*/
MPV_RENDER_PARAM_X11_DISPLAY = 8,
/**
* Wayland display, sometimes used for hwdec. Valid for
* mpv_render_context_create(). The wl_display must stay valid for the
* lifetime of the mpv_render_context.
* Type: struct wl_display*
*/
MPV_RENDER_PARAM_WL_DISPLAY = 9,
/**
* Better control about rendering and enabling some advanced features. Valid
* for mpv_render_context_create().
*
* This conflates multiple requirements the API user promises to abide if
* this option is enabled:
*
* - The API user's render thread, which is calling the mpv_render_*()
* functions, never waits for the core. Otherwise deadlocks can happen.
* See "Threading" section.
* - The callback set with mpv_render_context_set_update_callback() can now
* be called even if there is no new frame. The API user should call the
* mpv_render_context_update() function, and interpret the return value
* for whether a new frame should be rendered.
* - Correct functionality is impossible if the update callback is not set,
* or not set soon enough after mpv_render_context_create() (the core can
* block while waiting for you to call mpv_render_context_update(), and
* if the update callback is not correctly set, it will deadlock, or
* block for too long).
*
* In general, setting this option will enable the following features (and
* possibly more):
*
* - "Direct rendering", which means the player decodes directly to a
* texture, which saves a copy per video frame ("vd-lavc-dr" option
* needs to be enabled, and the rendering backend as well as the
* underlying GPU API/driver needs to have support for it).
* - Rendering screenshots with the GPU API if supported by the backend
* (instead of using a suboptimal software fallback via libswscale).
*
* Warning: do not just add this without reading the "Threading" section
* above, and then wondering that deadlocks happen. The
* requirements are tricky. But also note that even if advanced
* control is disabled, not adhering to the rules will lead to
* playback problems. Enabling advanced controls simply makes
* violating these rules fatal.
*
* Type: int*: 0 for disable (default), 1 for enable
*/
MPV_RENDER_PARAM_ADVANCED_CONTROL = 10,
/**
* Return information about the next frame to render. Valid for
* mpv_render_context_get_info().
*
* Type: mpv_render_frame_info*
*
* It strictly returns information about the _next_ frame. The implication
* is that e.g. mpv_render_context_update()'s return value will have
* MPV_RENDER_UPDATE_FRAME set, and the user is supposed to call
* mpv_render_context_render(). If there is no next frame, then the
* return value will have is_valid set to 0.
*/
MPV_RENDER_PARAM_NEXT_FRAME_INFO = 11,
/**
* Enable or disable video timing. Valid for mpv_render_context_render().
*
* Type: int*: 0 for disable, 1 for enable (default)
*
* When video is timed to audio, the player attempts to render video a bit
* ahead, and then do a blocking wait until the target display time is
* reached. This blocks mpv_render_context_render() for up to the amount
* specified with the "video-timing-offset" global option. You can set
* this parameter to 0 to disable this kind of waiting. If you do, it's
* recommended to use the target time value in mpv_render_frame_info to
* wait yourself, or to set the "video-timing-offset" to 0 instead.
*
* Disabling this without doing anything in addition will result in A/V sync
* being slightly off.
*/
MPV_RENDER_PARAM_BLOCK_FOR_TARGET_TIME = 12,
/**
* Use to skip rendering in mpv_render_context_render().
*
* Type: int*: 0 for rendering (default), 1 for skipping
*
* If this is set, you don't need to pass a target surface to the render
* function (and if you do, it's completely ignored). This can still call
* into the lower level APIs (i.e. if you use OpenGL, the OpenGL context
* must be set).
*
* Be aware that the render API will consider this frame as having been
* rendered. All other normal rules also apply, for example about whether
* you have to call mpv_render_context_report_swap(). It also does timing
* in the same way.
*/
MPV_RENDER_PARAM_SKIP_RENDERING = 13,
/**
* Deprecated. Not supported. Use MPV_RENDER_PARAM_DRM_DISPLAY_V2 instead.
* Type : struct mpv_opengl_drm_params*
*/
MPV_RENDER_PARAM_DRM_DISPLAY = 14,
/**
* DRM draw surface size, contains draw surface dimensions.
* Valid for mpv_render_context_create().
* Type : struct mpv_opengl_drm_draw_surface_size*
*/
MPV_RENDER_PARAM_DRM_DRAW_SURFACE_SIZE = 15,
/**
* DRM display, contains drm display handles.
* Valid for mpv_render_context_create().
* Type : struct mpv_opengl_drm_params_v2*
*/
MPV_RENDER_PARAM_DRM_DISPLAY_V2 = 16,
} mpv_render_param_type;
/**
* For backwards compatibility with the old naming of
* MPV_RENDER_PARAM_DRM_DRAW_SURFACE_SIZE
*/
#define MPV_RENDER_PARAM_DRM_OSD_SIZE MPV_RENDER_PARAM_DRM_DRAW_SURFACE_SIZE
/**
* Used to pass arbitrary parameters to some mpv_render_* functions. The
* meaning of the data parameter is determined by the type, and each
* MPV_RENDER_PARAM_* documents what type the value must point to.
*
* Each value documents the required data type as the pointer you cast to
* void* and set on mpv_render_param.data. For example, if MPV_RENDER_PARAM_FOO
* documents the type as Something* , then the code should look like this:
*
* Something foo = {...};
* mpv_render_param param;
* param.type = MPV_RENDER_PARAM_FOO;
* param.data = & foo;
*
* Normally, the data field points to exactly 1 object. If the type is char*,
* it points to a 0-terminated string.
*
* In all cases (unless documented otherwise) the pointers need to remain
* valid during the call only. Unless otherwise documented, the API functions
* will not write to the params array or any data pointed to it.
*
* As a convention, parameter arrays are always terminated by type==0. There
* is no specific order of the parameters required. The order of the 2 fields in
* this struct is guaranteed (even after ABI changes).
*/
typedef struct mpv_render_param {
enum mpv_render_param_type type;
void *data;
} mpv_render_param;
/**
* Predefined values for MPV_RENDER_PARAM_API_TYPE.
*/
#define MPV_RENDER_API_TYPE_OPENGL "opengl"
/**
* Flags used in mpv_render_frame_info.flags. Each value represents a bit in it.
*/
typedef enum mpv_render_frame_info_flag {
/**
* Set if there is actually a next frame. If unset, there is no next frame
* yet, and other flags and fields that require a frame to be queued will
* be unset.
*
* This is set for _any_ kind of frame, even for redraw requests.
*
* Note that when this is unset, it simply means no new frame was
* decoded/queued yet, not necessarily that the end of the video was
* reached. A new frame can be queued after some time.
*
* If the return value of mpv_render_context_render() had the
* MPV_RENDER_UPDATE_FRAME flag set, this flag will usually be set as well,
* unless the frame is rendered, or discarded by other asynchronous events.
*/
MPV_RENDER_FRAME_INFO_PRESENT = 1 << 0,
/**
* If set, the frame is not an actual new video frame, but a redraw request.
* For example if the video is paused, and an option that affects video
* rendering was changed (or any other reason), an update request can be
* issued and this flag will be set.
*
* Typically, redraw frames will not be subject to video timing.
*
* Implies MPV_RENDER_FRAME_INFO_PRESENT.
*/
MPV_RENDER_FRAME_INFO_REDRAW = 1 << 1,
/**
* If set, this is supposed to reproduce the previous frame perfectly. This
* is usually used for certain "video-sync" options ("display-..." modes).
* Typically the renderer will blit the video from a FBO. Unset otherwise.
*
* Implies MPV_RENDER_FRAME_INFO_PRESENT.
*/
MPV_RENDER_FRAME_INFO_REPEAT = 1 << 2,
/**
* If set, the player timing code expects that the user thread blocks on
* vsync (by either delaying the render call, or by making a call to
* mpv_render_context_report_swap() at vsync time).
*
* Implies MPV_RENDER_FRAME_INFO_PRESENT.
*/
MPV_RENDER_FRAME_INFO_BLOCK_VSYNC = 1 << 3,
} mpv_render_frame_info_flag;
/**
* Information about the next video frame that will be rendered. Can be
* retrieved with MPV_RENDER_PARAM_NEXT_FRAME_INFO.
*/
typedef struct mpv_render_frame_info {
/**
* A bitset of mpv_render_frame_info_flag values (i.e. multiple flags are
* combined with bitwise or).
*/
uint64_t flags;
/**
* Absolute time at which the frame is supposed to be displayed. This is in
* the same unit and base as the time returned by mpv_get_time_us(). For
* frames that are redrawn, or if vsync locked video timing is used (see
* "video-sync" option), then this can be 0. The "video-timing-offset"
* option determines how much "headroom" the render thread gets (but a high
* enough frame rate can reduce it anyway). mpv_render_context_render() will
* normally block until the time is elapsed, unless you pass it
* MPV_RENDER_PARAM_BLOCK_FOR_TARGET_TIME = 0.
*/
int64_t target_time;
} mpv_render_frame_info;
/**
* Initialize the renderer state. Depending on the backend used, this will
* access the underlying GPU API and initialize its own objects.
*
* You must free the context with mpv_render_context_free(). Not doing so before
* the mpv core is destroyed may result in memory leaks or crashes.
*
* Currently, only at most 1 context can exists per mpv core (it represents the
* main video output).
*
* You should pass the following parameters:
* - MPV_RENDER_PARAM_API_TYPE to select the underlying backend/GPU API.
* - Backend-specific init parameter, like MPV_RENDER_PARAM_OPENGL_INIT_PARAMS.
* - Setting MPV_RENDER_PARAM_ADVANCED_CONTROL and following its rules is
* strongly recommended.
* - If you want to use hwdec, possibly hwdec interop resources.
*
* @param res set to the context (on success) or NULL (on failure). The value
* is never read and always overwritten.
* @param mpv handle used to get the core (the mpv_render_context won't depend
* on this specific handle, only the core referenced by it)
* @param params an array of parameters, terminated by type==0. It's left
* unspecified what happens with unknown parameters. At least
* MPV_RENDER_PARAM_API_TYPE is required, and most backends will
* require another backend-specific parameter.
* @return error code, including but not limited to:
* MPV_ERROR_UNSUPPORTED: the OpenGL version is not supported
* (or required extensions are missing)
* MPV_ERROR_NOT_IMPLEMENTED: an unknown API type was provided, or
* support for the requested API was not
* built in the used libmpv binary.
* MPV_ERROR_INVALID_PARAMETER: at least one of the provided parameters was
* not valid.
*/
int mpv_render_context_create(mpv_render_context **res, mpv_handle *mpv,
mpv_render_param *params);
/**
* Attempt to change a single parameter. Not all backends and parameter types
* support all kinds of changes.
*
* @param ctx a valid render context
* @param param the parameter type and data that should be set
* @return error code. If a parameter could actually be changed, this returns
* success, otherwise an error code depending on the parameter type
* and situation.
*/
int mpv_render_context_set_parameter(mpv_render_context *ctx,
mpv_render_param param);
/**
* Retrieve information from the render context. This is NOT a counterpart to
* mpv_render_context_set_parameter(), because you generally can't read
* parameters set with it, and this function is not meant for this purpose.
* Instead, this is for communicating information from the renderer back to the
* user. See mpv_render_param_type; entries which support this function
* explicitly mention it, and for other entries you can assume it will fail.
*
* You pass param with param.type set and param.data pointing to a variable
* of the required data type. The function will then overwrite that variable
* with the returned value (at least on success).
*
* @param ctx a valid render context
* @param param the parameter type and data that should be retrieved
* @return error code. If a parameter could actually be retrieved, this returns
* success, otherwise an error code depending on the parameter type
* and situation. MPV_ERROR_NOT_IMPLEMENTED is used for unknown
* param.type, or if retrieving it is not supported.
*/
int mpv_render_context_get_info(mpv_render_context *ctx,
mpv_render_param param);
typedef void (*mpv_render_update_fn)(void *cb_ctx);
/**
* Set the callback that notifies you when a new video frame is available, or
* if the video display configuration somehow changed and requires a redraw.
* Similar to mpv_set_wakeup_callback(), you must not call any mpv API from
* the callback, and all the other listed restrictions apply (such as not
* exiting the callback by throwing exceptions).
*
* This can be called from any thread, except from an update callback. In case
* of the OpenGL backend, no OpenGL state or API is accessed.
*
* Calling this will raise an update callback immediately.
*
* @param callback callback(callback_ctx) is called if the frame should be
* redrawn
* @param callback_ctx opaque argument to the callback
*/
void mpv_render_context_set_update_callback(mpv_render_context *ctx,
mpv_render_update_fn callback,
void *callback_ctx);
/**
* The API user is supposed to call this when the update callback was invoked
* (like all mpv_render_* functions, this has to happen on the render thread,
* and _not_ from the update callback itself).
*
* This is optional if MPV_RENDER_PARAM_ADVANCED_CONTROL was not set (default).
* Otherwise, it's a hard requirement that this is called after each update
 * callback. If multiple update callbacks happened, and the function could not
* be called sooner, it's OK to call it once after the last callback.
*
* If an update callback happens during or after this function, the function
* must be called again at the soonest possible time.
*
* If MPV_RENDER_PARAM_ADVANCED_CONTROL was set, this will do additional work
* such as allocating textures for the video decoder.
*
* @return a bitset of mpv_render_update_flag values (i.e. multiple flags are
* combined with bitwise or). Typically, this will tell the API user
* what should happen next. E.g. if the MPV_RENDER_UPDATE_FRAME flag is
* set, mpv_render_context_render() should be called. If flags unknown
* to the API user are set, or if the return value is 0, nothing needs
* to be done.
*/
uint64_t mpv_render_context_update(mpv_render_context *ctx);
/**
* Flags returned by mpv_render_context_update(). Each value represents a bit
* in the function's return value.
*/
typedef enum mpv_render_update_flag {
/**
* A new video frame must be rendered. mpv_render_context_render() must be
* called.
*/
MPV_RENDER_UPDATE_FRAME = 1 << 0,
} mpv_render_context_flag;
/**
* Render video.
*
* Typically renders the video to a target surface provided via mpv_render_param
* (the details depend on the backend in use). Options like "panscan" are
* applied to determine which part of the video should be visible and how the
* video should be scaled. You can change these options at runtime by using the
* mpv property API.
*
* The renderer will reconfigure itself every time the target surface
* configuration (such as size) is changed.
*
* This function implicitly pulls a video frame from the internal queue and
* renders it. If no new frame is available, the previous frame is redrawn.
* The update callback set with mpv_render_context_set_update_callback()
* notifies you when a new frame was added. The details potentially depend on
* the backends and the provided parameters.
*
* Generally, libmpv will invoke your update callback some time before the video
* frame should be shown, and then lets this function block until the supposed
* display time. This will limit your rendering to video FPS. You can prevent
* this by setting the "video-timing-offset" global option to 0. (This applies
* only to "audio" video sync mode.)
*
* You should pass the following parameters:
* - Backend-specific target object, such as MPV_RENDER_PARAM_OPENGL_FBO.
* - Possibly transformations, such as MPV_RENDER_PARAM_FLIP_Y.
*
* @param ctx a valid render context
* @param params an array of parameters, terminated by type==0. Which parameters
* are required depends on the backend. It's left unspecified what
* happens with unknown parameters.
* @return error code
*/
int mpv_render_context_render(mpv_render_context *ctx, mpv_render_param *params);
/**
* Tell the renderer that a frame was flipped at the given time. This is
* optional, but can help the player to achieve better timing.
*
* Note that calling this at least once informs libmpv that you will use this
* function. If you use it inconsistently, expect bad video playback.
*
* If this is called while no video is initialized, it is ignored.
*
* @param ctx a valid render context
*/
void mpv_render_context_report_swap(mpv_render_context *ctx);
/**
* Destroy the mpv renderer state.
*
* If video is still active (e.g. a file playing), video will be disabled
* forcefully.
*
* @param ctx a valid render context. After this function returns, this is not
* a valid pointer anymore. NULL is also allowed and does nothing.
*/
void mpv_render_context_free(mpv_render_context *ctx);
#ifdef __cplusplus
}
#endif
#endif

216
Vendor/mpv/iOS/include/render_gl.h vendored Normal file
View File

@ -0,0 +1,216 @@
/* Copyright (C) 2018 the mpv developers
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#ifndef MPV_CLIENT_API_RENDER_GL_H_
#define MPV_CLIENT_API_RENDER_GL_H_
#include "render.h"
#ifdef __cplusplus
extern "C" {
#endif
/**
* OpenGL backend
* --------------
*
* This header contains definitions for using OpenGL with the render.h API.
*
* OpenGL interop
* --------------
*
* The OpenGL backend has some special rules, because OpenGL itself uses
* implicit per-thread contexts, which causes additional API problems.
*
* This assumes the OpenGL context lives on a certain thread controlled by the
* API user. All mpv_render_* APIs have to be assumed to implicitly use the
* OpenGL context if you pass a mpv_render_context using the OpenGL backend,
* unless specified otherwise.
*
* The OpenGL context is indirectly accessed through the OpenGL function
* pointers returned by the get_proc_address callback in mpv_opengl_init_params.
* Generally, mpv will not load the system OpenGL library when using this API.
*
* OpenGL state
* ------------
*
* OpenGL has a large amount of implicit state. All the mpv functions mentioned
* above expect that the OpenGL state is reasonably set to OpenGL standard
* defaults. Likewise, mpv will attempt to leave the OpenGL context with
* standard defaults. The following state is excluded from this:
*
* - the glViewport state
* - the glScissor state (but GL_SCISSOR_TEST is in its default value)
* - glBlendFuncSeparate() state (but GL_BLEND is in its default value)
* - glClearColor() state
* - mpv may overwrite the callback set with glDebugMessageCallback()
* - mpv always disables GL_DITHER at init
*
* Messing with the state could be avoided by creating shared OpenGL contexts,
* but this is avoided for the sake of compatibility and interoperability.
*
* On OpenGL 2.1, mpv will strictly call functions like glGenTextures() to
* create OpenGL objects. You will have to do the same. This ensures that
* objects created by mpv and the API users don't clash. Also, legacy state
* must be either in its defaults, or not interfere with core state.
*
* API use
* -------
*
* The mpv_render_* API is used. That API supports multiple backends, and this
* section documents specifics for the OpenGL backend.
*
* Use mpv_render_context_create() with MPV_RENDER_PARAM_API_TYPE set to
* MPV_RENDER_API_TYPE_OPENGL, and MPV_RENDER_PARAM_OPENGL_INIT_PARAMS provided.
*
* Call mpv_render_context_render() with MPV_RENDER_PARAM_OPENGL_FBO to render
* the video frame to an FBO.
*
* Hardware decoding
* -----------------
*
* Hardware decoding via this API is fully supported, but requires some
* additional setup. (At least if direct hardware decoding modes are wanted,
* instead of copying back surface data from GPU to CPU RAM.)
*
* There may be certain requirements on the OpenGL implementation:
*
* - Windows: ANGLE is required (although in theory GL/DX interop could be used)
* - Intel/Linux: EGL is required, and also the native display resource needs
* to be provided (e.g. MPV_RENDER_PARAM_X11_DISPLAY for X11 and
* MPV_RENDER_PARAM_WL_DISPLAY for Wayland)
* - nVidia/Linux: Both GLX and EGL should work (GLX is required if vdpau is
* used, e.g. due to old drivers.)
* - OSX: CGL is required (CGLGetCurrentContext() returning non-NULL)
* - iOS: EAGL is required (EAGLContext.currentContext returning non-nil)
*
* Once these things are setup, hardware decoding can be enabled/disabled at
* any time by setting the "hwdec" property.
*/
/**
* For initializing the mpv OpenGL state via MPV_RENDER_PARAM_OPENGL_INIT_PARAMS.
*/
typedef struct mpv_opengl_init_params {
/**
* This retrieves OpenGL function pointers, and will use them in subsequent
* operation.
* Usually, you can simply call the GL context APIs from this callback (e.g.
* glXGetProcAddressARB or wglGetProcAddress), but some APIs do not always
* return pointers for all standard functions (even if present); in this
* case you have to compensate by looking up these functions yourself when
* libmpv wants to resolve them through this callback.
* libmpv will not normally attempt to resolve GL functions on its own, nor
* does it link to GL libraries directly.
*/
void *(*get_proc_address)(void *ctx, const char *name);
/**
* Value passed as ctx parameter to get_proc_address().
*/
void *get_proc_address_ctx;
/**
* This should not be used. It is deprecated and will be removed or ignored
* when the opengl_cb API is removed.
*/
const char *extra_exts;
} mpv_opengl_init_params;
/**
* For MPV_RENDER_PARAM_OPENGL_FBO.
*/
typedef struct mpv_opengl_fbo {
/**
* Framebuffer object name. This must be either a valid FBO generated by
* glGenFramebuffers() that is complete and color-renderable, or 0. If the
* value is 0, this refers to the OpenGL default framebuffer.
*/
int fbo;
/**
* Valid dimensions. This must refer to the size of the framebuffer. This
* must always be set.
*/
int w, h;
/**
* Underlying texture internal format (e.g. GL_RGBA8), or 0 if unknown. If
* this is the default framebuffer, this can be an equivalent.
*/
int internal_format;
} mpv_opengl_fbo;
/**
* Deprecated. For MPV_RENDER_PARAM_DRM_DISPLAY.
*/
typedef struct mpv_opengl_drm_params {
int fd;
int crtc_id;
int connector_id;
struct _drmModeAtomicReq **atomic_request_ptr;
int render_fd;
} mpv_opengl_drm_params;
/**
* For MPV_RENDER_PARAM_DRM_DRAW_SURFACE_SIZE.
*/
typedef struct mpv_opengl_drm_draw_surface_size {
/**
* size of the draw plane surface in pixels.
*/
int width, height;
} mpv_opengl_drm_draw_surface_size;
/**
* For MPV_RENDER_PARAM_DRM_DISPLAY_V2.
*/
typedef struct mpv_opengl_drm_params_v2 {
/**
* DRM fd (int). Set to -1 if invalid.
*/
int fd;
/**
* Currently used crtc id
*/
int crtc_id;
/**
* Currently used connector id
*/
int connector_id;
/**
* Pointer to a drmModeAtomicReq pointer that is being used for the renderloop.
* This pointer should hold a pointer to the atomic request pointer
* The atomic request pointer is usually changed at every renderloop.
*/
struct _drmModeAtomicReq **atomic_request_ptr;
/**
* DRM render node. Used for VAAPI interop.
* Set to -1 if invalid.
*/
int render_fd;
} mpv_opengl_drm_params_v2;
/**
* For backwards compatibility with the old naming of mpv_opengl_drm_draw_surface_size
*/
#define mpv_opengl_drm_osd_size mpv_opengl_drm_draw_surface_size
#ifdef __cplusplus
}
#endif
#endif

240
Vendor/mpv/iOS/include/stream_cb.h vendored Normal file
View File

@ -0,0 +1,240 @@
/* Copyright (C) 2017 the mpv developers
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#ifndef MPV_CLIENT_API_STREAM_CB_H_
#define MPV_CLIENT_API_STREAM_CB_H_
#include "client.h"
#ifdef __cplusplus
extern "C" {
#endif
/**
* Warning: this API is not stable yet.
*
* Overview
* --------
*
* This API can be used to make mpv read from a stream with a custom
* implementation. This interface is inspired by funopen on BSD and
* fopencookie on linux. The stream is backed by user-defined callbacks
* which can implement customized open, read, seek, size and close behaviors.
*
* Usage
* -----
*
* Register your stream callbacks with the mpv_stream_cb_add_ro() function. You
* have to provide a mpv_stream_cb_open_ro_fn callback to it (open_fn argument).
*
* Once registered, you can `loadfile myprotocol://myfile`. Your open_fn will be
* invoked with the URI and you must fill out the provided mpv_stream_cb_info
* struct. This includes your stream callbacks (like read_fn), and an opaque
* cookie, which will be passed as the first argument to all the remaining
* stream callbacks.
*
* Note that your custom callbacks must not invoke libmpv APIs as that would
* cause a deadlock. (Unless you call a different mpv_handle than the one the
* callback was registered for, and the mpv_handles refer to different mpv
* instances.)
*
* Stream lifetime
* ---------------
*
* A stream remains valid until its close callback has been called. It's up to
* libmpv to call the close callback, and the libmpv user cannot close it
* directly with the stream_cb API.
*
* For example, if you consider your custom stream to become suddenly invalid
* (maybe because the underlying stream died), libmpv will continue using your
* stream. All you can do is returning errors from each callback, until libmpv
* gives up and closes it.
*
* Protocol registration and lifetime
* ----------------------------------
*
* Protocols remain registered until the mpv instance is terminated. This means
* in particular that it can outlive the mpv_handle that was used to register
* it, but once mpv_terminate_destroy() is called, your registered callbacks
* will not be called again.
*
* Protocol unregistration is finished after the mpv core has been destroyed
* (e.g. after mpv_terminate_destroy() has returned).
*
* If you do not call mpv_terminate_destroy() yourself (e.g. plugin-style code),
* you will have to deal with the registration or even streams outliving your
* code. Here are some possible ways to do this:
* - call mpv_terminate_destroy(), which destroys the core, and will make sure
* all streams are closed once this function returns
* - you refcount all resources your stream "cookies" reference, so that it
* doesn't matter if streams live longer than expected
* - create "cancellation" semantics: after your protocol has been unregistered,
* notify all your streams that are still opened, and make them drop all
* referenced resources - then return errors from the stream callbacks as
* long as the stream is still opened
*
*/
/**
* Read callback used to implement a custom stream. The semantics of the
* callback match read(2) in blocking mode. Short reads are allowed (you can
* return less bytes than requested, and libmpv will retry reading the rest
* with another call). If no data can be immediately read, the callback must
* block until there is new data. A return of 0 will be interpreted as final
* EOF, although libmpv might retry the read, or seek to a different position.
*
* @param cookie opaque cookie identifying the stream,
* returned from mpv_stream_cb_open_fn
* @param buf buffer to read data into
* @param size of the buffer
* @return number of bytes read into the buffer
* @return 0 on EOF
* @return -1 on error
*/
typedef int64_t (*mpv_stream_cb_read_fn)(void *cookie, char *buf, uint64_t nbytes);
/**
* Seek callback used to implement a custom stream.
*
* Note that mpv will issue a seek to position 0 immediately after opening. This
* is used to test whether the stream is seekable (since seekability might
* depend on the URI contents, not just the protocol). Return
* MPV_ERROR_UNSUPPORTED if seeking is not implemented for this stream. This
* seek also serves to establish the fact that streams start at position 0.
*
 * This callback can be NULL, in which case it behaves as if always returning
* MPV_ERROR_UNSUPPORTED.
*
* @param cookie opaque cookie identifying the stream,
* returned from mpv_stream_cb_open_fn
 * @param offset target absolute stream position
* @return the resulting offset of the stream
* MPV_ERROR_UNSUPPORTED or MPV_ERROR_GENERIC if the seek failed
*/
typedef int64_t (*mpv_stream_cb_seek_fn)(void *cookie, int64_t offset);
/**
* Size callback used to implement a custom stream.
*
* Return MPV_ERROR_UNSUPPORTED if no size is known.
*
 * This callback can be NULL, in which case it behaves as if always returning
* MPV_ERROR_UNSUPPORTED.
*
* @param cookie opaque cookie identifying the stream,
* returned from mpv_stream_cb_open_fn
* @return the total size in bytes of the stream
*/
typedef int64_t (*mpv_stream_cb_size_fn)(void *cookie);
/**
* Close callback used to implement a custom stream.
*
* @param cookie opaque cookie identifying the stream,
* returned from mpv_stream_cb_open_fn
*/
typedef void (*mpv_stream_cb_close_fn)(void *cookie);
/**
* Cancel callback used to implement a custom stream.
*
* This callback is used to interrupt any current or future read and seek
* operations. It will be called from a separate thread than the demux
* thread, and should not block.
*
* This callback can be NULL.
*
* Available since API 1.106.
*
* @param cookie opaque cookie identifying the stream,
* returned from mpv_stream_cb_open_fn
*/
typedef void (*mpv_stream_cb_cancel_fn)(void *cookie);
/**
* See mpv_stream_cb_open_ro_fn callback.
*/
typedef struct mpv_stream_cb_info {
/**
* Opaque user-provided value, which will be passed to the other callbacks.
* The close callback will be called to release the cookie. It is not
* interpreted by mpv. It doesn't even need to be a valid pointer.
*
* The user sets this in the mpv_stream_cb_open_ro_fn callback.
*/
void *cookie;
/**
* Callbacks set by the user in the mpv_stream_cb_open_ro_fn callback. Some
* of them are optional, and can be left unset.
*
* The following callbacks are mandatory: read_fn, close_fn
*/
mpv_stream_cb_read_fn read_fn;
mpv_stream_cb_seek_fn seek_fn;
mpv_stream_cb_size_fn size_fn;
mpv_stream_cb_close_fn close_fn;
mpv_stream_cb_cancel_fn cancel_fn; /* since API 1.106 */
} mpv_stream_cb_info;
/**
* Open callback used to implement a custom read-only (ro) stream. The user
* must set the callback fields in the passed info struct. The cookie field
* also can be set to store state associated to the stream instance.
*
* Note that the info struct is valid only for the duration of this callback.
* You can't change the callbacks or the pointer to the cookie at a later point.
*
* Each stream instance created by the open callback can have different
* callbacks.
*
* The close_fn callback will terminate the stream instance. The pointers to
* your callbacks and cookie will be discarded, and the callbacks will not be
* called again.
*
* @param user_data opaque user data provided via mpv_stream_cb_add()
* @param uri name of the stream to be opened (with protocol prefix)
* @param info fields which the user should fill
* @return 0 on success, MPV_ERROR_LOADING_FAILED if the URI cannot be opened.
*/
typedef int (*mpv_stream_cb_open_ro_fn)(void *user_data, char *uri,
mpv_stream_cb_info *info);
/**
* Add a custom stream protocol. This will register a protocol handler under
* the given protocol prefix, and invoke the given callbacks if an URI with the
* matching protocol prefix is opened.
*
* The "ro" is for read-only - only read-only streams can be registered with
* this function.
*
 * The callback remains registered until the mpv core is destroyed.
*
* If a custom stream with the same name is already registered, then the
* MPV_ERROR_INVALID_PARAMETER error is returned.
*
* @param protocol protocol prefix, for example "foo" for "foo://" URIs
* @param user_data opaque pointer passed into the mpv_stream_cb_open_fn
* callback.
* @return error code
*/
int mpv_stream_cb_add_ro(mpv_handle *ctx, const char *protocol, void *user_data,
mpv_stream_cb_open_ro_fn open_fn);
#ifdef __cplusplus
}
#endif
#endif

BIN
Vendor/mpv/iOS/lib/libass.a vendored Normal file

Binary file not shown.

BIN
Vendor/mpv/iOS/lib/libavcodec.a vendored Normal file

Binary file not shown.

BIN
Vendor/mpv/iOS/lib/libavdevice.a vendored Normal file

Binary file not shown.

BIN
Vendor/mpv/iOS/lib/libavfilter.a vendored Normal file

Binary file not shown.

BIN
Vendor/mpv/iOS/lib/libavformat.a vendored Normal file

Binary file not shown.

BIN
Vendor/mpv/iOS/lib/libavutil.a vendored Normal file

Binary file not shown.

BIN
Vendor/mpv/iOS/lib/libcrypto.a vendored Normal file

Binary file not shown.

BIN
Vendor/mpv/iOS/lib/libfreetype.a vendored Normal file

Binary file not shown.

BIN
Vendor/mpv/iOS/lib/libfribidi.a vendored Normal file

Binary file not shown.

BIN
Vendor/mpv/iOS/lib/libharfbuzz.a vendored Normal file

Binary file not shown.

BIN
Vendor/mpv/iOS/lib/libmpv.a vendored Normal file

Binary file not shown.

BIN
Vendor/mpv/iOS/lib/libssl.a vendored Normal file

Binary file not shown.

BIN
Vendor/mpv/iOS/lib/libswresample.a vendored Normal file

Binary file not shown.

BIN
Vendor/mpv/iOS/lib/libswscale.a vendored Normal file

Binary file not shown.

BIN
Vendor/mpv/iOS/lib/libuchardet.a vendored Normal file

Binary file not shown.

5
iOS/BridgingHeader.h Normal file
View File

@ -0,0 +1,5 @@
#import <CoreFoundation/CoreFoundation.h>
#import "../Vendor/mpv/iOS/include/client.h"
#import "../Vendor/mpv/iOS/include/render.h"
#import "../Vendor/mpv/iOS/include/render_gl.h"
#import "../Vendor/mpv/iOS/include/stream_cb.h"

View File

@ -16,10 +16,7 @@
</dict>
</array>
<key>UIApplicationSceneManifest</key>
<dict>
<key>UIApplicationSupportsMultipleScenes</key>
<false/>
</dict>
<dict/>
<key>UIBackgroundModes</key>
<array>
<string>audio</string>

27
macOS/AVPlayerView.swift Normal file
View File

@ -0,0 +1,27 @@
import Defaults
import SwiftUI
/// SwiftUI wrapper hosting the AppKit-backed `AVPlayerViewController`.
///
/// An existing controller can be injected through `init`; otherwise one is
/// created on demand, wired to the shared `PlayerModel`, and registered back
/// on the model so other parts of the app can reach it.
struct AVPlayerView: NSViewControllerRepresentable {
    @EnvironmentObject<PlayerModel> private var player

    // Kept in @State so the same controller instance survives SwiftUI
    // re-creating this view value.
    @State private var controller: AVPlayerViewController?

    init(controller: AVPlayerViewController? = nil) {
        self.controller = controller
    }

    func makeNSViewController(context _: Context) -> AVPlayerViewController {
        // Reuse the injected controller when present; optional binding
        // replaces the previous nil-check + force unwrap.
        if let controller = controller {
            return controller
        }

        let controller = AVPlayerViewController()
        controller.playerModel = player
        // Back-reference so the model can drive the controller directly.
        player.controller = controller

        return controller
    }

    func updateNSViewController(_: AVPlayerViewController, context _: Context) {}
}

View File

@ -0,0 +1,34 @@
import AVKit
import SwiftUI
/// Hosts AVKit's player view and connects it to the app's `PlayerModel`.
final class AVPlayerViewController: NSViewController {
    var playerModel: PlayerModel!
    var playerView = AVPlayerView()
    var pictureInPictureDelegate = PictureInPictureDelegate()

    /// Width/height ratio of the currently displayed video, floored at 1.0.
    /// Falls back to `VideoPlayerView.defaultAspectRatio` while the video
    /// bounds are empty (the division yields NaN/∞ before layout).
    var aspectRatio: Double? {
        let ratio = Double(playerView.videoBounds.width) / Double(playerView.videoBounds.height)

        guard ratio.isFinite else {
            return VideoPlayerView.defaultAspectRatio
        }

        // max() replaces the previous `[ratio, 1.0].max()!` force unwrap.
        // Floors portrait ratios at square — presumably to keep the window
        // from becoming narrower than tall; confirm with callers.
        return max(ratio, 1.0)
    }

    // NOTE(review): a viewDidDisappear override that only called super was
    // removed as a no-op.

    override func loadView() {
        playerView.player = playerModel.avPlayer
        pictureInPictureDelegate.playerModel = playerModel

        playerView.allowsPictureInPicturePlayback = true
        playerView.showsFullScreenToggleButton = true
        playerView.pictureInPictureDelegate = pictureInPictureDelegate

        view = playerView
    }
}