Compare commits

...

21 Commits

Author SHA1 Message Date
Arkadiusz Fal 7be19d1192 Bump build and version number 2022-03-27 22:02:31 +02:00
Arkadiusz Fal d0c0b459f4 Controls fixes 2022-03-27 22:02:31 +02:00
Arkadiusz Fal 9f29b6be5c tvOS fixes 2022-03-27 22:02:31 +02:00
Arkadiusz Fal a09fc36ad3 Close fullscreen and restore portrait on closing player 2022-03-27 22:02:31 +02:00
Arkadiusz Fal 0e78b1dc23 Improve streams quality settings 2022-03-27 22:02:31 +02:00
Arkadiusz Fal 7c8db2f1c3 Add tvOS mpv libraries 2022-03-27 22:02:31 +02:00
Arkadiusz Fal dce5e1ea3b Fix player window on Mac 2022-03-27 22:02:30 +02:00
Arkadiusz Fal bd0fbb308e Minor improvements 2022-03-27 22:02:30 +02:00
Arkadiusz Fal c5070725aa Bump version number 2022-03-27 22:02:30 +02:00
Arkadiusz Fal 9d7ebb3bd9 Add toggle for dislikes 2022-03-27 22:02:30 +02:00
Arkadiusz Fal 7d740931b7 Bump version number 2022-03-27 22:02:30 +02:00
Arkadiusz Fal 77ee73db73 Minor fixes 2022-03-27 22:02:30 +02:00
Arkadiusz Fal 048f846e32 Add ReturnYoutubeDislike API 2022-03-27 22:02:30 +02:00
Arkadiusz Fal 8e0a710a53 Fixes for MPV in macOS 2022-03-27 22:02:30 +02:00
Arkadiusz Fal 12a7d8cfd6 Fix EOF handler 2022-03-27 22:02:30 +02:00
Arkadiusz Fal 5c89ae9c66 Minor improvements 2022-03-27 22:02:30 +02:00
Arkadiusz Fal 525a01eaed Add hide player button cancel action 2022-03-27 22:02:30 +02:00
Arkadiusz Fal be2d0f3670 Prevent multiple seeks 2022-03-27 22:02:30 +02:00
Arkadiusz Fal 5f43e3b586 Add Now Playing info center updates 2022-03-27 22:02:30 +02:00
Arkadiusz Fal d6d5def4e7 Hello, mpv! 🎉 2022-03-27 22:02:30 +02:00
Arkadiusz Fal e9157d1193 Reorganize toolbars placement 2022-03-27 22:02:29 +02:00
129 changed files with 7739 additions and 1327 deletions

.gitignore (vendored): 3 changes

@@ -91,3 +91,6 @@ iOSInjectionProject/
# SwiftLint Remote Config Cache
.swiftlint/RemoteConfigCache
# disable simulator libraries - to be removed when replaced with framework for mpv
Vendor/mpv/iOS/lib_sim


@@ -0,0 +1,7 @@
import Foundation
extension Comparable {
func clamped(to limits: ClosedRange<Self>) -> Self {
min(max(self, limits.lowerBound), limits.upperBound)
}
}
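Aside (not part of the diff): a minimal usage sketch of the new clamped(to:) helper; the values below are hypothetical.

let seconds = 75.0.clamped(to: 0...60)     // 60.0, values outside the range snap to the nearest bound
let volume = 0.4.clamped(to: 0.0...1.0)    // 0.4, values inside the range pass through unchanged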

ISSUE_TEMPLATE.md: new empty file


@@ -403,7 +403,7 @@ final class InvidiousAPI: Service, ObservableObject, VideosAPI {
return []
}
let videoAssetsURLs = streams.filter { $0["type"].stringValue.starts(with: "video/mp4") && $0["encoding"].stringValue == "h264" }
let videoAssetsURLs = streams.filter { $0["type"].stringValue.starts(with: "video/") }
return videoAssetsURLs.map {
Stream(
@@ -411,7 +411,8 @@ final class InvidiousAPI: Service, ObservableObject, VideosAPI {
videoAsset: AVURLAsset(url: $0["url"].url!),
resolution: Stream.Resolution.from(resolution: $0["resolution"].stringValue),
kind: .adaptive,
encoding: $0["encoding"].stringValue
encoding: $0["encoding"].stringValue,
videoFormat: $0["type"].stringValue
)
}
}


@@ -392,11 +392,19 @@ final class PipedAPI: Service, ObservableObject, VideosAPI {
streams.append(Stream(hlsURL: hlsURL))
}
guard let audioStream = compatibleAudioStreams(from: content).first else {
let audioStreams = content
.dictionaryValue["audioStreams"]?
.arrayValue
.filter { $0.dictionaryValue["format"]?.stringValue == "M4A" }
.sorted {
$0.dictionaryValue["bitrate"]?.intValue ?? 0 > $1.dictionaryValue["bitrate"]?.intValue ?? 0
} ?? []
guard let audioStream = audioStreams.first else {
return streams
}
let videoStreams = compatibleVideoStream(from: content)
let videoStreams = content.dictionaryValue["videoStreams"]?.arrayValue ?? []
videoStreams.forEach { videoStream in
let audioAsset = AVURLAsset(url: audioStream.dictionaryValue["url"]!.url!)
@@ -404,10 +412,11 @@ final class PipedAPI: Service, ObservableObject, VideosAPI {
let videoOnly = videoStream.dictionaryValue["videoOnly"]?.boolValue ?? true
let resolution = Stream.Resolution.from(resolution: videoStream.dictionaryValue["quality"]!.stringValue)
let videoFormat = videoStream.dictionaryValue["format"]?.stringValue
if videoOnly {
streams.append(
Stream(audioAsset: audioAsset, videoAsset: videoAsset, resolution: resolution, kind: .adaptive)
Stream(audioAsset: audioAsset, videoAsset: videoAsset, resolution: resolution, kind: .adaptive, videoFormat: videoFormat)
)
} else {
streams.append(
@@ -426,23 +435,6 @@ final class PipedAPI: Service, ObservableObject, VideosAPI {
.compactMap(extractVideo(from:)) ?? []
}
private func compatibleAudioStreams(from content: JSON) -> [JSON] {
content
.dictionaryValue["audioStreams"]?
.arrayValue
.filter { $0.dictionaryValue["format"]?.stringValue == "M4A" }
.sorted {
$0.dictionaryValue["bitrate"]?.intValue ?? 0 > $1.dictionaryValue["bitrate"]?.intValue ?? 0
} ?? []
}
private func compatibleVideoStream(from content: JSON) -> [JSON] {
content
.dictionaryValue["videoStreams"]?
.arrayValue
.filter { $0.dictionaryValue["format"] == "MPEG_4" } ?? []
}
private func extractComment(from content: JSON) -> Comment? {
let details = content.dictionaryValue
let author = details["author"]?.stringValue ?? ""


@@ -23,13 +23,14 @@ extension PlayerModel {
}
func updateWatch(finished: Bool = false) {
guard let id = currentVideo?.videoID else {
guard let id = currentVideo?.videoID,
Defaults[.saveHistory]
else {
return
}
let time = player.currentTime()
let seconds = time.seconds
currentItem.playbackTime = time
let time = backend.currentTime
let seconds = time?.seconds ?? 0
let watch: Watch!
let watchFetchRequest = Watch.fetchRequest()


@@ -0,0 +1,568 @@
import AVFoundation
import Defaults
import Foundation
import MediaPlayer
#if !os(macOS)
import UIKit
#endif
final class AVPlayerBackend: PlayerBackend {
static let assetKeysToLoad = ["tracks", "playable", "duration"]
var model: PlayerModel!
var controls: PlayerControlsModel!
var stream: Stream?
var video: Video?
var currentTime: CMTime? {
avPlayer.currentTime()
}
var loadedVideo: Bool {
!avPlayer.currentItem.isNil
}
var isLoadingVideo: Bool {
model.currentItem == nil || model.time == nil || !model.time!.isValid
}
var isPlaying: Bool {
avPlayer.timeControlStatus == .playing
}
var playerItemDuration: CMTime? {
avPlayer.currentItem?.asset.duration
}
private(set) var avPlayer = AVPlayer()
var controller: AppleAVPlayerViewController?
private var asset: AVURLAsset?
private var composition = AVMutableComposition()
private var loadedCompositionAssets = [AVMediaType]()
private var frequentTimeObserver: Any?
private var infrequentTimeObserver: Any?
private var playerTimeControlStatusObserver: Any?
private var statusObservation: NSKeyValueObservation?
private var timeObserverThrottle = Throttle(interval: 2)
init(model: PlayerModel, controls: PlayerControlsModel?) {
self.model = model
self.controls = controls
addFrequentTimeObserver()
addInfrequentTimeObserver()
addPlayerTimeControlStatusObserver()
}
func bestPlayable(_ streams: [Stream], maxResolution _: ResolutionSetting) -> Stream? {
streams.first { $0.kind == .hls } ??
streams.filter { $0.kind == .adaptive }.max { $0.resolution < $1.resolution } ??
streams.first
}
func canPlay(_ stream: Stream) -> Bool {
stream.kind == .hls || stream.kind == .stream || stream.videoFormat == "MPEG_4" ||
(stream.videoFormat.starts(with: "video/mp4") && stream.encoding == "h264")
}
func playStream(
_ stream: Stream,
of video: Video,
preservingTime: Bool,
upgrading _: Bool
) {
if let url = stream.singleAssetURL {
model.logger.info("playing stream with one asset\(stream.kind == .hls ? " (HLS)" : ""): \(url)")
loadSingleAsset(url, stream: stream, of: video, preservingTime: preservingTime)
} else {
model.logger.info("playing stream with many assets:")
model.logger.info("composition audio asset: \(stream.audioAsset.url)")
model.logger.info("composition video asset: \(stream.videoAsset.url)")
loadComposition(stream, of: video, preservingTime: preservingTime)
}
}
func play() {
guard avPlayer.timeControlStatus != .playing else {
return
}
avPlayer.play()
}
func pause() {
guard avPlayer.timeControlStatus != .paused else {
return
}
avPlayer.pause()
}
func togglePlay() {
isPlaying ? pause() : play()
}
func stop() {
avPlayer.replaceCurrentItem(with: nil)
}
func seek(to time: CMTime, completionHandler: ((Bool) -> Void)?) {
avPlayer.seek(
to: time,
toleranceBefore: .secondsInDefaultTimescale(1),
toleranceAfter: .zero,
completionHandler: completionHandler ?? { _ in }
)
}
func seek(relative time: CMTime, completionHandler: ((Bool) -> Void)? = nil) {
if let currentTime = currentTime {
seek(to: currentTime + time, completionHandler: completionHandler)
}
}
func setRate(_ rate: Float) {
avPlayer.rate = rate
}
func closeItem() {
avPlayer.replaceCurrentItem(with: nil)
}
func enterFullScreen() {
controller?.playerView
.perform(NSSelectorFromString("enterFullScreenAnimated:completionHandler:"), with: false, with: nil)
}
func exitFullScreen() {
controller?.playerView
.perform(NSSelectorFromString("exitFullScreenAnimated:completionHandler:"), with: false, with: nil)
}
#if os(tvOS)
func closePiP(wasPlaying: Bool) {
let item = avPlayer.currentItem
let time = avPlayer.currentTime()
avPlayer.replaceCurrentItem(with: nil)
guard !item.isNil else {
return
}
avPlayer.seek(to: time)
avPlayer.replaceCurrentItem(with: item)
guard wasPlaying else {
return
}
DispatchQueue.main.asyncAfter(deadline: .now() + 1) { [weak self] in
self?.play()
}
}
#else
func closePiP(wasPlaying: Bool) {
controller?.playerView.player = nil
controller?.playerView.player = avPlayer
guard wasPlaying else {
return
}
DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) { [weak self] in
self?.play()
}
}
#endif
private func loadSingleAsset(
_ url: URL,
stream: Stream,
of video: Video,
preservingTime: Bool = false
) {
asset?.cancelLoading()
asset = AVURLAsset(url: url)
asset?.loadValuesAsynchronously(forKeys: Self.assetKeysToLoad) { [weak self] in
var error: NSError?
switch self?.asset?.statusOfValue(forKey: "duration", error: &error) {
case .loaded:
DispatchQueue.main.async { [weak self] in
self?.insertPlayerItem(stream, for: video, preservingTime: preservingTime)
}
case .failed:
DispatchQueue.main.async { [weak self] in
self?.model.playerError = error
}
default:
return
}
}
}
private func loadComposition(
_ stream: Stream,
of video: Video,
preservingTime: Bool = false
) {
loadedCompositionAssets = []
loadCompositionAsset(stream.audioAsset, stream: stream, type: .audio, of: video, preservingTime: preservingTime, model: model)
loadCompositionAsset(stream.videoAsset, stream: stream, type: .video, of: video, preservingTime: preservingTime, model: model)
}
private func loadCompositionAsset(
_ asset: AVURLAsset,
stream: Stream,
type: AVMediaType,
of video: Video,
preservingTime: Bool = false,
model: PlayerModel
) {
asset.loadValuesAsynchronously(forKeys: Self.assetKeysToLoad) { [weak self] in
guard let self = self else {
return
}
model.logger.info("loading \(type.rawValue) track")
let assetTracks = asset.tracks(withMediaType: type)
guard let compositionTrack = self.composition.addMutableTrack(
withMediaType: type,
preferredTrackID: kCMPersistentTrackID_Invalid
) else {
model.logger.critical("composition \(type.rawValue) addMutableTrack FAILED")
return
}
guard let assetTrack = assetTracks.first else {
model.logger.critical("asset \(type.rawValue) track FAILED")
return
}
try! compositionTrack.insertTimeRange(
CMTimeRange(start: .zero, duration: CMTime.secondsInDefaultTimescale(video.length)),
of: assetTrack,
at: .zero
)
model.logger.critical("\(type.rawValue) LOADED")
guard model.streamSelection == stream else {
model.logger.critical("IGNORING LOADED")
return
}
self.loadedCompositionAssets.append(type)
if self.loadedCompositionAssets.count == 2 {
self.insertPlayerItem(stream, for: video, preservingTime: preservingTime)
}
}
}
private func insertPlayerItem(
_ stream: Stream,
for video: Video,
preservingTime: Bool = false
) {
removeItemDidPlayToEndTimeObserver()
model.playerItem = playerItem(stream)
guard model.playerItem != nil else {
return
}
addItemDidPlayToEndTimeObserver()
attachMetadata(to: model.playerItem!, video: video, for: stream)
DispatchQueue.main.async {
self.stream = stream
self.video = video
self.model.stream = stream
self.composition = AVMutableComposition()
self.asset = nil
}
let startPlaying = {
#if !os(macOS)
try? AVAudioSession.sharedInstance().setActive(true)
#endif
if self.isAutoplaying(self.model.playerItem!) {
DispatchQueue.main.asyncAfter(deadline: .now() + 0.3) { [weak self] in
guard let self = self else {
return
}
if !preservingTime,
let segment = self.model.sponsorBlock.segments.first,
segment.start < 3,
self.model.lastSkipped.isNil
{
self.avPlayer.seek(
to: segment.endTime,
toleranceBefore: .secondsInDefaultTimescale(1),
toleranceAfter: .zero
) { finished in
guard finished else {
return
}
self.model.lastSkipped = segment
self.model.play()
}
} else {
self.model.play()
}
}
}
}
let replaceItemAndSeek = {
guard video == self.model.currentVideo else {
return
}
self.avPlayer.replaceCurrentItem(with: self.model.playerItem)
self.seekToPreservedTime { finished in
guard finished else {
return
}
self.model.preservedTime = nil
startPlaying()
}
}
if preservingTime {
if model.preservedTime.isNil {
model.saveTime {
replaceItemAndSeek()
startPlaying()
}
} else {
replaceItemAndSeek()
startPlaying()
}
} else {
avPlayer.replaceCurrentItem(with: model.playerItem)
startPlaying()
}
}
private func seekToPreservedTime(completionHandler: @escaping (Bool) -> Void = { _ in }) {
guard let time = model.preservedTime else {
return
}
avPlayer.seek(
to: time,
toleranceBefore: .secondsInDefaultTimescale(1),
toleranceAfter: .zero,
completionHandler: completionHandler
)
}
private func playerItem(_: Stream) -> AVPlayerItem? {
if let asset = asset {
return AVPlayerItem(asset: asset)
} else {
return AVPlayerItem(asset: composition)
}
}
private func attachMetadata(to item: AVPlayerItem, video: Video, for _: Stream? = nil) {
#if !os(macOS)
var externalMetadata = [
makeMetadataItem(.commonIdentifierTitle, value: video.title),
makeMetadataItem(.quickTimeMetadataGenre, value: video.genre ?? ""),
makeMetadataItem(.commonIdentifierDescription, value: video.description ?? "")
]
if let thumbnailData = try? Data(contentsOf: video.thumbnailURL(quality: .medium)!),
let image = UIImage(data: thumbnailData),
let pngData = image.pngData()
{
let artworkItem = makeMetadataItem(.commonIdentifierArtwork, value: pngData)
externalMetadata.append(artworkItem)
}
item.externalMetadata = externalMetadata
#endif
item.preferredForwardBufferDuration = 5
observePlayerItemStatus(item)
}
#if !os(macOS)
private func makeMetadataItem(_ identifier: AVMetadataIdentifier, value: Any) -> AVMetadataItem {
let item = AVMutableMetadataItem()
item.identifier = identifier
item.value = value as? NSCopying & NSObjectProtocol
item.extendedLanguageTag = "und"
return item.copy() as! AVMetadataItem
}
#endif
func isAutoplaying(_ item: AVPlayerItem) -> Bool {
avPlayer.currentItem == item
}
private func observePlayerItemStatus(_ item: AVPlayerItem) {
statusObservation?.invalidate()
statusObservation = item.observe(\.status, options: [.old, .new]) { [weak self] playerItem, _ in
guard let self = self else {
return
}
switch playerItem.status {
case .readyToPlay:
if self.isAutoplaying(playerItem) {
self.model.play()
}
case .failed:
self.model.playerError = item.error
default:
return
}
}
}
private func addItemDidPlayToEndTimeObserver() {
NotificationCenter.default.addObserver(
self,
selector: #selector(itemDidPlayToEndTime),
name: NSNotification.Name.AVPlayerItemDidPlayToEndTime,
object: playerItem
)
}
private func removeItemDidPlayToEndTimeObserver() {
NotificationCenter.default.removeObserver(
self,
name: NSNotification.Name.AVPlayerItemDidPlayToEndTime,
object: playerItem
)
}
@objc func itemDidPlayToEndTime() {
model.prepareCurrentItemForHistory(finished: true)
if model.queue.isEmpty {
#if !os(macOS)
try? AVAudioSession.sharedInstance().setActive(false)
#endif
model.resetQueue()
#if os(tvOS)
controller?.playerView.dismiss(animated: false) { [weak self] in
self?.controller?.dismiss(animated: true)
}
#else
model.hide()
#endif
} else {
model.advanceToNextItem()
}
}
private func addFrequentTimeObserver() {
let interval = CMTime.secondsInDefaultTimescale(0.5)
frequentTimeObserver = avPlayer.addPeriodicTimeObserver(
forInterval: interval,
queue: .main
) { [weak self] _ in
guard let self = self else {
return
}
guard !self.model.currentItem.isNil else {
return
}
self.controls.duration = self.playerItemDuration ?? .zero
self.controls.currentTime = self.currentTime ?? .zero
#if !os(tvOS)
self.model.updateNowPlayingInfo()
#endif
if let currentTime = self.currentTime {
self.model.handleSegments(at: currentTime)
}
}
}
private func addInfrequentTimeObserver() {
let interval = CMTime.secondsInDefaultTimescale(5)
infrequentTimeObserver = avPlayer.addPeriodicTimeObserver(
forInterval: interval,
queue: .main
) { [weak self] _ in
guard let self = self else {
return
}
guard !self.model.currentItem.isNil else {
return
}
self.timeObserverThrottle.execute {
self.model.updateWatch()
}
}
}
private func addPlayerTimeControlStatusObserver() {
playerTimeControlStatusObserver = avPlayer.observe(\.timeControlStatus) { [weak self] player, _ in
guard let self = self,
self.avPlayer == player
else {
return
}
DispatchQueue.main.async {
self.controls.isPlaying = player.timeControlStatus == .playing
}
if player.timeControlStatus != .waitingToPlayAtSpecifiedRate {
DispatchQueue.main.async { [weak self] in
self?.model.objectWillChange.send()
}
}
if player.timeControlStatus == .playing, player.rate != self.model.currentRate {
player.rate = self.model.currentRate
}
#if os(macOS)
if player.timeControlStatus == .playing {
ScreenSaverManager.shared.disable(reason: "Yattee is playing video")
} else {
ScreenSaverManager.shared.enable()
}
#endif
self.timeObserverThrottle.execute {
self.model.updateWatch()
}
}
}
func updateControls() {}
func startControlsUpdates() {}
func stopControlsUpdates() {}
func setNeedsDrawing(_: Bool) {}
func setSize(_: Double, _: Double) {}
}


@@ -0,0 +1,327 @@
import AVFAudio
import CoreMedia
import Foundation
import Logging
import SwiftUI
final class MPVBackend: PlayerBackend {
private var logger = Logger(label: "mpv-backend")
var model: PlayerModel!
var controls: PlayerControlsModel!
var stream: Stream?
var video: Video?
var currentTime: CMTime?
var loadedVideo = false
var isLoadingVideo = true { didSet {
DispatchQueue.main.async { [weak self] in
self?.controls.isLoadingVideo = self?.isLoadingVideo ?? true
}
}}
var isPlaying = true { didSet {
if isPlaying {
startClientUpdates()
} else {
stopControlsUpdates()
}
updateControlsIsPlaying()
}}
var playerItemDuration: CMTime?
#if !os(macOS)
var controller: MPVViewController!
#endif
var client: MPVClient! { didSet { client.backend = self } }
private var clientTimer: RepeatingTimer!
private var onFileLoaded: (() -> Void)?
private var controlsUpdates = false
private var timeObserverThrottle = Throttle(interval: 2)
init(model: PlayerModel, controls: PlayerControlsModel? = nil) {
self.model = model
self.controls = controls
clientTimer = .init(timeInterval: 1)
clientTimer.eventHandler = getClientUpdates
}
func bestPlayable(_ streams: [Stream], maxResolution: ResolutionSetting) -> Stream? {
streams
.filter { $0.kind == .adaptive && $0.resolution <= maxResolution.value }
.max { $0.resolution < $1.resolution } ??
streams.first { $0.kind == .hls } ??
streams.first
}
func canPlay(_ stream: Stream) -> Bool {
stream.resolution != .unknown && stream.format != "AV1"
}
func playStream(_ stream: Stream, of video: Video, preservingTime: Bool, upgrading _: Bool) {
let updateCurrentStream = {
DispatchQueue.main.async { [weak self] in
self?.stream = stream
self?.video = video
self?.model.stream = stream
}
}
let startPlaying = {
#if !os(macOS)
try? AVAudioSession.sharedInstance().setActive(true)
#endif
DispatchQueue.main.async { [weak self] in
guard let self = self else {
return
}
self.startClientUpdates()
if !preservingTime,
let segment = self.model.sponsorBlock.segments.first,
segment.start < 3,
self.model.lastSkipped.isNil
{
self.seek(to: segment.endTime) { finished in
guard finished else {
return
}
self.model.lastSkipped = segment
self.play()
}
} else {
self.play()
}
}
}
let replaceItem: (CMTime?) -> Void = { [weak self] time in
guard let self = self else {
return
}
self.stop()
if let url = stream.singleAssetURL {
self.onFileLoaded = {
updateCurrentStream()
startPlaying()
}
self.client.loadFile(url, time: time) { [weak self] _ in
self?.isLoadingVideo = true
}
} else {
self.onFileLoaded = { [weak self] in
DispatchQueue.main.asyncAfter(deadline: .now() + 0.2) {
self?.client.addAudio(stream.audioAsset.url) { _ in
updateCurrentStream()
startPlaying()
}
}
}
self.client.loadFile(stream.videoAsset.url, time: time) { [weak self] _ in
self?.isLoadingVideo = true
self?.pause()
}
}
}
if preservingTime {
if model.preservedTime.isNil {
model.saveTime {
replaceItem(self.model.preservedTime)
}
} else {
replaceItem(self.model.preservedTime)
}
} else {
replaceItem(nil)
}
startClientUpdates()
}
func play() {
isPlaying = true
startClientUpdates()
if controls.presentingControls {
startControlsUpdates()
}
client?.play()
}
func pause() {
isPlaying = false
stopClientUpdates()
client?.pause()
}
func togglePlay() {
isPlaying ? pause() : play()
}
func stop() {
client?.stop()
}
func seek(to time: CMTime, completionHandler: ((Bool) -> Void)?) {
client.seek(to: time) { [weak self] _ in
self?.getClientUpdates()
self?.updateControls()
completionHandler?(true)
}
}
func seek(relative time: CMTime, completionHandler: ((Bool) -> Void)? = nil) {
client.seek(relative: time) { [weak self] _ in
self?.getClientUpdates()
self?.updateControls()
completionHandler?(true)
}
}
func setRate(_: Float) {
// TODO: Implement rate change
}
func closeItem() {}
func enterFullScreen() {}
func exitFullScreen() {}
func closePiP(wasPlaying _: Bool) {}
func updateControls() {
DispatchQueue.main.async { [weak self] in
self?.logger.info("updating controls")
self?.controls.currentTime = self?.currentTime ?? .zero
self?.controls.duration = self?.playerItemDuration ?? .zero
}
}
func startControlsUpdates() {
self.logger.info("starting controls updates")
controlsUpdates = true
}
func stopControlsUpdates() {
self.logger.info("stopping controls updates")
controlsUpdates = false
}
func startClientUpdates() {
clientTimer.resume()
}
private func getClientUpdates() {
self.logger.info("getting client updates")
currentTime = client?.currentTime
playerItemDuration = client?.duration
if controlsUpdates {
updateControls()
}
model.updateNowPlayingInfo()
if let currentTime = currentTime {
model.handleSegments(at: currentTime)
}
timeObserverThrottle.execute {
self.model.updateWatch()
}
}
private func stopClientUpdates() {
clientTimer.suspend()
}
private func updateControlsIsPlaying() {
DispatchQueue.main.async { [weak self] in
self?.controls.isPlaying = self?.isPlaying ?? false
}
}
func handle(_ event: UnsafePointer<mpv_event>!) {
logger.info("\(String(cString: mpv_event_name(event.pointee.event_id)))")
switch event.pointee.event_id {
case MPV_EVENT_SHUTDOWN:
mpv_destroy(client.mpv)
client.mpv = nil
case MPV_EVENT_LOG_MESSAGE:
let logmsg = UnsafeMutablePointer<mpv_event_log_message>(OpaquePointer(event.pointee.data))
logger.info(.init(stringLiteral: "log: \(String(cString: (logmsg!.pointee.prefix)!)), "
+ "\(String(cString: (logmsg!.pointee.level)!)), "
+ "\(String(cString: (logmsg!.pointee.text)!))"))
case MPV_EVENT_FILE_LOADED:
onFileLoaded?()
startClientUpdates()
onFileLoaded = nil
case MPV_EVENT_PLAYBACK_RESTART:
isLoadingVideo = false
onFileLoaded?()
startClientUpdates()
onFileLoaded = nil
case MPV_EVENT_UNPAUSE:
isLoadingVideo = false
case MPV_EVENT_END_FILE:
DispatchQueue.main.async { [weak self] in
self?.handleEndOfFile(event)
}
default:
logger.info(.init(stringLiteral: "event: \(String(cString: mpv_event_name(event.pointee.event_id)))"))
}
}
func handleEndOfFile(_: UnsafePointer<mpv_event>!) {
guard !isLoadingVideo else {
return
}
model.prepareCurrentItemForHistory(finished: true)
if model.queue.isEmpty {
#if !os(macOS)
try? AVAudioSession.sharedInstance().setActive(false)
#endif
model.resetQueue()
model.hide()
} else {
model.advanceToNextItem()
}
}
func setNeedsDrawing(_ needsDrawing: Bool) {
client?.setNeedsDrawing(needsDrawing)
}
func setSize(_ width: Double, _ height: Double) {
self.client?.setSize(width, height)
}
}


@@ -0,0 +1,294 @@
import CoreMedia
import Foundation
import Logging
#if !os(macOS)
import Siesta
import UIKit
#endif
final class MPVClient: ObservableObject {
private var logger = Logger(label: "mpv-client")
var mpv: OpaquePointer!
var mpvGL: OpaquePointer!
var queue: DispatchQueue!
#if os(macOS)
var layer: VideoLayer!
var link: CVDisplayLink!
#else
var glView: MPVOGLView!
#endif
var backend: MPVBackend!
var seeking = false
func create(frame: CGRect? = nil) {
#if !os(macOS)
if let frame = frame {
glView = MPVOGLView(frame: frame)
}
#endif
mpv = mpv_create()
if mpv == nil {
print("failed creating context\n")
exit(1)
}
checkError(mpv_request_log_messages(mpv, "warn"))
#if os(macOS)
checkError(mpv_set_option_string(mpv, "input-media-keys", "yes"))
#else
checkError(mpv_set_option_string(mpv, "hwdec", "yes"))
checkError(mpv_set_option_string(mpv, "override-display-fps", "\(UIScreen.main.maximumFramesPerSecond)"))
checkError(mpv_set_option_string(mpv, "video-sync", "display-resample"))
#endif
checkError(mpv_set_option_string(mpv, "vo", "libmpv"))
checkError(mpv_initialize(mpv))
let api = UnsafeMutableRawPointer(mutating: (MPV_RENDER_API_TYPE_OPENGL as NSString).utf8String)
var initParams = mpv_opengl_init_params(
get_proc_address: getProcAddress,
get_proc_address_ctx: nil,
extra_exts: nil
)
queue = DispatchQueue(label: "mpv", qos: .background)
withUnsafeMutablePointer(to: &initParams) { initParams in
var params = [
mpv_render_param(type: MPV_RENDER_PARAM_API_TYPE, data: api),
mpv_render_param(type: MPV_RENDER_PARAM_OPENGL_INIT_PARAMS, data: initParams),
mpv_render_param()
]
if mpv_render_context_create(&mpvGL, mpv, &params) < 0 {
puts("failed to initialize mpv GL context")
exit(1)
}
#if os(macOS)
mpv_render_context_set_update_callback(
mpvGL,
glUpdate,
UnsafeMutableRawPointer(Unmanaged.passUnretained(layer).toOpaque())
)
#else
glView.mpvGL = UnsafeMutableRawPointer(mpvGL)
mpv_render_context_set_update_callback(
mpvGL,
glUpdate(_:),
UnsafeMutableRawPointer(Unmanaged.passUnretained(glView).toOpaque())
)
#endif
}
queue!.async {
mpv_set_wakeup_callback(self.mpv, wakeUp, UnsafeMutableRawPointer(Unmanaged.passUnretained(self).toOpaque()))
}
}
func readEvents() {
queue?.async { [self] in
while self.mpv != nil {
let event = mpv_wait_event(self.mpv, 0)
if event!.pointee.event_id == MPV_EVENT_NONE {
break
}
backend.handle(event)
}
}
}
func loadFile(_ url: URL, time: CMTime? = nil, completionHandler: ((Int32) -> Void)? = nil) {
var args = [url.absoluteString]
if let time = time {
args.append("replace")
args.append("start=\(Int(time.seconds))")
}
command("loadfile", args: args, returnValueCallback: completionHandler)
}
func addAudio(_ url: URL, completionHandler: ((Int32) -> Void)? = nil) {
command("audio-add", args: [url.absoluteString], returnValueCallback: completionHandler)
}
func play() {
setFlagAsync("pause", false)
}
func pause() {
setFlagAsync("pause", true)
}
func togglePlay() {
command("cycle", args: ["pause"])
}
func stop() {
command("stop")
}
var currentTime: CMTime {
CMTime.secondsInDefaultTimescale(getDouble("time-pos"))
}
var duration: CMTime {
CMTime.secondsInDefaultTimescale(getDouble("duration"))
}
func seek(relative time: CMTime, completionHandler: ((Bool) -> Void)? = nil) {
guard !seeking else {
logger.warning("ignoring seek, another in progress")
return
}
seeking = true
command("seek", args: [String(time.seconds)]) { [weak self] _ in
self?.seeking = false
completionHandler?(true)
}
}
func seek(to time: CMTime, completionHandler: ((Bool) -> Void)? = nil) {
guard !seeking else {
logger.warning("ignoring seek, another in progress")
return
}
seeking = true
command("seek", args: [String(time.seconds), "absolute"]) { [weak self] _ in
self?.seeking = false
completionHandler?(true)
}
}
func setSize(_ width: Double, _ height: Double) {
let roundedWidth = width.rounded()
let roundedHeight = height.rounded()
guard width > 0, height > 0 else {
return
}
logger.info("setting player size to \(roundedWidth),\(roundedHeight)")
#if !os(macOS)
guard roundedWidth <= UIScreen.main.bounds.width, roundedHeight <= UIScreen.main.bounds.height else {
logger.info("requested size is greater than screen size, ignoring")
logger.info("width: \(roundedWidth) <= \(UIScreen.main.bounds.width)")
logger.info("height: \(roundedHeight) <= \(UIScreen.main.bounds.height)")
return
}
glView?.frame = CGRect(x: 0, y: 0, width: roundedWidth, height: roundedHeight)
#endif
}
func setNeedsDrawing(_ needsDrawing: Bool) {
logger.info("needs drawing: \(needsDrawing)")
#if !os(macOS)
glView.needsDrawing = needsDrawing
#endif
}
func command(
_ command: String,
args: [String?] = [],
checkForErrors: Bool = true,
returnValueCallback: ((Int32) -> Void)? = nil
) {
guard mpv != nil else {
return
}
var cargs = makeCArgs(command, args).map { $0.flatMap { UnsafePointer<CChar>(strdup($0)) } }
defer {
for ptr in cargs where ptr != nil {
free(UnsafeMutablePointer(mutating: ptr!))
}
}
logger.info("\(command) -- \(args)")
let returnValue = mpv_command(mpv, &cargs)
if checkForErrors {
checkError(returnValue)
}
if let cb = returnValueCallback {
cb(returnValue)
}
}
private func setFlagAsync(_ name: String, _ flag: Bool) {
var data: Int = flag ? 1 : 0
mpv_set_property_async(mpv, 0, name, MPV_FORMAT_FLAG, &data)
}
private func getDouble(_ name: String) -> Double {
var data = Double()
mpv_get_property(mpv, name, MPV_FORMAT_DOUBLE, &data)
return data
}
private func makeCArgs(_ command: String, _ args: [String?]) -> [String?] {
if !args.isEmpty, args.last == nil {
fatalError("Command do not need a nil suffix")
}
var strArgs = args
strArgs.insert(command, at: 0)
strArgs.append(nil)
return strArgs
}
func checkError(_ status: CInt) {
if status < 0 {
logger.error(.init(stringLiteral: "MPV API error: \(String(cString: mpv_error_string(status)))\n"))
}
}
}
#if os(macOS)
func getProcAddress(_: UnsafeMutableRawPointer?, _ name: UnsafePointer<Int8>?) -> UnsafeMutableRawPointer? {
let symbolName = CFStringCreateWithCString(kCFAllocatorDefault, name, CFStringBuiltInEncodings.ASCII.rawValue)
let identifier = CFBundleGetBundleWithIdentifier("com.apple.opengl" as CFString)
return CFBundleGetFunctionPointerForName(identifier, symbolName)
}
func glUpdate(_ ctx: UnsafeMutableRawPointer?) {
let videoLayer = unsafeBitCast(ctx, to: VideoLayer.self)
videoLayer.client?.queue?.async {
if !videoLayer.isAsynchronous {
videoLayer.display()
}
}
}
#else
func getProcAddress(_: UnsafeMutableRawPointer?, _ name: UnsafePointer<Int8>?) -> UnsafeMutableRawPointer? {
let symbolName = CFStringCreateWithCString(kCFAllocatorDefault, name, CFStringBuiltInEncodings.ASCII.rawValue)
let identifier = CFBundleGetBundleWithIdentifier("com.apple.opengles" as CFString)
return CFBundleGetFunctionPointerForName(identifier, symbolName)
}
private func glUpdate(_ ctx: UnsafeMutableRawPointer?) {
let glView = unsafeBitCast(ctx, to: MPVOGLView.self)
guard glView.needsDrawing else {
return
}
DispatchQueue.main.async {
glView.setNeedsDisplay()
}
}
#endif
private func wakeUp(_ context: UnsafeMutableRawPointer?) {
let client = unsafeBitCast(context, to: MPVClient.self)
client.readEvents()
}
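Aside (not part of the diff): a rough, hedged sketch of how the client is driven; the URL is hypothetical and the render-surface setup that create(frame:) performs is glossed over. In the app MPVBackend owns the client, so the backend is wired up first so that mpv events reach handle(_:).

let model = PlayerModel()
let backend = MPVBackend(model: model, controls: PlayerControlsModel())
let client = MPVClient()
backend.client = client        // didSet sets client.backend = backend
client.create(frame: CGRect(x: 0, y: 0, width: 1280, height: 720))   // iOS path; on macOS a VideoLayer must be assigned to client.layer first
client.loadFile(URL(string: "https://example.com/stream.mp4")!, time: .secondsInDefaultTimescale(30))
client.command("seek", args: ["10", "absolute"])   // the same raw mpv command that seek(to:) issues
client.togglePlay()                                // cycles the mpv "pause" property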


@@ -0,0 +1,68 @@
import CoreMedia
import Defaults
import Foundation
protocol PlayerBackend {
var model: PlayerModel! { get set }
var controls: PlayerControlsModel! { get set }
var stream: Stream? { get set }
var video: Video? { get set }
var currentTime: CMTime? { get }
var loadedVideo: Bool { get }
var isLoadingVideo: Bool { get }
var isPlaying: Bool { get }
var playerItemDuration: CMTime? { get }
func bestPlayable(_ streams: [Stream], maxResolution: ResolutionSetting) -> Stream?
func canPlay(_ stream: Stream) -> Bool
func playStream(
_ stream: Stream,
of video: Video,
preservingTime: Bool,
upgrading: Bool
)
func play()
func pause()
func togglePlay()
func stop()
func seek(to time: CMTime, completionHandler: ((Bool) -> Void)?)
func seek(to seconds: Double, completionHandler: ((Bool) -> Void)?)
func seek(relative time: CMTime, completionHandler: ((Bool) -> Void)?)
func setRate(_ rate: Float)
func closeItem()
func enterFullScreen()
func exitFullScreen()
func closePiP(wasPlaying: Bool)
func updateControls()
func startControlsUpdates()
func stopControlsUpdates()
func setNeedsDrawing(_ needsDrawing: Bool)
func setSize(_ width: Double, _ height: Double)
}
extension PlayerBackend {
func seek(to time: CMTime, completionHandler: ((Bool) -> Void)? = nil) {
seek(to: time, completionHandler: completionHandler)
}
func seek(to seconds: Double, completionHandler: ((Bool) -> Void)? = nil) {
seek(to: .secondsInDefaultTimescale(seconds), completionHandler: completionHandler)
}
func seek(relative time: CMTime, completionHandler: ((Bool) -> Void)? = nil) {
seek(relative: time, completionHandler: completionHandler)
}
}
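Aside (not part of the diff): the extension above gives every backend the convenience overloads, so call sites can pass plain seconds or omit the completion handler; a small hedged sketch, where backend is any conforming instance (AVPlayerBackend or MPVBackend):

backend.seek(to: 42.0)                                    // forwarded to seek(to: CMTime, completionHandler:)
backend.seek(relative: .secondsInDefaultTimescale(10)) { finished in
    print("relative seek finished: \(finished)")
}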


@@ -0,0 +1,16 @@
import Defaults
import Foundation
enum PlayerBackendType: String, CaseIterable, Defaults.Serializable {
case mpv
case appleAVPlayer
var label: String {
switch self {
case .mpv:
return "MPV"
case .appleAVPlayer:
return "AVPlayer"
}
}
}
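Aside (not part of the diff): elsewhere in the changeset PlayerModel reads Defaults[.activeBackend], so a key for this enum is registered with the Defaults library; a hedged sketch of what that declaration looks like (the exact default value is an assumption):

extension Defaults.Keys {
    static let activeBackend = Key<PlayerBackendType>("activeBackend", default: .mpv)
}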


@@ -0,0 +1,140 @@
import CoreMedia
import Foundation
import SwiftUI
final class PlayerControlsModel: ObservableObject {
@Published var isLoadingVideo = true
@Published var isPlaying = true
@Published var currentTime = CMTime.zero
@Published var duration = CMTime.zero
@Published var presentingControls = false { didSet { handlePresentationChange() } }
@Published var timer: Timer?
@Published var playingFullscreen = false
private var throttle = Throttle(interval: 1)
var player: PlayerModel!
var playbackTime: String {
guard let current = currentTime.seconds.formattedAsPlaybackTime(),
let duration = duration.seconds.formattedAsPlaybackTime()
else {
return "--:-- / --:--"
}
var withoutSegments = ""
if let withoutSegmentsDuration = playerItemDurationWithoutSponsorSegments,
self.duration.seconds != withoutSegmentsDuration
{
withoutSegments = " (\(withoutSegmentsDuration.formattedAsPlaybackTime() ?? "--:--"))"
}
return "\(current) / \(duration)\(withoutSegments)"
}
var playerItemDurationWithoutSponsorSegments: Double? {
guard let duration = player.playerItemDurationWithoutSponsorSegments else {
return nil
}
return duration.seconds
}
func handlePresentationChange() {
if presentingControls {
DispatchQueue.main.async { [weak self] in
self?.player.backend.startControlsUpdates()
self?.resetTimer()
}
} else {
player.backend.stopControlsUpdates()
timer?.invalidate()
timer = nil
}
}
func show() {
guard !(player?.currentItem.isNil ?? true) else {
return
}
guard !presentingControls else {
return
}
withAnimation(PlayerControls.animation) {
presentingControls = true
}
}
func hide() {
player?.backend.stopControlsUpdates()
guard !(player?.currentItem.isNil ?? true) else {
return
}
guard presentingControls else {
return
}
withAnimation(PlayerControls.animation) {
presentingControls = false
}
}
func toggle() {
withAnimation(PlayerControls.animation) {
presentingControls.toggle()
}
}
func toggleFullscreen(_ value: Bool) {
withAnimation(Animation.easeOut) {
resetTimer()
withAnimation(PlayerControls.animation) {
playingFullscreen = !value
}
#if os(iOS)
if playingFullscreen {
guard !(UIApplication.shared.windows.first?.windowScene?.interfaceOrientation.isLandscape ?? true) else {
return
}
Orientation.lockOrientation(.landscape, andRotateTo: .landscapeRight)
} else {
Orientation.lockOrientation(.allButUpsideDown, andRotateTo: .portrait)
}
#endif
}
}
func reset() {
currentTime = .zero
duration = .zero
}
func resetTimer() {
if !presentingControls {
show()
}
removeTimer()
timer = Timer.scheduledTimer(withTimeInterval: 5.0, repeats: false) { _ in
withAnimation(PlayerControls.animation) { [weak self] in
self?.presentingControls = false
self?.player.backend.stopControlsUpdates()
}
}
}
func removeTimer() {
timer?.invalidate()
timer = nil
}
func update() {
throttle.execute { [weak self] in
self?.player?.backend.updateControls()
}
}
}
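Aside (not part of the diff): this model is shared by both backends via PlayerModel.controls; a hedged sketch of the expected call flow, assuming player is a PlayerModel that already has a current item:

let controls = PlayerControlsModel()
controls.player = player
controls.show()      // presentingControls = true starts backend controls updates and the 5-second auto-hide timer
controls.update()    // throttled so backend.updateControls() runs at most once per second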


@@ -16,18 +16,37 @@ import SwiftyJSON
final class PlayerModel: ObservableObject {
static let availableRates: [Float] = [0.5, 0.67, 0.8, 1, 1.25, 1.5, 2]
static let assetKeysToLoad = ["tracks", "playable", "duration"]
let logger = Logger(label: "stream.yattee.app")
private(set) var player = AVPlayer()
var playerView = Player()
var controller: PlayerViewController?
var avPlayerView = AppleAVPlayerView()
var playerItem: AVPlayerItem?
@Published var presentingPlayer = false { didSet { handlePresentationChange() } }
var mpvPlayerView = MPVPlayerView()
@Published var presentingPlayer = false { didSet { handlePresentationChange() } }
@Published var activeBackend = PlayerBackendType.mpv
var avPlayerBackend: AVPlayerBackend!
var mpvBackend: MPVBackend!
var backends: [PlayerBackend] {
[avPlayerBackend, mpvBackend]
}
var backend: PlayerBackend! {
switch activeBackend {
case .mpv:
return mpvBackend
case .appleAVPlayer:
return avPlayerBackend
}
}
@Published var playerSize: CGSize = .zero { didSet {
backend.setSize(playerSize.width, playerSize.height)
}}
@Published var stream: Stream?
@Published var currentRate: Float = 1.0 { didSet { player.rate = currentRate } }
@Published var currentRate: Float = 1.0 { didSet { backend.setRate(currentRate) } }
@Published var availableStreams = [Stream]() { didSet { handleAvailableStreamsChange() } }
@Published var streamSelection: Stream? { didSet { rebuildTVMenu() } }
@@ -45,6 +64,8 @@ final class PlayerModel: ObservableObject {
@Published var lastSkipped: Segment? { didSet { rebuildTVMenu() } }
@Published var restoredSegments = [Segment]()
@Published var returnYouTubeDislike = ReturnYouTubeDislikeAPI()
@Published var channelWithDetails: Channel?
#if os(iOS)
@@ -55,24 +76,15 @@ final class PlayerModel: ObservableObject {
var accounts: AccountsModel
var comments: CommentsModel
var asset: AVURLAsset?
var composition = AVMutableComposition()
var loadedCompositionAssets = [AVMediaType]()
var controls: PlayerControlsModel { didSet {
backends.forEach { backend in
var backend = backend
backend.controls = controls
}
}}
var context: NSManagedObjectContext = PersistenceController.shared.container.viewContext
private var currentArtwork: MPMediaItemArtwork?
private var frequentTimeObserver: Any?
private var infrequentTimeObserver: Any?
private var playerTimeControlStatusObserver: Any?
private var statusObservation: NSKeyValueObservation?
private var timeObserverThrottle = Throttle(interval: 2)
var playingInPictureInPicture = false
var playingFullscreen = false
@Published var playingInPictureInPicture = false
@Published var presentingErrorDetails = false
var playerError: Error? { didSet {
@@ -91,13 +103,17 @@ final class PlayerModel: ObservableObject {
@Default(.closePiPAndOpenPlayerOnEnteringForeground) var closePiPAndOpenPlayerOnEnteringForeground
#endif
init(accounts: AccountsModel? = nil, comments: CommentsModel? = nil) {
private var currentArtwork: MPMediaItemArtwork?
init(accounts: AccountsModel? = nil, comments: CommentsModel? = nil, controls: PlayerControlsModel? = nil) {
self.accounts = accounts ?? AccountsModel()
self.comments = comments ?? CommentsModel()
self.controls = controls ?? PlayerControlsModel()
addFrequentTimeObserver()
addInfrequentTimeObserver()
addPlayerTimeControlStatusObserver()
self.avPlayerBackend = AVPlayerBackend(model: self, controls: controls)
self.mpvBackend = MPVBackend(model: self)
self.activeBackend = Defaults[.activeBackend]
}
func show() {
@@ -115,8 +131,15 @@ final class PlayerModel: ObservableObject {
}
func hide() {
controls.playingFullscreen = false
presentingPlayer = false
playerNavigationLinkActive = false
#if os(iOS)
if Defaults[.lockPortraitWhenBrowsing] {
Orientation.lockOrientation(.portrait, andRotateTo: .portrait)
}
#endif
}
func togglePlayer() {
@@ -139,11 +162,25 @@ final class PlayerModel: ObservableObject {
return false
}
return player.currentItem == nil || time == nil || !time!.isValid
return backend.isLoadingVideo
}
var isPlaying: Bool {
player.timeControlStatus == .playing
backend.isPlaying
}
var playerItemDuration: CMTime? {
backend.playerItemDuration
}
var playerItemDurationWithoutSponsorSegments: CMTime? {
(backend.playerItemDuration ?? .zero) - .secondsInDefaultTimescale(
sponsorBlock.segments.reduce(0) { $0 + $1.duration }
)
}
var videoDuration: TimeInterval? {
currentItem?.duration ?? currentVideo?.length ?? playerItemDuration?.seconds
}
var time: CMTime? {
@@ -154,32 +191,16 @@ final class PlayerModel: ObservableObject {
currentVideo?.live ?? false
}
var playerItemDuration: CMTime? {
player.currentItem?.asset.duration
}
var videoDuration: TimeInterval? {
currentItem?.duration ?? currentVideo?.length ?? player.currentItem?.asset.duration.seconds
}
func togglePlay() {
isPlaying ? pause() : play()
backend.togglePlay()
}
func play() {
guard player.timeControlStatus != .playing else {
return
}
player.play()
backend.play()
}
func pause() {
guard player.timeControlStatus != .paused else {
return
}
player.pause()
backend.pause()
}
func play(_ video: Video, at time: TimeInterval? = nil, inNavigationView: Bool = false) {
@@ -210,32 +231,49 @@ final class PlayerModel: ObservableObject {
self?.sponsorBlock.loadSegments(
videoID: video.videoID,
categories: Defaults[.sponsorBlockCategories]
) { [weak self] in
) {
if Defaults[.showChannelSubscribers] {
self?.loadCurrentItemChannelDetails()
}
}
guard Defaults[.enableReturnYouTubeDislike] else {
return
}
self?.returnYouTubeDislike.loadDislikes(videoID: video.videoID) { [weak self] dislikes in
self?.currentItem?.video?.dislikes = dislikes
}
}
}
if let url = stream.singleAssetURL {
logger.info("playing stream with one asset\(stream.kind == .hls ? " (HLS)" : ""): \(url)")
loadSingleAsset(url, stream: stream, of: video, preservingTime: preservingTime)
} else {
logger.info("playing stream with many assets:")
logger.info("composition audio asset: \(stream.audioAsset.url)")
logger.info("composition video asset: \(stream.videoAsset.url)")
controls.reset()
loadComposition(stream, of: video, preservingTime: preservingTime)
}
backend.playStream(
stream,
of: video,
preservingTime: preservingTime,
upgrading: upgrading
)
if !upgrading {
updateCurrentArtwork()
}
}
func upgradeToStream(_ stream: Stream) {
if !self.stream.isNil, self.stream != stream {
func saveTime(completionHandler: @escaping () -> Void = {}) {
guard let currentTime = backend.currentTime, currentTime.seconds > 0 else {
return
}
DispatchQueue.main.async { [weak self] in
self?.preservedTime = currentTime
completionHandler()
}
}
func upgradeToStream(_ stream: Stream, force: Bool = false) {
if !self.stream.isNil, force || self.stream != stream {
playStream(stream, of: currentVideo!, preservingTime: true, upgrading: true)
}
}
@@ -260,6 +298,9 @@ final class PlayerModel: ObservableObject {
}
private func handlePresentationChange() {
backend.setNeedsDrawing(presentingPlayer)
controls.hide()
if presentingPlayer, closePiPOnOpeningPlayer, playingInPictureInPicture {
DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) { [weak self] in
self?.closePiP()
@@ -272,7 +313,7 @@ final class PlayerModel: ObservableObject {
}
}
if !presentingPlayer, !pauseOnHidingPlayer, isPlaying {
if !presentingPlayer, !pauseOnHidingPlayer, backend.isPlaying {
DispatchQueue.main.asyncAfter(deadline: .now() + 1) { [weak self] in
self?.play()
}
@@ -280,6 +321,9 @@ final class PlayerModel: ObservableObject {
}
private func handleNavigationViewPlayerPresentationChange() {
backend.setNeedsDrawing(playerNavigationLinkActive)
controls.hide()
if pauseOnHidingPlayer, !playingInPictureInPicture, !playerNavigationLinkActive {
DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) {
self.pause()
@@ -287,423 +331,49 @@ final class PlayerModel: ObservableObject {
}
}
private func insertPlayerItem(
_ stream: Stream,
for video: Video,
preservingTime: Bool = false
) {
removeItemDidPlayToEndTimeObserver()
func changeActiveBackend(from: PlayerBackendType, to: PlayerBackendType) {
Defaults[.activeBackend] = to
self.activeBackend = to
playerItem = playerItem(stream)
guard playerItem != nil else {
guard var stream = stream else {
return
}
addItemDidPlayToEndTimeObserver()
attachMetadata(to: playerItem!, video: video, for: stream)
inactiveBackends().forEach { $0.pause() }
DispatchQueue.main.async { [weak self] in
guard let self = self else {
return
}
let fromBackend: PlayerBackend = from == .appleAVPlayer ? avPlayerBackend : mpvBackend
let toBackend: PlayerBackend = to == .appleAVPlayer ? avPlayerBackend : mpvBackend
self.stream = stream
self.composition = AVMutableComposition()
self.asset = nil
}
let startPlaying = {
#if !os(macOS)
try? AVAudioSession.sharedInstance().setActive(true)
#endif
if self.isAutoplaying(self.playerItem!) {
DispatchQueue.main.asyncAfter(deadline: .now() + 0.3) { [weak self] in
guard let self = self else {
return
}
if !preservingTime,
let segment = self.sponsorBlock.segments.first,
segment.start < 3,
self.lastSkipped.isNil
{
self.player.seek(
to: segment.endTime,
toleranceBefore: .secondsInDefaultTimescale(1),
toleranceAfter: .zero
) { finished in
guard finished else {
return
}
self.lastSkipped = segment
self.play()
}
} else {
self.play()
}
}
}
}
let replaceItemAndSeek = {
guard video == self.currentVideo else {
return
}
self.player.replaceCurrentItem(with: self.playerItem)
self.seekToPreservedTime { finished in
if let stream = toBackend.stream, toBackend.video == fromBackend.video {
toBackend.seek(to: fromBackend.currentTime?.seconds ?? .zero) { finished in
guard finished else {
return
}
self.preservedTime = nil
startPlaying()
}
}
if preservingTime {
if preservedTime.isNil {
saveTime {
replaceItemAndSeek()
startPlaying()
}
} else {
replaceItemAndSeek()
startPlaying()
}
} else {
player.replaceCurrentItem(with: playerItem)
startPlaying()
}
}
private func loadSingleAsset(
_ url: URL,
stream: Stream,
of video: Video,
preservingTime: Bool = false
) {
asset?.cancelLoading()
asset = AVURLAsset(url: url)
asset?.loadValuesAsynchronously(forKeys: Self.assetKeysToLoad) { [weak self] in
var error: NSError?
switch self?.asset?.statusOfValue(forKey: "duration", error: &error) {
case .loaded:
DispatchQueue.main.async { [weak self] in
self?.insertPlayerItem(stream, for: video, preservingTime: preservingTime)
}
case .failed:
DispatchQueue.main.async { [weak self] in
self?.playerError = error
}
default:
return
}
}
}
private func loadComposition(
_ stream: Stream,
of video: Video,
preservingTime: Bool = false
) {
loadedCompositionAssets = []
loadCompositionAsset(stream.audioAsset, stream: stream, type: .audio, of: video, preservingTime: preservingTime)
loadCompositionAsset(stream.videoAsset, stream: stream, type: .video, of: video, preservingTime: preservingTime)
}
private func loadCompositionAsset(
_ asset: AVURLAsset,
stream: Stream,
type: AVMediaType,
of video: Video,
preservingTime: Bool = false
) {
asset.loadValuesAsynchronously(forKeys: Self.assetKeysToLoad) { [weak self] in
guard let self = self else {
return
}
self.logger.info("loading \(type.rawValue) track")
let assetTracks = asset.tracks(withMediaType: type)
guard let compositionTrack = self.composition.addMutableTrack(
withMediaType: type,
preferredTrackID: kCMPersistentTrackID_Invalid
) else {
self.logger.critical("composition \(type.rawValue) addMutableTrack FAILED")
return
toBackend.play()
}
guard let assetTrack = assetTracks.first else {
self.logger.critical("asset \(type.rawValue) track FAILED")
return
}
self.stream = stream
streamSelection = stream
try! compositionTrack.insertTimeRange(
CMTimeRange(start: .zero, duration: CMTime.secondsInDefaultTimescale(video.length)),
of: assetTrack,
at: .zero
)
self.logger.critical("\(type.rawValue) LOADED")
guard self.streamSelection == stream else {
self.logger.critical("IGNORING LOADED")
return
}
self.loadedCompositionAssets.append(type)
if self.loadedCompositionAssets.count == 2 {
self.insertPlayerItem(stream, for: video, preservingTime: preservingTime)
}
}
}
private func playerItem(_: Stream) -> AVPlayerItem? {
if let asset = asset {
return AVPlayerItem(asset: asset)
} else {
return AVPlayerItem(asset: composition)
}
}
private func attachMetadata(to item: AVPlayerItem, video: Video, for _: Stream? = nil) {
#if !os(macOS)
var externalMetadata = [
makeMetadataItem(.commonIdentifierTitle, value: video.title),
makeMetadataItem(.quickTimeMetadataGenre, value: video.genre ?? ""),
makeMetadataItem(.commonIdentifierDescription, value: video.description ?? "")
]
if let thumbnailData = try? Data(contentsOf: video.thumbnailURL(quality: .medium)!),
let image = UIImage(data: thumbnailData),
let pngData = image.pngData()
{
let artworkItem = makeMetadataItem(.commonIdentifierArtwork, value: pngData)
externalMetadata.append(artworkItem)
}
item.externalMetadata = externalMetadata
#endif
item.preferredForwardBufferDuration = 5
observePlayerItemStatus(item)
}
private func observePlayerItemStatus(_ item: AVPlayerItem) {
statusObservation?.invalidate()
statusObservation = item.observe(\.status, options: [.old, .new]) { [weak self] playerItem, _ in
guard let self = self else {
return
}
switch playerItem.status {
case .readyToPlay:
if self.isAutoplaying(playerItem) {
self.play()
}
case .failed:
self.playerError = item.error
default:
return
}
}
}
#if !os(macOS)
private func makeMetadataItem(_ identifier: AVMetadataIdentifier, value: Any) -> AVMetadataItem {
let item = AVMutableMetadataItem()
item.identifier = identifier
item.value = value as? NSCopying & NSObjectProtocol
item.extendedLanguageTag = "und"
return item.copy() as! AVMetadataItem
}
#endif
private func addItemDidPlayToEndTimeObserver() {
NotificationCenter.default.addObserver(
self,
selector: #selector(itemDidPlayToEndTime),
name: NSNotification.Name.AVPlayerItemDidPlayToEndTime,
object: playerItem
)
}
private func removeItemDidPlayToEndTimeObserver() {
NotificationCenter.default.removeObserver(
self,
name: NSNotification.Name.AVPlayerItemDidPlayToEndTime,
object: playerItem
)
}
@objc func itemDidPlayToEndTime() {
prepareCurrentItemForHistory(finished: true)
if queue.isEmpty {
#if !os(macOS)
try? AVAudioSession.sharedInstance().setActive(false)
#endif
resetQueue()
#if os(tvOS)
controller?.playerView.dismiss(animated: false) { [weak self] in
self?.controller?.dismiss(animated: true)
}
#endif
} else {
advanceToNextItem()
}
}
private func saveTime(completionHandler: @escaping () -> Void = {}) {
let currentTime = player.currentTime()
guard currentTime.seconds > 0 else {
return
}
DispatchQueue.main.async { [weak self] in
self?.preservedTime = currentTime
completionHandler()
}
}
private func seekToPreservedTime(completionHandler: @escaping (Bool) -> Void = { _ in }) {
guard let time = preservedTime else {
return
}
player.seek(
to: time,
toleranceBefore: .secondsInDefaultTimescale(1),
toleranceAfter: .zero,
completionHandler: completionHandler
)
}
private func addFrequentTimeObserver() {
let interval = CMTime.secondsInDefaultTimescale(0.5)
frequentTimeObserver = player.addPeriodicTimeObserver(
forInterval: interval,
queue: .main
) { [weak self] _ in
guard let self = self else {
if !backend.canPlay(stream) {
guard let preferredStream = preferredStream(availableStreams) else {
return
}
guard !self.currentItem.isNil else {
return
}
stream = preferredStream
streamSelection = preferredStream
}
#if !os(tvOS)
self.updateNowPlayingInfo()
#endif
self.handleSegments(at: self.player.currentTime())
DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) { [weak self] in
self?.upgradeToStream(stream, force: true)
}
}
private func addInfrequentTimeObserver() {
let interval = CMTime.secondsInDefaultTimescale(5)
infrequentTimeObserver = player.addPeriodicTimeObserver(
forInterval: interval,
queue: .main
) { [weak self] _ in
guard let self = self else {
return
}
guard !self.currentItem.isNil else {
return
}
self.timeObserverThrottle.execute {
self.updateWatch()
}
}
}
private func addPlayerTimeControlStatusObserver() {
playerTimeControlStatusObserver = player.observe(\.timeControlStatus) { [weak self] player, _ in
guard let self = self,
self.player == player
else {
return
}
if player.timeControlStatus != .waitingToPlayAtSpecifiedRate {
self.objectWillChange.send()
}
if player.timeControlStatus == .playing, player.rate != self.currentRate {
player.rate = self.currentRate
}
#if os(macOS)
if player.timeControlStatus == .playing {
ScreenSaverManager.shared.disable(reason: "Yattee is playing video")
} else {
ScreenSaverManager.shared.enable()
}
#endif
self.timeObserverThrottle.execute {
self.updateWatch()
}
}
}
fileprivate func updateNowPlayingInfo() {
var nowPlayingInfo: [String: AnyObject] = [
MPMediaItemPropertyTitle: currentItem.video.title as AnyObject,
MPMediaItemPropertyArtist: currentItem.video.author as AnyObject,
MPNowPlayingInfoPropertyIsLiveStream: currentItem.video.live as AnyObject,
MPNowPlayingInfoPropertyElapsedPlaybackTime: player.currentTime().seconds as AnyObject,
MPNowPlayingInfoPropertyPlaybackQueueCount: queue.count as AnyObject,
MPMediaItemPropertyMediaType: MPMediaType.anyVideo.rawValue as AnyObject
]
if !currentArtwork.isNil {
nowPlayingInfo[MPMediaItemPropertyArtwork] = currentArtwork as AnyObject
}
if !currentItem.video.live {
let itemDuration = currentItem.videoDuration ?? currentItem.duration
let duration = itemDuration.isFinite ? Double(itemDuration) : nil
if !duration.isNil {
nowPlayingInfo[MPMediaItemPropertyPlaybackDuration] = duration as AnyObject
}
}
MPNowPlayingInfoCenter.default().nowPlayingInfo = nowPlayingInfo
}
private func updateCurrentArtwork() {
guard let thumbnailData = try? Data(contentsOf: currentItem.video.thumbnailURL(quality: .medium)!) else {
return
}
#if os(macOS)
let image = NSImage(data: thumbnailData)
#else
let image = UIImage(data: thumbnailData)
#endif
if image.isNil {
return
}
currentArtwork = MPMediaItemArtwork(boundsSize: image!.size) { _ in image! }
private func inactiveBackends() -> [PlayerBackend] {
[activeBackend == PlayerBackendType.mpv ? avPlayerBackend : mpvBackend]
}
func loadCurrentItemChannelDetails() {
@@ -721,7 +391,7 @@ final class PlayerModel: ObservableObject {
if let channel: Channel = response.typedContent() {
self?.channelWithDetails = channel
withAnimation {
self?.currentItem?.video.channel = channel
self?.currentItem?.video?.channel = channel
}
}
}
@@ -747,7 +417,8 @@ final class PlayerModel: ObservableObject {
func closeCurrentItem() {
prepareCurrentItemForHistory()
currentItem = nil
player.replaceCurrentItem(with: nil)
backend.closeItem()
}
func closePiP() {
@@ -762,46 +433,9 @@ final class PlayerModel: ObservableObject {
show()
#endif
doClosePiP(wasPlaying: wasPlaying)
backend.closePiP(wasPlaying: wasPlaying)
}
#if os(tvOS)
private func doClosePiP(wasPlaying: Bool) {
let item = player.currentItem
let time = player.currentTime()
self.player.replaceCurrentItem(with: nil)
guard !item.isNil else {
return
}
self.player.seek(to: time)
self.player.replaceCurrentItem(with: item)
guard wasPlaying else {
return
}
DispatchQueue.main.asyncAfter(deadline: .now() + 1) { [weak self] in
self?.play()
}
}
#else
private func doClosePiP(wasPlaying: Bool) {
controller?.playerView.player = nil
controller?.playerView.player = player
guard wasPlaying else {
return
}
DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) { [weak self] in
self?.play()
}
}
#endif
func handleCurrentItemChange() {
#if os(macOS)
Windows.player.window?.title = windowTitle
@@ -825,25 +459,75 @@ final class PlayerModel: ObservableObject {
}
func enterFullScreen() {
guard !playingFullscreen else {
guard !controls.playingFullscreen else {
return
}
logger.info("entering fullscreen")
controller?.playerView
.perform(NSSelectorFromString("enterFullScreenAnimated:completionHandler:"), with: false, with: nil)
backend.enterFullScreen()
}
func exitFullScreen() {
guard playingFullscreen else {
guard controls.playingFullscreen else {
return
}
logger.info("exiting fullscreen")
controller?.playerView
.perform(NSSelectorFromString("exitFullScreenAnimated:completionHandler:"), with: false, with: nil)
backend.exitFullScreen()
}
#endif
func updateNowPlayingInfo() {
guard let video = currentItem?.video else {
return
}
let currentTime = (backend.currentTime?.seconds.isFinite ?? false) ? backend.currentTime!.seconds : 0
var nowPlayingInfo: [String: AnyObject] = [
MPMediaItemPropertyTitle: video.title as AnyObject,
MPMediaItemPropertyArtist: video.author as AnyObject,
MPNowPlayingInfoPropertyIsLiveStream: video.live as AnyObject,
MPNowPlayingInfoPropertyElapsedPlaybackTime: currentTime as AnyObject,
MPNowPlayingInfoPropertyPlaybackQueueCount: queue.count as AnyObject,
MPNowPlayingInfoPropertyPlaybackQueueIndex: 1 as AnyObject,
MPMediaItemPropertyMediaType: MPMediaType.anyVideo.rawValue as AnyObject
]
if !currentArtwork.isNil {
nowPlayingInfo[MPMediaItemPropertyArtwork] = currentArtwork as AnyObject
}
if !video.live {
let itemDuration = (backend.playerItemDuration ?? .zero).seconds
let duration = itemDuration.isFinite ? Double(itemDuration) : nil
if !duration.isNil {
nowPlayingInfo[MPMediaItemPropertyPlaybackDuration] = duration as AnyObject
}
}
MPNowPlayingInfoCenter.default().nowPlayingInfo = nowPlayingInfo
}
func updateCurrentArtwork() {
guard let video = currentVideo,
let thumbnailData = try? Data(contentsOf: video.thumbnailURL(quality: .medium)!)
else {
return
}
#if os(macOS)
let image = NSImage(data: thumbnailData)
#else
let image = UIImage(data: thumbnailData)
#endif
if image.isNil {
return
}
currentArtwork = MPMediaItemArtwork(boundsSize: image!.size) { _ in image! }
}
}

View File

@@ -56,7 +56,7 @@ extension PlayerModel {
func playItem(_ item: PlayerQueueItem, video: Video? = nil, at time: TimeInterval? = nil) {
if !playingInPictureInPicture {
player.replaceCurrentItem(with: nil)
backend.closeItem()
}
comments.reset()
@@ -93,15 +93,9 @@ extension PlayerModel {
streams = streams.filter { $0.instance.id == id }
}
switch quality {
case .best:
return streams.first { $0.kind == .hls } ??
streams.filter { $0.kind == .stream }.max { $0.resolution < $1.resolution } ??
streams.first
default:
let sorted = streams.filter { $0.kind != .hls }.sorted { $0.resolution > $1.resolution }
return sorted.first(where: { $0.resolution.height <= quality.value.height })
}
streams = streams.filter { backend.canPlay($0) }
return backend.bestPlayable(streams, maxResolution: quality)
}
func advanceToNextItem() {
@@ -118,7 +112,7 @@ extension PlayerModel {
remove(newItem)
currentItem = newItem
player.pause()
pause()
accounts.api.loadDetails(newItem) { newItem in
self.playItem(newItem, video: newItem.video, at: time)
@@ -144,11 +138,7 @@ extension PlayerModel {
self.removeQueueItems()
}
player.replaceCurrentItem(with: nil)
}
func isAutoplaying(_ item: AVPlayerItem) -> Bool {
player.currentItem == item
backend.closeItem()
}
@discardableResult func enqueueVideo(
@@ -163,7 +153,7 @@ extension PlayerModel {
if play {
currentItem = item
// pause playing current video as it's going to be replaced with next one
player.pause()
pause()
}
queue.insert(item, at: prepending ? 0 : queue.endIndex)

View File

@@ -28,7 +28,7 @@ struct PlayerQueueItem: Hashable, Identifiable, Defaults.Serializable {
}
var duration: TimeInterval {
videoDuration ?? video.length
videoDuration ?? video?.length ?? .zero
}
var shouldRestartPlaying: Bool {

View File

@@ -38,9 +38,12 @@ extension PlayerModel {
return
}
player.seek(to: segment.endTime)
lastSkipped = segment
segmentRestorationTime = time
backend.seek(to: segment.endTime)
DispatchQueue.main.async { [weak self] in
self?.lastSkipped = segment
self?.segmentRestorationTime = time
}
logger.info("SponsorBlock skipping to: \(segment.end)")
}
@@ -63,13 +66,15 @@ extension PlayerModel {
}
restoredSegments.append(segment)
player.seek(to: time)
backend.seek(to: time)
resetLastSegment()
}
private func resetLastSegment() {
lastSkipped = nil
segmentRestorationTime = nil
DispatchQueue.main.async { [weak self] in
self?.lastSkipped = nil
self?.segmentRestorationTime = nil
}
}
func resetSegments() {

View File

@@ -66,7 +66,7 @@ extension PlayerModel {
func rebuildTVMenu() {
#if os(tvOS)
controller?.playerView.transportBarCustomMenuItems = [
avPlayerBackend.controller?.playerView.transportBarCustomMenuItems = [
restoreLastSkippedSegmentAction,
rateMenu,
streamsMenu

View File

@@ -0,0 +1,48 @@
import Alamofire
import Defaults
import Foundation
import Logging
import SwiftyJSON
final class ReturnYouTubeDislikeAPI: ObservableObject {
let logger = Logger(label: "stream.yattee.app.rytd")
@Published var videoID: String?
@Published var dislikes = -1
func loadDislikes(videoID: String, completionHandler: @escaping (Int) -> Void = { _ in }) {
guard self.videoID != videoID else {
completionHandler(dislikes)
return
}
self.videoID = videoID
DispatchQueue.main.async { [weak self] in
self?.requestDislikes(completionHandler: completionHandler)
}
}
private func requestDislikes(completionHandler: @escaping (Int) -> Void = { _ in }) {
AF.request(votesURL).responseDecodable(of: JSON.self) { [weak self] response in
guard let self = self else {
return
}
switch response.result {
case let .success(value):
let value = JSON(value).dictionaryValue["dislikes"]?.int
self.dislikes = value ?? -1
case let .failure(error):
self.logger.error("failed to load dislikes: \(error.localizedDescription)")
}
completionHandler(self.dislikes)
}
}
private var votesURL: String {
"https://returnyoutubedislikeapi.com/Votes?videoId=\(videoID ?? "")"
}
}
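For reference, a minimal usage sketch of the new client (the video ID and print call are illustrative, not part of this change). The class caches the last requested video ID and leaves dislikes at -1 when the request fails, so callers can treat negative counts as unavailable.
let dislikesAPI = ReturnYouTubeDislikeAPI()
dislikesAPI.loadDislikes(videoID: "dQw4w9WgXcQ") { count in
    // -1 signals that the vote count could not be loaded
    guard count >= 0 else { return }
    print("dislikes: \(count)")
}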

View File

@@ -17,6 +17,10 @@ class Segment: ObservableObject, Hashable {
segment.last!
}
var duration: Double {
end - start
}
var endTime: CMTime {
CMTime(seconds: end, preferredTimescale: 1000)
}

View File

@@ -5,7 +5,18 @@ import Foundation
// swiftlint:disable:next final_class
class Stream: Equatable, Hashable, Identifiable {
enum Resolution: String, CaseIterable, Comparable, Defaults.Serializable {
case hd1440p60, hd1440p, hd1080p60, hd1080p, hd720p60, hd720p, sd480p, sd360p, sd240p, sd144p, unknown
case hd2160p
case hd1440p60
case hd1440p
case hd1080p60
case hd1080p
case hd720p60
case hd720p
case sd480p
case sd360p
case sd240p
case sd144p
case unknown
var name: String {
"\(height)p\(refreshRate != -1 ? ", \(refreshRate) fps" : "")"
@@ -68,6 +79,7 @@ class Stream: Equatable, Hashable, Identifiable {
var kind: Kind!
var encoding: String!
var videoFormat: String!
init(
instance: Instance? = nil,
@@ -76,7 +88,8 @@ class Stream: Equatable, Hashable, Identifiable {
hlsURL: URL? = nil,
resolution: Resolution? = nil,
kind: Kind = .hls,
encoding: String? = nil
encoding: String? = nil,
videoFormat: String? = nil
) {
self.instance = instance
self.audioAsset = audioAsset
@@ -85,14 +98,35 @@ class Stream: Equatable, Hashable, Identifiable {
self.resolution = resolution
self.kind = kind
self.encoding = encoding
self.videoFormat = videoFormat
}
var quality: String {
kind == .hls ? "adaptive (HLS)" : "\(resolution.name) \(kind == .stream ? "(\(kind.rawValue))" : "")"
if resolution == .hd2160p {
return "4K (2160p)"
}
return kind == .hls ? "adaptive (HLS)" : "\(resolution.name)\(kind == .stream ? " (\(kind.rawValue))" : "")"
}
var format: String {
let lowercasedFormat = (videoFormat ?? "unknown").lowercased()
if lowercasedFormat.contains("webm") {
return "WEBM"
} else if lowercasedFormat.contains("avc1") {
return "avc1"
} else if lowercasedFormat.contains("av01") {
return "AV1"
} else if lowercasedFormat.contains("mpeg_4") || lowercasedFormat.contains("mp4") {
return "MP4"
} else {
return lowercasedFormat
}
}
var description: String {
"\(quality) - \(instance?.description ?? "")"
let formatString = format == "unknown" ? "" : " (\(format))"
return "\(quality)\(formatString) - \(instance?.description ?? "")"
}
var assets: [AVURLAsset] {

View File

@@ -23,6 +23,8 @@ extension Defaults.Keys {
static let sponsorBlockInstance = Key<String>("sponsorBlockInstance", default: "https://sponsor.ajay.app")
static let sponsorBlockCategories = Key<Set<String>>("sponsorBlockCategories", default: Set(SponsorBlockAPI.categories))
static let enableReturnYouTubeDislike = Key<Bool>("enableReturnYouTubeDislike", default: false)
static let favorites = Key<[FavoriteItem]>("favorites", default: [
.init(section: .trending("US", "default")),
.init(section: .trending("GB", "default")),
@@ -43,6 +45,7 @@ extension Defaults.Keys {
static let timeOnThumbnail = Key<Bool>("timeOnThumbnail", default: true)
static let roundedThumbnails = Key<Bool>("roundedThumbnails", default: true)
static let activeBackend = Key<PlayerBackendType>("activeBackend", default: .mpv)
static let quality = Key<ResolutionSetting>("quality", default: .best)
static let playerSidebar = Key<PlayerSidebarSetting>("playerSidebar", default: PlayerSidebarSetting.defaultValue)
static let playerInstanceID = Key<Instance.ID?>("playerInstance")
@@ -93,12 +96,23 @@ extension Defaults.Keys {
}
enum ResolutionSetting: String, CaseIterable, Defaults.Serializable {
case best, hd720p, sd480p, sd360p, sd240p, sd144p
case best
case hd2160p
case hd1440p60
case hd1440p
case hd1080p60
case hd1080p
case hd720p60
case hd720p
case sd480p
case sd360p
case sd240p
case sd144p
var value: Stream.Resolution {
switch self {
case .best:
return .hd720p
return .hd2160p
default:
return Stream.Resolution(rawValue: rawValue)!
}
@@ -108,6 +122,8 @@ enum ResolutionSetting: String, CaseIterable, Defaults.Serializable {
switch self {
case .best:
return "Best available quality"
case .hd2160p:
return "4K, 60fps"
default:
return value.name
}

View File

@@ -29,11 +29,15 @@ private struct CurrentPlaylistID: EnvironmentKey {
static let defaultValue: String? = nil
}
typealias LoadMoreContentHandlerType = () -> Void
private struct LoadMoreContentHandler: EnvironmentKey {
static let defaultValue: LoadMoreContentHandlerType = {}
}
typealias LoadMoreContentHandlerType = () -> Void
private struct ScrollViewBottomPaddingKey: EnvironmentKey {
static let defaultValue: Double = 30
}
extension EnvironmentValues {
var inNavigationView: Bool {
@@ -70,4 +74,9 @@ extension EnvironmentValues {
get { self[LoadMoreContentHandler.self] }
set { self[LoadMoreContentHandler.self] = newValue }
}
var scrollViewBottomPadding: Double {
get { self[ScrollViewBottomPaddingKey.self] }
set { self[ScrollViewBottomPaddingKey.self] = newValue }
}
}

View File

@@ -19,7 +19,7 @@ struct FavoritesView: View {
#endif
var body: some View {
PlayerControlsView {
BrowserPlayerControls {
ScrollView(.vertical, showsIndicators: false) {
if !accounts.current.isNil {
#if os(tvOS)
@@ -39,6 +39,7 @@ struct FavoritesView: View {
.padding(.top, item == first && RefreshControl.navigationBarTitleDisplayMode == .inline ? 10 : 0)
#endif
}
Color.clear.padding(.bottom, 30)
#endif
}
}

View File

@@ -49,7 +49,7 @@ struct AppSidebarNavigation: View {
.frame(minWidth: sidebarMinWidth)
VStack {
PlayerControlsView {
BrowserPlayerControls {
HStack(alignment: .center) {
Spacer()
Image(systemName: "play.tv")

View File

@@ -7,6 +7,7 @@ struct AppTabNavigation: View {
@EnvironmentObject<InstancesModel> private var instances
@EnvironmentObject<NavigationModel> private var navigation
@EnvironmentObject<PlayerModel> private var player
@EnvironmentObject<PlayerControlsModel> private var playerControls
@EnvironmentObject<PlaylistsModel> private var playlists
@EnvironmentObject<RecentsModel> private var recents
@EnvironmentObject<SearchModel> private var search
@@ -95,7 +96,7 @@ struct AppTabNavigation: View {
.toolbar { toolbarContent }
}
.tabItem {
Label("Favorites", systemImage: "heart")
Label("Favorites", systemImage: "heart.fill")
.accessibility(label: Text("Favorites"))
}
.tag(TabSelection.favorites)
@@ -129,7 +130,7 @@ struct AppTabNavigation: View {
.toolbar { toolbarContent }
}
.tabItem {
Label("Popular", systemImage: "arrow.up.right.circle")
Label("Popular", systemImage: "arrow.up.right.circle.fill")
.accessibility(label: Text("Popular"))
}
.tag(TabSelection.popular)
@@ -141,7 +142,7 @@ struct AppTabNavigation: View {
.toolbar { toolbarContent }
}
.tabItem {
Label("Trending", systemImage: "chart.bar")
Label("Trending", systemImage: "chart.bar.fill")
.accessibility(label: Text("Trending"))
}
.tag(TabSelection.trending)
@@ -187,6 +188,7 @@ struct AppTabNavigation: View {
.environmentObject(instances)
.environmentObject(navigation)
.environmentObject(player)
.environmentObject(playerControls)
.environmentObject(playlists)
.environmentObject(recents)
.environmentObject(subscriptions)

View File

@@ -1,5 +1,6 @@
import AVFAudio
import Defaults
import MediaPlayer
import SDWebImage
import SDWebImagePINPlugin
import SDWebImageWebPCoder
@@ -12,6 +13,7 @@ struct ContentView: View {
@EnvironmentObject<InstancesModel> private var instances
@EnvironmentObject<NavigationModel> private var navigation
@EnvironmentObject<PlayerModel> private var player
@EnvironmentObject<PlayerControlsModel> private var playerControls
@EnvironmentObject<PlaylistsModel> private var playlists
@EnvironmentObject<RecentsModel> private var recents
@EnvironmentObject<SearchModel> private var search
@@ -106,7 +108,7 @@ struct ContentView: View {
SDImageCodersManager.shared.addCoder(SDImageWebPCoder.shared)
SDWebImageManager.defaultImageCache = PINCache(name: "stream.yattee.app")
#if !os(macOS)
try? AVAudioSession.sharedInstance().setCategory(.playback, mode: .moviePlayback)
setupNowPlayingInfoCenter()
#endif
#if os(iOS)
@@ -135,9 +137,11 @@ struct ContentView: View {
menu.accounts = accounts
menu.navigation = navigation
menu.player = player
playerControls.player = player
player.accounts = accounts
player.comments = comments
player.controls = playerControls
if !accounts.current.isNil {
player.restoreQueue()
@@ -161,6 +165,56 @@ struct ContentView: View {
playlists.load()
}
func setupNowPlayingInfoCenter() {
#if !os(macOS)
try? AVAudioSession.sharedInstance().setCategory(.playback, mode: .moviePlayback)
UIApplication.shared.beginReceivingRemoteControlEvents()
#endif
MPRemoteCommandCenter.shared().playCommand.addTarget { _ in
player.play()
return .success
}
MPRemoteCommandCenter.shared().pauseCommand.addTarget { _ in
player.pause()
return .success
}
MPRemoteCommandCenter.shared().previousTrackCommand.isEnabled = false
MPRemoteCommandCenter.shared().nextTrackCommand.isEnabled = false
MPRemoteCommandCenter.shared().changePlaybackPositionCommand.addTarget { remoteEvent in
guard let event = remoteEvent as? MPChangePlaybackPositionCommandEvent
else {
return .commandFailed
}
player.backend.seek(to: event.positionTime)
return .success
}
let skipForwardCommand = MPRemoteCommandCenter.shared().skipForwardCommand
skipForwardCommand.isEnabled = true
skipForwardCommand.preferredIntervals = [10]
skipForwardCommand.addTarget { _ in
player.backend.seek(relative: .secondsInDefaultTimescale(10))
return .success
}
let skipBackwardCommand = MPRemoteCommandCenter.shared().skipBackwardCommand
skipBackwardCommand.isEnabled = true
skipBackwardCommand.preferredIntervals = [10]
skipBackwardCommand.addTarget { _ in
player.backend.seek(relative: .secondsInDefaultTimescale(-10))
return .success
}
}
func openWelcomeScreenIfAccountEmpty() {
guard Defaults[.instances].isEmpty else {
return

View File

@@ -1,35 +1,25 @@
import Defaults
import SwiftUI
struct Player: UIViewControllerRepresentable {
struct AppleAVPlayerView: UIViewControllerRepresentable {
@EnvironmentObject<CommentsModel> private var comments
@EnvironmentObject<NavigationModel> private var navigation
@EnvironmentObject<PlayerModel> private var player
@EnvironmentObject<SubscriptionsModel> private var subscriptions
var controller: PlayerViewController?
init(controller: PlayerViewController? = nil) {
self.controller = controller
}
func makeUIViewController(context _: Context) -> PlayerViewController {
if self.controller != nil {
return self.controller!
}
let controller = PlayerViewController()
func makeUIViewController(context _: Context) -> UIViewController {
let controller = AppleAVPlayerViewController()
controller.commentsModel = comments
controller.navigationModel = navigation
controller.playerModel = player
controller.subscriptionsModel = subscriptions
player.controller = controller
player.avPlayerBackend.controller = controller
return controller
}
func updateUIViewController(_: PlayerViewController, context _: Context) {
func updateUIViewController(_: UIViewController, context _: Context) {
player.rebuildTVMenu()
}
}

View File

@@ -0,0 +1,207 @@
import AVKit
import Defaults
import SwiftUI
final class AppleAVPlayerViewController: UIViewController {
var playerLoaded = false
var commentsModel: CommentsModel!
var navigationModel: NavigationModel!
var playerModel: PlayerModel!
var subscriptionsModel: SubscriptionsModel!
var playerView = AVPlayerViewController()
let persistenceController = PersistenceController.shared
#if !os(tvOS)
var aspectRatio: Double? {
let ratio = Double(playerView.videoBounds.width) / Double(playerView.videoBounds.height)
guard ratio.isFinite else {
return VideoPlayerView.defaultAspectRatio // swiftlint:disable:this implicit_return
}
return [ratio, 1.0].max()!
}
#endif
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
loadPlayer()
#if os(tvOS)
if !playerView.isBeingPresented, !playerView.isBeingDismissed {
present(playerView, animated: false)
}
#endif
}
#if os(tvOS)
override func viewDidDisappear(_ animated: Bool) {
super.viewDidDisappear(animated)
if !playerModel.presentingPlayer, !Defaults[.pauseOnHidingPlayer], !playerModel.isPlaying {
DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) { [weak self] in
self?.playerModel.play()
}
}
}
#endif
func loadPlayer() {
guard !playerLoaded else {
return
}
playerModel.avPlayerBackend.controller = self
playerView.player = playerModel.avPlayerBackend.avPlayer
playerView.allowsPictureInPicturePlayback = true
playerView.showsPlaybackControls = false
#if os(iOS)
if #available(iOS 14.2, *) {
playerView.canStartPictureInPictureAutomaticallyFromInline = true
}
#endif
playerView.delegate = self
#if os(tvOS)
var infoViewControllers = [UIHostingController<AnyView>]()
if CommentsModel.enabled {
infoViewControllers.append(infoViewController([.comments], title: "Comments"))
}
var queueSections = [NowPlayingView.ViewSection.playingNext]
if Defaults[.showHistoryInPlayer] {
queueSections.append(.playedPreviously)
}
infoViewControllers.append(contentsOf: [
infoViewController([.related], title: "Related"),
infoViewController(queueSections, title: "Queue")
])
playerView.customInfoViewControllers = infoViewControllers
#else
embedViewController()
#endif
}
#if os(tvOS)
func infoViewController(
_ sections: [NowPlayingView.ViewSection],
title: String
) -> UIHostingController<AnyView> {
let controller = UIHostingController(rootView:
AnyView(
NowPlayingView(sections: sections, inInfoViewController: true)
.frame(maxHeight: 600)
.environmentObject(commentsModel)
.environmentObject(playerModel)
.environmentObject(subscriptionsModel)
.environment(\.managedObjectContext, persistenceController.container.viewContext)
)
)
controller.title = title
return controller
}
#else
func embedViewController() {
playerView.view.frame = view.bounds
addChild(playerView)
view.addSubview(playerView.view)
playerView.didMove(toParent: self)
}
#endif
}
extension AppleAVPlayerViewController: AVPlayerViewControllerDelegate {
func playerViewControllerShouldDismiss(_: AVPlayerViewController) -> Bool {
true
}
func playerViewControllerShouldAutomaticallyDismissAtPictureInPictureStart(_: AVPlayerViewController) -> Bool {
true
}
func playerViewControllerWillBeginDismissalTransition(_: AVPlayerViewController) {
if Defaults[.pauseOnHidingPlayer] {
playerModel.pause()
}
dismiss(animated: false)
}
func playerViewControllerDidEndDismissalTransition(_: AVPlayerViewController) {}
func playerViewController(
_: AVPlayerViewController,
willBeginFullScreenPresentationWithAnimationCoordinator context: UIViewControllerTransitionCoordinator
) {
#if os(iOS)
if !context.isCancelled, Defaults[.lockLandscapeWhenEnteringFullscreen] {
Orientation.lockOrientation(.landscape, andRotateTo: UIDevice.current.orientation.isLandscape ? nil : .landscapeRight)
}
#endif
}
func playerViewController(
_: AVPlayerViewController,
willEndFullScreenPresentationWithAnimationCoordinator coordinator: UIViewControllerTransitionCoordinator
) {
let wasPlaying = playerModel.isPlaying
coordinator.animate(alongsideTransition: nil) { context in
#if os(iOS)
if wasPlaying {
self.playerModel.play()
}
#endif
if !context.isCancelled {
#if os(iOS)
self.playerModel.lockedOrientation = nil
if Defaults[.enterFullscreenInLandscape] {
Orientation.lockOrientation(.portrait, andRotateTo: .portrait)
}
if wasPlaying {
self.playerModel.play()
}
#endif
}
}
}
func playerViewController(
_: AVPlayerViewController,
restoreUserInterfaceForPictureInPictureStopWithCompletionHandler completionHandler: @escaping (Bool) -> Void
) {
DispatchQueue.main.asyncAfter(deadline: .now() + 0.5) {
if self.navigationModel.presentingChannel {
self.playerModel.playerNavigationLinkActive = true
} else {
self.playerModel.show()
}
#if os(tvOS)
if self.playerModel.playingInPictureInPicture {
self.present(self.playerView, animated: false) {
completionHandler(true)
}
}
#else
completionHandler(true)
#endif
}
}
func playerViewControllerWillStartPictureInPicture(_: AVPlayerViewController) {
playerModel.playingInPictureInPicture = true
playerModel.playerNavigationLinkActive = false
}
func playerViewControllerWillStopPictureInPicture(_: AVPlayerViewController) {
playerModel.playingInPictureInPicture = false
}
}

View File

@@ -0,0 +1,302 @@
import Foundation
import SwiftUI
struct PlayerControls: View {
static let animation = Animation.easeInOut(duration: 0.2)
private var player: PlayerModel!
@EnvironmentObject<PlayerControlsModel> private var model
#if os(iOS)
@Environment(\.verticalSizeClass) private var verticalSizeClass
#elseif os(tvOS)
enum Field: Hashable {
case play
case backward
case forward
}
@FocusState private var focusedField: Field?
#endif
init(player: PlayerModel) {
self.player = player
}
var body: some View {
VStack {
ZStack(alignment: .bottom) {
VStack(spacing: 0) {
Group {
statusBar
.padding(3)
#if os(macOS)
.background(VisualEffectBlur(material: .hudWindow))
#elseif os(iOS)
.background(VisualEffectBlur(blurStyle: .systemThinMaterial))
#endif
.mask(RoundedRectangle(cornerRadius: 3))
buttonsBar
.padding(.top, 4)
.padding(.horizontal, 4)
}
Spacer()
mediumButtonsBar
Spacer()
timeline
.offset(y: 10)
.zIndex(1)
bottomBar
#if os(macOS)
.background(VisualEffectBlur(material: .hudWindow))
#elseif os(iOS)
.background(VisualEffectBlur(blurStyle: .systemThinMaterial))
#endif
.mask(RoundedRectangle(cornerRadius: 3))
}
}
.opacity(model.presentingControls ? 1 : 0)
}
#if os(tvOS)
.onChange(of: model.presentingControls) { _ in
if model.presentingControls {
focusedField = .play
}
}
.onChange(of: focusedField) { _ in
model.resetTimer()
}
#else
.background(controlsBackground)
#endif
.environment(\.colorScheme, .dark)
}
#if !os(tvOS)
var controlsBackground: some View {
PlayerGestures()
.background(Color.black.opacity(model.presentingControls ? 0.5 : 0))
}
#endif
var timeline: some View {
TimelineView(duration: durationBinding, current: currentTimeBinding, cornerRadius: 0)
}
var durationBinding: Binding<Double> {
Binding<Double>(
get: { model.duration.seconds },
set: { value in model.duration = .secondsInDefaultTimescale(value) }
)
}
var currentTimeBinding: Binding<Double> {
Binding<Double>(
get: { model.currentTime.seconds },
set: { value in model.currentTime = .secondsInDefaultTimescale(value) }
)
}
var statusBar: some View {
HStack(spacing: 4) {
#if os(iOS)
hidePlayerButton
#endif
Text(playbackStatus)
Spacer()
#if !os(tvOS)
ToggleBackendButton()
Text("")
StreamControl()
#if os(macOS)
.frame(maxWidth: 160)
#endif
#else
Text(player.stream?.description ?? "")
#endif
}
.foregroundColor(.primary)
.padding(.trailing, 4)
.font(.system(size: 14))
}
private var hidePlayerButton: some View {
Button {
player.hide()
} label: {
Image(systemName: "chevron.down.circle.fill")
}
#if !os(tvOS)
.keyboardShortcut(.cancelAction)
#endif
}
private var playbackStatus: String {
if player.live {
return "LIVE"
}
guard !player.isLoadingVideo else {
return "loading..."
}
let videoLengthAtRate = (player.currentVideo?.length ?? 0) / Double(player.currentRate)
let remainingSeconds = videoLengthAtRate - (player.time?.seconds ?? 0)
if remainingSeconds < 60 {
return "less than a minute"
}
let timeFinishAt = Date().addingTimeInterval(remainingSeconds)
return "ends at \(formattedTimeFinishAt(timeFinishAt))"
}
private func formattedTimeFinishAt(_ date: Date) -> String {
let dateFormatter = DateFormatter()
dateFormatter.dateStyle = .none
dateFormatter.timeStyle = .short
return dateFormatter.string(from: date)
}
var buttonsBar: some View {
HStack {
#if !os(tvOS)
fullscreenButton
#endif
Spacer()
// button("Music Mode", systemImage: "music.note")
}
}
var fullscreenButton: some View {
button(
"Fullscreen",
systemImage: fullScreenLayout ? "arrow.down.right.and.arrow.up.left" : "arrow.up.left.and.arrow.down.right"
) {
model.toggleFullscreen(fullScreenLayout)
}
#if !os(tvOS)
.keyboardShortcut(fullScreenLayout ? .cancelAction : .defaultAction)
#endif
}
var mediumButtonsBar: some View {
HStack {
#if !os(tvOS)
button("Seek Backward", systemImage: "gobackward.10", size: 50, cornerRadius: 10) {
player.backend.seek(relative: .secondsInDefaultTimescale(-10))
}
#if os(tvOS)
.focused($focusedField, equals: .backward)
#else
.keyboardShortcut("k")
.keyboardShortcut(.leftArrow)
#endif
#endif
Spacer()
button(
model.isPlaying ? "Pause" : "Play",
systemImage: model.isPlaying ? "pause.fill" : "play.fill",
size: 50,
cornerRadius: 10
) {
player.backend.togglePlay()
}
#if os(tvOS)
.focused($focusedField, equals: .play)
#else
.keyboardShortcut("p")
.keyboardShortcut(.space)
#endif
.disabled(model.isLoadingVideo)
Spacer()
#if !os(tvOS)
button("Seek Forward", systemImage: "goforward.10", size: 50, cornerRadius: 10) {
player.backend.seek(relative: .secondsInDefaultTimescale(10))
}
#if os(tvOS)
.focused($focusedField, equals: .forward)
#else
.keyboardShortcut("l")
.keyboardShortcut(.rightArrow)
#endif
#endif
}
.font(.system(size: 30))
.padding(.horizontal, 4)
}
var bottomBar: some View {
HStack {
Spacer()
Text(model.playbackTime)
}
.font(.system(size: 15))
.padding(.horizontal, 5)
.padding(.vertical, 3)
.labelStyle(.iconOnly)
.foregroundColor(.primary)
}
func button(
_ label: String,
systemImage: String = "arrow.up.left.and.arrow.down.right",
size: Double = 30,
cornerRadius: Double = 3,
action: @escaping () -> Void = {}
) -> some View {
Button {
action()
model.resetTimer()
} label: {
Label(label, systemImage: systemImage)
.labelStyle(.iconOnly)
.padding()
.contentShape(Rectangle())
}
.buttonStyle(.plain)
.foregroundColor(.primary)
.frame(width: size, height: size)
#if os(macOS)
.background(VisualEffectBlur(material: .hudWindow))
#elseif os(iOS)
.background(VisualEffectBlur(blurStyle: .systemThinMaterial))
#endif
.mask(RoundedRectangle(cornerRadius: cornerRadius))
}
var fullScreenLayout: Bool {
#if os(iOS)
model.playingFullscreen || verticalSizeClass == .compact
#else
model.playingFullscreen
#endif
}
}
struct PlayerControls_Previews: PreviewProvider {
static var previews: some View {
PlayerControls(player: PlayerModel())
}
}

View File

@@ -0,0 +1,23 @@
import SwiftUI
struct ToggleBackendButton: View {
@EnvironmentObject<PlayerControlsModel> private var controls
@EnvironmentObject<PlayerModel> private var player
var body: some View {
Button {
player.saveTime {
player.changeActiveBackend(from: player.activeBackend, to: player.activeBackend.next())
controls.resetTimer()
}
} label: {
Text(player.activeBackend.label)
}
}
}
struct ToggleBackendButton_Previews: PreviewProvider {
static var previews: some View {
ToggleBackendButton()
}
}

View File

@@ -0,0 +1,70 @@
import GLKit
import Logging
import OpenGLES
final class MPVOGLView: GLKView {
private var logger = Logger(label: "stream.yattee.mpv.oglview")
private var defaultFBO: GLint?
var mpvGL: UnsafeMutableRawPointer?
var needsDrawing = true
override init(frame: CGRect) {
guard let context = EAGLContext(api: .openGLES3) else {
print("Failed to initialize OpenGLES 2.0 context")
exit(1)
}
logger.info("frame size: \(frame.width) x \(frame.height)")
super.init(frame: frame, context: context)
contentMode = .redraw
EAGLContext.setCurrent(context)
drawableColorFormat = .RGBA8888
drawableDepthFormat = .formatNone
drawableStencilFormat = .formatNone
defaultFBO = -1
isOpaque = false
fillBlack()
}
func fillBlack() {
glClearColor(0, 0, 0, 0)
glClear(UInt32(GL_COLOR_BUFFER_BIT))
}
override func draw(_: CGRect) {
glGetIntegerv(UInt32(GL_FRAMEBUFFER_BINDING), &defaultFBO!)
var dims: [GLint] = [0, 0, 0, 0]
glGetIntegerv(GLenum(GL_VIEWPORT), &dims)
if mpvGL != nil {
var data = mpv_opengl_fbo(
fbo: Int32(defaultFBO!),
w: Int32(dims[2]),
h: Int32(dims[3]),
internal_format: 0
)
var flip: CInt = 1
withUnsafeMutablePointer(to: &flip) { flip in
withUnsafeMutablePointer(to: &data) { data in
var params = [
mpv_render_param(type: MPV_RENDER_PARAM_OPENGL_FBO, data: data),
mpv_render_param(type: MPV_RENDER_PARAM_FLIP_Y, data: flip),
mpv_render_param()
]
mpv_render_context_render(OpaquePointer(mpvGL), &params)
}
}
}
}
required init?(coder aDecoder: NSCoder) {
super.init(coder: aDecoder)
}
}

View File

@@ -0,0 +1,26 @@
import UIKit
final class MPVViewController: UIViewController {
var client: MPVClient!
var glView: MPVOGLView!
init() {
client = MPVClient()
super.init(nibName: nil, bundle: nil)
}
required init?(coder: NSCoder) {
super.init(coder: coder)
}
override func viewDidLoad() {
super.viewDidLoad()
client.create(frame: view.frame)
glView = client.glView
view.addSubview(glView)
}
}

View File

@@ -1,225 +0,0 @@
import Defaults
import Foundation
import SwiftUI
struct PlaybackBar: View {
@Environment(\.colorScheme) private var colorScheme
@Environment(\.presentationMode) private var presentationMode
@Environment(\.inNavigationView) private var inNavigationView
@EnvironmentObject<PlayerModel> private var player
var body: some View {
HStack {
#if !os(macOS)
closeButton
#endif
if player.currentItem != nil {
HStack {
Text(playbackStatus)
Text("")
rateMenu
}
.font(.caption2)
#if os(macOS)
.padding(.leading, 4)
#endif
Spacer()
HStack(spacing: 4) {
if !player.lastSkipped.isNil {
restoreLastSkippedSegmentButton
}
if player.live {
Image(systemName: "dot.radiowaves.left.and.right")
} else if player.isLoadingAvailableStreams || player.isLoadingStream {
Image(systemName: "bolt.horizontal.fill")
} else if !player.playerError.isNil {
Button {
player.presentingErrorDetails = true
} label: {
Image(systemName: "exclamationmark.circle.fill")
.foregroundColor(.red)
}
.buttonStyle(.plain)
}
streamControl
.disabled(player.isLoadingAvailableStreams)
.frame(alignment: .trailing)
.onChange(of: player.streamSelection) { selection in
guard !selection.isNil else {
return
}
player.upgradeToStream(selection!)
}
#if os(macOS)
.frame(maxWidth: 180)
#endif
}
.transaction { t in t.animation = .none }
.font(.caption2)
} else {
Spacer()
}
}
.foregroundColor(colorScheme == .dark ? .gray : .black)
.alert(isPresented: $player.presentingErrorDetails) {
Alert(
title: Text("Error"),
message: Text(player.playerError?.localizedDescription ?? "")
)
}
.frame(minWidth: 0, maxWidth: .infinity, minHeight: 20)
.padding(4)
.background(colorScheme == .dark ? Color.black : Color.white)
}
private var closeButton: some View {
Button {
player.hide()
} label: {
Label(
"Close",
systemImage: inNavigationView ? "chevron.backward.circle.fill" : "chevron.down.circle.fill"
)
.labelStyle(.iconOnly)
}
.accessibilityLabel(Text("Close"))
.buttonStyle(.borderless)
.foregroundColor(.gray)
.keyboardShortcut(.cancelAction)
}
private var playbackStatus: String {
if player.live {
return "LIVE"
}
guard !player.isLoadingVideo else {
return "loading..."
}
let videoLengthAtRate = player.currentVideo!.length / Double(player.currentRate)
let remainingSeconds = videoLengthAtRate - player.time!.seconds
if remainingSeconds < 60 {
return "less than a minute"
}
let timeFinishAt = Date().addingTimeInterval(remainingSeconds)
return "ends at \(formattedTimeFinishAt(timeFinishAt))"
}
private func formattedTimeFinishAt(_ date: Date) -> String {
let dateFormatter = DateFormatter()
dateFormatter.dateStyle = .none
dateFormatter.timeStyle = .short
return dateFormatter.string(from: date)
}
private var rateMenu: some View {
#if os(macOS)
ratePicker
.labelsHidden()
.frame(maxWidth: 70)
#else
Menu {
ratePicker
} label: {
Text(player.rateLabel(player.currentRate))
}
#endif
}
private var ratePicker: some View {
Picker("", selection: $player.currentRate) {
ForEach(PlayerModel.availableRates, id: \.self) { rate in
Text(player.rateLabel(rate)).tag(rate)
}
}
}
private var restoreLastSkippedSegmentButton: some View {
HStack(spacing: 4) {
Button {
player.restoreLastSkippedSegment()
} label: {
HStack(spacing: 4) {
Image(systemName: "arrow.uturn.left.circle")
Text(player.lastSkipped!.title())
}
}
.buttonStyle(.plain)
Text("")
}
}
private var streamControl: some View {
#if os(macOS)
Picker("", selection: $player.streamSelection) {
ForEach(InstancesModel.all) { instance in
let instanceStreams = availableStreamsForInstance(instance)
if !instanceStreams.values.isEmpty {
let kinds = Array(instanceStreams.keys).sorted { $0 < $1 }
Section(header: Text(instance.longDescription)) {
ForEach(kinds, id: \.self) { key in
ForEach(instanceStreams[key] ?? []) { stream in
Text(stream.quality).tag(Stream?.some(stream))
}
if kinds.count > 1 {
Divider()
}
}
}
}
}
}
#else
Menu {
ForEach(InstancesModel.all) { instance in
let instanceStreams = availableStreamsForInstance(instance)
if !instanceStreams.values.isEmpty {
let kinds = Array(instanceStreams.keys).sorted { $0 < $1 }
Picker("", selection: $player.streamSelection) {
ForEach(kinds, id: \.self) { key in
ForEach(instanceStreams[key] ?? []) { stream in
Text(stream.description).tag(Stream?.some(stream))
}
if kinds.count > 1 {
Divider()
}
}
}
}
}
} label: {
Text(player.streamSelection?.quality ?? "")
}
#endif
}
private func availableStreamsForInstance(_ instance: Instance) -> [Stream.Kind: [Stream]] {
let streams = player.availableStreamsSorted.filter { $0.instance == instance }
return Dictionary(grouping: streams, by: \.kind!)
}
}
struct PlaybackBar_Previews: PreviewProvider {
static var previews: some View {
PlaybackBar()
.injectFixtureEnvironmentObjects()
}
}

View File

@@ -0,0 +1,64 @@
import SwiftUI
struct PlayerGestures: View {
@EnvironmentObject<PlayerModel> private var player
@EnvironmentObject<PlayerControlsModel> private var model
var body: some View {
HStack(spacing: 0) {
gestureRectangle
.tapRecognizer(
tapSensitivity: 0.2,
singleTapAction: {
model.toggle()
},
doubleTapAction: {
player.backend.seek(relative: .secondsInDefaultTimescale(-10))
},
anyTapAction: {
model.update()
}
)
gestureRectangle
.tapRecognizer(
tapSensitivity: 0.2,
singleTapAction: {
model.toggle()
},
doubleTapAction: {
player.backend.togglePlay()
},
anyTapAction: {
model.update()
}
)
gestureRectangle
.tapRecognizer(
tapSensitivity: 0.2,
singleTapAction: {
model.toggle()
},
doubleTapAction: {
player.backend.seek(relative: .secondsInDefaultTimescale(10))
},
anyTapAction: {
model.update()
}
)
}
}
var gestureRectangle: some View {
Color.clear
.contentShape(Rectangle())
.frame(maxWidth: .infinity, maxHeight: .infinity)
}
}
struct PlayerGestures_Previews: PreviewProvider {
static var previews: some View {
PlayerGestures()
}
}

View File

@@ -0,0 +1,79 @@
import SwiftUI
struct StreamControl: View {
@EnvironmentObject<PlayerModel> private var player
var body: some View {
Group {
#if os(macOS)
Picker("", selection: $player.streamSelection) {
ForEach(InstancesModel.all) { instance in
let instanceStreams = availableStreamsForInstance(instance)
if !instanceStreams.values.isEmpty {
let kinds = Array(instanceStreams.keys).sorted { $0 < $1 }
Section(header: Text(instance.longDescription)) {
ForEach(kinds, id: \.self) { key in
ForEach(instanceStreams[key] ?? []) { stream in
Text(stream.quality).tag(Stream?.some(stream))
}
if kinds.count > 1 {
Divider()
}
}
}
}
}
}
.disabled(player.isLoadingAvailableStreams)
#else
Menu {
ForEach(InstancesModel.all) { instance in
let instanceStreams = availableStreamsForInstance(instance)
if !instanceStreams.values.isEmpty {
let kinds = Array(instanceStreams.keys).sorted { $0 < $1 }
Picker("", selection: $player.streamSelection) {
ForEach(kinds, id: \.self) { key in
ForEach(instanceStreams[key] ?? []) { stream in
Text(stream.description).tag(Stream?.some(stream))
}
if kinds.count > 1 {
Divider()
}
}
}
}
}
} label: {
Text(player.streamSelection?.quality ?? "no playable streams")
}
.disabled(player.isLoadingAvailableStreams)
#endif
}
.transaction { t in t.animation = .none }
.onChange(of: player.streamSelection) { selection in
guard !selection.isNil else {
return
}
player.upgradeToStream(selection!)
}
.frame(alignment: .trailing)
}
private func availableStreamsForInstance(_ instance: Instance) -> [Stream.Kind: [Stream]] {
let streams = player.availableStreamsSorted.filter { $0.instance == instance }.filter { player.backend.canPlay($0) }
return Dictionary(grouping: streams, by: \.kind!)
}
}
struct StreamControl_Previews: PreviewProvider {
static var previews: some View {
StreamControl()
}
}

View File

@@ -0,0 +1,69 @@
import SwiftUI
struct TapRecognizerViewModifier: ViewModifier {
@State private var singleTapIsTaped: Bool = .init()
var tapSensitivity: Double
var singleTapAction: () -> Void
var doubleTapAction: () -> Void
var anyTapAction: () -> Void
init(
tapSensitivity: Double,
singleTapAction: @escaping () -> Void,
doubleTapAction: @escaping () -> Void,
anyTapAction: @escaping () -> Void
) {
self.tapSensitivity = tapSensitivity
self.singleTapAction = singleTapAction
self.doubleTapAction = doubleTapAction
self.anyTapAction = anyTapAction
}
func body(content: Content) -> some View {
content.gesture(simultaneouslyGesture)
}
private var singleTapGesture: some Gesture {
TapGesture(count: 1).onEnded {
anyTapAction()
singleTapIsTaped = true
DispatchQueue.main.asyncAfter(deadline: .now() + tapSensitivity) {
if singleTapIsTaped {
singleTapAction()
}
}
}
}
private var doubleTapGesture: some Gesture {
TapGesture(count: 2).onEnded {
singleTapIsTaped = false
doubleTapAction()
}
}
private var simultaneouslyGesture: some Gesture {
singleTapGesture.simultaneously(with: doubleTapGesture)
}
}
extension View {
func tapRecognizer(
tapSensitivity: Double,
singleTapAction: @escaping () -> Void,
doubleTapAction: @escaping () -> Void,
anyTapAction: @escaping () -> Void = {}
) -> some View {
modifier(
TapRecognizerViewModifier(
tapSensitivity: tapSensitivity,
singleTapAction: singleTapAction,
doubleTapAction: doubleTapAction,
anyTapAction: anyTapAction
)
)
}
}
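For reference, a minimal sketch of applying the modifier (the actions are placeholders). The single-tap action is deferred by tapSensitivity seconds, so a second tap inside that window cancels it and only doubleTapAction fires; anyTapAction defaults to a no-op.
Color.clear
    .contentShape(Rectangle())
    .tapRecognizer(
        tapSensitivity: 0.2,
        singleTapAction: { /* e.g. toggle controls */ },
        doubleTapAction: { /* e.g. seek forward */ }
    )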

View File

@@ -0,0 +1,197 @@
import SwiftUI
struct TimelineView: View {
@Binding private var duration: Double
@Binding private var current: Double
@State private var size = CGSize.zero
@State private var dragging = false
@State private var dragOffset: Double = 0
@State private var draggedFrom: Double = 0
private var start: Double = 0.0
private var height = 10.0
var cornerRadius: Double
var thumbTooltipWidth: Double = 100
@EnvironmentObject<PlayerModel> private var player
@EnvironmentObject<PlayerControlsModel> private var controls
init(duration: Binding<Double>, current: Binding<Double>, cornerRadius: Double = 10.0) {
_duration = duration
_current = current
self.cornerRadius = cornerRadius
}
var body: some View {
ZStack(alignment: .leading) {
RoundedRectangle(cornerRadius: cornerRadius)
.foregroundColor(.blue)
.frame(maxHeight: height)
RoundedRectangle(cornerRadius: cornerRadius)
.fill(
Color.green
)
.frame(maxHeight: height)
.frame(width: current * oneUnitWidth)
segmentsLayers
Circle()
.strokeBorder(.gray, lineWidth: 1)
.background(Circle().fill(dragging ? .gray : .white))
.offset(x: thumbOffset)
.foregroundColor(.red.opacity(0.6))
.frame(maxHeight: height * 2)
#if !os(tvOS)
.gesture(
DragGesture(minimumDistance: 0)
.onChanged { value in
if !dragging {
controls.removeTimer()
draggedFrom = current
}
dragging = true
let drag = value.translation.width
let change = (drag / size.width) * units
let changedCurrent = current + change
guard changedCurrent >= start, changedCurrent <= duration else {
return
}
withAnimation(Animation.linear(duration: 0.2)) {
dragOffset = drag
}
}
.onEnded { _ in
current = projectedValue
player.backend.seek(to: projectedValue)
dragging = false
dragOffset = 0.0
draggedFrom = 0.0
controls.resetTimer()
}
)
#endif
ZStack {
RoundedRectangle(cornerRadius: cornerRadius)
.frame(maxWidth: thumbTooltipWidth, maxHeight: 30)
Text(projectedValue.formattedAsPlaybackTime() ?? "--:--")
.foregroundColor(.black)
}
.animation(.linear(duration: 0.1))
.opacity(dragging ? 1 : 0)
.offset(x: thumbTooltipOffset, y: -(height * 2) - 7)
}
.background(GeometryReader { proxy in
Color.clear
.onAppear {
self.size = proxy.size
}
.onChange(of: proxy.size) { size in
self.size = size
}
})
#if !os(tvOS)
.gesture(DragGesture(minimumDistance: 0).onEnded { value in
let target = (value.location.x / size.width) * units
current = target
player.backend.seek(to: target)
})
#endif
}
var projectedValue: Double {
let change = (dragOffset / size.width) * units
let projected = draggedFrom + change
return projected.isFinite ? projected : start
}
var thumbOffset: Double {
let offset = dragging ? (draggedThumbHorizontalOffset + dragOffset) : thumbHorizontalOffset
return offset.isFinite ? offset : thumbLeadingOffset
}
var thumbTooltipOffset: Double {
let offset = (dragging ? ((current * oneUnitWidth) + dragOffset) : (current * oneUnitWidth)) - (thumbTooltipWidth / 2)
return offset.clamped(to: minThumbTooltipOffset ... maxThumbTooltipOffset)
}
var minThumbTooltipOffset: Double = -10
var maxThumbTooltipOffset: Double {
max(minThumbTooltipOffset, (units * oneUnitWidth) - thumbTooltipWidth + 10)
}
var segmentsLayers: some View {
ForEach(player.sponsorBlock.segments, id: \.uuid) { segment in
RoundedRectangle(cornerRadius: cornerRadius)
.offset(x: segmentLayerHorizontalOffset(segment))
.foregroundColor(.red)
.frame(maxHeight: height)
.frame(width: segmentLayerWidth(segment))
}
}
func segmentLayerHorizontalOffset(_ segment: Segment) -> Double {
segment.start * oneUnitWidth
}
func segmentLayerWidth(_ segment: Segment) -> Double {
let width = segment.duration * oneUnitWidth
return width.isFinite ? width : thumbLeadingOffset
}
var draggedThumbHorizontalOffset: Double {
thumbLeadingOffset + (draggedFrom * oneUnitWidth)
}
var thumbHorizontalOffset: Double {
thumbLeadingOffset + (current * oneUnitWidth)
}
var thumbLeadingOffset: Double {
-(size.width / 2)
}
var oneUnitWidth: Double {
let one = size.width / units
return one.isFinite ? one : 0
}
var units: Double {
duration - start
}
func setCurrent(_ current: Double) {
withAnimation {
self.current = current
}
}
}
struct TimelineView_Previews: PreviewProvider {
static var previews: some View {
VStack(spacing: 40) {
TimelineView(duration: .constant(100), current: .constant(0))
TimelineView(duration: .constant(100), current: .constant(1))
TimelineView(duration: .constant(100), current: .constant(30))
TimelineView(duration: .constant(100), current: .constant(50))
TimelineView(duration: .constant(100), current: .constant(66))
TimelineView(duration: .constant(100), current: .constant(90))
TimelineView(duration: .constant(100), current: .constant(100))
}
.padding()
}
}

View File

@@ -90,7 +90,7 @@ struct VideoDetails: View {
if fullScreen {
fullScreen = false
} else {
self.presentationMode.wrappedValue.dismiss()
self.player.hide()
}
}
}
@@ -98,12 +98,8 @@ struct VideoDetails: View {
switch currentPage {
case .info:
if player.isLoadingVideo {
PlaceholderProgressView()
} else {
ScrollView(.vertical, showsIndicators: false) {
detailsPage
}
ScrollView(.vertical, showsIndicators: false) {
detailsPage
}
case .queue:
PlayerQueueView(sidebarQueue: $sidebarQueue, fullScreen: $fullScreen)

View File

@@ -4,9 +4,9 @@ import SwiftUI
struct VideoDetailsPaddingModifier: ViewModifier {
static var defaultAdditionalDetailsPadding: Double {
#if os(macOS)
30
5
#else
40
10
#endif
}

View File

@@ -5,6 +5,7 @@ struct VideoPlayerSizeModifier: ViewModifier {
let geometry: GeometryProxy
let aspectRatio: Double?
let minimumHeightLeft: Double
let fullScreen: Bool
#if os(iOS)
@Environment(\.verticalSizeClass) private var verticalSizeClass
@@ -13,18 +14,19 @@ struct VideoPlayerSizeModifier: ViewModifier {
init(
geometry: GeometryProxy,
aspectRatio: Double? = nil,
minimumHeightLeft: Double? = nil
minimumHeightLeft: Double? = nil,
fullScreen: Bool = false
) {
self.geometry = geometry
self.aspectRatio = aspectRatio ?? VideoPlayerView.defaultAspectRatio
self.minimumHeightLeft = minimumHeightLeft ?? VideoPlayerView.defaultMinimumHeightLeft
self.fullScreen = fullScreen
}
func body(content: Content) -> some View {
content
.frame(maxHeight: maxHeight)
.aspectRatio(usedAspectRatio, contentMode: usedAspectRatioContentMode)
.edgesIgnoringSafeArea(edgesIgnoringSafeArea)
.frame(maxHeight: fullScreen ? .infinity : maxHeight)
.aspectRatio(usedAspectRatio, contentMode: .fit)
}
var usedAspectRatio: Double {
@@ -44,7 +46,7 @@ struct VideoPlayerSizeModifier: ViewModifier {
var usedAspectRatioContentMode: ContentMode {
#if os(iOS)
verticalSizeClass == .regular ? .fit : .fill
!fullScreen ? .fit : .fill
#else
.fit
#endif
@@ -59,14 +61,4 @@ struct VideoPlayerSizeModifier: ViewModifier {
return [height, 0].max()!
}
var edgesIgnoringSafeArea: Edge.Set {
let empty = Edge.Set()
#if os(iOS)
return verticalSizeClass == .compact ? .all : empty
#else
return empty
#endif
}
}

View File

@@ -17,12 +17,12 @@ struct VideoPlayerView: View {
}
@State private var playerSize: CGSize = .zero
@State private var hoveringPlayer = false
@State private var fullScreenDetails = false
@Environment(\.colorScheme) private var colorScheme
#if os(iOS)
@Environment(\.presentationMode) private var presentationMode
@Environment(\.horizontalSizeClass) private var horizontalSizeClass
@Environment(\.verticalSizeClass) private var verticalSizeClass
@@ -33,9 +33,12 @@ struct VideoPlayerView: View {
@State private var motionManager: CMMotionManager!
@State private var orientation = UIInterfaceOrientation.portrait
@State private var lastOrientation: UIInterfaceOrientation?
#elseif os(macOS)
var mouseLocation: CGPoint { NSEvent.mouseLocation }
#endif
@EnvironmentObject<AccountsModel> private var accounts
@EnvironmentObject<PlayerControlsModel> private var playerControls
@EnvironmentObject<PlayerModel> private var player
var body: some View {
@@ -60,12 +63,15 @@ struct VideoPlayerView: View {
.onChange(of: geometry.size) { size in
self.playerSize = size
}
.onChange(of: fullScreenDetails) { value in
player.backend.setNeedsDrawing(!value)
}
#if os(iOS)
.onReceive(NotificationCenter.default.publisher(for: UIDevice.orientationDidChangeNotification)) { _ in
handleOrientationDidChangeNotification()
}
.onDisappear {
guard !player.playingFullscreen else {
guard !playerControls.playingFullscreen else {
return // swiftlint:disable:this implicit_return
}
@@ -80,7 +86,6 @@ struct VideoPlayerView: View {
}
#endif
}
.navigationBarHidden(true)
#endif
}
@@ -88,32 +93,51 @@ struct VideoPlayerView: View {
Group {
Group {
#if os(tvOS)
player.playerView
playerView
.ignoresSafeArea(.all, edges: .all)
.onMoveCommand { direction in
if direction == .left {
playerControls.resetTimer()
player.backend.seek(relative: .secondsInDefaultTimescale(-10))
}
if direction == .right {
playerControls.resetTimer()
player.backend.seek(relative: .secondsInDefaultTimescale(10))
}
if direction == .up {
playerControls.show()
playerControls.resetTimer()
}
if direction == .down {
playerControls.show()
playerControls.resetTimer()
}
}
#else
GeometryReader { geometry in
VStack(spacing: 0) {
#if os(iOS)
if verticalSizeClass == .regular {
PlaybackBar()
}
#elseif os(macOS)
PlaybackBar()
#endif
if player.currentItem.isNil {
playerPlaceholder(geometry: geometry)
} else if player.playingInPictureInPicture {
pictureInPicturePlaceholder(geometry: geometry)
} else {
player.playerView
.modifier(
VideoPlayerSizeModifier(
geometry: geometry,
aspectRatio: player.controller?.aspectRatio
)
playerView
#if !os(tvOS)
.modifier(
VideoPlayerSizeModifier(
geometry: geometry,
aspectRatio: player.avPlayerBackend.controller?.aspectRatio,
fullScreen: playerControls.playingFullscreen
)
)
#endif
}
}
.frame(maxWidth: fullScreenLayout ? .infinity : nil, maxHeight: fullScreenLayout ? .infinity : nil)
.onHover { hovering in
hoveringPlayer = hovering
hovering ? playerControls.show() : playerControls.hide()
}
#if os(iOS)
.onSwipeGesture(
up: {
@@ -123,45 +147,101 @@ struct VideoPlayerView: View {
},
down: { player.hide() }
)
#elseif os(macOS)
.onAppear(perform: {
NSEvent.addLocalMonitorForEvents(matching: [.mouseMoved]) {
if hoveringPlayer {
playerControls.resetTimer()
}
return $0
}
})
#endif
.background(Color.black)
Group {
#if os(iOS)
if verticalSizeClass == .regular {
VideoDetails(sidebarQueue: sidebarQueueBinding, fullScreen: $fullScreenDetails)
}
#if !os(tvOS)
if !playerControls.playingFullscreen {
Group {
#if os(iOS)
if verticalSizeClass == .regular {
VideoDetails(sidebarQueue: sidebarQueueBinding, fullScreen: $fullScreenDetails)
}
#else
VideoDetails(sidebarQueue: sidebarQueueBinding, fullScreen: $fullScreenDetails)
#endif
}
.background(colorScheme == .dark ? Color.black : Color.white)
.modifier(VideoDetailsPaddingModifier(
geometry: geometry,
aspectRatio: player.controller?.aspectRatio,
fullScreen: fullScreenDetails
))
#else
VideoDetails(sidebarQueue: sidebarQueueBinding, fullScreen: $fullScreenDetails)
#endif
}
.background(colorScheme == .dark ? Color.black : Color.white)
.modifier(VideoDetailsPaddingModifier(
geometry: geometry,
aspectRatio: player.avPlayerBackend.controller?.aspectRatio,
fullScreen: fullScreenDetails
))
}
#endif
}
#endif
}
.background(colorScheme == .dark ? Color.black : Color.white)
.background(((colorScheme == .dark || fullScreenLayout) ? Color.black : Color.white).edgesIgnoringSafeArea(.all))
#if os(macOS)
.frame(minWidth: 650)
#endif
#if os(iOS)
if sidebarQueue {
PlayerQueueView(sidebarQueue: .constant(true), fullScreen: $fullScreenDetails)
.frame(maxWidth: 350)
}
#elseif os(macOS)
if Defaults[.playerSidebar] != .never {
PlayerQueueView(sidebarQueue: sidebarQueueBinding, fullScreen: $fullScreenDetails)
.frame(minWidth: 300)
}
#endif
if !playerControls.playingFullscreen {
#if os(iOS)
if sidebarQueue {
PlayerQueueView(sidebarQueue: .constant(true), fullScreen: $fullScreenDetails)
.frame(maxWidth: 350)
}
#elseif os(macOS)
if Defaults[.playerSidebar] != .never {
PlayerQueueView(sidebarQueue: sidebarQueueBinding, fullScreen: $fullScreenDetails)
.frame(minWidth: 300)
}
#endif
}
}
.ignoresSafeArea(.all, edges: fullScreenLayout ? .vertical : Edge.Set())
#if os(iOS)
.statusBar(hidden: playerControls.playingFullscreen)
.navigationBarHidden(true)
#endif
}
var playerView: some View {
ZStack(alignment: .top) {
switch player.activeBackend {
case .mpv:
player.mpvPlayerView
.overlay(GeometryReader { proxy in
Color.clear
.onAppear {
player.playerSize = proxy.size
}
.onChange(of: proxy.size) { _ in
player.playerSize = proxy.size
}
})
case .appleAVPlayer:
player.avPlayerView
}
#if !os(tvOS)
PlayerGestures()
#endif
PlayerControls(player: player)
}
}
var fullScreenLayout: Bool {
#if os(iOS)
playerControls.playingFullscreen || verticalSizeClass == .compact
#else
playerControls.playingFullscreen
#endif
}
func playerPlaceholder(geometry: GeometryProxy) -> some View {
@@ -235,7 +315,7 @@ struct VideoPlayerView: View {
private func configureOrientationUpdatesBasedOnAccelerometer() {
if UIDevice.current.orientation.isLandscape,
enterFullscreenInLandscape,
!player.playingFullscreen,
!playerControls.playingFullscreen,
!player.playingInPictureInPicture
{
DispatchQueue.main.async {

View File

@@ -22,7 +22,43 @@ struct PlaylistsView: View {
}
var body: some View {
PlayerControlsView {
BrowserPlayerControls(toolbar: {
HStack {
HStack {
newPlaylistButton
.offset(x: -10)
if currentPlaylist != nil {
editPlaylistButton
}
}
if !model.isEmpty {
Spacer()
}
HStack {
if model.isEmpty {
Text("No Playlists")
.foregroundColor(.secondary)
} else {
selectPlaylistButton
.transaction { t in t.animation = .none }
}
}
Spacer()
if currentPlaylist != nil {
HStack(spacing: 0) {
playButton
shuffleButton
}
.offset(x: 10)
}
}
.padding(.horizontal)
}) {
SignInRequiredView(title: "Playlists") {
VStack {
#if os(tvOS)
@@ -41,6 +77,7 @@ struct PlaylistsView: View {
Spacer()
#else
VerticalCells(items: items)
.environment(\.scrollViewBottomPadding, 70)
#endif
}
.environment(\.currentPlaylistID, currentPlaylist?.id)
@@ -48,6 +85,12 @@ struct PlaylistsView: View {
}
}
}
.onAppear {
model.load()
}
.onChange(of: accounts.current) { _ in
model.load(force: true)
}
#if os(tvOS)
.fullScreenCover(isPresented: $showingNewPlaylist, onDismiss: selectCreatedPlaylist) {
PlaylistFormView(playlist: $createdPlaylist)
@@ -57,67 +100,25 @@ struct PlaylistsView: View {
PlaylistFormView(playlist: $editedPlaylist)
.environmentObject(accounts)
}
.focusScope(focusNamespace)
#else
.background(
EmptyView()
.sheet(isPresented: $showingNewPlaylist, onDismiss: selectCreatedPlaylist) {
PlaylistFormView(playlist: $createdPlaylist)
.environmentObject(accounts)
}
)
.background(
EmptyView()
.sheet(isPresented: $showingEditPlaylist, onDismiss: selectEditedPlaylist) {
PlaylistFormView(playlist: $editedPlaylist)
.environmentObject(accounts)
}
)
.background(
EmptyView()
.sheet(isPresented: $showingNewPlaylist, onDismiss: selectCreatedPlaylist) {
PlaylistFormView(playlist: $createdPlaylist)
.environmentObject(accounts)
}
)
.background(
EmptyView()
.sheet(isPresented: $showingEditPlaylist, onDismiss: selectEditedPlaylist) {
PlaylistFormView(playlist: $editedPlaylist)
.environmentObject(accounts)
}
)
#endif
.toolbar {
#if os(iOS)
ToolbarItemGroup(placement: .bottomBar) {
Group {
if model.isEmpty {
Text("No Playlists")
.foregroundColor(.secondary)
} else {
selectPlaylistButton
.transaction { t in t.animation = .none }
}
Spacer()
if currentPlaylist != nil {
HStack(spacing: 10) {
playButton
shuffleButton
}
Spacer()
}
HStack(spacing: 2) {
newPlaylistButton
if currentPlaylist != nil {
editPlaylistButton
}
}
}
}
#endif
}
#if os(tvOS)
.focusScope(focusNamespace)
#endif
.onAppear {
model.load()
}
.onChange(of: accounts.current) { _ in
model.load(force: true)
}
#if os(iOS)
.navigationBarTitleDisplayMode(RefreshControl.navigationBarTitleDisplayMode)
.navigationBarTitleDisplayMode(RefreshControl.navigationBarTitleDisplayMode)
#endif
}
@@ -223,7 +224,7 @@ struct PlaylistsView: View {
}
} label: {
Text(currentPlaylist?.title ?? "Select playlist")
.frame(maxWidth: 140, alignment: .leading)
.frame(maxWidth: 140, alignment: .center)
}
#endif
}
@@ -234,16 +235,17 @@ struct PlaylistsView: View {
self.showingEditPlaylist = true
}) {
HStack(spacing: 8) {
Image(systemName: "slider.horizontal.3")
Text("Edit")
Image(systemName: "rectangle.and.pencil.and.ellipsis")
}
}
}
var newPlaylistButton: some View {
Button(action: { self.showingNewPlaylist = true }) {
HStack(spacing: 8) {
HStack(spacing: 0) {
Image(systemName: "plus")
.padding(8)
.contentShape(Rectangle())
#if os(tvOS)
Text("New Playlist")
#endif
@@ -256,6 +258,8 @@ struct PlaylistsView: View {
player.play(items.compactMap(\.video))
} label: {
Image(systemName: "play")
.padding(8)
.contentShape(Rectangle())
}
}
@@ -264,6 +268,8 @@ struct PlaylistsView: View {
player.play(items.compactMap(\.video), shuffling: true)
} label: {
Image(systemName: "shuffle")
.padding(8)
.contentShape(Rectangle())
}
}

View File

@@ -0,0 +1,54 @@
import Foundation
final class RepeatingTimer {
let timeInterval: TimeInterval
init(timeInterval: TimeInterval) {
self.timeInterval = timeInterval
}
private lazy var timer: DispatchSourceTimer = {
let t = DispatchSource.makeTimerSource()
t.schedule(deadline: .now() + self.timeInterval, repeating: self.timeInterval)
t.setEventHandler { [weak self] in
self?.eventHandler?()
}
return t
}()
var eventHandler: (() -> Void)?
private enum State {
case suspended
case resumed
}
private var state: State = .suspended
deinit {
timer.setEventHandler {}
timer.cancel()
/*
If the timer is suspended, calling cancel without resuming
triggers a crash. This is documented here https://forums.developer.apple.com/thread/15902
*/
resume()
eventHandler = nil
}
func resume() {
if state == .resumed {
return
}
state = .resumed
timer.resume()
}
func suspend() {
if state == .suspended {
return
}
state = .suspended
timer.suspend()
}
}
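For reference, a brief usage sketch (the interval and handler body are illustrative). The underlying DispatchSourceTimer starts suspended, so resume() must be called before the first tick; the state checks above guard against unbalanced resume/suspend calls.
let timer = RepeatingTimer(timeInterval: 1.0)
timer.eventHandler = {
    // e.g. tick down the auto-hide countdown for the player controls
    print("tick")
}
timer.resume()
// later, while the controls are hidden:
timer.suspend()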

View File

@@ -41,7 +41,23 @@ struct SearchView: View {
}
var body: some View {
PlayerControlsView {
BrowserPlayerControls(toolbar: {
#if os(iOS)
if accounts.app.supportsSearchFilters {
HStack(spacing: 0) {
Menu("Sort: \(searchSortOrder.name)") {
searchSortOrderPicker
}
.transaction { t in t.animation = .none }
Spacer()
filtersMenu
}
.padding()
}
#endif
}) {
#if os(iOS)
VStack {
SearchTextField(favoriteItem: $favoriteItem)
@@ -70,27 +86,19 @@ struct SearchView: View {
#endif
}
.toolbar {
#if !os(tvOS)
#if os(macOS)
ToolbarItemGroup(placement: toolbarPlacement) {
#if os(macOS)
FavoriteButton(item: favoriteItem)
.id(favoriteItem?.id)
#endif
FavoriteButton(item: favoriteItem)
.id(favoriteItem?.id)
if accounts.app.supportsSearchFilters {
Section {
#if os(macOS)
HStack {
Text("Sort:")
.foregroundColor(.secondary)
HStack {
Text("Sort:")
.foregroundColor(.secondary)
searchSortOrderPicker
}
#else
Menu("Sort: \(searchSortOrder.name)") {
searchSortOrderPicker
}
#endif
searchSortOrderPicker
}
}
.transaction { t in t.animation = .none }
}
@@ -99,9 +107,7 @@ struct SearchView: View {
filtersMenu
}
#if os(macOS)
SearchTextField()
#endif
SearchTextField()
}
#endif
}

View File

@@ -28,6 +28,8 @@ struct PlayerSettings: View {
@Default(.closePiPAndOpenPlayerOnEnteringForeground) private var closePiPAndOpenPlayerOnEnteringForeground
#endif
@Default(.enableReturnYouTubeDislike) private var enableReturnYouTubeDislike
#if os(iOS)
private var idiom: UIUserInterfaceIdiom {
UIDevice.current.userInterfaceIdiom
@@ -84,6 +86,7 @@ struct PlayerSettings: View {
keywordsToggle
showHistoryToggle
channelSubscribersToggle
returnYouTubeDislikeToggle
}
Section(header: SettingsHeader(text: "Picture in Picture")) {
@@ -200,6 +203,10 @@ struct PlayerSettings: View {
Toggle("Show subscribers count", isOn: $channelSubscribers)
}
private var returnYouTubeDislikeToggle: some View {
Toggle("Enable Return YouTube Dislike", isOn: $enableReturnYouTubeDislike)
}
private var pauseOnHidingPlayerToggle: some View {
Toggle("Pause when player is closed", isOn: $pauseOnHidingPlayer)
}

View File

@@ -179,7 +179,7 @@ struct SettingsView: View {
case .browsing:
return 350
case .player:
return 450
return 470
case .history:
return 480
case .sponsorBlock:

View File

@@ -33,7 +33,39 @@ struct TrendingView: View {
}
var body: some View {
PlayerControlsView {
BrowserPlayerControls(toolbar: {
HStack {
if accounts.app.supportsTrendingCategories {
HStack {
Text("Category")
.foregroundColor(.secondary)
categoryButton
// only way to disable Menu animation is to
// force redraw of the view when it changes
.id(UUID())
}
Spacer()
}
if let favoriteItem = favoriteItem {
FavoriteButton(item: favoriteItem, labelPadding: true)
.id(favoriteItem.id)
.labelStyle(.iconOnly)
Spacer()
}
HStack {
Text("Country")
.foregroundColor(.secondary)
countryButton
}
}
.padding(.horizontal)
}) {
Section {
VStack(alignment: .center, spacing: 0) {
#if os(tvOS)
@@ -44,6 +76,7 @@ struct TrendingView: View {
Spacer()
#else
VerticalCells(items: trending)
.environment(\.scrollViewBottomPadding, 70)
#endif
}
}
@@ -62,38 +95,6 @@ struct TrendingView: View {
}
countryButton
}
#elseif os(iOS)
ToolbarItemGroup(placement: .bottomBar) {
Group {
if accounts.app.supportsTrendingCategories {
HStack {
Text("Category")
.foregroundColor(.secondary)
categoryButton
// only way to disable Menu animation is to
// force redraw of the view when it changes
.id(UUID())
}
Spacer()
}
if let favoriteItem = favoriteItem {
FavoriteButton(item: favoriteItem)
.id(favoriteItem.id)
Spacer()
}
HStack {
Text("Country")
.foregroundColor(.secondary)
countryButton
}
}
}
#endif
}
.onChange(of: resource) { _ in

View File

@@ -6,6 +6,7 @@ struct VerticalCells: View {
@Environment(\.verticalSizeClass) private var verticalSizeClass
#endif
@Environment(\.scrollViewBottomPadding) private var scrollViewBottomPadding
@Environment(\.loadMoreContentHandler) private var loadMoreContentHandler
var items = [ContentItem]()
@@ -20,6 +21,9 @@ struct VerticalCells: View {
}
}
.padding()
#if !os(tvOS)
Color.clear.padding(.bottom, scrollViewBottomPadding)
#endif
}
.edgesIgnoringSafeArea(.horizontal)
#if os(macOS)

View File

@@ -72,7 +72,7 @@ struct VideoCell: View {
}
if !playNowContinues {
player.player.seek(to: .zero)
player.backend.seek(to: .zero)
}
player.play()
@@ -255,7 +255,6 @@ struct VideoCell: View {
HStack(spacing: 8) {
if let date = video.publishedDate {
HStack(spacing: 2) {
Image(systemName: "calendar")
Text(date)
.allowsTightening(true)
}
@@ -272,7 +271,6 @@ struct VideoCell: View {
Spacer()
HStack(spacing: 2) {
Image(systemName: "clock")
Text(time)
}
}

View File

@@ -1,14 +1,21 @@
import Foundation
import SwiftUI
struct PlayerControlsView<Content: View>: View {
struct BrowserPlayerControls<Content: View, Toolbar: View>: View {
let content: Content
let toolbar: Toolbar?
@Environment(\.navigationStyle) private var navigationStyle
@EnvironmentObject<PlayerControlsModel> private var playerControls
@EnvironmentObject<PlayerModel> private var model
init(@ViewBuilder content: @escaping () -> Content) {
init(@ViewBuilder toolbar: @escaping () -> Toolbar? = { nil }, @ViewBuilder content: @escaping () -> Content) {
self.content = content()
self.toolbar = toolbar()
}
init(@ViewBuilder content: @escaping () -> Content) where Toolbar == EmptyView {
self.init(toolbar: { EmptyView() }, content: content)
}
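
An illustrative sketch of the new toolbar slot, mirroring the TrendingView and SearchView changes above; TrendingScreen and trendingItems are placeholder names, not part of this changeset:

    struct TrendingScreen: View {
        var trendingItems = [ContentItem]()

        var body: some View {
            BrowserPlayerControls(toolbar: {
                HStack {
                    Text("Country")
                        .foregroundColor(.secondary)
                    Spacer()
                }
                .padding(.horizontal)
            }) {
                VerticalCells(items: trendingItems)
            }
        }
    }

Content-only call sites (PopularView, SubscriptionsView and others in this changeset) keep compiling unchanged thanks to the Toolbar == EmptyView convenience initializer.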
var body: some View {
@@ -16,17 +23,30 @@ struct PlayerControlsView<Content: View>: View {
content
#if !os(tvOS)
.frame(minHeight: 0, maxHeight: .infinity)
.padding(.bottom, 50)
#endif
#if !os(tvOS)
controls
Group {
#if !os(tvOS)
#if !os(macOS)
toolbar
.frame(height: 100)
.offset(x: 0, y: -28)
#endif
controls
#endif
}
.borderTop(height: 0.4, color: Color("ControlsBorderColor"))
#if os(macOS)
.background(VisualEffectBlur(material: .sidebar))
#elseif os(iOS)
.background(VisualEffectBlur(blurStyle: .systemThinMaterial).edgesIgnoringSafeArea(.all))
#endif
}
}
private var controls: some View {
let controls = HStack {
HStack {
Button(action: {
model.togglePlayer()
}) {
@@ -57,6 +77,7 @@ struct PlayerControlsView<Content: View>: View {
Spacer()
}
.padding(.vertical)
.contentShape(Rectangle())
}
.padding(.vertical, 20)
@@ -64,7 +85,7 @@ struct PlayerControlsView<Content: View>: View {
ZStack(alignment: .bottom) {
HStack {
Group {
if model.isPlaying {
if playerControls.isPlaying {
Button(action: {
model.pause()
}) {
@@ -76,14 +97,16 @@ struct PlayerControlsView<Content: View>: View {
}) {
Label("Play", systemImage: "play.fill")
}
.disabled(model.player.currentItem.isNil)
}
}
.disabled(playerControls.isLoadingVideo)
.font(.system(size: 30))
.frame(minWidth: 30)
Button(action: { model.advanceToNextItem() }) {
Label("Next", systemImage: "forward.fill")
.padding(.vertical)
.contentShape(Rectangle())
}
.disabled(model.queue.isEmpty)
}
@@ -91,10 +114,9 @@ struct PlayerControlsView<Content: View>: View {
ProgressView(value: progressViewValue, total: progressViewTotal)
.progressViewStyle(.linear)
#if os(iOS)
.offset(x: 0, y: 8)
.frame(maxWidth: 60)
#else
.offset(x: 0, y: 15)
.offset(y: 6)
.frame(maxWidth: 70)
#endif
}
@@ -111,20 +133,6 @@ struct PlayerControlsView<Content: View>: View {
model.show()
})
#endif
return Group {
if #available(iOS 15.0, macOS 12.0, tvOS 15.0, *) {
controls
.background(Material.ultraThinMaterial)
} else {
controls
#if os(macOS)
.background(VisualEffectBlur(material: .hudWindow))
#elseif os(iOS)
.background(VisualEffectBlur(blurStyle: .systemUltraThinMaterial))
#endif
}
}
}
private var progressViewValue: Double {
@@ -138,7 +146,7 @@ struct PlayerControlsView<Content: View>: View {
struct PlayerControlsView_Previews: PreviewProvider {
static var previews: some View {
PlayerControlsView {
BrowserPlayerControls {
VStack {
Spacer()
Text("Hello")

View File

@@ -28,12 +28,12 @@ struct ChannelPlaylistView: View {
if inNavigationView {
content
} else {
PlayerControlsView {
BrowserPlayerControls {
content
}
}
#else
PlayerControlsView {
BrowserPlayerControls {
content
}
#endif

View File

@@ -33,12 +33,12 @@ struct ChannelVideosView: View {
if inNavigationView {
content
} else {
PlayerControlsView {
BrowserPlayerControls {
content
}
}
#else
PlayerControlsView {
BrowserPlayerControls {
content
}
#endif

View File

@@ -5,6 +5,12 @@ import SwiftUI
struct FavoriteButton: View {
let item: FavoriteItem!
let favorites = FavoritesModel.shared
let labelPadding: Bool
init(item: FavoriteItem?, labelPadding: Bool = false) {
self.item = item
self.labelPadding = labelPadding
}
@State private var isFavorite = false
@@ -19,11 +25,17 @@ struct FavoriteButton: View {
favorites.toggle(item)
isFavorite.toggle()
} label: {
if isFavorite {
Label("Remove from Favorites", systemImage: "heart.fill")
} else {
Label("Add to Favorites", systemImage: "heart")
Group {
if isFavorite {
Label("Remove from Favorites", systemImage: "heart.fill")
} else {
Label("Add to Favorites", systemImage: "heart")
}
}
#if os(iOS)
.padding(labelPadding ? 10 : 0)
.contentShape(Rectangle())
#endif
}
.disabled(item.isNil)
.onAppear {

View File

@@ -0,0 +1,41 @@
import SwiftUI
#if !os(macOS)
struct MPVPlayerView: UIViewControllerRepresentable {
@State private var controller = MPVViewController()
@EnvironmentObject<PlayerModel> private var player
func makeUIViewController(context _: Context) -> some UIViewController {
player.mpvBackend.controller = controller
player.mpvBackend.client = controller.client
return controller
}
func updateUIViewController(_: UIViewControllerType, context _: Context) {}
}
#else
struct MPVPlayerView: NSViewRepresentable {
@State private var client = MPVClient()
@State private var layer = VideoLayer()
@EnvironmentObject<PlayerModel> private var player
func makeNSView(context _: Context) -> some NSView {
player.mpvBackend.client = client
client.layer = layer
layer.client = client
let view = MPVOGLView()
view.layer = client.layer
view.wantsLayer = true
return view
}
func updateNSView(_: NSViewType, context _: Context) {}
}
#endif
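
A rough sketch of hosting the representable; PlayerSurface is a placeholder name, and a PlayerModel must already be injected as an environment object higher up (as YatteeApp does below):

    struct PlayerSurface: View {
        var body: some View {
            // Picks up PlayerModel from the environment and wires the
            // mpv backend to the created controller/client.
            MPVPlayerView()
                .edgesIgnoringSafeArea(.all)
        }
    }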

View File

@@ -20,7 +20,7 @@ struct PlaylistVideosView: View {
}
var body: some View {
PlayerControlsView {
BrowserPlayerControls {
VerticalCells(items: contentItems)
#if !os(tvOS)
.navigationTitle("\(playlist.title) Playlist")

View File

@@ -15,7 +15,7 @@ struct PopularView: View {
}
var body: some View {
PlayerControlsView {
BrowserPlayerControls {
VerticalCells(items: videos)
.onAppear {
resource?.addObserver(store)

View File

@@ -45,7 +45,7 @@ struct ShareButton: View {
shareAction(
accounts.api.shareURL(
contentItem,
time: player.player.currentTime()
time: player.backend.currentTime
)!
)
}
@@ -67,7 +67,7 @@ struct ShareButton: View {
accounts.api.shareURL(
contentItem,
frontendHost: "www.youtube.com",
time: player.player.currentTime()
time: player.backend.currentTime
)!
)
}

View File

@@ -60,7 +60,7 @@ struct SignInRequiredView<Content: View>: View {
struct SignInRequiredView_Previews: PreviewProvider {
static var previews: some View {
PlayerControlsView {
BrowserPlayerControls {
SignInRequiredView(title: "Subscriptions") {
Text("Only when signed in")
}

View File

@@ -15,7 +15,7 @@ struct SubscriptionsView: View {
}
var body: some View {
PlayerControlsView {
BrowserPlayerControls {
SignInRequiredView(title: "Subscriptions") {
VerticalCells(items: videos)
.onAppear {

View File

@@ -195,7 +195,7 @@ struct VideoContextMenuView: View {
Button {
navigation.presentAddToPlaylist(video)
} label: {
Label("Add to playlist...", systemImage: "text.badge.plus")
Label("Add to Playlist...", systemImage: "text.badge.plus")
}
}
@@ -203,7 +203,7 @@ struct VideoContextMenuView: View {
Button {
playlists.removeVideo(videoIndexID: video.indexID!, playlistID: playlistID)
} label: {
Label("Remove from playlist", systemImage: "text.badge.minus")
Label("Remove from Playlist", systemImage: "text.badge.minus")
}
}
}

View File

@@ -24,6 +24,7 @@ struct YatteeApp: App {
@StateObject private var menu = MenuModel()
@StateObject private var navigation = NavigationModel()
@StateObject private var player = PlayerModel()
@StateObject private var playerControls = PlayerControlsModel()
@StateObject private var playlists = PlaylistsModel()
@StateObject private var recents = RecentsModel()
@StateObject private var search = SearchModel()
@@ -41,6 +42,7 @@ struct YatteeApp: App {
.environmentObject(instances)
.environmentObject(navigation)
.environmentObject(player)
.environmentObject(playerControls)
.environmentObject(playlists)
.environmentObject(recents)
.environmentObject(subscriptions)
@@ -101,6 +103,7 @@ struct YatteeApp: App {
.environmentObject(instances)
.environmentObject(navigation)
.environmentObject(player)
.environmentObject(playerControls)
.environmentObject(playlists)
.environmentObject(recents)
.environmentObject(subscriptions)
@@ -115,6 +118,7 @@ struct YatteeApp: App {
.environmentObject(accounts)
.environmentObject(instances)
.environmentObject(player)
.environmentObject(playerControls)
.environmentObject(updater)
}
#endif

BIN
Vendor/mpv/iOS/lib/libass.a vendored Normal file

Binary file not shown.

BIN
Vendor/mpv/iOS/lib/libavcodec.a vendored Normal file

Binary file not shown.

BIN
Vendor/mpv/iOS/lib/libavdevice.a vendored Normal file

Binary file not shown.

BIN
Vendor/mpv/iOS/lib/libavfilter.a vendored Normal file

Binary file not shown.

BIN
Vendor/mpv/iOS/lib/libavformat.a vendored Normal file

Binary file not shown.

BIN
Vendor/mpv/iOS/lib/libavutil.a vendored Normal file

Binary file not shown.

BIN
Vendor/mpv/iOS/lib/libcrypto.a vendored Normal file

Binary file not shown.

BIN
Vendor/mpv/iOS/lib/libfreetype.a vendored Normal file

Binary file not shown.

BIN
Vendor/mpv/iOS/lib/libfribidi.a vendored Normal file

Binary file not shown.

BIN
Vendor/mpv/iOS/lib/libharfbuzz.a vendored Normal file

Binary file not shown.

BIN
Vendor/mpv/iOS/lib/libmpv.a vendored Normal file

Binary file not shown.

BIN
Vendor/mpv/iOS/lib/libssl.a vendored Normal file

Binary file not shown.

BIN
Vendor/mpv/iOS/lib/libswresample.a vendored Normal file

Binary file not shown.

BIN
Vendor/mpv/iOS/lib/libswscale.a vendored Normal file

Binary file not shown.

BIN
Vendor/mpv/iOS/lib/libuchardet.a vendored Normal file

Binary file not shown.

1969
Vendor/mpv/include/client.h vendored Normal file

File diff suppressed because it is too large

386
Vendor/mpv/include/qthelper.hpp vendored Normal file
View File

@@ -0,0 +1,386 @@
/* Copyright (C) 2017 the mpv developers
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#ifndef MPV_CLIENT_API_QTHELPER_H_
#define MPV_CLIENT_API_QTHELPER_H_
#include <mpv/client.h>
#if !MPV_ENABLE_DEPRECATED
#error "This helper is deprecated. Copy it into your project instead."
#else
/**
* Note: these helpers are provided for convenience for C++/Qt applications.
* This is based on the public API in client.h, and it does not encode any
* knowledge that is not known or guaranteed outside of the C client API. You
* can even copy and modify this code as you like, or implement similar things
* for other languages.
*/
#include <cstring>
#include <QVariant>
#include <QString>
#include <QList>
#include <QHash>
#include <QSharedPointer>
#include <QMetaType>
namespace mpv {
namespace qt {
// Wrapper around mpv_handle. Does refcounting under the hood.
class Handle
{
struct container {
container(mpv_handle *h) : mpv(h) {}
~container() { mpv_terminate_destroy(mpv); }
mpv_handle *mpv;
};
QSharedPointer<container> sptr;
public:
// Construct a new Handle from a raw mpv_handle with refcount 1. If the
// last Handle goes out of scope, the mpv_handle will be destroyed with
// mpv_terminate_destroy().
// Never destroy the mpv_handle manually when using this wrapper. You
// will create dangling pointers. Just let the wrapper take care of
// destroying the mpv_handle.
// Never create multiple wrappers from the same raw mpv_handle; copy the
// wrapper instead (that's what it's for).
static Handle FromRawHandle(mpv_handle *handle) {
Handle h;
h.sptr = QSharedPointer<container>(new container(handle));
return h;
}
// Return the raw handle; for use with the libmpv C API.
operator mpv_handle*() const { return sptr ? (*sptr).mpv : 0; }
};
static inline QVariant node_to_variant(const mpv_node *node)
{
switch (node->format) {
case MPV_FORMAT_STRING:
return QVariant(QString::fromUtf8(node->u.string));
case MPV_FORMAT_FLAG:
return QVariant(static_cast<bool>(node->u.flag));
case MPV_FORMAT_INT64:
return QVariant(static_cast<qlonglong>(node->u.int64));
case MPV_FORMAT_DOUBLE:
return QVariant(node->u.double_);
case MPV_FORMAT_NODE_ARRAY: {
mpv_node_list *list = node->u.list;
QVariantList qlist;
for (int n = 0; n < list->num; n++)
qlist.append(node_to_variant(&list->values[n]));
return QVariant(qlist);
}
case MPV_FORMAT_NODE_MAP: {
mpv_node_list *list = node->u.list;
QVariantMap qmap;
for (int n = 0; n < list->num; n++) {
qmap.insert(QString::fromUtf8(list->keys[n]),
node_to_variant(&list->values[n]));
}
return QVariant(qmap);
}
default: // MPV_FORMAT_NONE, unknown values (e.g. future extensions)
return QVariant();
}
}
struct node_builder {
node_builder(const QVariant& v) {
set(&node_, v);
}
~node_builder() {
free_node(&node_);
}
mpv_node *node() { return &node_; }
private:
Q_DISABLE_COPY(node_builder)
mpv_node node_;
mpv_node_list *create_list(mpv_node *dst, bool is_map, int num) {
dst->format = is_map ? MPV_FORMAT_NODE_MAP : MPV_FORMAT_NODE_ARRAY;
mpv_node_list *list = new mpv_node_list();
dst->u.list = list;
if (!list)
goto err;
list->values = new mpv_node[num]();
if (!list->values)
goto err;
if (is_map) {
list->keys = new char*[num]();
if (!list->keys)
goto err;
}
return list;
err:
free_node(dst);
return NULL;
}
char *dup_qstring(const QString &s) {
QByteArray b = s.toUtf8();
char *r = new char[b.size() + 1];
if (r)
std::memcpy(r, b.data(), b.size() + 1);
return r;
}
bool test_type(const QVariant &v, QMetaType::Type t) {
// The Qt docs say: "Although this function is declared as returning
// "QVariant::Type(obsolete), the return value should be interpreted
// as QMetaType::Type."
// So a cast really seems to be needed to avoid warnings (urgh).
return static_cast<int>(v.type()) == static_cast<int>(t);
}
void set(mpv_node *dst, const QVariant &src) {
if (test_type(src, QMetaType::QString)) {
dst->format = MPV_FORMAT_STRING;
dst->u.string = dup_qstring(src.toString());
if (!dst->u.string)
goto fail;
} else if (test_type(src, QMetaType::Bool)) {
dst->format = MPV_FORMAT_FLAG;
dst->u.flag = src.toBool() ? 1 : 0;
} else if (test_type(src, QMetaType::Int) ||
test_type(src, QMetaType::LongLong) ||
test_type(src, QMetaType::UInt) ||
test_type(src, QMetaType::ULongLong))
{
dst->format = MPV_FORMAT_INT64;
dst->u.int64 = src.toLongLong();
} else if (test_type(src, QMetaType::Double)) {
dst->format = MPV_FORMAT_DOUBLE;
dst->u.double_ = src.toDouble();
} else if (src.canConvert<QVariantList>()) {
QVariantList qlist = src.toList();
mpv_node_list *list = create_list(dst, false, qlist.size());
if (!list)
goto fail;
list->num = qlist.size();
for (int n = 0; n < qlist.size(); n++)
set(&list->values[n], qlist[n]);
} else if (src.canConvert<QVariantMap>()) {
QVariantMap qmap = src.toMap();
mpv_node_list *list = create_list(dst, true, qmap.size());
if (!list)
goto fail;
list->num = qmap.size();
for (int n = 0; n < qmap.size(); n++) {
list->keys[n] = dup_qstring(qmap.keys()[n]);
if (!list->keys[n]) {
free_node(dst);
goto fail;
}
set(&list->values[n], qmap.values()[n]);
}
} else {
goto fail;
}
return;
fail:
dst->format = MPV_FORMAT_NONE;
}
void free_node(mpv_node *dst) {
switch (dst->format) {
case MPV_FORMAT_STRING:
delete[] dst->u.string;
break;
case MPV_FORMAT_NODE_ARRAY:
case MPV_FORMAT_NODE_MAP: {
mpv_node_list *list = dst->u.list;
if (list) {
for (int n = 0; n < list->num; n++) {
if (list->keys)
delete[] list->keys[n];
if (list->values)
free_node(&list->values[n]);
}
delete[] list->keys;
delete[] list->values;
}
delete list;
break;
}
default: ;
}
dst->format = MPV_FORMAT_NONE;
}
};
/**
* RAII wrapper that calls mpv_free_node_contents() on the pointer.
*/
struct node_autofree {
mpv_node *ptr;
node_autofree(mpv_node *a_ptr) : ptr(a_ptr) {}
~node_autofree() { mpv_free_node_contents(ptr); }
};
#if MPV_ENABLE_DEPRECATED
/**
* Return the given property as mpv_node converted to QVariant, or QVariant()
* on error.
*
* @deprecated use get_property() instead
*
* @param name the property name
*/
static inline QVariant get_property_variant(mpv_handle *ctx, const QString &name)
{
mpv_node node;
if (mpv_get_property(ctx, name.toUtf8().data(), MPV_FORMAT_NODE, &node) < 0)
return QVariant();
node_autofree f(&node);
return node_to_variant(&node);
}
/**
* Set the given property as mpv_node converted from the QVariant argument.
* @deprecated use set_property() instead
*/
static inline int set_property_variant(mpv_handle *ctx, const QString &name,
const QVariant &v)
{
node_builder node(v);
return mpv_set_property(ctx, name.toUtf8().data(), MPV_FORMAT_NODE, node.node());
}
/**
* Set the given option as mpv_node converted from the QVariant argument.
*
* @deprecated use set_property() instead
*/
static inline int set_option_variant(mpv_handle *ctx, const QString &name,
const QVariant &v)
{
node_builder node(v);
return mpv_set_option(ctx, name.toUtf8().data(), MPV_FORMAT_NODE, node.node());
}
/**
* mpv_command_node() equivalent. Returns QVariant() on error (and
* unfortunately, the same on success).
*
* @deprecated use command() instead
*/
static inline QVariant command_variant(mpv_handle *ctx, const QVariant &args)
{
node_builder node(args);
mpv_node res;
if (mpv_command_node(ctx, node.node(), &res) < 0)
return QVariant();
node_autofree f(&res);
return node_to_variant(&res);
}
#endif
/**
* This is used to return error codes wrapped in QVariant for functions which
* return QVariant.
*
* You can use get_error() or is_error() to extract the error status from a
* QVariant value.
*/
struct ErrorReturn
{
/**
* enum mpv_error value (or a value outside of it if ABI was extended)
*/
int error;
ErrorReturn() : error(0) {}
explicit ErrorReturn(int err) : error(err) {}
};
/**
* Return the mpv error code packed into a QVariant, or 0 (success) if it's not
* an error value.
*
* @return error code (<0) or success (>=0)
*/
static inline int get_error(const QVariant &v)
{
if (!v.canConvert<ErrorReturn>())
return 0;
return v.value<ErrorReturn>().error;
}
/**
* Return whether the QVariant carries a mpv error code.
*/
static inline bool is_error(const QVariant &v)
{
return get_error(v) < 0;
}
/**
* Return the given property as mpv_node converted to QVariant, or QVariant()
* on error.
*
* @param name the property name
* @return the property value, or an ErrorReturn with the error code
*/
static inline QVariant get_property(mpv_handle *ctx, const QString &name)
{
mpv_node node;
int err = mpv_get_property(ctx, name.toUtf8().data(), MPV_FORMAT_NODE, &node);
if (err < 0)
return QVariant::fromValue(ErrorReturn(err));
node_autofree f(&node);
return node_to_variant(&node);
}
/**
* Set the given property as mpv_node converted from the QVariant argument.
*
* @return mpv error code (<0 on error, >= 0 on success)
*/
static inline int set_property(mpv_handle *ctx, const QString &name,
const QVariant &v)
{
node_builder node(v);
return mpv_set_property(ctx, name.toUtf8().data(), MPV_FORMAT_NODE, node.node());
}
/**
* mpv_command_node() equivalent.
*
* @param args command arguments, with args[0] being the command name as string
* @return the property value, or an ErrorReturn with the error code
*/
static inline QVariant command(mpv_handle *ctx, const QVariant &args)
{
node_builder node(args);
mpv_node res;
int err = mpv_command_node(ctx, node.node(), &res);
if (err < 0)
return QVariant::fromValue(ErrorReturn(err));
node_autofree f(&res);
return node_to_variant(&res);
}
}
}
Q_DECLARE_METATYPE(mpv::qt::ErrorReturn)
#endif /* else #if MPV_ENABLE_DEPRECATED */
#endif

626
Vendor/mpv/include/render.h vendored Normal file
View File

@@ -0,0 +1,626 @@
/* Copyright (C) 2018 the mpv developers
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#ifndef MPV_CLIENT_API_RENDER_H_
#define MPV_CLIENT_API_RENDER_H_
#include "client.h"
#ifdef __cplusplus
extern "C" {
#endif
/**
* Overview
* --------
*
* This API can be used to make mpv render using supported graphic APIs (such
* as OpenGL). It can be used to handle video display.
*
* The renderer needs to be created with mpv_render_context_create() before
* you start playback (or otherwise cause a VO to be created). Then (with most
* backends) mpv_render_context_render() can be used to explicitly render the
* current video frame. Use mpv_render_context_set_update_callback() to get
* notified when there is a new frame to draw.
*
* Preferably rendering should be done in a separate thread. If you call
* normal libmpv API functions on the renderer thread, deadlocks can result
* (these are made non-fatal with timeouts, but user experience will obviously
* suffer). See "Threading" section below.
*
* You can output and embed video without this API by setting the mpv "wid"
* option to a native window handle (see "Embedding the video window" section
* in the client.h header). In general, using the render API is recommended,
* because window embedding can cause various issues, especially with GUI
* toolkits and certain platforms.
*
* Supported backends
* ------------------
*
* OpenGL: via MPV_RENDER_API_TYPE_OPENGL, see render_gl.h header.
*
* Threading
* ---------
*
* You are recommended to do rendering on a separate thread than normal libmpv
* use.
*
* The mpv_render_* functions can be called from any thread, under the
* following conditions:
* - only one of the mpv_render_* functions can be called at the same time
* (unless they belong to different mpv cores created by mpv_create())
* - never can be called from within the callbacks set with
* mpv_set_wakeup_callback() or mpv_render_context_set_update_callback()
* - if the OpenGL backend is used, for all functions the OpenGL context
* must be "current" in the calling thread, and it must be the same OpenGL
* context as the mpv_render_context was created with. Otherwise, undefined
* behavior will occur.
* - the thread does not call libmpv API functions other than the mpv_render_*
* functions, except APIs which are declared as safe (see below). Likewise,
* there must be no lock or wait dependency from the render thread to a
* thread using other libmpv functions. Basically, the situation that your
* render thread waits for a "not safe" libmpv API function to return must
* not happen. If you ignore this requirement, deadlocks can happen, which
* are made non-fatal with timeouts; then playback quality will be degraded,
* and the message
* mpv_render_context_render() not being called or stuck.
* is logged. If you set MPV_RENDER_PARAM_ADVANCED_CONTROL, you promise that
* this won't happen, and must absolutely guarantee it, or a real deadlock
* will freeze the mpv core thread forever.
*
* libmpv functions which are safe to call from a render thread are:
* - functions marked with "Safe to be called from mpv render API threads."
* - client.h functions which don't have an explicit or implicit mpv_handle
* parameter
* - mpv_render_* functions; but only for the same mpv_render_context pointer.
* If the pointer is different, mpv_render_context_free() is not safe. (The
* reason is that if MPV_RENDER_PARAM_ADVANCED_CONTROL is set, it may have
* to process still queued requests from the core, which it can do only for
* the current context, while requests for other contexts would deadlock.
* Also, it may have to wait and block for the core to terminate the video
* chain to make sure no resources are used after context destruction.)
* - if the mpv_handle parameter refers to a different mpv core than the one
* you're rendering for (very obscure, but allowed)
*
* Note about old libmpv version:
*
* Before API version 1.105 (basically in mpv 0.29.x), simply enabling
* MPV_RENDER_PARAM_ADVANCED_CONTROL could cause deadlock issues. This can
* be worked around by setting the "vd-lavc-dr" option to "no".
* In addition, you were required to call all mpv_render*() API functions
* from the same thread on which mpv_render_context_create() was originally
* run (for the same the mpv_render_context). Not honoring it led to UB
* (deadlocks, use of invalid pthread_t handles), even if you moved your GL
* context to a different thread correctly.
* These problems were addressed in API version 1.105 (mpv 0.30.0).
*
* Context and handle lifecycle
* ----------------------------
*
* Video initialization will fail if the render context was not initialized yet
* (with mpv_render_context_create()), or it will revert to a VO that creates
* its own window.
*
* Currently, there can be only 1 mpv_render_context at a time per mpv core.
*
* Calling mpv_render_context_free() while a VO is still using the render
* context will disable video.
*
* You must free the context with mpv_render_context_free() before the mpv core
* is destroyed. If this doesn't happen, undefined behavior will result.
*/
/**
* Opaque context, returned by mpv_render_context_create().
*/
typedef struct mpv_render_context mpv_render_context;
/**
* Parameters for mpv_render_param (which is used in a few places such as
* mpv_render_context_create().
*
* Also see mpv_render_param for conventions and how to use it.
*/
typedef enum mpv_render_param_type {
/**
* Not a valid value, but also used to terminate a params array. Its value
* is always guaranteed to be 0 (even if the ABI changes in the future).
*/
MPV_RENDER_PARAM_INVALID = 0,
/**
* The render API to use. Valid for mpv_render_context_create().
*
* Type: char*
*
* Defined APIs:
*
* MPV_RENDER_API_TYPE_OPENGL:
* OpenGL desktop 2.1 or later (preferably core profile compatible to
* OpenGL 3.2), or OpenGLES 2.0 or later.
* Providing MPV_RENDER_PARAM_OPENGL_INIT_PARAMS is required.
* It is expected that an OpenGL context is valid and "current" when
* calling mpv_render_* functions (unless specified otherwise). It
* must be the same context for the same mpv_render_context.
*/
MPV_RENDER_PARAM_API_TYPE = 1,
/**
* Required parameters for initializing the OpenGL renderer. Valid for
* mpv_render_context_create().
* Type: mpv_opengl_init_params*
*/
MPV_RENDER_PARAM_OPENGL_INIT_PARAMS = 2,
/**
* Describes a GL render target. Valid for mpv_render_context_render().
* Type: mpv_opengl_fbo*
*/
MPV_RENDER_PARAM_OPENGL_FBO = 3,
/**
* Control flipped rendering. Valid for mpv_render_context_render().
* Type: int*
* If the value is set to 0, render normally. Otherwise, render it flipped,
* which is needed e.g. when rendering to an OpenGL default framebuffer
* (which has a flipped coordinate system).
*/
MPV_RENDER_PARAM_FLIP_Y = 4,
/**
* Control surface depth. Valid for mpv_render_context_render().
* Type: int*
* This implies the depth of the surface passed to the render function in
* bits per channel. If omitted or set to 0, the renderer will assume 8.
* Typically used to control dithering.
*/
MPV_RENDER_PARAM_DEPTH = 5,
/**
* ICC profile blob. Valid for mpv_render_context_set_parameter().
* Type: mpv_byte_array*
* Set an ICC profile for use with the "icc-profile-auto" option. (If the
* option is not enabled, the ICC data will not be used.)
*/
MPV_RENDER_PARAM_ICC_PROFILE = 6,
/**
* Ambient light in lux. Valid for mpv_render_context_set_parameter().
* Type: int*
* This can be used for automatic gamma correction.
*/
MPV_RENDER_PARAM_AMBIENT_LIGHT = 7,
/**
* X11 Display, sometimes used for hwdec. Valid for
* mpv_render_context_create(). The Display must stay valid for the lifetime
* of the mpv_render_context.
* Type: Display*
*/
MPV_RENDER_PARAM_X11_DISPLAY = 8,
/**
* Wayland display, sometimes used for hwdec. Valid for
* mpv_render_context_create(). The wl_display must stay valid for the
* lifetime of the mpv_render_context.
* Type: struct wl_display*
*/
MPV_RENDER_PARAM_WL_DISPLAY = 9,
/**
* Better control about rendering and enabling some advanced features. Valid
* for mpv_render_context_create().
*
* This conflates multiple requirements the API user promises to abide if
* this option is enabled:
*
* - The API user's render thread, which is calling the mpv_render_*()
* functions, never waits for the core. Otherwise deadlocks can happen.
* See "Threading" section.
* - The callback set with mpv_render_context_set_update_callback() can now
* be called even if there is no new frame. The API user should call the
* mpv_render_context_update() function, and interpret the return value
* for whether a new frame should be rendered.
* - Correct functionality is impossible if the update callback is not set,
* or not set soon enough after mpv_render_context_create() (the core can
* block while waiting for you to call mpv_render_context_update(), and
* if the update callback is not correctly set, it will deadlock, or
* block for too long).
*
* In general, setting this option will enable the following features (and
* possibly more):
*
* - "Direct rendering", which means the player decodes directly to a
* texture, which saves a copy per video frame ("vd-lavc-dr" option
* needs to be enabled, and the rendering backend as well as the
* underlying GPU API/driver needs to have support for it).
* - Rendering screenshots with the GPU API if supported by the backend
* (instead of using a suboptimal software fallback via libswscale).
*
* Warning: do not just add this without reading the "Threading" section
* above, and then wondering that deadlocks happen. The
* requirements are tricky. But also note that even if advanced
* control is disabled, not adhering to the rules will lead to
* playback problems. Enabling advanced controls simply makes
* violating these rules fatal.
*
* Type: int*: 0 for disable (default), 1 for enable
*/
MPV_RENDER_PARAM_ADVANCED_CONTROL = 10,
/**
* Return information about the next frame to render. Valid for
* mpv_render_context_get_info().
*
* Type: mpv_render_frame_info*
*
* It strictly returns information about the _next_ frame. The implication
* is that e.g. mpv_render_context_update()'s return value will have
* MPV_RENDER_UPDATE_FRAME set, and the user is supposed to call
* mpv_render_context_render(). If there is no next frame, then the
* return value will have is_valid set to 0.
*/
MPV_RENDER_PARAM_NEXT_FRAME_INFO = 11,
/**
* Enable or disable video timing. Valid for mpv_render_context_render().
*
* Type: int*: 0 for disable, 1 for enable (default)
*
* When video is timed to audio, the player attempts to render video a bit
* ahead, and then do a blocking wait until the target display time is
* reached. This blocks mpv_render_context_render() for up to the amount
* specified with the "video-timing-offset" global option. You can set
* this parameter to 0 to disable this kind of waiting. If you do, it's
* recommended to use the target time value in mpv_render_frame_info to
* wait yourself, or to set the "video-timing-offset" to 0 instead.
*
* Disabling this without doing anything in addition will result in A/V sync
* being slightly off.
*/
MPV_RENDER_PARAM_BLOCK_FOR_TARGET_TIME = 12,
/**
* Use to skip rendering in mpv_render_context_render().
*
* Type: int*: 0 for rendering (default), 1 for skipping
*
* If this is set, you don't need to pass a target surface to the render
* function (and if you do, it's completely ignored). This can still call
* into the lower level APIs (i.e. if you use OpenGL, the OpenGL context
* must be set).
*
* Be aware that the render API will consider this frame as having been
* rendered. All other normal rules also apply, for example about whether
* you have to call mpv_render_context_report_swap(). It also does timing
* in the same way.
*/
MPV_RENDER_PARAM_SKIP_RENDERING = 13,
/**
* Deprecated. Not supported. Use MPV_RENDER_PARAM_DRM_DISPLAY_V2 instead.
* Type : struct mpv_opengl_drm_params*
*/
MPV_RENDER_PARAM_DRM_DISPLAY = 14,
/**
* DRM draw surface size, contains draw surface dimensions.
* Valid for mpv_render_context_create().
* Type : struct mpv_opengl_drm_draw_surface_size*
*/
MPV_RENDER_PARAM_DRM_DRAW_SURFACE_SIZE = 15,
/**
* DRM display, contains drm display handles.
* Valid for mpv_render_context_create().
* Type : struct mpv_opengl_drm_params_v2*
*/
MPV_RENDER_PARAM_DRM_DISPLAY_V2 = 16,
} mpv_render_param_type;
/**
* For backwards compatibility with the old naming of
* MPV_RENDER_PARAM_DRM_DRAW_SURFACE_SIZE
*/
#define MPV_RENDER_PARAM_DRM_OSD_SIZE MPV_RENDER_PARAM_DRM_DRAW_SURFACE_SIZE
/**
* Used to pass arbitrary parameters to some mpv_render_* functions. The
* meaning of the data parameter is determined by the type, and each
* MPV_RENDER_PARAM_* documents what type the value must point to.
*
* Each value documents the required data type as the pointer you cast to
* void* and set on mpv_render_param.data. For example, if MPV_RENDER_PARAM_FOO
* documents the type as Something* , then the code should look like this:
*
* Something foo = {...};
* mpv_render_param param;
* param.type = MPV_RENDER_PARAM_FOO;
* param.data = & foo;
*
* Normally, the data field points to exactly 1 object. If the type is char*,
* it points to a 0-terminated string.
*
* In all cases (unless documented otherwise) the pointers need to remain
* valid during the call only. Unless otherwise documented, the API functions
* will not write to the params array or any data pointed to it.
*
* As a convention, parameter arrays are always terminated by type==0. There
* is no specific order of the parameters required. The order of the 2 fields in
* this struct is guaranteed (even after ABI changes).
*/
typedef struct mpv_render_param {
enum mpv_render_param_type type;
void *data;
} mpv_render_param;
/**
* Predefined values for MPV_RENDER_PARAM_API_TYPE.
*/
#define MPV_RENDER_API_TYPE_OPENGL "opengl"
/**
* Flags used in mpv_render_frame_info.flags. Each value represents a bit in it.
*/
typedef enum mpv_render_frame_info_flag {
/**
* Set if there is actually a next frame. If unset, there is no next frame
* yet, and other flags and fields that require a frame to be queued will
* be unset.
*
* This is set for _any_ kind of frame, even for redraw requests.
*
* Note that when this is unset, it simply means no new frame was
* decoded/queued yet, not necessarily that the end of the video was
* reached. A new frame can be queued after some time.
*
* If the return value of mpv_render_context_render() had the
* MPV_RENDER_UPDATE_FRAME flag set, this flag will usually be set as well,
* unless the frame is rendered, or discarded by other asynchronous events.
*/
MPV_RENDER_FRAME_INFO_PRESENT = 1 << 0,
/**
* If set, the frame is not an actual new video frame, but a redraw request.
* For example if the video is paused, and an option that affects video
* rendering was changed (or any other reason), an update request can be
* issued and this flag will be set.
*
* Typically, redraw frames will not be subject to video timing.
*
* Implies MPV_RENDER_FRAME_INFO_PRESENT.
*/
MPV_RENDER_FRAME_INFO_REDRAW = 1 << 1,
/**
* If set, this is supposed to reproduce the previous frame perfectly. This
* is usually used for certain "video-sync" options ("display-..." modes).
* Typically the renderer will blit the video from a FBO. Unset otherwise.
*
* Implies MPV_RENDER_FRAME_INFO_PRESENT.
*/
MPV_RENDER_FRAME_INFO_REPEAT = 1 << 2,
/**
* If set, the player timing code expects that the user thread blocks on
* vsync (by either delaying the render call, or by making a call to
* mpv_render_context_report_swap() at vsync time).
*
* Implies MPV_RENDER_FRAME_INFO_PRESENT.
*/
MPV_RENDER_FRAME_INFO_BLOCK_VSYNC = 1 << 3,
} mpv_render_frame_info_flag;
/**
* Information about the next video frame that will be rendered. Can be
* retrieved with MPV_RENDER_PARAM_NEXT_FRAME_INFO.
*/
typedef struct mpv_render_frame_info {
/**
* A bitset of mpv_render_frame_info_flag values (i.e. multiple flags are
* combined with bitwise or).
*/
uint64_t flags;
/**
* Absolute time at which the frame is supposed to be displayed. This is in
* the same unit and base as the time returned by mpv_get_time_us(). For
* frames that are redrawn, or if vsync locked video timing is used (see
* "video-sync" option), then this can be 0. The "video-timing-offset"
* option determines how much "headroom" the render thread gets (but a high
* enough frame rate can reduce it anyway). mpv_render_context_render() will
* normally block until the time is elapsed, unless you pass it
* MPV_RENDER_PARAM_BLOCK_FOR_TARGET_TIME = 0.
*/
int64_t target_time;
} mpv_render_frame_info;
/**
* Initialize the renderer state. Depending on the backend used, this will
* access the underlying GPU API and initialize its own objects.
*
* You must free the context with mpv_render_context_free(). Not doing so before
* the mpv core is destroyed may result in memory leaks or crashes.
*
* Currently, only at most 1 context can exist per mpv core (it represents the
* main video output).
*
* You should pass the following parameters:
* - MPV_RENDER_PARAM_API_TYPE to select the underlying backend/GPU API.
* - Backend-specific init parameter, like MPV_RENDER_PARAM_OPENGL_INIT_PARAMS.
* - Setting MPV_RENDER_PARAM_ADVANCED_CONTROL and following its rules is
* strongly recommended.
* - If you want to use hwdec, possibly hwdec interop resources.
*
* @param res set to the context (on success) or NULL (on failure). The value
* is never read and always overwritten.
* @param mpv handle used to get the core (the mpv_render_context won't depend
* on this specific handle, only the core referenced by it)
* @param params an array of parameters, terminated by type==0. It's left
* unspecified what happens with unknown parameters. At least
* MPV_RENDER_PARAM_API_TYPE is required, and most backends will
* require another backend-specific parameter.
* @return error code, including but not limited to:
* MPV_ERROR_UNSUPPORTED: the OpenGL version is not supported
* (or required extensions are missing)
* MPV_ERROR_NOT_IMPLEMENTED: an unknown API type was provided, or
* support for the requested API was not
* built in the used libmpv binary.
* MPV_ERROR_INVALID_PARAMETER: at least one of the provided parameters was
* not valid.
*/
int mpv_render_context_create(mpv_render_context **res, mpv_handle *mpv,
mpv_render_param *params);
/**
* Attempt to change a single parameter. Not all backends and parameter types
* support all kinds of changes.
*
* @param ctx a valid render context
* @param param the parameter type and data that should be set
* @return error code. If a parameter could actually be changed, this returns
* success, otherwise an error code depending on the parameter type
* and situation.
*/
int mpv_render_context_set_parameter(mpv_render_context *ctx,
mpv_render_param param);
/**
* Retrieve information from the render context. This is NOT a counterpart to
* mpv_render_context_set_parameter(), because you generally can't read
* parameters set with it, and this function is not meant for this purpose.
* Instead, this is for communicating information from the renderer back to the
* user. See mpv_render_param_type; entries which support this function
* explicitly mention it, and for other entries you can assume it will fail.
*
* You pass param with param.type set and param.data pointing to a variable
* of the required data type. The function will then overwrite that variable
* with the returned value (at least on success).
*
* @param ctx a valid render context
* @param param the parameter type and data that should be retrieved
* @return error code. If a parameter could actually be retrieved, this returns
* success, otherwise an error code depending on the parameter type
* and situation. MPV_ERROR_NOT_IMPLEMENTED is used for unknown
* param.type, or if retrieving it is not supported.
*/
int mpv_render_context_get_info(mpv_render_context *ctx,
mpv_render_param param);
typedef void (*mpv_render_update_fn)(void *cb_ctx);
/**
* Set the callback that notifies you when a new video frame is available, or
* if the video display configuration somehow changed and requires a redraw.
* Similar to mpv_set_wakeup_callback(), you must not call any mpv API from
* the callback, and all the other listed restrictions apply (such as not
* exiting the callback by throwing exceptions).
*
* This can be called from any thread, except from an update callback. In case
* of the OpenGL backend, no OpenGL state or API is accessed.
*
* Calling this will raise an update callback immediately.
*
* @param callback callback(callback_ctx) is called if the frame should be
* redrawn
* @param callback_ctx opaque argument to the callback
*/
void mpv_render_context_set_update_callback(mpv_render_context *ctx,
mpv_render_update_fn callback,
void *callback_ctx);
/**
* The API user is supposed to call this when the update callback was invoked
* (like all mpv_render_* functions, this has to happen on the render thread,
* and _not_ from the update callback itself).
*
* This is optional if MPV_RENDER_PARAM_ADVANCED_CONTROL was not set (default).
* Otherwise, it's a hard requirement that this is called after each update
* callback. If multiple update callback happened, and the function could not
* be called sooner, it's OK to call it once after the last callback.
*
* If an update callback happens during or after this function, the function
* must be called again at the soonest possible time.
*
* If MPV_RENDER_PARAM_ADVANCED_CONTROL was set, this will do additional work
* such as allocating textures for the video decoder.
*
* @return a bitset of mpv_render_update_flag values (i.e. multiple flags are
* combined with bitwise or). Typically, this will tell the API user
* what should happen next. E.g. if the MPV_RENDER_UPDATE_FRAME flag is
* set, mpv_render_context_render() should be called. If flags unknown
* to the API user are set, or if the return value is 0, nothing needs
* to be done.
*/
uint64_t mpv_render_context_update(mpv_render_context *ctx);
/**
* Flags returned by mpv_render_context_update(). Each value represents a bit
* in the function's return value.
*/
typedef enum mpv_render_update_flag {
/**
* A new video frame must be rendered. mpv_render_context_render() must be
* called.
*/
MPV_RENDER_UPDATE_FRAME = 1 << 0,
} mpv_render_context_flag;
/**
* Render video.
*
* Typically renders the video to a target surface provided via mpv_render_param
* (the details depend on the backend in use). Options like "panscan" are
* applied to determine which part of the video should be visible and how the
* video should be scaled. You can change these options at runtime by using the
* mpv property API.
*
* The renderer will reconfigure itself every time the target surface
* configuration (such as size) is changed.
*
* This function implicitly pulls a video frame from the internal queue and
* renders it. If no new frame is available, the previous frame is redrawn.
* The update callback set with mpv_render_context_set_update_callback()
* notifies you when a new frame was added. The details potentially depend on
* the backends and the provided parameters.
*
* Generally, libmpv will invoke your update callback some time before the video
* frame should be shown, and then lets this function block until the supposed
* display time. This will limit your rendering to video FPS. You can prevent
* this by setting the "video-timing-offset" global option to 0. (This applies
* only to "audio" video sync mode.)
*
* You should pass the following parameters:
* - Backend-specific target object, such as MPV_RENDER_PARAM_OPENGL_FBO.
* - Possibly transformations, such as MPV_RENDER_PARAM_FLIP_Y.
*
* @param ctx a valid render context
* @param params an array of parameters, terminated by type==0. Which parameters
* are required depends on the backend. It's left unspecified what
* happens with unknown parameters.
* @return error code
*/
int mpv_render_context_render(mpv_render_context *ctx, mpv_render_param *params);
/**
* Tell the renderer that a frame was flipped at the given time. This is
* optional, but can help the player to achieve better timing.
*
* Note that calling this at least once informs libmpv that you will use this
* function. If you use it inconsistently, expect bad video playback.
*
* If this is called while no video is initialized, it is ignored.
*
* @param ctx a valid render context
*/
void mpv_render_context_report_swap(mpv_render_context *ctx);
/**
* Destroy the mpv renderer state.
*
* If video is still active (e.g. a file playing), video will be disabled
* forcefully.
*
* @param ctx a valid render context. After this function returns, this is not
* a valid pointer anymore. NULL is also allowed and does nothing.
*/
void mpv_render_context_free(mpv_render_context *ctx);
#ifdef __cplusplus
}
#endif
#endif
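
For orientation, a hedged Swift sketch of the flow this header describes (check mpv_render_context_update() after the update callback fires, render into an FBO, then report the swap). It assumes the vendored headers are exposed to Swift via a bridging header; drawFrame and its parameters are placeholders, not part of this changeset.

    func drawFrame(ctx: OpaquePointer?, width: Int32, height: Int32) {
        // Ask mpv whether a new frame actually needs to be drawn.
        let flags = mpv_render_context_update(ctx)
        guard flags & UInt64(MPV_RENDER_UPDATE_FRAME.rawValue) != 0 else { return }

        var fbo = mpv_opengl_fbo(fbo: 0, w: width, h: height, internal_format: 0)
        var flipY: CInt = 1

        withUnsafeMutablePointer(to: &fbo) { fboPtr in
            withUnsafeMutablePointer(to: &flipY) { flipPtr in
                // The parameter array is terminated by a zeroed entry (type == 0).
                var params = [
                    mpv_render_param(type: MPV_RENDER_PARAM_OPENGL_FBO, data: UnsafeMutableRawPointer(fboPtr)),
                    mpv_render_param(type: MPV_RENDER_PARAM_FLIP_Y, data: UnsafeMutableRawPointer(flipPtr)),
                    mpv_render_param()
                ]
                mpv_render_context_render(ctx, &params)
            }
        }

        // Optional, but helps mpv's frame timing.
        mpv_render_context_report_swap(ctx)
    }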

216
Vendor/mpv/include/render_gl.h vendored Normal file
View File

@@ -0,0 +1,216 @@
/* Copyright (C) 2018 the mpv developers
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#ifndef MPV_CLIENT_API_RENDER_GL_H_
#define MPV_CLIENT_API_RENDER_GL_H_
#include "render.h"
#ifdef __cplusplus
extern "C" {
#endif
/**
* OpenGL backend
* --------------
*
* This header contains definitions for using OpenGL with the render.h API.
*
* OpenGL interop
* --------------
*
* The OpenGL backend has some special rules, because OpenGL itself uses
* implicit per-thread contexts, which causes additional API problems.
*
* This assumes the OpenGL context lives on a certain thread controlled by the
* API user. All mpv_render_* APIs have to be assumed to implicitly use the
* OpenGL context if you pass a mpv_render_context using the OpenGL backend,
* unless specified otherwise.
*
* The OpenGL context is indirectly accessed through the OpenGL function
* pointers returned by the get_proc_address callback in mpv_opengl_init_params.
* Generally, mpv will not load the system OpenGL library when using this API.
*
* OpenGL state
* ------------
*
* OpenGL has a large amount of implicit state. All the mpv functions mentioned
* above expect that the OpenGL state is reasonably set to OpenGL standard
* defaults. Likewise, mpv will attempt to leave the OpenGL context with
* standard defaults. The following state is excluded from this:
*
* - the glViewport state
* - the glScissor state (but GL_SCISSOR_TEST is in its default value)
* - glBlendFuncSeparate() state (but GL_BLEND is in its default value)
* - glClearColor() state
* - mpv may overwrite the callback set with glDebugMessageCallback()
* - mpv always disables GL_DITHER at init
*
* Messing with the state could be avoided by creating shared OpenGL contexts,
* but this is avoided for the sake of compatibility and interoperability.
*
* On OpenGL 2.1, mpv will strictly call functions like glGenTextures() to
* create OpenGL objects. You will have to do the same. This ensures that
* objects created by mpv and the API users don't clash. Also, legacy state
* must be either in its defaults, or not interfere with core state.
*
* API use
* -------
*
* The mpv_render_* API is used. That API supports multiple backends, and this
* section documents specifics for the OpenGL backend.
*
* Use mpv_render_context_create() with MPV_RENDER_PARAM_API_TYPE set to
* MPV_RENDER_API_TYPE_OPENGL, and MPV_RENDER_PARAM_OPENGL_INIT_PARAMS provided.
*
* Call mpv_render_context_render() with MPV_RENDER_PARAM_OPENGL_FBO to render
* the video frame to an FBO.
*
* Hardware decoding
* -----------------
*
* Hardware decoding via this API is fully supported, but requires some
* additional setup. (At least if direct hardware decoding modes are wanted,
* instead of copying back surface data from GPU to CPU RAM.)
*
* There may be certain requirements on the OpenGL implementation:
*
* - Windows: ANGLE is required (although in theory GL/DX interop could be used)
* - Intel/Linux: EGL is required, and also the native display resource needs
* to be provided (e.g. MPV_RENDER_PARAM_X11_DISPLAY for X11 and
* MPV_RENDER_PARAM_WL_DISPLAY for Wayland)
* - nVidia/Linux: Both GLX and EGL should work (GLX is required if vdpau is
* used, e.g. due to old drivers.)
* - OSX: CGL is required (CGLGetCurrentContext() returning non-NULL)
* - iOS: EAGL is required (EAGLContext.currentContext returning non-nil)
*
* Once these things are setup, hardware decoding can be enabled/disabled at
* any time by setting the "hwdec" property.
*/
/**
* For initializing the mpv OpenGL state via MPV_RENDER_PARAM_OPENGL_INIT_PARAMS.
*/
typedef struct mpv_opengl_init_params {
/**
* This retrieves OpenGL function pointers, and will use them in subsequent
* operation.
* Usually, you can simply call the GL context APIs from this callback (e.g.
* glXGetProcAddressARB or wglGetProcAddress), but some APIs do not always
* return pointers for all standard functions (even if present); in this
* case you have to compensate by looking up these functions yourself when
* libmpv wants to resolve them through this callback.
* libmpv will not normally attempt to resolve GL functions on its own, nor
* does it link to GL libraries directly.
*/
void *(*get_proc_address)(void *ctx, const char *name);
/**
* Value passed as ctx parameter to get_proc_address().
*/
void *get_proc_address_ctx;
/**
* This should not be used. It is deprecated and will be removed or ignored
* when the opengl_cb API is removed.
*/
const char *extra_exts;
} mpv_opengl_init_params;
/**
* For MPV_RENDER_PARAM_OPENGL_FBO.
*/
typedef struct mpv_opengl_fbo {
/**
* Framebuffer object name. This must be either a valid FBO generated by
* glGenFramebuffers() that is complete and color-renderable, or 0. If the
* value is 0, this refers to the OpenGL default framebuffer.
*/
int fbo;
/**
* Valid dimensions. This must refer to the size of the framebuffer. This
* must always be set.
*/
int w, h;
/**
* Underlying texture internal format (e.g. GL_RGBA8), or 0 if unknown. If
* this is the default framebuffer, this can be an equivalent.
*/
int internal_format;
} mpv_opengl_fbo;
/**
* Deprecated. For MPV_RENDER_PARAM_DRM_DISPLAY.
*/
typedef struct mpv_opengl_drm_params {
int fd;
int crtc_id;
int connector_id;
struct _drmModeAtomicReq **atomic_request_ptr;
int render_fd;
} mpv_opengl_drm_params;
/**
* For MPV_RENDER_PARAM_DRM_DRAW_SURFACE_SIZE.
*/
typedef struct mpv_opengl_drm_draw_surface_size {
/**
* size of the draw plane surface in pixels.
*/
int width, height;
} mpv_opengl_drm_draw_surface_size;
/**
* For MPV_RENDER_PARAM_DRM_DISPLAY_V2.
*/
typedef struct mpv_opengl_drm_params_v2 {
/**
* DRM fd (int). Set to -1 if invalid.
*/
int fd;
/**
* Currently used crtc id
*/
int crtc_id;
/**
* Currently used connector id
*/
int connector_id;
/**
* Pointer to a drmModeAtomicReq pointer that is being used for the renderloop.
* This pointer should hold a pointer to the atomic request pointer
* The atomic request pointer is usually changed at every renderloop.
*/
struct _drmModeAtomicReq **atomic_request_ptr;
/**
* DRM render node. Used for VAAPI interop.
* Set to -1 if invalid.
*/
int render_fd;
} mpv_opengl_drm_params_v2;
/**
* For backwards compatibility with the old naming of mpv_opengl_drm_draw_surface_size
*/
#define mpv_opengl_drm_osd_size mpv_opengl_drm_draw_surface_size
#ifdef __cplusplus
}
#endif
#endif
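
The one piece the embedding app must supply here is get_proc_address. A hedged Swift sketch using dlsym-based lookup follows; the app's actual MPVClient may resolve GL symbols differently (for example via CFBundle), so treat this only as an illustration of the shape of mpv_opengl_init_params.

    import Darwin

    // RTLD_DEFAULT is a macro and does not import into Swift, hence the raw -2.
    let getProcAddress: @convention(c) (UnsafeMutableRawPointer?, UnsafePointer<CChar>?) -> UnsafeMutableRawPointer? = { _, name in
        guard let name = name else { return nil }
        return dlsym(UnsafeMutableRawPointer(bitPattern: -2), name)
    }

    var glInitParams = mpv_opengl_init_params(
        get_proc_address: getProcAddress,
        get_proc_address_ctx: nil,
        extra_exts: nil
    )
    // glInitParams is then passed as MPV_RENDER_PARAM_OPENGL_INIT_PARAMS,
    // alongside MPV_RENDER_PARAM_API_TYPE = MPV_RENDER_API_TYPE_OPENGL,
    // when calling mpv_render_context_create().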

240
Vendor/mpv/include/stream_cb.h vendored Normal file
View File

@@ -0,0 +1,240 @@
/* Copyright (C) 2017 the mpv developers
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#ifndef MPV_CLIENT_API_STREAM_CB_H_
#define MPV_CLIENT_API_STREAM_CB_H_
#include "client.h"
#ifdef __cplusplus
extern "C" {
#endif
/**
* Warning: this API is not stable yet.
*
* Overview
* --------
*
* This API can be used to make mpv read from a stream with a custom
* implementation. This interface is inspired by funopen on BSD and
* fopencookie on linux. The stream is backed by user-defined callbacks
* which can implement customized open, read, seek, size and close behaviors.
*
* Usage
* -----
*
* Register your stream callbacks with the mpv_stream_cb_add_ro() function. You
* have to provide a mpv_stream_cb_open_ro_fn callback to it (open_fn argument).
*
* Once registered, you can `loadfile myprotocol://myfile`. Your open_fn will be
* invoked with the URI and you must fill out the provided mpv_stream_cb_info
* struct. This includes your stream callbacks (like read_fn), and an opaque
* cookie, which will be passed as the first argument to all the remaining
* stream callbacks.
*
* Note that your custom callbacks must not invoke libmpv APIs as that would
* cause a deadlock. (Unless you call them on a different mpv_handle than the one the
* callback was registered for, and the mpv_handles refer to different mpv
* instances.)
*
* Stream lifetime
* ---------------
*
* A stream remains valid until its close callback has been called. It's up to
* libmpv to call the close callback, and the libmpv user cannot close it
* directly with the stream_cb API.
*
* For example, if you consider your custom stream to have become suddenly invalid
* (maybe because the underlying stream died), libmpv will continue using your
* stream. All you can do is return errors from each callback, until libmpv
* gives up and closes it.
*
* Protocol registration and lifetime
* ----------------------------------
*
* Protocols remain registered until the mpv instance is terminated. This means
* in particular that it can outlive the mpv_handle that was used to register
* it, but once mpv_terminate_destroy() is called, your registered callbacks
* will not be called again.
*
* Protocol unregistration is finished after the mpv core has been destroyed
* (e.g. after mpv_terminate_destroy() has returned).
*
* If you do not call mpv_terminate_destroy() yourself (e.g. plugin-style code),
* you will have to deal with the registration or even streams outliving your
* code. Here are some possible ways to do this:
* - call mpv_terminate_destroy(), which destroys the core, and will make sure
* all streams are closed once this function returns
* - you refcount all resources your stream "cookies" reference, so that it
* doesn't matter if streams live longer than expected
* - create "cancellation" semantics: after your protocol has been unregistered,
* notify all your streams that are still opened, and make them drop all
* referenced resources - then return errors from the stream callbacks as
* long as the stream is still opened
*
*/
/**
* Read callback used to implement a custom stream. The semantics of the
* callback match read(2) in blocking mode. Short reads are allowed (you can
* return fewer bytes than requested, and libmpv will retry reading the rest
* with another call). If no data can be immediately read, the callback must
* block until there is new data. A return of 0 will be interpreted as final
* EOF, although libmpv might retry the read, or seek to a different position.
*
* @param cookie opaque cookie identifying the stream,
* returned from mpv_stream_cb_open_fn
* @param buf buffer to read data into
* @param nbytes size of the buffer, i.e. the maximum number of bytes to read
* @return number of bytes read into the buffer
* @return 0 on EOF
* @return -1 on error
*/
typedef int64_t (*mpv_stream_cb_read_fn)(void *cookie, char *buf, uint64_t nbytes);
/**
* Seek callback used to implement a custom stream.
*
* Note that mpv will issue a seek to position 0 immediately after opening. This
* is used to test whether the stream is seekable (since seekability might
* depend on the URI contents, not just the protocol). Return
* MPV_ERROR_UNSUPPORTED if seeking is not implemented for this stream. This
* seek also serves to establish the fact that streams start at position 0.
*
* This callback can be NULL, in which case it behaves as if always returning
* MPV_ERROR_UNSUPPORTED.
*
* @param cookie opaque cookie identifying the stream,
* returned from mpv_stream_cb_open_fn
* @param offset target absolute stream position
* @return the resulting offset of the stream
* MPV_ERROR_UNSUPPORTED or MPV_ERROR_GENERIC if the seek failed
*/
typedef int64_t (*mpv_stream_cb_seek_fn)(void *cookie, int64_t offset);
/**
* Size callback used to implement a custom stream.
*
* Return MPV_ERROR_UNSUPPORTED if no size is known.
*
* This callback can be NULL, in which case it behaves as if always returning
* MPV_ERROR_UNSUPPORTED.
*
* @param cookie opaque cookie identifying the stream,
* returned from mpv_stream_cb_open_fn
* @return the total size in bytes of the stream
*/
typedef int64_t (*mpv_stream_cb_size_fn)(void *cookie);
/**
* Close callback used to implement a custom stream.
*
* @param cookie opaque cookie identifying the stream,
* returned from mpv_stream_cb_open_fn
*/
typedef void (*mpv_stream_cb_close_fn)(void *cookie);
/**
* Cancel callback used to implement a custom stream.
*
* This callback is used to interrupt any current or future read and seek
* operations. It will be called from a thread other than the demux
* thread, and should not block.
*
* This callback can be NULL.
*
* Available since API 1.106.
*
* @param cookie opaque cookie identifying the stream,
* returned from mpv_stream_cb_open_fn
*/
typedef void (*mpv_stream_cb_cancel_fn)(void *cookie);
/**
* See mpv_stream_cb_open_ro_fn callback.
*/
typedef struct mpv_stream_cb_info {
/**
* Opaque user-provided value, which will be passed to the other callbacks.
* The close callback will be called to release the cookie. It is not
* interpreted by mpv. It doesn't even need to be a valid pointer.
*
* The user sets this in the mpv_stream_cb_open_ro_fn callback.
*/
void *cookie;
/**
* Callbacks set by the user in the mpv_stream_cb_open_ro_fn callback. Some
* of them are optional, and can be left unset.
*
* The following callbacks are mandatory: read_fn, close_fn
*/
mpv_stream_cb_read_fn read_fn;
mpv_stream_cb_seek_fn seek_fn;
mpv_stream_cb_size_fn size_fn;
mpv_stream_cb_close_fn close_fn;
mpv_stream_cb_cancel_fn cancel_fn; /* since API 1.106 */
} mpv_stream_cb_info;
/**
* Open callback used to implement a custom read-only (ro) stream. The user
* must set the callback fields in the passed info struct. The cookie field
* can also be set to store state associated with the stream instance.
*
* Note that the info struct is valid only for the duration of this callback.
* You can't change the callbacks or the pointer to the cookie at a later point.
*
* Each stream instance created by the open callback can have different
* callbacks.
*
* The close_fn callback will terminate the stream instance. The pointers to
* your callbacks and cookie will be discarded, and the callbacks will not be
* called again.
*
* @param user_data opaque user data provided via mpv_stream_cb_add_ro()
* @param uri name of the stream to be opened (with protocol prefix)
* @param info fields which the user should fill
* @return 0 on success, MPV_ERROR_LOADING_FAILED if the URI cannot be opened.
*/
typedef int (*mpv_stream_cb_open_ro_fn)(void *user_data, char *uri,
mpv_stream_cb_info *info);
/**
* Add a custom stream protocol. This will register a protocol handler under
* the given protocol prefix, and invoke the given callbacks if a URI with the
* matching protocol prefix is opened.
*
* The "ro" is for read-only - only read-only streams can be registered with
* this function.
*
* The callback remains registered until the mpv core is destroyed.
*
* If a custom stream with the same name is already registered, then the
* MPV_ERROR_INVALID_PARAMETER error is returned.
*
* @param protocol protocol prefix, for example "foo" for "foo://" URIs
* @param user_data opaque pointer passed into the mpv_stream_cb_open_ro_fn
* callback.
* @return error code
*/
int mpv_stream_cb_add_ro(mpv_handle *ctx, const char *protocol, void *user_data,
mpv_stream_cb_open_ro_fn open_fn);
#ifdef __cplusplus
}
#endif
#endif
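
To make the contract above concrete, a sketch under stated assumptions (not code from this repository): the open callback fills mpv_stream_cb_info with a cookie plus callbacks, and the same cookie is passed back to read/seek/size/close. The "memory" protocol name and the mem_* helpers are hypothetical.

/* Sketch only: a read-only in-memory stream registered via mpv_stream_cb_add_ro(). */
#include <stdlib.h>
#include <string.h>
#include <mpv/client.h>
#include <mpv/stream_cb.h>

struct mem_stream {          /* cookie: one instance per opened stream */
    const char *data;
    size_t size;
    size_t pos;
};

static int64_t mem_read(void *cookie, char *buf, uint64_t nbytes)
{
    struct mem_stream *s = cookie;
    size_t left = s->size - s->pos;
    size_t n = nbytes < left ? (size_t)nbytes : left;
    memcpy(buf, s->data + s->pos, n);
    s->pos += n;
    return (int64_t)n;       /* 0 signals EOF, as documented above */
}

static int64_t mem_seek(void *cookie, int64_t offset)
{
    struct mem_stream *s = cookie;
    if (offset < 0 || (uint64_t)offset > s->size)
        return MPV_ERROR_GENERIC;
    s->pos = (size_t)offset;
    return offset;
}

static int64_t mem_size(void *cookie)
{
    return (int64_t)((struct mem_stream *)cookie)->size;
}

static void mem_close(void *cookie)
{
    free(cookie);
}

static int mem_open(void *user_data, char *uri, mpv_stream_cb_info *info)
{
    (void)uri;               /* a real handler would parse the URI */
    struct mem_stream *s = calloc(1, sizeof(*s));
    if (!s)
        return MPV_ERROR_LOADING_FAILED;
    s->data = user_data;     /* assumption: user_data points at a NUL-terminated buffer */
    s->size = strlen(s->data);
    info->cookie = s;
    info->read_fn = mem_read;
    info->seek_fn = mem_seek;
    info->size_fn = mem_size;
    info->close_fn = mem_close;
    return 0;
}

Registration would happen once, after mpv_initialize(), e.g. mpv_stream_cb_add_ro(mpv, "memory", buffer, mem_open); afterwards `loadfile memory://anything` is routed through these callbacks.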

BIN
Vendor/mpv/macOS/lib/libX11.6.dylib vendored Executable file

Binary file not shown.

BIN
Vendor/mpv/macOS/lib/libXau.6.dylib vendored Executable file

Binary file not shown.

BIN
Vendor/mpv/macOS/lib/libXdmcp.6.dylib vendored Executable file

Binary file not shown.

BIN
Vendor/mpv/macOS/lib/libass.9.dylib vendored Executable file

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

BIN
Vendor/mpv/macOS/lib/libavutil.57.17.100.dylib vendored Executable file

Binary file not shown.

BIN
Vendor/mpv/macOS/lib/libbrotlicommon.1.dylib vendored Executable file

Binary file not shown.

BIN
Vendor/mpv/macOS/lib/libbrotlidec.1.0.9.dylib vendored Executable file

Binary file not shown.

BIN
Vendor/mpv/macOS/lib/libcrypto.3.dylib vendored Executable file

Binary file not shown.

BIN
Vendor/mpv/macOS/lib/libfontconfig.1.dylib vendored Executable file

Binary file not shown.

BIN
Vendor/mpv/macOS/lib/libfreetype.6.dylib vendored Executable file

Binary file not shown.

BIN
Vendor/mpv/macOS/lib/libfribidi.0.dylib vendored Executable file

Binary file not shown.

Binary file not shown.

BIN
Vendor/mpv/macOS/lib/libharfbuzz.0.dylib vendored Executable file

Binary file not shown.

BIN
Vendor/mpv/macOS/lib/liblcms2.2.dylib vendored Executable file

Binary file not shown.

BIN
Vendor/mpv/macOS/lib/libmpv.dylib vendored Executable file

Binary file not shown.

Some files were not shown because too many files have changed in this diff.