Model improvements

Arkadiusz Fal 2021-06-15 23:21:57 +02:00
parent da22b06cc1
commit 4dd6f13df9
4 changed files with 106 additions and 83 deletions

View File

@@ -4,33 +4,23 @@ import Logging
 import SwiftUI
 
 struct PlayerViewController: UIViewControllerRepresentable {
-    @ObservedObject private var state = PlayerState()
-
-    @ObservedObject var video: Video
-
-    var player = AVPlayer()
-    var composition = AVMutableComposition()
+    @ObservedObject private var state: PlayerState
+
+    var video: Video
 
     let logger = Logger(label: "net.arekf.Pearvidious.pvc")
 
-    var playerItem: AVPlayerItem {
-        let playerItem = AVPlayerItem(asset: composition)
-
-        playerItem.externalMetadata = [makeMetadataItem(.commonIdentifierTitle, value: video.title)]
-        playerItem.preferredForwardBufferDuration = 10
-
-        return playerItem
-    }
-
     init(video: Video) {
         self.video = video
+        state = PlayerState(video)
 
-        loadStream(video.defaultStream)
+        loadStream(video.defaultStream, loadBest: false)
     }
 
-    func loadStream(_ stream: Stream?) {
+    func loadStream(_ stream: Stream?, loadBest: Bool = false) {
         if stream != state.streamToLoad {
             state.loadStream(stream)
-            addTracksAndLoadAssets(state.streamToLoad, loadBest: true)
+            addTracksAndLoadAssets(stream!, loadBest: loadBest)
         }
     }
@@ -52,62 +42,49 @@ struct PlayerViewController: UIViewControllerRepresentable {
         }
     }
 
-    func addTrack(_ asset: AVURLAsset, type: AVMediaType) {
-        guard let assetTrack = asset.tracks(withMediaType: type).first else {
-            return
-        }
-
-        if let track = composition.tracks(withMediaType: type).first {
-            logger.info("removing \(type) track")
-            composition.removeTrack(track)
-        }
-
-        let track = composition.addMutableTrack(withMediaType: type, preferredTrackID: kCMPersistentTrackID_Invalid)!
-
-        try! track.insertTimeRange(
-            CMTimeRange(start: .zero, duration: CMTime(seconds: video.length, preferredTimescale: 1)),
-            of: assetTrack,
-            at: .zero
-        )
-
-        logger.info("inserted \(type) track")
+    func addTrack(_ asset: AVURLAsset, stream: Stream, type: AVMediaType? = nil) {
+        let types: [AVMediaType] = stream.type == .adaptive ? [type!] : [.video, .audio]
+
+        types.forEach { type in
+            guard let assetTrack = asset.tracks(withMediaType: type).first else {
+                return
+            }
+
+            if let track = state.composition.tracks(withMediaType: type).first {
+                logger.info("removing \(type) track")
+                state.composition.removeTrack(track)
+            }
+
+            let track = state.composition.addMutableTrack(withMediaType: type, preferredTrackID: kCMPersistentTrackID_Invalid)!
+
+            try! track.insertTimeRange(
+                CMTimeRange(start: .zero, duration: CMTime(seconds: video.length, preferredTimescale: 1)),
+                of: assetTrack,
+                at: .zero
+            )
+
+            logger.info("inserted \(type) track")
+        }
     }
     func handleAssetLoad(_ stream: Stream, type: AVMediaType, loadBest: Bool = false) {
         logger.info("handling asset load: \(stream.type), \(stream.description)")
 
         guard stream != state.currentStream else {
             logger.warning("IGNORING assets loaded: \(stream.type), \(stream.description)")
             return
         }
 
-        let loadedAssets = stream.assets.filter { $0.statusOfValue(forKey: "playable", error: nil) == .loaded }
-
-        loadedAssets.forEach { asset in
-            logger.info("both assets loaded: \(stream.type), \(stream.description)")
-
-            if stream.type == .stream {
-                addTrack(asset, type: .video)
-                addTrack(asset, type: .audio)
-            } else {
-                addTrack(asset, type: type)
-            }
+        stream.loadedAssets.forEach { asset in
+            addTrack(asset, stream: stream, type: type)
 
             if stream.assetsLoaded {
-                let resumeAt = player.currentTime()
-                if resumeAt.seconds > 0 {
-                    state.seekTo = resumeAt
-                }
-
-                logger.warning("replacing player item")
-                player.replaceCurrentItem(with: playerItem)
-                state.streamDidLoad(stream)
-
-                if let time = state.seekTo {
-                    player.seek(to: time)
-                }
-
-                player.play()
+                DispatchQueue.main.async {
+                    logger.info("ALL assets loaded: \(stream.type), \(stream.description)")
+                    state.loadStreamIntoPlayer(stream)
+                }
 
                 if loadBest {
                     loadBestStream()
                 }
@@ -115,22 +92,12 @@ struct PlayerViewController: UIViewControllerRepresentable {
         }
     }
 
-    private func makeMetadataItem(_ identifier: AVMetadataIdentifier, value: Any) -> AVMetadataItem {
-        let item = AVMutableMetadataItem()
-
-        item.identifier = identifier
-        item.value = value as? NSCopying & NSObjectProtocol
-        item.extendedLanguageTag = "und"
-
-        return item.copy() as! AVMetadataItem
-    }
-
     func makeUIViewController(context _: Context) -> AVPlayerViewController {
         let controller = AVPlayerViewController()
 
         controller.transportBarCustomMenuItems = [streamingQualityMenu]
         controller.modalPresentationStyle = .fullScreen
-        controller.player = player
+        controller.player = state.player
         controller.player?.automaticallyWaitsToMinimizeStalling = true
 
         return controller
@@ -157,13 +124,11 @@ struct PlayerViewController: UIViewControllerRepresentable {
             let image = self.state.currentStream == stream ? UIImage(systemName: "checkmark") : nil
 
             return UIAction(title: stream.description, image: image) { _ in
-                DispatchQueue.main.async {
-                    guard state.currentStream != stream else {
-                        return
-                    }
-                    state.streamToLoad = stream
-                    addTracksAndLoadAssets(state.streamToLoad)
-                }
+                guard state.currentStream != stream else {
+                    return
+                }
+
+                loadStream(stream)
             }
         }
     }
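
The addTrack changes above lean on a standard AVFoundation pattern: tracks are copied out of the loaded AVURLAssets into the shared AVMutableComposition, and the player item is built from that composition. The following standalone sketch shows the same pattern in isolation; the function name, parameters, and error handling are illustrative and not part of this commit.

import AVFoundation

// Hypothetical helper (not in the repository): copy the first video and audio
// track of an asset into a fresh mutable composition covering `duration`.
func makeComposition(from asset: AVURLAsset, duration: CMTime) -> AVMutableComposition {
    let composition = AVMutableComposition()

    for type in [AVMediaType.video, .audio] {
        // Skip media types the asset does not provide (e.g. audio-only assets).
        guard let sourceTrack = asset.tracks(withMediaType: type).first,
              let compositionTrack = composition.addMutableTrack(
                  withMediaType: type,
                  preferredTrackID: kCMPersistentTrackID_Invalid
              )
        else { continue }

        // Insert the whole source track at the start of the composition.
        try? compositionTrack.insertTimeRange(
            CMTimeRange(start: .zero, duration: duration),
            of: sourceTrack,
            at: .zero
        )
    }

    return composition
}

An AVPlayerItem created from such a composition behaves like any other item, which is what lets handleAssetLoad switch streams by swapping composition tracks rather than player URLs.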

View File

@@ -9,8 +9,4 @@ final class AudioVideoStream: Stream {
         super.init(audioAsset: avAsset, videoAsset: avAsset, resolution: resolution, type: type, encoding: encoding)
     }
-
-    override var assets: [AVURLAsset] {
-        [videoAsset]
-    }
 }

View File

@@ -5,13 +5,31 @@ import Logging
 final class PlayerState: ObservableObject {
     let logger = Logger(label: "net.arekf.Pearvidious.ps")
 
+    var video: Video
+
     @Published private(set) var currentStream: Stream!
     @Published var streamToLoad: Stream!
 
-    @Published var seekTo: CMTime?
+    @Published var savedTime: CMTime?
     @Published var streamLoading = false
 
+    @Published var player = AVPlayer()
+    var composition = AVMutableComposition()
+
+    var playerItem: AVPlayerItem {
+        let playerItem = AVPlayerItem(asset: composition)
+
+        playerItem.externalMetadata = [makeMetadataItem(.commonIdentifierTitle, value: video.title)]
+        playerItem.preferredForwardBufferDuration = 10
+
+        return playerItem
+    }
+
+    init(_ video: Video) {
+        self.video = video
+    }
+
     func cancelLoadingStream(_ stream: Stream) {
         guard streamToLoad == stream else {
             return
@@ -30,9 +48,10 @@ final class PlayerState: ObservableObject {
         streamToLoad?.cancelLoadingAssets()
 
-        streamLoading = true
-        streamToLoad = stream
+        DispatchQueue.main.async {
+            self.streamLoading = true
+            self.streamToLoad = stream
+        }
 
         logger.info("replace streamToLoad: \(streamToLoad?.description ?? "nil"), streamLoading \(streamLoading)")
     }
@@ -49,4 +68,43 @@ final class PlayerState: ObservableObject {
         logger.info("after: toLoad: \(streamToLoad?.description ?? "nil"), current \(currentStream?.description ?? "nil"), loading \(streamLoading)")
     }
+
+    func loadStreamIntoPlayer(_ stream: Stream) {
+        logger.warning("loading \(stream.description) to player")
+
+        beforeLoadStreamIntoPlayer()
+
+        player.replaceCurrentItem(with: playerItem)
+        streamDidLoad(stream)
+
+        afterLoadStreamIntoPlayer()
+    }
+
+    func beforeLoadStreamIntoPlayer() {
+        let currentTime = player.currentTime()
+
+        guard currentTime.seconds > 0 else {
+            return
+        }
+
+        savedTime = currentTime
+    }
+
+    func afterLoadStreamIntoPlayer() {
+        if let time = savedTime {
+            player.seek(to: time)
+        }
+
+        player.play()
+    }
+
+    private func makeMetadataItem(_ identifier: AVMetadataIdentifier, value: Any) -> AVMetadataItem {
+        let item = AVMutableMetadataItem()
+
+        item.identifier = identifier
+        item.value = value as? NSCopying & NSObjectProtocol
+        item.extendedLanguageTag = "und"
+
+        return item.copy() as! AVMetadataItem
+    }
 }
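
The beforeLoadStreamIntoPlayer and afterLoadStreamIntoPlayer helpers added here bracket the item swap with a save-and-restore of the playback position. A condensed sketch of that pattern, with a hypothetical function name and only stock AVFoundation types:

import AVFoundation

// Remember the playhead, swap the item, then seek back and resume.
// Mirrors the savedTime flow in PlayerState; the function itself is illustrative.
func replaceItemKeepingPosition(on player: AVPlayer, with item: AVPlayerItem) {
    let savedTime = player.currentTime()

    player.replaceCurrentItem(with: item)

    // Only seek when playback had actually started.
    if savedTime.seconds > 0 {
        player.seek(to: savedTime)
    }

    player.play()
}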

View File

@@ -31,6 +31,10 @@ class Stream: Equatable {
         assets.allSatisfy { $0.statusOfValue(forKey: "playable", error: nil) == .loaded }
     }
 
+    var loadedAssets: [AVURLAsset] {
+        assets.filter { $0.statusOfValue(forKey: "playable", error: nil) == .loaded }
+    }
+
     func cancelLoadingAssets() {
         assets.forEach { $0.cancelLoading() }
         audioAsset = AVURLAsset(url: audioAsset.url)
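
The new loadedAssets property, like the existing assetsLoaded, keys off AVAsynchronousKeyValueLoading: an asset only counts once its "playable" key has finished loading. A small sketch of that loading flow; the helper and its completion signature are illustrative, not part of the commit:

import AVFoundation

// Ask the asset to load its "playable" key off the main thread, then report
// whether it reached the .loaded status that loadedAssets filters on.
func loadPlayableKey(of asset: AVURLAsset, completion: @escaping (Bool) -> Void) {
    asset.loadValuesAsynchronously(forKeys: ["playable"]) {
        var error: NSError?
        let status = asset.statusOfValue(forKey: "playable", error: &error)

        completion(status == .loaded)
    }
}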