Compare commits

2 Commits

4855f9bead  fix tvOS build
    Author: Toni Förster
    Signed-off-by: Toni Förster <toni.foerster@gmail.com>
    Date: 2024-09-13 11:48:40 +02:00

a65ed67751  proper audio interrupt and route change handling
    - set AVAudioSession inactive on pause and stop
    - handle audio route changes
    Author: Toni Förster
    Date: 2024-09-13 11:21:07 +02:00

5 changed files with 174 additions and 92 deletions
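
Taken together, these commits tie the AVAudioSession lifecycle to playback state: the AVPlayer and MPV backends call PlayerModel.setAudioSessionActive(_:) when playback starts, pauses, or stops, and PlayerModel registers observers for AVAudioSession interruption and route-change notifications. The following is a minimal, self-contained sketch of that pattern, not the app's actual code: SimplePlayerModel and the print-based logging are illustrative stand-ins for PlayerModel and its logger, and the short main-queue delay the commit wraps around setActive(_:) is omitted for brevity.

import AVFoundation
import Foundation

// Minimal sketch of the pattern these commits introduce (iOS/tvOS only;
// AVAudioSession is unavailable on macOS, hence the #if guards in the diff).
// SimplePlayerModel is an illustrative stand-in for the app's PlayerModel.
final class SimplePlayerModel: NSObject {
    override init() {
        super.init()
        #if !os(macOS)
        // Interruptions (e.g. an incoming call) and route changes
        // (e.g. headphones unplugged) are observed on the shared session.
        NotificationCenter.default.addObserver(
            self,
            selector: #selector(handleInterruption(_:)),
            name: AVAudioSession.interruptionNotification,
            object: nil
        )
        NotificationCenter.default.addObserver(
            self,
            selector: #selector(handleRouteChange(_:)),
            name: AVAudioSession.routeChangeNotification,
            object: AVAudioSession.sharedInstance()
        )
        #endif
    }

    #if !os(macOS)
    deinit {
        NotificationCenter.default.removeObserver(self)
    }

    // Backends call this with `true` when playback starts and `false`
    // on pause/stop, so other apps' audio can resume.
    func setAudioSessionActive(_ active: Bool) {
        do {
            try AVAudioSession.sharedInstance().setActive(active)
        } catch {
            print("Error setting audio session active=\(active): \(error)")
        }
    }

    @objc private func handleInterruption(_ notification: Notification) {
        guard let typeValue = notification.userInfo?[AVAudioSessionInterruptionTypeKey] as? UInt,
              let type = AVAudioSession.InterruptionType(rawValue: typeValue) else { return }
        print("Audio session interruption: \(type)")
    }

    @objc private func handleRouteChange(_ notification: Notification) {
        guard let reasonValue = notification.userInfo?[AVAudioSessionRouteChangeReasonKey] as? UInt,
              let reason = AVAudioSession.RouteChangeReason(rawValue: reasonValue) else { return }
        print("Audio route change: \(reason)")
    }
    #endif
}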


@@ -181,7 +181,9 @@ final class AVPlayerBackend: PlayerBackend {
{
seek(to: 0, seekType: .loopRestart)
}
#if !os(macOS)
model.setAudioSessionActive(true)
#endif
avPlayer.play()
// Setting hasStarted to true the first time player started
@@ -196,7 +198,9 @@ final class AVPlayerBackend: PlayerBackend {
guard avPlayer.timeControlStatus != .paused else {
return
}
#if !os(macOS)
model.setAudioSessionActive(false)
#endif
avPlayer.pause()
model.objectWillChange.send()
}
@@ -210,6 +214,9 @@ final class AVPlayerBackend: PlayerBackend {
}
func stop() {
#if !os(macOS)
model.setAudioSessionActive(false)
#endif
avPlayer.replaceCurrentItem(with: nil)
hasStarted = false
}
@@ -364,11 +371,7 @@ final class AVPlayerBackend: PlayerBackend {
let startPlaying = {
#if !os(macOS)
do {
try AVAudioSession.sharedInstance().setActive(true)
} catch {
self.logger.error("Error setting up audio session: \(error)")
}
self.model.setAudioSessionActive(true)
#endif
self.setRate(self.model.currentRate)


@@ -23,14 +23,15 @@ final class MPVBackend: PlayerBackend {
var stream: Stream?
var video: Video?
var captions: Captions? {
didSet {
Task {
await handleCaptionsChange()
var captions: Captions? { didSet {
guard let captions else {
if client?.areSubtitlesAdded == true {
client?.removeSubs()
}
return
}
}
addSubTrack(captions.url)
}}
var currentTime: CMTime?
var loadedVideo = false
@@ -252,11 +253,7 @@ final class MPVBackend: PlayerBackend {
let startPlaying = {
#if !os(macOS)
do {
try AVAudioSession.sharedInstance().setActive(true)
} catch {
self.logger.error("Error setting up audio session: \(error)")
}
self.model.setAudioSessionActive(true)
#endif
DispatchQueue.main.async { [weak self] in
@@ -358,6 +355,9 @@ final class MPVBackend: PlayerBackend {
}
func play() {
#if !os(macOS)
model.setAudioSessionActive(true)
#endif
startClientUpdates()
startRefreshRateUpdates()
@@ -386,6 +386,9 @@ final class MPVBackend: PlayerBackend {
}
func pause() {
#if !os(macOS)
model.setAudioSessionActive(false)
#endif
stopClientUpdates()
stopRefreshRateUpdates()
@@ -407,6 +410,9 @@ final class MPVBackend: PlayerBackend {
}
func stop() {
#if !os(macOS)
model.setAudioSessionActive(false)
#endif
stopClientUpdates()
stopRefreshRateUpdates()
client?.stop()
@@ -616,14 +622,10 @@ final class MPVBackend: PlayerBackend {
}
func addSubTrack(_ url: URL) {
Task {
if let areSubtitlesAdded = client?.areSubtitlesAdded {
if await areSubtitlesAdded() {
await client?.removeSubs()
}
}
await client?.addSubTrack(url)
if client?.areSubtitlesAdded == true {
client?.removeSubs()
}
client?.addSubTrack(url)
}
func setVideoToAuto() {
@@ -687,17 +689,6 @@ final class MPVBackend: PlayerBackend {
}
}
private func handleCaptionsChange() async {
guard let captions else {
if let isSubtitlesAdded = client?.areSubtitlesAdded, await isSubtitlesAdded() {
await client?.removeSubs()
}
return
}
addSubTrack(captions.url)
}
private func handlePropertyChange(_ name: String, _ property: mpv_event_property) {
switch name {
case "pause":


@@ -349,17 +349,23 @@ final class MPVClient: ObservableObject {
return Int(fps.rounded())
}
func areSubtitlesAdded() async -> Bool {
var areSubtitlesAdded: Bool {
guard !mpv.isNil else { return false }
let trackCount = await Task(operation: { getInt("track-list/count") }).value
// Retrieve the number of tracks
let trackCount = getInt("track-list/count")
guard trackCount > 0 else { return false }
for index in 0 ..< trackCount {
if let trackType = await Task(operation: { getString("track-list/\(index)/type") }).value, trackType == "sub" {
// Get the type of each track
if let trackType = getString("track-list/\(index)/type"), trackType == "sub" {
// Check if the subtitle track is currently selected
let selected = getInt("track-list/\(index)/selected")
if selected == 1 {
return true
}
}
}
return false
}
@@ -533,16 +539,12 @@ final class MPVClient: ObservableObject {
command("video-add", args: [url.absoluteString])
}
func addSubTrack(_ url: URL) async {
await Task {
func addSubTrack(_ url: URL) {
command("sub-add", args: [url.absoluteString])
}.value
}
func removeSubs() async {
await Task {
func removeSubs() {
command("sub-remove")
}.value
}
func setVideoToAuto() {


@@ -232,6 +232,14 @@ final class PlayerModel: ObservableObject {
name: AVAudioSession.interruptionNotification,
object: nil
)
// Register for audio session route change notifications
NotificationCenter.default.addObserver(
self,
selector: #selector(handleRouteChange(_:)),
name: AVAudioSession.routeChangeNotification,
object: AVAudioSession.sharedInstance()
)
#endif
playbackMode = Defaults[.playbackMode]
@@ -250,7 +258,15 @@ final class PlayerModel: ObservableObject {
#if !os(macOS)
deinit {
NotificationCenter.default.removeObserver(self, name: AVAudioSession.interruptionNotification, object: nil)
NotificationCenter.default.removeObserver(
self, name: AVAudioSession.interruptionNotification, object: nil
)
NotificationCenter.default.removeObserver(
self,
name: AVAudioSession.routeChangeNotification,
object: AVAudioSession.sharedInstance()
)
}
#endif
@@ -1276,12 +1292,27 @@ final class PlayerModel: ObservableObject {
}
#if !os(macOS)
func setAudioSessionActive(_ setActive: Bool) {
DispatchQueue.main.asyncAfter(deadline: .now() + 0.2) {
do {
try AVAudioSession.sharedInstance().setActive(setActive)
} catch {
self.logger.error("Error setting up audio session: \(error)")
}
}
}
@objc func handleAudioSessionInterruption(_ notification: Notification) {
logger.info("Audio session interruption received.")
logger.info("Notification received: \(notification)")
logger.info("Notification object: \(String(describing: notification.object))")
guard let info = notification.userInfo,
let typeValue = info[AVAudioSessionInterruptionTypeKey] as? UInt,
guard let info = notification.userInfo else {
logger.info("userInfo is missing in the notification.")
return
}
// Extract the interruption type
guard let typeValue = info[AVAudioSessionInterruptionTypeKey] as? UInt,
let type = AVAudioSession.InterruptionType(rawValue: typeValue)
else {
logger.info("AVAudioSessionInterruptionTypeKey is missing or not a UInt in userInfo.")
@@ -1290,23 +1321,103 @@ final class PlayerModel: ObservableObject {
logger.info("Interruption type received: \(type)")
// Check availability for iOS 14.5 or newer to handle interruption reason
// Currently only for debugging purpose
#if os(iOS)
if #available(iOS 14.5, *) {
// Extract the interruption reason, if available
if let reasonValue = info[AVAudioSessionInterruptionReasonKey] as? UInt,
let reason = AVAudioSession.InterruptionReason(rawValue: reasonValue)
{
logger.info("Interruption reason received: \(reason)")
switch reason {
case .default:
logger.info("Interruption reason: Default or unspecified interruption occurred.")
case .appWasSuspended:
logger.info("Interruption reason: The app was suspended during the interruption.")
@unknown default:
logger.info("Unknown interruption reason received.")
}
} else {
logger.info("AVAudioSessionInterruptionReasonKey is missing or not a UInt in userInfo.")
}
} else {
logger.info("Interruption reason handling is not available on this iOS version.")
}
#endif
// Handle the specific interruption type
switch type {
case .began:
logger.info("Audio session interrupted.")
// We need to call pause() to set all variables correctly, and play()
// directly afterwards, because the .began interrupt is sent after audio
// ducking ended and playback would pause. Audio ducking usually happens
// when using headphones.
pause()
play()
logger.info("Audio session interrupted (began).")
case .ended:
// Extract any interruption options, if available
if let optionsValue = info[AVAudioSessionInterruptionOptionKey] as? UInt {
logger.info("Interruption options received: \(optionsValue)")
if optionsValue & AVAudioSession.InterruptionOptions.shouldResume.rawValue != 0 {
play()
logger.info("Interruption option indicates playback should resume automatically.")
} else {
logger.info("Interruption option indicates playback should not resume automatically.")
}
} else {
logger.info("AVAudioSessionInterruptionOptionKey is missing or not a UInt in userInfo.")
}
logger.info("Audio session interruption ended.")
// We need to call pause() to set all variables correctly.
// Otherwise, playback does not resume when the interruption ends.
pause()
// Check if audio was resumed or if there's any indication of ducking
let currentVolume = AVAudioSession.sharedInstance().outputVolume
logger.info("Current output volume: \(currentVolume)")
default:
logger.info("Unknown interruption type received.")
}
}
@objc func handleRouteChange(_ notification: Notification) {
logger.info("Audio route change received.")
guard let info = notification.userInfo else {
logger.info("userInfo is missing in the notification.")
return
}
guard let reasonValue = info[AVAudioSessionRouteChangeReasonKey] as? UInt,
let reason = AVAudioSession.RouteChangeReason(rawValue: reasonValue)
else {
logger.info("AVAudioSessionRouteChangeReasonKey is missing or not a UInt in userInfo.")
return
}
logger.info("Route change reason received: \(reason)")
let currentCategory = AVAudioSession.sharedInstance().category
logger.info("Current audio session category before change: \(currentCategory)")
switch reason {
case .categoryChange:
logger.info("Audio session category changed.")
let newCategory = AVAudioSession.sharedInstance().category
logger.info("New audio session category: \(newCategory)")
case .oldDeviceUnavailable, .newDeviceAvailable:
logger.info("Audio route change may indicate ducking or device change.")
let currentRoute = AVAudioSession.sharedInstance().currentRoute
logger.info("Current audio route: \(currentRoute)")
for output in currentRoute.outputs {
logger.info("Output port type: \(output.portType), UID: \(output.uid)")
switch output.portType {
case .headphones, .bluetoothA2DP:
logger.info("Detected port type \(output.portType). Executing play().")
play()
default:
break
logger.info("Detected port type \(output.portType). Executing pause().")
pause()
}
}
case .noSuitableRouteForCategory:
logger.info("No suitable route for the current category.")
default:
logger.info("Unhandled route change reason: \(reason)")
}
}
#endif


@@ -5,8 +5,6 @@ struct ControlsOverlay: View {
@ObservedObject private var player = PlayerModel.shared
private var model = PlayerControlsModel.shared
@State private var availableCaptions: [Captions] = []
@State private var isLoadingCaptions = true
@State private var contentSize: CGSize = .zero
@Default(.showMPVPlaybackStats) private var showMPVPlaybackStats
@@ -337,6 +335,7 @@ struct ControlsOverlay: View {
Image(systemName: "text.bubble")
if let captions = captionsBinding.wrappedValue,
let language = LanguageCodes(rawValue: captions.code)
{
Text("\(language.description.capitalized) (\(language.rawValue))")
.foregroundColor(.accentColor)
@@ -381,16 +380,17 @@ struct ControlsOverlay: View {
.contextMenu {
Button("Disabled") { captionsBinding.wrappedValue = nil }
ForEach(availableCaptions) { caption in
ForEach(player.currentVideo?.captions ?? []) { caption in
Button(caption.description) { captionsBinding.wrappedValue = caption }
}
Button("Cancel", role: .cancel) {}
}
#endif
}
@ViewBuilder private var captionsPicker: some View {
let captions = availableCaptions
let captions = player.currentVideo?.captions ?? []
Picker("Captions", selection: captionsBinding) {
if captions.isEmpty {
Text("Not available").tag(Captions?.none)
@@ -402,31 +402,6 @@ struct ControlsOverlay: View {
}
}
.disabled(captions.isEmpty)
.onAppear {
loadCaptions()
}
}
private func loadCaptions() {
isLoadingCaptions = true
// Fetch captions asynchronously
Task {
let fetchedCaptions = await fetchCaptions()
await MainActor.run {
// Update state on the main thread
self.availableCaptions = fetchedCaptions
self.isLoadingCaptions = false
}
}
}
private func fetchCaptions() async -> [Captions] {
// Access currentVideo from the main actor context
await MainActor.run {
// Safely access the main actor-isolated currentVideo property
player.currentVideo?.captions ?? []
}
}
private var captionsBinding: Binding<Captions?> {