iOS media playback controls notification
I am new to iOS and I am building a cross-platform application with Flutter. I am trying to play audio from a network URL, which I found can be done with AVPlayer. The audio plays while the app is in the foreground and in the background, but I could not get the media playback controls (the lock-screen / Control Center player) to show up.
I used let mediaController = MPMusicPlayerController.applicationMusicPlayer, then called self.mediaController.beginGeneratingPlaybackNotifications(), provided the now-playing info with MPNowPlayingInfoCenter.default().nowPlayingInfo = mediaInfo, and set the remote command center targets in the self.registerCommands() method.
I did a lot of research but could not find what the problem is; as I said before, I am new to iOS.
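Condensed, the wiring looks like this (a sketch; the wrapper type NowPlayingSetup is mine, purely to summarize the full code below):

import MediaPlayer

// A condensed sketch of the setup described above; see the full
// AppDelegate and AudioPlayer classes below for the actual code.
final class NowPlayingSetup {
    let mediaController = MPMusicPlayerController.applicationMusicPlayer

    func activate(mediaInfo: [String: Any]) {
        // 1. Start playback notifications on the application music player.
        mediaController.beginGeneratingPlaybackNotifications()
        // 2. Publish the now-playing metadata.
        MPNowPlayingInfoCenter.default().nowPlayingInfo = mediaInfo
        // 3. Remote-command targets are added separately in registerCommands().
    }
}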
AppDelegate
import UIKit
import Flutter
import AVFoundation
import MediaPlayer

@UIApplicationMain
@objc class AppDelegate: FlutterAppDelegate {
    static let CHANNEL = "APP_CHANNEL"

    let mPlayer = AudioPlayer()
    let mediaController = MPMusicPlayerController.applicationMusicPlayer
    override func application(
        _ application: UIApplication,
        didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?
    ) -> Bool {
        self.requestNotificationPermission(application: application)

        let controller: FlutterViewController = window?.rootViewController as! FlutterViewController
        let mainChannel = FlutterMethodChannel(name: AppDelegate.CHANNEL,
                                               binaryMessenger: controller.binaryMessenger)
        mainChannel.setMethodCallHandler({
            (call: FlutterMethodCall, result: @escaping FlutterResult) -> Void in
            switch call.method {
            case "getSharedContainerPath":
                let path = Utils.getSharedContainerPath()
                result(path)
            case "saveSelectedCity":
                let city = call.arguments as! String
                Utils.saveCityToUserDefaults(city: city)
                result(true)
            case "playSurah":
                let number = call.arguments as! Int
                self.initAudioPlayer()
                self.mPlayer.toggle(num: number)
                result(true)
            default:
                result(FlutterMethodNotImplemented)
            }
        })

        GeneratedPluginRegistrant.register(with: self)
        return super.application(application, didFinishLaunchingWithOptions: launchOptions)
    }
    func initAudioPlayer() {
        self.mediaController.beginGeneratingPlaybackNotifications()
        self.mPlayer.initPlayer(object: self)
        self.registerCommands()

        let nc = NotificationCenter.default
        nc.addObserver(self,
                       selector: #selector(handleInterruption),
                       name: AVAudioSession.interruptionNotification,
                       object: nil)
        nc.addObserver(self,
                       selector: #selector(playerDidFinishPlaying),
                       name: .AVPlayerItemDidPlayToEndTime,
                       object: nil)
    }
    func requestNotificationPermission(application: UIApplication) {
        if #available(iOS 10, *) {
            // iOS 10+: ask UNUserNotificationCenter for permission
            let center = UNUserNotificationCenter.current()
            center.delegate = self
            center.requestAuthorization(options: [.sound, .alert, .badge]) { (granted, error) in
                if granted {
                    print("Notifications enabled successfully")
                } else {
                    print("Notification permission not granted: \(String(describing: error))")
                }
            }
            application.registerForRemoteNotifications()
        } else if #available(iOS 8, *) {
            // iOS 8/9: the legacy user-notification settings API
            UIApplication.shared.registerUserNotificationSettings(
                UIUserNotificationSettings(types: [.badge, .sound, .alert], categories: nil))
            UIApplication.shared.registerForRemoteNotifications()
        } else {
            // iOS 7 support
            application.registerForRemoteNotifications(matching: [.badge, .sound, .alert])
        }
    }
    func registerCommands() {
        let command = MPRemoteCommandCenter.shared()
        command.playCommand.isEnabled = true
        command.playCommand.addTarget { (_) -> MPRemoteCommandHandlerStatus in
            self.mPlayer.play()
            return .success
        }
        command.pauseCommand.isEnabled = true
        command.pauseCommand.addTarget { (_) -> MPRemoteCommandHandlerStatus in
            self.mPlayer.pause()
            return .success
        }
        command.togglePlayPauseCommand.isEnabled = true
        command.togglePlayPauseCommand.addTarget { (_) -> MPRemoteCommandHandlerStatus in
            self.mPlayer.toggle(num: self.mPlayer.index)
            return .success
        }
        command.nextTrackCommand.isEnabled = true
        command.nextTrackCommand.addTarget { (_) -> MPRemoteCommandHandlerStatus in
            self.mPlayer.playNext()
            return .success
        }
        command.previousTrackCommand.isEnabled = true
        command.previousTrackCommand.addTarget { (_) -> MPRemoteCommandHandlerStatus in
            self.mPlayer.playPrev()
            return .success
        }
        command.stopCommand.isEnabled = true
        command.stopCommand.addTarget { (_) -> MPRemoteCommandHandlerStatus in
            self.mPlayer.stop()
            return .success
        }
    }
    // [notificationCenter addObserver: self
    //     selector: @selector(handle_NowPlayingItemChanged:)
    //     name: MPMusicPlayerControllerNowPlayingItemDidChangeNotification
    //     object: musicPlayer];
    //
    // [notificationCenter addObserver: self
    //     selector: @selector(handle_PlaybackStateChanged:)
    //     name: MPMusicPlayerControllerPlaybackStateDidChangeNotification
    //     object: musicPlayer];
    //
    // [notificationCenter addObserver: self
    //     selector: @selector(handle_VolumeChanged:)
    //     name: MPMusicPlayerControllerVolumeDidChangeNotification
    //     object: musicPlayer];
    func destroyPlayer() {
        self.mPlayer.stop()

        let nc = NotificationCenter.default
        nc.removeObserver(self, name: AVAudioSession.interruptionNotification, object: nil)
        nc.removeObserver(self, name: .AVPlayerItemDidPlayToEndTime, object: nil)
        self.mediaController.endGeneratingPlaybackNotifications()

        let command = MPRemoteCommandCenter.shared()
        command.playCommand.isEnabled = false
        command.pauseCommand.isEnabled = false
        command.togglePlayPauseCommand.isEnabled = false
        command.nextTrackCommand.isEnabled = false
        command.previousTrackCommand.isEnabled = false
        command.stopCommand.isEnabled = false
    }

    // override func applicationDidReceiveMemoryWarning(_ application: UIApplication) {
    //     self.destroyPlayer()
    // }

    override func applicationWillTerminate(_ application: UIApplication) {
        self.destroyPlayer()
    }
    @objc func playerDidFinishPlaying(note: NSNotification) {
        self.mPlayer.playNext()
    }

    override func observeValue(forKeyPath keyPath: String?,
                               of object: Any?,
                               change: [NSKeyValueChangeKey: Any]?,
                               context: UnsafeMutableRawPointer?) {
        // Only handle observations for the playerItemContext
        guard context == &mPlayer.playerItemContext else {
            super.observeValue(forKeyPath: keyPath, of: object, change: change, context: context)
            return
        }

        if keyPath == #keyPath(AVPlayerItem.status) {
            let status: AVPlayerItem.Status
            if let statusNumber = change?[.newKey] as? NSNumber {
                status = AVPlayerItem.Status(rawValue: statusNumber.intValue)!
            } else {
                status = .unknown
            }
            switch status {
            case .readyToPlay:
                // Player item is ready to play.
                self.mPlayer.updateMediaInfo()
            case .failed:
                // Player item failed. See error.
                break
            case .unknown:
                // Player item is not yet ready.
                break
            @unknown default:
                super.observeValue(forKeyPath: keyPath, of: object, change: change, context: context)
            }
        } else if keyPath == #keyPath(AVPlayer.timeControlStatus) {
            if object is AVPlayer {
                self.mPlayer.updateMediaInfo()
            }
        } else {
            super.observeValue(forKeyPath: keyPath, of: object, change: change, context: context)
        }
    }
    @objc func handleInterruption(notification: Notification) {
        guard let userInfo = notification.userInfo,
              let typeValue = userInfo[AVAudioSessionInterruptionTypeKey] as? UInt,
              let type = AVAudioSession.InterruptionType(rawValue: typeValue) else {
            return
        }
        // Switch over the interruption type.
        switch type {
        case .began:
            // An interruption began. Update the UI as needed.
            self.mPlayer.pause()
        case .ended:
            // An interruption ended. Resume playback, if appropriate.
            guard let optionsValue = userInfo[AVAudioSessionInterruptionOptionKey] as? UInt else { return }
            let options = AVAudioSession.InterruptionOptions(rawValue: optionsValue)
            if options.contains(.shouldResume) {
                // Interruption ended. Playback should resume.
                self.mPlayer.play()
            } else {
                // Interruption ended. Playback should not resume.
            }
        default: ()
        }
    }
}
The AudioPlayer class
//
//  AudioPlayer.swift
//  Runner
//
import Foundation
import AVFoundation
import MediaPlayer

class AudioPlayer {
    private var player: AVPlayer?
    var index: Int = 0
    private var object: NSObject!
    // Key-value observing context
    var playerItemContext = 0
    private var mediaInfo = [String: Any]()

    func initPlayer(object: NSObject) {
        self.object = object
        do {
            if #available(iOS 10.0, *) {
                try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default,
                                                                options: [.mixWithOthers, .allowAirPlay])
                try AVAudioSession.sharedInstance().setActive(false)
            } else {
                // Fallback on earlier versions
                try AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.playback,
                                                                options: .mixWithOthers)
            }
        } catch {
            print(error)
        }
    }
    func startPlayer() {
        do {
            try AVAudioSession.sharedInstance().setActive(true)
        } catch {
            print(error)
        }

        self.mediaInfo[MPMediaItemPropertyTitle] = ""
        self.mediaInfo[MPMediaItemPropertyArtist] = ""
        updateMediaInfo()

        let url = getUrl()
        let playerItem = AVPlayerItem(url: url!)
        playerItem.addObserver(self.object, forKeyPath: #keyPath(AVPlayerItem.status),
                               options: [.old, .new], context: &playerItemContext)

        if self.player == nil {
            self.player = AVPlayer(playerItem: playerItem)
        } else {
            self.player?.replaceCurrentItem(with: playerItem)
        }
        self.player?.addObserver(self.object, forKeyPath: #keyPath(AVPlayer.timeControlStatus),
                                 options: [.new, .old], context: &playerItemContext)

        if let p = self.player {
            p.play()
        }

        getMetadata(for: url!, completionHandler: { (metadata) in
            // Optional chaining here: metadata is nil if the fetch failed.
            self.mediaInfo[MPMediaItemPropertyTitle] = metadata?["title"]
            self.mediaInfo[MPMediaItemPropertyArtist] = metadata?["artist"]
            self.mediaInfo[MPMediaItemPropertyPlaybackDuration] = playerItem.asset.duration.seconds
            self.updateMediaInfo()
        })
    }
    func toggle(num: Int) {
        if self.index == num {
            if let p = self.player {
                if p.isPlaying {
                    p.pause()
                } else {
                    p.play()
                }
                self.updateMediaInfo()
            }
        } else {
            self.index = num
            startPlayer()
        }
    }

    func pause() {
        if let p = self.player, p.isPlaying {
            p.pause()
            self.updateMediaInfo()
        }
    }

    func play() {
        if let p = self.player, !p.isPlaying {
            p.play()
            self.updateMediaInfo()
        }
    }

    func playNext() {
        if self.index + 1 <= 114 {
            self.index += 1
        } else {
            self.index = 1
        }
        self.startPlayer()
    }

    func playPrev() {
        if self.index - 1 >= 1 {
            self.index -= 1
        } else {
            self.index = 114
        }
        self.startPlayer()
    }

    func stop() {
        if let p = self.player {
            p.pause()
            self.player?.replaceCurrentItem(with: nil)
        }
        MPNowPlayingInfoCenter.default().nowPlayingInfo = nil
    }
    func getUrl() -> URL? {
        return URL(string: String(format: Utils.QURAN_AUDIO, self.index))
    }

    func updateMediaInfo() {
        mediaInfo[MPNowPlayingInfoPropertyPlaybackRate] = player?.rate
        mediaInfo[MPNowPlayingInfoPropertyElapsedPlaybackTime] = player?.currentTime().seconds
        if #available(iOS 10.0, *) {
            mediaInfo[MPNowPlayingInfoPropertyMediaType] = NSNumber(value: MPNowPlayingInfoMediaType.audio.rawValue)
        }
        MPNowPlayingInfoCenter.default().nowPlayingInfo = mediaInfo
    }
    // Reads the trailing 128-byte ID3v1 tag of the remote file to extract
    // the title and artist fields.
    func getMetadata(for url: URL, completionHandler: @escaping (_ metadata: [String: String]?) -> ()) {
        var request = URLRequest(url: url)
        request.httpMethod = "HEAD"
        let task = URLSession.shared.dataTask(with: request) { (data, response, error) in
            guard error == nil,
                  let res1 = response as? HTTPURLResponse,
                  let contentLength = res1.allHeaderFields["Content-Length"] as? String else {
                completionHandler(nil)
                return
            }
            do {
                var req = URLRequest(url: url)
                req.setValue("bytes=\(UInt64(contentLength)! - 128)-", forHTTPHeaderField: "Range")
                // Synchronous request; deprecated API, kept from the original code.
                let data = try NSURLConnection.sendSynchronousRequest(req, returning: nil)
                let titleBytes = data.subdata(in: Range<Int>(NSRange(location: 3, length: 29))!)
                    .filter { $0 != 0 }
                let artistBytes = data.subdata(in: Range<Int>(NSRange(location: 33, length: 29))!)
                    .filter { $0 != 0 }
                let title = String(data: titleBytes, encoding: .utf8)
                let artist = String(data: artistBytes, encoding: .utf8)
                // Avoid force-unwrapping: non-UTF-8 tags would otherwise crash here.
                completionHandler(["title": title ?? "", "artist": artist ?? ""])
            } catch {
                completionHandler(nil)
            }
        }
        task.resume()
    }
}
extension AVPlayer {
    var isPlaying: Bool {
        if #available(iOS 10.0, *) {
            return timeControlStatus == .playing
        }
        return rate != 0 && error == nil
    }
}
If I read it correctly, NowPlayingInfo does not display your MediaInfo (title, etc.). That is because iOS currently ignores the NowPlayingInfo of AVAudioSessions that have the .mixWithOthers option enabled.
I set up a small test project with your code. With the .mixWithOthers option I could reproduce your problem; after removing this option, the NowPlayingInfoCenter worked as expected.
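For illustration, a minimal sketch of that change (the function name is mine; it mirrors the setCategory call in initPlayer above, and it also leaves out .allowAirPlay, which the next point covers):

import AVFoundation

// Sketch: session setup without .mixWithOthers. As a non-mixable,
// primary audio app, the metadata published to MPNowPlayingInfoCenter
// is no longer ignored by iOS.
func configurePlaybackSession() throws {
    try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default, options: [])
}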
One more thing: whenever I tried to set the AVAudioSession category, I got the error Error Domain=NSOSStatusErrorDomain Code=-50 "(null)". That is because the .playback category does not allow the .allowAirPlay option (https://developer.apple.com/documentation/avfoundation/avaudiosession/categoryoptions/1771736-allowairplay).
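So the fix is simply to drop that option from the setCategory call; a sketch, assuming iOS 10+ as in the code above:

import AVFoundation

// Sketch: per the documentation, .allowAirPlay may only be combined with
// the .playAndRecord category; with .playback, setCategory throws
// OSStatus error -50. Plain .playback supports AirPlay output routing
// anyway, so the option can just be dropped.
func setPlaybackCategory() {
    do {
        try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default)
    } catch {
        // With [.mixWithOthers, .allowAirPlay] this prints:
        // Error Domain=NSOSStatusErrorDomain Code=-50 "(null)"
        print(error)
    }
}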
From the comments:
i don't have a real device, i am using the IPhone 11 pro max simulator
That's the problem. You cannot test this functionality anywhere but on a device. The Simulator is not a reliable guide to a great many iOS features and behaviors, and this is one of them. Without a device, you have no evidence of whether your code works as desired.