How to find the current song duration in Flutter with audio_service and just_audio

When you set the MediaItem in audio_service, you don't know the song duration yet because just_audio hasn't had a chance to tell you.

The FAQs say to update the MediaItem like this:

modifiedMediaItem = mediaItem.copyWith(duration: duration);
AudioServiceBackground.setMediaItem(modifiedMediaItem);

But it wasn't clear to me how or where to do that. The example app in the GitHub repo sidesteps this problem by providing precomputed times. (GitHub issue)
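
By precomputed I mean the example builds its queue from media items whose durations are hardcoded up front, roughly like this (the URL and values below are made up):

final exampleQueue = <MediaItem>[
  MediaItem(
    id: 'https://example.com/song.mp3',
    album: 'Example Album',
    title: 'Example Song',
    duration: Duration(milliseconds: 210000), // already known, never queried
  ),
];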

How and where do I get the duration from just_audio over to audio_service so that it can update its listeners?

I found something that works, so I've added an answer below.

After setting up the just_audio AudioPlayer, you can listen for changes in its duration stream and make the update there:

_player.durationStream.listen((duration) {
  final songIndex = _player.playbackEvent.currentIndex;
  print('current index: $songIndex, duration: $duration');
  final modifiedMediaItem = mediaItem.copyWith(duration: duration);
  _queue[songIndex] = modifiedMediaItem;
  AudioServiceBackground.setMediaItem(_queue[songIndex]);
  AudioServiceBackground.setQueue(_queue);
});

Notes:

  • This goes in your audio_service BackgroundAudioTask class.
  • When I tried using _player.currentIndex directly, I got strange behavior (the first two songs both had index 0 before the index started incrementing) (GitHub issue). That's why I'm using the playback event to get the current index here.
  • In my example I'm using a List<MediaItem> for the queue. I don't actually need setQueue on the last line, since my UI isn't listening for changes to the queue, but I figure it doesn't hurt to do it anyway.

A more complete code example

Here is my entire background_audio_service.dart for reference. It's an adaptation of the documentation example:

import 'dart:async';

import 'package:audio_service/audio_service.dart';
import 'package:audio_session/audio_session.dart';
import 'package:just_audio/just_audio.dart';

void audioPlayerTaskEntrypoint() async {
  AudioServiceBackground.run(() => AudioPlayerTask());
}

class AudioPlayerTask extends BackgroundAudioTask {
  final AudioPlayer _player = AudioPlayer();
  AudioProcessingState _skipState;
  StreamSubscription<PlaybackEvent> _eventSubscription;

  List<MediaItem> _queue = [];
  List<MediaItem> get queue => _queue;

  int get index => _player.playbackEvent.currentIndex;
  MediaItem get mediaItem => index == null ? null : queue[index];

  @override
  Future<void> onStart(Map<String, dynamic> params) async {
    _loadMediaItemsIntoQueue(params);
    await _setAudioSession();
    _propagateEventsFromAudioPlayerToAudioServiceClients();
    _performSpecialProcessingForStateTransitions();
    _loadQueue();
  }

  void _loadMediaItemsIntoQueue(Map<String, dynamic> params) {
    _queue.clear();
    final List mediaItems = params['data'];
    for (var item in mediaItems) {
      final mediaItem = MediaItem.fromJson(item);
      _queue.add(mediaItem);
    }
  }

  Future<void> _setAudioSession() async {
    final session = await AudioSession.instance;
    await session.configure(AudioSessionConfiguration.music());
  }

  void _propagateEventsFromAudioPlayerToAudioServiceClients() {
    _eventSubscription = _player.playbackEventStream.listen((event) {
      _broadcastState();
    });
  }

  void _performSpecialProcessingForStateTransitions() {
    _player.processingStateStream.listen((state) {
      switch (state) {
        case ProcessingState.completed:
          onStop();
          break;
        case ProcessingState.ready:
          _skipState = null;
          break;
        default:
          break;
      }
    });
  }

  Future<void> _loadQueue() async {
    AudioServiceBackground.setQueue(queue);
    try {
      await _player.load(ConcatenatingAudioSource(
        children:
            queue.map((item) => AudioSource.uri(Uri.parse(item.id))).toList(),
      ));
      _player.durationStream.listen((duration) {
        _updateQueueWithCurrentDuration(duration);
      });
      onPlay();
    } catch (e) {
      print('Error: $e');
      onStop();
    }
  }

  void _updateQueueWithCurrentDuration(Duration duration) {
    final songIndex = _player.playbackEvent.currentIndex;
    print('current index: $songIndex, duration: $duration');
    final modifiedMediaItem = mediaItem.copyWith(duration: duration);
    _queue[songIndex] = modifiedMediaItem;
    AudioServiceBackground.setMediaItem(_queue[songIndex]);
    AudioServiceBackground.setQueue(_queue);
  }

  @override
  Future<void> onSkipToQueueItem(String mediaId) async {
    final newIndex = queue.indexWhere((item) => item.id == mediaId);
    if (newIndex == -1) return;
    _skipState = newIndex > index
        ? AudioProcessingState.skippingToNext
        : AudioProcessingState.skippingToPrevious;
    _player.seek(Duration.zero, index: newIndex);
  }

  @override
  Future<void> onPlay() => _player.play();

  @override
  Future<void> onPause() => _player.pause();

  @override
  Future<void> onSeekTo(Duration position) => _player.seek(position);

  @override
  Future<void> onFastForward() => _seekRelative(fastForwardInterval);

  @override
  Future<void> onRewind() => _seekRelative(-rewindInterval);

  @override
  Future<void> onStop() async {
    await _player.dispose();
    _eventSubscription.cancel();
    await _broadcastState();
    await super.onStop();
  }

  /// Jumps away from the current position by [offset].
  Future<void> _seekRelative(Duration offset) async {
    var newPosition = _player.position + offset;
    if (newPosition < Duration.zero) newPosition = Duration.zero;
    if (newPosition > mediaItem.duration) newPosition = mediaItem.duration;
    await _player.seek(newPosition);
  }

  /// Broadcasts the current state to all clients.
  Future<void> _broadcastState() async {
    await AudioServiceBackground.setState(
      controls: [
        MediaControl.skipToPrevious,
        if (_player.playing) MediaControl.pause else MediaControl.play,
        MediaControl.skipToNext,
      ],
      androidCompactActions: [0, 1, 2],
      processingState: _getProcessingState(),
      playing: _player.playing,
      position: _player.position,
      bufferedPosition: _player.bufferedPosition,
      speed: _player.speed,
    );
  }

  /// Maps just_audio's processing state into audio_service's playing
  /// state. If we are in the middle of a skip, we use [_skipState] instead.
  AudioProcessingState _getProcessingState() {
    if (_skipState != null) return _skipState;
    switch (_player.processingState) {
      case ProcessingState.none:
        return AudioProcessingState.stopped;
      case ProcessingState.loading:
        return AudioProcessingState.connecting;
      case ProcessingState.buffering:
        return AudioProcessingState.buffering;
      case ProcessingState.ready:
        return AudioProcessingState.ready;
      case ProcessingState.completed:
        return AudioProcessingState.completed;
      default:
        throw Exception("Invalid state: ${_player.processingState}");
    }
  }
}
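
For completeness, the task above reads its queue from params['data'], so the UI side has to pass the serialized media items in when it starts the service. A rough sketch of that call (the song data, function name, and notification settings here are made up; this targets the same pre-1.0 audio_service API as the rest of the code):

import 'package:audio_service/audio_service.dart';

import 'background_audio_service.dart'; // for audioPlayerTaskEntrypoint

Future<void> startPlayer() async {
  // Made-up song data; in a real app this comes from your own source.
  final mediaItems = [
    MediaItem(
      id: 'https://example.com/track1.mp3', // the task parses id as the audio URI
      album: 'Example Album',
      title: 'Track 1',
    ),
    MediaItem(
      id: 'https://example.com/track2.mp3',
      album: 'Example Album',
      title: 'Track 2',
    ),
  ];

  await AudioService.start(
    backgroundTaskEntrypoint: audioPlayerTaskEntrypoint,
    // Matches the params['data'] key that _loadMediaItemsIntoQueue reads.
    params: {'data': mediaItems.map((item) => item.toJson()).toList()},
    androidNotificationChannelName: 'Audio Player',
  );
}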

Then in my state management class I get a stream from AudioService like this (Rx.combineLatest2 comes from the rxdart package):

Stream<AudioPlayerState> get mediaStateStream =>
    Rx.combineLatest2<Duration, MediaItem, AudioPlayerState>(
        AudioService.positionStream,
        AudioService.currentMediaItemStream,
        (position, mediaItem) => AudioPlayerState(position, mediaItem.duration));

where AudioPlayerState is:

class AudioPlayerState {
  const AudioPlayerState(this.currentTime, this.totalTime);
  final Duration currentTime;
  final Duration totalTime;

  const AudioPlayerState.initial() : this(Duration.zero, Duration.zero);
}

I use a StreamBuilder in my Flutter UI to listen to mediaStateStream and update my audio player seek bar widget.
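
For reference, here is a minimal sketch of what that seek bar could look like (not my exact UI; the SeekBar name and layout are placeholders, and it assumes the mediaStateStream above is passed in):

import 'package:audio_service/audio_service.dart';
import 'package:flutter/material.dart';

class SeekBar extends StatelessWidget {
  const SeekBar({Key key, @required this.mediaStateStream}) : super(key: key);

  final Stream<AudioPlayerState> mediaStateStream;

  @override
  Widget build(BuildContext context) {
    return StreamBuilder<AudioPlayerState>(
      stream: mediaStateStream,
      initialData: const AudioPlayerState.initial(),
      builder: (context, snapshot) {
        final state = snapshot.data ?? const AudioPlayerState.initial();
        final totalMs = state.totalTime.inMilliseconds;
        // Before the duration arrives, totalTime is zero, so keep the
        // Slider's max at a safe non-zero value.
        final max = totalMs > 0 ? totalMs.toDouble() : 1.0;
        final value =
            state.currentTime.inMilliseconds.clamp(0, totalMs).toDouble();
        return Slider(
          min: 0,
          max: max,
          value: value,
          // Dragging the slider seeks via the audio_service client API.
          onChanged: (newValue) =>
              AudioService.seekTo(Duration(milliseconds: newValue.round())),
        );
      },
    );
  }
}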