如何控制 Flutter audio_service 包在通知栏中显示的内容
How can I control what is shown in the notification bar by the audio_service package for Flutter
我想知道在哪里可以更改 audio_service 包在通知栏中显示的内容。更具体地说,你能告诉我在哪里可以从通知栏中删除滑块(进度条,即 seek bar)吗?(它在展开通知之前不会显示。)
或者通过在其上方添加当前位置和最大持续时间来改进它。不过我想,只要能找到对应的代码位置,这一点我自己就能做到。
我的代码来自ryanheise的例子:
import 'dart:io';
import 'dart:math';
import 'package:audio_session/audio_session.dart';
import 'package:flutter/material.dart';
import 'dart:async';
import 'package:audio_service/audio_service.dart';
import 'package:just_audio/just_audio.dart';
import 'package:rxdart/rxdart.dart';
/// Application entry point: inflates the root widget.
void main() {
  runApp(MyApp());
}
/// Root widget of the demo.
///
/// Wraps the home screen in an [AudioServiceWidget] so the UI can connect to
/// (and disconnect from) the background audio task with the app lifecycle.
class MyApp extends StatelessWidget {
  @override
  Widget build(BuildContext context) {
    final theme = ThemeData(primarySwatch: Colors.blue);
    return MaterialApp(
      title: 'Audio Service Demo',
      theme: theme,
      home: AudioServiceWidget(child: MainScreen()),
    );
  }
}
/// The demo's main screen: starts the background audio task and renders
/// transport controls driven by the [AudioService] client streams.
class MainScreen extends StatelessWidget {
  /// Tracks the position while the user drags the seek bar.
  final BehaviorSubject<double> _dragPositionSubject =
      BehaviorSubject.seeded(null);

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      appBar: AppBar(
        title: const Text('Audio Service Demo'),
      ),
      body: Center(
        child: StreamBuilder<ScreenState>(
          stream: _screenStateStream,
          builder: (context, snapshot) {
            final screenState = snapshot.data;
            final queue = screenState?.queue;
            final mediaItem = screenState?.mediaItem;
            final state = screenState?.playbackState;
            final processingState =
                state?.processingState ?? AudioProcessingState.none;
            final playing = state?.playing ?? false;
            return Column(
              mainAxisAlignment: MainAxisAlignment.center,
              children: [
                // Before the service has started, only show the start button.
                if (processingState == AudioProcessingState.none) ...[
                  audioPlayerButton(),
                ] else ...[
                  if (queue != null && queue.isNotEmpty)
                    Row(
                      mainAxisAlignment: MainAxisAlignment.center,
                      children: [
                        IconButton(
                          icon: Icon(Icons.skip_previous),
                          iconSize: 64.0,
                          // Disabled when already on the first queue item.
                          onPressed: mediaItem == queue.first
                              ? null
                              : AudioService.skipToPrevious,
                        ),
                        IconButton(
                          icon: Icon(Icons.skip_next),
                          iconSize: 64.0,
                          // Disabled when already on the last queue item.
                          onPressed: mediaItem == queue.last
                              ? null
                              : AudioService.skipToNext,
                        ),
                      ],
                    ),
                  if (mediaItem?.title != null) Text(mediaItem.title),
                  Row(
                    mainAxisAlignment: MainAxisAlignment.center,
                    children: [
                      if (playing) pauseButton() else playButton(),
                      stopButton(),
                    ],
                  ),
                  positionIndicator(mediaItem, state),
                  // Strip the enum type prefix ("AudioProcessingState.").
                  Text("Processing state: " +
                      "$processingState".replaceAll(RegExp(r'^.*\.'), '')),
                  StreamBuilder(
                    stream: AudioService.customEventStream,
                    builder: (context, snapshot) {
                      return Text("custom event: ${snapshot.data}");
                    },
                  ),
                  StreamBuilder<bool>(
                    stream: AudioService.notificationClickEventStream,
                    builder: (context, snapshot) {
                      return Text(
                        'Notification Click Status: ${snapshot.data}',
                      );
                    },
                  ),
                ],
              ],
            );
          },
        ),
      ),
    );
  }

  /// Encapsulate all the different data we're interested in into a single
  /// stream so we don't have to nest StreamBuilders.
  Stream<ScreenState> get _screenStateStream =>
      Rx.combineLatest3<List<MediaItem>, MediaItem, PlaybackState, ScreenState>(
          AudioService.queueStream,
          AudioService.currentMediaItemStream,
          AudioService.playbackStateStream,
          (queue, mediaItem, playbackState) =>
              ScreenState(queue, mediaItem, playbackState));

  /// Builds the button that starts the background audio task. The
  /// android* parameters here control the look of the Android notification.
  RaisedButton audioPlayerButton() => startButton(
        'AudioPlayer',
        () {
          AudioService.start(
            backgroundTaskEntrypoint: _audioPlayerTaskEntrypoint,
            androidNotificationChannelName: 'Audio Service Demo',
            // Enable this if you want the Android service to exit the foreground state on pause.
            //androidStopForegroundOnPause: true,
            androidNotificationColor: 0xFF2196f3,
            androidNotificationIcon: 'mipmap/ic_launcher',
            androidEnableQueue: true,
          );
        },
      );

  /// Generic labelled start button used by [audioPlayerButton].
  RaisedButton startButton(String label, VoidCallback onPressed) =>
      RaisedButton(
        child: Text(label),
        onPressed: onPressed,
      );

  IconButton playButton() => IconButton(
        icon: Icon(Icons.play_arrow),
        iconSize: 64.0,
        onPressed: AudioService.play,
      );

  IconButton pauseButton() => IconButton(
        icon: Icon(Icons.pause),
        iconSize: 64.0,
        onPressed: AudioService.pause,
      );

  IconButton stopButton() => IconButton(
        icon: Icon(Icons.stop),
        iconSize: 64.0,
        onPressed: AudioService.stop,
      );

  /// Shows a seek bar (only when the media item's duration is known) and the
  /// current playback position, refreshed every 200 ms.
  Widget positionIndicator(MediaItem mediaItem, PlaybackState state) {
    double seekPos;
    return StreamBuilder(
      stream: Rx.combineLatest2<double, double, double>(
          _dragPositionSubject.stream,
          Stream.periodic(Duration(milliseconds: 200)),
          (dragPosition, _) => dragPosition),
      builder: (context, snapshot) {
        double position =
            snapshot.data ?? state.currentPosition.inMilliseconds.toDouble();
        double duration = mediaItem?.duration?.inMilliseconds?.toDouble();
        return Column(
          children: [
            if (duration != null)
              Slider(
                min: 0.0,
                max: duration,
                value: seekPos ?? max(0.0, min(position, duration)),
                onChanged: (value) {
                  _dragPositionSubject.add(value);
                },
                onChangeEnd: (value) {
                  AudioService.seekTo(Duration(milliseconds: value.toInt()));
                  // Due to a delay in platform channel communication, there is
                  // a brief moment after releasing the Slider thumb before the
                  // new position is broadcast from the platform side. This
                  // hack is to hold onto seekPos until the next state update
                  // comes through.
                  // TODO: Improve this code.
                  seekPos = value;
                  _dragPositionSubject.add(null);
                },
              ),
            Text("${state.currentPosition}"),
          ],
        );
      },
    );
  }
}
/// Immutable snapshot of the three streams the UI listens to, so a single
/// StreamBuilder can render the whole screen.
///
/// Any field may be null before the corresponding stream has emitted.
class ScreenState {
  final List<MediaItem> queue;
  final MediaItem mediaItem;
  final PlaybackState playbackState;
  ScreenState(this.queue, this.mediaItem, this.playbackState);
}
// NOTE: Your entrypoint MUST be a top-level function.
/// Runs in the background isolate and hosts the [AudioPlayerTask].
void _audioPlayerTaskEntrypoint() async {
  AudioServiceBackground.run(() {
    return AudioPlayerTask();
  });
}
/// This task defines logic for playing a list of podcast episodes.
///
/// It runs in the background isolate, drives a just_audio [AudioPlayer] and
/// mirrors the player's state out to AudioService clients (UI + notification).
class AudioPlayerTask extends BackgroundAudioTask {
  final _mediaLibrary = MediaLibrary();
  AudioPlayer _player = new AudioPlayer();
  // Preferred state to broadcast while a skip is in flight; cleared when the
  // player becomes ready again (see onStart / onSkipToQueueItem).
  AudioProcessingState _skipState;
  Seeker _seeker;
  StreamSubscription<PlaybackEvent> _eventSubscription;

  List<MediaItem> get queue => _mediaLibrary.items;
  // The player's current queue index, or null if nothing is loaded yet.
  int get index => _player.currentIndex;
  MediaItem get mediaItem => index == null ? null : queue[index];

  @override
  Future<void> onStart(Map<String, dynamic> params) async {
    // We configure the audio session for speech since we're playing a podcast.
    // You can also put this in your app's initialisation if your app doesn't
    // switch between two types of audio as this example does.
    final session = await AudioSession.instance;
    await session.configure(AudioSessionConfiguration.speech());
    // Broadcast media item changes.
    _player.currentIndexStream.listen((index) {
      if (index != null) AudioServiceBackground.setMediaItem(queue[index]);
    });
    // Propagate all events from the audio player to AudioService clients.
    _eventSubscription = _player.playbackEventStream.listen((event) {
      _broadcastState();
    });
    // Special processing for state transitions.
    _player.processingStateStream.listen((state) {
      switch (state) {
        case ProcessingState.completed:
          // In this example, the service stops when reaching the end.
          onStop();
          break;
        case ProcessingState.ready:
          // If we just came from skipping between tracks, clear the skip
          // state now that we're ready to play.
          _skipState = null;
          break;
        default:
          break;
      }
    });
    // Load and broadcast the queue
    AudioServiceBackground.setQueue(queue);
    try {
      await _player.load(ConcatenatingAudioSource(
        children:
            queue.map((item) => AudioSource.uri(Uri.parse(item.id))).toList(),
      ));
      // In this example, we automatically start playing on start.
      onPlay();
    } catch (e) {
      print("Error: $e");
      onStop();
    }
  }

  @override
  Future<void> onSkipToQueueItem(String mediaId) async {
    // Then default implementations of onSkipToNext and onSkipToPrevious will
    // delegate to this method.
    final newIndex = queue.indexWhere((item) => item.id == mediaId);
    if (newIndex == -1) return;
    // During a skip, the player may enter the buffering state. We could just
    // propagate that state directly to AudioService clients but AudioService
    // has some more specific states we could use for skipping to next and
    // previous. This variable holds the preferred state to send instead of
    // buffering during a skip, and it is cleared as soon as the player exits
    // buffering (see the listener in onStart).
    _skipState = newIndex > index
        ? AudioProcessingState.skippingToNext
        : AudioProcessingState.skippingToPrevious;
    // This jumps to the beginning of the queue item at newIndex.
    _player.seek(Duration.zero, index: newIndex);
  }

  @override
  Future<void> onPlay() => _player.play();

  @override
  Future<void> onPause() => _player.pause();

  @override
  Future<void> onSeekTo(Duration position) => _player.seek(position);

  @override
  Future<void> onFastForward() => _seekRelative(fastForwardInterval);

  @override
  Future<void> onRewind() => _seekRelative(-rewindInterval);

  @override
  Future<void> onSeekForward(bool begin) async => _seekContinuously(begin, 1);

  @override
  Future<void> onSeekBackward(bool begin) async => _seekContinuously(begin, -1);

  @override
  Future<void> onStop() async {
    await _player.pause();
    await _player.dispose();
    _eventSubscription.cancel();
    // It is important to wait for this state to be broadcast before we shut
    // down the task. If we don't, the background task will be destroyed before
    // the message gets sent to the UI.
    await _broadcastState();
    // Shut down this task
    await super.onStop();
  }

  /// Jumps away from the current position by [offset].
  Future<void> _seekRelative(Duration offset) async {
    var newPosition = _player.position + offset;
    // Make sure we don't jump out of bounds.
    if (newPosition < Duration.zero) newPosition = Duration.zero;
    if (newPosition > mediaItem.duration) newPosition = mediaItem.duration;
    // Perform the jump via a seek.
    await _player.seek(newPosition);
  }

  /// Begins or stops a continuous seek in [direction]. After it begins it will
  /// continue seeking forward or backward by 10 seconds within the audio, at
  /// intervals of 1 second in app time.
  void _seekContinuously(bool begin, int direction) {
    _seeker?.stop();
    if (begin) {
      _seeker = Seeker(_player, Duration(seconds: 10 * direction),
          Duration(seconds: 1), mediaItem)
        ..start();
    }
  }

  /// Broadcasts the current state to all clients.
  ///
  /// NOTE(review): MediaAction.seekTo is deliberately absent from
  /// systemActions here, which is what keeps the seek bar out of the
  /// notification (per the setState documentation quoted in this Q&A).
  Future<void> _broadcastState() async {
    await AudioServiceBackground.setState(
      controls: [
        MediaControl.skipToPrevious,
        if (_player.playing) MediaControl.pause else MediaControl.play,
        MediaControl.stop,
        MediaControl.skipToNext,
      ],
      systemActions: [
        MediaAction.play,
        MediaAction.seekForward,
        MediaAction.seekBackward,
      ],
      processingState: _getProcessingState(),
      playing: _player.playing,
      position: _player.position,
      bufferedPosition: _player.bufferedPosition,
      speed: _player.speed,
    );
  }

  /// Maps just_audio's processing state into into audio_service's playing
  /// state. If we are in the middle of a skip, we use [_skipState] instead.
  AudioProcessingState _getProcessingState() {
    if (_skipState != null) return _skipState;
    switch (_player.processingState) {
      case ProcessingState.none:
        return AudioProcessingState.stopped;
      case ProcessingState.loading:
        return AudioProcessingState.connecting;
      case ProcessingState.buffering:
        return AudioProcessingState.buffering;
      case ProcessingState.ready:
        return AudioProcessingState.ready;
      case ProcessingState.completed:
        return AudioProcessingState.completed;
      default:
        throw Exception("Invalid state: ${_player.processingState}");
    }
  }
}
/// Provides access to a library of media items. In your app, this could come
/// from a database or web service.
class MediaLibrary {
  // Hard-coded demo catalog. Each item sets `duration`, which is what allows
  // the OS to show position/duration in the media notification.
  final _items = <MediaItem>[
    MediaItem(
      id: "https://s3.amazonaws.com/scifri-episodes/scifri20181123-episode.mp3",
      album: "Science Friday",
      title: "A Salute To Head-Scratching Science",
      artist: "Science Friday and WNYC Studios",
      duration: Duration(milliseconds: 5739820),
      artUri:
          "https://media.wnyc.org/i/1400/1400/l/80/1/ScienceFriday_WNYCStudios_1400.jpg",
    ),
    MediaItem(
      id: "https://s3.amazonaws.com/scifri-segments/scifri201711241.mp3",
      album: "Science Friday",
      title: "From Cat Rheology To Operatic Incompetence",
      artist: "Science Friday and WNYC Studios",
      duration: Duration(milliseconds: 2856950),
      artUri:
          "https://media.wnyc.org/i/1400/1400/l/80/1/ScienceFriday_WNYCStudios_1400.jpg",
    ),
  ];

  /// The full list of available media items (the background task's queue).
  List<MediaItem> get items => _items;
}
/// An object that performs interruptable sleep.
class Sleeper {
  Completer _blockingCompleter;

  /// Sleeps for [duration], or indefinitely when [duration] is omitted.
  ///
  /// Throws a [SleeperInterruptedException] if [interrupt] is called while
  /// the sleep is still in progress.
  Future<void> sleep([Duration duration]) async {
    _blockingCompleter = Completer();
    if (duration == null) {
      // No duration: block until interrupted.
      await _blockingCompleter.future;
    } else {
      // Wake on whichever comes first: the timer or an interrupt.
      await Future.any([Future.delayed(duration), _blockingCompleter.future]);
    }
    final wasInterrupted = _blockingCompleter.isCompleted;
    _blockingCompleter = null;
    if (wasInterrupted) throw SleeperInterruptedException();
  }

  /// Interrupts any sleep that is currently underway; a no-op otherwise.
  void interrupt() {
    final completer = _blockingCompleter;
    if (completer != null && !completer.isCompleted) {
      completer.complete();
    }
  }
}
/// Thrown by [Sleeper.sleep] when the sleep is cut short by
/// [Sleeper.interrupt].
class SleeperInterruptedException {}
/// Continuously seeks the player while active.
///
/// Every [stepInterval] of app time, the playback position is moved by
/// [positionInterval] (negative values seek backward), clamped to the bounds
/// of [mediaItem]'s duration.
///
/// NOTE(review): the original doc comment ("A wrapper around FlutterTts...")
/// was a copy-paste leftover from the TTS example and described a different
/// class; it has been corrected here.
class Seeker {
  final AudioPlayer player;
  final Duration positionInterval;
  final Duration stepInterval;
  final MediaItem mediaItem;
  bool _running = false;

  Seeker(
    this.player,
    this.positionInterval,
    this.stepInterval,
    this.mediaItem,
  );

  /// Starts seeking repeatedly until [stop] is called.
  Future<void> start() async {
    _running = true;
    while (_running) {
      Duration newPosition = player.position + positionInterval;
      // Clamp the target position to the media item's valid range.
      if (newPosition < Duration.zero) newPosition = Duration.zero;
      if (newPosition > mediaItem.duration) newPosition = mediaItem.duration;
      player.seek(newPosition);
      await Future.delayed(stepInterval);
    }
  }

  /// Stops the continuous seek started by [start].
  void stop() {
    _running = false;
  }
}
该示例应该已经在进度条下方显示了当前位置和持续时间,它通过调用 AudioServiceBackground.setMediaItem
(0.17) 或 AudioHandler.mediaItem.add
(0.18) 来设置持续时间,并调用AudioServiceBackground.setState
(0.17) 或 AudioHandler.playbackState.add
(0.18) 设置当前位置。您不能影响它的显示位置,因为这是由操作系统选择的,但在 Android 和 iOS 上,它通常位于进度条下方,而不是上方。
在 0.17 中,您可以通过从 AudioServiceBackground.setState
的 systemActions
参数中删除 seekTo
媒体操作来删除进度条,您似乎已经这样做了。来自 setState 文档:
Any other action you would like to enable for clients that is not a
clickable notification button should be specified in the
systemActions parameter. For example:
- MediaAction.seekTo (enable a seek bar)
在 0.18 中,您可以通过从 PlaybackState
的 systemActions
参数中删除 seek
媒体操作来执行相同的操作。来自 PlaybackState documentation:
Note that specifying [MediaAction.seek] in [systemActions] will enable a
seek bar in both the Android notification and the iOS control center, but
on Android, it will show only if the media item's duration has been set.
我想知道在哪里可以更改 audio_service 包在通知栏中显示的内容。更具体地说,你能告诉我在哪里可以从通知栏中删除滑块(进度条,即 seek bar)吗?(它在展开通知之前不会显示。)或者通过在其上方添加当前位置和最大持续时间来改进它。不过我想,只要能找到对应的代码位置,这一点我自己就能做到。
我的代码来自ryanheise的例子:
import 'dart:io';
import 'dart:math';
import 'package:audio_session/audio_session.dart';
import 'package:flutter/material.dart';
import 'dart:async';
import 'package:audio_service/audio_service.dart';
import 'package:just_audio/just_audio.dart';
import 'package:rxdart/rxdart.dart';
/// Application entry point: inflates the root widget.
void main() {
  runApp(MyApp());
}
/// Root widget of the demo.
///
/// Wraps the home screen in an [AudioServiceWidget] so the UI can connect to
/// (and disconnect from) the background audio task with the app lifecycle.
class MyApp extends StatelessWidget {
  @override
  Widget build(BuildContext context) {
    final theme = ThemeData(primarySwatch: Colors.blue);
    return MaterialApp(
      title: 'Audio Service Demo',
      theme: theme,
      home: AudioServiceWidget(child: MainScreen()),
    );
  }
}
/// The demo's main screen: starts the background audio task and renders
/// transport controls driven by the [AudioService] client streams.
class MainScreen extends StatelessWidget {
  /// Tracks the position while the user drags the seek bar.
  final BehaviorSubject<double> _dragPositionSubject =
      BehaviorSubject.seeded(null);

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      appBar: AppBar(
        title: const Text('Audio Service Demo'),
      ),
      body: Center(
        child: StreamBuilder<ScreenState>(
          stream: _screenStateStream,
          builder: (context, snapshot) {
            final screenState = snapshot.data;
            final queue = screenState?.queue;
            final mediaItem = screenState?.mediaItem;
            final state = screenState?.playbackState;
            final processingState =
                state?.processingState ?? AudioProcessingState.none;
            final playing = state?.playing ?? false;
            return Column(
              mainAxisAlignment: MainAxisAlignment.center,
              children: [
                // Before the service has started, only show the start button.
                if (processingState == AudioProcessingState.none) ...[
                  audioPlayerButton(),
                ] else ...[
                  if (queue != null && queue.isNotEmpty)
                    Row(
                      mainAxisAlignment: MainAxisAlignment.center,
                      children: [
                        IconButton(
                          icon: Icon(Icons.skip_previous),
                          iconSize: 64.0,
                          // Disabled when already on the first queue item.
                          onPressed: mediaItem == queue.first
                              ? null
                              : AudioService.skipToPrevious,
                        ),
                        IconButton(
                          icon: Icon(Icons.skip_next),
                          iconSize: 64.0,
                          // Disabled when already on the last queue item.
                          onPressed: mediaItem == queue.last
                              ? null
                              : AudioService.skipToNext,
                        ),
                      ],
                    ),
                  if (mediaItem?.title != null) Text(mediaItem.title),
                  Row(
                    mainAxisAlignment: MainAxisAlignment.center,
                    children: [
                      if (playing) pauseButton() else playButton(),
                      stopButton(),
                    ],
                  ),
                  positionIndicator(mediaItem, state),
                  // Strip the enum type prefix ("AudioProcessingState.").
                  Text("Processing state: " +
                      "$processingState".replaceAll(RegExp(r'^.*\.'), '')),
                  StreamBuilder(
                    stream: AudioService.customEventStream,
                    builder: (context, snapshot) {
                      return Text("custom event: ${snapshot.data}");
                    },
                  ),
                  StreamBuilder<bool>(
                    stream: AudioService.notificationClickEventStream,
                    builder: (context, snapshot) {
                      return Text(
                        'Notification Click Status: ${snapshot.data}',
                      );
                    },
                  ),
                ],
              ],
            );
          },
        ),
      ),
    );
  }

  /// Encapsulate all the different data we're interested in into a single
  /// stream so we don't have to nest StreamBuilders.
  Stream<ScreenState> get _screenStateStream =>
      Rx.combineLatest3<List<MediaItem>, MediaItem, PlaybackState, ScreenState>(
          AudioService.queueStream,
          AudioService.currentMediaItemStream,
          AudioService.playbackStateStream,
          (queue, mediaItem, playbackState) =>
              ScreenState(queue, mediaItem, playbackState));

  /// Builds the button that starts the background audio task. The
  /// android* parameters here control the look of the Android notification.
  RaisedButton audioPlayerButton() => startButton(
        'AudioPlayer',
        () {
          AudioService.start(
            backgroundTaskEntrypoint: _audioPlayerTaskEntrypoint,
            androidNotificationChannelName: 'Audio Service Demo',
            // Enable this if you want the Android service to exit the foreground state on pause.
            //androidStopForegroundOnPause: true,
            androidNotificationColor: 0xFF2196f3,
            androidNotificationIcon: 'mipmap/ic_launcher',
            androidEnableQueue: true,
          );
        },
      );

  /// Generic labelled start button used by [audioPlayerButton].
  RaisedButton startButton(String label, VoidCallback onPressed) =>
      RaisedButton(
        child: Text(label),
        onPressed: onPressed,
      );

  IconButton playButton() => IconButton(
        icon: Icon(Icons.play_arrow),
        iconSize: 64.0,
        onPressed: AudioService.play,
      );

  IconButton pauseButton() => IconButton(
        icon: Icon(Icons.pause),
        iconSize: 64.0,
        onPressed: AudioService.pause,
      );

  IconButton stopButton() => IconButton(
        icon: Icon(Icons.stop),
        iconSize: 64.0,
        onPressed: AudioService.stop,
      );

  /// Shows a seek bar (only when the media item's duration is known) and the
  /// current playback position, refreshed every 200 ms.
  Widget positionIndicator(MediaItem mediaItem, PlaybackState state) {
    double seekPos;
    return StreamBuilder(
      stream: Rx.combineLatest2<double, double, double>(
          _dragPositionSubject.stream,
          Stream.periodic(Duration(milliseconds: 200)),
          (dragPosition, _) => dragPosition),
      builder: (context, snapshot) {
        double position =
            snapshot.data ?? state.currentPosition.inMilliseconds.toDouble();
        double duration = mediaItem?.duration?.inMilliseconds?.toDouble();
        return Column(
          children: [
            if (duration != null)
              Slider(
                min: 0.0,
                max: duration,
                value: seekPos ?? max(0.0, min(position, duration)),
                onChanged: (value) {
                  _dragPositionSubject.add(value);
                },
                onChangeEnd: (value) {
                  AudioService.seekTo(Duration(milliseconds: value.toInt()));
                  // Due to a delay in platform channel communication, there is
                  // a brief moment after releasing the Slider thumb before the
                  // new position is broadcast from the platform side. This
                  // hack is to hold onto seekPos until the next state update
                  // comes through.
                  // TODO: Improve this code.
                  seekPos = value;
                  _dragPositionSubject.add(null);
                },
              ),
            Text("${state.currentPosition}"),
          ],
        );
      },
    );
  }
}
/// Immutable snapshot of the three streams the UI listens to, so a single
/// StreamBuilder can render the whole screen.
///
/// Any field may be null before the corresponding stream has emitted.
class ScreenState {
  final List<MediaItem> queue;
  final MediaItem mediaItem;
  final PlaybackState playbackState;
  ScreenState(this.queue, this.mediaItem, this.playbackState);
}
// NOTE: Your entrypoint MUST be a top-level function.
/// Runs in the background isolate and hosts the [AudioPlayerTask].
void _audioPlayerTaskEntrypoint() async {
  AudioServiceBackground.run(() {
    return AudioPlayerTask();
  });
}
/// This task defines logic for playing a list of podcast episodes.
///
/// It runs in the background isolate, drives a just_audio [AudioPlayer] and
/// mirrors the player's state out to AudioService clients (UI + notification).
class AudioPlayerTask extends BackgroundAudioTask {
  final _mediaLibrary = MediaLibrary();
  AudioPlayer _player = new AudioPlayer();
  // Preferred state to broadcast while a skip is in flight; cleared when the
  // player becomes ready again (see onStart / onSkipToQueueItem).
  AudioProcessingState _skipState;
  Seeker _seeker;
  StreamSubscription<PlaybackEvent> _eventSubscription;

  List<MediaItem> get queue => _mediaLibrary.items;
  // The player's current queue index, or null if nothing is loaded yet.
  int get index => _player.currentIndex;
  MediaItem get mediaItem => index == null ? null : queue[index];

  @override
  Future<void> onStart(Map<String, dynamic> params) async {
    // We configure the audio session for speech since we're playing a podcast.
    // You can also put this in your app's initialisation if your app doesn't
    // switch between two types of audio as this example does.
    final session = await AudioSession.instance;
    await session.configure(AudioSessionConfiguration.speech());
    // Broadcast media item changes.
    _player.currentIndexStream.listen((index) {
      if (index != null) AudioServiceBackground.setMediaItem(queue[index]);
    });
    // Propagate all events from the audio player to AudioService clients.
    _eventSubscription = _player.playbackEventStream.listen((event) {
      _broadcastState();
    });
    // Special processing for state transitions.
    _player.processingStateStream.listen((state) {
      switch (state) {
        case ProcessingState.completed:
          // In this example, the service stops when reaching the end.
          onStop();
          break;
        case ProcessingState.ready:
          // If we just came from skipping between tracks, clear the skip
          // state now that we're ready to play.
          _skipState = null;
          break;
        default:
          break;
      }
    });
    // Load and broadcast the queue
    AudioServiceBackground.setQueue(queue);
    try {
      await _player.load(ConcatenatingAudioSource(
        children:
            queue.map((item) => AudioSource.uri(Uri.parse(item.id))).toList(),
      ));
      // In this example, we automatically start playing on start.
      onPlay();
    } catch (e) {
      print("Error: $e");
      onStop();
    }
  }

  @override
  Future<void> onSkipToQueueItem(String mediaId) async {
    // Then default implementations of onSkipToNext and onSkipToPrevious will
    // delegate to this method.
    final newIndex = queue.indexWhere((item) => item.id == mediaId);
    if (newIndex == -1) return;
    // During a skip, the player may enter the buffering state. We could just
    // propagate that state directly to AudioService clients but AudioService
    // has some more specific states we could use for skipping to next and
    // previous. This variable holds the preferred state to send instead of
    // buffering during a skip, and it is cleared as soon as the player exits
    // buffering (see the listener in onStart).
    _skipState = newIndex > index
        ? AudioProcessingState.skippingToNext
        : AudioProcessingState.skippingToPrevious;
    // This jumps to the beginning of the queue item at newIndex.
    _player.seek(Duration.zero, index: newIndex);
  }

  @override
  Future<void> onPlay() => _player.play();

  @override
  Future<void> onPause() => _player.pause();

  @override
  Future<void> onSeekTo(Duration position) => _player.seek(position);

  @override
  Future<void> onFastForward() => _seekRelative(fastForwardInterval);

  @override
  Future<void> onRewind() => _seekRelative(-rewindInterval);

  @override
  Future<void> onSeekForward(bool begin) async => _seekContinuously(begin, 1);

  @override
  Future<void> onSeekBackward(bool begin) async => _seekContinuously(begin, -1);

  @override
  Future<void> onStop() async {
    await _player.pause();
    await _player.dispose();
    _eventSubscription.cancel();
    // It is important to wait for this state to be broadcast before we shut
    // down the task. If we don't, the background task will be destroyed before
    // the message gets sent to the UI.
    await _broadcastState();
    // Shut down this task
    await super.onStop();
  }

  /// Jumps away from the current position by [offset].
  Future<void> _seekRelative(Duration offset) async {
    var newPosition = _player.position + offset;
    // Make sure we don't jump out of bounds.
    if (newPosition < Duration.zero) newPosition = Duration.zero;
    if (newPosition > mediaItem.duration) newPosition = mediaItem.duration;
    // Perform the jump via a seek.
    await _player.seek(newPosition);
  }

  /// Begins or stops a continuous seek in [direction]. After it begins it will
  /// continue seeking forward or backward by 10 seconds within the audio, at
  /// intervals of 1 second in app time.
  void _seekContinuously(bool begin, int direction) {
    _seeker?.stop();
    if (begin) {
      _seeker = Seeker(_player, Duration(seconds: 10 * direction),
          Duration(seconds: 1), mediaItem)
        ..start();
    }
  }

  /// Broadcasts the current state to all clients.
  ///
  /// NOTE(review): MediaAction.seekTo is deliberately absent from
  /// systemActions here, which is what keeps the seek bar out of the
  /// notification (per the setState documentation quoted in this Q&A).
  Future<void> _broadcastState() async {
    await AudioServiceBackground.setState(
      controls: [
        MediaControl.skipToPrevious,
        if (_player.playing) MediaControl.pause else MediaControl.play,
        MediaControl.stop,
        MediaControl.skipToNext,
      ],
      systemActions: [
        MediaAction.play,
        MediaAction.seekForward,
        MediaAction.seekBackward,
      ],
      processingState: _getProcessingState(),
      playing: _player.playing,
      position: _player.position,
      bufferedPosition: _player.bufferedPosition,
      speed: _player.speed,
    );
  }

  /// Maps just_audio's processing state into into audio_service's playing
  /// state. If we are in the middle of a skip, we use [_skipState] instead.
  AudioProcessingState _getProcessingState() {
    if (_skipState != null) return _skipState;
    switch (_player.processingState) {
      case ProcessingState.none:
        return AudioProcessingState.stopped;
      case ProcessingState.loading:
        return AudioProcessingState.connecting;
      case ProcessingState.buffering:
        return AudioProcessingState.buffering;
      case ProcessingState.ready:
        return AudioProcessingState.ready;
      case ProcessingState.completed:
        return AudioProcessingState.completed;
      default:
        throw Exception("Invalid state: ${_player.processingState}");
    }
  }
}
/// Provides access to a library of media items. In your app, this could come
/// from a database or web service.
class MediaLibrary {
  // Hard-coded demo catalog. Each item sets `duration`, which is what allows
  // the OS to show position/duration in the media notification.
  final _items = <MediaItem>[
    MediaItem(
      id: "https://s3.amazonaws.com/scifri-episodes/scifri20181123-episode.mp3",
      album: "Science Friday",
      title: "A Salute To Head-Scratching Science",
      artist: "Science Friday and WNYC Studios",
      duration: Duration(milliseconds: 5739820),
      artUri:
          "https://media.wnyc.org/i/1400/1400/l/80/1/ScienceFriday_WNYCStudios_1400.jpg",
    ),
    MediaItem(
      id: "https://s3.amazonaws.com/scifri-segments/scifri201711241.mp3",
      album: "Science Friday",
      title: "From Cat Rheology To Operatic Incompetence",
      artist: "Science Friday and WNYC Studios",
      duration: Duration(milliseconds: 2856950),
      artUri:
          "https://media.wnyc.org/i/1400/1400/l/80/1/ScienceFriday_WNYCStudios_1400.jpg",
    ),
  ];

  /// The full list of available media items (the background task's queue).
  List<MediaItem> get items => _items;
}
/// An object that performs interruptable sleep.
class Sleeper {
  Completer _blockingCompleter;

  /// Sleeps for [duration], or indefinitely when [duration] is omitted.
  ///
  /// Throws a [SleeperInterruptedException] if [interrupt] is called while
  /// the sleep is still in progress.
  Future<void> sleep([Duration duration]) async {
    _blockingCompleter = Completer();
    if (duration == null) {
      // No duration: block until interrupted.
      await _blockingCompleter.future;
    } else {
      // Wake on whichever comes first: the timer or an interrupt.
      await Future.any([Future.delayed(duration), _blockingCompleter.future]);
    }
    final wasInterrupted = _blockingCompleter.isCompleted;
    _blockingCompleter = null;
    if (wasInterrupted) throw SleeperInterruptedException();
  }

  /// Interrupts any sleep that is currently underway; a no-op otherwise.
  void interrupt() {
    final completer = _blockingCompleter;
    if (completer != null && !completer.isCompleted) {
      completer.complete();
    }
  }
}
/// Thrown by [Sleeper.sleep] when the sleep is cut short by
/// [Sleeper.interrupt].
class SleeperInterruptedException {}
/// Continuously seeks the player while active.
///
/// Every [stepInterval] of app time, the playback position is moved by
/// [positionInterval] (negative values seek backward), clamped to the bounds
/// of [mediaItem]'s duration.
///
/// NOTE(review): the original doc comment ("A wrapper around FlutterTts...")
/// was a copy-paste leftover from the TTS example and described a different
/// class; it has been corrected here.
class Seeker {
  final AudioPlayer player;
  final Duration positionInterval;
  final Duration stepInterval;
  final MediaItem mediaItem;
  bool _running = false;

  Seeker(
    this.player,
    this.positionInterval,
    this.stepInterval,
    this.mediaItem,
  );

  /// Starts seeking repeatedly until [stop] is called.
  Future<void> start() async {
    _running = true;
    while (_running) {
      Duration newPosition = player.position + positionInterval;
      // Clamp the target position to the media item's valid range.
      if (newPosition < Duration.zero) newPosition = Duration.zero;
      if (newPosition > mediaItem.duration) newPosition = mediaItem.duration;
      player.seek(newPosition);
      await Future.delayed(stepInterval);
    }
  }

  /// Stops the continuous seek started by [start].
  void stop() {
    _running = false;
  }
}
该示例应该已经在进度条下方显示了当前位置和持续时间,它通过调用 AudioServiceBackground.setMediaItem
(0.17) 或 AudioHandler.mediaItem.add
(0.18) 来设置持续时间,并调用AudioServiceBackground.setState
(0.17) 或 AudioHandler.playbackState.add
(0.18) 设置当前位置。您不能影响它的显示位置,因为这是由操作系统选择的,但在 Android 和 iOS 上,它通常位于进度条下方,而不是上方。
在 0.17 中,您可以通过从 AudioServiceBackground.setState
的 systemActions
参数中删除 seekTo
媒体操作来删除进度条,您似乎已经这样做了。来自 setState 文档:
Any other action you would like to enable for clients that is not a clickable notification button should be specified in the systemActions parameter. For example:
- MediaAction.seekTo (enable a seek bar)
在 0.18 中,您可以通过从 PlaybackState
的 systemActions
参数中删除 seek
媒体操作来执行相同的操作。来自 PlaybackState documentation:
Note that specifying [MediaAction.seek] in [systemActions] will enable a seek bar in both the Android notification and the iOS control center, but on Android, it will show only if the media item's duration has been set.