For Freezer 0.5.2

parent b268066d26
commit c169591d41

.github/ISSUE_TEMPLATE/bug_report.md (vendored, 36 changes)
@@ -1,13 +1,45 @@
 ---
 name: Bug report
-about: Create a report to help us improve
+about: Follow the instructions carefully on the next page.
 title: ''
 labels: 1 backlog, bug
 assignees: ryanheise
 
 ---
 
-<!-- ALL SECTIONS BELOW MUST BE COMPLETED -->
+<!--
+
+Note: Issues that don't follow these instructions will be closed,
+therefore please read them carefully.
+
+1. A bug report must demonstrate a bug in the plugin, and not merely a
+bug in your app. Understand that this plugin WILL throw exceptions
+or otherwise misbehave if not used in accordance with the
+documentation. In order to verify that you have indeed found a bug,
+you will need to make a reference to the documentation in order to
+explain how the actual behaviour you experienced is different from
+the behaviour that was documented. If the behaviour you want is
+undocumented, please submit either a documentation request or a
+feature request instead, whichever is more appropriate.
+
+2. You must supply a link to a minimal reproduction project and explain
+what steps I need to perform (as a user) in the app to reproduce the
+bug. A minimal reproduction project can be created by forking this
+project and making the minimal number of changes required to the
+example to reproduce the bug. Do not post code directly into the bug
+report, it must be a link to a git repo that I can clone and then
+immediately run.
+
+3. Leave all markdown formatting in this template intact. Do not modify
+the section headings in any way, and insert your answers below each
+section heading. Use code markdown (3 backticks) when inserting
+errors and logs, not only for readability, but also to avoid issue
+reference spamming using the # symbol.
+
+THANK YOU :-D
+
+
+-->
 
 **Which API doesn't behave as documented, and how does it misbehave?**
 Name here the specific methods or fields that are not behaving as documented, and explain clearly what is happening.
 
.github/ISSUE_TEMPLATE/config.yml (vendored, 8 changes)

@@ -1,8 +1,8 @@
 blank_issues_enabled: false
 contact_links:
-  - name: Community Support
+  - name: Stack Overflow
     url: https://stackoverflow.com/search?q=audio_service
-    about: Ask for help on Stack Overflow.
+    about: Ask here if it's not a bug report, documentation request or feature request.
-  - name: New to Flutter?
+  - name: Gitter
     url: https://gitter.im/flutter/flutter
-    about: Chat with other Flutter developers on Gitter.
+    about: Ask here if you want to have a live chat with other Flutter developers.
.github/ISSUE_TEMPLATE/documentation-request.md (vendored, 20 changes)

@@ -1,6 +1,6 @@
 ---
 name: Documentation request
-about: Suggest an improvement to the documentation
+about: Follow the instructions carefully on the next page.
 title: ''
 labels: 1 backlog, documentation
 assignees: ryanheise
@@ -9,15 +9,19 @@ assignees: ryanheise
 
 <!--
 
-PLEASE READ CAREFULLY!
+Note: Issues that don't follow these instructions will be closed,
+therefore please read them carefully.
 
+1. This form is not intended for asking questions or asking for
+support. For that, you are advised to ask your question on
+StackOverflow or Gitter. Instead, this form is intended for people
+who wish to help improve this plugin's documentation in a concrete
+way.
+
+2. To that end, it is required that you link to the specific
+page/section, and quote the words that are unclear (unless you are
+proposing an entirely new section), and describe how you would like
+it to be improved.
-FOR YOUR DOCUMENTATION REQUEST TO BE PROCESSED, YOU WILL NEED
-TO FILL IN ALL SECTIONS BELOW. DON'T DELETE THE HEADINGS.
 
 
 THANK YOU :-D
 
.github/ISSUE_TEMPLATE/feature_request.md (vendored, 31 changes)

@@ -1,6 +1,6 @@
 ---
 name: Feature request
-about: Suggest an idea for this project
+about: Follow the instructions carefully on the next page.
 title: ''
 labels: 1 backlog, enhancement
 assignees: ryanheise
@@ -9,15 +9,26 @@ assignees: ryanheise
 
 <!--
 
-PLEASE READ CAREFULLY!
+Note: Issues that don't follow these instructions will be closed,
+therefore please read them carefully.
 
+1. A prerequisite before requesting a feature is that you familiarise
+yourself with the existing features by reading the API
+documentation.
+
+2. If it is unclear from the documentation whether an existing feature
+is the one you want, this is a shortcoming of the documentation. In
+this case, please submit a documentation request instead.
+
+3. Do not use this form for asking questions. My goal is to provide
+good documentation that answers your questions, and if the
+documentation isn't doing its job, please submit a documentation
+request to help me to improve it. Remember that the purpose of this
+GitHub issues page is for plugin development and not for support
+(community support is available via StackOverflow and Gitter).
+
+4. You must complete at least the first 3 sections below. Leave the
+section headings intact, and insert your answers below each heading.
-FOR YOUR FEATURE REQUEST TO BE PROCESSED, YOU WILL NEED
-TO FILL IN ALL SECTIONS BELOW. DON'T DELETE THE HEADINGS.
 
 
 THANK YOU :-D
 
@@ -26,13 +37,13 @@ assignees: ryanheise
 
 
 **Is your feature request related to a problem? Please describe.**
-A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
+<!-- A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] -->
 
 **Describe the solution you'd like**
-A clear and concise description of what you want to happen.
+<!-- A clear and concise description of what you want to happen. -->
 
 **Describe alternatives you've considered**
-A clear and concise description of any alternative solutions or features you've considered.
+<!-- A clear and concise description of any alternative solutions or features you've considered. -->
 
 **Additional context**
-Add any other context or screenshots about the feature request here.
+<!-- Add any other context or screenshots about the feature request here. -->
CHANGELOG.md

@@ -1,3 +1,9 @@
+## 0.15.1
+
+* Fix loading of file:// artUri values.
+* Allow booleans/doubles in MediaItems.
+* Silently ignore duplicate onStop requests.
+
 ## 0.15.0
 
 * Web support (@keaganhilliard)
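As a rough illustration (not code from this commit; the id, paths and extras keys below are invented), the 0.15.1 entries above let a MediaItem carry a local file:// artUri, which is now loaded directly, and boolean/double values in its extras, which now round-trip through the Android metadata as a long and a string respectively:

    import 'package:audio_service/audio_service.dart';

    final item = MediaItem(
      id: 'https://example.com/track.mp3',                  // hypothetical stream URL
      album: 'Example Album',
      title: 'Example Track',
      duration: Duration(minutes: 3),
      artUri: 'file:///storage/emulated/0/Music/cover.jpg', // local artwork
      extras: {
        'downloaded': true, // boolean extra (stored as an extra_boolean_* long)
        'rating': 4.5,      // double extra (stored as an extra_double_* string)
      },
    );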
@@ -15,8 +15,6 @@ import android.graphics.BitmapFactory;
 import android.media.AudioAttributes;
 import android.media.AudioFocusRequest;
 import android.media.AudioManager;
-import android.media.MediaDescription;
-import android.media.MediaMetadata;
 import android.os.Build;
 import android.os.Bundle;
 import android.os.Handler;
@@ -29,7 +27,6 @@ import android.support.v4.media.RatingCompat;
 import android.support.v4.media.session.MediaControllerCompat;
 import android.support.v4.media.session.MediaSessionCompat;
 import android.support.v4.media.session.PlaybackStateCompat;
-import android.util.Log;
 import android.util.LruCache;
 import android.view.KeyEvent;
 
@@ -161,8 +158,13 @@ public class AudioService extends MediaBrowserServiceCompat {
 mediaSession.setActive(false);
 releaseWakeLock();
 stopForeground(true);
-notificationCreated = false;
 stopSelf();
+// This still does not solve the Android 11 problem.
+// if (notificationCreated) {
+// NotificationManager notificationManager = (NotificationManager)getSystemService(Context.NOTIFICATION_SERVICE);
+// notificationManager.cancel(NOTIFICATION_ID);
+// }
+notificationCreated = false;
 }
 
 public static boolean isRunning() {
@@ -377,7 +379,7 @@ public class AudioService extends MediaBrowserServiceCompat {
 wakeLock.release();
 }
 
-static MediaMetadataCompat createMediaMetadata(String mediaId, String album, String title, String artist, String genre, Long duration, String artUri, String displayTitle, String displaySubtitle, String displayDescription, RatingCompat rating, Map<?, ?> extras) {
+static MediaMetadataCompat createMediaMetadata(String mediaId, String album, String title, String artist, String genre, Long duration, String artUri, Boolean playable, String displayTitle, String displaySubtitle, String displayDescription, RatingCompat rating, Map<?, ?> extras) {
 MediaMetadataCompat.Builder builder = new MediaMetadataCompat.Builder()
 .putString(MediaMetadataCompat.METADATA_KEY_MEDIA_ID, mediaId)
 .putString(MediaMetadataCompat.METADATA_KEY_ALBUM, album)
@@ -402,7 +404,8 @@ public class AudioService extends MediaBrowserServiceCompat {
 }
 }
 }
+if (playable != null)
+builder.putLong("playable_long", playable ? 1 : 0);
 if (displayTitle != null)
 builder.putString(MediaMetadataCompat.METADATA_KEY_DISPLAY_TITLE, displayTitle);
 if (displaySubtitle != null)
@@ -422,6 +425,10 @@ public class AudioService extends MediaBrowserServiceCompat {
 builder.putLong("extra_long_" + key, (Integer)value);
 } else if (value instanceof String) {
 builder.putString("extra_string_" + key, (String)value);
+} else if (value instanceof Boolean) {
+builder.putLong("extra_boolean_" + key, (Boolean)value ? 1 : 0);
+} else if (value instanceof Double) {
+builder.putString("extra_double_" + key, value.toString());
 }
 }
 }
@@ -554,7 +561,6 @@ public class AudioService extends MediaBrowserServiceCompat {
 }
 
 public class MediaSessionCallback extends MediaSessionCompat.Callback {
-
 @Override
 public void onAddQueueItem(MediaDescriptionCompat description) {
 if (listener == null) return;
|
@ -60,7 +60,6 @@ import io.flutter.embedding.engine.dart.DartExecutor;
|
|||||||
import io.flutter.embedding.engine.dart.DartExecutor.DartCallback;
|
import io.flutter.embedding.engine.dart.DartExecutor.DartCallback;
|
||||||
|
|
||||||
import android.content.res.AssetManager;
|
import android.content.res.AssetManager;
|
||||||
import android.util.Log;
|
|
||||||
|
|
||||||
import io.flutter.view.FlutterNativeView;
|
import io.flutter.view.FlutterNativeView;
|
||||||
import io.flutter.view.FlutterRunArguments;
|
import io.flutter.view.FlutterRunArguments;
|
||||||
@ -997,6 +996,7 @@ public class AudioServicePlugin implements FlutterPlugin, ActivityAware {
|
|||||||
raw.put("genre", metadataToString(mediaMetadata, MediaMetadataCompat.METADATA_KEY_GENRE));
|
raw.put("genre", metadataToString(mediaMetadata, MediaMetadataCompat.METADATA_KEY_GENRE));
|
||||||
if (mediaMetadata.containsKey(MediaMetadataCompat.METADATA_KEY_DURATION))
|
if (mediaMetadata.containsKey(MediaMetadataCompat.METADATA_KEY_DURATION))
|
||||||
raw.put("duration", mediaMetadata.getLong(MediaMetadataCompat.METADATA_KEY_DURATION));
|
raw.put("duration", mediaMetadata.getLong(MediaMetadataCompat.METADATA_KEY_DURATION));
|
||||||
|
raw.put("playable", mediaMetadata.getLong("playable_long") != 0);
|
||||||
raw.put("displayTitle", metadataToString(mediaMetadata, MediaMetadataCompat.METADATA_KEY_DISPLAY_TITLE));
|
raw.put("displayTitle", metadataToString(mediaMetadata, MediaMetadataCompat.METADATA_KEY_DISPLAY_TITLE));
|
||||||
raw.put("displaySubtitle", metadataToString(mediaMetadata, MediaMetadataCompat.METADATA_KEY_DISPLAY_SUBTITLE));
|
raw.put("displaySubtitle", metadataToString(mediaMetadata, MediaMetadataCompat.METADATA_KEY_DISPLAY_SUBTITLE));
|
||||||
raw.put("displayDescription", metadataToString(mediaMetadata, MediaMetadataCompat.METADATA_KEY_DISPLAY_DESCRIPTION));
|
raw.put("displayDescription", metadataToString(mediaMetadata, MediaMetadataCompat.METADATA_KEY_DISPLAY_DESCRIPTION));
|
||||||
@ -1011,6 +1011,12 @@ public class AudioServicePlugin implements FlutterPlugin, ActivityAware {
|
|||||||
} else if (key.startsWith("extra_string_")) {
|
} else if (key.startsWith("extra_string_")) {
|
||||||
String rawKey = key.substring("extra_string_".length());
|
String rawKey = key.substring("extra_string_".length());
|
||||||
extras.put(rawKey, mediaMetadata.getString(key));
|
extras.put(rawKey, mediaMetadata.getString(key));
|
||||||
|
} else if (key.startsWith("extra_boolean_")) {
|
||||||
|
String rawKey = key.substring("extra_boolean_".length());
|
||||||
|
extras.put(rawKey, mediaMetadata.getLong(key) != 0);
|
||||||
|
} else if (key.startsWith("extra_double_")) {
|
||||||
|
String rawKey = key.substring("extra_double_".length());
|
||||||
|
extras.put(rawKey, new Double(mediaMetadata.getString(key)));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (extras.size() > 0) {
|
if (extras.size() > 0) {
|
||||||
@ -1028,6 +1034,7 @@ public class AudioServicePlugin implements FlutterPlugin, ActivityAware {
|
|||||||
(String)rawMediaItem.get("genre"),
|
(String)rawMediaItem.get("genre"),
|
||||||
getLong(rawMediaItem.get("duration")),
|
getLong(rawMediaItem.get("duration")),
|
||||||
(String)rawMediaItem.get("artUri"),
|
(String)rawMediaItem.get("artUri"),
|
||||||
|
(Boolean)rawMediaItem.get("playable"),
|
||||||
(String)rawMediaItem.get("displayTitle"),
|
(String)rawMediaItem.get("displayTitle"),
|
||||||
(String)rawMediaItem.get("displaySubtitle"),
|
(String)rawMediaItem.get("displaySubtitle"),
|
||||||
(String)rawMediaItem.get("displayDescription"),
|
(String)rawMediaItem.get("displayDescription"),
|
||||||
|
@@ -12,6 +12,7 @@
 <excludeFolder url="file://$MODULE_DIR$/example/build" />
 </content>
 <orderEntry type="sourceFolder" forTests="false" />
+<orderEntry type="library" name="Dart Packages" level="project" />
 <orderEntry type="library" name="Dart SDK" level="project" />
 <orderEntry type="library" name="Flutter Plugins" level="project" />
 </component>
@@ -329,8 +329,8 @@ class MediaItem {
 duration: raw['duration'] != null
 ? Duration(milliseconds: raw['duration'])
 : null,
-playable: raw['playable']??true,
 artUri: raw['artUri'],
+playable: raw['playable'],
 displayTitle: raw['displayTitle'],
 displaySubtitle: raw['displaySubtitle'],
 displayDescription: raw['displayDescription'],
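The `playable` flag passed through above is presumably what distinguishes a directly playable item from a browsable container when serving a media library. A hedged sketch (real field names, invented ids):

    import 'package:audio_service/audio_service.dart';

    // A browsable album node and a playable track inside it.
    final albumNode = MediaItem(
      id: 'albums/1',          // hypothetical id
      album: 'Example Album',
      title: 'Example Album',
      playable: false,         // container: browse only
    );
    final track = MediaItem(
      id: 'albums/1/tracks/1', // hypothetical id
      album: 'Example Album',
      title: 'Track 1',
      playable: true,          // leaf: can be played
    );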
@@ -592,6 +592,16 @@ class AudioService {
 static ReceivePort _customEventReceivePort;
 static StreamSubscription _customEventSubscription;
 
+/// A queue of tasks to be processed serially. Tasks that are processed on
+/// this queue:
+///
+/// - [connect]
+/// - [disconnect]
+/// - [start]
+///
+/// TODO: Queue other tasks? Note, only short-running tasks should be queued.
+static final _asyncTaskQueue = _AsyncTaskQueue();
+
 /// Connects to the service from your UI so that audio playback can be
 /// controlled.
 ///
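A sketch of the ordering this queue is intended to guarantee (the entrypoint below is a hypothetical stub): even when the calls are issued back to back without awaiting, they are processed one at a time in submission order, so start only runs after connect has completed.

    import 'package:audio_service/audio_service.dart';

    // Top-level entrypoint; in a real app this calls
    // AudioServiceBackground.run(() => YourBackgroundTask()).
    void _entrypoint() {}

    void bringUpAudio() {
      // Deliberately not awaited; the internal task queue still serialises them.
      AudioService.connect();
      AudioService.start(
        backgroundTaskEntrypoint: _entrypoint,
        androidNotificationChannelName: 'Audio Service',
      );
    }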
@@ -600,93 +610,95 @@ class AudioService {
 /// other methods in this class will work only while connected.
 ///
 /// Use [AudioServiceWidget] to handle this automatically.
-static Future<void> connect() async {
+static Future<void> connect() => _asyncTaskQueue.schedule(() async {
+if (_connected) return;
 _channel.setMethodCallHandler((MethodCall call) async {
 switch (call.method) {
 case 'onChildrenLoaded':
 final List<Map> args = List<Map>.from(call.arguments[0]);
 _browseMediaChildren =
 args.map((raw) => MediaItem.fromJson(raw)).toList();
 _browseMediaChildrenSubject.add(_browseMediaChildren);
 break;
 case 'onPlaybackStateChanged':
 // If this event arrives too late, ignore it.
 if (_afterStop) return;
 final List args = call.arguments;
 int actionBits = args[2];
 _playbackState = PlaybackState(
 processingState: AudioProcessingState.values[args[0]],
 playing: args[1],
 actions: MediaAction.values
 .where((action) => (actionBits & (1 << action.index)) != 0)
 .toSet(),
 position: Duration(milliseconds: args[3]),
 bufferedPosition: Duration(milliseconds: args[4]),
 speed: args[5],
 updateTime: Duration(milliseconds: args[6]),
 repeatMode: AudioServiceRepeatMode.values[args[7]],
 shuffleMode: AudioServiceShuffleMode.values[args[8]],
 );
 _playbackStateSubject.add(_playbackState);
 break;
 case 'onMediaChanged':
 _currentMediaItem = call.arguments[0] != null
 ? MediaItem.fromJson(call.arguments[0])
 : null;
 _currentMediaItemSubject.add(_currentMediaItem);
 break;
 case 'onQueueChanged':
 final List<Map> args = call.arguments[0] != null
 ? List<Map>.from(call.arguments[0])
 : null;
 _queue = args?.map((raw) => MediaItem.fromJson(raw))?.toList();
 _queueSubject.add(_queue);
 break;
 case 'onStopped':
 _browseMediaChildren = null;
 _browseMediaChildrenSubject.add(null);
 _playbackState = null;
 _playbackStateSubject.add(null);
 _currentMediaItem = null;
 _currentMediaItemSubject.add(null);
 _queue = null;
 _queueSubject.add(null);
 _notificationSubject.add(false);
 _running = false;
 _afterStop = true;
 break;
 case 'notificationClicked':
 _notificationSubject.add(call.arguments[0]);
 break;
 }
 });
 if (AudioService.usesIsolate) {
 _customEventReceivePort = ReceivePort();
 _customEventSubscription = _customEventReceivePort.listen((event) {
 _customEventSubject.add(event);
 });
 IsolateNameServer.removePortNameMapping(_CUSTOM_EVENT_PORT_NAME);
 IsolateNameServer.registerPortWithName(
 _customEventReceivePort.sendPort, _CUSTOM_EVENT_PORT_NAME);
 }
 await _channel.invokeMethod("connect");
 _running = await _channel.invokeMethod("isRunning");
 _connected = true;
-}
+});
 
 /// Disconnects your UI from the service.
 ///
 /// This method should be called when the UI is no longer visible.
 ///
 /// Use [AudioServiceWidget] to handle this automatically.
-static Future<void> disconnect() async {
+static Future<void> disconnect() => _asyncTaskQueue.schedule(() async {
+if (!_connected) return;
 _channel.setMethodCallHandler(null);
 _customEventSubscription?.cancel();
 _customEventSubscription = null;
 _customEventReceivePort = null;
 await _channel.invokeMethod("disconnect");
 _connected = false;
-}
+});
 
 /// True if the UI is connected.
 static bool get connected => _connected;
@@ -738,6 +750,12 @@ class AudioService {
 /// Android. If your app will run on Android and has a queue, you should set
 /// this to true.
 ///
+/// [androidStopForegroundOnPause] will switch the Android service to a lower
+/// priority state when playback is paused allowing the user to swipe away the
+/// notification. Note that while in this lower priority state, the operating
+/// system will also be able to kill your service at any time to reclaim
+/// resources.
+///
 /// This method waits for [BackgroundAudioTask.onStart] to complete, and
 /// completes with true if the task was successfully started, or false
 /// otherwise.
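A minimal sketch of opting in to the behaviour documented above (entrypoint stub hypothetical, other parameters omitted):

    import 'package:audio_service/audio_service.dart';

    void _entrypoint() {} // stands in for your real entrypoint

    Future<void> startPlayback() async {
      await AudioService.start(
        backgroundTaskEntrypoint: _entrypoint,
        androidNotificationChannelName: 'Audio Service',
        // Leave the foreground state while paused so the notification can be
        // swiped away; the OS may then also reclaim the service.
        androidStopForegroundOnPause: true,
      );
    }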
@@ -757,59 +775,62 @@ class AudioService {
 Duration fastForwardInterval = const Duration(seconds: 10),
 Duration rewindInterval = const Duration(seconds: 10),
 }) async {
+return await _asyncTaskQueue.schedule(() async {
+if (!_connected) throw Exception("Not connected");
 if (_running) return false;
 _running = true;
 _afterStop = false;
 ui.CallbackHandle handle;
 if (AudioService.usesIsolate) {
 handle = ui.PluginUtilities.getCallbackHandle(backgroundTaskEntrypoint);
 if (handle == null) {
 return false;
 }
 }
 var callbackHandle = handle?.toRawHandle();
 if (kIsWeb) {
 // Platform throws runtime exceptions on web
 } else if (Platform.isIOS) {
 // NOTE: to maintain compatibility between the Android and iOS
 // implementations, we ensure that the iOS background task also runs in
 // an isolate. Currently, the standard Isolate API does not allow
 // isolates to invoke methods on method channels. That may be fixed in
 // the future, but until then, we use the flutter_isolate plugin which
 // creates a FlutterNativeView for us, similar to what the Android
 // implementation does.
 // TODO: remove dependency on flutter_isolate by either using the
 // FlutterNativeView API directly or by waiting until Flutter allows
 // regular isolates to use method channels.
 await FlutterIsolate.spawn(_iosIsolateEntrypoint, callbackHandle);
 }
 final success = await _channel.invokeMethod('start', {
 'callbackHandle': callbackHandle,
 'params': params,
 'androidNotificationChannelName': androidNotificationChannelName,
 'androidNotificationChannelDescription':
 androidNotificationChannelDescription,
 'androidNotificationColor': androidNotificationColor,
 'androidNotificationIcon': androidNotificationIcon,
 'androidNotificationClickStartsActivity':
 androidNotificationClickStartsActivity,
 'androidNotificationOngoing': androidNotificationOngoing,
 'androidResumeOnClick': androidResumeOnClick,
 'androidStopForegroundOnPause': androidStopForegroundOnPause,
 'androidEnableQueue': androidEnableQueue,
 'androidArtDownscaleSize': androidArtDownscaleSize != null
 ? {
 'width': androidArtDownscaleSize.width,
 'height': androidArtDownscaleSize.height
 }
 : null,
 'fastForwardInterval': fastForwardInterval.inMilliseconds,
 'rewindInterval': rewindInterval.inMilliseconds,
 });
 _running = await _channel.invokeMethod("isRunning");
 if (!AudioService.usesIsolate) backgroundTaskEntrypoint();
 return success;
+});
 }
 
 /// Sets the parent of the children that [browseMediaChildrenStream] broadcasts.
@@ -1050,6 +1071,7 @@ class AudioServiceBackground {
 static List<MediaItem> _queue;
 static BaseCacheManager _cacheManager;
 static BackgroundAudioTask _task;
+static bool _running = false;
 
 /// The current media playback state.
 ///
@@ -1075,6 +1097,7 @@ class AudioServiceBackground {
 /// any requests by the client to play, pause and otherwise control audio
 /// playback.
 static Future<void> run(BackgroundAudioTask taskBuilder()) async {
+_running = true;
 _backgroundChannel =
 const MethodChannel('ryanheise.com/audioServiceBackground');
 WidgetsFlutterBinding.ensureInitialized();
@@ -1234,6 +1257,10 @@ class AudioServiceBackground {
 
 /// Shuts down the background audio task within the background isolate.
 static Future<void> _shutdown() async {
+if (!_running) return;
+// Set this to false immediately so that if duplicate shutdown requests come
+// through, they are ignored.
+_running = false;
 final audioSession = await AudioSession.instance;
 try {
 await audioSession.setActive(false);
@@ -1343,15 +1370,15 @@ class AudioServiceBackground {
 await _backgroundChannel.invokeMethod('setState', [
 rawControls,
 rawSystemActions,
-processingState.index,
+processingState?.index ?? AudioProcessingState.none.index,
-playing,
+playing ?? false,
-position.inMilliseconds,
+position?.inMilliseconds ?? 0,
-bufferedPosition.inMilliseconds,
+bufferedPosition?.inMilliseconds ?? 0,
-speed,
+speed ?? 1.0,
 updateTime?.inMilliseconds,
 androidCompactActions,
-repeatMode.index,
+repeatMode?.index ?? AudioServiceRepeatMode.none.index,
-shuffleMode.index,
+shuffleMode?.index ?? AudioServiceShuffleMode.none.index,
 ]);
 }
 
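With the fallbacks above, a background task can report a bare minimum of state; anything omitted (speed, buffered position, repeat and shuffle modes, and so on) is no longer sent to the platform side as a raw null. A sketch assuming the usual named parameters of AudioServiceBackground.setState; the icon resource name is an assumption borrowed from the example app conventions:

    import 'package:audio_service/audio_service.dart';

    final pauseControl = MediaControl(
      androidIcon: 'drawable/ic_action_pause', // assumed icon resource
      label: 'Pause',
      action: MediaAction.pause,
    );

    Future<void> reportPlaying() async {
      await AudioServiceBackground.setState(
        controls: [pauseControl],
        processingState: AudioProcessingState.ready,
        playing: true,
        position: Duration.zero,
        // bufferedPosition, speed, repeatMode, shuffleMode left to defaults.
      );
    }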
@@ -1371,19 +1398,22 @@ class AudioServiceBackground {
 _mediaItem = mediaItem;
 if (mediaItem.artUri != null) {
 // We potentially need to fetch the art.
-final fileInfo = _cacheManager.getFileFromMemory(mediaItem.artUri);
-String filePath = fileInfo?.file?.path;
+String filePath = _getLocalPath(mediaItem.artUri);
 if (filePath == null) {
+final fileInfo = _cacheManager.getFileFromMemory(mediaItem.artUri);
+filePath = fileInfo?.file?.path;
+if (filePath == null) {
 // We haven't fetched the art yet, so show the metadata now, and again
 // after we load the art.
 await _backgroundChannel.invokeMethod(
 'setMediaItem', mediaItem.toJson());
 // Load the art
 filePath = await _loadArtwork(mediaItem);
 // If we failed to download the art, abort.
 if (filePath == null) return;
 // If we've already set a new media item, cancel this request.
 if (mediaItem != _mediaItem) return;
+}
 }
 final extras = Map.of(mediaItem.extras ?? <String, dynamic>{});
 extras['artCacheFile'] = filePath;
@@ -1406,9 +1436,9 @@ class AudioServiceBackground {
 try {
 final artUri = mediaItem.artUri;
 if (artUri != null) {
-const prefix = 'file://';
-if (artUri.toLowerCase().startsWith(prefix)) {
-return artUri.substring(prefix.length);
+String local = _getLocalPath(artUri);
+if (local != null) {
+return local;
 } else {
 final file = await _cacheManager.getSingleFile(mediaItem.artUri);
 return file.path;
@@ -1418,6 +1448,14 @@ class AudioServiceBackground {
 return null;
 }
 
+static String _getLocalPath(String artUri) {
+const prefix = "file://";
+if (artUri.toLowerCase().startsWith(prefix)) {
+return artUri.substring(prefix.length);
+}
+return null;
+}
+
 /// Notifies clients that the child media items of [parentMediaId] have
 /// changed.
 ///
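For illustration, the new helper simply strips the file:// scheme and returns null for anything else, which is what lets setMediaItem and _loadArtwork bypass the cache manager for local artwork. A standalone mirror of that logic with invented example paths:

    String localPathOf(String artUri) {
      const prefix = 'file://';
      if (artUri.toLowerCase().startsWith(prefix)) {
        return artUri.substring(prefix.length);
      }
      return null; // not a local file; callers fall back to the cache manager
    }

    void main() {
      print(localPathOf('file:///data/user/0/com.example.app/cover.jpg'));
      // /data/user/0/com.example.app/cover.jpg
      print(localPathOf('https://example.com/cover.jpg')); // null
    }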
@@ -1668,7 +1706,16 @@ abstract class BackgroundAudioTask {
 Future<void> onTaskRemoved() async {}
 
 /// Called on Android when the user swipes away the notification. The default
-/// implementation (which you may override) calls [onStop].
+/// implementation (which you may override) calls [onStop]. Note that by
+/// default, the service runs in the foreground state which (despite the name)
+/// allows the service to run at a high priority in the background without the
+/// operating system killing it. While in the foreground state, the
+/// notification cannot be swiped away. You can pass a parameter value of
+/// `true` for `androidStopForegroundOnPause` in the [AudioService.start]
+/// method if you would like the service to exit the foreground state when
+/// playback is paused. This will allow the user to swipe the notification
+/// away while playback is paused (but it will also allow the operating system
+/// to kill your service at any time to free up resources).
 Future<void> onClose() => onStop();
 
 void _setParams({
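As a sketch of building on the documented default (the task and helper below are hypothetical, and a real task would also implement onStart, onPlay, onPause, onStop and so on):

    import 'package:audio_service/audio_service.dart';

    class MyAudioTask extends BackgroundAudioTask {
      @override
      Future<void> onClose() async {
        // Hypothetical cleanup before delegating to the default behaviour.
        await _savePlaybackPosition();
        return onStop();
      }

      Future<void> _savePlaybackPosition() async {
        // e.g. persist the last position so playback can resume on next launch.
      }

      // ... other callbacks (onStart, onPlay, onPause, onStop) go here ...
    }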
@@ -1686,7 +1733,8 @@ abstract class BackgroundAudioTask {
 int i = queue.indexOf(mediaItem);
 if (i == -1) return;
 int newIndex = i + offset;
-if (newIndex < queue.length) await onSkipToQueueItem(queue[newIndex]?.id);
+if (newIndex >= 0 && newIndex < queue.length)
+await onSkipToQueueItem(queue[newIndex]?.id);
 }
 }
 
@@ -1763,3 +1811,37 @@ class _AudioServiceWidgetState extends State<AudioServiceWidget>
 enum AudioServiceShuffleMode { none, all, group }
 
 enum AudioServiceRepeatMode { none, one, all, group }
+
+class _AsyncTaskQueue {
+final _queuedAsyncTaskController = StreamController<_AsyncTaskQueueEntry>();
+
+_AsyncTaskQueue() {
+_process();
+}
+
+Future<void> _process() async {
+await for (var entry in _queuedAsyncTaskController.stream) {
+try {
+final result = await entry.asyncTask();
+entry.completer.complete(result);
+} catch (e, stacktrace) {
+entry.completer.completeError(e, stacktrace);
+}
+}
+}
+
+Future<dynamic> schedule(_AsyncTask asyncTask) async {
+final completer = Completer<dynamic>();
+_queuedAsyncTaskController.add(_AsyncTaskQueueEntry(asyncTask, completer));
+return completer.future;
+}
+}
+
+class _AsyncTaskQueueEntry {
+final _AsyncTask asyncTask;
+final Completer completer;
+
+_AsyncTaskQueueEntry(this.asyncTask, this.completer);
+}
+
+typedef _AsyncTask = Future<dynamic> Function();
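A hedged usage sketch of the queue above (the parameter is typed dynamic because _AsyncTaskQueue itself is private; any object with the same schedule contract works): tasks run strictly one at a time in submission order, and each caller gets its own future back.

    Future<void> demo(dynamic queue) async {
      final first = queue.schedule(() async {
        await Future.delayed(Duration(milliseconds: 100));
        return 'first';
      });
      // Scheduled immediately, but does not start until the first task is done.
      final second = queue.schedule(() async => 'second');
      print(await first);  // first
      print(await second); // second
    }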
pubspec.yaml

@@ -1,6 +1,6 @@
 name: audio_service
 description: Flutter plugin to play audio in the background while the screen is off.
-version: 0.15.0
+version: 0.15.1
 homepage: https://github.com/ryanheise/audio_service
 
 environment: