Initial commit

This commit is contained in:
exttex 2020-09-18 19:25:00 +02:00
commit 73fce9905f
87 changed files with 7529 additions and 0 deletions

1
.github/FUNDING.yml vendored Normal file
View File

@@ -0,0 +1 @@
github: ryanheise

48
.github/ISSUE_TEMPLATE/bug_report.md vendored Normal file
View File

@@ -0,0 +1,48 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: 1 backlog, bug
assignees: ryanheise
---
<!-- ALL SECTIONS BELOW MUST BE COMPLETED -->
**Which API doesn't behave as documented, and how does it misbehave?**
Name here the specific methods or fields that are not behaving as documented, and explain clearly what is happening.
**Minimal reproduction project**
Provide a link here using one of two options:
1. Fork this repository and modify the example to reproduce the bug, then provide a link here.
2. If the unmodified official example already reproduces the bug, just write "The example".
**To Reproduce (i.e. user steps, not code)**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
**Error messages**
```
If applicable, copy & paste error message here, within the triple quotes to preserve formatting.
```
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots**
If applicable, add screenshots to help explain your problem.
**Runtime Environment (please complete the following information if relevant):**
- Device: [e.g. Samsung Galaxy Note 8]
- OS: [e.g. Android 8.0.0]
**Flutter SDK version**
```
insert output of "flutter doctor" here
```
**Additional context**
Add any other context about the problem here.

8
.github/ISSUE_TEMPLATE/config.yml vendored Normal file
View File

@@ -0,0 +1,8 @@
blank_issues_enabled: false
contact_links:
- name: Community Support
url: https://stackoverflow.com/search?q=audio_service
about: Ask for help on Stack Overflow.
- name: New to Flutter?
url: https://gitter.im/flutter/flutter
about: Chat with other Flutter developers on Gitter.

View File

@@ -0,0 +1,39 @@
---
name: Documentation request
about: Suggest an improvement to the documentation
title: ''
labels: 1 backlog, documentation
assignees: ryanheise
---
<!--
PLEASE READ CAREFULLY!
FOR YOUR DOCUMENTATION REQUEST TO BE PROCESSED, YOU WILL NEED
TO FILL IN ALL SECTIONS BELOW. DON'T DELETE THE HEADINGS.
THANK YOU :-D
-->
**To which pages does your suggestion apply?**
- Direct URL 1
- Direct URL 2
- ...
**Quote the sentence(s) from the documentation to be improved (if any)**
> Insert here. (Skip if you are proposing an entirely new section.)
**Describe your suggestion**
...

View File

@@ -0,0 +1,38 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: 1 backlog, enhancement
assignees: ryanheise
---
<!--
PLEASE READ CAREFULLY!
FOR YOUR FEATURE REQUEST TO BE PROCESSED, YOU WILL NEED
TO FILL IN ALL SECTIONS BELOW. DON'T DELETE THE HEADINGS.
THANK YOU :-D
-->
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.

View File

@@ -0,0 +1,19 @@
---
name: Frequently Asked Questions
about: Suggest a new question for the Wiki FAQ
title: ''
labels: 1 backlog, question
assignees: ryanheise
---
## Checklist
<!-- Replace [ ] with [x] to confirm an item in the checklist -->
- [ ] The question is not already in the FAQ.
- [ ] The question is not too narrow or specific to a particular application.
## Suggested Question
Write the question here.

12
.github/workflows/auto-close.yml vendored Normal file
View File

@@ -0,0 +1,12 @@
name: Autocloser
on: [issues]
jobs:
autoclose:
runs-on: ubuntu-latest
steps:
- name: Autoclose issues that did not follow issue template
uses: roots/issue-closer-action@v1.1
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
issue-close-message: "This issue was automatically closed because it did not follow the issue template."
issue-pattern: "Which API(.|[\\r\\n])*Minimal reproduction project(.|[\\r\\n])*To Reproduce|To which pages(.|[\\r\\n])*Describe your suggestion|Is your feature request(.|[\\r\\n])*Describe the solution you'd like"

19
.gitignore vendored Normal file
View File

@@ -0,0 +1,19 @@
.DS_Store
.dart_tool/
.packages
.pub/
pubspec.lock
build/
doc/
**/ios/Flutter/flutter_export_environment.sh
android/.project
example/android/.project
android/.classpath
android/.settings/org.eclipse.buildship.core.prefs
example/android/.settings/org.eclipse.buildship.core.prefs
example/android/app/.classpath
example/android/app/.project
example/android/app/.settings/org.eclipse.buildship.core.prefs

2
.idea/.gitignore vendored Normal file
View File

@@ -0,0 +1,2 @@
# Project exclude paths
/.

View File

@@ -0,0 +1,116 @@
<component name="ProjectCodeStyleConfiguration">
<code_scheme name="Project" version="173">
<codeStyleSettings language="XML">
<indentOptions>
<option name="CONTINUATION_INDENT_SIZE" value="4" />
</indentOptions>
<arrangement>
<rules>
<section>
<rule>
<match>
<AND>
<NAME>xmlns:android</NAME>
<XML_ATTRIBUTE />
<XML_NAMESPACE>^$</XML_NAMESPACE>
</AND>
</match>
</rule>
</section>
<section>
<rule>
<match>
<AND>
<NAME>xmlns:.*</NAME>
<XML_ATTRIBUTE />
<XML_NAMESPACE>^$</XML_NAMESPACE>
</AND>
</match>
<order>BY_NAME</order>
</rule>
</section>
<section>
<rule>
<match>
<AND>
<NAME>.*:id</NAME>
<XML_ATTRIBUTE />
<XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
</AND>
</match>
</rule>
</section>
<section>
<rule>
<match>
<AND>
<NAME>.*:name</NAME>
<XML_ATTRIBUTE />
<XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
</AND>
</match>
</rule>
</section>
<section>
<rule>
<match>
<AND>
<NAME>name</NAME>
<XML_ATTRIBUTE />
<XML_NAMESPACE>^$</XML_NAMESPACE>
</AND>
</match>
</rule>
</section>
<section>
<rule>
<match>
<AND>
<NAME>style</NAME>
<XML_ATTRIBUTE />
<XML_NAMESPACE>^$</XML_NAMESPACE>
</AND>
</match>
</rule>
</section>
<section>
<rule>
<match>
<AND>
<NAME>.*</NAME>
<XML_ATTRIBUTE />
<XML_NAMESPACE>^$</XML_NAMESPACE>
</AND>
</match>
<order>BY_NAME</order>
</rule>
</section>
<section>
<rule>
<match>
<AND>
<NAME>.*</NAME>
<XML_ATTRIBUTE />
<XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
</AND>
</match>
<order>ANDROID_ATTRIBUTE_ORDER</order>
</rule>
</section>
<section>
<rule>
<match>
<AND>
<NAME>.*</NAME>
<XML_ATTRIBUTE />
<XML_NAMESPACE>.*</XML_NAMESPACE>
</AND>
</match>
<order>BY_NAME</order>
</rule>
</section>
</rules>
</arrangement>
</codeStyleSettings>
</code_scheme>
</component>

View File

@@ -0,0 +1,19 @@
<component name="libraryTable">
<library name="Dart SDK">
<CLASSES>
<root url="file:///home/ryan/opt/flutter/bin/cache/dart-sdk/lib/async" />
<root url="file:///home/ryan/opt/flutter/bin/cache/dart-sdk/lib/collection" />
<root url="file:///home/ryan/opt/flutter/bin/cache/dart-sdk/lib/convert" />
<root url="file:///home/ryan/opt/flutter/bin/cache/dart-sdk/lib/core" />
<root url="file:///home/ryan/opt/flutter/bin/cache/dart-sdk/lib/developer" />
<root url="file:///home/ryan/opt/flutter/bin/cache/dart-sdk/lib/html" />
<root url="file:///home/ryan/opt/flutter/bin/cache/dart-sdk/lib/io" />
<root url="file:///home/ryan/opt/flutter/bin/cache/dart-sdk/lib/isolate" />
<root url="file:///home/ryan/opt/flutter/bin/cache/dart-sdk/lib/math" />
<root url="file:///home/ryan/opt/flutter/bin/cache/dart-sdk/lib/mirrors" />
<root url="file:///home/ryan/opt/flutter/bin/cache/dart-sdk/lib/typed_data" />
</CLASSES>
<JAVADOC />
<SOURCES />
</library>
</component>

View File

@@ -0,0 +1,15 @@
<component name="libraryTable">
<library name="Flutter Plugins" type="FlutterPluginsLibraryType">
<CLASSES>
<root url="file://$USER_HOME$/flutter/.pub-cache/hosted/pub.dartlang.org/flutter_isolate-1.0.0+14" />
<root url="file://$USER_HOME$/flutter/.pub-cache/hosted/pub.dartlang.org/sqflite-1.3.1+1" />
<root url="file://$USER_HOME$/flutter/.pub-cache/hosted/pub.dartlang.org/audio_session-0.0.7" />
<root url="file://$USER_HOME$/flutter/.pub-cache/hosted/pub.dartlang.org/path_provider_macos-0.0.4+4" />
<root url="file://$PROJECT_DIR$" />
<root url="file://$USER_HOME$/flutter/.pub-cache/hosted/pub.dartlang.org/path_provider-1.6.14" />
<root url="file://$USER_HOME$/flutter/.pub-cache/hosted/pub.dartlang.org/path_provider_linux-0.0.1+2" />
</CLASSES>
<JAVADOC />
<SOURCES />
</library>
</component>

View File

@@ -0,0 +1,9 @@
<component name="libraryTable">
<library name="Flutter for Android">
<CLASSES>
<root url="jar:///home/ryan/opt/flutter/bin/cache/artifacts/engine/android-arm/flutter.jar!/" />
</CLASSES>
<JAVADOC />
<SOURCES />
</library>
</component>

9
.idea/modules.xml Normal file
View File

@@ -0,0 +1,9 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/audio_service.iml" filepath="$PROJECT_DIR$/audio_service.iml" />
<module fileurl="file://$PROJECT_DIR$/audio_service_android.iml" filepath="$PROJECT_DIR$/audio_service_android.iml" />
</modules>
</component>
</project>

View File

@@ -0,0 +1,6 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="example/lib/main.dart" type="FlutterRunConfigurationType" factoryName="Flutter">
<option name="filePath" value="$PROJECT_DIR$/example/lib/main.dart" />
<method />
</configuration>
</component>

6
.idea/vcs.xml Normal file
View File

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="" vcs="Git" />
</component>
</project>

67
.idea/workspace.xml Normal file
View File

@@ -0,0 +1,67 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ChangeListManager">
<list default="true" id="10fe4e03-808b-4cca-b552-b754ebc9fc8e" name="Default Changelist" comment="">
<change beforePath="$PROJECT_DIR$/.idea/workspace.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/workspace.xml" afterDir="false" />
<change beforePath="$PROJECT_DIR$/lib/audio_service.dart" beforeDir="false" afterPath="$PROJECT_DIR$/lib/audio_service.dart" afterDir="false" />
</list>
<option name="EXCLUDED_CONVERTED_TO_IGNORED" value="true" />
<option name="SHOW_DIALOG" value="false" />
<option name="HIGHLIGHT_CONFLICTS" value="true" />
<option name="HIGHLIGHT_NON_ACTIVE_CHANGELIST" value="false" />
<option name="LAST_RESOLUTION" value="IGNORE" />
</component>
<component name="ExecutionTargetManager" SELECTED_TARGET="Android10" />
<component name="Git.Settings">
<option name="RECENT_GIT_ROOT_PATH" value="$PROJECT_DIR$" />
</component>
<component name="IgnoredFileRootStore">
<option name="generatedRoots">
<set>
<option value="$PROJECT_DIR$/.idea" />
</set>
</option>
</component>
<component name="ProjectId" id="1heOT4v7Yxgr9Nb2PRjB67yYpUz" />
<component name="PropertiesComponent">
<property name="dart.analysis.tool.window.force.activate" value="true" />
<property name="last_opened_file_path" value="$PROJECT_DIR$" />
<property name="show.migrate.to.gradle.popup" value="false" />
</component>
<component name="RunDashboard">
<option name="ruleStates">
<list>
<RuleState>
<option name="name" value="ConfigurationTypeDashboardGroupingRule" />
</RuleState>
<RuleState>
<option name="name" value="StatusDashboardGroupingRule" />
</RuleState>
</list>
</option>
</component>
<component name="SvnConfiguration">
<configuration />
</component>
<component name="TaskManager">
<task active="true" id="Default" summary="Default task">
<changelist id="10fe4e03-808b-4cca-b552-b754ebc9fc8e" name="Default Changelist" comment="" />
<created>1600368096000</created>
<option name="number" value="Default" />
<option name="presentableId" value="Default" />
<updated>1600368096000</updated>
</task>
<servers />
</component>
<component name="Vcs.Log.Tabs.Properties">
<option name="TAB_STATES">
<map>
<entry key="MAIN">
<value>
<State />
</value>
</entry>
</map>
</option>
</component>
</project>

254
CHANGELOG.md Normal file
View File

@@ -0,0 +1,254 @@
## 0.15.0
* Web support (@keaganhilliard)
* macOS support (@hacker1024)
* Route next/previous buttons to onClick on Android (@stonega)
* Correctly scale skip intervals for control center (@subhash279)
* Handle repeated stop/start calls more robustly.
* Fix Android 11 bugs.
## 0.14.1
* audio_session dependency now supports minSdkVersion 16 on Android.
## 0.14.0
* audio session management now handled by audio_session (see [Migration Guide](https://github.com/ryanheise/audio_service/wiki/Migration-Guide#0140)).
* Exceptions in background audio task are logged and forwarded to client.
## 0.13.0
* All BackgroundAudioTask callbacks are now async.
* Add default implementation of onSkipToNext/onSkipToPrevious.
* Bug fixes.
## 0.12.0
* Add setRepeatMode/setShuffleMode.
* Enable iOS Control Center buttons based on setState.
* Support seek forward/backward in iOS Control Center.
* Add default behaviour to BackgroundAudioTask.
* Bug fixes.
* Simplify example.
## 0.11.2
* Fix bug with album metadata on Android.
## 0.11.1
* Allow setting the iOS audio session category and options.
* Allow AudioServiceWidget to recognise swipe gesture on iOS.
* Check for null title and album on Android.
## 0.11.0
* Breaking change: onStop must await super.onStop to shutdown task.
* Fix Android memory leak.
## 0.10.0
* Replace androidStopOnRemoveTask with onTaskRemoved callback.
* Add onClose callback.
* Breaking change: new MediaButtonReceiver in AndroidManifest.xml.
## 0.9.0
* New state model: split into playing + processingState.
* androidStopForegroundOnPause ties foreground state to playing state.
* Add MediaItem.toJson/fromJson.
* Add AudioService.notificationClickEventStream (Android).
* Add AudioService.updateMediaItem.
* Add AudioService.setSpeed.
* Add PlaybackState.bufferedPosition.
* Add custom AudioService.start parameters.
* Rename replaceQueue -> updateQueue.
* Rename Android-specific start parameters with android- prefix.
* Use Duration type for all time values.
* Pass fastForward/rewind intervals through to background task.
* Allow connections from background contexts (e.g. android_alarm_manager).
* Unify iOS/Android focus APIs.
* Bug fixes and dependency updates.
## 0.8.0
* Allow UI to await the result of custom actions.
* Allow background to broadcast custom events to UI.
* Improve memory management for art bitmaps on Android.
* Convenience methods: replaceQueue, playMediaItem, addQueueItems.
* Bug fixes and dependency updates.
## 0.7.2
* Shutdown background task if task killed by OS (Android).
* Bug fixes and dependency updates.
## 0.7.1
* Add AudioServiceWidget to auto-manage connections.
* Allow file URIs for artUri.
## 0.7.0
* Support skip forward/backward in command center (iOS).
* Add 'extras' field to MediaItem.
* Artwork caching and preloading supported on Android+iOS.
* Bug fixes.
## 0.6.2
* Bug fixes.
## 0.6.1
* Option to stop service on closing task (Android).
## 0.6.0
* Migrated to V2 embedding API (Flutter 1.12).
## 0.5.7
* Destroy isolates after use.
## 0.5.6
* Support Flutter 1.12.
## 0.5.5
* Bump sdk version to 2.6.0.
## 0.5.4
* Fix Android memory leak.
## 0.5.3
* Support Queue, album art and other missing features on iOS.
## 0.5.2
* Update documentation and example.
## 0.5.1
* Playback state broadcast on connect (iOS).
## 0.5.0
* Partial iOS support.
## 0.4.2
* Option to call stopForeground on pause.
## 0.4.1
* Fix queue support bug
## 0.4.0
* Breaking change: AudioServiceBackground.run takes a single parameter.
## 0.3.1
* Update example to disconnect when pressing back button.
## 0.3.0
* Breaking change: updateTime now measured since epoch instead of boot time.
## 0.2.1
* Streams use RxDart BehaviorSubject.
## 0.2.0
* Migrate to AndroidX.
## 0.1.1
* Bump targetSdkVersion to 28
* Clear client-side metadata and state on stop.
## 0.1.0
* onClick is now always called for media button clicks.
* Option to set notifications as ongoing.
## 0.0.15
* Option to set subText in notification.
* Support media item ratings
## 0.0.14
* Can update existing media items.
* Can specify order of Android notification compact actions.
* Bug fix with connect.
## 0.0.13
* Option to preload artwork.
* Allow client to browse media items.
## 0.0.12
* More options to customise the notification content.
## 0.0.11
* Breaking API changes.
* Connection callbacks replaced by a streams API.
* AudioService properties for playbackState, currentMediaItem, queue.
* Option to set Android notification channel description.
* AudioService.customAction awaits completion of the action.
## 0.0.10
* Bug fixes with queue management.
* AudioService.start completes when the background task is ready.
## 0.0.9
* Support queue management.
## 0.0.8
* Bug fix.
## 0.0.7
* onMediaChanged takes MediaItem parameter.
* Support playFromMediaId, fastForward, rewind.
## 0.0.6
* All APIs address media items by String mediaId.
## 0.0.5
* Show media art in notification and lock screen.
## 0.0.4
* Support and example for playing TextToSpeech.
* Click notification to launch UI.
* More properties added to MediaItem.
* Minor API changes.
## 0.0.3
* Pause now keeps background isolate running
* Notification channel id is generated from package name
* Updated example to use audioplayer plugin
* Fixed media button handling
## 0.0.2
* Better connection handling.
## 0.0.1
* Initial release.

21
LICENSE Normal file
View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2018-2020 Ryan Heise and the project contributors.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

269
README.md Normal file
View File

@@ -0,0 +1,269 @@
# audio_service
This plugin wraps around your existing audio code to allow it to run in the background or with the screen turned off, and allows your app to interact with headset buttons, the Android lock screen and notification, iOS control center, wearables and Android Auto. It is suitable for:
* Music players
* Text-to-speech readers
* Podcast players
* Navigators
* More!
## How does this plugin work?
You encapsulate your audio code in a background task which runs in a special isolate that continues to run when your UI is absent. Your background task implements callbacks to respond to playback requests coming from your Flutter UI, headset buttons, the lock screen, notification, iOS control center, car displays and smart watches:
![audio_service_callbacks](https://user-images.githubusercontent.com/19899190/84386442-b305cc80-ac34-11ea-8c2f-1b4cb126a98d.png)
You can implement these callbacks to play any sort of audio that is appropriate for your app, such as music files or streams, audio assets, text to speech, synthesised audio, or combinations of these.
| Feature | Android | iOS | macOS | Web |
| ------- | :-------: | :-----: | :-----: | :-----: |
| background audio | ✅ | ✅ | ✅ | ✅ |
| headset clicks | ✅ | ✅ | ✅ | ✅ |
| start/stop/play/pause/seek/rate | ✅ | ✅ | ✅ | ✅ |
| fast forward/rewind | ✅ | ✅ | ✅ | ✅ |
| repeat/shuffle mode | ✅ | ✅ | ✅ | ✅ |
| queue manipulation, skip next/prev | ✅ | ✅ | ✅ | ✅ |
| custom actions | ✅ | ✅ | ✅ | ✅ |
| custom events | ✅ | ✅ | ✅ | ✅ |
| notifications/control center | ✅ | ✅ | ✅ | ✅ |
| lock screen controls | ✅ | ✅ | | ✅ |
| album art | ✅ | ✅ | ✅ | ✅ |
| Android Auto, Apple CarPlay | (untested) | ✅ | | |
If you'd like to help with any missing features, please join us on the [GitHub issues page](https://github.com/ryanheise/audio_service/issues).
## Migrating to 0.14.0
Audio focus, interruptions (e.g. phone calls), mixing, ducking and the configuration of your app's audio category and attributes, are now handled by the [audio_session](https://pub.dev/packages/audio_session) package. Read the [Migration Guide](https://github.com/ryanheise/audio_service/wiki/Migration-Guide#0140) for details.
## Can I make use of other plugins within the background audio task?
Yes! `audio_service` is designed to let you implement the audio logic however you want, using whatever plugins you want. You can use your favourite audio plugins such as [just_audio](https://pub.dartlang.org/packages/just_audio), [flutter_radio](https://pub.dev/packages/flutter_radio), [flutter_tts](https://pub.dartlang.org/packages/flutter_tts), and others, within your background audio task. There are also plugins like [just_audio_service](https://github.com/yringler/just_audio_service) that provide default implementations of `BackgroundAudioTask` to make your job easier.
Note that this plugin will not work with other audio plugins that overlap in responsibility with this plugin (i.e. background audio, iOS control center, Android notifications, lock screen, headset buttons, etc.)
## Example
### Background code
Your audio code will run in a special background isolate, separate and detachable from your app's UI. To achieve this, define a subclass of `BackgroundAudioTask` that overrides a set of callbacks to respond to client requests:
```dart
class MyBackgroundTask extends BackgroundAudioTask {
// Initialise your audio task.
onStart(Map<String, dynamic> params) {}
// Handle a request to stop audio and finish the task.
onStop() async {}
// Handle a request to play audio.
onPlay() {}
// Handle a request to pause audio.
onPause() {}
// Handle a headset button click (play/pause, skip next/prev).
onClick(MediaButton button) {}
// Handle a request to skip to the next queue item.
onSkipToNext() {}
// Handle a request to skip to the previous queue item.
onSkipToPrevious() {}
// Handle a request to seek to a position.
onSeekTo(Duration position) {}
}
```
You can implement these (and other) callbacks to play any type of audio depending on the requirements of your app. For example, if you are building a podcast player, you may have code such as the following:
```dart
import 'package:just_audio/just_audio.dart';
class PodcastBackgroundTask extends BackgroundAudioTask {
AudioPlayer _player = AudioPlayer();
onPlay() async {
_player.play();
// Show the media notification, and let all clients know what
// playback state and media item to display.
await AudioServiceBackground.setState(playing: true, ...);
await AudioServiceBackground.setMediaItem(MediaItem(title: "Hey Jude", ...))
}
```
If you are instead building a text-to-speech reader, you may have code such as the following:
```dart
import 'package:flutter_tts/flutter_tts.dart';
class ReaderBackgroundTask extends BackgroundAudioTask {
FlutterTts _tts = FlutterTts();
String article;
onPlay() async {
_tts.speak(article);
// Show the media notification, and let all clients know what
// playback state and media item to display.
await AudioServiceBackground.setState(playing: true, ...);
await AudioServiceBackground.setMediaItem(MediaItem(album: "Business Insider", ...))
}
}
```
There are several methods in the `AudioServiceBackground` class that are made available to your background audio task to allow it to communicate to clients outside the isolate, such as your Flutter UI (if present), the iOS control center, the Android notification and lock screen. These are:
* `AudioServiceBackground.setState` broadcasts the current playback state to all clients. This includes whether or not audio is playing, but also whether audio is buffering, the current playback position and buffer position, the current playback speed, and the set of audio controls that should be made available. When you broadcast this information to all clients, it allows them to update their user interfaces to show the appropriate set of buttons, and show the correct audio position on seek bars, for example. It is important for you to call this method whenever any of these pieces of state changes. You will typically want to call this method from your `onStart`, `onPlay`, `onPause`, `onSkipToNext`, `onSkipToPrevious` and `onStop` callbacks.
* `AudioServiceBackground.setMediaItem` broadcasts the currently playing media item to all clients. This includes the track title, artist, genre, duration, any artwork to display, and other information. When you broadcast this information to all clients, it allows them to update their user interface accordingly so that it is displayed on the lock screen, the notification, and in your Flutter UI (if present). You will typically want to call this method from your `onStart`, `onSkipToNext` and `onSkipToPrevious` callbacks.
* `AudioServiceBackground.setQueue` broadcasts the current queue to all clients. Some clients like Android Auto may display this information in their user interfaces. You will typically want to call this method from your `onStart` callback. Other callbacks exist where it may be appropriate to call this method such as `onAddQueueItem` and `onRemoveQueueItem`.
### UI code
Connecting to `AudioService`:
```dart
// Wrap your "/" route's widget tree in an AudioServiceWidget:
return MaterialApp(
home: AudioServiceWidget(MainScreen()),
);
```
Starting your background audio task:
```dart
await AudioService.start(
backgroundTaskEntrypoint: _myEntrypoint,
androidNotificationIcon: 'mipmap/ic_launcher',
// An example of passing custom parameters.
// These will be passed through to your `onStart` callback.
params: {'url': 'https://somewhere.com/sometrack.mp3'},
);
// this must be a top-level function
void _myEntrypoint() => AudioServiceBackground.run(() => MyBackgroundTask());
```
Sending messages to it:
* `AudioService.play()`
* `AudioService.pause()`
* `AudioService.click()`
* `AudioService.skipToNext()`
* `AudioService.skipToPrevious()`
* `AudioService.seekTo(Duration(seconds: 53))`
Shutting it down:
```dart
// This will pass through to your `onStop` callback.
AudioService.stop();
```
Reacting to state changes:
* `AudioService.playbackStateStream` (e.g. playing/paused, buffering/ready)
* `AudioService.currentMediaItemStream` (metadata about the currently playing media item)
* `AudioService.queueStream` (the current queue/playlist)
Keep in mind that your UI and background task run in separate isolates and do not share memory. The only way they communicate is via message passing. Your Flutter UI will only use the `AudioService` API to communicate with the background task, while your background task will only use the `AudioServiceBackground` API to interact with the clients, which include the Flutter UI.
### Connecting to `AudioService` from the background
You can also send messages to your background audio task from another background callback (e.g. android_alarm_manager) by manually connecting to it:
```dart
await AudioService.connect(); // Note: the "await" is necessary!
AudioService.play();
```
## Configuring the audio session
If your app uses audio, you should tell the operating system what kind of usage scenario your app has and how your app will interact with other audio apps on the device. Different audio apps often have unique requirements. For example, when a navigator app speaks driving instructions, a music player should duck its audio while a podcast player should pause its audio. Depending on which one of these three apps you are building, you will need to configure your app's audio settings and callbacks to appropriately handle these interactions.
Use the [audio_session](https://pub.dev/packages/audio_session) package to change the default audio session configuration for your app. E.g. for a podcast player, you may use:
```dart
final session = await AudioSession.instance;
await session.configure(AudioSessionConfiguration.speech());
```
Each time you invoke an audio plugin to play audio, that plugin will activate your app's shared audio session to inform the operating system that your app is actively playing audio. Depending on the configuration set above, this will also inform other audio apps to either stop playing audio, or possibly continue playing at a lower volume (i.e. ducking). You normally do not need to activate the audio session yourself, however if the audio plugin you use does not activate the audio session, you can activate it yourself:
```dart
// Activate the audio session before playing audio.
if (await session.setActive(true)) {
// Now play audio.
} else {
// The request was denied and the app should not play audio
}
```
When another app activates its audio session, it similarly may ask your app to pause or duck its audio. Once again, the particular audio plugin you use may automatically pause or duck audio when requested. However, if it does not, you can respond to these events yourself by listening to `session.interruptionEventStream`. Similarly, if the audio plugin doesn't handle unplugged headphone events, you can respond to these yourself by listening to `session.becomingNoisyEventStream`. For more information, consult the documentation for [audio_session](https://pub.dev/packages/audio_session).
Note: If your app uses a number of different audio plugins, e.g. for audio recording, or text to speech, or background audio, it is possible that those plugins may internally override each other's audio session settings since there is only a single audio session shared by your app. Therefore, it is recommended that you apply your own preferred configuration using audio_session after all other audio plugins have loaded. You may consider asking the developer of each audio plugin you use to provide an option to not overwrite these global settings and allow them be managed externally.
## Android setup
These instructions assume that your project follows the new project template introduced in Flutter 1.12. If your project was created prior to 1.12 and uses the old project structure, you can update your project to follow the [new project template](https://github.com/flutter/flutter/wiki/Upgrading-pre-1.12-Android-projects).
Additionally:
1. Edit your project's `AndroidManifest.xml` file to declare the permission to create a wake lock, and add component entries for the `<service>` and `<receiver>`:
```xml
<manifest ...>
<uses-permission android:name="android.permission.WAKE_LOCK"/>
<uses-permission android:name="android.permission.FOREGROUND_SERVICE"/>
<application ...>
...
<service android:name="com.ryanheise.audioservice.AudioService">
<intent-filter>
<action android:name="android.media.browse.MediaBrowserService" />
</intent-filter>
</service>
<receiver android:name="com.ryanheise.audioservice.MediaButtonReceiver" >
<intent-filter>
<action android:name="android.intent.action.MEDIA_BUTTON" />
</intent-filter>
</receiver>
</application>
</manifest>
```
2. Starting from Flutter 1.12, you will need to disable the `shrinkResources` setting in your `android/app/build.gradle` file, otherwise the icon resources used in the Android notification will be removed during the build:
```
android {
compileSdkVersion 28
...
buildTypes {
release {
signingConfig ...
shrinkResources false // ADD THIS LINE
}
}
}
```
## iOS setup
Insert this in your `Info.plist` file:
```
<key>UIBackgroundModes</key>
<array>
<string>audio</string>
</array>
```
The example project may be consulted for context.
## macOS setup
The minimum supported macOS version is 10.12.2 (though this could be changed with some work in the future).
Modify the platform line in `macos/Podfile` to look like the following:
```
platform :osx, '10.12.2'
```
# Where can I find more information?
* [Tutorial](https://github.com/ryanheise/audio_service/wiki/Tutorial): walks you through building a simple audio player while explaining the basic concepts.
* [Full example](https://github.com/ryanheise/audio_service/blob/master/example/lib/main.dart): The `example` subdirectory on GitHub demonstrates both music and text-to-speech use cases.
* [Frequently Asked Questions](https://github.com/ryanheise/audio_service/wiki/FAQ)
* [API documentation](https://pub.dev/documentation/audio_service/latest/audio_service/audio_service-library.html)

8
android/.gitignore vendored Normal file
View File

@ -0,0 +1,8 @@
# IDE, Gradle and OS artifacts that must not be committed.
*.iml
.gradle
/local.properties
/.idea/workspace.xml
/.idea/libraries
.DS_Store
/build
/captures

39
android/build.gradle Normal file
View File

@ -0,0 +1,39 @@
// Maven coordinates for the published Android plugin artifact.
group 'com.ryanheise.audioservice'
version '1.0-SNAPSHOT'
// Build-script classpath: the Android Gradle Plugin itself.
// NOTE(review): jcenter() has since been sunset upstream — consider
// mavenCentral() when next upgrading; verify against the Gradle/AGP versions.
buildscript {
repositories {
google()
jcenter()
}
dependencies {
classpath 'com.android.tools.build:gradle:3.5.0'
}
}
// Dependency repositories for all projects in this build.
rootProject.allprojects {
repositories {
google()
jcenter()
}
}
apply plugin: 'com.android.library'
android {
compileSdkVersion 28
defaultConfig {
// Oldest Android release the plugin supports (Android 4.1).
minSdkVersion 16
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
}
lintOptions {
// Flutter plugin projects conventionally suppress this lint check.
disable 'InvalidPackage'
}
}
// AndroidX media/session support used by AudioService.
dependencies {
implementation 'androidx.core:core:1.1.0'
implementation 'androidx.media:media:1.1.0'
}

View File

@ -0,0 +1,4 @@
# Gradle daemon heap size.
org.gradle.jvmargs=-Xmx1536M
# Use the R8 shrinker and AndroidX (with Jetifier rewriting legacy support libs).
android.enableR8=true
android.useAndroidX=true
android.enableJetifier=true

Binary file not shown.

View File

@ -0,0 +1,6 @@
#Thu Sep 17 20:40:30 CEST 2020
# Generated by the Gradle wrapper task; pins the build to Gradle 5.6.4.
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.4-all.zip

172
android/gradlew vendored Normal file
View File

@ -0,0 +1,172 @@
#!/usr/bin/env sh
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# NOTE(review): vendored Gradle wrapper launcher (generated alongside
# gradle-wrapper.properties). Do not hand-edit; regenerate via the
# `gradle wrapper` task when upgrading Gradle.
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
warn () {
echo "$*"
}
die () {
echo
echo "$*"
echo
exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
NONSTOP* )
nonstop=true
;;
esac
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=$((i+1))
done
case $i in
(0) set -- ;;
(1) set -- "$args0" ;;
(2) set -- "$args0" "$args1" ;;
(3) set -- "$args0" "$args1" "$args2" ;;
(4) set -- "$args0" "$args1" "$args2" "$args3" ;;
(5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
(6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
(7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
(8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
(9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Escape application args
save () {
for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
echo " "
}
APP_ARGS=$(save "$@")
# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
cd "$(dirname "$0")"
fi
exec "$JAVACMD" "$@"

84
android/gradlew.bat vendored Normal file
View File

@ -0,0 +1,84 @@
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem  Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem NOTE(review): vendored Gradle wrapper launcher (generated). Do not
@rem hand-edit; regenerate via the `gradle wrapper` task when upgrading.
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto init
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:init
@rem Get command-line arguments, handling Windows variants
if not "%OS%" == "Windows_NT" goto win9xME_args
:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2
:win9xME_args_slurp
if "x%~1" == "x" goto execute
set CMD_LINE_ARGS=%*
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega

1
android/settings.gradle Normal file
View File

@ -0,0 +1 @@
// Gradle project name under which the plugin is built and published.
rootProject.name = 'audio_service'

View File

@ -0,0 +1,3 @@
<!-- Library manifest: only declares the plugin's Java package. Permissions
     and component entries are declared by the host app's manifest. -->
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
  package="com.ryanheise.audioservice">
</manifest>

View File

@ -0,0 +1,8 @@
package com.ryanheise.audioservice;
/**
 * Kinds of audio-focus interruption reported to the Dart side.
 * Names are lowerCamelCase to mirror the corresponding Dart enum values.
 */
public enum AudioInterruption {
	pause,
	temporaryPause,
	temporaryDuck,
	unknownPause,
}

View File

@ -0,0 +1,16 @@
package com.ryanheise.audioservice;
/**
 * Audio processing states mirrored from the Dart-side enum; mapped onto
 * PlaybackStateCompat states in AudioService.getPlaybackState().
 * Declaration order must stay in sync with the Dart enum indices.
 */
public enum AudioProcessingState {
	none,
	connecting,
	ready,
	buffering,
	fastForwarding,
	rewinding,
	skippingToPrevious,
	skippingToNext,
	skippingToQueueItem,
	completed,
	stopped,
	error,
}

View File

@ -0,0 +1,805 @@
package com.ryanheise.audioservice;
import android.app.Activity;
import android.app.Notification;
import android.app.NotificationChannel;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.BroadcastReceiver;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.media.AudioAttributes;
import android.media.AudioFocusRequest;
import android.media.AudioManager;
import android.media.MediaDescription;
import android.media.MediaMetadata;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.os.PowerManager;
import android.support.v4.media.MediaBrowserCompat;
import android.support.v4.media.MediaDescriptionCompat;
import android.support.v4.media.MediaMetadataCompat;
import android.support.v4.media.RatingCompat;
import android.support.v4.media.session.MediaControllerCompat;
import android.support.v4.media.session.MediaSessionCompat;
import android.support.v4.media.session.PlaybackStateCompat;
import android.util.Log;
import android.util.LruCache;
import android.view.KeyEvent;
import androidx.annotation.RequiresApi;
import androidx.core.app.NotificationCompat;
import androidx.media.MediaBrowserServiceCompat;
import androidx.media.app.NotificationCompat.MediaStyle;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
// Foreground media service: hosts the MediaSession, renders the media-style
// notification, and forwards all transport controls to a ServiceListener.
// Most state is static because the service is configured via static init()
// before the Android service instance exists.
public class AudioService extends MediaBrowserServiceCompat {
// Fixed id for the single media notification / startForeground call.
private static final int NOTIFICATION_ID = 1124;
private static final int REQUEST_CONTENT_INTENT = 1000;
// Root id returned to all media browser clients.
private static final String MEDIA_ROOT_ID = "root";
// See the comment in onMediaButtonEvent to understand how the BYPASS keycodes work.
// We hijack KEYCODE_MUTE and KEYCODE_MEDIA_RECORD since the media session subsystem
// considers these keycodes relevant to media playback and will pass them on to us.
public static final int KEYCODE_BYPASS_PLAY = KeyEvent.KEYCODE_MUTE;
public static final int KEYCODE_BYPASS_PAUSE = KeyEvent.KEYCODE_MEDIA_RECORD;
// Android media notifications show at most 3 actions in compact view.
public static final int MAX_COMPACT_ACTIONS = 3;
// True between init() and stop(); volatile because it is read across threads.
private static volatile boolean running;
static AudioService instance;
// Launches the host activity when the notification is tapped.
private static PendingIntent contentIntent;
private static boolean resumeOnClick;
private static ServiceListener listener;
// Notification channel / appearance configuration supplied by init().
static String androidNotificationChannelName;
static String androidNotificationChannelDescription;
static Integer notificationColor;
static String androidNotificationIcon;
static boolean androidNotificationClickStartsActivity;
static boolean androidNotificationOngoing;
static boolean androidStopForegroundOnPause;
// Current queue and playback state mirrored from the Dart side.
private static List<MediaSessionCompat.QueueItem> queue = new ArrayList<MediaSessionCompat.QueueItem>();
private static int queueIndex = -1;
// Metadata keyed by media id; art bitmaps cached by file path.
private static Map<String, MediaMetadataCompat> mediaMetadataCache = new HashMap<>();
private static Set<String> artUriBlacklist = new HashSet<>();
private static LruCache<String, Bitmap> artBitmapCache;
private static Size artDownscaleSize;
private static boolean playing = false;
private static AudioProcessingState processingState = AudioProcessingState.none;
private static int repeatMode;
private static int shuffleMode;
// Whether startForeground has posted the notification at least once.
private static boolean notificationCreated;
/**
 * Configures the (single) service before it is started. Must be called at
 * most once per session; throws if the service is already running. Resets
 * all mutable static state and sizes the album-art LruCache.
 */
public static void init(Activity activity, boolean resumeOnClick, String androidNotificationChannelName, String androidNotificationChannelDescription, String action, Integer notificationColor, String androidNotificationIcon, boolean androidNotificationClickStartsActivity, boolean androidNotificationOngoing, boolean androidStopForegroundOnPause, Size artDownscaleSize, ServiceListener listener) {
if (running)
throw new IllegalStateException("AudioService already running");
running = true;
// Pending intent that reopens the host activity from the notification.
Context context = activity.getApplicationContext();
Intent intent = new Intent(context, activity.getClass());
intent.setAction(action);
contentIntent = PendingIntent.getActivity(context, REQUEST_CONTENT_INTENT, intent, PendingIntent.FLAG_UPDATE_CURRENT);
AudioService.listener = listener;
AudioService.resumeOnClick = resumeOnClick;
AudioService.androidNotificationChannelName = androidNotificationChannelName;
AudioService.androidNotificationChannelDescription = androidNotificationChannelDescription;
AudioService.notificationColor = notificationColor;
AudioService.androidNotificationIcon = androidNotificationIcon;
AudioService.androidNotificationClickStartsActivity = androidNotificationClickStartsActivity;
AudioService.androidNotificationOngoing = androidNotificationOngoing;
AudioService.androidStopForegroundOnPause = androidStopForegroundOnPause;
AudioService.artDownscaleSize = artDownscaleSize;
// Reset playback state for the new session.
notificationCreated = false;
playing = false;
processingState = AudioProcessingState.none;
repeatMode = 0;
shuffleMode = 0;
// Get max available VM memory, exceeding this amount will throw an
// OutOfMemory exception. Stored in kilobytes as LruCache takes an
// int in its constructor.
final int maxMemory = (int)(Runtime.getRuntime().maxMemory() / 1024);
// Use 1/8th of the available memory for this memory cache.
final int cacheSize = maxMemory / 8;
artBitmapCache = new LruCache<String, Bitmap>(cacheSize) {
@Override
protected int sizeOf(String key, Bitmap bitmap) {
// The cache size will be measured in kilobytes rather than
// number of items.
return bitmap.getByteCount() / 1024;
}
};
}
// Read-only accessors for the current session state (mirrored from Dart).
public static AudioProcessingState getProcessingState() {
return processingState;
}
public static boolean isPlaying() {
return playing;
}
public static int getRepeatMode() {
return repeatMode;
}
public static int getShuffleMode() {
return shuffleMode;
}
/**
 * Tears the session down: clears all static configuration and caches,
 * deactivates the media session, drops the wake lock, removes the
 * notification and stops the service. Order matters here — the session is
 * deactivated before stopForeground/stopSelf.
 */
public void stop() {
running = false;
mediaMetadata = null;
resumeOnClick = false;
listener = null;
androidNotificationChannelName = null;
androidNotificationChannelDescription = null;
notificationColor = null;
androidNotificationIcon = null;
artDownscaleSize = null;
queue.clear();
queueIndex = -1;
mediaMetadataCache.clear();
actions.clear();
artBitmapCache.evictAll();
compactActionIndices = null;
// Publish the now-empty queue before deactivating the session.
mediaSession.setQueue(queue);
mediaSession.setActive(false);
releaseWakeLock();
// true: also remove the notification.
stopForeground(true);
notificationCreated = false;
stopSelf();
}
// Whether init() has been called and stop() has not.
public static boolean isRunning() {
return running;
}
// Keeps the CPU on while audio plays in the background.
private PowerManager.WakeLock wakeLock;
private MediaSessionCompat mediaSession;
private MediaSessionCallback mediaSessionCallback;
private MediaMetadataCompat preparedMedia;
// Notification action buttons and which of them appear in compact view.
private List<NotificationCompat.Action> actions = new ArrayList<NotificationCompat.Action>();
private int[] compactActionIndices;
// Metadata of the item currently shown in the session/notification.
private MediaMetadataCompat mediaMetadata;
private Object audioFocusRequest;
private String notificationChannelId;
private Handler handler = new Handler(Looper.getMainLooper());
// Resolves a "type/name" resource specifier (e.g. "drawable/ic_play") to its
// numeric resource id within this application's package.
int getResourceId(String resource) {
    String[] pieces = resource.split("/");
    return getResources().getIdentifier(pieces[1], pieces[0], getApplicationContext().getPackageName());
}
// Builds a notification action button from an icon resource spec and label,
// wired to a pending intent that synthesizes the matching media-button press.
NotificationCompat.Action action(String resource, String label, long actionCode) {
    return new NotificationCompat.Action(
            getResourceId(resource), label, buildMediaButtonPendingIntent(actionCode));
}
// Builds a broadcast PendingIntent that delivers the media-button KeyEvent
// for the given transport action to MediaButtonReceiver; returns null for
// actions with no key code. The key code doubles as the request code so
// distinct actions get distinct PendingIntents.
// NOTE(review): no FLAG_IMMUTABLE/FLAG_MUTABLE — required on Android 12+;
// confirm when raising targetSdkVersion.
PendingIntent buildMediaButtonPendingIntent(long action) {
int keyCode = toKeyCode(action);
if (keyCode == KeyEvent.KEYCODE_UNKNOWN)
return null;
Intent intent = new Intent(this, MediaButtonReceiver.class);
intent.setAction(Intent.ACTION_MEDIA_BUTTON);
intent.putExtra(Intent.EXTRA_KEY_EVENT, new KeyEvent(KeyEvent.ACTION_DOWN, keyCode));
return PendingIntent.getBroadcast(this, keyCode, intent, 0);
}
// Pending intent fired when the user swipes the notification away.
PendingIntent buildDeletePendingIntent() {
Intent intent = new Intent(this, MediaButtonReceiver.class);
intent.setAction(MediaButtonReceiver.ACTION_NOTIFICATION_DELETE);
return PendingIntent.getBroadcast(this, 0, intent, 0);
}
// Maps a PlaybackStateCompat action constant to the key code we emit for it.
// PLAY and PAUSE are deliberately remapped to the BYPASS key codes (see the
// field comments above) so real hardware button presses can be distinguished
// from notification action presses.
public static int toKeyCode(long action) {
    if (action == PlaybackStateCompat.ACTION_PLAY) return KEYCODE_BYPASS_PLAY;
    if (action == PlaybackStateCompat.ACTION_PAUSE) return KEYCODE_BYPASS_PAUSE;
    return PlaybackStateCompat.toKeyCode(action);
}
/**
 * Publishes a new playback state to the media session and notification.
 * Transitions into/out of the playing state drive foreground-service and
 * wake-lock management via enterPlayingState()/exitPlayingState().
 */
void setState(List<NotificationCompat.Action> actions, int actionBits, int[] compactActionIndices, AudioProcessingState processingState, boolean playing, long position, long bufferedPosition, float speed, long updateTime, int repeatMode, int shuffleMode) {
this.actions = actions;
this.compactActionIndices = compactActionIndices;
boolean wasPlaying = AudioService.playing;
AudioService.processingState = processingState;
AudioService.playing = playing;
AudioService.repeatMode = repeatMode;
AudioService.shuffleMode = shuffleMode;
// PLAY_PAUSE is always advertised; the rest come from the caller's bitmask.
PlaybackStateCompat.Builder stateBuilder = new PlaybackStateCompat.Builder()
.setActions(PlaybackStateCompat.ACTION_PLAY_PAUSE | actionBits)
.setState(getPlaybackState(), position, speed, updateTime)
.setBufferedPosition(bufferedPosition);
mediaSession.setPlaybackState(stateBuilder.build());
// Ignore late state updates arriving after stop().
if (!running) return;
if (!wasPlaying && playing) {
enterPlayingState();
} else if (wasPlaying && !playing) {
exitPlayingState();
}
updateNotification();
}
// Maps our AudioProcessingState (plus the playing flag) onto the
// PlaybackStateCompat constants the media session framework understands.
// `ready` and `completed` resolve to PLAYING/PAUSED depending on `playing`.
public int getPlaybackState() {
switch (processingState) {
case none: return PlaybackStateCompat.STATE_NONE;
case connecting: return PlaybackStateCompat.STATE_CONNECTING;
case ready: return playing ? PlaybackStateCompat.STATE_PLAYING : PlaybackStateCompat.STATE_PAUSED;
case buffering: return PlaybackStateCompat.STATE_BUFFERING;
case fastForwarding: return PlaybackStateCompat.STATE_FAST_FORWARDING;
case rewinding: return PlaybackStateCompat.STATE_REWINDING;
case skippingToPrevious: return PlaybackStateCompat.STATE_SKIPPING_TO_PREVIOUS;
case skippingToNext: return PlaybackStateCompat.STATE_SKIPPING_TO_NEXT;
case skippingToQueueItem: return PlaybackStateCompat.STATE_SKIPPING_TO_QUEUE_ITEM;
case completed: return playing ? PlaybackStateCompat.STATE_PLAYING : PlaybackStateCompat.STATE_PAUSED;
case stopped: return PlaybackStateCompat.STATE_STOPPED;
case error: return PlaybackStateCompat.STATE_ERROR;
default: return PlaybackStateCompat.STATE_NONE;
}
}
/**
 * Assembles the media-style notification from the current metadata, action
 * buttons and configuration. If the caller did not specify which actions
 * appear in compact view, the first MAX_COMPACT_ACTIONS are used.
 */
private Notification buildNotification() {
int[] compactActionIndices = this.compactActionIndices;
if (compactActionIndices == null) {
// Default: show the first up-to-3 actions in compact view.
compactActionIndices = new int[Math.min(MAX_COMPACT_ACTIONS, actions.size())];
for (int i = 0; i < compactActionIndices.length; i++) compactActionIndices[i] = i;
}
NotificationCompat.Builder builder = getNotificationBuilder();
if (mediaMetadata != null) {
MediaDescriptionCompat description = mediaMetadata.getDescription();
if (description.getTitle() != null)
builder.setContentTitle(description.getTitle());
if (description.getSubtitle() != null)
builder.setContentText(description.getSubtitle());
if (description.getDescription() != null)
builder.setSubText(description.getDescription());
if (description.getIconBitmap() != null)
builder.setLargeIcon(description.getIconBitmap());
}
// Tapping the notification opens the session activity (the host app).
if (androidNotificationClickStartsActivity)
builder.setContentIntent(mediaSession.getController().getSessionActivity());
if (notificationColor != null)
builder.setColor(notificationColor);
for (NotificationCompat.Action action : actions) {
builder.addAction(action);
}
// Media style: attach the session token and compact-view action layout.
builder.setStyle(new MediaStyle()
.setMediaSession(mediaSession.getSessionToken())
.setShowActionsInCompactView(compactActionIndices)
.setShowCancelButton(true)
.setCancelButtonIntent(buildMediaButtonPendingIntent(PlaybackStateCompat.ACTION_STOP))
);
if (androidNotificationOngoing)
builder.setOngoing(true);
Notification notification = builder.build();
return notification;
}
/**
 * Creates a notification builder with the static configuration (channel,
 * small icon, visibility, delete intent). A fresh builder is returned on
 * every call; buildNotification() layers the per-update content on top.
 *
 * Fix: the original initialized a local to null and immediately guarded it
 * with an always-true `if (notificationBuilder == null)` — dead code left
 * over from a caching attempt. The guard is removed; behavior is unchanged.
 */
private NotificationCompat.Builder getNotificationBuilder() {
    // Channels are mandatory from Android O onwards; create ours lazily.
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O)
        createChannel();
    int iconId = getResourceId(androidNotificationIcon);
    return new NotificationCompat.Builder(this, notificationChannelId)
            .setSmallIcon(iconId)
            .setVisibility(NotificationCompat.VISIBILITY_PUBLIC)
            .setShowWhen(false)
            .setDeleteIntent(buildDeletePendingIntent());
}
// Called by MediaButtonReceiver when the user dismisses the notification;
// forwards the event to the listener if one is attached.
public void handleDeleteNotification() {
    ServiceListener currentListener = listener;
    if (currentListener != null) {
        currentListener.onClose();
    }
}
// Creates the notification channel on Android O+ if it does not already
// exist. IMPORTANCE_LOW keeps the media notification silent.
@RequiresApi(Build.VERSION_CODES.O)
private void createChannel() {
NotificationManager notificationManager = (NotificationManager)getSystemService(Context.NOTIFICATION_SERVICE);
NotificationChannel channel = notificationManager.getNotificationChannel(notificationChannelId);
if (channel == null) {
channel = new NotificationChannel(notificationChannelId, androidNotificationChannelName, NotificationManager.IMPORTANCE_LOW);
if (androidNotificationChannelDescription != null)
channel.setDescription(androidNotificationChannelDescription);
notificationManager.createNotificationChannel(channel);
}
}
// Re-posts the notification with the current state, but only after the
// initial notification has been created by startForeground.
private void updateNotification() {
    if (notificationCreated) {
        NotificationManager manager = (NotificationManager)getSystemService(Context.NOTIFICATION_SERVICE);
        manager.notify(NOTIFICATION_ID, buildNotification());
    }
}
// Transition into the playing state: promote to a started, foreground
// service, activate the session, and hold a partial wake lock so playback
// survives the screen turning off.
private boolean enterPlayingState() {
startService(new Intent(AudioService.this, AudioService.class));
if (!mediaSession.isActive())
mediaSession.setActive(true);
acquireWakeLock();
mediaSession.setSessionActivity(contentIntent);
internalStartForeground();
return true;
}
// Transition out of the playing state; optionally drop foreground status
// (configurable because some apps keep the notification non-dismissable).
private void exitPlayingState() {
if (androidStopForegroundOnPause) {
exitForegroundState();
}
}
// Leave foreground without removing the notification (false), and release
// the wake lock since playback has stopped.
private void exitForegroundState() {
stopForeground(false);
releaseWakeLock();
}
private void internalStartForeground() {
startForeground(NOTIFICATION_ID, buildNotification());
notificationCreated = true;
}
// Wake-lock acquire/release are guarded so they never double-count.
private void acquireWakeLock() {
if (!wakeLock.isHeld())
wakeLock.acquire();
}
private void releaseWakeLock() {
if (wakeLock.isHeld())
wakeLock.release();
}
/**
 * Builds a MediaMetadataCompat from the fields sent over the platform
 * channel and registers it in the media-id cache. Optional fields are only
 * set when non-null. Album art is loaded from a pre-downloaded cache file
 * (extras key "artCacheFile") rather than fetched from artUri directly.
 * Extra entries are flattened into namespaced keys ("extra_long_"/
 * "extra_string_") since MediaMetadataCompat has no nested-bundle support.
 */
static MediaMetadataCompat createMediaMetadata(String mediaId, String album, String title, String artist, String genre, Long duration, String artUri, String displayTitle, String displaySubtitle, String displayDescription, RatingCompat rating, Map<?, ?> extras) {
MediaMetadataCompat.Builder builder = new MediaMetadataCompat.Builder()
.putString(MediaMetadataCompat.METADATA_KEY_MEDIA_ID, mediaId)
.putString(MediaMetadataCompat.METADATA_KEY_ALBUM, album)
.putString(MediaMetadataCompat.METADATA_KEY_TITLE, title);
if (artist != null)
builder.putString(MediaMetadataCompat.METADATA_KEY_ARTIST, artist);
if (genre != null)
builder.putString(MediaMetadataCompat.METADATA_KEY_GENRE, genre);
if (duration != null)
builder.putLong(MediaMetadataCompat.METADATA_KEY_DURATION, duration);
if (artUri != null) {
builder.putString(MediaMetadataCompat.METADATA_KEY_DISPLAY_ICON_URI, artUri);
String artCacheFilePath = null;
if (extras != null) {
artCacheFilePath = (String)extras.get("artCacheFile");
}
if (artCacheFilePath != null) {
// Art was downloaded to a local file by the Dart side; decode it here.
Bitmap bitmap = loadArtBitmapFromFile(artCacheFilePath);
if (bitmap != null) {
builder.putBitmap(MediaMetadataCompat.METADATA_KEY_ALBUM_ART, bitmap);
builder.putBitmap(MediaMetadataCompat.METADATA_KEY_DISPLAY_ICON, bitmap);
}
}
}
if (displayTitle != null)
builder.putString(MediaMetadataCompat.METADATA_KEY_DISPLAY_TITLE, displayTitle);
if (displaySubtitle != null)
builder.putString(MediaMetadataCompat.METADATA_KEY_DISPLAY_SUBTITLE, displaySubtitle);
if (displayDescription != null)
builder.putString(MediaMetadataCompat.METADATA_KEY_DISPLAY_DESCRIPTION, displayDescription);
if (rating != null) {
builder.putRating(MediaMetadataCompat.METADATA_KEY_RATING, rating);
}
if (extras != null) {
// Flatten supported extra types; other value types are silently dropped.
for (Object o : extras.keySet()) {
String key = (String)o;
Object value = extras.get(key);
if (value instanceof Long) {
builder.putLong("extra_long_" + key, (Long)value);
} else if (value instanceof Integer) {
builder.putLong("extra_long_" + key, (Integer)value);
} else if (value instanceof String) {
builder.putString("extra_string_" + key, (String)value);
}
}
}
MediaMetadataCompat mediaMetadata = builder.build();
mediaMetadataCache.put(mediaId, mediaMetadata);
return mediaMetadata;
}
// Looks up previously created metadata by media id; null if never created.
static MediaMetadataCompat getMediaMetadata(String mediaId) {
return mediaMetadataCache.get(mediaId);
}
// Service creation: set up the media session (initially advertising only
// ACTION_PLAY), publish its token to MediaBrowserServiceCompat clients, and
// prepare the partial wake lock used during playback.
@Override
public void onCreate() {
super.onCreate();
instance = this;
// Channel id is derived from the package so it is unique per app.
notificationChannelId = getApplication().getPackageName() + ".channel";
mediaSession = new MediaSessionCompat(this, "media-session");
mediaSession.setMediaButtonReceiver(null); // TODO: Make this configurable
mediaSession.setFlags(MediaSessionCompat.FLAG_HANDLES_MEDIA_BUTTONS | MediaSessionCompat.FLAG_HANDLES_TRANSPORT_CONTROLS);
PlaybackStateCompat.Builder stateBuilder = new PlaybackStateCompat.Builder()
.setActions(PlaybackStateCompat.ACTION_PLAY);
mediaSession.setPlaybackState(stateBuilder.build());
mediaSession.setCallback(mediaSessionCallback = new MediaSessionCallback());
setSessionToken(mediaSession.getSessionToken());
mediaSession.setQueue(queue);
PowerManager pm = (PowerManager)getSystemService(Context.POWER_SERVICE);
wakeLock = pm.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, AudioService.class.getName());
}
// Additionally advertise queue-editing support on the media session.
void enableQueue() {
mediaSession.setFlags(MediaSessionCompat.FLAG_HANDLES_MEDIA_BUTTONS | MediaSessionCompat.FLAG_HANDLES_TRANSPORT_CONTROLS | MediaSessionCompat.FLAG_HANDLES_QUEUE_COMMANDS);
}
/**
 * Replaces the current queue and publishes it to the media session.
 *
 * Fix: `queue` is a static field shared by the whole service; the original
 * assigned it via `this.queue`, which compiles but misleadingly suggests
 * per-instance state. Assign through the class name instead.
 */
void setQueue(List<MediaSessionCompat.QueueItem> queue) {
    AudioService.queue = queue;
    mediaSession.setQueue(queue);
}
// Entry point for playing a browsed media item: delegate to the session
// callback so it is handled like any other transport command.
void playMediaItem(MediaDescriptionCompat description) {
mediaSessionCallback.onPlayMediaItem(description);
}
// Updates the current item's metadata on the session and refreshes the
// notification to match.
void setMetadata(final MediaMetadataCompat mediaMetadata) {
this.mediaMetadata = mediaMetadata;
mediaSession.setMetadata(mediaMetadata);
updateNotification();
}
/**
 * Loads album art from a local file, caching the decoded bitmap by path.
 * When artDownscaleSize is configured, the image is decoded with a
 * power-of-two sample size so it lands near the requested dimensions.
 * Returns null on decode failure — art is cosmetic, so errors are logged
 * and swallowed rather than propagated.
 *
 * Fix: removed the unused locals `imageHeight`/`imageWidth` that were read
 * from the bounds pass but never used.
 */
static Bitmap loadArtBitmapFromFile(String path) {
    Bitmap bitmap = artBitmapCache.get(path);
    if (bitmap != null) return bitmap;
    try {
        if (artDownscaleSize != null) {
            // Two-pass decode: first read just the bounds, then decode for
            // real with a sample size approximating the target dimensions.
            BitmapFactory.Options options = new BitmapFactory.Options();
            options.inJustDecodeBounds = true;
            BitmapFactory.decodeFile(path, options);
            options.inSampleSize = calculateInSampleSize(options, artDownscaleSize.width, artDownscaleSize.height);
            options.inJustDecodeBounds = false;
            bitmap = BitmapFactory.decodeFile(path, options);
        } else {
            bitmap = BitmapFactory.decodeFile(path);
        }
        artBitmapCache.put(path, bitmap);
        return bitmap;
    } catch (Exception e) {
        e.printStackTrace();
        return null;
    }
}
// Computes the largest power-of-two inSampleSize that keeps the decoded
// bitmap at or above the requested dimensions (the standard Android
// downsampling recipe: keep doubling while both halved dimensions still
// cover the target size).
private static int calculateInSampleSize(BitmapFactory.Options options, int reqWidth, int reqHeight) {
    int sample = 1;
    if (options.outHeight > reqHeight || options.outWidth > reqWidth) {
        int halfHeight = options.outHeight / 2;
        int halfWidth = options.outWidth / 2;
        while (halfHeight / sample >= reqHeight && halfWidth / sample >= reqWidth) {
            sample *= 2;
        }
    }
    return sample;
}
// Media browser root: a single fixed root id, open to all clients
// (no package-based access control is applied here).
@Override
public BrowserRoot onGetRoot(String clientPackageName, int clientUid, Bundle rootHints) {
return new BrowserRoot(MEDIA_ROOT_ID, null);
}
// Browsing is delegated to the Dart side via the listener; with no listener
// attached, reply immediately with an empty list so clients don't hang.
@Override
public void onLoadChildren(final String parentMediaId, final Result<List<MediaBrowserCompat.MediaItem>> result) {
if (listener == null) {
result.sendResult(new ArrayList<MediaBrowserCompat.MediaItem>());
return;
}
listener.onLoadChildren(parentMediaId, result);
}
// Media-button intents delivered to the started service are routed into the
// media session. NOT_STICKY: don't restart automatically if killed.
@Override
public int onStartCommand(final Intent intent, int flags, int startId) {
MediaButtonReceiver.handleIntent(mediaSession, intent);
return START_NOT_STICKY;
}
// Release the session and clear the singleton when the service dies.
@Override
public void onDestroy() {
super.onDestroy();
if (listener != null) {
listener.onDestroy();
}
mediaSession.release();
instance = null;
}
// Notify the listener when the user swipes the app out of recents.
@Override
public void onTaskRemoved(Intent rootIntent) {
if (listener != null) {
listener.onTaskRemoved();
}
super.onTaskRemoved(rootIntent);
}
public class MediaSessionCallback extends MediaSessionCompat.Callback {
@Override
public void onAddQueueItem(MediaDescriptionCompat description) {
if (listener == null) return;
listener.onAddQueueItem(getMediaMetadata(description.getMediaId()));
}
@Override
public void onAddQueueItem(MediaDescriptionCompat description, int index) {
if (listener == null) return;
listener.onAddQueueItemAt(getMediaMetadata(description.getMediaId()), index);
}
@Override
public void onRemoveQueueItem(MediaDescriptionCompat description) {
if (listener == null) return;
listener.onRemoveQueueItem(getMediaMetadata(description.getMediaId()));
}
@Override
public void onPrepare() {
if (listener == null) return;
if (!mediaSession.isActive())
mediaSession.setActive(true);
listener.onPrepare();
}
@Override
public void onPlay() {
if (listener == null) return;
listener.onPlay();
}
@Override
public void onPrepareFromMediaId(String mediaId, Bundle extras) {
if (listener == null) return;
if (!mediaSession.isActive())
mediaSession.setActive(true);
listener.onPrepareFromMediaId(mediaId);
}
@Override
public void onPlayFromMediaId(final String mediaId, final Bundle extras) {
if (listener == null) return;
listener.onPlayFromMediaId(mediaId);
}
@Override
public boolean onMediaButtonEvent(Intent mediaButtonEvent) {
if (listener == null) return false;
final KeyEvent event = (KeyEvent)mediaButtonEvent.getExtras().get(Intent.EXTRA_KEY_EVENT);
if (event.getAction() == KeyEvent.ACTION_DOWN) {
switch (event.getKeyCode()) {
case KEYCODE_BYPASS_PLAY:
onPlay();
break;
case KEYCODE_BYPASS_PAUSE:
onPause();
break;
case KeyEvent.KEYCODE_MEDIA_STOP:
onStop();
break;
case KeyEvent.KEYCODE_MEDIA_FAST_FORWARD:
onFastForward();
break;
case KeyEvent.KEYCODE_MEDIA_REWIND:
onRewind();
break;
// Android unfortunately reroutes media button clicks to
// KEYCODE_MEDIA_PLAY/PAUSE instead of the expected KEYCODE_HEADSETHOOK
// or KEYCODE_MEDIA_PLAY_PAUSE. As a result, we can't genuinely tell if
// onMediaButtonEvent was called because a media button was actually
// pressed or because a PLAY/PAUSE action was pressed instead! To get
// around this, we make PLAY and PAUSE actions use different keycodes:
// KEYCODE_BYPASS_PLAY/PAUSE. Now if we get KEYCODE_MEDIA_PLAY/PUASE
// we know it is actually a media button press.
case KeyEvent.KEYCODE_MEDIA_NEXT:
case KeyEvent.KEYCODE_MEDIA_PREVIOUS:
case KeyEvent.KEYCODE_MEDIA_PLAY:
case KeyEvent.KEYCODE_MEDIA_PAUSE:
// These are the "genuine" media button click events
case KeyEvent.KEYCODE_MEDIA_PLAY_PAUSE:
case KeyEvent.KEYCODE_HEADSETHOOK:
MediaControllerCompat controller = mediaSession.getController();
listener.onClick(mediaControl(event));
break;
}
}
return true;
}
private MediaControl mediaControl(KeyEvent event) {
switch (event.getKeyCode()) {
case KeyEvent.KEYCODE_MEDIA_PLAY_PAUSE:
case KeyEvent.KEYCODE_HEADSETHOOK:
return MediaControl.media;
case KeyEvent.KEYCODE_MEDIA_NEXT:
return MediaControl.next;
case KeyEvent.KEYCODE_MEDIA_PREVIOUS:
return MediaControl.previous;
default:
return MediaControl.media;
}
}
// ---------------------------------------------------------------------
// MediaSessionCompat.Callback overrides.
// Each override forwards its event to the registered ServiceListener,
// silently dropping the event while no listener is attached.
// ---------------------------------------------------------------------

@Override
public void onPause() {
    if (listener != null) listener.onPause();
}

@Override
public void onStop() {
    if (listener != null) listener.onStop();
}

@Override
public void onSkipToNext() {
    if (listener != null) listener.onSkipToNext();
}

@Override
public void onSkipToPrevious() {
    if (listener != null) listener.onSkipToPrevious();
}

@Override
public void onFastForward() {
    if (listener != null) listener.onFastForward();
}

@Override
public void onRewind() {
    if (listener != null) listener.onRewind();
}

@Override
public void onSkipToQueueItem(long id) {
    if (listener != null) listener.onSkipToQueueItem(id);
}

@Override
public void onSeekTo(long pos) {
    if (listener != null) listener.onSeekTo(pos);
}

@Override
public void onSetRating(RatingCompat rating) {
    if (listener != null) listener.onSetRating(rating);
}

@Override
public void onSetRepeatMode(int repeatMode) {
    if (listener != null) listener.onSetRepeatMode(repeatMode);
}

@Override
public void onSetShuffleMode(int shuffleMode) {
    if (listener != null) listener.onSetShuffleMode(shuffleMode);
}

@Override
public void onSetRating(RatingCompat rating, Bundle extras) {
    if (listener != null) listener.onSetRating(rating, extras);
}
//
// NON-STANDARD METHODS
//
// Non-standard callback: plays a specific item from the media browser.
// Resolves the item's id to full metadata via getMediaMetadata (defined
// elsewhere in this service) before handing it to the listener.
public void onPlayMediaItem(final MediaDescriptionCompat description) {
if (listener == null) return;
listener.onPlayMediaItem(getMediaMetadata(description.getMediaId()));
}
}
/**
 * Callback interface implemented by the plugin layer to receive media
 * session, media browser and lifecycle events from this service. Most
 * methods mirror MediaSessionCompat.Callback one-to-one; commented-out
 * declarations are session callbacks not yet surfaced to Dart.
 */
public static interface ServiceListener {
// Supplies the media browser children of parentMediaId via result.
void onLoadChildren(String parentMediaId, Result<List<MediaBrowserCompat.MediaItem>> result);
// Reports a genuine hardware media-button click (see mediaControl()).
void onClick(MediaControl mediaControl);
void onPrepare();
void onPrepareFromMediaId(String mediaId);
//void onPrepareFromSearch(String query);
//void onPrepareFromUri(String uri);
void onPlay();
void onPlayFromMediaId(String mediaId);
//void onPlayFromSearch(String query, Map<?,?> extras);
//void onPlayFromUri(String uri, Map<?,?> extras);
void onSkipToQueueItem(long id);
void onPause();
void onSkipToNext();
void onSkipToPrevious();
void onFastForward();
void onRewind();
void onStop();
void onDestroy();
// pos is the seek target; units are defined by the caller of the
// session callback (milliseconds in MediaSessionCompat convention).
void onSeekTo(long pos);
void onSetRating(RatingCompat rating);
void onSetRating(RatingCompat rating, Bundle extras);
void onSetRepeatMode(int repeatMode);
//void onSetShuffleModeEnabled(boolean enabled);
void onSetShuffleMode(int shuffleMode);
//void onCustomAction(String action, Bundle extras);
// Queue management requests forwarded from the client.
void onAddQueueItem(MediaMetadataCompat metadata);
void onAddQueueItemAt(MediaMetadataCompat metadata, int index);
void onRemoveQueueItem(MediaMetadataCompat metadata);
//
// NON-STANDARD METHODS
//
void onPlayMediaItem(MediaMetadataCompat metadata);
// Fired when the user swipes the app away from the recent-tasks list.
void onTaskRemoved();
// Fired when the notification is dismissed.
void onClose();
}
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,19 @@
package com.ryanheise.audioservice;
import android.content.Context;
import android.content.Intent;
/**
 * Receives media-button and notification-delete broadcasts. The
 * ACTION_NOTIFICATION_DELETE intent (sent when the user dismisses the
 * playback notification) is handled locally by the running
 * AudioService; every other intent is delegated to the androidx
 * MediaButtonReceiver, which routes it to the media session.
 */
public class MediaButtonReceiver extends androidx.media.session.MediaButtonReceiver {
    public static final String ACTION_NOTIFICATION_DELETE = "com.ryanheise.audioservice.intent.action.ACTION_NOTIFICATION_DELETE";

    @Override
    public void onReceive(Context context, Intent intent) {
        boolean isDeleteAction = intent != null
                && ACTION_NOTIFICATION_DELETE.equals(intent.getAction());
        if (isDeleteAction && AudioService.instance != null) {
            AudioService.instance.handleDeleteNotification();
        } else {
            super.onReceive(context, intent);
        }
    }
}

View File

@ -0,0 +1,7 @@
package com.ryanheise.audioservice;
/**
 * The kinds of media-button click reported to the listener via
 * ServiceListener.onClick. {@code media} represents the primary
 * play/pause/headset-hook button; see AudioService's mediaControl()
 * for the keycode mapping.
 */
public enum MediaControl {
media,
next,
previous
}

View File

@ -0,0 +1,11 @@
package com.ryanheise.audioservice;
/**
 * A simple mutable width/height pair (in pixels) used for artwork
 * dimensions. Now implements value equality and a readable string form
 * ("WxH"); the public fields and constructor are unchanged.
 */
public class Size {
    public int width;
    public int height;

    public Size(int width, int height) {
        this.width = width;
        this.height = height;
    }

    @Override
    public boolean equals(Object other) {
        if (this == other) return true;
        if (!(other instanceof Size)) return false;
        Size size = (Size) other;
        return width == size.width && height == size.height;
    }

    @Override
    public int hashCode() {
        // Standard two-field hash combining; keeps equal sizes hashing equal.
        return 31 * width + height;
    }

    @Override
    public String toString() {
        return width + "x" + height;
    }
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 561 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 584 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 157 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 352 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 379 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 410 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 121 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 241 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 267 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 170 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 285 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 344 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 354 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 114 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 389 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 460 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 188 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 538 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 602 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 639 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 152 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 684 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 770 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 315 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 720 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 832 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 857 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 252 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 838 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 952 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 461 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.1 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.2 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.2 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 316 B

18
audio_service.iml Normal file
View File

@ -0,0 +1,18 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="JAVA_MODULE" version="4">
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
<content url="file://$MODULE_DIR$">
<sourceFolder url="file://$MODULE_DIR$/lib" isTestSource="false" />
<excludeFolder url="file://$MODULE_DIR$/.dart_tool" />
<excludeFolder url="file://$MODULE_DIR$/.idea" />
<excludeFolder url="file://$MODULE_DIR$/.pub" />
<excludeFolder url="file://$MODULE_DIR$/build" />
<excludeFolder url="file://$MODULE_DIR$/example/.pub" />
<excludeFolder url="file://$MODULE_DIR$/example/build" />
</content>
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="library" name="Dart SDK" level="project" />
<orderEntry type="library" name="Flutter Plugins" level="project" />
</component>
</module>

30
audio_service_android.iml Normal file
View File

@ -0,0 +1,30 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="JAVA_MODULE" version="4">
<component name="FacetManager">
<facet type="android" name="Android">
<configuration>
<option name="ALLOW_USER_CONFIGURATION" value="false" />
<option name="GEN_FOLDER_RELATIVE_PATH_APT" value="/android/gen" />
<option name="GEN_FOLDER_RELATIVE_PATH_AIDL" value="/android/gen" />
<option name="MANIFEST_FILE_RELATIVE_PATH" value="/android/AndroidManifest.xml" />
<option name="RES_FOLDER_RELATIVE_PATH" value="/android/res" />
<option name="ASSETS_FOLDER_RELATIVE_PATH" value="/android/assets" />
<option name="LIBS_FOLDER_RELATIVE_PATH" value="/android/libs" />
<option name="PROGUARD_LOGS_FOLDER_RELATIVE_PATH" value="/android/proguard_logs" />
</configuration>
</facet>
</component>
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
<content url="file://$MODULE_DIR$/android">
<sourceFolder url="file://$MODULE_DIR$/android/src/main/java" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/android/gen" isTestSource="false" generated="true" />
</content>
<content url="file://$MODULE_DIR$/example/android">
<sourceFolder url="file://$MODULE_DIR$/example/android/app/src/main/java" isTestSource="false" />
</content>
<orderEntry type="jdk" jdkName="Android API 25 Platform" jdkType="Android SDK" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="library" name="Flutter for Android" level="project" />
</component>
</module>

View File

@ -0,0 +1,617 @@
#import "AudioServicePlugin.h"
#import <AVFoundation/AVFoundation.h>
#import <MediaPlayer/MediaPlayer.h>
// If you'd like to help, please see the TODO comments below, then open a
// GitHub issue to announce your intention to work on a particular feature, and
// submit a pull request. We have an open discussion over at issue #10 about
// all things iOS if you'd like to discuss approaches or ask for input. Thank
// you for your support!
@implementation AudioServicePlugin
// ---- Shared plugin state (file statics; one logical audio service) ----
// Channel to the Dart client (UI) isolate.
static FlutterMethodChannel *channel = nil;
// Channel to the Dart background-task isolate.
static FlutterMethodChannel *backgroundChannel = nil;
// Whether the background audio task is currently running.
static BOOL _running = NO;
// Pending FlutterResult for "start"; completed when "started" arrives.
static FlutterResult startResult = nil;
static MPRemoteCommandCenter *commandCenter = nil;
// Current queue of media items, as raw dictionaries from Dart.
static NSArray *queue = nil;
// Currently playing media item, as a raw dictionary from Dart.
static NSMutableDictionary *mediaItem = nil;
// Bitmask of enabled actions; bit index == enum MediaAction value.
static long actionBits;
// MPRemoteCommand per MediaAction index; NSNull for unsupported slots.
static NSArray *commands;
// YES once updateControls has completed its first full pass.
static BOOL _controlsUpdated = NO;
static enum AudioProcessingState processingState = none;
static BOOL playing = NO;
// Playback position / buffered position in milliseconds (boxed).
static NSNumber *position = nil;
static NSNumber *bufferedPosition = nil;
// Epoch time (ms) at which `position` was last measured.
static NSNumber *updateTime = nil;
static NSNumber *speed = nil;
static NSNumber *repeatMode = nil;
static NSNumber *shuffleMode = nil;
// Skip intervals in milliseconds, supplied by the "start" call.
static NSNumber *fastForwardInterval = nil;
static NSNumber *rewindInterval = nil;
// Custom parameters passed through from AudioService.start in Dart.
static NSMutableDictionary *params = nil;
static MPMediaItemArtwork* artwork = nil;
// Registers this plugin with a Flutter registrar. On iOS this runs once
// per isolate: the first registration creates the client channel, the
// second the background channel. On macOS a single registration creates
// both channels because no separate background isolate is used.
+ (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar>*)registrar {
@synchronized(self) {
// TODO: Need a reliable way to detect whether this is the client
// or background.
// TODO: Handle multiple clients.
// As no separate isolate is used on macOS, add both handlers to the one registrar.
#if TARGET_OS_IPHONE
if (channel == nil) {
#endif
// First registration (iOS) / always (macOS): client channel.
AudioServicePlugin *instance = [[AudioServicePlugin alloc] init:registrar];
channel = [FlutterMethodChannel
methodChannelWithName:@"ryanheise.com/audioService"
binaryMessenger:[registrar messenger]];
[registrar addMethodCallDelegate:instance channel:channel];
#if TARGET_OS_IPHONE
} else {
// Second registration (iOS only): background channel.
AudioServicePlugin *instance = [[AudioServicePlugin alloc] init:registrar];
#endif
backgroundChannel = [FlutterMethodChannel
methodChannelWithName:@"ryanheise.com/audioServiceBackground"
binaryMessenger:[registrar messenger]];
[registrar addMethodCallDelegate:instance channel:backgroundChannel];
#if TARGET_OS_IPHONE
}
#endif
}
}
// Initializer used by registerWithRegistrar:. The registrar argument
// is accepted for symmetry but not stored; all plugin state lives in
// the file statics above.
- (instancetype)init:(NSObject<FlutterPluginRegistrar> *)registrar {
    if ((self = [super init]) == nil) {
        NSAssert(NO, @"super init cannot be nil");
    }
    return self;
}
// Pushes the current playback state to the Dart client as a positional
// argument list; the element order must match the decoding order on
// the Dart side.
// NOTE(review): assumes position, bufferedPosition, speed and
// updateTime are all non-nil (they are seeded in "connect" and
// "setState"); a nil value would crash the @[] literal — confirm all
// call paths keep that invariant.
- (void)broadcastPlaybackState {
[channel invokeMethod:@"onPlaybackStateChanged" arguments:@[
// processingState
@(processingState),
// playing
@(playing),
// actions
@(actionBits),
// position
position,
// bufferedPosition
bufferedPosition,
// playback speed
speed,
// update time since epoch
updateTime,
// repeat mode
repeatMode,
// shuffle mode
shuffleMode,
]];
}
// Central dispatcher for calls arriving on BOTH the client channel and
// the background channel (see the TODO below about splitting them).
- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result {
// TODO:
// - Restructure this so that we have a separate method call delegate
// for the client instance and the background instance so that methods
// can't be called on the wrong instance.
if ([@"connect" isEqualToString:call.method]) {
long long msSinceEpoch = (long long)([[NSDate date] timeIntervalSince1970] * 1000.0);
// First connection ever: seed the playback state with defaults.
if (position == nil) {
position = @(0);
bufferedPosition = @(0);
updateTime = [NSNumber numberWithLongLong: msSinceEpoch];
speed = [NSNumber numberWithDouble: 1.0];
repeatMode = @(0);
shuffleMode = @(0);
}
// Notify client of state on subscribing.
[self broadcastPlaybackState];
[channel invokeMethod:@"onMediaChanged" arguments:@[mediaItem ? mediaItem : [NSNull null]]];
[channel invokeMethod:@"onQueueChanged" arguments:@[queue ? queue : [NSNull null]]];
result(nil);
} else if ([@"disconnect" isEqualToString:call.method]) {
result(nil);
} else if ([@"start" isEqualToString:call.method]) {
// Starts the background audio task and wires up the remote command
// center. On iOS the result is deferred to the "started" call below;
// on macOS it is sent immediately (no separate isolate).
if (_running) {
result(@NO);
return;
}
_running = YES;
// The result will be sent after the background task actually starts.
// See the "ready" case below.
startResult = result;
#if TARGET_OS_IPHONE
[AVAudioSession sharedInstance];
#endif
// Set callbacks on MPRemoteCommandCenter
fastForwardInterval = [call.arguments objectForKey:@"fastForwardInterval"];
rewindInterval = [call.arguments objectForKey:@"rewindInterval"];
commandCenter = [MPRemoteCommandCenter sharedCommandCenter];
// Indexed by enum MediaAction (see the header); NSNull marks actions
// with no corresponding remote command.
commands = @[
commandCenter.stopCommand,
commandCenter.pauseCommand,
commandCenter.playCommand,
commandCenter.skipBackwardCommand,
commandCenter.previousTrackCommand,
commandCenter.nextTrackCommand,
commandCenter.skipForwardCommand,
[NSNull null],
commandCenter.changePlaybackPositionCommand,
commandCenter.togglePlayPauseCommand,
[NSNull null],
[NSNull null],
[NSNull null],
[NSNull null],
[NSNull null],
[NSNull null],
[NSNull null],
[NSNull null],
commandCenter.changeRepeatModeCommand,
[NSNull null],
[NSNull null],
commandCenter.changeShuffleModeCommand,
commandCenter.seekBackwardCommand,
commandCenter.seekForwardCommand,
];
// Play/pause toggling is always available regardless of actionBits.
[commandCenter.changePlaybackRateCommand setEnabled:YES];
[commandCenter.togglePlayPauseCommand setEnabled:YES];
[commandCenter.togglePlayPauseCommand addTarget:self action:@selector(togglePlayPause:)];
// TODO: enable more commands
// Language options
if (@available(iOS 9.0, macOS 10.12.2, *)) {
[commandCenter.enableLanguageOptionCommand setEnabled:NO];
[commandCenter.disableLanguageOptionCommand setEnabled:NO];
}
// Rating
[commandCenter.ratingCommand setEnabled:NO];
// Feedback
[commandCenter.likeCommand setEnabled:NO];
[commandCenter.dislikeCommand setEnabled:NO];
[commandCenter.bookmarkCommand setEnabled:NO];
[self updateControls];
// Params
params = [call.arguments objectForKey:@"params"];
#if TARGET_OS_OSX
// No isolate can be used for macOS until https://github.com/flutter/flutter/issues/65222 is resolved.
// We send a result here, and then the Dart code continues in the main isolate.
result(@YES);
#endif
} else if ([@"ready" isEqualToString:call.method]) {
// The background isolate asks for its start-up configuration.
NSMutableDictionary *startParams = [NSMutableDictionary new];
startParams[@"fastForwardInterval"] = fastForwardInterval;
startParams[@"rewindInterval"] = rewindInterval;
startParams[@"params"] = params;
result(startParams);
} else if ([@"started" isEqualToString:call.method]) {
// The background task is up; complete the pending "start" result.
#if TARGET_OS_IPHONE
if (startResult) {
startResult(@YES);
startResult = nil;
}
#endif
result(@YES);
} else if ([@"stopped" isEqualToString:call.method]) {
// Background task stopped: tear down all static state and controls.
_running = NO;
[channel invokeMethod:@"onStopped" arguments:nil];
[commandCenter.changePlaybackRateCommand setEnabled:NO];
[commandCenter.togglePlayPauseCommand setEnabled:NO];
[commandCenter.togglePlayPauseCommand removeTarget:nil];
[MPNowPlayingInfoCenter defaultCenter].nowPlayingInfo = nil;
processingState = none;
playing = NO;
position = nil;
bufferedPosition = nil;
updateTime = nil;
speed = nil;
artwork = nil;
mediaItem = nil;
repeatMode = @(0);
shuffleMode = @(0);
actionBits = 0;
[self updateControls];
_controlsUpdated = NO;
queue = nil;
startResult = nil;
fastForwardInterval = nil;
rewindInterval = nil;
params = nil;
commandCenter = nil;
result(@YES);
} else if ([@"isRunning" isEqualToString:call.method]) {
if (_running) {
result(@YES);
} else {
result(@NO);
}
} else if ([@"setBrowseMediaParent" isEqualToString:call.method]) {
// Media browsing is not implemented on this platform; acknowledge only.
result(@YES);
// The following client-side calls are forwarded verbatim to the Dart
// background isolate, passing the FlutterResult straight through.
} else if ([@"addQueueItem" isEqualToString:call.method]) {
[backgroundChannel invokeMethod:@"onAddQueueItem" arguments:@[call.arguments] result: result];
} else if ([@"addQueueItemAt" isEqualToString:call.method]) {
// NOTE(review): arguments is passed unwrapped here, unlike the
// neighbouring branches — presumably it is already a list; confirm
// against the Dart caller.
[backgroundChannel invokeMethod:@"onAddQueueItemAt" arguments:call.arguments result: result];
} else if ([@"removeQueueItem" isEqualToString:call.method]) {
[backgroundChannel invokeMethod:@"onRemoveQueueItem" arguments:@[call.arguments] result: result];
} else if ([@"updateQueue" isEqualToString:call.method]) {
[backgroundChannel invokeMethod:@"onUpdateQueue" arguments:@[call.arguments] result: result];
} else if ([@"updateMediaItem" isEqualToString:call.method]) {
[backgroundChannel invokeMethod:@"onUpdateMediaItem" arguments:@[call.arguments] result: result];
} else if ([@"click" isEqualToString:call.method]) {
[backgroundChannel invokeMethod:@"onClick" arguments:@[call.arguments] result: result];
} else if ([@"prepare" isEqualToString:call.method]) {
[backgroundChannel invokeMethod:@"onPrepare" arguments:nil result: result];
} else if ([@"prepareFromMediaId" isEqualToString:call.method]) {
[backgroundChannel invokeMethod:@"onPrepareFromMediaId" arguments:@[call.arguments] result: result];
} else if ([@"play" isEqualToString:call.method]) {
[backgroundChannel invokeMethod:@"onPlay" arguments:nil result: result];
} else if ([@"playFromMediaId" isEqualToString:call.method]) {
[backgroundChannel invokeMethod:@"onPlayFromMediaId" arguments:@[call.arguments] result: result];
} else if ([@"playMediaItem" isEqualToString:call.method]) {
[backgroundChannel invokeMethod:@"onPlayMediaItem" arguments:@[call.arguments] result: result];
} else if ([@"skipToQueueItem" isEqualToString:call.method]) {
[backgroundChannel invokeMethod:@"onSkipToQueueItem" arguments:@[call.arguments] result: result];
} else if ([@"pause" isEqualToString:call.method]) {
[backgroundChannel invokeMethod:@"onPause" arguments:nil result: result];
} else if ([@"stop" isEqualToString:call.method]) {
[backgroundChannel invokeMethod:@"onStop" arguments:nil result: result];
} else if ([@"seekTo" isEqualToString:call.method]) {
[backgroundChannel invokeMethod:@"onSeekTo" arguments:@[call.arguments] result: result];
} else if ([@"skipToNext" isEqualToString:call.method]) {
[backgroundChannel invokeMethod:@"onSkipToNext" arguments:nil result: result];
} else if ([@"skipToPrevious" isEqualToString:call.method]) {
[backgroundChannel invokeMethod:@"onSkipToPrevious" arguments:nil result: result];
} else if ([@"fastForward" isEqualToString:call.method]) {
[backgroundChannel invokeMethod:@"onFastForward" arguments:nil result: result];
} else if ([@"rewind" isEqualToString:call.method]) {
[backgroundChannel invokeMethod:@"onRewind" arguments:nil result: result];
} else if ([@"setRepeatMode" isEqualToString:call.method]) {
[backgroundChannel invokeMethod:@"onSetRepeatMode" arguments:@[call.arguments] result: result];
} else if ([@"setShuffleMode" isEqualToString:call.method]) {
[backgroundChannel invokeMethod:@"onSetShuffleMode" arguments:@[call.arguments] result: result];
} else if ([@"setRating" isEqualToString:call.method]) {
[backgroundChannel invokeMethod:@"onSetRating" arguments:@[call.arguments[@"rating"], call.arguments[@"extras"]] result: result];
} else if ([@"setSpeed" isEqualToString:call.method]) {
[backgroundChannel invokeMethod:@"onSetSpeed" arguments:@[call.arguments] result: result];
} else if ([@"seekForward" isEqualToString:call.method]) {
[backgroundChannel invokeMethod:@"onSeekForward" arguments:@[call.arguments] result: result];
} else if ([@"seekBackward" isEqualToString:call.method]) {
[backgroundChannel invokeMethod:@"onSeekBackward" arguments:@[call.arguments] result: result];
} else if ([@"setState" isEqualToString:call.method]) {
// Background isolate reports new playback state (positional args).
long long msSinceEpoch;
// NOTE(review): != compares against the NSNull singleton by pointer;
// this relies on the standard codec decoding Dart null as that
// singleton.
if (call.arguments[7] != [NSNull null]) {
msSinceEpoch = [call.arguments[7] longLongValue];
} else {
msSinceEpoch = (long long)([[NSDate date] timeIntervalSince1970] * 1000.0);
}
actionBits = 0;
// Button controls each contribute their action bit...
NSArray *controlsArray = call.arguments[0];
for (int i = 0; i < controlsArray.count; i++) {
NSDictionary *control = (NSDictionary *)controlsArray[i];
NSNumber *actionIndex = (NSNumber *)control[@"action"];
int actionCode = 1 << [actionIndex intValue];
actionBits |= actionCode;
}
// ...as do the non-button system actions.
NSArray *systemActionsArray = call.arguments[1];
for (int i = 0; i < systemActionsArray.count; i++) {
NSNumber *actionIndex = (NSNumber *)systemActionsArray[i];
int actionCode = 1 << [actionIndex intValue];
actionBits |= actionCode;
}
// NOTE(review): call.arguments[8] is never read — confirm this is
// intentional.
processingState = [call.arguments[2] intValue];
playing = [call.arguments[3] boolValue];
position = call.arguments[4];
bufferedPosition = call.arguments[5];
speed = call.arguments[6];
repeatMode = call.arguments[9];
shuffleMode = call.arguments[10];
updateTime = [NSNumber numberWithLongLong: msSinceEpoch];
[self broadcastPlaybackState];
[self updateControls];
[self updateNowPlayingInfo];
result(@(YES));
} else if ([@"setQueue" isEqualToString:call.method]) {
queue = call.arguments;
[channel invokeMethod:@"onQueueChanged" arguments:@[queue]];
result(@YES);
} else if ([@"setMediaItem" isEqualToString:call.method]) {
// Adopt the new media item and (re)load its artwork from the cache
// file path advertised in the item's extras, if any.
mediaItem = call.arguments;
NSString* artUri = mediaItem[@"artUri"];
artwork = nil;
if (![artUri isEqual: [NSNull null]]) {
// NOTE(review): declared NSString* but initialised with the NSNull
// singleton; the isEqual: checks below depend on this sentinel —
// consider plain nil instead.
NSString* artCacheFilePath = [NSNull null];
NSDictionary* extras = mediaItem[@"extras"];
if (![extras isEqual: [NSNull null]]) {
artCacheFilePath = extras[@"artCacheFile"];
}
if (![artCacheFilePath isEqual: [NSNull null]]) {
#if TARGET_OS_IPHONE
UIImage* artImage = [UIImage imageWithContentsOfFile:artCacheFilePath];
#else
NSImage* artImage = [[NSImage alloc] initWithContentsOfFile:artCacheFilePath];
#endif
if (artImage != nil) {
#if TARGET_OS_IPHONE
artwork = [[MPMediaItemArtwork alloc] initWithImage: artImage];
#else
artwork = [[MPMediaItemArtwork alloc] initWithBoundsSize:artImage.size requestHandler:^NSImage* _Nonnull(CGSize aSize) {
return artImage;
}];
#endif
}
}
}
[self updateNowPlayingInfo];
[channel invokeMethod:@"onMediaChanged" arguments:@[call.arguments]];
result(@(YES));
} else if ([@"notifyChildrenChanged" isEqualToString:call.method]) {
result(@YES);
} else if ([@"androidForceEnableMediaButtons" isEqualToString:call.method]) {
result(@YES);
} else {
// Unknown method: forward to the background isolate unchanged.
// TODO: Check if this implementation is correct.
// Can I just pass on the result as the last argument?
[backgroundChannel invokeMethod:call.method arguments:call.arguments result: result];
}
}
// MPRemoteCommandCenter handler: remote/lock-screen "play" pressed.
// Forwards the event to the Dart background task.
- (MPRemoteCommandHandlerStatus) play: (MPRemoteCommandEvent *) event {
NSLog(@"play");
[backgroundChannel invokeMethod:@"onPlay" arguments:nil];
return MPRemoteCommandHandlerStatusSuccess;
}
// MPRemoteCommandCenter handler: remote/lock-screen "pause" pressed.
- (MPRemoteCommandHandlerStatus) pause: (MPRemoteCommandEvent *) event {
NSLog(@"pause");
[backgroundChannel invokeMethod:@"onPause" arguments:nil];
return MPRemoteCommandHandlerStatusSuccess;
}
// Rebuilds MPNowPlayingInfoCenter's dictionary from the current media
// item, artwork and playback position.
- (void) updateNowPlayingInfo {
NSMutableDictionary *nowPlayingInfo = [NSMutableDictionary new];
if (mediaItem) {
nowPlayingInfo[MPMediaItemPropertyTitle] = mediaItem[@"title"];
nowPlayingInfo[MPMediaItemPropertyAlbumTitle] = mediaItem[@"album"];
if (mediaItem[@"artist"] != [NSNull null]) {
nowPlayingInfo[MPMediaItemPropertyArtist] = mediaItem[@"artist"];
}
// Duration arrives in milliseconds; MediaPlayer expects seconds.
if (mediaItem[@"duration"] != [NSNull null]) {
nowPlayingInfo[MPMediaItemPropertyPlaybackDuration] = [NSNumber numberWithLongLong: ([mediaItem[@"duration"] longLongValue] / 1000)];
}
if (@available(iOS 3.0, macOS 10.13.2, *)) {
if (artwork) {
nowPlayingInfo[MPMediaItemPropertyArtwork] = artwork;
}
}
// Position is stored in milliseconds; converted to seconds here.
nowPlayingInfo[MPNowPlayingInfoPropertyElapsedPlaybackTime] = [NSNumber numberWithInt:([position intValue] / 1000)];
}
// NOTE(review): the rate is hard-coded to 1.0/0.0 rather than using the
// reported `speed` — confirm whether custom speeds should appear here.
nowPlayingInfo[MPNowPlayingInfoPropertyPlaybackRate] = [NSNumber numberWithDouble: playing ? 1.0 : 0.0];
[MPNowPlayingInfoCenter defaultCenter].nowPlayingInfo = nowPlayingInfo;
}
// Synchronises every MPRemoteCommandCenter command with the current
// actionBits, then marks the controls as initialised so later passes
// can skip commands whose enabled state has not changed.
- (void) updateControls {
    enum MediaAction action = AStop;
    while (action <= ASeekForward) {
        [self updateControl:action];
        action++;
    }
    _controlsUpdated = YES;
}
// Synchronises a single MPRemoteCommand's enabled state and target
// handler with the bit for `action` inside actionBits.
// Fix: the ASeekTo case previously fell through into APlayPause (only
// harmless because that case is empty); an explicit break has been
// added, along with a default case so the switch covers every
// MediaAction value.
- (void) updateControl:(enum MediaAction)action {
    MPRemoteCommand *command = commands[action];
    // Slots without a corresponding command hold the NSNull singleton,
    // so pointer comparison is sufficient here.
    if (command == [NSNull null]) return;
    // Shift the actionBits right until the least significant bit is the tested action bit, and AND that with a 1 at the same position.
    // All bytes become 0, other than the tested action bit, which will be 0 or 1 according to its status in the actionBits long.
    BOOL enable = ((actionBits >> action) & 1);
    // After the first full pass, skip commands whose state is unchanged
    // so targets are not added or removed twice.
    if (_controlsUpdated && enable == command.enabled) return;
    [command setEnabled:enable];
    switch (action) {
        case AStop:
            if (enable) {
                [commandCenter.stopCommand addTarget:self action:@selector(stop:)];
            } else {
                [commandCenter.stopCommand removeTarget:nil];
            }
            break;
        case APause:
            if (enable) {
                [commandCenter.pauseCommand addTarget:self action:@selector(pause:)];
            } else {
                [commandCenter.pauseCommand removeTarget:nil];
            }
            break;
        case APlay:
            if (enable) {
                [commandCenter.playCommand addTarget:self action:@selector(play:)];
            } else {
                [commandCenter.playCommand removeTarget:nil];
            }
            break;
        case ARewind:
            // Only configured when a positive interval was supplied;
            // the interval arrives in milliseconds.
            if (rewindInterval.integerValue > 0) {
                if (enable) {
                    [commandCenter.skipBackwardCommand addTarget: self action:@selector(skipBackward:)];
                    int rewindIntervalInSeconds = [rewindInterval intValue]/1000;
                    NSNumber *rewindIntervalInSec = [NSNumber numberWithInt: rewindIntervalInSeconds];
                    commandCenter.skipBackwardCommand.preferredIntervals = @[rewindIntervalInSec];
                } else {
                    [commandCenter.skipBackwardCommand removeTarget:nil];
                }
            }
            break;
        case ASkipToPrevious:
            if (enable) {
                [commandCenter.previousTrackCommand addTarget:self action:@selector(previousTrack:)];
            } else {
                [commandCenter.previousTrackCommand removeTarget:nil];
            }
            break;
        case ASkipToNext:
            if (enable) {
                [commandCenter.nextTrackCommand addTarget:self action:@selector(nextTrack:)];
            } else {
                [commandCenter.nextTrackCommand removeTarget:nil];
            }
            break;
        case AFastForward:
            if (fastForwardInterval.integerValue > 0) {
                if (enable) {
                    [commandCenter.skipForwardCommand addTarget: self action:@selector(skipForward:)];
                    int fastForwardIntervalInSeconds = [fastForwardInterval intValue]/1000;
                    NSNumber *fastForwardIntervalInSec = [NSNumber numberWithInt: fastForwardIntervalInSeconds];
                    commandCenter.skipForwardCommand.preferredIntervals = @[fastForwardIntervalInSec];
                } else {
                    [commandCenter.skipForwardCommand removeTarget:nil];
                }
            }
            break;
        case ASetRating:
            // TODO:
            // commandCenter.ratingCommand
            // commandCenter.dislikeCommand
            // commandCenter.bookmarkCommand
            break;
        case ASeekTo:
            if (@available(iOS 9.1, macOS 10.12.2, *)) {
                if (enable) {
                    [commandCenter.changePlaybackPositionCommand addTarget:self action:@selector(changePlaybackPosition:)];
                } else {
                    [commandCenter.changePlaybackPositionCommand removeTarget:nil];
                }
            }
            break;
        case APlayPause:
            // Automatically enabled.
            break;
        case ASetRepeatMode:
            if (enable) {
                [commandCenter.changeRepeatModeCommand addTarget:self action:@selector(changeRepeatMode:)];
            } else {
                [commandCenter.changeRepeatModeCommand removeTarget:nil];
            }
            break;
        case ASetShuffleMode:
            if (enable) {
                [commandCenter.changeShuffleModeCommand addTarget:self action:@selector(changeShuffleMode:)];
            } else {
                [commandCenter.changeShuffleModeCommand removeTarget:nil];
            }
            break;
        case ASeekBackward:
            if (enable) {
                [commandCenter.seekBackwardCommand addTarget:self action:@selector(seekBackward:)];
            } else {
                [commandCenter.seekBackwardCommand removeTarget:nil];
            }
            break;
        case ASeekForward:
            if (enable) {
                [commandCenter.seekForwardCommand addTarget:self action:@selector(seekForward:)];
            } else {
                [commandCenter.seekForwardCommand removeTarget:nil];
            }
            break;
        default:
            // All remaining actions have an NSNull command slot and are
            // filtered out by the early return above.
            break;
    }
}
// Handler for the combined play/pause command (e.g. headset button).
// The argument 0 presumably maps to the Dart-side MediaControl.media —
// confirm against the Dart enum.
- (MPRemoteCommandHandlerStatus) togglePlayPause: (MPRemoteCommandEvent *) event {
NSLog(@"togglePlayPause");
[backgroundChannel invokeMethod:@"onClick" arguments:@[@(0)]];
return MPRemoteCommandHandlerStatusSuccess;
}
// Remote "stop" pressed.
- (MPRemoteCommandHandlerStatus) stop: (MPRemoteCommandEvent *) event {
NSLog(@"stop");
[backgroundChannel invokeMethod:@"onStop" arguments:nil];
return MPRemoteCommandHandlerStatusSuccess;
}
// Remote "next track" pressed.
- (MPRemoteCommandHandlerStatus) nextTrack: (MPRemoteCommandEvent *) event {
NSLog(@"nextTrack");
[backgroundChannel invokeMethod:@"onSkipToNext" arguments:nil];
return MPRemoteCommandHandlerStatusSuccess;
}
// Remote "previous track" pressed.
- (MPRemoteCommandHandlerStatus) previousTrack: (MPRemoteCommandEvent *) event {
NSLog(@"previousTrack");
[backgroundChannel invokeMethod:@"onSkipToPrevious" arguments:nil];
return MPRemoteCommandHandlerStatusSuccess;
}
// Lock-screen scrubber moved; converts seconds to whole milliseconds
// before forwarding to Dart.
- (MPRemoteCommandHandlerStatus) changePlaybackPosition: (MPChangePlaybackPositionCommandEvent *) event {
NSLog(@"changePlaybackPosition");
[backgroundChannel invokeMethod:@"onSeekTo" arguments: @[@((long long) (event.positionTime * 1000))]];
return MPRemoteCommandHandlerStatusSuccess;
}
// Skip-forward button (interval configured in updateControl:).
- (MPRemoteCommandHandlerStatus) skipForward: (MPRemoteCommandEvent *) event {
NSLog(@"skipForward");
[backgroundChannel invokeMethod:@"onFastForward" arguments:nil];
return MPRemoteCommandHandlerStatusSuccess;
}
// Skip-backward button (interval configured in updateControl:).
- (MPRemoteCommandHandlerStatus) skipBackward: (MPRemoteCommandEvent *) event {
NSLog(@"skipBackward");
[backgroundChannel invokeMethod:@"onRewind" arguments:nil];
return MPRemoteCommandHandlerStatusSuccess;
}
// Continuous seek forward; the flag is YES when seeking begins and NO
// when it ends.
- (MPRemoteCommandHandlerStatus) seekForward: (MPSeekCommandEvent *) event {
NSLog(@"seekForward");
BOOL begin = event.type == MPSeekCommandEventTypeBeginSeeking;
[backgroundChannel invokeMethod:@"onSeekForward" arguments:@[@(begin)]];
return MPRemoteCommandHandlerStatusSuccess;
}
// Continuous seek backward; same begin/end semantics as seekForward:.
- (MPRemoteCommandHandlerStatus) seekBackward: (MPSeekCommandEvent *) event {
NSLog(@"seekBackward");
BOOL begin = event.type == MPSeekCommandEventTypeBeginSeeking;
[backgroundChannel invokeMethod:@"onSeekBackward" arguments:@[@(begin)]];
return MPRemoteCommandHandlerStatusSuccess;
}
// Maps the system repeat-mode command onto the integer repeat-mode
// index used by the Dart side: 0 = off, 1 = one, 2 = all (and any
// future MPRepeatType value also maps to 2).
- (MPRemoteCommandHandlerStatus) changeRepeatMode: (MPChangeRepeatModeCommandEvent *) event {
    NSLog(@"changeRepeatMode");
    int modeIndex;
    if (event.repeatType == MPRepeatTypeOff) {
        modeIndex = 0;
    } else if (event.repeatType == MPRepeatTypeOne) {
        modeIndex = 1;
    } else {
        // MPRepeatTypeAll (and anything unrecognised).
        modeIndex = 2;
    }
    [backgroundChannel invokeMethod:@"onSetRepeatMode" arguments:@[@(modeIndex)]];
    return MPRemoteCommandHandlerStatusSuccess;
}
// Maps the system shuffle-mode command onto the integer shuffle-mode
// index used by the Dart side: 0 = off, 1 = items, 2 = collections
// (and any future MPShuffleType value also maps to 2).
- (MPRemoteCommandHandlerStatus) changeShuffleMode: (MPChangeShuffleModeCommandEvent *) event {
    NSLog(@"changeShuffleMode");
    int modeIndex;
    if (event.shuffleType == MPShuffleTypeOff) {
        modeIndex = 0;
    } else if (event.shuffleType == MPShuffleTypeItems) {
        modeIndex = 1;
    } else {
        // MPShuffleTypeCollections (and anything unrecognised).
        modeIndex = 2;
    }
    [backgroundChannel invokeMethod:@"onSetShuffleMode" arguments:@[@(modeIndex)]];
    return MPRemoteCommandHandlerStatusSuccess;
}
// Unregisters this instance from the notification center.
// NOTE(review): no addObserver: call is visible in this file; this
// appears to be defensive cleanup — confirm whether observers are
// added elsewhere.
- (void) dealloc {
[[NSNotificationCenter defaultCenter] removeObserver:self];
}
@end

37
ios/.gitignore vendored Normal file
View File

@ -0,0 +1,37 @@
.idea/
.vagrant/
.sconsign.dblite
.svn/
.DS_Store
*.swp
profile
DerivedData/
build/
GeneratedPluginRegistrant.h
GeneratedPluginRegistrant.m
.generated/
*.pbxuser
*.mode1v3
*.mode2v3
*.perspectivev3
!default.pbxuser
!default.mode1v3
!default.mode2v3
!default.perspectivev3
xcuserdata
*.moved-aside
*.pyc
*sync/
Icon?
.tags*
/Flutter/Generated.xcconfig
/Flutter/flutter_export_environment.sh

0
ios/Assets/.gitkeep Normal file
View File

View File

@ -0,0 +1,54 @@
#import <Flutter/Flutter.h>
// Plugin entry point; handles both the client and background method
// channels (see registerWithRegistrar: in the implementation).
@interface AudioServicePlugin : NSObject<FlutterPlugin>
@end
// Playback processing states. The ordinal value is sent over the
// method channel, so the order presumably mirrors the Dart-side enum —
// keep them in sync.
enum AudioProcessingState {
none,
connecting,
ready,
buffering,
fastForwarding,
rewinding,
skippingToPrevious,
skippingToNext,
skippingToQueueItem,
completed,
stopped,
error
};
// Kinds of audio-session interruption.
// NOTE(review): not referenced in the visible implementation.
enum AudioInterruption {
AIPause,
AITemporaryPause,
AITemporaryDuck,
AIUnknownPause
};
// Media actions. Each value is both the bit index used in the
// actionBits bitmask and the index into the implementation's
// `commands` array, so the order must not change.
enum MediaAction {
AStop,
APause,
APlay,
ARewind,
ASkipToPrevious,
ASkipToNext,
AFastForward,
ASetRating,
ASeekTo,
APlayPause,
APlayFromMediaId,
APlayFromSearch,
ASkipToQueueItem,
APlayFromUri,
APrepare,
APrepareFromMediaId,
APrepareFromSearch,
APrepareFromUri,
ASetRepeatMode,
AUnused_1, // deprecated (setShuffleModeEnabled)
AUnused_2, // setCaptioningEnabled
ASetShuffleMode,
// Non-standard
ASeekBackward,
ASeekForward,
};

View File

@ -0,0 +1,617 @@
#import "AudioServicePlugin.h"
#import <AVFoundation/AVFoundation.h>
#import <MediaPlayer/MediaPlayer.h>

// If you'd like to help, please see the TODO comments below, then open a
// GitHub issue to announce your intention to work on a particular feature, and
// submit a pull request. We have an open discussion over at issue #10 about
// all things iOS if you'd like to discuss approaches or ask for input. Thank
// you for your support!

@implementation AudioServicePlugin

// All service state is kept in file-scope statics: the plugin is instantiated
// once per isolate (client and background), but both instances share one
// logical service state.
static FlutterMethodChannel *channel = nil;             // client (UI) channel
static FlutterMethodChannel *backgroundChannel = nil;   // background isolate channel
static BOOL _running = NO;                              // YES between "start" and "stopped"
static FlutterResult startResult = nil;                 // pending result for the "start" call
static MPRemoteCommandCenter *commandCenter = nil;
static NSArray *queue = nil;                            // current queue as sent from Dart
static NSMutableDictionary *mediaItem = nil;            // current media item as a raw dictionary
static long actionBits;                                 // bitmask of enabled MediaActions
static NSArray *commands;                               // MPRemoteCommand per MediaAction (NSNull = none)
static BOOL _controlsUpdated = NO;                      // NO until the first updateControls pass
static enum AudioProcessingState processingState = none;
static BOOL playing = NO;
// Position/time values are NSNumbers in milliseconds since epoch or since
// track start, as delivered over the method channel.
static NSNumber *position = nil;
static NSNumber *bufferedPosition = nil;
static NSNumber *updateTime = nil;
static NSNumber *speed = nil;
static NSNumber *repeatMode = nil;
static NSNumber *shuffleMode = nil;
static NSNumber *fastForwardInterval = nil;
static NSNumber *rewindInterval = nil;
static NSMutableDictionary *params = nil;
static MPMediaItemArtwork* artwork = nil;
// Registers the plugin with a Flutter engine. On iOS this is called once for
// the client (UI) isolate and once for the background isolate: the first call
// creates the client channel, any later call creates the background channel.
// On macOS both channels are attached to the single registrar.
+ (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar>*)registrar {
    @synchronized(self) {
        // TODO: Need a reliable way to detect whether this is the client
        // or background.
        // TODO: Handle multiple clients.
        // As no separate isolate is used on macOS, add both handlers to the one registrar.
#if TARGET_OS_IPHONE
        if (channel == nil) {
#endif
            AudioServicePlugin *instance = [[AudioServicePlugin alloc] init:registrar];
            channel = [FlutterMethodChannel
                methodChannelWithName:@"ryanheise.com/audioService"
                binaryMessenger:[registrar messenger]];
            [registrar addMethodCallDelegate:instance channel:channel];
#if TARGET_OS_IPHONE
        } else {
            AudioServicePlugin *instance = [[AudioServicePlugin alloc] init:registrar];
#endif
            backgroundChannel = [FlutterMethodChannel
                methodChannelWithName:@"ryanheise.com/audioServiceBackground"
                binaryMessenger:[registrar messenger]];
            [registrar addMethodCallDelegate:instance channel:backgroundChannel];
#if TARGET_OS_IPHONE
        }
#endif
    }
}
// Designated initializer. The registrar argument is currently unused; channel
// wiring happens in +registerWithRegistrar:.
- (instancetype)init:(NSObject<FlutterPluginRegistrar> *)registrar {
    self = [super init];
    NSAssert(self != nil, @"super init cannot be nil");
    return self;
}
// Pushes the full playback state to the client isolate as a positional list;
// the element order is a wire format and must match the Dart-side decoder.
- (void)broadcastPlaybackState {
    [channel invokeMethod:@"onPlaybackStateChanged" arguments:@[
        // processingState
        @(processingState),
        // playing
        @(playing),
        // actions
        @(actionBits),
        // position
        position,
        // bufferedPosition
        bufferedPosition,
        // playback speed
        speed,
        // update time since epoch
        updateTime,
        // repeat mode
        repeatMode,
        // shuffle mode
        shuffleMode,
    ]];
}
// Single dispatch point for method calls arriving on BOTH channels. Client
// lifecycle methods ("connect", "start", "setState", ...) are serviced here;
// most playback requests are relayed to the Dart background isolate over
// backgroundChannel, passing `result` through so the background side replies.
- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result {
    // TODO:
    // - Restructure this so that we have a separate method call delegate
    //   for the client instance and the background instance so that methods
    //   can't be called on the wrong instance.
    if ([@"connect" isEqualToString:call.method]) {
        long long msSinceEpoch = (long long)([[NSDate date] timeIntervalSince1970] * 1000.0);
        // First connect ever: seed a default idle playback state.
        if (position == nil) {
            position = @(0);
            bufferedPosition = @(0);
            updateTime = [NSNumber numberWithLongLong: msSinceEpoch];
            speed = [NSNumber numberWithDouble: 1.0];
            repeatMode = @(0);
            shuffleMode = @(0);
        }
        // Notify client of state on subscribing.
        [self broadcastPlaybackState];
        [channel invokeMethod:@"onMediaChanged" arguments:@[mediaItem ? mediaItem : [NSNull null]]];
        [channel invokeMethod:@"onQueueChanged" arguments:@[queue ? queue : [NSNull null]]];
        result(nil);
    } else if ([@"disconnect" isEqualToString:call.method]) {
        // Nothing to tear down on this platform.
        result(nil);
    } else if ([@"start" isEqualToString:call.method]) {
        if (_running) {
            result(@NO);
            return;
        }
        _running = YES;
        // The result will be sent after the background task actually starts.
        // See the "ready" case below.
        startResult = result;
#if TARGET_OS_IPHONE
        // Touch the shared audio session so it exists before playback begins.
        [AVAudioSession sharedInstance];
#endif
        // Set callbacks on MPRemoteCommandCenter
        fastForwardInterval = [call.arguments objectForKey:@"fastForwardInterval"];
        rewindInterval = [call.arguments objectForKey:@"rewindInterval"];
        commandCenter = [MPRemoteCommandCenter sharedCommandCenter];
        // Indexed by enum MediaAction; NSNull marks actions that have no
        // corresponding remote command on this platform.
        commands = @[
            commandCenter.stopCommand,
            commandCenter.pauseCommand,
            commandCenter.playCommand,
            commandCenter.skipBackwardCommand,
            commandCenter.previousTrackCommand,
            commandCenter.nextTrackCommand,
            commandCenter.skipForwardCommand,
            [NSNull null],
            commandCenter.changePlaybackPositionCommand,
            commandCenter.togglePlayPauseCommand,
            [NSNull null],
            [NSNull null],
            [NSNull null],
            [NSNull null],
            [NSNull null],
            [NSNull null],
            [NSNull null],
            [NSNull null],
            commandCenter.changeRepeatModeCommand,
            [NSNull null],
            [NSNull null],
            commandCenter.changeShuffleModeCommand,
            commandCenter.seekBackwardCommand,
            commandCenter.seekForwardCommand,
        ];
        [commandCenter.changePlaybackRateCommand setEnabled:YES];
        [commandCenter.togglePlayPauseCommand setEnabled:YES];
        [commandCenter.togglePlayPauseCommand addTarget:self action:@selector(togglePlayPause:)];
        // TODO: enable more commands
        // Language options
        if (@available(iOS 9.0, macOS 10.12.2, *)) {
            [commandCenter.enableLanguageOptionCommand setEnabled:NO];
            [commandCenter.disableLanguageOptionCommand setEnabled:NO];
        }
        // Rating
        [commandCenter.ratingCommand setEnabled:NO];
        // Feedback
        [commandCenter.likeCommand setEnabled:NO];
        [commandCenter.dislikeCommand setEnabled:NO];
        [commandCenter.bookmarkCommand setEnabled:NO];
        [self updateControls];
        // Params
        params = [call.arguments objectForKey:@"params"];
#if TARGET_OS_OSX
        // No isolate can be used for macOS until https://github.com/flutter/flutter/issues/65222 is resolved.
        // We send a result here, and then the Dart code continues in the main isolate.
        result(@YES);
#endif
    } else if ([@"ready" isEqualToString:call.method]) {
        // The background isolate asks for the configuration it was started with.
        NSMutableDictionary *startParams = [NSMutableDictionary new];
        startParams[@"fastForwardInterval"] = fastForwardInterval;
        startParams[@"rewindInterval"] = rewindInterval;
        startParams[@"params"] = params;
        result(startParams);
    } else if ([@"started" isEqualToString:call.method]) {
#if TARGET_OS_IPHONE
        // Complete the client's pending "start" call now that the background
        // task is actually running.
        if (startResult) {
            startResult(@YES);
            startResult = nil;
        }
#endif
        result(@YES);
    } else if ([@"stopped" isEqualToString:call.method]) {
        // Reset all shared state and release the command-center hooks.
        _running = NO;
        [channel invokeMethod:@"onStopped" arguments:nil];
        [commandCenter.changePlaybackRateCommand setEnabled:NO];
        [commandCenter.togglePlayPauseCommand setEnabled:NO];
        [commandCenter.togglePlayPauseCommand removeTarget:nil];
        [MPNowPlayingInfoCenter defaultCenter].nowPlayingInfo = nil;
        processingState = none;
        playing = NO;
        position = nil;
        bufferedPosition = nil;
        updateTime = nil;
        speed = nil;
        artwork = nil;
        mediaItem = nil;
        repeatMode = @(0);
        shuffleMode = @(0);
        actionBits = 0;
        // Disable every command (actionBits is now 0), then forget the sync flag.
        [self updateControls];
        _controlsUpdated = NO;
        queue = nil;
        startResult = nil;
        fastForwardInterval = nil;
        rewindInterval = nil;
        params = nil;
        commandCenter = nil;
        result(@YES);
    } else if ([@"isRunning" isEqualToString:call.method]) {
        if (_running) {
            result(@YES);
        } else {
            result(@NO);
        }
    } else if ([@"setBrowseMediaParent" isEqualToString:call.method]) {
        // Media browsing is not supported on this platform; acknowledge only.
        result(@YES);
        // All of the following methods are forwarded to the background isolate,
        // passing `result` through so the Dart side completes the call.
    } else if ([@"addQueueItem" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onAddQueueItem" arguments:@[call.arguments] result: result];
    } else if ([@"addQueueItemAt" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onAddQueueItemAt" arguments:call.arguments result: result];
    } else if ([@"removeQueueItem" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onRemoveQueueItem" arguments:@[call.arguments] result: result];
    } else if ([@"updateQueue" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onUpdateQueue" arguments:@[call.arguments] result: result];
    } else if ([@"updateMediaItem" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onUpdateMediaItem" arguments:@[call.arguments] result: result];
    } else if ([@"click" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onClick" arguments:@[call.arguments] result: result];
    } else if ([@"prepare" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onPrepare" arguments:nil result: result];
    } else if ([@"prepareFromMediaId" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onPrepareFromMediaId" arguments:@[call.arguments] result: result];
    } else if ([@"play" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onPlay" arguments:nil result: result];
    } else if ([@"playFromMediaId" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onPlayFromMediaId" arguments:@[call.arguments] result: result];
    } else if ([@"playMediaItem" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onPlayMediaItem" arguments:@[call.arguments] result: result];
    } else if ([@"skipToQueueItem" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onSkipToQueueItem" arguments:@[call.arguments] result: result];
    } else if ([@"pause" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onPause" arguments:nil result: result];
    } else if ([@"stop" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onStop" arguments:nil result: result];
    } else if ([@"seekTo" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onSeekTo" arguments:@[call.arguments] result: result];
    } else if ([@"skipToNext" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onSkipToNext" arguments:nil result: result];
    } else if ([@"skipToPrevious" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onSkipToPrevious" arguments:nil result: result];
    } else if ([@"fastForward" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onFastForward" arguments:nil result: result];
    } else if ([@"rewind" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onRewind" arguments:nil result: result];
    } else if ([@"setRepeatMode" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onSetRepeatMode" arguments:@[call.arguments] result: result];
    } else if ([@"setShuffleMode" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onSetShuffleMode" arguments:@[call.arguments] result: result];
    } else if ([@"setRating" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onSetRating" arguments:@[call.arguments[@"rating"], call.arguments[@"extras"]] result: result];
    } else if ([@"setSpeed" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onSetSpeed" arguments:@[call.arguments] result: result];
    } else if ([@"seekForward" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onSeekForward" arguments:@[call.arguments] result: result];
    } else if ([@"seekBackward" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onSeekBackward" arguments:@[call.arguments] result: result];
    } else if ([@"setState" isEqualToString:call.method]) {
        // Background isolate pushes new playback state. Positional args:
        // [0] controls, [1] systemActions, [2] processingState, [3] playing,
        // [4] position, [5] bufferedPosition, [6] speed, [7] updateTime,
        // [8] (unused here), [9] repeatMode, [10] shuffleMode.
        long long msSinceEpoch;
        if (call.arguments[7] != [NSNull null]) {
            msSinceEpoch = [call.arguments[7] longLongValue];
        } else {
            msSinceEpoch = (long long)([[NSDate date] timeIntervalSince1970] * 1000.0);
        }
        // Rebuild the action bitmask from the controls and system actions.
        actionBits = 0;
        NSArray *controlsArray = call.arguments[0];
        for (int i = 0; i < controlsArray.count; i++) {
            NSDictionary *control = (NSDictionary *)controlsArray[i];
            NSNumber *actionIndex = (NSNumber *)control[@"action"];
            int actionCode = 1 << [actionIndex intValue];
            actionBits |= actionCode;
        }
        NSArray *systemActionsArray = call.arguments[1];
        for (int i = 0; i < systemActionsArray.count; i++) {
            NSNumber *actionIndex = (NSNumber *)systemActionsArray[i];
            int actionCode = 1 << [actionIndex intValue];
            actionBits |= actionCode;
        }
        processingState = [call.arguments[2] intValue];
        playing = [call.arguments[3] boolValue];
        position = call.arguments[4];
        bufferedPosition = call.arguments[5];
        speed = call.arguments[6];
        repeatMode = call.arguments[9];
        shuffleMode = call.arguments[10];
        updateTime = [NSNumber numberWithLongLong: msSinceEpoch];
        [self broadcastPlaybackState];
        [self updateControls];
        [self updateNowPlayingInfo];
        result(@(YES));
    } else if ([@"setQueue" isEqualToString:call.method]) {
        queue = call.arguments;
        [channel invokeMethod:@"onQueueChanged" arguments:@[queue]];
        result(@YES);
    } else if ([@"setMediaItem" isEqualToString:call.method]) {
        mediaItem = call.arguments;
        NSString* artUri = mediaItem[@"artUri"];
        artwork = nil;
        if (![artUri isEqual: [NSNull null]]) {
            // NOTE(review): NSNull is assigned to an NSString* variable here as
            // a "no value" sentinel; it compiles but is type-unsound — consider
            // using nil instead.
            NSString* artCacheFilePath = [NSNull null];
            NSDictionary* extras = mediaItem[@"extras"];
            if (![extras isEqual: [NSNull null]]) {
                artCacheFilePath = extras[@"artCacheFile"];
            }
            if (![artCacheFilePath isEqual: [NSNull null]]) {
#if TARGET_OS_IPHONE
                UIImage* artImage = [UIImage imageWithContentsOfFile:artCacheFilePath];
#else
                NSImage* artImage = [[NSImage alloc] initWithContentsOfFile:artCacheFilePath];
#endif
                if (artImage != nil) {
#if TARGET_OS_IPHONE
                    artwork = [[MPMediaItemArtwork alloc] initWithImage: artImage];
#else
                    artwork = [[MPMediaItemArtwork alloc] initWithBoundsSize:artImage.size requestHandler:^NSImage* _Nonnull(CGSize aSize) {
                        return artImage;
                    }];
#endif
                }
            }
        }
        [self updateNowPlayingInfo];
        [channel invokeMethod:@"onMediaChanged" arguments:@[call.arguments]];
        result(@(YES));
    } else if ([@"notifyChildrenChanged" isEqualToString:call.method]) {
        // Android-only concept; acknowledge so callers don't hang.
        result(@YES);
    } else if ([@"androidForceEnableMediaButtons" isEqualToString:call.method]) {
        // Android-only concept; acknowledge so callers don't hang.
        result(@YES);
    } else {
        // TODO: Check if this implementation is correct.
        // Can I just pass on the result as the last argument?
        [backgroundChannel invokeMethod:call.method arguments:call.arguments result: result];
    }
}
// MPRemoteCommandCenter handler for the play command; forwards to Dart.
- (MPRemoteCommandHandlerStatus) play: (MPRemoteCommandEvent *) event {
    NSLog(@"play");
    [backgroundChannel invokeMethod:@"onPlay" arguments:nil];
    return MPRemoteCommandHandlerStatusSuccess;
}

// MPRemoteCommandCenter handler for the pause command; forwards to Dart.
- (MPRemoteCommandHandlerStatus) pause: (MPRemoteCommandEvent *) event {
    NSLog(@"pause");
    [backgroundChannel invokeMethod:@"onPause" arguments:nil];
    return MPRemoteCommandHandlerStatusSuccess;
}
// Publishes the current media item, artwork, elapsed position and playback
// rate to MPNowPlayingInfoCenter so the lock screen / control center reflect
// the service's state.
- (void) updateNowPlayingInfo {
    NSMutableDictionary *nowPlayingInfo = [NSMutableDictionary new];
    if (mediaItem) {
        // Values arrive from Dart over the method channel, where a Dart null
        // maps to NSNull. Guard every field (as artist/duration already were)
        // so NSNull is never inserted into the now-playing dictionary, which
        // MediaPlayer would treat as an invalid value.
        if (mediaItem[@"title"] != [NSNull null]) {
            nowPlayingInfo[MPMediaItemPropertyTitle] = mediaItem[@"title"];
        }
        if (mediaItem[@"album"] != [NSNull null]) {
            nowPlayingInfo[MPMediaItemPropertyAlbumTitle] = mediaItem[@"album"];
        }
        if (mediaItem[@"artist"] != [NSNull null]) {
            nowPlayingInfo[MPMediaItemPropertyArtist] = mediaItem[@"artist"];
        }
        if (mediaItem[@"duration"] != [NSNull null]) {
            // Dart sends milliseconds; MediaPlayer expects seconds.
            nowPlayingInfo[MPMediaItemPropertyPlaybackDuration] = [NSNumber numberWithLongLong: ([mediaItem[@"duration"] longLongValue] / 1000)];
        }
        if (@available(iOS 3.0, macOS 10.13.2, *)) {
            if (artwork) {
                nowPlayingInfo[MPMediaItemPropertyArtwork] = artwork;
            }
        }
        // position is in milliseconds; elapsed playback time is in seconds.
        // (Messaging a nil position yields 0, which is a safe default.)
        nowPlayingInfo[MPNowPlayingInfoPropertyElapsedPlaybackTime] = [NSNumber numberWithInt:([position intValue] / 1000)];
    }
    // NOTE(review): reports 1.0 while playing rather than the actual `speed`
    // value — kept as-is to preserve existing behavior.
    nowPlayingInfo[MPNowPlayingInfoPropertyPlaybackRate] = [NSNumber numberWithDouble: playing ? 1.0 : 0.0];
    [MPNowPlayingInfoCenter defaultCenter].nowPlayingInfo = nowPlayingInfo;
}
// Synchronizes every remote command's enabled state and handler registration
// with the current action bitmask.
- (void) updateControls {
    enum MediaAction current = AStop;
    while (current <= ASeekForward) {
        [self updateControl:current];
        current++;
    }
    // From now on updateControl: may skip commands whose state is unchanged.
    _controlsUpdated = YES;
}
// Enables/disables the remote command corresponding to `action` and attaches
// or detaches its handler, according to the matching bit in actionBits.
- (void) updateControl:(enum MediaAction)action {
    MPRemoteCommand *command = commands[action];
    // Actions with no corresponding iOS command are represented by NSNull in
    // the commands array (set up in the "start" handler).
    if (command == [NSNull null]) return;
    // Shift the actionBits right until the least significant bit is the tested
    // action bit, and AND that with a 1 at the same position. All bits become
    // 0, other than the tested action bit, which will be 0 or 1 according to
    // its status in the actionBits long.
    BOOL enable = ((actionBits >> action) & 1);
    // After the first full sync, skip commands whose state hasn't changed so
    // handlers aren't repeatedly added/removed.
    if (_controlsUpdated && enable == command.enabled) return;
    [command setEnabled:enable];
    switch (action) {
        case AStop:
            if (enable) {
                [commandCenter.stopCommand addTarget:self action:@selector(stop:)];
            } else {
                [commandCenter.stopCommand removeTarget:nil];
            }
            break;
        case APause:
            if (enable) {
                [commandCenter.pauseCommand addTarget:self action:@selector(pause:)];
            } else {
                [commandCenter.pauseCommand removeTarget:nil];
            }
            break;
        case APlay:
            if (enable) {
                [commandCenter.playCommand addTarget:self action:@selector(play:)];
            } else {
                [commandCenter.playCommand removeTarget:nil];
            }
            break;
        case ARewind:
            // Only hook up skip-backward when a positive interval was supplied.
            if (rewindInterval.integerValue > 0) {
                if (enable) {
                    [commandCenter.skipBackwardCommand addTarget: self action:@selector(skipBackward:)];
                    // Interval arrives in milliseconds; the command wants seconds.
                    int rewindIntervalInSeconds = [rewindInterval intValue]/1000;
                    NSNumber *rewindIntervalInSec = [NSNumber numberWithInt: rewindIntervalInSeconds];
                    commandCenter.skipBackwardCommand.preferredIntervals = @[rewindIntervalInSec];
                } else {
                    [commandCenter.skipBackwardCommand removeTarget:nil];
                }
            }
            break;
        case ASkipToPrevious:
            if (enable) {
                [commandCenter.previousTrackCommand addTarget:self action:@selector(previousTrack:)];
            } else {
                [commandCenter.previousTrackCommand removeTarget:nil];
            }
            break;
        case ASkipToNext:
            if (enable) {
                [commandCenter.nextTrackCommand addTarget:self action:@selector(nextTrack:)];
            } else {
                [commandCenter.nextTrackCommand removeTarget:nil];
            }
            break;
        case AFastForward:
            // Only hook up skip-forward when a positive interval was supplied.
            if (fastForwardInterval.integerValue > 0) {
                if (enable) {
                    [commandCenter.skipForwardCommand addTarget: self action:@selector(skipForward:)];
                    // Interval arrives in milliseconds; the command wants seconds.
                    int fastForwardIntervalInSeconds = [fastForwardInterval intValue]/1000;
                    NSNumber *fastForwardIntervalInSec = [NSNumber numberWithInt: fastForwardIntervalInSeconds];
                    commandCenter.skipForwardCommand.preferredIntervals = @[fastForwardIntervalInSec];
                } else {
                    [commandCenter.skipForwardCommand removeTarget:nil];
                }
            }
            break;
        case ASetRating:
            // TODO:
            // commandCenter.ratingCommand
            // commandCenter.dislikeCommand
            // commandCenter.bookmarkCommand
            break;
        case ASeekTo:
            if (@available(iOS 9.1, macOS 10.12.2, *)) {
                if (enable) {
                    [commandCenter.changePlaybackPositionCommand addTarget:self action:@selector(changePlaybackPosition:)];
                } else {
                    [commandCenter.changePlaybackPositionCommand removeTarget:nil];
                }
            }
            // Fix: the original fell through into APlayPause (harmlessly, since
            // that case was empty); make the intent explicit with a break.
            break;
        case APlayPause:
            // Automatically enabled.
            break;
        case ASetRepeatMode:
            if (enable) {
                [commandCenter.changeRepeatModeCommand addTarget:self action:@selector(changeRepeatMode:)];
            } else {
                [commandCenter.changeRepeatModeCommand removeTarget:nil];
            }
            break;
        case ASetShuffleMode:
            if (enable) {
                [commandCenter.changeShuffleModeCommand addTarget:self action:@selector(changeShuffleMode:)];
            } else {
                [commandCenter.changeShuffleModeCommand removeTarget:nil];
            }
            break;
        case ASeekBackward:
            if (enable) {
                [commandCenter.seekBackwardCommand addTarget:self action:@selector(seekBackward:)];
            } else {
                [commandCenter.seekBackwardCommand removeTarget:nil];
            }
            break;
        case ASeekForward:
            if (enable) {
                [commandCenter.seekForwardCommand addTarget:self action:@selector(seekForward:)];
            } else {
                [commandCenter.seekForwardCommand removeTarget:nil];
            }
            break;
        default:
            // Remaining MediaAction values have no associated remote command
            // (their commands[] entry is NSNull, handled by the early return
            // above); an explicit default also silences -Wswitch.
            break;
    }
}
// Headset/remote play-pause toggle; forwarded to Dart as a "click" with
// button index 0 (media button).
- (MPRemoteCommandHandlerStatus) togglePlayPause: (MPRemoteCommandEvent *) event {
    NSLog(@"togglePlayPause");
    [backgroundChannel invokeMethod:@"onClick" arguments:@[@(0)]];
    return MPRemoteCommandHandlerStatusSuccess;
}

// Remote stop command; forwarded to Dart.
- (MPRemoteCommandHandlerStatus) stop: (MPRemoteCommandEvent *) event {
    NSLog(@"stop");
    [backgroundChannel invokeMethod:@"onStop" arguments:nil];
    return MPRemoteCommandHandlerStatusSuccess;
}

// Remote next-track command; forwarded to Dart.
- (MPRemoteCommandHandlerStatus) nextTrack: (MPRemoteCommandEvent *) event {
    NSLog(@"nextTrack");
    [backgroundChannel invokeMethod:@"onSkipToNext" arguments:nil];
    return MPRemoteCommandHandlerStatusSuccess;
}

// Remote previous-track command; forwarded to Dart.
- (MPRemoteCommandHandlerStatus) previousTrack: (MPRemoteCommandEvent *) event {
    NSLog(@"previousTrack");
    [backgroundChannel invokeMethod:@"onSkipToPrevious" arguments:nil];
    return MPRemoteCommandHandlerStatusSuccess;
}

// Scrubber position change; event position is in seconds, Dart expects
// milliseconds.
- (MPRemoteCommandHandlerStatus) changePlaybackPosition: (MPChangePlaybackPositionCommandEvent *) event {
    NSLog(@"changePlaybackPosition");
    [backgroundChannel invokeMethod:@"onSeekTo" arguments: @[@((long long) (event.positionTime * 1000))]];
    return MPRemoteCommandHandlerStatusSuccess;
}

// Skip-forward command (interval button); mapped to fast forward.
- (MPRemoteCommandHandlerStatus) skipForward: (MPRemoteCommandEvent *) event {
    NSLog(@"skipForward");
    [backgroundChannel invokeMethod:@"onFastForward" arguments:nil];
    return MPRemoteCommandHandlerStatusSuccess;
}

// Skip-backward command (interval button); mapped to rewind.
- (MPRemoteCommandHandlerStatus) skipBackward: (MPRemoteCommandEvent *) event {
    NSLog(@"skipBackward");
    [backgroundChannel invokeMethod:@"onRewind" arguments:nil];
    return MPRemoteCommandHandlerStatusSuccess;
}

// Continuous seek forward; passes YES while seeking begins, NO when it ends.
- (MPRemoteCommandHandlerStatus) seekForward: (MPSeekCommandEvent *) event {
    NSLog(@"seekForward");
    BOOL begin = event.type == MPSeekCommandEventTypeBeginSeeking;
    [backgroundChannel invokeMethod:@"onSeekForward" arguments:@[@(begin)]];
    return MPRemoteCommandHandlerStatusSuccess;
}

// Continuous seek backward; passes YES while seeking begins, NO when it ends.
- (MPRemoteCommandHandlerStatus) seekBackward: (MPSeekCommandEvent *) event {
    NSLog(@"seekBackward");
    BOOL begin = event.type == MPSeekCommandEventTypeBeginSeeking;
    [backgroundChannel invokeMethod:@"onSeekBackward" arguments:@[@(begin)]];
    return MPRemoteCommandHandlerStatusSuccess;
}

// Maps MPRepeatType to the Dart AudioServiceRepeatMode index
// (0 = off, 1 = one, 2 = all).
- (MPRemoteCommandHandlerStatus) changeRepeatMode: (MPChangeRepeatModeCommandEvent *) event {
    NSLog(@"changeRepeatMode");
    int modeIndex;
    switch (event.repeatType) {
        case MPRepeatTypeOff:
            modeIndex = 0;
            break;
        case MPRepeatTypeOne:
            modeIndex = 1;
            break;
        // MPRepeatTypeAll
        default:
            modeIndex = 2;
            break;
    }
    [backgroundChannel invokeMethod:@"onSetRepeatMode" arguments:@[@(modeIndex)]];
    return MPRemoteCommandHandlerStatusSuccess;
}

// Maps MPShuffleType to the Dart AudioServiceShuffleMode index
// (0 = off, 1 = items, 2 = collections/all).
- (MPRemoteCommandHandlerStatus) changeShuffleMode: (MPChangeShuffleModeCommandEvent *) event {
    NSLog(@"changeShuffleMode");
    int modeIndex;
    switch (event.shuffleType) {
        case MPShuffleTypeOff:
            modeIndex = 0;
            break;
        case MPShuffleTypeItems:
            modeIndex = 1;
            break;
        // MPShuffleTypeCollections
        default:
            modeIndex = 2;
            break;
    }
    [backgroundChannel invokeMethod:@"onSetShuffleMode" arguments:@[@(modeIndex)]];
    return MPRemoteCommandHandlerStatusSuccess;
}
// Detach from any notifications observed during this instance's lifetime.
- (void) dealloc {
    [[NSNotificationCenter defaultCenter] removeObserver:self];
}
@end

21
ios/audio_service.podspec Normal file
View File

@ -0,0 +1,21 @@
#
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
# CocoaPods spec for the iOS implementation of the audio_service plugin.
# NOTE(review): name aside, summary/description/homepage/author still carry
# the Flutter plugin template placeholders — confirm before publishing.
Pod::Spec.new do |s|
  s.name = 'audio_service'
  s.version = '0.0.1'
  s.summary = 'A new flutter plugin project.'
  s.description = <<-DESC
A new flutter plugin project.
                       DESC
  s.homepage = 'http://example.com'
  s.license = { :file => '../LICENSE' }
  s.author = { 'Your Company' => 'email@example.com' }
  s.source = { :path => '.' }
  # Native sources and public headers live under ios/Classes.
  s.source_files = 'Classes/**/*'
  s.public_header_files = 'Classes/**/*.h'
  s.dependency 'Flutter'
  s.ios.deployment_target = '8.0'
end

1765
lib/audio_service.dart Normal file

File diff suppressed because it is too large Load Diff

354
lib/audio_service_web.dart Normal file
View File

@ -0,0 +1,354 @@
import 'dart:async';
import 'dart:html' as html;
import 'dart:js' as js;
import 'package:audio_service/js/media_metadata.dart';
import 'js/media_session_web.dart';
import 'package:audio_service/audio_service.dart';
import 'package:flutter/services.dart';
import 'package:flutter_web_plugins/flutter_web_plugins.dart';
/// Method names carrying this prefix are custom actions, forwarded to the
/// background handler unchanged.
const String _CUSTOM_PREFIX = 'custom_';

/// Artwork entry in the shape expected by the browser's MediaSession
/// metadata ({src, type, sizes}).
class Art {
  // Image URL.
  String src;
  // MIME type, e.g. 'image/png'.
  String type;
  // Space-separated size list, e.g. '512x512'.
  String sizes;
  Art({this.src, this.type, this.sizes});
}
/// Web entry point for the audio_service plugin.
///
/// The web has no separate background isolate, so a [ClientHandler] and a
/// [BackgroundHandler] are both registered against the same registrar and
/// share state through this object.
class AudioServicePlugin {
  // Start configuration captured from the client's 'start' call, in ms.
  int fastForwardInterval;
  int rewindInterval;
  Map params;
  // Whether 'start' has been called and not yet 'stopped'.
  bool started = false;
  ClientHandler clientHandler;
  BackgroundHandler backgroundHandler;

  /// Called by the Flutter web plugin registrant.
  static void registerWith(Registrar registrar) {
    AudioServicePlugin(registrar);
  }

  AudioServicePlugin(Registrar registrar) {
    clientHandler = ClientHandler(this, registrar);
    backgroundHandler = BackgroundHandler(this, registrar);
  }
}
/// Platform side of the 'ryanheise.com/audioService' channel on the web.
///
/// Requests from the UI-facing AudioService API are either handled locally
/// (start/connect/isRunning) or forwarded to the background handler's
/// channel, mirroring how the mobile platforms relay to the background
/// isolate.
class ClientHandler {
  final AudioServicePlugin plugin;
  final MethodChannel channel;

  ClientHandler(this.plugin, Registrar registrar)
      : channel = MethodChannel(
          'ryanheise.com/audioService',
          const StandardMethodCodec(),
          registrar.messenger,
        ) {
    channel.setMethodCallHandler(handleServiceMethodCall);
  }

  /// Sends [method] back to the Dart client side of this channel.
  Future<T> invokeMethod<T>(String method, [dynamic arguments]) =>
      channel.invokeMethod(method, arguments);

  /// Dispatches an incoming client request.
  Future<dynamic> handleServiceMethodCall(MethodCall call) async {
    switch (call.method) {
      case 'start':
        // Capture the start configuration; the background handler returns it
        // from its 'ready' call.
        plugin.fastForwardInterval = call.arguments['fastForwardInterval'];
        plugin.rewindInterval = call.arguments['rewindInterval'];
        plugin.params = call.arguments['params'];
        plugin.started = true;
        return plugin.started;
      case 'connect':
        // No-op not really anything for us to do with connect on the web, the
        // streams should all be hydrated
        break;
      case 'disconnect':
        // No-op not really anything for us to do with disconnect on the web,
        // the streams should stay hydrated because everything is static and we
        // aren't working with isolates
        break;
      case 'isRunning':
        return plugin.started;
      case 'rewind':
        return plugin.backgroundHandler.invokeMethod('onRewind');
      case 'fastForward':
        return plugin.backgroundHandler.invokeMethod('onFastForward');
      case 'skipToPrevious':
        return plugin.backgroundHandler.invokeMethod('onSkipToPrevious');
      case 'skipToNext':
        return plugin.backgroundHandler.invokeMethod('onSkipToNext');
      case 'play':
        return plugin.backgroundHandler.invokeMethod('onPlay');
      case 'pause':
        return plugin.backgroundHandler.invokeMethod('onPause');
      case 'stop':
        return plugin.backgroundHandler.invokeMethod('onStop');
      case 'seekTo':
        return plugin.backgroundHandler
            .invokeMethod('onSeekTo', [call.arguments]);
      case 'prepareFromMediaId':
        return plugin.backgroundHandler
            .invokeMethod('onPrepareFromMediaId', [call.arguments]);
      case 'playFromMediaId':
        return plugin.backgroundHandler
            .invokeMethod('onPlayFromMediaId', [call.arguments]);
      case 'setBrowseMediaParent':
        return plugin.backgroundHandler
            .invokeMethod('onLoadChildren', [call.arguments]);
      // NOTE(review): the native platforms receive this method as 'click';
      // confirm this case name ever matches on the web.
      case 'onClick':
        // No-op we don't really have the idea of a bluetooth button click on
        // the web
        break;
      case 'addQueueItem':
        return plugin.backgroundHandler
            .invokeMethod('onAddQueueItem', [call.arguments]);
      case 'addQueueItemAt':
        return plugin.backgroundHandler
            .invokeMethod('onQueueItemAt', call.arguments);
      case 'removeQueueItem':
        return plugin.backgroundHandler
            .invokeMethod('onRemoveQueueItem', [call.arguments]);
      case 'updateQueue':
        return plugin.backgroundHandler
            .invokeMethod('onUpdateQueue', [call.arguments]);
      case 'updateMediaItem':
        return plugin.backgroundHandler
            .invokeMethod('onUpdateMediaItem', [call.arguments]);
      case 'prepare':
        return plugin.backgroundHandler.invokeMethod('onPrepare');
      case 'playMediaItem':
        return plugin.backgroundHandler
            .invokeMethod('onPlayMediaItem', [call.arguments]);
      case 'skipToQueueItem':
        return plugin.backgroundHandler
            .invokeMethod('onSkipToMediaItem', [call.arguments]);
      case 'setRepeatMode':
        return plugin.backgroundHandler
            .invokeMethod('onSetRepeatMode', [call.arguments]);
      case 'setShuffleMode':
        return plugin.backgroundHandler
            .invokeMethod('onSetShuffleMode', [call.arguments]);
      case 'setRating':
        return plugin.backgroundHandler.invokeMethod('onSetRating',
            [call.arguments['rating'], call.arguments['extras']]);
      case 'setSpeed':
        return plugin.backgroundHandler
            .invokeMethod('onSetSpeed', [call.arguments]);
      default:
        // Custom actions are forwarded verbatim.
        if (call.method.startsWith(_CUSTOM_PREFIX)) {
          final result = await plugin.backgroundHandler
              .invokeMethod(call.method, call.arguments);
          return result;
        }
        throw PlatformException(
            code: 'Unimplemented',
            details: "The audio Service plugin for web doesn't implement "
                "the method '${call.method}'");
    }
  }
}
/// Platform side of the 'ryanheise.com/audioServiceBackground' channel on
/// the web.
///
/// Receives state pushes from the background task (setState, setMediaItem,
/// setQueue), mirrors them into the browser's MediaSession API, and relays
/// the resulting events back to the client handler.
class BackgroundHandler {
  final AudioServicePlugin plugin;
  final MethodChannel channel;
  // The most recently set media item; used for MediaSession position state.
  MediaItem mediaItem;

  BackgroundHandler(this.plugin, Registrar registrar)
      : channel = MethodChannel(
          'ryanheise.com/audioServiceBackground',
          const StandardMethodCodec(),
          registrar.messenger,
        ) {
    channel.setMethodCallHandler(handleBackgroundMethodCall);
  }

  /// Sends [method] to the Dart background task side of this channel.
  Future<T> invokeMethod<T>(String method, [dynamic arguments]) =>
      channel.invokeMethod(method, arguments);

  /// Dispatches an incoming background-task request.
  Future<dynamic> handleBackgroundMethodCall(MethodCall call) async {
    switch (call.method) {
      case 'started':
        return started(call);
      case 'ready':
        return ready(call);
      case 'stopped':
        return stopped(call);
      case 'setState':
        return setState(call);
      case 'setMediaItem':
        return setMediaItem(call);
      case 'setQueue':
        return setQueue(call);
      case 'androidForceEnableMediaButtons':
        //no-op
        break;
      default:
        throw PlatformException(
            code: 'Unimplemented',
            details:
                "The audio service background plugin for web doesn't implement "
                "the method '${call.method}'");
    }
  }

  /// Acknowledges that the background task started.
  Future<bool> started(MethodCall call) async => true;

  /// Returns the start configuration captured by the client handler,
  /// defaulting both skip intervals to 30 seconds (in ms).
  Future<dynamic> ready(MethodCall call) async => {
        'fastForwardInterval': plugin.fastForwardInterval ?? 30000,
        'rewindInterval': plugin.rewindInterval ?? 30000,
        'params': plugin.params
      };

  /// Clears the MediaSession metadata and resets plugin state.
  Future<void> stopped(MethodCall call) async {
    final session = html.window.navigator.mediaSession;
    session.metadata = null;
    plugin.started = false;
    mediaItem = null;
    plugin.clientHandler.invokeMethod('onStopped');
  }

  /// Applies a playback-state push: rebinds MediaSession action handlers,
  /// updates position state, and rebroadcasts the state to the client.
  /// Positional args mirror the native platforms: [0] controls,
  /// [1] systemActions, [2] processingState, [3] playing, [4] position,
  /// [5] bufferedPosition, [6] speed, [7] updateTime, [9] repeatMode,
  /// [10] shuffleMode.
  Future<void> setState(MethodCall call) async {
    final session = html.window.navigator.mediaSession;
    final List args = call.arguments;
    final List<MediaControl> controls = call.arguments[0]
        .map<MediaControl>((element) => MediaControl(
            action: MediaAction.values[element['action']],
            androidIcon: element['androidIcon'],
            label: element['label']))
        .toList();
    // Reset the handlers
    // TODO: Make this better... Like only change ones that have been changed
    try {
      session.setActionHandler('play', null);
      session.setActionHandler('pause', null);
      session.setActionHandler('previoustrack', null);
      session.setActionHandler('nexttrack', null);
      session.setActionHandler('seekbackward', null);
      session.setActionHandler('seekforward', null);
      session.setActionHandler('stop', null);
    } catch (e) {}
    int actionBits = 0;
    for (final control in controls) {
      // Browsers throw for unsupported actions; best-effort, so swallow.
      try {
        switch (control.action) {
          case MediaAction.play:
            session.setActionHandler('play', AudioService.play);
            break;
          case MediaAction.pause:
            session.setActionHandler('pause', AudioService.pause);
            break;
          case MediaAction.skipToPrevious:
            session.setActionHandler(
                'previoustrack', AudioService.skipToPrevious);
            break;
          case MediaAction.skipToNext:
            session.setActionHandler('nexttrack', AudioService.skipToNext);
            break;
          // The naming convention here is a bit odd but seekbackward seems more
          // analogous to rewind than seekBackward
          case MediaAction.rewind:
            session.setActionHandler('seekbackward', AudioService.rewind);
            break;
          case MediaAction.fastForward:
            session.setActionHandler('seekforward', AudioService.fastForward);
            break;
          case MediaAction.stop:
            session.setActionHandler('stop', AudioService.stop);
            break;
          default:
            // no-op
            break;
        }
      } catch (e) {}
      int actionCode = 1 << control.action.index;
      actionBits |= actionCode;
    }
    for (int rawSystemAction in call.arguments[1]) {
      MediaAction action = MediaAction.values[rawSystemAction];
      switch (action) {
        case MediaAction.seekTo:
          try {
            // dart:html doesn't expose 'seekto', so bind via JS interop.
            setActionHandler('seekto', js.allowInterop((ActionResult ev) {
              print(ev.action);
              print(ev.seekTime);
              // Chrome uses seconds for whatever reason
              AudioService.seekTo(Duration(
                milliseconds: (ev.seekTime * 1000).round(),
              ));
            }));
          } catch (e) {}
          break;
        default:
          // no-op
          break;
      }
      int actionCode = 1 << rawSystemAction;
      actionBits |= actionCode;
    }
    try {
      // Dart also doesn't expose setPositionState
      if (mediaItem != null) {
        print(
            'Setting positionState Duration(${mediaItem.duration.inSeconds}), PlaybackRate(${args[6] ?? 1.0}), Position(${Duration(milliseconds: args[4]).inSeconds})');
        // Chrome looks for seconds for some reason
        setPositionState(PositionState(
          duration: (mediaItem.duration?.inMilliseconds ?? 0) / 1000,
          playbackRate: args[6] ?? 1.0,
          position: (args[4] ?? 0) / 1000,
        ));
      }
    } catch (e) {
      print(e);
    }
    // Rebroadcast to the client in the positional wire format.
    plugin.clientHandler.invokeMethod('onPlaybackStateChanged', [
      args[2], // Processing state
      args[3], // Playing
      actionBits, // Action bits
      args[4], // Position
      args[5], // bufferedPosition
      args[6] ?? 1.0, // speed
      args[7] ?? DateTime.now().millisecondsSinceEpoch, // updateTime
      args[9], // repeatMode
      args[10], // shuffleMode
    ]);
  }

  /// Publishes [call]'s media item to the browser MediaSession metadata and
  /// notifies the client.
  Future<void> setMediaItem(MethodCall call) async {
    mediaItem = MediaItem.fromJson(call.arguments);
    // This would be how we could pull images out of the cache... But nothing is actually cached on web
    final artUri = /* mediaItem.extras['artCacheFile'] ?? */
        mediaItem.artUri;
    try {
      metadata = MediaMetadata(MetadataLiteral(
        album: mediaItem.album,
        title: mediaItem.title,
        artist: mediaItem.artist,
        artwork: [
          MetadataArtwork(
            src: artUri,
            sizes: '512x512',
          )
        ],
      ));
    } catch (e) {
      print('Metadata failed $e');
    }
    plugin.clientHandler.invokeMethod('onMediaChanged', [mediaItem.toJson()]);
  }

  /// Relays a queue change to the client (the browser has no queue concept).
  Future<void> setQueue(MethodCall call) async {
    plugin.clientHandler.invokeMethod('onQueueChanged', [call.arguments]);
  }
}

View File

@ -0,0 +1,32 @@
// JS-interop bindings for the browser's MediaMetadata constructor, used to
// populate navigator.mediaSession.metadata.
@JS()
library media_metadata;

import 'package:js/js.dart';

/// Binds to the global `MediaMetadata` JS constructor.
@JS('MediaMetadata')
class MediaMetadata {
  external MediaMetadata(MetadataLiteral md);
}

/// Anonymous JS object literal passed to the MediaMetadata constructor.
@JS()
@anonymous
class MetadataLiteral {
  external String get title;
  external String get album;
  external String get artist;
  external List<MetadataArtwork> get artwork;
  external factory MetadataLiteral(
      {String title,
      String album,
      String artist,
      List<MetadataArtwork> artwork});
}

/// Anonymous JS artwork entry ({src, sizes, type}).
@JS()
@anonymous
class MetadataArtwork {
  external String get src;
  external String get sizes;
  external String get type;
  external factory MetadataArtwork({String src, String sizes, String type});
}

View File

@ -0,0 +1,36 @@
@JS('navigator.mediaSession')
library media_session_web;
import 'package:js/js.dart';
import 'media_metadata.dart';
/// Registers [callback] for a Media Session action (e.g. 'play', 'pause',
/// 'seekto') via `navigator.mediaSession.setActionHandler`.
@JS('setActionHandler')
external void setActionHandler(String action, Function(ActionResult) callback);

/// Publishes duration/position/playback-rate to the browser via
/// `navigator.mediaSession.setPositionState`.
@JS('setPositionState')
external void setPositionState(PositionState state);

/// Object the browser passes to an action-handler callback.
@JS()
@anonymous
class ActionResult {
  external String get action;
  // Target position in seconds; meaningful for the 'seekto' action.
  external double get seekTime;
  external factory ActionResult({String action, double seekTime});
}

/// Position state literal. Values are in seconds, as required by the
/// Media Session API (unlike the milliseconds used elsewhere in this plugin —
/// callers divide by 1000 before constructing this).
@JS()
@anonymous
class PositionState {
  external double get duration;
  external double get playbackRate;
  external double get position;
  external factory PositionState({
    double duration,
    double playbackRate,
    double position,
  });
}

/// Setter for `navigator.mediaSession.metadata`.
@JS('metadata')
external set metadata(MediaMetadata metadata);

View File

@ -0,0 +1,54 @@
#import <FlutterMacOS/FlutterMacOS.h>
// Plugin entry point, registered as delegate for both the client and the
// background method channels (see AudioServicePlugin.m).
@interface AudioServicePlugin : NSObject<FlutterPlugin>
@end

// Processing states reported over the method channel. Ordinal values are
// serialized as integers, so the order must not change.
enum AudioProcessingState {
    none,
    connecting,
    ready,
    buffering,
    fastForwarding,
    rewinding,
    skippingToPrevious,
    skippingToNext,
    skippingToQueueItem,
    completed,
    stopped,
    error
};

// How an audio interruption should be treated.
enum AudioInterruption {
    AIPause,
    AITemporaryPause,
    AITemporaryDuck,
    AIUnknownPause
};

// Media actions understood by the plugin. Each value's ordinal is used both
// as a bit position in the actionBits mask and as an index into the
// `commands` array in AudioServicePlugin.m, so the order must not change.
// The AUnused_* entries keep later values aligned after removals.
enum MediaAction {
    AStop,
    APause,
    APlay,
    ARewind,
    ASkipToPrevious,
    ASkipToNext,
    AFastForward,
    ASetRating,
    ASeekTo,
    APlayPause,
    APlayFromMediaId,
    APlayFromSearch,
    ASkipToQueueItem,
    APlayFromUri,
    APrepare,
    APrepareFromMediaId,
    APrepareFromSearch,
    APrepareFromUri,
    ASetRepeatMode,
    AUnused_1, // deprecated (setShuffleModeEnabled)
    AUnused_2, // setCaptioningEnabled
    ASetShuffleMode,
    // Non-standard
    ASeekBackward,
    ASeekForward,
};

View File

@ -0,0 +1,617 @@
#import "AudioServicePlugin.h"
#import <AVFoundation/AVFoundation.h>
#import <MediaPlayer/MediaPlayer.h>
// If you'd like to help, please see the TODO comments below, then open a
// GitHub issue to announce your intention to work on a particular feature, and
// submit a pull request. We have an open discussion over at issue #10 about
// all things iOS if you'd like to discuss approaches or ask for input. Thank
// you for your support!
@implementation AudioServicePlugin
// All plugin state is static and shared by the client and background
// channel delegates (there is at most one audio service per app).
static FlutterMethodChannel *channel = nil;            // client-facing channel
static FlutterMethodChannel *backgroundChannel = nil;  // background-task channel
static BOOL _running = NO;                             // whether the background task was started
static FlutterResult startResult = nil;                // pending result of "start" (completed on "started", iOS)
static MPRemoteCommandCenter *commandCenter = nil;
static NSArray *queue = nil;                           // current queue (array of media item dicts)
static NSMutableDictionary *mediaItem = nil;           // currently playing media item dict
static long actionBits;                                // bitmask of enabled MediaActions (bit = enum ordinal)
static NSArray *commands;                              // MediaAction ordinal -> MPRemoteCommand (or NSNull)
static BOOL _controlsUpdated = NO;                     // whether updateControls completed a first full pass
static enum AudioProcessingState processingState = none;
static BOOL playing = NO;
static NSNumber *position = nil;                       // playback position, ms
static NSNumber *bufferedPosition = nil;               // buffered position, ms
static NSNumber *updateTime = nil;                     // ms since epoch when position was captured
static NSNumber *speed = nil;                          // playback speed (1.0 = normal)
static NSNumber *repeatMode = nil;
static NSNumber *shuffleMode = nil;
static NSNumber *fastForwardInterval = nil;            // ms
static NSNumber *rewindInterval = nil;                 // ms
static NSMutableDictionary *params = nil;              // params passed to "start"
static MPMediaItemArtwork* artwork = nil;              // decoded artwork for now-playing info
// Registers the plugin. On iOS this runs once per isolate: the first call
// (channel == nil) wires up the client channel, the second wires up the
// background channel. On macOS there is no separate isolate, so a single
// call attaches one instance to both channels.
// TODO: Need a reliable way to detect whether this is the client
// or background.
// TODO: Handle multiple clients.
+ (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar>*)registrar {
    @synchronized(self) {
        // As no separate isolate is used on macOS, add both handlers to the one registrar.
#if TARGET_OS_IPHONE
        if (channel == nil) {
#endif
            AudioServicePlugin *instance = [[AudioServicePlugin alloc] init:registrar];
            channel = [FlutterMethodChannel
                methodChannelWithName:@"ryanheise.com/audioService"
                binaryMessenger:[registrar messenger]];
            [registrar addMethodCallDelegate:instance channel:channel];
#if TARGET_OS_IPHONE
        } else {
            // Second registration (the background isolate) gets its own instance.
            AudioServicePlugin *instance = [[AudioServicePlugin alloc] init:registrar];
#endif
            backgroundChannel = [FlutterMethodChannel
                methodChannelWithName:@"ryanheise.com/audioServiceBackground"
                binaryMessenger:[registrar messenger]];
            [registrar addMethodCallDelegate:instance channel:backgroundChannel];
#if TARGET_OS_IPHONE
        }
#endif
    }
}
// Initializer. The registrar is accepted for parity with the registration
// call sites but no per-instance state is kept — all state is static.
- (instancetype)init:(NSObject<FlutterPluginRegistrar> *)registrar {
    self = [super init];
    NSAssert(self != nil, @"super init cannot be nil");
    return self;
}
// Pushes the current playback state to the Dart client over the client
// channel. The element order must match the Dart-side decoder.
// NOTE(review): @[] throws on nil elements, so position/bufferedPosition/
// speed/updateTime/repeatMode/shuffleMode must have been initialized (see
// the "connect" and "setState" handlers) before this runs — confirm no
// other call path can reach this first.
- (void)broadcastPlaybackState {
    [channel invokeMethod:@"onPlaybackStateChanged" arguments:@[
        // processingState
        @(processingState),
        // playing
        @(playing),
        // actions
        @(actionBits),
        // position
        position,
        // bufferedPosition
        bufferedPosition,
        // playback speed
        speed,
        // update time since epoch
        updateTime,
        // repeat mode
        repeatMode,
        // shuffle mode
        shuffleMode,
    ]];
}
// Single dispatcher for method calls arriving on BOTH the client channel
// (connect/disconnect/start and user commands) and the background channel
// (ready/started/stopped/setState/setQueue/setMediaItem). Client commands
// are forwarded to the Dart background handler over backgroundChannel;
// background state updates are broadcast back to the client over channel.
// TODO:
// - Restructure this so that we have a separate method call delegate
//   for the client instance and the background instance so that methods
//   can't be called on the wrong instance.
- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result {
    if ([@"connect" isEqualToString:call.method]) {
        long long msSinceEpoch = (long long)([[NSDate date] timeIntervalSince1970] * 1000.0);
        // First connection: seed the playback state with sane defaults.
        if (position == nil) {
            position = @(0);
            bufferedPosition = @(0);
            updateTime = [NSNumber numberWithLongLong: msSinceEpoch];
            speed = [NSNumber numberWithDouble: 1.0];
            repeatMode = @(0);
            shuffleMode = @(0);
        }
        // Notify client of state on subscribing.
        [self broadcastPlaybackState];
        [channel invokeMethod:@"onMediaChanged" arguments:@[mediaItem ? mediaItem : [NSNull null]]];
        [channel invokeMethod:@"onQueueChanged" arguments:@[queue ? queue : [NSNull null]]];
        result(nil);
    } else if ([@"disconnect" isEqualToString:call.method]) {
        result(nil);
    } else if ([@"start" isEqualToString:call.method]) {
        if (_running) {
            result(@NO);
            return;
        }
        _running = YES;
        // The result will be sent after the background task actually starts.
        // See the "ready" case below.
        startResult = result;
#if TARGET_OS_IPHONE
        [AVAudioSession sharedInstance];
#endif
        // Set callbacks on MPRemoteCommandCenter
        fastForwardInterval = [call.arguments objectForKey:@"fastForwardInterval"];
        rewindInterval = [call.arguments objectForKey:@"rewindInterval"];
        commandCenter = [MPRemoteCommandCenter sharedCommandCenter];
        // Indexed by MediaAction ordinal; NSNull marks actions with no
        // corresponding remote command (see updateControl:).
        commands = @[
            commandCenter.stopCommand,
            commandCenter.pauseCommand,
            commandCenter.playCommand,
            commandCenter.skipBackwardCommand,
            commandCenter.previousTrackCommand,
            commandCenter.nextTrackCommand,
            commandCenter.skipForwardCommand,
            [NSNull null],
            commandCenter.changePlaybackPositionCommand,
            commandCenter.togglePlayPauseCommand,
            [NSNull null],
            [NSNull null],
            [NSNull null],
            [NSNull null],
            [NSNull null],
            [NSNull null],
            [NSNull null],
            [NSNull null],
            commandCenter.changeRepeatModeCommand,
            [NSNull null],
            [NSNull null],
            commandCenter.changeShuffleModeCommand,
            commandCenter.seekBackwardCommand,
            commandCenter.seekForwardCommand,
        ];
        [commandCenter.changePlaybackRateCommand setEnabled:YES];
        [commandCenter.togglePlayPauseCommand setEnabled:YES];
        [commandCenter.togglePlayPauseCommand addTarget:self action:@selector(togglePlayPause:)];
        // TODO: enable more commands
        // Language options
        if (@available(iOS 9.0, macOS 10.12.2, *)) {
            [commandCenter.enableLanguageOptionCommand setEnabled:NO];
            [commandCenter.disableLanguageOptionCommand setEnabled:NO];
        }
        // Rating
        [commandCenter.ratingCommand setEnabled:NO];
        // Feedback
        [commandCenter.likeCommand setEnabled:NO];
        [commandCenter.dislikeCommand setEnabled:NO];
        [commandCenter.bookmarkCommand setEnabled:NO];
        [self updateControls];
        // Params
        params = [call.arguments objectForKey:@"params"];
#if TARGET_OS_OSX
        // No isolate can be used for macOS until https://github.com/flutter/flutter/issues/65222 is resolved.
        // We send a result here, and then the Dart code continues in the main isolate.
        result(@YES);
#endif
    } else if ([@"ready" isEqualToString:call.method]) {
        // Background task asks for its start configuration.
        NSMutableDictionary *startParams = [NSMutableDictionary new];
        startParams[@"fastForwardInterval"] = fastForwardInterval;
        startParams[@"rewindInterval"] = rewindInterval;
        startParams[@"params"] = params;
        result(startParams);
    } else if ([@"started" isEqualToString:call.method]) {
#if TARGET_OS_IPHONE
        // Complete the pending "start" call now that the task is running.
        if (startResult) {
            startResult(@YES);
            startResult = nil;
        }
#endif
        result(@YES);
    } else if ([@"stopped" isEqualToString:call.method]) {
        // Tear down all static state and release the command center.
        _running = NO;
        [channel invokeMethod:@"onStopped" arguments:nil];
        [commandCenter.changePlaybackRateCommand setEnabled:NO];
        [commandCenter.togglePlayPauseCommand setEnabled:NO];
        [commandCenter.togglePlayPauseCommand removeTarget:nil];
        [MPNowPlayingInfoCenter defaultCenter].nowPlayingInfo = nil;
        processingState = none;
        playing = NO;
        position = nil;
        bufferedPosition = nil;
        updateTime = nil;
        speed = nil;
        artwork = nil;
        mediaItem = nil;
        repeatMode = @(0);
        shuffleMode = @(0);
        actionBits = 0;
        [self updateControls];
        _controlsUpdated = NO;
        queue = nil;
        startResult = nil;
        fastForwardInterval = nil;
        rewindInterval = nil;
        params = nil;
        commandCenter = nil;
        result(@YES);
    } else if ([@"isRunning" isEqualToString:call.method]) {
        if (_running) {
            result(@YES);
        } else {
            result(@NO);
        }
    } else if ([@"setBrowseMediaParent" isEqualToString:call.method]) {
        // Android-only concept; acknowledged but ignored here.
        result(@YES);
    } else if ([@"addQueueItem" isEqualToString:call.method]) {
        // The following branches forward client commands to the Dart
        // background handler, passing `result` through for completion.
        [backgroundChannel invokeMethod:@"onAddQueueItem" arguments:@[call.arguments] result: result];
    } else if ([@"addQueueItemAt" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onAddQueueItemAt" arguments:call.arguments result: result];
    } else if ([@"removeQueueItem" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onRemoveQueueItem" arguments:@[call.arguments] result: result];
    } else if ([@"updateQueue" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onUpdateQueue" arguments:@[call.arguments] result: result];
    } else if ([@"updateMediaItem" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onUpdateMediaItem" arguments:@[call.arguments] result: result];
    } else if ([@"click" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onClick" arguments:@[call.arguments] result: result];
    } else if ([@"prepare" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onPrepare" arguments:nil result: result];
    } else if ([@"prepareFromMediaId" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onPrepareFromMediaId" arguments:@[call.arguments] result: result];
    } else if ([@"play" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onPlay" arguments:nil result: result];
    } else if ([@"playFromMediaId" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onPlayFromMediaId" arguments:@[call.arguments] result: result];
    } else if ([@"playMediaItem" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onPlayMediaItem" arguments:@[call.arguments] result: result];
    } else if ([@"skipToQueueItem" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onSkipToQueueItem" arguments:@[call.arguments] result: result];
    } else if ([@"pause" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onPause" arguments:nil result: result];
    } else if ([@"stop" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onStop" arguments:nil result: result];
    } else if ([@"seekTo" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onSeekTo" arguments:@[call.arguments] result: result];
    } else if ([@"skipToNext" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onSkipToNext" arguments:nil result: result];
    } else if ([@"skipToPrevious" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onSkipToPrevious" arguments:nil result: result];
    } else if ([@"fastForward" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onFastForward" arguments:nil result: result];
    } else if ([@"rewind" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onRewind" arguments:nil result: result];
    } else if ([@"setRepeatMode" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onSetRepeatMode" arguments:@[call.arguments] result: result];
    } else if ([@"setShuffleMode" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onSetShuffleMode" arguments:@[call.arguments] result: result];
    } else if ([@"setRating" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onSetRating" arguments:@[call.arguments[@"rating"], call.arguments[@"extras"]] result: result];
    } else if ([@"setSpeed" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onSetSpeed" arguments:@[call.arguments] result: result];
    } else if ([@"seekForward" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onSeekForward" arguments:@[call.arguments] result: result];
    } else if ([@"seekBackward" isEqualToString:call.method]) {
        [backgroundChannel invokeMethod:@"onSeekBackward" arguments:@[call.arguments] result: result];
    } else if ([@"setState" isEqualToString:call.method]) {
        // args layout (index 8 unused): [0]=controls, [1]=systemActions,
        // [2]=processingState, [3]=playing, [4]=position(ms),
        // [5]=bufferedPosition(ms), [6]=speed, [7]=updateTime(ms epoch),
        // [9]=repeatMode, [10]=shuffleMode.
        long long msSinceEpoch;
        if (call.arguments[7] != [NSNull null]) {
            msSinceEpoch = [call.arguments[7] longLongValue];
        } else {
            msSinceEpoch = (long long)([[NSDate date] timeIntervalSince1970] * 1000.0);
        }
        // Rebuild the action bitmask from the controls and system actions.
        actionBits = 0;
        NSArray *controlsArray = call.arguments[0];
        for (int i = 0; i < controlsArray.count; i++) {
            NSDictionary *control = (NSDictionary *)controlsArray[i];
            NSNumber *actionIndex = (NSNumber *)control[@"action"];
            int actionCode = 1 << [actionIndex intValue];
            actionBits |= actionCode;
        }
        NSArray *systemActionsArray = call.arguments[1];
        for (int i = 0; i < systemActionsArray.count; i++) {
            NSNumber *actionIndex = (NSNumber *)systemActionsArray[i];
            int actionCode = 1 << [actionIndex intValue];
            actionBits |= actionCode;
        }
        processingState = [call.arguments[2] intValue];
        playing = [call.arguments[3] boolValue];
        position = call.arguments[4];
        bufferedPosition = call.arguments[5];
        speed = call.arguments[6];
        repeatMode = call.arguments[9];
        shuffleMode = call.arguments[10];
        updateTime = [NSNumber numberWithLongLong: msSinceEpoch];
        [self broadcastPlaybackState];
        [self updateControls];
        [self updateNowPlayingInfo];
        result(@(YES));
    } else if ([@"setQueue" isEqualToString:call.method]) {
        queue = call.arguments;
        [channel invokeMethod:@"onQueueChanged" arguments:@[queue]];
        result(@YES);
    } else if ([@"setMediaItem" isEqualToString:call.method]) {
        mediaItem = call.arguments;
        NSString* artUri = mediaItem[@"artUri"];
        artwork = nil;
        // NOTE(review): artUri is read but only checked against NSNull; the
        // artwork is actually loaded from the cached file below, not from
        // this URI — confirm that is the intent.
        if (![artUri isEqual: [NSNull null]]) {
            // NOTE(review): assigning [NSNull null] to an NSString* is a type
            // mismatch (compiler warning); it only works because the value is
            // compared against [NSNull null] below. Should be nil-initialized
            // with an isKindOfClass: check instead.
            NSString* artCacheFilePath = [NSNull null];
            NSDictionary* extras = mediaItem[@"extras"];
            if (![extras isEqual: [NSNull null]]) {
                artCacheFilePath = extras[@"artCacheFile"];
            }
            if (![artCacheFilePath isEqual: [NSNull null]]) {
#if TARGET_OS_IPHONE
                UIImage* artImage = [UIImage imageWithContentsOfFile:artCacheFilePath];
#else
                NSImage* artImage = [[NSImage alloc] initWithContentsOfFile:artCacheFilePath];
#endif
                if (artImage != nil) {
#if TARGET_OS_IPHONE
                    artwork = [[MPMediaItemArtwork alloc] initWithImage: artImage];
#else
                    artwork = [[MPMediaItemArtwork alloc] initWithBoundsSize:artImage.size requestHandler:^NSImage* _Nonnull(CGSize aSize) {
                        return artImage;
                    }];
#endif
                }
            }
        }
        [self updateNowPlayingInfo];
        [channel invokeMethod:@"onMediaChanged" arguments:@[call.arguments]];
        result(@(YES));
    } else if ([@"notifyChildrenChanged" isEqualToString:call.method]) {
        result(@YES);
    } else if ([@"androidForceEnableMediaButtons" isEqualToString:call.method]) {
        result(@YES);
    } else {
        // TODO: Check if this implementation is correct.
        // Can I just pass on the result as the last argument?
        [backgroundChannel invokeMethod:call.method arguments:call.arguments result: result];
    }
}
// Remote "play" command -> Dart onPlay.
- (MPRemoteCommandHandlerStatus)play:(MPRemoteCommandEvent *)event {
    NSLog(@"play");
    [backgroundChannel invokeMethod:@"onPlay" arguments:nil];
    return MPRemoteCommandHandlerStatusSuccess;
}
// Remote "pause" command -> Dart onPause.
- (MPRemoteCommandHandlerStatus)pause:(MPRemoteCommandEvent *)event {
    NSLog(@"pause");
    [backgroundChannel invokeMethod:@"onPause" arguments:nil];
    return MPRemoteCommandHandlerStatusSuccess;
}
// Rebuilds MPNowPlayingInfoCenter's nowPlayingInfo (lock screen / control
// center) from the current mediaItem, position and playing state.
- (void) updateNowPlayingInfo {
    NSMutableDictionary *nowPlayingInfo = [NSMutableDictionary new];
    if (mediaItem) {
        // NOTE(review): title/album are inserted unchecked and may be NSNull
        // — confirm the Dart side always sends non-null values for these.
        nowPlayingInfo[MPMediaItemPropertyTitle] = mediaItem[@"title"];
        nowPlayingInfo[MPMediaItemPropertyAlbumTitle] = mediaItem[@"album"];
        if (mediaItem[@"artist"] != [NSNull null]) {
            nowPlayingInfo[MPMediaItemPropertyArtist] = mediaItem[@"artist"];
        }
        if (mediaItem[@"duration"] != [NSNull null]) {
            // Dart sends milliseconds; MediaPlayer expects seconds.
            nowPlayingInfo[MPMediaItemPropertyPlaybackDuration] = [NSNumber numberWithLongLong: ([mediaItem[@"duration"] longLongValue] / 1000)];
        }
        if (@available(iOS 3.0, macOS 10.13.2, *)) {
            if (artwork) {
                nowPlayingInfo[MPMediaItemPropertyArtwork] = artwork;
            }
        }
        // position may still be nil before the first setState; messaging nil
        // yields 0, so elapsed time is reported as 0 in that case.
        nowPlayingInfo[MPNowPlayingInfoPropertyElapsedPlaybackTime] = [NSNumber numberWithInt:([position intValue] / 1000)];
    }
    // NOTE(review): rate is hard-coded to 1.0 while playing rather than the
    // reported `speed`, so the system-interpolated position may drift at
    // non-1.0 speeds — confirm whether this is intentional.
    nowPlayingInfo[MPNowPlayingInfoPropertyPlaybackRate] = [NSNumber numberWithDouble: playing ? 1.0 : 0.0];
    [MPNowPlayingInfoCenter defaultCenter].nowPlayingInfo = nowPlayingInfo;
}
// Syncs every remote command's enabled state and handler with actionBits.
- (void) updateControls {
    enum MediaAction action;
    for (action = AStop; action <= ASeekForward; action++) {
        [self updateControl:action];
    }
    // From now on, updateControl: may skip commands whose state is unchanged.
    _controlsUpdated = YES;
}
// Enables or disables the MPRemoteCommand corresponding to `action` and
// attaches/removes our handler accordingly.
//
// Bit (1 << action) of actionBits says whether the action is enabled;
// commands[action] holds the matching MPRemoteCommand, or NSNull for
// actions that have no system command.
- (void) updateControl:(enum MediaAction)action {
    MPRemoteCommand *command = commands[action];
    // Cast to id: the array element may actually be the NSNull placeholder
    // (comparing distinct pointer types would otherwise warn).
    if ((id)command == [NSNull null]) return;
    // Shift the actionBits right until the least significant bit is the tested action bit, and AND that with a 1 at the same position.
    // All bytes become 0, other than the tested action bit, which will be 0 or 1 according to its status in the actionBits long.
    BOOL enable = ((actionBits >> action) & 1);
    // After the first full pass (_controlsUpdated), skip unchanged commands
    // so targets are never added twice or removed when absent.
    if (_controlsUpdated && enable == command.enabled) return;
    [command setEnabled:enable];
    switch (action) {
        case AStop:
            if (enable) {
                [commandCenter.stopCommand addTarget:self action:@selector(stop:)];
            } else {
                [commandCenter.stopCommand removeTarget:nil];
            }
            break;
        case APause:
            if (enable) {
                [commandCenter.pauseCommand addTarget:self action:@selector(pause:)];
            } else {
                [commandCenter.pauseCommand removeTarget:nil];
            }
            break;
        case APlay:
            if (enable) {
                [commandCenter.playCommand addTarget:self action:@selector(play:)];
            } else {
                [commandCenter.playCommand removeTarget:nil];
            }
            break;
        case ARewind:
            // Skip-backward is only offered when a positive interval was
            // configured at start.
            if (rewindInterval.integerValue > 0) {
                if (enable) {
                    [commandCenter.skipBackwardCommand addTarget: self action:@selector(skipBackward:)];
                    int rewindIntervalInSeconds = [rewindInterval intValue]/1000;
                    NSNumber *rewindIntervalInSec = [NSNumber numberWithInt: rewindIntervalInSeconds];
                    commandCenter.skipBackwardCommand.preferredIntervals = @[rewindIntervalInSec];
                } else {
                    [commandCenter.skipBackwardCommand removeTarget:nil];
                }
            }
            break;
        case ASkipToPrevious:
            if (enable) {
                [commandCenter.previousTrackCommand addTarget:self action:@selector(previousTrack:)];
            } else {
                [commandCenter.previousTrackCommand removeTarget:nil];
            }
            break;
        case ASkipToNext:
            if (enable) {
                [commandCenter.nextTrackCommand addTarget:self action:@selector(nextTrack:)];
            } else {
                [commandCenter.nextTrackCommand removeTarget:nil];
            }
            break;
        case AFastForward:
            if (fastForwardInterval.integerValue > 0) {
                if (enable) {
                    [commandCenter.skipForwardCommand addTarget: self action:@selector(skipForward:)];
                    int fastForwardIntervalInSeconds = [fastForwardInterval intValue]/1000;
                    NSNumber *fastForwardIntervalInSec = [NSNumber numberWithInt: fastForwardIntervalInSeconds];
                    commandCenter.skipForwardCommand.preferredIntervals = @[fastForwardIntervalInSec];
                } else {
                    [commandCenter.skipForwardCommand removeTarget:nil];
                }
            }
            break;
        case ASetRating:
            // TODO:
            // commandCenter.ratingCommand
            // commandCenter.dislikeCommand
            // commandCenter.bookmarkCommand
            break;
        case ASeekTo:
            if (@available(iOS 9.1, macOS 10.12.2, *)) {
                if (enable) {
                    [commandCenter.changePlaybackPositionCommand addTarget:self action:@selector(changePlaybackPosition:)];
                } else {
                    [commandCenter.changePlaybackPositionCommand removeTarget:nil];
                }
            }
            // BUGFIX: this case previously fell through into APlayPause
            // (harmless today since that case is empty, but a latent trap).
            break;
        case APlayPause:
            // Automatically enabled.
            break;
        case ASetRepeatMode:
            if (enable) {
                [commandCenter.changeRepeatModeCommand addTarget:self action:@selector(changeRepeatMode:)];
            } else {
                [commandCenter.changeRepeatModeCommand removeTarget:nil];
            }
            break;
        case ASetShuffleMode:
            if (enable) {
                [commandCenter.changeShuffleModeCommand addTarget:self action:@selector(changeShuffleMode:)];
            } else {
                [commandCenter.changeShuffleModeCommand removeTarget:nil];
            }
            break;
        case ASeekBackward:
            if (enable) {
                [commandCenter.seekBackwardCommand addTarget:self action:@selector(seekBackward:)];
            } else {
                [commandCenter.seekBackwardCommand removeTarget:nil];
            }
            break;
        case ASeekForward:
            if (enable) {
                [commandCenter.seekForwardCommand addTarget:self action:@selector(seekForward:)];
            } else {
                [commandCenter.seekForwardCommand removeTarget:nil];
            }
            break;
        default:
            // Actions with no remote command never reach here (filtered by
            // the NSNull check above); default silences -Wswitch.
            break;
    }
}
// Headset / lock-screen toggle button -> Dart onClick with button index 0
// (MediaButton.media).
- (MPRemoteCommandHandlerStatus)togglePlayPause:(MPRemoteCommandEvent *)event {
    NSLog(@"togglePlayPause");
    [backgroundChannel invokeMethod:@"onClick" arguments:@[@(0)]];
    return MPRemoteCommandHandlerStatusSuccess;
}
// Remote "stop" command -> Dart onStop.
- (MPRemoteCommandHandlerStatus)stop:(MPRemoteCommandEvent *)event {
    NSLog(@"stop");
    [backgroundChannel invokeMethod:@"onStop" arguments:nil];
    return MPRemoteCommandHandlerStatusSuccess;
}
// Remote "next track" command -> Dart onSkipToNext.
- (MPRemoteCommandHandlerStatus)nextTrack:(MPRemoteCommandEvent *)event {
    NSLog(@"nextTrack");
    [backgroundChannel invokeMethod:@"onSkipToNext" arguments:nil];
    return MPRemoteCommandHandlerStatusSuccess;
}
// Remote "previous track" command -> Dart onSkipToPrevious.
- (MPRemoteCommandHandlerStatus)previousTrack:(MPRemoteCommandEvent *)event {
    NSLog(@"previousTrack");
    [backgroundChannel invokeMethod:@"onSkipToPrevious" arguments:nil];
    return MPRemoteCommandHandlerStatusSuccess;
}
// Lock-screen scrubber -> Dart onSeekTo.
- (MPRemoteCommandHandlerStatus)changePlaybackPosition:(MPChangePlaybackPositionCommandEvent *)event {
    NSLog(@"changePlaybackPosition");
    // The event reports seconds; the Dart side expects milliseconds.
    long long positionMs = (long long) (event.positionTime * 1000);
    [backgroundChannel invokeMethod:@"onSeekTo" arguments: @[@(positionMs)]];
    return MPRemoteCommandHandlerStatusSuccess;
}
// Skip-forward button -> Dart onFastForward.
- (MPRemoteCommandHandlerStatus)skipForward:(MPRemoteCommandEvent *)event {
    NSLog(@"skipForward");
    [backgroundChannel invokeMethod:@"onFastForward" arguments:nil];
    return MPRemoteCommandHandlerStatusSuccess;
}
// Skip-backward button -> Dart onRewind.
- (MPRemoteCommandHandlerStatus)skipBackward:(MPRemoteCommandEvent *)event {
    NSLog(@"skipBackward");
    [backgroundChannel invokeMethod:@"onRewind" arguments:nil];
    return MPRemoteCommandHandlerStatusSuccess;
}
// Press-and-hold seek forward -> Dart onSeekForward(true) on press,
// onSeekForward(false) on release.
- (MPRemoteCommandHandlerStatus)seekForward:(MPSeekCommandEvent *)event {
    NSLog(@"seekForward");
    BOOL isBeginning = (event.type == MPSeekCommandEventTypeBeginSeeking);
    [backgroundChannel invokeMethod:@"onSeekForward" arguments:@[@(isBeginning)]];
    return MPRemoteCommandHandlerStatusSuccess;
}
// Press-and-hold seek backward -> Dart onSeekBackward(true) on press,
// onSeekBackward(false) on release.
- (MPRemoteCommandHandlerStatus)seekBackward:(MPSeekCommandEvent *)event {
    NSLog(@"seekBackward");
    BOOL isBeginning = (event.type == MPSeekCommandEventTypeBeginSeeking);
    [backgroundChannel invokeMethod:@"onSeekBackward" arguments:@[@(isBeginning)]];
    return MPRemoteCommandHandlerStatusSuccess;
}
// Maps the OS repeat type to the Dart-side repeat-mode index:
// off -> 0, one -> 1, all/other -> 2.
- (MPRemoteCommandHandlerStatus)changeRepeatMode:(MPChangeRepeatModeCommandEvent *)event {
    NSLog(@"changeRepeatMode");
    int modeIndex = event.repeatType == MPRepeatTypeOff
        ? 0
        : (event.repeatType == MPRepeatTypeOne ? 1 : 2);
    [backgroundChannel invokeMethod:@"onSetRepeatMode" arguments:@[@(modeIndex)]];
    return MPRemoteCommandHandlerStatusSuccess;
}
// Maps the OS shuffle type to the Dart-side shuffle-mode index:
// off -> 0, items -> 1, collections/other -> 2.
- (MPRemoteCommandHandlerStatus)changeShuffleMode:(MPChangeShuffleModeCommandEvent *)event {
    NSLog(@"changeShuffleMode");
    int modeIndex = event.shuffleType == MPShuffleTypeOff
        ? 0
        : (event.shuffleType == MPShuffleTypeItems ? 1 : 2);
    [backgroundChannel invokeMethod:@"onSetShuffleMode" arguments:@[@(modeIndex)]];
    return MPRemoteCommandHandlerStatusSuccess;
}
// Remove any notification observers; all other state is static and is
// reset by the "stopped" method-call handler rather than here.
- (void) dealloc {
    [[NSNotificationCenter defaultCenter] removeObserver:self];
}
@end

View File

@ -0,0 +1,22 @@
#
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html.
# Run `pod lib lint audio_service.podspec' to validate before publishing.
#
Pod::Spec.new do |s|
  s.name             = 'audio_service'
  # Keep in sync with the `version:` field in pubspec.yaml; this previously
  # lagged behind at 0.14.1 while pubspec.yaml declared 0.15.0.
  s.version          = '0.15.0'
  s.summary          = 'Flutter plugin to play audio in the background while the screen is off.'
  s.description      = <<-DESC
Flutter plugin to play audio in the background while the screen is off.
                       DESC
  s.homepage         = 'https://github.com/ryanheise/audio_service'
  s.license          = { :file => '../LICENSE' }
  s.author           = { 'Ryan Heise' => 'ryan@ryanheise.com' }
  s.source           = { :path => '.' }
  s.source_files     = 'Classes/**/*'
  s.dependency 'FlutterMacOS'

  s.platform = :osx, '10.12.2'
  s.pod_target_xcconfig = { 'DEFINES_MODULE' => 'YES' }
  s.swift_version = '5.0'
end

34
pubspec.yaml Normal file
View File

@ -0,0 +1,34 @@
name: audio_service
description: Flutter plugin to play audio in the background while the screen is off.
version: 0.15.0
homepage: https://github.com/ryanheise/audio_service
environment:
sdk: ">=2.7.0 <3.0.0"
flutter: ">=1.12.13+hotfix.5"
dependencies:
audio_session: ^0.0.5
rxdart: ^0.24.1
flutter_isolate: ^1.0.0+14
flutter_cache_manager: ^1.4.0
js: ^0.6.2
flutter:
sdk: flutter
flutter_web_plugins:
sdk: flutter
flutter:
plugin:
platforms:
android:
package: com.ryanheise.audioservice
pluginClass: AudioServicePlugin
ios:
pluginClass: AudioServicePlugin
macos:
pluginClass: AudioServicePlugin
web:
pluginClass: AudioServicePlugin
fileName: audio_service_web.dart