diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 0000000..ca40f23 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "audio_service"] + path = audio_service + url = https://notabug.org/exttex/audio_service diff --git a/audio_service b/audio_service new file mode 160000 index 0000000..73fce99 --- /dev/null +++ b/audio_service @@ -0,0 +1 @@ +Subproject commit 73fce9905f9ffeec0270f7c89b70cd0eaa762fb6 diff --git a/just_audio/.github/FUNDING.yml b/just_audio/.github/FUNDING.yml deleted file mode 100644 index 738822d..0000000 --- a/just_audio/.github/FUNDING.yml +++ /dev/null @@ -1 +0,0 @@ -github: ryanheise diff --git a/just_audio/.github/ISSUE_TEMPLATE/bug-report.md b/just_audio/.github/ISSUE_TEMPLATE/bug-report.md deleted file mode 100644 index bbd2a31..0000000 --- a/just_audio/.github/ISSUE_TEMPLATE/bug-report.md +++ /dev/null @@ -1,53 +0,0 @@ ---- -name: Bug report -about: Create a report to help us improve -title: '' -labels: 1 backlog, bug -assignees: ryanheise - ---- - - -**Which API doesn't behave as documented, and how does it misbehave?** -Name here the specific methods or fields that are not behaving as documented, and explain clearly what is happening. - -**Minimal reproduction project** -Provide a link here using one of two options: -1. Fork this repository and modify the example to reproduce the bug, then provide a link here. -2. If the unmodified official example already reproduces the bug, just write "The example". - -**To Reproduce (i.e. user steps, not code)** -Steps to reproduce the behavior: -1. Go to '...' -2. Click on '....' -3. Scroll down to '....' -4. See error - -**Error messages** - -``` -If applicable, copy & paste error message here, within the triple quotes to preserve formatting. -``` - -**Expected behavior** -A clear and concise description of what you expected to happen. - - -**Screenshots** -If applicable, add screenshots to help explain your problem. 
- -**Desktop (please complete the following information):** - - OS: [e.g. MacOS + version] - - Browser [e.g. chrome, safari + version] - -**Smartphone (please complete the following information):** - - Device: [e.g. iPhone6] - - OS: [e.g. iOS8.1] - -**Flutter SDK version** -``` -insert output of "flutter doctor" here -``` - -**Additional context** -Add any other context about the problem here. diff --git a/just_audio/.github/ISSUE_TEMPLATE/config.yml b/just_audio/.github/ISSUE_TEMPLATE/config.yml deleted file mode 100644 index dc89ca6..0000000 --- a/just_audio/.github/ISSUE_TEMPLATE/config.yml +++ /dev/null @@ -1,8 +0,0 @@ -blank_issues_enabled: false -contact_links: - - name: Community Support - url: https://stackoverflow.com/search?q=just_audio - about: Ask for help on Stack Overflow. - - name: New to Flutter? - url: https://gitter.im/flutter/flutter - about: Chat with other Flutter developers on Gitter. diff --git a/just_audio/.github/ISSUE_TEMPLATE/documentation-request.md b/just_audio/.github/ISSUE_TEMPLATE/documentation-request.md deleted file mode 100644 index 1d61cd1..0000000 --- a/just_audio/.github/ISSUE_TEMPLATE/documentation-request.md +++ /dev/null @@ -1,39 +0,0 @@ ---- -name: Documentation request -about: Suggest an improvement to the documentation -title: '' -labels: 1 backlog, documentation -assignees: ryanheise - ---- - - - -**To which pages does your suggestion apply?** - -- Direct URL 1 -- Direct URL 2 -- ... - -**Quote the sentences(s) from the documentation to be improved (if any)** - -> Insert here. (Skip if you are proposing an entirely new section.) - -**Describe your suggestion** - -... 
diff --git a/just_audio/.github/ISSUE_TEMPLATE/feature_request.md b/just_audio/.github/ISSUE_TEMPLATE/feature_request.md deleted file mode 100644 index 1444d42..0000000 --- a/just_audio/.github/ISSUE_TEMPLATE/feature_request.md +++ /dev/null @@ -1,37 +0,0 @@ ---- -name: Feature request -about: Suggest an idea for this project -title: '' -labels: 1 backlog, enhancement -assignees: ryanheise - ---- - - - -**Is your feature request related to a problem? Please describe.** -A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] - -**Describe the solution you'd like** -A clear and concise description of what you want to happen. - -**Describe alternatives you've considered** -A clear and concise description of any alternative solutions or features you've considered. - -**Additional context** -Add any other context or screenshots about the feature request here. diff --git a/just_audio/.gitignore b/just_audio/.gitignore deleted file mode 100644 index 07488ba..0000000 --- a/just_audio/.gitignore +++ /dev/null @@ -1,70 +0,0 @@ -# Miscellaneous -*.class -*.log -*.pyc -*.swp -.DS_Store -.atom/ -.buildlog/ -.history -.svn/ - -# IntelliJ related -*.iml -*.ipr -*.iws -.idea/ - -# Visual Studio Code related -.vscode/ - -# Flutter/Dart/Pub related -**/doc/api/ -.dart_tool/ -.flutter-plugins -.packages -.pub-cache/ -.pub/ -/build/ - -# Android related -**/android/**/gradle-wrapper.jar -**/android/.gradle -**/android/captures/ -**/android/gradlew -**/android/gradlew.bat -**/android/local.properties -**/android/**/GeneratedPluginRegistrant.java - -# iOS/XCode related -**/ios/**/*.mode1v3 -**/ios/**/*.mode2v3 -**/ios/**/*.moved-aside -**/ios/**/*.pbxuser -**/ios/**/*.perspectivev3 -**/ios/**/*sync/ -**/ios/**/.sconsign.dblite -**/ios/**/.tags* -**/ios/**/.vagrant/ -**/ios/**/DerivedData/ -**/ios/**/Icon? 
-**/ios/**/Pods/ -**/ios/**/.symlinks/ -**/ios/**/profile -**/ios/**/xcuserdata -**/ios/.generated/ -**/ios/Flutter/App.framework -**/ios/Flutter/Flutter.framework -**/ios/Flutter/Generated.xcconfig -**/ios/Flutter/app.flx -**/ios/Flutter/app.zip -**/ios/Flutter/flutter_assets/ -**/ios/ServiceDefinitions.json -**/ios/Runner/GeneratedPluginRegistrant.* - -# Exceptions to above rules. -!**/ios/**/default.mode1v3 -!**/ios/**/default.mode2v3 -!**/ios/**/default.pbxuser -!**/ios/**/default.perspectivev3 -!/packages/flutter_tools/test/data/dart_dependencies_test/**/.packages diff --git a/just_audio/.metadata b/just_audio/.metadata deleted file mode 100644 index 8536f75..0000000 --- a/just_audio/.metadata +++ /dev/null @@ -1,10 +0,0 @@ -# This file tracks properties of this Flutter project. -# Used by Flutter tool to assess capabilities and perform upgrades etc. -# -# This file should be version controlled and should not be manually edited. - -version: - revision: 68587a0916366e9512a78df22c44163d041dd5f3 - channel: stable - -project_type: plugin diff --git a/just_audio/CHANGELOG.md b/just_audio/CHANGELOG.md deleted file mode 100644 index fc87c15..0000000 --- a/just_audio/CHANGELOG.md +++ /dev/null @@ -1,114 +0,0 @@ -## 0.3.1 - -* Prevent hang in dispose - -## 0.3.0 - -* Playlists -* Looping -* Shuffling -* Composing -* Clipping support added for iOS/macOS -* New player state model consisting of: - * playing: true/false - * processingState: none/loading/buffering/ready/completed -* Feature complete on iOS and macOS (except for DASH) -* Improved example -* Exception classes - -## 0.2.2 - -* Fix dependencies for stable channel. - -## 0.2.1 - -* Improve handling of headers. -* Report setUrl errors and duration on web. 
- -## 0.2.0 - -* Support dynamic duration -* Support seeking to end of live streams -* Support request headers -* V2 implementation -* Report setUrl errors on iOS -* setUrl throws exception if interrupted -* Return null when duration is unknown - -## 0.1.10 - -* Option to set audio session category on iOS. - -## 0.1.9 - -* Bug fixes. - -## 0.1.8 - -* Reduce distortion at slow speeds on iOS - -## 0.1.7 - -* Minor bug fixes. - -## 0.1.6 - -* Eliminate event lag over method channels. -* Report setUrl errors on Android. -* Report Icy Metadata on Android. -* Bug fixes. - -## 0.1.5 - -* Update dependencies and documentation. - -## 0.1.4 - -* Add MacOS implementation. -* Support cross-platform redirects on Android. -* Bug fixes. - -## 0.1.3 - -* Fix bug in web implementation. - -## 0.1.2 - -* Broadcast how much audio has been buffered. - -## 0.1.1 - -* Web implementation. -* iOS option to minimize stalling. -* Fix setAsset on iOS. - -## 0.1.0 - -* Separate buffering state from PlaybackState. -* More permissive state transitions. -* Support playing local files on iOS. - -## 0.0.6 - -* Bug fixes. - -## 0.0.5 - -* API change for audio clipping. -* Performance improvements and bug fixes on Android. - -## 0.0.4 - -* Remove reseeking hack. - -## 0.0.3 - -* Feature to change audio speed. - -## 0.0.2 - -* iOS implementation for testing (may not work). - -## 0.0.1 - -* Initial release with Android implementation. diff --git a/just_audio/LICENSE b/just_audio/LICENSE deleted file mode 100644 index 27a8b32..0000000 --- a/just_audio/LICENSE +++ /dev/null @@ -1,229 +0,0 @@ -MIT License - -Copyright (c) 2019-2020 Ryan Heise. 
- -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - -============================================================================== - -This software includes the ExoPlayer library which is licensed under the Apache -License, Version 2.0. - - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. 
For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/just_audio/README.md b/just_audio/README.md deleted file mode 100644 index 60fb32f..0000000 --- a/just_audio/README.md +++ /dev/null @@ -1,222 +0,0 @@ -# just_audio - -This Flutter plugin plays audio from URLs, files, assets, DASH/HLS streams and playlists. Furthermore, it can clip, concatenate, loop, shuffle and compose audio into complex arrangements with gapless playback. This plugin can be used with [audio_service](https://pub.dev/packages/audio_service) to play audio in the background and control playback from the lock screen, Android notifications, the iOS Control Center, and headset buttons. 
- -## Features - -| Feature | Android | iOS | MacOS | Web | -| ------- | :-------: | :-----: | :-----: | :-----: | -| read from URL | ✅ | ✅ | ✅ | ✅ | -| read from file | ✅ | ✅ | ✅ | | -| read from asset | ✅ | ✅ | ✅ | | -| request headers | ✅ | ✅ | ✅ | | -| DASH | ✅ | | | | -| HLS | ✅ | ✅ | ✅ | | -| buffer status/position | ✅ | ✅ | ✅ | ✅ | -| play/pause/seek | ✅ | ✅ | ✅ | ✅ | -| set volume | ✅ | ✅ | ✅ | ✅ | -| set speed | ✅ | ✅ | ✅ | ✅ | -| clip audio | ✅ | ✅ | ✅ | ✅ | -| playlists | ✅ | ✅ | ✅ | ✅ | -| looping | ✅ | ✅ | ✅ | ✅ | -| shuffle | ✅ | ✅ | ✅ | ✅ | -| compose audio | ✅ | ✅ | ✅ | ✅ | -| gapless playback | ✅ | ✅ | ✅ | | -| report player errors | ✅ | ✅ | ✅ | ✅ | - -Please consider reporting any bugs you encounter [here](https://github.com/ryanheise/just_audio/issues) or submitting pull requests [here](https://github.com/ryanheise/just_audio/pulls). - -## Example - -![just_audio](https://user-images.githubusercontent.com/19899190/89558581-bf369080-d857-11ea-9376-3a5055284bab.png) - -Initialisation: - -```dart -final player = AudioPlayer(); -var duration = await player.setUrl('https://foo.com/bar.mp3'); -``` - -Standard controls: - -```dart -player.play(); // Usually you don't want to wait for playback to finish. 
-await player.seek(Duration(seconds: 10)); -await player.pause(); -``` - -Clipping audio: - -```dart -await player.setClip(start: Duration(seconds: 10), end: Duration(seconds: 20)); -await player.play(); // Waits until the clip has finished playing -``` -Adjusting audio: - -```dart -await player.setSpeed(2.0); // Double speed -await player.setVolume(0.5); // Halve volume -``` - -Gapless playlists: - -```dart -await player.load( - ConcatenatingAudioSource( - children: [ - AudioSource.uri(Uri.parse("https://example.com/track1.mp3")), - AudioSource.uri(Uri.parse("https://example.com/track2.mp3")), - AudioSource.uri(Uri.parse("https://example.com/track3.mp3")), - ], - ), -); -player.seekToNext(); -player.seekToPrevious(); -// Jump to the beginning of track3.mp3. -player.seek(Duration(milliseconds: 0), index: 2); -``` - -Looping and shuffling: - -```dart -player.setLoopMode(LoopMode.off); // no looping (default) -player.setLoopMode(LoopMode.all); // loop playlist -player.setLoopMode(LoopMode.one); // loop current item -player.setShuffleModeEnabled(true); // shuffle except for current item -``` - -Composing audio sources: - -```dart -player.load( - // Loop child 4 times - LoopingAudioSource( - count: 4, - // Play children one after the other - child: ConcatenatingAudioSource( - children: [ - // Play a regular media file - ProgressiveAudioSource(Uri.parse("https://example.com/foo.mp3")), - // Play a DASH stream - DashAudioSource(Uri.parse("https://example.com/audio.mdp")), - // Play an HLS stream - HlsAudioSource(Uri.parse("https://example.com/audio.m3u8")), - // Play a segment of the child - ClippingAudioSource( - child: ProgressiveAudioSource(Uri.parse("https://w.xyz/p.mp3")), - start: Duration(seconds: 25), - end: Duration(seconds: 30), - ), - ], - ), - ), -); -``` - -Releasing resources: - -```dart -await player.dispose(); -``` - -Catching player errors: - -```dart -try { - await player.setUrl("https://s3.amazonaws.com/404-file.mp3"); -} catch (e) { - print("Error: 
$e"); -} -``` - -Listening to state changes: - -```dart -player.playerStateStream.listen((state) { - if (state.playing) ... else ... - switch (state.processingState) { - case AudioPlaybackState.none: ... - case AudioPlaybackState.loading: ... - case AudioPlaybackState.buffering: ... - case AudioPlaybackState.ready: ... - case AudioPlaybackState.completed: ... - } -}); - -// See also: -// - durationStream -// - positionStream -// - bufferedPositionStream -// - currentIndexStream -// - icyMetadataStream -// - playingStream -// - processingStateStream -// - loopModeStream -// - shuffleModeEnabledStream -// - volumeStream -// - speedStream -// - playbackEventStream -``` - -## Platform specific configuration - -### Android - -If you wish to connect to non-HTTPS URLS, add the following attribute to the `application` element of your `AndroidManifest.xml` file: - -```xml - -``` - -### iOS - -If you wish to connect to non-HTTPS URLS, add the following to your `Info.plist` file: - -```xml -NSAppTransportSecurity - - NSAllowsArbitraryLoads - - NSAllowsArbitraryLoadsForMedia - - -``` - -By default, iOS will mute your app's audio when your phone is switched to -silent mode. Depending on the requirements of your app, you can change the -default audio session category using `AudioPlayer.setIosCategory`. For example, -if you are writing a media app, Apple recommends that you set the category to -`AVAudioSessionCategoryPlayback`, which you can achieve by adding the following -code to your app's initialisation: - -```dart -AudioPlayer.setIosCategory(IosCategory.playback); -``` - -Note: If your app uses a number of different audio plugins in combination, e.g. -for audio recording, or text to speech, or background audio, it is possible -that those plugins may internally override the setting you choose here. 
You may -consider asking the developer of each other plugin you use to provide a similar -method so that you can configure the same audio session category universally -across all plugins you use. - -### MacOS - -To allow your MacOS application to access audio files on the Internet, add the following to your `DebugProfile.entitlements` and `Release.entitlements` files: - -```xml - com.apple.security.network.client - -``` - -If you wish to connect to non-HTTPS URLS, add the following to your `Info.plist` file: - -```xml -NSAppTransportSecurity - - NSAllowsArbitraryLoads - - NSAllowsArbitraryLoadsForMedia - - -``` diff --git a/just_audio/android/.gitignore b/just_audio/android/.gitignore deleted file mode 100644 index c6cbe56..0000000 --- a/just_audio/android/.gitignore +++ /dev/null @@ -1,8 +0,0 @@ -*.iml -.gradle -/local.properties -/.idea/workspace.xml -/.idea/libraries -.DS_Store -/build -/captures diff --git a/just_audio/android/build.gradle b/just_audio/android/build.gradle deleted file mode 100644 index d63baca..0000000 --- a/just_audio/android/build.gradle +++ /dev/null @@ -1,48 +0,0 @@ -group 'com.ryanheise.just_audio' -version '1.0' - -buildscript { - repositories { - google() - jcenter() - } - - dependencies { - classpath 'com.android.tools.build:gradle:3.6.3' - } -} - -rootProject.allprojects { - repositories { - google() - jcenter() - } -} - -apply plugin: 'com.android.library' - -android { - compileSdkVersion 28 - - defaultConfig { - minSdkVersion 16 - testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" - } - - lintOptions { - disable 'InvalidPackage' - } - - compileOptions { - sourceCompatibility 1.8 - targetCompatibility 1.8 - } -} - -dependencies { - implementation 'com.google.android.exoplayer:exoplayer-core:2.11.4' - implementation 'com.google.android.exoplayer:exoplayer-dash:2.11.4' - implementation 'com.google.android.exoplayer:exoplayer-hls:2.11.4' - implementation 'com.google.android.exoplayer:exoplayer-smoothstreaming:2.11.4' - 
compile files('libs/extension-flac.aar') -} diff --git a/just_audio/android/gradle.properties b/just_audio/android/gradle.properties deleted file mode 100644 index 38c8d45..0000000 --- a/just_audio/android/gradle.properties +++ /dev/null @@ -1,4 +0,0 @@ -org.gradle.jvmargs=-Xmx1536M -android.enableR8=true -android.useAndroidX=true -android.enableJetifier=true diff --git a/just_audio/android/gradle/wrapper/gradle-wrapper.properties b/just_audio/android/gradle/wrapper/gradle-wrapper.properties deleted file mode 100644 index 212deb2..0000000 --- a/just_audio/android/gradle/wrapper/gradle-wrapper.properties +++ /dev/null @@ -1,6 +0,0 @@ -#Mon Aug 10 13:15:44 CEST 2020 -distributionBase=GRADLE_USER_HOME -distributionPath=wrapper/dists -zipStoreBase=GRADLE_USER_HOME -zipStorePath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.4-all.zip diff --git a/just_audio/android/libs/extension-flac.aar b/just_audio/android/libs/extension-flac.aar deleted file mode 100644 index 62d38a2..0000000 Binary files a/just_audio/android/libs/extension-flac.aar and /dev/null differ diff --git a/just_audio/android/settings.gradle b/just_audio/android/settings.gradle deleted file mode 100644 index c17dc37..0000000 --- a/just_audio/android/settings.gradle +++ /dev/null @@ -1 +0,0 @@ -rootProject.name = 'just_audio' diff --git a/just_audio/android/src/main/AndroidManifest.xml b/just_audio/android/src/main/AndroidManifest.xml deleted file mode 100644 index e83e841..0000000 --- a/just_audio/android/src/main/AndroidManifest.xml +++ /dev/null @@ -1,3 +0,0 @@ - - diff --git a/just_audio/android/src/main/java/com/ryanheise/just_audio/AudioPlayer.java b/just_audio/android/src/main/java/com/ryanheise/just_audio/AudioPlayer.java deleted file mode 100644 index 43b1118..0000000 --- a/just_audio/android/src/main/java/com/ryanheise/just_audio/AudioPlayer.java +++ /dev/null @@ -1,724 +0,0 @@ -package com.ryanheise.just_audio; - -import android.content.Context; -import 
android.net.Uri; -import android.os.Handler; -import android.util.Log; - -import com.google.android.exoplayer2.C; -import com.google.android.exoplayer2.ExoPlaybackException; -import com.google.android.exoplayer2.PlaybackParameters; -import com.google.android.exoplayer2.Player; -import com.google.android.exoplayer2.SimpleExoPlayer; -import com.google.android.exoplayer2.Timeline; -import com.google.android.exoplayer2.metadata.Metadata; -import com.google.android.exoplayer2.metadata.MetadataOutput; -import com.google.android.exoplayer2.metadata.icy.IcyHeaders; -import com.google.android.exoplayer2.metadata.icy.IcyInfo; -import com.google.android.exoplayer2.source.ClippingMediaSource; -import com.google.android.exoplayer2.source.ConcatenatingMediaSource; -import com.google.android.exoplayer2.source.LoopingMediaSource; -import com.google.android.exoplayer2.source.MediaSource; -import com.google.android.exoplayer2.source.ProgressiveMediaSource; -import com.google.android.exoplayer2.source.ShuffleOrder; -import com.google.android.exoplayer2.source.ShuffleOrder.DefaultShuffleOrder; -import com.google.android.exoplayer2.source.TrackGroup; -import com.google.android.exoplayer2.source.TrackGroupArray; -import com.google.android.exoplayer2.source.dash.DashMediaSource; -import com.google.android.exoplayer2.source.hls.HlsMediaSource; -import com.google.android.exoplayer2.trackselection.TrackSelectionArray; -import com.google.android.exoplayer2.upstream.DataSource; -import com.google.android.exoplayer2.upstream.DefaultDataSourceFactory; -import com.google.android.exoplayer2.upstream.DefaultHttpDataSource; -import com.google.android.exoplayer2.upstream.DefaultHttpDataSourceFactory; -import com.google.android.exoplayer2.upstream.HttpDataSource; -import com.google.android.exoplayer2.util.Util; -import io.flutter.plugin.common.BinaryMessenger; -import io.flutter.plugin.common.EventChannel; -import io.flutter.plugin.common.EventChannel.EventSink; -import 
io.flutter.plugin.common.MethodCall; -import io.flutter.plugin.common.MethodChannel; -import io.flutter.plugin.common.MethodChannel.MethodCallHandler; -import io.flutter.plugin.common.MethodChannel.Result; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Random; -import java.util.stream.Collectors; -import com.ryanheise.just_audio.DeezerDataSource; - -public class AudioPlayer implements MethodCallHandler, Player.EventListener, MetadataOutput { - - static final String TAG = "AudioPlayer"; - - private static Random random = new Random(); - - private final Context context; - private final MethodChannel methodChannel; - private final EventChannel eventChannel; - private EventSink eventSink; - - private ProcessingState processingState; - private long updateTime; - private long updatePosition; - private long bufferedPosition; - private long duration; - private Long start; - private Long end; - private Long seekPos; - private Result prepareResult; - private Result playResult; - private Result seekResult; - private boolean seekProcessed; - private boolean playing; - private Map mediaSources = new HashMap(); - private IcyInfo icyInfo; - private IcyHeaders icyHeaders; - private int errorCount; - - private SimpleExoPlayer player; - private MediaSource mediaSource; - private Integer currentIndex; - private Map loopingChildren = new HashMap<>(); - private Map loopingCounts = new HashMap<>(); - private final Handler handler = new Handler(); - private final Runnable bufferWatcher = new Runnable() { - @Override - public void run() { - if (player == null) { - return; - } - - long newBufferedPosition = player.getBufferedPosition(); - if (newBufferedPosition != bufferedPosition) { - bufferedPosition = newBufferedPosition; - broadcastPlaybackEvent(); - } - switch (processingState) { - case buffering: - handler.postDelayed(this, 200); - break; - case 
ready: - if (playing) { - handler.postDelayed(this, 500); - } else { - handler.postDelayed(this, 1000); - } - break; - } - } - }; - - private final Runnable onDispose; - - public AudioPlayer(final Context applicationContext, final BinaryMessenger messenger, - final String id, final Runnable onDispose) { - this.context = applicationContext; - this.onDispose = onDispose; - methodChannel = new MethodChannel(messenger, "com.ryanheise.just_audio.methods." + id); - methodChannel.setMethodCallHandler(this); - eventChannel = new EventChannel(messenger, "com.ryanheise.just_audio.events." + id); - eventChannel.setStreamHandler(new EventChannel.StreamHandler() { - @Override - public void onListen(final Object arguments, final EventSink eventSink) { - AudioPlayer.this.eventSink = eventSink; - } - - @Override - public void onCancel(final Object arguments) { - eventSink = null; - } - }); - processingState = ProcessingState.none; - } - - private void startWatchingBuffer() { - handler.removeCallbacks(bufferWatcher); - handler.post(bufferWatcher); - } - - @Override - public void onMetadata(Metadata metadata) { - for (int i = 0; i < metadata.length(); i++) { - final Metadata.Entry entry = metadata.get(i); - if (entry instanceof IcyInfo) { - icyInfo = (IcyInfo) entry; - broadcastPlaybackEvent(); - } - } - } - - @Override - public void onTracksChanged(TrackGroupArray trackGroups, TrackSelectionArray trackSelections) { - for (int i = 0; i < trackGroups.length; i++) { - TrackGroup trackGroup = trackGroups.get(i); - - for (int j = 0; j < trackGroup.length; j++) { - Metadata metadata = trackGroup.getFormat(j).metadata; - - if (metadata != null) { - for (int k = 0; k < metadata.length(); k++) { - final Metadata.Entry entry = metadata.get(k); - if (entry instanceof IcyHeaders) { - icyHeaders = (IcyHeaders) entry; - broadcastPlaybackEvent(); - } - } - } - } - } - } - - @Override - public void onPositionDiscontinuity(int reason) { - switch (reason) { - case 
Player.DISCONTINUITY_REASON_PERIOD_TRANSITION: - case Player.DISCONTINUITY_REASON_SEEK: - onItemMayHaveChanged(); - break; - } - } - - @Override - public void onTimelineChanged(Timeline timeline, int reason) { - if (reason == Player.TIMELINE_CHANGE_REASON_DYNAMIC) { - onItemMayHaveChanged(); - } - } - - private void onItemMayHaveChanged() { - Integer newIndex = player.getCurrentWindowIndex(); - if (newIndex != currentIndex) { - currentIndex = newIndex; - } - broadcastPlaybackEvent(); - } - - @Override - public void onPlayerStateChanged(boolean playWhenReady, int playbackState) { - switch (playbackState) { - case Player.STATE_READY: - if (prepareResult != null) { - duration = getDuration(); - transition(ProcessingState.ready); - prepareResult.success(duration); - prepareResult = null; - } else { - transition(ProcessingState.ready); - } - if (seekProcessed) { - completeSeek(); - } - break; - case Player.STATE_BUFFERING: - if (processingState != ProcessingState.buffering) { - transition(ProcessingState.buffering); - startWatchingBuffer(); - } - break; - case Player.STATE_ENDED: - if (processingState != ProcessingState.completed) { - transition(ProcessingState.completed); - } - if (playResult != null) { - playResult.success(null); - playResult = null; - } - break; - } - } - - @Override - public void onPlayerError(ExoPlaybackException error) { - switch (error.type) { - case ExoPlaybackException.TYPE_SOURCE: - Log.e(TAG, "TYPE_SOURCE: " + error.getSourceException().getMessage()); - break; - - case ExoPlaybackException.TYPE_RENDERER: - Log.e(TAG, "TYPE_RENDERER: " + error.getRendererException().getMessage()); - break; - - case ExoPlaybackException.TYPE_UNEXPECTED: - Log.e(TAG, "TYPE_UNEXPECTED: " + error.getUnexpectedException().getMessage()); - break; - - default: - Log.e(TAG, "default: " + error.getUnexpectedException().getMessage()); - } - sendError(String.valueOf(error.type), error.getMessage()); - errorCount++; - if (player.hasNext() && currentIndex != null && 
errorCount <= 5) { - int nextIndex = currentIndex + 1; - player.prepare(mediaSource); - player.seekTo(nextIndex, 0); - } - } - - @Override - public void onSeekProcessed() { - if (seekResult != null) { - seekProcessed = true; - if (player.getPlaybackState() == Player.STATE_READY) { - completeSeek(); - } - } - } - - private void completeSeek() { - seekProcessed = false; - seekPos = null; - seekResult.success(null); - seekResult = null; - } - - @Override - public void onMethodCall(final MethodCall call, final Result result) { - ensurePlayerInitialized(); - - final List args = (List) call.arguments; - try { - switch (call.method) { - case "load": - load(getAudioSource(args.get(0)), result); - break; - case "play": - play(result); - break; - case "pause": - pause(); - result.success(null); - break; - case "setVolume": - setVolume((float) ((double) ((Double) args.get(0)))); - result.success(null); - break; - case "setSpeed": - setSpeed((float) ((double) ((Double) args.get(0)))); - result.success(null); - break; - case "setLoopMode": - setLoopMode((Integer) args.get(0)); - result.success(null); - break; - case "setShuffleModeEnabled": - setShuffleModeEnabled((Boolean) args.get(0)); - result.success(null); - break; - case "setAutomaticallyWaitsToMinimizeStalling": - result.success(null); - break; - case "seek": - Long position = getLong(args.get(0)); - Integer index = (Integer)args.get(1); - seek(position == null ? 
C.TIME_UNSET : position, result, index); - break; - case "dispose": - dispose(); - result.success(null); - break; - case "concatenating.add": - concatenating(args.get(0)) - .addMediaSource(getAudioSource(args.get(1)), handler, () -> result.success(null)); - break; - case "concatenating.insert": - concatenating(args.get(0)) - .addMediaSource((Integer)args.get(1), getAudioSource(args.get(2)), handler, () -> result.success(null)); - break; - case "concatenating.addAll": - concatenating(args.get(0)) - .addMediaSources(getAudioSources(args.get(1)), handler, () -> result.success(null)); - break; - case "concatenating.insertAll": - concatenating(args.get(0)) - .addMediaSources((Integer)args.get(1), getAudioSources(args.get(2)), handler, () -> result.success(null)); - break; - case "concatenating.removeAt": - concatenating(args.get(0)) - .removeMediaSource((Integer)args.get(1), handler, () -> result.success(null)); - break; - case "concatenating.removeRange": - concatenating(args.get(0)) - .removeMediaSourceRange((Integer)args.get(1), (Integer)args.get(2), handler, () -> result.success(null)); - break; - case "concatenating.move": - concatenating(args.get(0)) - .moveMediaSource((Integer)args.get(1), (Integer)args.get(2), handler, () -> result.success(null)); - break; - case "concatenating.clear": - concatenating(args.get(0)).clear(handler, () -> result.success(null)); - break; - default: - result.notImplemented(); - break; - } - } catch (IllegalStateException e) { - e.printStackTrace(); - result.error("Illegal state: " + e.getMessage(), null, null); - } catch (Exception e) { - e.printStackTrace(); - result.error("Error: " + e, null, null); - } - } - - // Set the shuffle order for mediaSource, with currentIndex at - // the first position. Traverse the tree incrementing index at each - // node. 
- private int setShuffleOrder(MediaSource mediaSource, int index) { - if (mediaSource instanceof ConcatenatingMediaSource) { - final ConcatenatingMediaSource source = (ConcatenatingMediaSource)mediaSource; - // Find which child is current - Integer currentChildIndex = null; - for (int i = 0; i < source.getSize(); i++) { - final int indexBefore = index; - final MediaSource child = source.getMediaSource(i); - index = setShuffleOrder(child, index); - // If currentIndex falls within this child, make this child come first. - if (currentIndex >= indexBefore && currentIndex < index) { - currentChildIndex = i; - } - } - // Shuffle so that the current child is first in the shuffle order - source.setShuffleOrder(createShuffleOrder(source.getSize(), currentChildIndex)); - } else if (mediaSource instanceof LoopingMediaSource) { - final LoopingMediaSource source = (LoopingMediaSource)mediaSource; - // The ExoPlayer API doesn't provide accessors for these so we have - // to index them ourselves. - MediaSource child = loopingChildren.get(source); - int count = loopingCounts.get(source); - for (int i = 0; i < count; i++) { - index = setShuffleOrder(child, index); - } - } else { - // An actual media item takes up one spot in the playlist. - index++; - } - return index; - } - - private static int[] shuffle(int length, Integer firstIndex) { - final int[] shuffleOrder = new int[length]; - for (int i = 0; i < length; i++) { - final int j = random.nextInt(i + 1); - shuffleOrder[i] = shuffleOrder[j]; - shuffleOrder[j] = i; - } - if (firstIndex != null) { - for (int i = 1; i < length; i++) { - if (shuffleOrder[i] == firstIndex) { - final int v = shuffleOrder[0]; - shuffleOrder[0] = shuffleOrder[i]; - shuffleOrder[i] = v; - break; - } - } - } - return shuffleOrder; - } - - // Create a shuffle order optionally fixing the first index. 
- private ShuffleOrder createShuffleOrder(int length, Integer firstIndex) { - int[] shuffleIndices = shuffle(length, firstIndex); - return new DefaultShuffleOrder(shuffleIndices, random.nextLong()); - } - - private ConcatenatingMediaSource concatenating(final Object index) { - return (ConcatenatingMediaSource)mediaSources.get((String)index); - } - - private MediaSource getAudioSource(final Object json) { - Map map = (Map)json; - String id = (String)map.get("id"); - MediaSource mediaSource = mediaSources.get(id); - if (mediaSource == null) { - mediaSource = decodeAudioSource(map); - mediaSources.put(id, mediaSource); - } - return mediaSource; - } - - private MediaSource decodeAudioSource(final Object json) { - Map map = (Map)json; - String id = (String)map.get("id"); - switch ((String)map.get("type")) { - case "progressive": - Uri uri = Uri.parse((String)map.get("uri")); - //Deezer - if (uri.getHost() != null && uri.getHost().contains("dzcdn.net")) { - //Track id is stored in URL fragment (after #) - String fragment = uri.getFragment(); - uri = Uri.parse(((String)map.get("uri")).replace("#" + fragment, "")); - return new ProgressiveMediaSource.Factory( - () -> { - HttpDataSource deezerDataSource = new DeezerDataSource(fragment); - return deezerDataSource; - } - ).setTag(id).createMediaSource(uri); - } - - return new ProgressiveMediaSource.Factory(buildDataSourceFactory()) - .setTag(id) - .createMediaSource(uri); - case "dash": - return new DashMediaSource.Factory(buildDataSourceFactory()) - .setTag(id) - .createMediaSource(Uri.parse((String)map.get("uri"))); - case "hls": - return new HlsMediaSource.Factory(buildDataSourceFactory()) - .setTag(id) - .createMediaSource(Uri.parse((String)map.get("uri"))); - case "concatenating": - List audioSources = (List)map.get("audioSources"); - return new ConcatenatingMediaSource( - false, // isAtomic - (Boolean)map.get("useLazyPreparation"), - new DefaultShuffleOrder(audioSources.size()), - audioSources - .stream() - .map(s -> 
getAudioSource(s)) - .toArray(MediaSource[]::new)); - case "clipping": - Long start = getLong(map.get("start")); - Long end = getLong(map.get("end")); - return new ClippingMediaSource(getAudioSource(map.get("audioSource")), - (start != null ? start : 0) * 1000L, - (end != null ? end : C.TIME_END_OF_SOURCE) * 1000L); - case "looping": - Integer count = (Integer)map.get("count"); - MediaSource looperChild = getAudioSource(map.get("audioSource")); - LoopingMediaSource looper = new LoopingMediaSource(looperChild, count); - // TODO: store both in a single map - loopingChildren.put(looper, looperChild); - loopingCounts.put(looper, count); - return looper; - default: - throw new IllegalArgumentException("Unknown AudioSource type: " + map.get("type")); - } - } - - private List getAudioSources(final Object json) { - return ((List)json) - .stream() - .map(s -> getAudioSource(s)) - .collect(Collectors.toList()); - } - - private DataSource.Factory buildDataSourceFactory() { - String userAgent = Util.getUserAgent(context, "just_audio"); - DataSource.Factory httpDataSourceFactory = new DefaultHttpDataSourceFactory( - userAgent, - DefaultHttpDataSource.DEFAULT_CONNECT_TIMEOUT_MILLIS, - DefaultHttpDataSource.DEFAULT_READ_TIMEOUT_MILLIS, - true - ); - return new DefaultDataSourceFactory(context, httpDataSourceFactory); - } - - private void load(final MediaSource mediaSource, final Result result) { - switch (processingState) { - case none: - break; - case loading: - abortExistingConnection(); - player.stop(); - break; - default: - player.stop(); - break; - } - errorCount = 0; - prepareResult = result; - transition(ProcessingState.loading); - if (player.getShuffleModeEnabled()) { - setShuffleOrder(mediaSource, 0); - } - this.mediaSource = mediaSource; - player.prepare(mediaSource); - } - - private void ensurePlayerInitialized() { - if (player == null) { - player = new SimpleExoPlayer.Builder(context).build(); - player.addMetadataOutput(this); - player.addListener(this); - } - } - - 
private void broadcastPlaybackEvent() { - final Map event = new HashMap(); - event.put("processingState", processingState.ordinal()); - event.put("updatePosition", updatePosition = getCurrentPosition()); - event.put("updateTime", updateTime = System.currentTimeMillis()); - event.put("bufferedPosition", Math.max(updatePosition, bufferedPosition)); - event.put("icyMetadata", collectIcyMetadata()); - event.put("duration", duration = getDuration()); - event.put("currentIndex", currentIndex); - - if (eventSink != null) { - eventSink.success(event); - } - } - - private Map collectIcyMetadata() { - final Map icyData = new HashMap<>(); - if (icyInfo != null) { - final Map info = new HashMap<>(); - info.put("title", icyInfo.title); - info.put("url", icyInfo.url); - icyData.put("info", info); - } - if (icyHeaders != null) { - final Map headers = new HashMap<>(); - headers.put("bitrate", icyHeaders.bitrate); - headers.put("genre", icyHeaders.genre); - headers.put("name", icyHeaders.name); - headers.put("metadataInterval", icyHeaders.metadataInterval); - headers.put("url", icyHeaders.url); - headers.put("isPublic", icyHeaders.isPublic); - icyData.put("headers", headers); - } - return icyData; - } - - private long getCurrentPosition() { - if (processingState == ProcessingState.none || processingState == ProcessingState.loading) { - return 0; - } else if (seekPos != null && seekPos != C.TIME_UNSET) { - return seekPos; - } else { - return player.getCurrentPosition(); - } - } - - private long getDuration() { - if (processingState == ProcessingState.none || processingState == ProcessingState.loading) { - return C.TIME_UNSET; - } else { - return player.getDuration(); - } - } - - private void sendError(String errorCode, String errorMsg) { - if (prepareResult != null) { - prepareResult.error(errorCode, errorMsg, null); - prepareResult = null; - } - - if (eventSink != null) { - eventSink.error(errorCode, errorMsg, null); - } - } - - private void transition(final ProcessingState 
newState) { - processingState = newState; - broadcastPlaybackEvent(); - } - - private String getLowerCaseExtension(Uri uri) { - // Until ExoPlayer provides automatic detection of media source types, we - // rely on the file extension. When this is absent, as a temporary - // workaround we allow the app to supply a fake extension in the URL - // fragment. e.g. https://somewhere.com/somestream?x=etc#.m3u8 - String fragment = uri.getFragment(); - String filename = fragment != null && fragment.contains(".") ? fragment : uri.getPath(); - return filename.replaceAll("^.*\\.", "").toLowerCase(); - } - - public void play(Result result) { - if (player.getPlayWhenReady()) return; - if (playResult != null) { - playResult.success(null); - } - playResult = result; - startWatchingBuffer(); - player.setPlayWhenReady(true); - if (processingState == ProcessingState.completed && playResult != null) { - playResult.success(null); - playResult = null; - } - } - - public void pause() { - if (!player.getPlayWhenReady()) return; - player.setPlayWhenReady(false); - if (playResult != null) { - playResult.success(null); - playResult = null; - } - } - - public void setVolume(final float volume) { - player.setVolume(volume); - } - - public void setSpeed(final float speed) { - player.setPlaybackParameters(new PlaybackParameters(speed)); - broadcastPlaybackEvent(); - } - - public void setLoopMode(final int mode) { - player.setRepeatMode(mode); - } - - public void setShuffleModeEnabled(final boolean enabled) { - if (enabled) { - setShuffleOrder(mediaSource, 0); - } - player.setShuffleModeEnabled(enabled); - } - - public void seek(final long position, final Result result, final Integer index) { - if (processingState == ProcessingState.none || processingState == ProcessingState.loading) { - return; - } - abortSeek(); - seekPos = position; - seekResult = result; - seekProcessed = false; - int windowIndex = index != null ? 
index : player.getCurrentWindowIndex(); - player.seekTo(windowIndex, position); - } - - public void dispose() { - mediaSources.clear(); - mediaSource = null; - loopingChildren.clear(); - if (player != null) { - player.release(); - player = null; - transition(ProcessingState.none); - } - if (eventSink != null) { - eventSink.endOfStream(); - } - onDispose.run(); - } - - private void abortSeek() { - if (seekResult != null) { - seekResult.success(null); - seekResult = null; - seekPos = null; - seekProcessed = false; - } - } - - private void abortExistingConnection() { - sendError("abort", "Connection aborted"); - } - - public static Long getLong(Object o) { - return (o == null || o instanceof Long) ? (Long)o : new Long(((Integer)o).intValue()); - } - - enum ProcessingState { - none, - loading, - buffering, - ready, - completed - } -} diff --git a/just_audio/android/src/main/java/com/ryanheise/just_audio/DeezerDataSource.java b/just_audio/android/src/main/java/com/ryanheise/just_audio/DeezerDataSource.java deleted file mode 100644 index 7b45092..0000000 --- a/just_audio/android/src/main/java/com/ryanheise/just_audio/DeezerDataSource.java +++ /dev/null @@ -1,264 +0,0 @@ -package com.ryanheise.just_audio; - -import android.net.Uri; -import android.util.Log; -import com.google.android.exoplayer2.upstream.DataSpec; -import com.google.android.exoplayer2.upstream.HttpDataSource; -import com.google.android.exoplayer2.upstream.TransferListener; -import java.io.BufferedInputStream; -import java.io.ByteArrayOutputStream; -import java.io.FilterInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.net.HttpURLConnection; -import java.net.URL; -import java.security.MessageDigest; -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import javax.crypto.Cipher; -import javax.crypto.spec.SecretKeySpec; - -public class DeezerDataSource implements HttpDataSource { - HttpURLConnection connection; - InputStream inputStream; - int counter = 
0; - byte[] key; - DataSpec dataSpec; - - //Quality fallback stuff - String trackId; - int quality = 0; - String md5origin; - String mediaVersion; - - public DeezerDataSource(String trackId) { - this.trackId = trackId; - this.key = getKey(trackId); - } - - @Override - public long open(DataSpec dataSpec) throws HttpDataSource.HttpDataSourceException { - this.dataSpec = dataSpec; - try { - //Check if real url or placeholder for quality fallback - URL url = new URL(dataSpec.uri.toString()); - String[] qp = url.getQuery().split("&"); - //Real deezcdn url doesnt have query params - if (qp.length >= 3) { - //Parse query parameters - for (int i = 0; i < qp.length; i++) { - String p = qp[i].replace("?", ""); - if (p.startsWith("md5")) { - this.md5origin = p.replace("md5=", ""); - } - if (p.startsWith("mv")) { - this.mediaVersion = p.replace("mv=", ""); - } - if (p.startsWith("q")) { - if (this.quality == 0) { - this.quality = Integer.parseInt(p.replace("q=", "")); - } - } - } - //Get real url - url = new URL(this.getTrackUrl(trackId, md5origin, mediaVersion, quality)); - } - - - this.connection = (HttpURLConnection) url.openConnection(); - this.connection.setChunkedStreamingMode(2048); - if (dataSpec.position > 0) { - this.counter = (int) (dataSpec.position/2048); - this.connection.setRequestProperty("Range", - "bytes=" + Long.toString(this.counter*2048) + "-"); - } - - InputStream is = this.connection.getInputStream(); - this.inputStream = new BufferedInputStream(new FilterInputStream(is) { - @Override - public int read(byte buffer[], int offset, int len) throws IOException { - byte[] b = new byte[2048]; - int t = 0; - int read = 0; - while (read != -1 && t != 2048) { - t += read = in.read(b, t, 2048-t); - } - - if (counter % 3 == 0) { - byte[] dec = decryptChunk(key, b); - System.arraycopy(dec, 0, buffer, offset, 2048); - } else { - System.arraycopy(b, 0, buffer, offset, 2048); - } - counter++; - - return t; - - } - },2048); - - - } catch (Exception e) { - //Quality 
fallback - if (this.quality == 1) { - Log.e("E", e.toString()); - throw new HttpDataSourceException("Error loading URL", dataSpec, HttpDataSourceException.TYPE_OPEN); - } - if (this.quality == 3) this.quality = 1; - if (this.quality == 9) this.quality = 3; - // r e c u r s i o n - return this.open(dataSpec); - } - String size = this.connection.getHeaderField("Content-Length"); - return Long.parseLong(size); - } - - @Override - public int read(byte[] buffer, int offset, int length) throws HttpDataSourceException { - int read = 0; - try { - read = this.inputStream.read(buffer, offset, length); - } catch (Exception e) { - Log.e("E", e.toString()); - //throw new HttpDataSourceException("Error reading from stream", this.dataSpec, HttpDataSourceException.TYPE_READ); - } - return read; - } - @Override - public void close() { - try { - if (this.inputStream != null) this.inputStream.close(); - if (this.connection != null) this.connection.disconnect(); - } catch (Exception e) { - Log.e("E", e.toString()); - } - } - - @Override - public void setRequestProperty(String name, String value) { - Log.d("D", "setRequestProperty"); - } - - @Override - public void clearRequestProperty(String name) { - Log.d("D", "clearRequestProperty"); - } - - @Override - public void clearAllRequestProperties() { - Log.d("D", "clearAllRequestProperties"); - } - - @Override - public int getResponseCode() { - Log.d("D", "getResponseCode"); - return 0; - } - - @Override - public Map> getResponseHeaders() { - return this.connection.getHeaderFields(); - } - - public final void addTransferListener(TransferListener transferListener) { - Log.d("D", "addTransferListener"); - } - - @Override - public Uri getUri() { - return Uri.parse(this.connection.getURL().toString()); - } - - public static String bytesToHex(byte[] bytes) { - final char[] HEX_ARRAY = "0123456789ABCDEF".toCharArray(); - char[] hexChars = new char[bytes.length * 2]; - for (int j = 0; j < bytes.length; j++) { - int v = bytes[j] & 0xFF; - 
hexChars[j * 2] = HEX_ARRAY[v >>> 4]; - hexChars[j * 2 + 1] = HEX_ARRAY[v & 0x0F]; - } - return new String(hexChars); - } - - byte[] getKey(String id) { - String secret = "g4el58wc0zvf9na1"; - try { - MessageDigest md5 = MessageDigest.getInstance("MD5"); - md5.update(id.getBytes()); - byte[] md5id = md5.digest(); - String idmd5 = bytesToHex(md5id).toLowerCase(); - String key = ""; - for(int i=0; i<16; i++) { - int s0 = idmd5.charAt(i); - int s1 = idmd5.charAt(i+16); - int s2 = secret.charAt(i); - key += (char)(s0^s1^s2); - } - return key.getBytes(); - } catch (Exception e) { - Log.e("E", e.toString()); - return new byte[0]; - } - } - - - byte[] decryptChunk(byte[] key, byte[] data) { - try { - byte[] IV = {00, 01, 02, 03, 04, 05, 06, 07}; - SecretKeySpec Skey = new SecretKeySpec(key, "Blowfish"); - Cipher cipher = Cipher.getInstance("Blowfish/CBC/NoPadding"); - cipher.init(Cipher.DECRYPT_MODE, Skey, new javax.crypto.spec.IvParameterSpec(IV)); - return cipher.doFinal(data); - }catch (Exception e) { - Log.e("D", e.toString()); - return new byte[0]; - } - } - - public String getTrackUrl(String trackId, String md5origin, String mediaVersion, int quality) { - try { - int magic = 164; - - ByteArrayOutputStream step1 = new ByteArrayOutputStream(); - step1.write(md5origin.getBytes()); - step1.write(magic); - step1.write(Integer.toString(quality).getBytes()); - step1.write(magic); - step1.write(trackId.getBytes()); - step1.write(magic); - step1.write(mediaVersion.getBytes()); - //Get MD5 - MessageDigest md5 = MessageDigest.getInstance("MD5"); - md5.update(step1.toByteArray()); - byte[] digest = md5.digest(); - String md5hex = bytesToHex(digest).toLowerCase(); - - ByteArrayOutputStream step2 = new ByteArrayOutputStream(); - step2.write(md5hex.getBytes()); - step2.write(magic); - step2.write(step1.toByteArray()); - step2.write(magic); - - //Pad step2 with dots, to get correct length - while(step2.size()%16 > 0) step2.write(46); - - //Prepare AES encryption - Cipher cipher = 
Cipher.getInstance("AES/ECB/NoPadding"); - SecretKeySpec key = new SecretKeySpec("jo6aey6haid2Teih".getBytes(), "AES"); - cipher.init(Cipher.ENCRYPT_MODE, key); - //Encrypt - StringBuilder step3 = new StringBuilder(); - for (int i=0; i { - plugin.stopListening(); - return false; - }); - } - - @Override - public void onAttachedToEngine(@NonNull FlutterPluginBinding binding) { - startListening(binding.getApplicationContext(), binding.getBinaryMessenger()); - } - - @Override - public void onDetachedFromEngine(@NonNull FlutterPluginBinding binding) { - stopListening(); - } - - private void startListening(Context applicationContext, BinaryMessenger messenger) { - methodCallHandler = new MainMethodCallHandler(applicationContext, messenger); - - channel = new MethodChannel(messenger, "com.ryanheise.just_audio.methods"); - channel.setMethodCallHandler(methodCallHandler); - } - - private void stopListening() { - methodCallHandler.dispose(); - methodCallHandler = null; - - channel.setMethodCallHandler(null); - } -} diff --git a/just_audio/android/src/main/java/com/ryanheise/just_audio/MainMethodCallHandler.java b/just_audio/android/src/main/java/com/ryanheise/just_audio/MainMethodCallHandler.java deleted file mode 100644 index 990a71a..0000000 --- a/just_audio/android/src/main/java/com/ryanheise/just_audio/MainMethodCallHandler.java +++ /dev/null @@ -1,52 +0,0 @@ -package com.ryanheise.just_audio; - -import android.content.Context; -import androidx.annotation.NonNull; -import io.flutter.plugin.common.BinaryMessenger; -import io.flutter.plugin.common.MethodCall; -import io.flutter.plugin.common.MethodChannel.MethodCallHandler; -import io.flutter.plugin.common.MethodChannel.Result; -import java.util.HashMap; -import java.util.List; -import java.util.ArrayList; -import java.util.Map; - -public class MainMethodCallHandler implements MethodCallHandler { - - private final Context applicationContext; - private final BinaryMessenger messenger; - - private final Map players = new 
HashMap<>(); - - public MainMethodCallHandler(Context applicationContext, - BinaryMessenger messenger) { - this.applicationContext = applicationContext; - this.messenger = messenger; - } - - @Override - public void onMethodCall(MethodCall call, @NonNull Result result) { - switch (call.method) { - case "init": - final List ids = call.arguments(); - String id = ids.get(0); - players.put(id, new AudioPlayer(applicationContext, messenger, id, - () -> players.remove(id) - )); - result.success(null); - break; - case "setIosCategory": - result.success(null); - break; - default: - result.notImplemented(); - break; - } - } - - void dispose() { - for (AudioPlayer player : new ArrayList(players.values())) { - player.dispose(); - } - } -} diff --git a/just_audio/darwin/Classes/AudioPlayer.m b/just_audio/darwin/Classes/AudioPlayer.m deleted file mode 100644 index ccbfdea..0000000 --- a/just_audio/darwin/Classes/AudioPlayer.m +++ /dev/null @@ -1,1138 +0,0 @@ -#import "AudioPlayer.h" -#import "AudioSource.h" -#import "IndexedAudioSource.h" -#import "UriAudioSource.h" -#import "ConcatenatingAudioSource.h" -#import "LoopingAudioSource.h" -#import "ClippingAudioSource.h" -#import -#import -#include - -// TODO: Check for and report invalid state transitions. 
-// TODO: Apply Apple's guidance on seeking: https://developer.apple.com/library/archive/qa/qa1820/_index.html -@implementation AudioPlayer { - NSObject* _registrar; - FlutterMethodChannel *_methodChannel; - FlutterEventChannel *_eventChannel; - FlutterEventSink _eventSink; - NSString *_playerId; - AVQueuePlayer *_player; - AudioSource *_audioSource; - NSMutableArray *_indexedAudioSources; - NSMutableArray *_order; - NSMutableArray *_orderInv; - int _index; - enum ProcessingState _processingState; - enum LoopMode _loopMode; - BOOL _shuffleModeEnabled; - long long _updateTime; - int _updatePosition; - int _lastPosition; - int _bufferedPosition; - // Set when the current item hasn't been played yet so we aren't sure whether sufficient audio has been buffered. - BOOL _bufferUnconfirmed; - CMTime _seekPos; - FlutterResult _loadResult; - FlutterResult _playResult; - id _timeObserver; - BOOL _automaticallyWaitsToMinimizeStalling; - BOOL _configuredSession; - BOOL _playing; -} - -- (instancetype)initWithRegistrar:(NSObject *)registrar playerId:(NSString*)idParam configuredSession:(BOOL)configuredSession { - self = [super init]; - NSAssert(self, @"super init cannot be nil"); - _registrar = registrar; - _playerId = idParam; - _configuredSession = configuredSession; - _methodChannel = - [FlutterMethodChannel methodChannelWithName:[NSMutableString stringWithFormat:@"com.ryanheise.just_audio.methods.%@", _playerId] - binaryMessenger:[registrar messenger]]; - _eventChannel = - [FlutterEventChannel eventChannelWithName:[NSMutableString stringWithFormat:@"com.ryanheise.just_audio.events.%@", _playerId] - binaryMessenger:[registrar messenger]]; - [_eventChannel setStreamHandler:self]; - _index = 0; - _processingState = none; - _loopMode = loopOff; - _shuffleModeEnabled = NO; - _player = nil; - _audioSource = nil; - _indexedAudioSources = nil; - _order = nil; - _orderInv = nil; - _seekPos = kCMTimeInvalid; - _timeObserver = 0; - _updatePosition = 0; - _updateTime = 0; - 
_lastPosition = 0; - _bufferedPosition = 0; - _bufferUnconfirmed = NO; - _playing = NO; - _loadResult = nil; - _playResult = nil; - _automaticallyWaitsToMinimizeStalling = YES; - __weak __typeof__(self) weakSelf = self; - [_methodChannel setMethodCallHandler:^(FlutterMethodCall* call, FlutterResult result) { - [weakSelf handleMethodCall:call result:result]; - }]; - return self; -} - -- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result { - NSArray* args = (NSArray*)call.arguments; - if ([@"load" isEqualToString:call.method]) { - [self load:args[0] result:result]; - } else if ([@"play" isEqualToString:call.method]) { - [self play:result]; - } else if ([@"pause" isEqualToString:call.method]) { - [self pause]; - result(nil); - } else if ([@"setVolume" isEqualToString:call.method]) { - [self setVolume:(float)[args[0] doubleValue]]; - result(nil); - } else if ([@"setSpeed" isEqualToString:call.method]) { - [self setSpeed:(float)[args[0] doubleValue]]; - result(nil); - } else if ([@"setLoopMode" isEqualToString:call.method]) { - [self setLoopMode:[args[0] intValue]]; - result(nil); - } else if ([@"setShuffleModeEnabled" isEqualToString:call.method]) { - [self setShuffleModeEnabled:(BOOL)[args[0] boolValue]]; - result(nil); - } else if ([@"setAutomaticallyWaitsToMinimizeStalling" isEqualToString:call.method]) { - [self setAutomaticallyWaitsToMinimizeStalling:(BOOL)[args[0] boolValue]]; - result(nil); - } else if ([@"seek" isEqualToString:call.method]) { - CMTime position = args[0] == [NSNull null] ? 
kCMTimePositiveInfinity : CMTimeMake([args[0] intValue], 1000); - [self seek:position index:args[1] completionHandler:^(BOOL finished) { - result(nil); - }]; - result(nil); - } else if ([@"dispose" isEqualToString:call.method]) { - [self dispose]; - result(nil); - } else if ([@"concatenating.add" isEqualToString:call.method]) { - [self concatenatingAdd:(NSString*)args[0] source:(NSDictionary*)args[1]]; - result(nil); - } else if ([@"concatenating.insert" isEqualToString:call.method]) { - [self concatenatingInsert:(NSString*)args[0] index:[args[1] intValue] source:(NSDictionary*)args[2]]; - result(nil); - } else if ([@"concatenating.addAll" isEqualToString:call.method]) { - [self concatenatingAddAll:(NSString*)args[0] sources:(NSArray*)args[1]]; - result(nil); - } else if ([@"concatenating.insertAll" isEqualToString:call.method]) { - [self concatenatingInsertAll:(NSString*)args[0] index:[args[1] intValue] sources:(NSArray*)args[2]]; - result(nil); - } else if ([@"concatenating.removeAt" isEqualToString:call.method]) { - [self concatenatingRemoveAt:(NSString*)args[0] index:(int)args[1]]; - result(nil); - } else if ([@"concatenating.removeRange" isEqualToString:call.method]) { - [self concatenatingRemoveRange:(NSString*)args[0] start:[args[1] intValue] end:[args[2] intValue]]; - result(nil); - } else if ([@"concatenating.move" isEqualToString:call.method]) { - [self concatenatingMove:(NSString*)args[0] currentIndex:[args[1] intValue] newIndex:[args[2] intValue]]; - result(nil); - } else if ([@"concatenating.clear" isEqualToString:call.method]) { - [self concatenatingClear:(NSString*)args[0]]; - result(nil); - } else { - result(FlutterMethodNotImplemented); - } -} - -// Untested -- (void)concatenatingAdd:(NSString *)catId source:(NSDictionary *)source { - [self concatenatingInsertAll:catId index:-1 sources:@[source]]; -} - -// Untested -- (void)concatenatingInsert:(NSString *)catId index:(int)index source:(NSDictionary *)source { - [self concatenatingInsertAll:catId 
index:index sources:@[source]]; -} - -// Untested -- (void)concatenatingAddAll:(NSString *)catId sources:(NSArray *)sources { - [self concatenatingInsertAll:catId index:-1 sources:sources]; -} - -// Untested -- (void)concatenatingInsertAll:(NSString *)catId index:(int)index sources:(NSArray *)sources { - // Find all duplicates of the identified ConcatenatingAudioSource. - NSMutableArray *matches = [[NSMutableArray alloc] init]; - [_audioSource findById:catId matches:matches]; - // Add each new source to each match. - for (int i = 0; i < matches.count; i++) { - ConcatenatingAudioSource *catSource = (ConcatenatingAudioSource *)matches[i]; - int idx = index >= 0 ? index : catSource.count; - NSMutableArray *audioSources = [self decodeAudioSources:sources]; - for (int j = 0; j < audioSources.count; j++) { - AudioSource *audioSource = audioSources[j]; - [catSource insertSource:audioSource atIndex:(idx + j)]; - } - } - // Index the new audio sources. - _indexedAudioSources = [[NSMutableArray alloc] init]; - [_audioSource buildSequence:_indexedAudioSources treeIndex:0]; - for (int i = 0; i < [_indexedAudioSources count]; i++) { - IndexedAudioSource *audioSource = _indexedAudioSources[i]; - if (!audioSource.isAttached) { - audioSource.playerItem.audioSource = audioSource; - [self addItemObservers:audioSource.playerItem]; - } - } - [self updateOrder]; - if (_player.currentItem) { - _index = [self indexForItem:_player.currentItem]; - } else { - _index = 0; - } - [self enqueueFrom:_index]; - // Notify each new IndexedAudioSource that it's been attached to the player. 
- for (int i = 0; i < [_indexedAudioSources count]; i++) { - if (!_indexedAudioSources[i].isAttached) { - [_indexedAudioSources[i] attach:_player]; - } - } - [self broadcastPlaybackEvent]; -} - -// Untested -- (void)concatenatingRemoveAt:(NSString *)catId index:(int)index { - [self concatenatingRemoveRange:catId start:index end:(index + 1)]; -} - -// Untested -- (void)concatenatingRemoveRange:(NSString *)catId start:(int)start end:(int)end { - // Find all duplicates of the identified ConcatenatingAudioSource. - NSMutableArray *matches = [[NSMutableArray alloc] init]; - [_audioSource findById:catId matches:matches]; - // Remove range from each match. - for (int i = 0; i < matches.count; i++) { - ConcatenatingAudioSource *catSource = (ConcatenatingAudioSource *)matches[i]; - int endIndex = end >= 0 ? end : catSource.count; - [catSource removeSourcesFromIndex:start toIndex:endIndex]; - } - // Re-index the remaining audio sources. - NSArray *oldIndexedAudioSources = _indexedAudioSources; - _indexedAudioSources = [[NSMutableArray alloc] init]; - [_audioSource buildSequence:_indexedAudioSources treeIndex:0]; - for (int i = 0, j = 0; i < _indexedAudioSources.count; i++, j++) { - IndexedAudioSource *audioSource = _indexedAudioSources[i]; - while (audioSource != oldIndexedAudioSources[j]) { - [self removeItemObservers:oldIndexedAudioSources[j].playerItem]; - if (j < _index) { - _index--; - } else if (j == _index) { - // The currently playing item was removed. - } - j++; - } - } - [self updateOrder]; - if (_index >= _indexedAudioSources.count) _index = _indexedAudioSources.count - 1; - if (_index < 0) _index = 0; - [self enqueueFrom:_index]; - [self broadcastPlaybackEvent]; -} - -// Untested -- (void)concatenatingMove:(NSString *)catId currentIndex:(int)currentIndex newIndex:(int)newIndex { - // Find all duplicates of the identified ConcatenatingAudioSource. 
- NSMutableArray *matches = [[NSMutableArray alloc] init]; - [_audioSource findById:catId matches:matches]; - // Move range within each match. - for (int i = 0; i < matches.count; i++) { - ConcatenatingAudioSource *catSource = (ConcatenatingAudioSource *)matches[i]; - [catSource moveSourceFromIndex:currentIndex toIndex:newIndex]; - } - // Re-index the audio sources. - _indexedAudioSources = [[NSMutableArray alloc] init]; - [_audioSource buildSequence:_indexedAudioSources treeIndex:0]; - _index = [self indexForItem:_player.currentItem]; - [self broadcastPlaybackEvent]; -} - -// Untested -- (void)concatenatingClear:(NSString *)catId { - [self concatenatingRemoveRange:catId start:0 end:-1]; -} - -- (FlutterError*)onListenWithArguments:(id)arguments eventSink:(FlutterEventSink)eventSink { - _eventSink = eventSink; - return nil; -} - -- (FlutterError*)onCancelWithArguments:(id)arguments { - _eventSink = nil; - return nil; -} - -- (void)checkForDiscontinuity { - if (!_eventSink) return; - if (!_playing || CMTIME_IS_VALID(_seekPos) || _processingState == completed) return; - int position = [self getCurrentPosition]; - if (_processingState == buffering) { - if (position > _lastPosition) { - [self leaveBuffering:@"stall ended"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - } - } else { - long long now = (long long)([[NSDate date] timeIntervalSince1970] * 1000.0); - long long timeSinceLastUpdate = now - _updateTime; - long long expectedPosition = _updatePosition + (long long)(timeSinceLastUpdate * _player.rate); - long long drift = position - expectedPosition; - //NSLog(@"position: %d, drift: %lld", position, drift); - // Update if we've drifted or just started observing - if (_updateTime == 0L) { - [self broadcastPlaybackEvent]; - } else if (drift < -100) { - [self enterBuffering:@"stalling"]; - NSLog(@"Drift: %lld", drift); - [self updatePosition]; - [self broadcastPlaybackEvent]; - } - } - _lastPosition = position; -} - -- (void)enterBuffering:(NSString 
*)reason { - NSLog(@"ENTER BUFFERING: %@", reason); - _processingState = buffering; -} - -- (void)leaveBuffering:(NSString *)reason { - NSLog(@"LEAVE BUFFERING: %@", reason); - _processingState = ready; -} - -- (void)broadcastPlaybackEvent { - if (!_eventSink) return; - _eventSink(@{ - @"processingState": @(_processingState), - @"updatePosition": @(_updatePosition), - @"updateTime": @(_updateTime), - // TODO: buffer position - @"bufferedPosition": @(_updatePosition), - // TODO: Icy Metadata - @"icyMetadata": [NSNull null], - @"duration": @([self getDuration]), - @"currentIndex": @(_index), - }); -} - -- (int)getCurrentPosition { - if (_processingState == none || _processingState == loading) { - return 0; - } else if (CMTIME_IS_VALID(_seekPos)) { - return (int)(1000 * CMTimeGetSeconds(_seekPos)); - } else if (_indexedAudioSources) { - int ms = (int)(1000 * CMTimeGetSeconds(_indexedAudioSources[_index].position)); - if (ms < 0) ms = 0; - return ms; - } else { - return 0; - } -} - -- (int)getBufferedPosition { - if (_processingState == none || _processingState == loading) { - return 0; - } else if (_indexedAudioSources) { - int ms = (int)(1000 * CMTimeGetSeconds(_indexedAudioSources[_index].bufferedPosition)); - if (ms < 0) ms = 0; - return ms; - } else { - return 0; - } -} - -- (int)getDuration { - if (_processingState == none) { - return -1; - } else if (_indexedAudioSources) { - int v = (int)(1000 * CMTimeGetSeconds(_indexedAudioSources[_index].duration)); - return v; - } else { - return 0; - } -} - -- (void)removeItemObservers:(AVPlayerItem *)playerItem { - [playerItem removeObserver:self forKeyPath:@"status"]; - [playerItem removeObserver:self forKeyPath:@"playbackBufferEmpty"]; - [playerItem removeObserver:self forKeyPath:@"playbackBufferFull"]; - //[playerItem removeObserver:self forKeyPath:@"playbackLikelyToKeepUp"]; - [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemDidPlayToEndTimeNotification object:playerItem]; - 
[[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemFailedToPlayToEndTimeNotification object:playerItem]; - [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemPlaybackStalledNotification object:playerItem]; -} - -- (void)addItemObservers:(AVPlayerItem *)playerItem { - // Get notified when the item is loaded or had an error loading - [playerItem addObserver:self forKeyPath:@"status" options:NSKeyValueObservingOptionNew context:nil]; - // Get notified of the buffer state - [playerItem addObserver:self forKeyPath:@"playbackBufferEmpty" options:NSKeyValueObservingOptionNew context:nil]; - [playerItem addObserver:self forKeyPath:@"playbackBufferFull" options:NSKeyValueObservingOptionNew context:nil]; - [playerItem addObserver:self forKeyPath:@"loadedTimeRanges" options:NSKeyValueObservingOptionNew context:nil]; - //[playerItem addObserver:self forKeyPath:@"playbackLikelyToKeepUp" options:NSKeyValueObservingOptionNew context:nil]; - // Get notified when playback has reached the end - [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(onComplete:) name:AVPlayerItemDidPlayToEndTimeNotification object:playerItem]; - // Get notified when playback stops due to a failure (currently unused) - [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(onFailToComplete:) name:AVPlayerItemFailedToPlayToEndTimeNotification object:playerItem]; - // Get notified when playback stalls (currently unused) - [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(onItemStalled:) name:AVPlayerItemPlaybackStalledNotification object:playerItem]; -} - -- (NSMutableArray *)decodeAudioSources:(NSArray *)data { - NSMutableArray *array = [[NSMutableArray alloc] init]; - for (int i = 0; i < [data count]; i++) { - AudioSource *source = [self decodeAudioSource:data[i]]; - [array addObject:source]; - } - return array; -} - -- (AudioSource *)decodeAudioSource:(NSDictionary *)data { - NSString *type = 
data[@"type"]; - if ([@"progressive" isEqualToString:type]) { - return [[UriAudioSource alloc] initWithId:data[@"id"] uri:data[@"uri"]]; - } else if ([@"dash" isEqualToString:type]) { - return [[UriAudioSource alloc] initWithId:data[@"id"] uri:data[@"uri"]]; - } else if ([@"hls" isEqualToString:type]) { - return [[UriAudioSource alloc] initWithId:data[@"id"] uri:data[@"uri"]]; - } else if ([@"concatenating" isEqualToString:type]) { - return [[ConcatenatingAudioSource alloc] initWithId:data[@"id"] - audioSources:[self decodeAudioSources:data[@"audioSources"]]]; - } else if ([@"clipping" isEqualToString:type]) { - return [[ClippingAudioSource alloc] initWithId:data[@"id"] - audioSource:[self decodeAudioSource:data[@"audioSource"]] - start:data[@"start"] - end:data[@"end"]]; - } else if ([@"looping" isEqualToString:type]) { - NSMutableArray *childSources = [NSMutableArray new]; - int count = [data[@"count"] intValue]; - for (int i = 0; i < count; i++) { - [childSources addObject:[self decodeAudioSource:data[@"audioSource"]]]; - } - return [[LoopingAudioSource alloc] initWithId:data[@"id"] audioSources:childSources]; - } else { - return nil; - } -} - -- (void)enqueueFrom:(int)index { - int oldIndex = _index; - _index = index; - - // Update the queue while keeping the currently playing item untouched. - - /* NSLog(@"before reorder: _player.items.count: ", _player.items.count); */ - /* [self dumpQueue]; */ - - // First, remove all _player items except for the currently playing one (if any). - IndexedPlayerItem *oldItem = _player.currentItem; - IndexedPlayerItem *existingItem = nil; - NSArray *oldPlayerItems = [NSArray arrayWithArray:_player.items]; - // In the first pass, preserve the old and new items. - for (int i = 0; i < oldPlayerItems.count; i++) { - if (oldPlayerItems[i] == _indexedAudioSources[_index].playerItem) { - // Preserve and tag new item if it is already in the queue. 
- existingItem = oldPlayerItems[i]; - } else if (oldPlayerItems[i] == oldItem) { - // Temporarily preserve old item, just to avoid jumping to - // intermediate queue positions unnecessarily. We only want to jump - // once to _index. - } else { - [_player removeItem:oldPlayerItems[i]]; - } - } - // In the second pass, remove the old item (if different from new item). - if (_index != oldIndex) { - [_player removeItem:oldItem]; - } - - /* NSLog(@"inter order: _player.items.count: ", _player.items.count); */ - /* [self dumpQueue]; */ - - // Regenerate queue - BOOL include = NO; - for (int i = 0; i < [_order count]; i++) { - int si = [_order[i] intValue]; - if (si == _index) include = YES; - if (include && _indexedAudioSources[si].playerItem != existingItem) { - [_player insertItem:_indexedAudioSources[si].playerItem afterItem:nil]; - } - } - - /* NSLog(@"after reorder: _player.items.count: ", _player.items.count); */ - /* [self dumpQueue]; */ - - if (_processingState != loading && oldItem != _indexedAudioSources[_index].playerItem) { - // || !_player.currentItem.playbackLikelyToKeepUp; - if (_player.currentItem.playbackBufferEmpty) { - [self enterBuffering:@"enqueueFrom playbackBufferEmpty"]; - } else { - [self leaveBuffering:@"enqueueFrom !playbackBufferEmpty"]; - } - [self updatePosition]; - } -} - -- (void)updatePosition { - _updatePosition = [self getCurrentPosition]; - _updateTime = (long long)([[NSDate date] timeIntervalSince1970] * 1000.0); -} - -- (void)load:(NSDictionary *)source result:(FlutterResult)result { - if (!_playing) { - [_player pause]; - } - if (_processingState == loading) { - [self abortExistingConnection]; - } - _loadResult = result; - _index = 0; - [self updatePosition]; - _processingState = loading; - [self broadcastPlaybackEvent]; - // Remove previous observers - if (_indexedAudioSources) { - for (int i = 0; i < [_indexedAudioSources count]; i++) { - [self removeItemObservers:_indexedAudioSources[i].playerItem]; - } - } - // Decode audio 
source - if (_audioSource && [@"clipping" isEqualToString:source[@"type"]]) { - // Check if we're clipping an audio source that was previously loaded. - UriAudioSource *child = nil; - if ([_audioSource isKindOfClass:[ClippingAudioSource class]]) { - ClippingAudioSource *clipper = (ClippingAudioSource *)_audioSource; - child = clipper.audioSource; - } else if ([_audioSource isKindOfClass:[UriAudioSource class]]) { - child = (UriAudioSource *)_audioSource; - } - if (child) { - _audioSource = [[ClippingAudioSource alloc] initWithId:source[@"id"] - audioSource:child - start:source[@"start"] - end:source[@"end"]]; - } else { - _audioSource = [self decodeAudioSource:source]; - } - } else { - _audioSource = [self decodeAudioSource:source]; - } - _indexedAudioSources = [[NSMutableArray alloc] init]; - [_audioSource buildSequence:_indexedAudioSources treeIndex:0]; - for (int i = 0; i < [_indexedAudioSources count]; i++) { - IndexedAudioSource *source = _indexedAudioSources[i]; - [self addItemObservers:source.playerItem]; - source.playerItem.audioSource = source; - } - [self updateOrder]; - // Set up an empty player - if (!_player) { - _player = [[AVQueuePlayer alloc] initWithItems:@[]]; - if (@available(macOS 10.12, iOS 10.0, *)) { - _player.automaticallyWaitsToMinimizeStalling = _automaticallyWaitsToMinimizeStalling; - // TODO: Remove these observers in dispose. - [_player addObserver:self - forKeyPath:@"timeControlStatus" - options:NSKeyValueObservingOptionNew - context:nil]; - } - [_player addObserver:self - forKeyPath:@"currentItem" - options:NSKeyValueObservingOptionNew - context:nil]; - // TODO: learn about the different ways to define weakSelf. 
- //__weak __typeof__(self) weakSelf = self; - //typeof(self) __weak weakSelf = self; - __unsafe_unretained typeof(self) weakSelf = self; - if (@available(macOS 10.12, iOS 10.0, *)) {} - else { - _timeObserver = [_player addPeriodicTimeObserverForInterval:CMTimeMake(200, 1000) - queue:nil - usingBlock:^(CMTime time) { - [weakSelf checkForDiscontinuity]; - } - ]; - } - } - // Initialise the AVQueuePlayer with items. - [self enqueueFrom:0]; - // Notify each IndexedAudioSource that it's been attached to the player. - for (int i = 0; i < [_indexedAudioSources count]; i++) { - [_indexedAudioSources[i] attach:_player]; - } - - if (_player.currentItem.status == AVPlayerItemStatusReadyToPlay) { - _loadResult(@([self getDuration])); - _loadResult = nil; - } else { - // We send result after the playerItem is ready in observeValueForKeyPath. - } - [self broadcastPlaybackEvent]; -} - -- (void)updateOrder { - if (_shuffleModeEnabled) { - [_audioSource shuffle:0 currentIndex: _index]; - } - _orderInv = [NSMutableArray arrayWithCapacity:[_indexedAudioSources count]]; - for (int i = 0; i < [_indexedAudioSources count]; i++) { - [_orderInv addObject:@(0)]; - } - if (_shuffleModeEnabled) { - _order = [_audioSource getShuffleOrder]; - } else { - NSMutableArray *order = [[NSMutableArray alloc] init]; - for (int i = 0; i < [_indexedAudioSources count]; i++) { - [order addObject:@(i)]; - } - _order = order; - } - for (int i = 0; i < [_indexedAudioSources count]; i++) { - _orderInv[[_order[i] intValue]] = @(i); - } -} - -- (void)onItemStalled:(NSNotification *)notification { - IndexedPlayerItem *playerItem = (IndexedPlayerItem *)notification.object; - NSLog(@"onItemStalled"); -} - -- (void)onFailToComplete:(NSNotification *)notification { - IndexedPlayerItem *playerItem = (IndexedPlayerItem *)notification.object; - NSLog(@"onFailToComplete"); -} - -- (void)onComplete:(NSNotification *)notification { - NSLog(@"onComplete"); - if (_loopMode == loopOne) { - [self seek:kCMTimeZero 
index:@(_index) completionHandler:^(BOOL finished) { - // XXX: Not necessary? - [self play]; - }]; - } else { - IndexedPlayerItem *endedPlayerItem = (IndexedPlayerItem *)notification.object; - IndexedAudioSource *endedSource = endedPlayerItem.audioSource; - // When an item ends, seek back to its beginning. - [endedSource seek:kCMTimeZero]; - - if ([_orderInv[_index] intValue] + 1 < [_order count]) { - // account for automatic move to next item - _index = [_order[[_orderInv[_index] intValue] + 1] intValue]; - NSLog(@"advance to next: index = %d", _index); - [self broadcastPlaybackEvent]; - } else { - // reached end of playlist - if (_loopMode == loopAll) { - NSLog(@"Loop back to first item"); - // Loop back to the beginning - // TODO: Currently there will be a gap at the loop point. - // Maybe we can do something clever by temporarily adding the - // first playlist item at the end of the queue, although this - // will affect any code that assumes the queue always - // corresponds to a contiguous region of the indexed audio - // sources. - // For now we just do a seek back to the start. - if ([_order count] == 1) { - [self seek:kCMTimeZero index:[NSNull null] completionHandler:^(BOOL finished) { - // XXX: Necessary? - [self play]; - }]; - } else { - [self seek:kCMTimeZero index:_order[0] completionHandler:^(BOOL finished) { - // XXX: Necessary? 
- [self play]; - }]; - } - } else { - [self complete]; - } - } - } -} - -- (void)observeValueForKeyPath:(NSString *)keyPath - ofObject:(id)object - change:(NSDictionary *)change - context:(void *)context { - - if ([keyPath isEqualToString:@"status"]) { - IndexedPlayerItem *playerItem = (IndexedPlayerItem *)object; - AVPlayerItemStatus status = AVPlayerItemStatusUnknown; - NSNumber *statusNumber = change[NSKeyValueChangeNewKey]; - if ([statusNumber isKindOfClass:[NSNumber class]]) { - status = statusNumber.intValue; - } - switch (status) { - case AVPlayerItemStatusReadyToPlay: { - if (playerItem != _player.currentItem) return; - // Detect buffering in different ways depending on whether we're playing - if (_playing) { - if (@available(macOS 10.12, iOS 10.0, *)) { - if (_player.timeControlStatus == AVPlayerTimeControlStatusWaitingToPlayAtSpecifiedRate) { - [self enterBuffering:@"ready to play: playing, waitingToPlay"]; - } else { - [self leaveBuffering:@"ready to play: playing, !waitingToPlay"]; - } - [self updatePosition]; - } else { - // If this happens when we're playing, check whether buffer is confirmed - if (_bufferUnconfirmed && !_player.currentItem.playbackBufferFull) { - // Stay in bufering - XXX Test - [self enterBuffering:@"ready to play: playing, bufferUnconfirmed && !playbackBufferFull"]; - } else { - if (_player.currentItem.playbackBufferEmpty) { - // !_player.currentItem.playbackLikelyToKeepUp; - [self enterBuffering:@"ready to play: playing, playbackBufferEmpty"]; - } else { - [self leaveBuffering:@"ready to play: playing, !playbackBufferEmpty"]; - } - [self updatePosition]; - } - } - } else { - if (_player.currentItem.playbackBufferEmpty) { - [self enterBuffering:@"ready to play: !playing, playbackBufferEmpty"]; - // || !_player.currentItem.playbackLikelyToKeepUp; - } else { - [self leaveBuffering:@"ready to play: !playing, !playbackBufferEmpty"]; - } - [self updatePosition]; - } - [self broadcastPlaybackEvent]; - if (_loadResult) { - 
_loadResult(@([self getDuration])); - _loadResult = nil; - } - break; - } - case AVPlayerItemStatusFailed: { - NSLog(@"AVPlayerItemStatusFailed"); - [self sendErrorForItem:playerItem]; - break; - } - case AVPlayerItemStatusUnknown: - break; - } - } else if ([keyPath isEqualToString:@"playbackBufferEmpty"] || [keyPath isEqualToString:@"playbackBufferFull"]) { - // Use these values to detect buffering. - IndexedPlayerItem *playerItem = (IndexedPlayerItem *)object; - if (playerItem != _player.currentItem) return; - // If there's a seek in progress, these values are unreliable - if (CMTIME_IS_VALID(_seekPos)) return; - // Detect buffering in different ways depending on whether we're playing - if (_playing) { - if (@available(macOS 10.12, iOS 10.0, *)) { - // We handle this with timeControlStatus instead. - } else { - if (_bufferUnconfirmed && playerItem.playbackBufferFull) { - _bufferUnconfirmed = NO; - [self leaveBuffering:@"playing, _bufferUnconfirmed && playbackBufferFull"]; - [self updatePosition]; - NSLog(@"Buffering confirmed! 
leaving buffering"); - [self broadcastPlaybackEvent]; - } - } - } else { - if (playerItem.playbackBufferEmpty) { - [self enterBuffering:@"!playing, playbackBufferEmpty"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - } else if (!playerItem.playbackBufferEmpty || playerItem.playbackBufferFull) { - _processingState = ready; - [self leaveBuffering:@"!playing, !playbackBufferEmpty || playbackBufferFull"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - } - } - /* } else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"]) { */ - } else if ([keyPath isEqualToString:@"timeControlStatus"]) { - if (@available(macOS 10.12, iOS 10.0, *)) { - AVPlayerTimeControlStatus status = AVPlayerTimeControlStatusPaused; - NSNumber *statusNumber = change[NSKeyValueChangeNewKey]; - if ([statusNumber isKindOfClass:[NSNumber class]]) { - status = statusNumber.intValue; - } - switch (status) { - case AVPlayerTimeControlStatusPaused: - //NSLog(@"AVPlayerTimeControlStatusPaused"); - break; - case AVPlayerTimeControlStatusWaitingToPlayAtSpecifiedRate: - //NSLog(@"AVPlayerTimeControlStatusWaitingToPlayAtSpecifiedRate"); - if (_processingState != completed) { - [self enterBuffering:@"timeControlStatus"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - } else { - NSLog(@"Ignoring wait signal because we reached the end"); - } - break; - case AVPlayerTimeControlStatusPlaying: - [self leaveBuffering:@"timeControlStatus"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - break; - } - } - } else if ([keyPath isEqualToString:@"currentItem"] && _player.currentItem) { - if (_player.currentItem.status == AVPlayerItemStatusFailed) { - if ([_orderInv[_index] intValue] + 1 < [_order count]) { - // account for automatic move to next item - _index = [_order[[_orderInv[_index] intValue] + 1] intValue]; - NSLog(@"advance to next on error: index = %d", _index); - [self broadcastPlaybackEvent]; - } else { - NSLog(@"error on last item"); - } - return; - } else { - 
int expectedIndex = [self indexForItem:_player.currentItem]; - if (_index != expectedIndex) { - // AVQueuePlayer will sometimes skip over error items without - // notifying this observer. - NSLog(@"Queue change detected. Adjusting index from %d -> %d", _index, expectedIndex); - _index = expectedIndex; - [self broadcastPlaybackEvent]; - } - } - //NSLog(@"currentItem changed. _index=%d", _index); - _bufferUnconfirmed = YES; - // If we've skipped or transitioned to a new item and we're not - // currently in the middle of a seek - if (CMTIME_IS_INVALID(_seekPos) && _player.currentItem.status == AVPlayerItemStatusReadyToPlay) { - [self updatePosition]; - IndexedAudioSource *source = ((IndexedPlayerItem *)_player.currentItem).audioSource; - // We should already be at position zero but for - // ClippingAudioSource it might be off by some milliseconds so we - // consider anything <= 100 as close enough. - if ((int)(1000 * CMTimeGetSeconds(source.position)) > 100) { - NSLog(@"On currentItem change, seeking back to zero"); - BOOL shouldResumePlayback = NO; - AVPlayerActionAtItemEnd originalEndAction = _player.actionAtItemEnd; - if (_playing && CMTimeGetSeconds(CMTimeSubtract(source.position, source.duration)) >= 0) { - NSLog(@"Need to pause while rewinding because we're at the end"); - shouldResumePlayback = YES; - _player.actionAtItemEnd = AVPlayerActionAtItemEndPause; - [_player pause]; - } - [self enterBuffering:@"currentItem changed, seeking"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - [source seek:kCMTimeZero completionHandler:^(BOOL finished) { - [self leaveBuffering:@"currentItem changed, finished seek"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - if (shouldResumePlayback) { - _player.actionAtItemEnd = originalEndAction; - // TODO: This logic is almost duplicated in seek. See if we can reuse this code. - [_player play]; - } - }]; - } else { - // Already at zero, no need to seek. 
- } - } - } else if ([keyPath isEqualToString:@"loadedTimeRanges"]) { - IndexedPlayerItem *playerItem = (IndexedPlayerItem *)object; - if (playerItem != _player.currentItem) return; - int pos = [self getBufferedPosition]; - if (pos != _bufferedPosition) { - _bufferedPosition = pos; - [self broadcastPlaybackEvent]; - } - } -} - -- (void)sendErrorForItem:(IndexedPlayerItem *)playerItem { - FlutterError *flutterError = [FlutterError errorWithCode:[NSString stringWithFormat:@"%d", playerItem.error.code] - message:playerItem.error.localizedDescription - details:nil]; - [self sendError:flutterError playerItem:playerItem]; -} - -- (void)sendError:(FlutterError *)flutterError playerItem:(IndexedPlayerItem *)playerItem { - NSLog(@"sendError"); - if (_loadResult && playerItem == _player.currentItem) { - _loadResult(flutterError); - _loadResult = nil; - } - if (_eventSink) { - // Broadcast all errors even if they aren't on the current item. - _eventSink(flutterError); - } -} - -- (void)abortExistingConnection { - FlutterError *flutterError = [FlutterError errorWithCode:@"abort" - message:@"Connection aborted" - details:nil]; - [self sendError:flutterError playerItem:nil]; -} - -- (int)indexForItem:(IndexedPlayerItem *)playerItem { - for (int i = 0; i < _indexedAudioSources.count; i++) { - if (_indexedAudioSources[i].playerItem == playerItem) { - return i; - } - } - return -1; -} - -- (void)play { - [self play:nil]; -} - -- (void)play:(FlutterResult)result { - if (result) { - if (_playResult) { - NSLog(@"INTERRUPTING PLAY"); - _playResult(nil); - } - _playResult = result; - } - _playing = YES; -#if TARGET_OS_IPHONE - if (_configuredSession) { - [[AVAudioSession sharedInstance] setActive:YES error:nil]; - } -#endif - [_player play]; - [self updatePosition]; - if (@available(macOS 10.12, iOS 10.0, *)) {} - else { - if (_bufferUnconfirmed && !_player.currentItem.playbackBufferFull) { - [self enterBuffering:@"play, _bufferUnconfirmed && !playbackBufferFull"]; - [self 
broadcastPlaybackEvent]; - } - } -} - -- (void)pause { - _playing = NO; - [_player pause]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - if (_playResult) { - NSLog(@"PLAY FINISHED DUE TO PAUSE"); - _playResult(nil); - _playResult = nil; - } -} - -- (void)complete { - [self updatePosition]; - _processingState = completed; - [self broadcastPlaybackEvent]; - if (_playResult) { - NSLog(@"PLAY FINISHED DUE TO COMPLETE"); - _playResult(nil); - _playResult = nil; - } -} - -- (void)setVolume:(float)volume { - [_player setVolume:volume]; -} - -- (void)setSpeed:(float)speed { - if (speed == 1.0 - || (speed < 1.0 && _player.currentItem.canPlaySlowForward) - || (speed > 1.0 && _player.currentItem.canPlayFastForward)) { - _player.rate = speed; - } - [self updatePosition]; -} - -- (void)setLoopMode:(int)loopMode { - _loopMode = loopMode; - if (_player) { - switch (_loopMode) { - case loopOne: - _player.actionAtItemEnd = AVPlayerActionAtItemEndPause; // AVPlayerActionAtItemEndNone - break; - default: - _player.actionAtItemEnd = AVPlayerActionAtItemEndAdvance; - } - } -} - -- (void)setShuffleModeEnabled:(BOOL)shuffleModeEnabled { - NSLog(@"setShuffleModeEnabled: %d", shuffleModeEnabled); - _shuffleModeEnabled = shuffleModeEnabled; - if (!_audioSource) return; - - [self updateOrder]; - - [self enqueueFrom:_index]; -} - -- (void)dumpQueue { - for (int i = 0; i < _player.items.count; i++) { - IndexedPlayerItem *playerItem = _player.items[i]; - for (int j = 0; j < _indexedAudioSources.count; j++) { - IndexedAudioSource *source = _indexedAudioSources[j]; - if (source.playerItem == playerItem) { - NSLog(@"- %d", j); - break; - } - } - } -} - -- (void)setAutomaticallyWaitsToMinimizeStalling:(bool)automaticallyWaitsToMinimizeStalling { - _automaticallyWaitsToMinimizeStalling = automaticallyWaitsToMinimizeStalling; - if (@available(macOS 10.12, iOS 10.0, *)) { - if(_player) { - _player.automaticallyWaitsToMinimizeStalling = automaticallyWaitsToMinimizeStalling; - } - } -} - 
-- (void)seek:(CMTime)position index:(NSNumber *)newIndex completionHandler:(void (^)(BOOL))completionHandler { - int index = _index; - if (newIndex != [NSNull null]) { - index = [newIndex intValue]; - } - if (index != _index) { - // Jump to a new item - /* if (_playing && index == _index + 1) { */ - /* // Special case for jumping to the very next item */ - /* NSLog(@"seek to next item: %d -> %d", _index, index); */ - /* [_indexedAudioSources[_index] seek:kCMTimeZero]; */ - /* _index = index; */ - /* [_player advanceToNextItem]; */ - /* [self broadcastPlaybackEvent]; */ - /* } else */ - { - // Jump to a distant item - //NSLog(@"seek# jump to distant item: %d -> %d", _index, index); - if (_playing) { - [_player pause]; - } - [_indexedAudioSources[_index] seek:kCMTimeZero]; - // The "currentItem" key observer will respect that a seek is already in progress - _seekPos = position; - [self updatePosition]; - [self enqueueFrom:index]; - IndexedAudioSource *source = _indexedAudioSources[_index]; - if (abs((int)(1000 * CMTimeGetSeconds(CMTimeSubtract(source.position, position)))) > 100) { - [self enterBuffering:@"seek to index"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - [source seek:position completionHandler:^(BOOL finished) { - if (@available(macOS 10.12, iOS 10.0, *)) { - if (_playing) { - // Handled by timeControlStatus - } else { - if (_bufferUnconfirmed && !_player.currentItem.playbackBufferFull) { - // Stay in buffering - } else if (source.playerItem.status == AVPlayerItemStatusReadyToPlay) { - [self leaveBuffering:@"seek to index finished, (!bufferUnconfirmed || playbackBufferFull) && ready to play"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - } - } - } else { - if (_bufferUnconfirmed && !_player.currentItem.playbackBufferFull) { - // Stay in buffering - } else if (source.playerItem.status == AVPlayerItemStatusReadyToPlay) { - [self leaveBuffering:@"seek to index finished, (!bufferUnconfirmed || playbackBufferFull) && ready to 
play"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - } - } - if (_playing) { - [_player play]; - } - _seekPos = kCMTimeInvalid; - [self broadcastPlaybackEvent]; - if (completionHandler) { - completionHandler(finished); - } - }]; - } else { - _seekPos = kCMTimeInvalid; - if (_playing) { - [_player play]; - } - } - } - } else { - // Seek within an item - if (_playing) { - [_player pause]; - } - _seekPos = position; - //NSLog(@"seek. enter buffering. pos = %d", (int)(1000*CMTimeGetSeconds(_indexedAudioSources[_index].position))); - // TODO: Move this into a separate method so it can also - // be used in skip. - [self enterBuffering:@"seek"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - [_indexedAudioSources[_index] seek:position completionHandler:^(BOOL finished) { - [self updatePosition]; - if (_playing) { - // If playing, buffering will be detected either by: - // 1. checkForDiscontinuity - // 2. timeControlStatus - [_player play]; - } else { - // If not playing, there is no reliable way to detect - // when buffering has completed, so we use - // !playbackBufferEmpty. Although this always seems to - // be full even right after a seek. 
- if (_player.currentItem.playbackBufferEmpty) { - [self enterBuffering:@"seek finished, playbackBufferEmpty"]; - } else { - [self leaveBuffering:@"seek finished, !playbackBufferEmpty"]; - } - [self updatePosition]; - if (_processingState != buffering) { - [self broadcastPlaybackEvent]; - } - } - _seekPos = kCMTimeInvalid; - [self broadcastPlaybackEvent]; - if (completionHandler) { - completionHandler(finished); - } - }]; - } -} - -- (void)dispose { - if (_processingState != none) { - [_player pause]; - _processingState = none; - [self broadcastPlaybackEvent]; - } - if (_timeObserver) { - [_player removeTimeObserver:_timeObserver]; - _timeObserver = 0; - } - if (_indexedAudioSources) { - for (int i = 0; i < [_indexedAudioSources count]; i++) { - [self removeItemObservers:_indexedAudioSources[i].playerItem]; - } - } - if (_player) { - [_player removeObserver:self forKeyPath:@"currentItem"]; - if (@available(macOS 10.12, iOS 10.0, *)) { - [_player removeObserver:self forKeyPath:@"timeControlStatus"]; - } - _player = nil; - } - // Untested: - // [_eventChannel setStreamHandler:nil]; - // [_methodChannel setMethodHandler:nil]; -} - -@end diff --git a/just_audio/darwin/Classes/AudioSource.m b/just_audio/darwin/Classes/AudioSource.m deleted file mode 100644 index 81534f1..0000000 --- a/just_audio/darwin/Classes/AudioSource.m +++ /dev/null @@ -1,37 +0,0 @@ -#import "AudioSource.h" -#import - -@implementation AudioSource { - NSString *_sourceId; -} - -- (instancetype)initWithId:(NSString *)sid { - self = [super init]; - NSAssert(self, @"super init cannot be nil"); - _sourceId = sid; - return self; -} - -- (NSString *)sourceId { - return _sourceId; -} - -- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex { - return 0; -} - -- (void)findById:(NSString *)sourceId matches:(NSMutableArray *)matches { - if ([_sourceId isEqualToString:sourceId]) { - [matches addObject:self]; - } -} - -- (NSArray *)getShuffleOrder { - return @[]; -} - -- 
(int)shuffle:(int)treeIndex currentIndex:(int)currentIndex { - return 0; -} - -@end diff --git a/just_audio/darwin/Classes/ClippingAudioSource.m b/just_audio/darwin/Classes/ClippingAudioSource.m deleted file mode 100644 index 2f3b174..0000000 --- a/just_audio/darwin/Classes/ClippingAudioSource.m +++ /dev/null @@ -1,79 +0,0 @@ -#import "AudioSource.h" -#import "ClippingAudioSource.h" -#import "IndexedPlayerItem.h" -#import "UriAudioSource.h" -#import - -@implementation ClippingAudioSource { - UriAudioSource *_audioSource; - CMTime _start; - CMTime _end; -} - -- (instancetype)initWithId:(NSString *)sid audioSource:(UriAudioSource *)audioSource start:(NSNumber *)start end:(NSNumber *)end { - self = [super initWithId:sid]; - NSAssert(self, @"super init cannot be nil"); - _audioSource = audioSource; - _start = start == [NSNull null] ? kCMTimeZero : CMTimeMake([start intValue], 1000); - _end = end == [NSNull null] ? kCMTimeInvalid : CMTimeMake([end intValue], 1000); - return self; -} - -- (UriAudioSource *)audioSource { - return _audioSource; -} - -- (void)findById:(NSString *)sourceId matches:(NSMutableArray *)matches { - [super findById:sourceId matches:matches]; - [_audioSource findById:sourceId matches:matches]; -} - -- (void)attach:(AVQueuePlayer *)player { - [super attach:player]; - _audioSource.playerItem.forwardPlaybackEndTime = _end; - // XXX: Not needed since currentItem observer handles it? 
- [self seek:kCMTimeZero]; -} - -- (IndexedPlayerItem *)playerItem { - return _audioSource.playerItem; -} - -- (NSArray *)getShuffleOrder { - return @[@(0)]; -} - -- (void)play:(AVQueuePlayer *)player { -} - -- (void)pause:(AVQueuePlayer *)player { -} - -- (void)stop:(AVQueuePlayer *)player { -} - -- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler { - if (!completionHandler || (self.playerItem.status == AVPlayerItemStatusReadyToPlay)) { - CMTime absPosition = CMTimeAdd(_start, position); - [_audioSource.playerItem seekToTime:absPosition toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero completionHandler:completionHandler]; - } -} - -- (CMTime)duration { - return CMTimeSubtract(CMTIME_IS_INVALID(_end) ? self.playerItem.duration : _end, _start); -} - -- (void)setDuration:(CMTime)duration { -} - -- (CMTime)position { - return CMTimeSubtract(self.playerItem.currentTime, _start); -} - -- (CMTime)bufferedPosition { - CMTime pos = CMTimeSubtract(_audioSource.bufferedPosition, _start); - CMTime dur = [self duration]; - return CMTimeCompare(pos, dur) >= 0 ? 
dur : pos; -} - -@end diff --git a/just_audio/darwin/Classes/ConcatenatingAudioSource.m b/just_audio/darwin/Classes/ConcatenatingAudioSource.m deleted file mode 100644 index bd7b713..0000000 --- a/just_audio/darwin/Classes/ConcatenatingAudioSource.m +++ /dev/null @@ -1,109 +0,0 @@ -#import "AudioSource.h" -#import "ConcatenatingAudioSource.h" -#import -#import - -@implementation ConcatenatingAudioSource { - NSMutableArray *_audioSources; - NSMutableArray *_shuffleOrder; -} - -- (instancetype)initWithId:(NSString *)sid audioSources:(NSMutableArray *)audioSources { - self = [super initWithId:sid]; - NSAssert(self, @"super init cannot be nil"); - _audioSources = audioSources; - return self; -} - -- (int)count { - return _audioSources.count; -} - -- (void)insertSource:(AudioSource *)audioSource atIndex:(int)index { - [_audioSources insertObject:audioSource atIndex:index]; -} - -- (void)removeSourcesFromIndex:(int)start toIndex:(int)end { - if (end == -1) end = _audioSources.count; - for (int i = start; i < end; i++) { - [_audioSources removeObjectAtIndex:start]; - } -} - -- (void)moveSourceFromIndex:(int)currentIndex toIndex:(int)newIndex { - AudioSource *source = _audioSources[currentIndex]; - [_audioSources removeObjectAtIndex:currentIndex]; - [_audioSources insertObject:source atIndex:newIndex]; -} - -- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex { - for (int i = 0; i < [_audioSources count]; i++) { - treeIndex = [_audioSources[i] buildSequence:sequence treeIndex:treeIndex]; - } - return treeIndex; -} - -- (void)findById:(NSString *)sourceId matches:(NSMutableArray *)matches { - [super findById:sourceId matches:matches]; - for (int i = 0; i < [_audioSources count]; i++) { - [_audioSources[i] findById:sourceId matches:matches]; - } -} - -- (NSArray *)getShuffleOrder { - NSMutableArray *order = [NSMutableArray new]; - int offset = [order count]; - NSMutableArray *childOrders = [NSMutableArray new]; // array of array of ints - for (int i = 0; 
i < [_audioSources count]; i++) { - AudioSource *audioSource = _audioSources[i]; - NSArray *childShuffleOrder = [audioSource getShuffleOrder]; - NSMutableArray *offsetChildShuffleOrder = [NSMutableArray new]; - for (int j = 0; j < [childShuffleOrder count]; j++) { - [offsetChildShuffleOrder addObject:@([childShuffleOrder[j] integerValue] + offset)]; - } - [childOrders addObject:offsetChildShuffleOrder]; - offset += [childShuffleOrder count]; - } - for (int i = 0; i < [_audioSources count]; i++) { - [order addObjectsFromArray:childOrders[[_shuffleOrder[i] integerValue]]]; - } - return order; -} - -- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex { - int currentChildIndex = -1; - for (int i = 0; i < [_audioSources count]; i++) { - int indexBefore = treeIndex; - AudioSource *child = _audioSources[i]; - treeIndex = [child shuffle:treeIndex currentIndex:currentIndex]; - if (currentIndex >= indexBefore && currentIndex < treeIndex) { - currentChildIndex = i; - } else {} - } - // Shuffle so that the current child is first in the shuffle order - _shuffleOrder = [NSMutableArray arrayWithCapacity:[_audioSources count]]; - for (int i = 0; i < [_audioSources count]; i++) { - [_shuffleOrder addObject:@(0)]; - } - NSLog(@"shuffle: audioSources.count=%d and shuffleOrder.count=%d", [_audioSources count], [_shuffleOrder count]); - // First generate a random shuffle - for (int i = 0; i < [_audioSources count]; i++) { - int j = arc4random_uniform(i + 1); - _shuffleOrder[i] = _shuffleOrder[j]; - _shuffleOrder[j] = @(i); - } - // Then bring currentIndex to the front - if (currentChildIndex != -1) { - for (int i = 1; i < [_audioSources count]; i++) { - if ([_shuffleOrder[i] integerValue] == currentChildIndex) { - NSNumber *v = _shuffleOrder[0]; - _shuffleOrder[0] = _shuffleOrder[i]; - _shuffleOrder[i] = v; - break; - } - } - } - return treeIndex; -} - -@end diff --git a/just_audio/darwin/Classes/IndexedAudioSource.m b/just_audio/darwin/Classes/IndexedAudioSource.m deleted 
file mode 100644 index 316f900..0000000 --- a/just_audio/darwin/Classes/IndexedAudioSource.m +++ /dev/null @@ -1,68 +0,0 @@ -#import "IndexedAudioSource.h" -#import "IndexedPlayerItem.h" -#import - -@implementation IndexedAudioSource { - BOOL _isAttached; -} - -- (instancetype)initWithId:(NSString *)sid { - self = [super init]; - NSAssert(self, @"super init cannot be nil"); - _isAttached = NO; - return self; -} - -- (IndexedPlayerItem *)playerItem { - return nil; -} - -- (BOOL)isAttached { - return _isAttached; -} - -- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex { - [sequence addObject:self]; - return treeIndex + 1; -} - -- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex { - return treeIndex + 1; -} - -- (void)attach:(AVQueuePlayer *)player { - _isAttached = YES; -} - -- (void)play:(AVQueuePlayer *)player { -} - -- (void)pause:(AVQueuePlayer *)player { -} - -- (void)stop:(AVQueuePlayer *)player { -} - -- (void)seek:(CMTime)position { - [self seek:position completionHandler:nil]; -} - -- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler { -} - -- (CMTime)duration { - return kCMTimeInvalid; -} - -- (void)setDuration:(CMTime)duration { -} - -- (CMTime)position { - return kCMTimeInvalid; -} - -- (CMTime)bufferedPosition { - return kCMTimeInvalid; -} - -@end diff --git a/just_audio/darwin/Classes/IndexedPlayerItem.m b/just_audio/darwin/Classes/IndexedPlayerItem.m deleted file mode 100644 index 87fafe0..0000000 --- a/just_audio/darwin/Classes/IndexedPlayerItem.m +++ /dev/null @@ -1,16 +0,0 @@ -#import "IndexedPlayerItem.h" -#import "IndexedAudioSource.h" - -@implementation IndexedPlayerItem { - IndexedAudioSource *_audioSource; -} - --(void)setAudioSource:(IndexedAudioSource *)audioSource { - _audioSource = audioSource; -} - --(IndexedAudioSource *)audioSource { - return _audioSource; -} - -@end diff --git a/just_audio/darwin/Classes/JustAudioPlugin.m b/just_audio/darwin/Classes/JustAudioPlugin.m deleted 
file mode 100644 index 982a260..0000000 --- a/just_audio/darwin/Classes/JustAudioPlugin.m +++ /dev/null @@ -1,55 +0,0 @@ -#import "JustAudioPlugin.h" -#import "AudioPlayer.h" -#import -#include - -@implementation JustAudioPlugin { - NSObject* _registrar; - BOOL _configuredSession; -} - -+ (void)registerWithRegistrar:(NSObject*)registrar { - FlutterMethodChannel* channel = [FlutterMethodChannel - methodChannelWithName:@"com.ryanheise.just_audio.methods" - binaryMessenger:[registrar messenger]]; - JustAudioPlugin* instance = [[JustAudioPlugin alloc] initWithRegistrar:registrar]; - [registrar addMethodCallDelegate:instance channel:channel]; -} - -- (instancetype)initWithRegistrar:(NSObject *)registrar { - self = [super init]; - NSAssert(self, @"super init cannot be nil"); - _registrar = registrar; - return self; -} - -- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result { - if ([@"init" isEqualToString:call.method]) { - NSArray* args = (NSArray*)call.arguments; - NSString* playerId = args[0]; - /*AudioPlayer* player =*/ [[AudioPlayer alloc] initWithRegistrar:_registrar playerId:playerId configuredSession:_configuredSession]; - result(nil); - } else if ([@"setIosCategory" isEqualToString:call.method]) { -#if TARGET_OS_IPHONE - NSNumber* categoryIndex = (NSNumber*)call.arguments; - AVAudioSessionCategory category = nil; - switch (categoryIndex.integerValue) { - case 0: category = AVAudioSessionCategoryAmbient; break; - case 1: category = AVAudioSessionCategorySoloAmbient; break; - case 2: category = AVAudioSessionCategoryPlayback; break; - case 3: category = AVAudioSessionCategoryRecord; break; - case 4: category = AVAudioSessionCategoryPlayAndRecord; break; - case 5: category = AVAudioSessionCategoryMultiRoute; break; - } - if (category) { - _configuredSession = YES; - } - [[AVAudioSession sharedInstance] setCategory:category error:nil]; -#endif - result(nil); - } else { - result(FlutterMethodNotImplemented); - } -} - -@end diff --git 
a/just_audio/darwin/Classes/LoopingAudioSource.m b/just_audio/darwin/Classes/LoopingAudioSource.m deleted file mode 100644 index ba4b52b..0000000 --- a/just_audio/darwin/Classes/LoopingAudioSource.m +++ /dev/null @@ -1,53 +0,0 @@ -#import "AudioSource.h" -#import "LoopingAudioSource.h" -#import - -@implementation LoopingAudioSource { - // An array of duplicates - NSArray *_audioSources; // -} - -- (instancetype)initWithId:(NSString *)sid audioSources:(NSArray *)audioSources { - self = [super initWithId:sid]; - NSAssert(self, @"super init cannot be nil"); - _audioSources = audioSources; - return self; -} - -- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex { - for (int i = 0; i < [_audioSources count]; i++) { - treeIndex = [_audioSources[i] buildSequence:sequence treeIndex:treeIndex]; - } - return treeIndex; -} - -- (void)findById:(NSString *)sourceId matches:(NSMutableArray *)matches { - [super findById:sourceId matches:matches]; - for (int i = 0; i < [_audioSources count]; i++) { - [_audioSources[i] findById:sourceId matches:matches]; - } -} - -- (NSArray *)getShuffleOrder { - NSMutableArray *order = [NSMutableArray new]; - int offset = (int)[order count]; - for (int i = 0; i < [_audioSources count]; i++) { - AudioSource *audioSource = _audioSources[i]; - NSArray *childShuffleOrder = [audioSource getShuffleOrder]; - for (int j = 0; j < [childShuffleOrder count]; j++) { - [order addObject:@([childShuffleOrder[j] integerValue] + offset)]; - } - offset += [childShuffleOrder count]; - } - return order; -} - -- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex { - // TODO: This should probably shuffle the same way on all duplicates. 
- for (int i = 0; i < [_audioSources count]; i++) { - treeIndex = [_audioSources[i] shuffle:treeIndex currentIndex:currentIndex]; - } - return treeIndex; -} - -@end diff --git a/just_audio/darwin/Classes/UriAudioSource.m b/just_audio/darwin/Classes/UriAudioSource.m deleted file mode 100644 index 91321d4..0000000 --- a/just_audio/darwin/Classes/UriAudioSource.m +++ /dev/null @@ -1,79 +0,0 @@ -#import "UriAudioSource.h" -#import "IndexedAudioSource.h" -#import "IndexedPlayerItem.h" -#import - -@implementation UriAudioSource { - NSString *_uri; - IndexedPlayerItem *_playerItem; - /* CMTime _duration; */ -} - -- (instancetype)initWithId:(NSString *)sid uri:(NSString *)uri { - self = [super initWithId:sid]; - NSAssert(self, @"super init cannot be nil"); - _uri = uri; - if ([_uri hasPrefix:@"file://"]) { - _playerItem = [[IndexedPlayerItem alloc] initWithURL:[NSURL fileURLWithPath:[_uri substringFromIndex:7]]]; - } else { - _playerItem = [[IndexedPlayerItem alloc] initWithURL:[NSURL URLWithString:_uri]]; - } - if (@available(macOS 10.13, iOS 11.0, *)) { - // This does the best at reducing distortion on voice with speeds below 1.0 - _playerItem.audioTimePitchAlgorithm = AVAudioTimePitchAlgorithmTimeDomain; - } - /* NSKeyValueObservingOptions options = */ - /* NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew; */ - /* [_playerItem addObserver:self */ - /* forKeyPath:@"duration" */ - /* options:options */ - /* context:nil]; */ - return self; -} - -- (IndexedPlayerItem *)playerItem { - return _playerItem; -} - -- (NSArray *)getShuffleOrder { - return @[@(0)]; -} - -- (void)play:(AVQueuePlayer *)player { -} - -- (void)pause:(AVQueuePlayer *)player { -} - -- (void)stop:(AVQueuePlayer *)player { -} - -- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler { - if (!completionHandler || (_playerItem.status == AVPlayerItemStatusReadyToPlay)) { - [_playerItem seekToTime:position toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero 
completionHandler:completionHandler]; - } -} - -- (CMTime)duration { - return _playerItem.duration; -} - -- (void)setDuration:(CMTime)duration { -} - -- (CMTime)position { - return _playerItem.currentTime; -} - -- (CMTime)bufferedPosition { - NSValue *last = _playerItem.loadedTimeRanges.lastObject; - if (last) { - CMTimeRange timeRange = [last CMTimeRangeValue]; - return CMTimeAdd(timeRange.start, timeRange.duration); - } else { - return _playerItem.currentTime; - } - return kCMTimeInvalid; -} - -@end diff --git a/just_audio/ios/.gitignore b/just_audio/ios/.gitignore deleted file mode 100644 index aa479fd..0000000 --- a/just_audio/ios/.gitignore +++ /dev/null @@ -1,37 +0,0 @@ -.idea/ -.vagrant/ -.sconsign.dblite -.svn/ - -.DS_Store -*.swp -profile - -DerivedData/ -build/ -GeneratedPluginRegistrant.h -GeneratedPluginRegistrant.m - -.generated/ - -*.pbxuser -*.mode1v3 -*.mode2v3 -*.perspectivev3 - -!default.pbxuser -!default.mode1v3 -!default.mode2v3 -!default.perspectivev3 - -xcuserdata - -*.moved-aside - -*.pyc -*sync/ -Icon? 
-.tags* - -/Flutter/Generated.xcconfig -/Flutter/flutter_export_environment.sh \ No newline at end of file diff --git a/just_audio/ios/Assets/.gitkeep b/just_audio/ios/Assets/.gitkeep deleted file mode 100644 index e69de29..0000000 diff --git a/just_audio/ios/Classes/AudioPlayer.h b/just_audio/ios/Classes/AudioPlayer.h deleted file mode 100644 index 1a985f3..0000000 --- a/just_audio/ios/Classes/AudioPlayer.h +++ /dev/null @@ -1,21 +0,0 @@ -#import - -@interface AudioPlayer : NSObject - -- (instancetype)initWithRegistrar:(NSObject *)registrar playerId:(NSString*)idParam configuredSession:(BOOL)configuredSession; - -@end - -enum ProcessingState { - none, - loading, - buffering, - ready, - completed -}; - -enum LoopMode { - loopOff, - loopOne, - loopAll -}; diff --git a/just_audio/ios/Classes/AudioPlayer.m b/just_audio/ios/Classes/AudioPlayer.m deleted file mode 100644 index ccbfdea..0000000 --- a/just_audio/ios/Classes/AudioPlayer.m +++ /dev/null @@ -1,1138 +0,0 @@ -#import "AudioPlayer.h" -#import "AudioSource.h" -#import "IndexedAudioSource.h" -#import "UriAudioSource.h" -#import "ConcatenatingAudioSource.h" -#import "LoopingAudioSource.h" -#import "ClippingAudioSource.h" -#import -#import -#include - -// TODO: Check for and report invalid state transitions. 
-// TODO: Apply Apple's guidance on seeking: https://developer.apple.com/library/archive/qa/qa1820/_index.html -@implementation AudioPlayer { - NSObject* _registrar; - FlutterMethodChannel *_methodChannel; - FlutterEventChannel *_eventChannel; - FlutterEventSink _eventSink; - NSString *_playerId; - AVQueuePlayer *_player; - AudioSource *_audioSource; - NSMutableArray *_indexedAudioSources; - NSMutableArray *_order; - NSMutableArray *_orderInv; - int _index; - enum ProcessingState _processingState; - enum LoopMode _loopMode; - BOOL _shuffleModeEnabled; - long long _updateTime; - int _updatePosition; - int _lastPosition; - int _bufferedPosition; - // Set when the current item hasn't been played yet so we aren't sure whether sufficient audio has been buffered. - BOOL _bufferUnconfirmed; - CMTime _seekPos; - FlutterResult _loadResult; - FlutterResult _playResult; - id _timeObserver; - BOOL _automaticallyWaitsToMinimizeStalling; - BOOL _configuredSession; - BOOL _playing; -} - -- (instancetype)initWithRegistrar:(NSObject *)registrar playerId:(NSString*)idParam configuredSession:(BOOL)configuredSession { - self = [super init]; - NSAssert(self, @"super init cannot be nil"); - _registrar = registrar; - _playerId = idParam; - _configuredSession = configuredSession; - _methodChannel = - [FlutterMethodChannel methodChannelWithName:[NSMutableString stringWithFormat:@"com.ryanheise.just_audio.methods.%@", _playerId] - binaryMessenger:[registrar messenger]]; - _eventChannel = - [FlutterEventChannel eventChannelWithName:[NSMutableString stringWithFormat:@"com.ryanheise.just_audio.events.%@", _playerId] - binaryMessenger:[registrar messenger]]; - [_eventChannel setStreamHandler:self]; - _index = 0; - _processingState = none; - _loopMode = loopOff; - _shuffleModeEnabled = NO; - _player = nil; - _audioSource = nil; - _indexedAudioSources = nil; - _order = nil; - _orderInv = nil; - _seekPos = kCMTimeInvalid; - _timeObserver = 0; - _updatePosition = 0; - _updateTime = 0; - 
_lastPosition = 0; - _bufferedPosition = 0; - _bufferUnconfirmed = NO; - _playing = NO; - _loadResult = nil; - _playResult = nil; - _automaticallyWaitsToMinimizeStalling = YES; - __weak __typeof__(self) weakSelf = self; - [_methodChannel setMethodCallHandler:^(FlutterMethodCall* call, FlutterResult result) { - [weakSelf handleMethodCall:call result:result]; - }]; - return self; -} - -- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result { - NSArray* args = (NSArray*)call.arguments; - if ([@"load" isEqualToString:call.method]) { - [self load:args[0] result:result]; - } else if ([@"play" isEqualToString:call.method]) { - [self play:result]; - } else if ([@"pause" isEqualToString:call.method]) { - [self pause]; - result(nil); - } else if ([@"setVolume" isEqualToString:call.method]) { - [self setVolume:(float)[args[0] doubleValue]]; - result(nil); - } else if ([@"setSpeed" isEqualToString:call.method]) { - [self setSpeed:(float)[args[0] doubleValue]]; - result(nil); - } else if ([@"setLoopMode" isEqualToString:call.method]) { - [self setLoopMode:[args[0] intValue]]; - result(nil); - } else if ([@"setShuffleModeEnabled" isEqualToString:call.method]) { - [self setShuffleModeEnabled:(BOOL)[args[0] boolValue]]; - result(nil); - } else if ([@"setAutomaticallyWaitsToMinimizeStalling" isEqualToString:call.method]) { - [self setAutomaticallyWaitsToMinimizeStalling:(BOOL)[args[0] boolValue]]; - result(nil); - } else if ([@"seek" isEqualToString:call.method]) { - CMTime position = args[0] == [NSNull null] ? 
kCMTimePositiveInfinity : CMTimeMake([args[0] intValue], 1000); - [self seek:position index:args[1] completionHandler:^(BOOL finished) { - result(nil); - }]; - result(nil); - } else if ([@"dispose" isEqualToString:call.method]) { - [self dispose]; - result(nil); - } else if ([@"concatenating.add" isEqualToString:call.method]) { - [self concatenatingAdd:(NSString*)args[0] source:(NSDictionary*)args[1]]; - result(nil); - } else if ([@"concatenating.insert" isEqualToString:call.method]) { - [self concatenatingInsert:(NSString*)args[0] index:[args[1] intValue] source:(NSDictionary*)args[2]]; - result(nil); - } else if ([@"concatenating.addAll" isEqualToString:call.method]) { - [self concatenatingAddAll:(NSString*)args[0] sources:(NSArray*)args[1]]; - result(nil); - } else if ([@"concatenating.insertAll" isEqualToString:call.method]) { - [self concatenatingInsertAll:(NSString*)args[0] index:[args[1] intValue] sources:(NSArray*)args[2]]; - result(nil); - } else if ([@"concatenating.removeAt" isEqualToString:call.method]) { - [self concatenatingRemoveAt:(NSString*)args[0] index:(int)args[1]]; - result(nil); - } else if ([@"concatenating.removeRange" isEqualToString:call.method]) { - [self concatenatingRemoveRange:(NSString*)args[0] start:[args[1] intValue] end:[args[2] intValue]]; - result(nil); - } else if ([@"concatenating.move" isEqualToString:call.method]) { - [self concatenatingMove:(NSString*)args[0] currentIndex:[args[1] intValue] newIndex:[args[2] intValue]]; - result(nil); - } else if ([@"concatenating.clear" isEqualToString:call.method]) { - [self concatenatingClear:(NSString*)args[0]]; - result(nil); - } else { - result(FlutterMethodNotImplemented); - } -} - -// Untested -- (void)concatenatingAdd:(NSString *)catId source:(NSDictionary *)source { - [self concatenatingInsertAll:catId index:-1 sources:@[source]]; -} - -// Untested -- (void)concatenatingInsert:(NSString *)catId index:(int)index source:(NSDictionary *)source { - [self concatenatingInsertAll:catId 
index:index sources:@[source]]; -} - -// Untested -- (void)concatenatingAddAll:(NSString *)catId sources:(NSArray *)sources { - [self concatenatingInsertAll:catId index:-1 sources:sources]; -} - -// Untested -- (void)concatenatingInsertAll:(NSString *)catId index:(int)index sources:(NSArray *)sources { - // Find all duplicates of the identified ConcatenatingAudioSource. - NSMutableArray *matches = [[NSMutableArray alloc] init]; - [_audioSource findById:catId matches:matches]; - // Add each new source to each match. - for (int i = 0; i < matches.count; i++) { - ConcatenatingAudioSource *catSource = (ConcatenatingAudioSource *)matches[i]; - int idx = index >= 0 ? index : catSource.count; - NSMutableArray *audioSources = [self decodeAudioSources:sources]; - for (int j = 0; j < audioSources.count; j++) { - AudioSource *audioSource = audioSources[j]; - [catSource insertSource:audioSource atIndex:(idx + j)]; - } - } - // Index the new audio sources. - _indexedAudioSources = [[NSMutableArray alloc] init]; - [_audioSource buildSequence:_indexedAudioSources treeIndex:0]; - for (int i = 0; i < [_indexedAudioSources count]; i++) { - IndexedAudioSource *audioSource = _indexedAudioSources[i]; - if (!audioSource.isAttached) { - audioSource.playerItem.audioSource = audioSource; - [self addItemObservers:audioSource.playerItem]; - } - } - [self updateOrder]; - if (_player.currentItem) { - _index = [self indexForItem:_player.currentItem]; - } else { - _index = 0; - } - [self enqueueFrom:_index]; - // Notify each new IndexedAudioSource that it's been attached to the player. 
- for (int i = 0; i < [_indexedAudioSources count]; i++) { - if (!_indexedAudioSources[i].isAttached) { - [_indexedAudioSources[i] attach:_player]; - } - } - [self broadcastPlaybackEvent]; -} - -// Untested -- (void)concatenatingRemoveAt:(NSString *)catId index:(int)index { - [self concatenatingRemoveRange:catId start:index end:(index + 1)]; -} - -// Untested -- (void)concatenatingRemoveRange:(NSString *)catId start:(int)start end:(int)end { - // Find all duplicates of the identified ConcatenatingAudioSource. - NSMutableArray *matches = [[NSMutableArray alloc] init]; - [_audioSource findById:catId matches:matches]; - // Remove range from each match. - for (int i = 0; i < matches.count; i++) { - ConcatenatingAudioSource *catSource = (ConcatenatingAudioSource *)matches[i]; - int endIndex = end >= 0 ? end : catSource.count; - [catSource removeSourcesFromIndex:start toIndex:endIndex]; - } - // Re-index the remaining audio sources. - NSArray *oldIndexedAudioSources = _indexedAudioSources; - _indexedAudioSources = [[NSMutableArray alloc] init]; - [_audioSource buildSequence:_indexedAudioSources treeIndex:0]; - for (int i = 0, j = 0; i < _indexedAudioSources.count; i++, j++) { - IndexedAudioSource *audioSource = _indexedAudioSources[i]; - while (audioSource != oldIndexedAudioSources[j]) { - [self removeItemObservers:oldIndexedAudioSources[j].playerItem]; - if (j < _index) { - _index--; - } else if (j == _index) { - // The currently playing item was removed. - } - j++; - } - } - [self updateOrder]; - if (_index >= _indexedAudioSources.count) _index = _indexedAudioSources.count - 1; - if (_index < 0) _index = 0; - [self enqueueFrom:_index]; - [self broadcastPlaybackEvent]; -} - -// Untested -- (void)concatenatingMove:(NSString *)catId currentIndex:(int)currentIndex newIndex:(int)newIndex { - // Find all duplicates of the identified ConcatenatingAudioSource. 
- NSMutableArray *matches = [[NSMutableArray alloc] init]; - [_audioSource findById:catId matches:matches]; - // Move range within each match. - for (int i = 0; i < matches.count; i++) { - ConcatenatingAudioSource *catSource = (ConcatenatingAudioSource *)matches[i]; - [catSource moveSourceFromIndex:currentIndex toIndex:newIndex]; - } - // Re-index the audio sources. - _indexedAudioSources = [[NSMutableArray alloc] init]; - [_audioSource buildSequence:_indexedAudioSources treeIndex:0]; - _index = [self indexForItem:_player.currentItem]; - [self broadcastPlaybackEvent]; -} - -// Untested -- (void)concatenatingClear:(NSString *)catId { - [self concatenatingRemoveRange:catId start:0 end:-1]; -} - -- (FlutterError*)onListenWithArguments:(id)arguments eventSink:(FlutterEventSink)eventSink { - _eventSink = eventSink; - return nil; -} - -- (FlutterError*)onCancelWithArguments:(id)arguments { - _eventSink = nil; - return nil; -} - -- (void)checkForDiscontinuity { - if (!_eventSink) return; - if (!_playing || CMTIME_IS_VALID(_seekPos) || _processingState == completed) return; - int position = [self getCurrentPosition]; - if (_processingState == buffering) { - if (position > _lastPosition) { - [self leaveBuffering:@"stall ended"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - } - } else { - long long now = (long long)([[NSDate date] timeIntervalSince1970] * 1000.0); - long long timeSinceLastUpdate = now - _updateTime; - long long expectedPosition = _updatePosition + (long long)(timeSinceLastUpdate * _player.rate); - long long drift = position - expectedPosition; - //NSLog(@"position: %d, drift: %lld", position, drift); - // Update if we've drifted or just started observing - if (_updateTime == 0L) { - [self broadcastPlaybackEvent]; - } else if (drift < -100) { - [self enterBuffering:@"stalling"]; - NSLog(@"Drift: %lld", drift); - [self updatePosition]; - [self broadcastPlaybackEvent]; - } - } - _lastPosition = position; -} - -- (void)enterBuffering:(NSString 
*)reason { - NSLog(@"ENTER BUFFERING: %@", reason); - _processingState = buffering; -} - -- (void)leaveBuffering:(NSString *)reason { - NSLog(@"LEAVE BUFFERING: %@", reason); - _processingState = ready; -} - -- (void)broadcastPlaybackEvent { - if (!_eventSink) return; - _eventSink(@{ - @"processingState": @(_processingState), - @"updatePosition": @(_updatePosition), - @"updateTime": @(_updateTime), - // TODO: buffer position - @"bufferedPosition": @(_updatePosition), - // TODO: Icy Metadata - @"icyMetadata": [NSNull null], - @"duration": @([self getDuration]), - @"currentIndex": @(_index), - }); -} - -- (int)getCurrentPosition { - if (_processingState == none || _processingState == loading) { - return 0; - } else if (CMTIME_IS_VALID(_seekPos)) { - return (int)(1000 * CMTimeGetSeconds(_seekPos)); - } else if (_indexedAudioSources) { - int ms = (int)(1000 * CMTimeGetSeconds(_indexedAudioSources[_index].position)); - if (ms < 0) ms = 0; - return ms; - } else { - return 0; - } -} - -- (int)getBufferedPosition { - if (_processingState == none || _processingState == loading) { - return 0; - } else if (_indexedAudioSources) { - int ms = (int)(1000 * CMTimeGetSeconds(_indexedAudioSources[_index].bufferedPosition)); - if (ms < 0) ms = 0; - return ms; - } else { - return 0; - } -} - -- (int)getDuration { - if (_processingState == none) { - return -1; - } else if (_indexedAudioSources) { - int v = (int)(1000 * CMTimeGetSeconds(_indexedAudioSources[_index].duration)); - return v; - } else { - return 0; - } -} - -- (void)removeItemObservers:(AVPlayerItem *)playerItem { - [playerItem removeObserver:self forKeyPath:@"status"]; - [playerItem removeObserver:self forKeyPath:@"playbackBufferEmpty"]; - [playerItem removeObserver:self forKeyPath:@"playbackBufferFull"]; - //[playerItem removeObserver:self forKeyPath:@"playbackLikelyToKeepUp"]; - [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemDidPlayToEndTimeNotification object:playerItem]; - 
[[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemFailedToPlayToEndTimeNotification object:playerItem]; - [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemPlaybackStalledNotification object:playerItem]; -} - -- (void)addItemObservers:(AVPlayerItem *)playerItem { - // Get notified when the item is loaded or had an error loading - [playerItem addObserver:self forKeyPath:@"status" options:NSKeyValueObservingOptionNew context:nil]; - // Get notified of the buffer state - [playerItem addObserver:self forKeyPath:@"playbackBufferEmpty" options:NSKeyValueObservingOptionNew context:nil]; - [playerItem addObserver:self forKeyPath:@"playbackBufferFull" options:NSKeyValueObservingOptionNew context:nil]; - [playerItem addObserver:self forKeyPath:@"loadedTimeRanges" options:NSKeyValueObservingOptionNew context:nil]; - //[playerItem addObserver:self forKeyPath:@"playbackLikelyToKeepUp" options:NSKeyValueObservingOptionNew context:nil]; - // Get notified when playback has reached the end - [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(onComplete:) name:AVPlayerItemDidPlayToEndTimeNotification object:playerItem]; - // Get notified when playback stops due to a failure (currently unused) - [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(onFailToComplete:) name:AVPlayerItemFailedToPlayToEndTimeNotification object:playerItem]; - // Get notified when playback stalls (currently unused) - [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(onItemStalled:) name:AVPlayerItemPlaybackStalledNotification object:playerItem]; -} - -- (NSMutableArray *)decodeAudioSources:(NSArray *)data { - NSMutableArray *array = [[NSMutableArray alloc] init]; - for (int i = 0; i < [data count]; i++) { - AudioSource *source = [self decodeAudioSource:data[i]]; - [array addObject:source]; - } - return array; -} - -- (AudioSource *)decodeAudioSource:(NSDictionary *)data { - NSString *type = 
data[@"type"]; - if ([@"progressive" isEqualToString:type]) { - return [[UriAudioSource alloc] initWithId:data[@"id"] uri:data[@"uri"]]; - } else if ([@"dash" isEqualToString:type]) { - return [[UriAudioSource alloc] initWithId:data[@"id"] uri:data[@"uri"]]; - } else if ([@"hls" isEqualToString:type]) { - return [[UriAudioSource alloc] initWithId:data[@"id"] uri:data[@"uri"]]; - } else if ([@"concatenating" isEqualToString:type]) { - return [[ConcatenatingAudioSource alloc] initWithId:data[@"id"] - audioSources:[self decodeAudioSources:data[@"audioSources"]]]; - } else if ([@"clipping" isEqualToString:type]) { - return [[ClippingAudioSource alloc] initWithId:data[@"id"] - audioSource:[self decodeAudioSource:data[@"audioSource"]] - start:data[@"start"] - end:data[@"end"]]; - } else if ([@"looping" isEqualToString:type]) { - NSMutableArray *childSources = [NSMutableArray new]; - int count = [data[@"count"] intValue]; - for (int i = 0; i < count; i++) { - [childSources addObject:[self decodeAudioSource:data[@"audioSource"]]]; - } - return [[LoopingAudioSource alloc] initWithId:data[@"id"] audioSources:childSources]; - } else { - return nil; - } -} - -- (void)enqueueFrom:(int)index { - int oldIndex = _index; - _index = index; - - // Update the queue while keeping the currently playing item untouched. - - /* NSLog(@"before reorder: _player.items.count: ", _player.items.count); */ - /* [self dumpQueue]; */ - - // First, remove all _player items except for the currently playing one (if any). - IndexedPlayerItem *oldItem = _player.currentItem; - IndexedPlayerItem *existingItem = nil; - NSArray *oldPlayerItems = [NSArray arrayWithArray:_player.items]; - // In the first pass, preserve the old and new items. - for (int i = 0; i < oldPlayerItems.count; i++) { - if (oldPlayerItems[i] == _indexedAudioSources[_index].playerItem) { - // Preserve and tag new item if it is already in the queue. 
- existingItem = oldPlayerItems[i]; - } else if (oldPlayerItems[i] == oldItem) { - // Temporarily preserve old item, just to avoid jumping to - // intermediate queue positions unnecessarily. We only want to jump - // once to _index. - } else { - [_player removeItem:oldPlayerItems[i]]; - } - } - // In the second pass, remove the old item (if different from new item). - if (_index != oldIndex) { - [_player removeItem:oldItem]; - } - - /* NSLog(@"inter order: _player.items.count: ", _player.items.count); */ - /* [self dumpQueue]; */ - - // Regenerate queue - BOOL include = NO; - for (int i = 0; i < [_order count]; i++) { - int si = [_order[i] intValue]; - if (si == _index) include = YES; - if (include && _indexedAudioSources[si].playerItem != existingItem) { - [_player insertItem:_indexedAudioSources[si].playerItem afterItem:nil]; - } - } - - /* NSLog(@"after reorder: _player.items.count: ", _player.items.count); */ - /* [self dumpQueue]; */ - - if (_processingState != loading && oldItem != _indexedAudioSources[_index].playerItem) { - // || !_player.currentItem.playbackLikelyToKeepUp; - if (_player.currentItem.playbackBufferEmpty) { - [self enterBuffering:@"enqueueFrom playbackBufferEmpty"]; - } else { - [self leaveBuffering:@"enqueueFrom !playbackBufferEmpty"]; - } - [self updatePosition]; - } -} - -- (void)updatePosition { - _updatePosition = [self getCurrentPosition]; - _updateTime = (long long)([[NSDate date] timeIntervalSince1970] * 1000.0); -} - -- (void)load:(NSDictionary *)source result:(FlutterResult)result { - if (!_playing) { - [_player pause]; - } - if (_processingState == loading) { - [self abortExistingConnection]; - } - _loadResult = result; - _index = 0; - [self updatePosition]; - _processingState = loading; - [self broadcastPlaybackEvent]; - // Remove previous observers - if (_indexedAudioSources) { - for (int i = 0; i < [_indexedAudioSources count]; i++) { - [self removeItemObservers:_indexedAudioSources[i].playerItem]; - } - } - // Decode audio 
source - if (_audioSource && [@"clipping" isEqualToString:source[@"type"]]) { - // Check if we're clipping an audio source that was previously loaded. - UriAudioSource *child = nil; - if ([_audioSource isKindOfClass:[ClippingAudioSource class]]) { - ClippingAudioSource *clipper = (ClippingAudioSource *)_audioSource; - child = clipper.audioSource; - } else if ([_audioSource isKindOfClass:[UriAudioSource class]]) { - child = (UriAudioSource *)_audioSource; - } - if (child) { - _audioSource = [[ClippingAudioSource alloc] initWithId:source[@"id"] - audioSource:child - start:source[@"start"] - end:source[@"end"]]; - } else { - _audioSource = [self decodeAudioSource:source]; - } - } else { - _audioSource = [self decodeAudioSource:source]; - } - _indexedAudioSources = [[NSMutableArray alloc] init]; - [_audioSource buildSequence:_indexedAudioSources treeIndex:0]; - for (int i = 0; i < [_indexedAudioSources count]; i++) { - IndexedAudioSource *source = _indexedAudioSources[i]; - [self addItemObservers:source.playerItem]; - source.playerItem.audioSource = source; - } - [self updateOrder]; - // Set up an empty player - if (!_player) { - _player = [[AVQueuePlayer alloc] initWithItems:@[]]; - if (@available(macOS 10.12, iOS 10.0, *)) { - _player.automaticallyWaitsToMinimizeStalling = _automaticallyWaitsToMinimizeStalling; - // TODO: Remove these observers in dispose. - [_player addObserver:self - forKeyPath:@"timeControlStatus" - options:NSKeyValueObservingOptionNew - context:nil]; - } - [_player addObserver:self - forKeyPath:@"currentItem" - options:NSKeyValueObservingOptionNew - context:nil]; - // TODO: learn about the different ways to define weakSelf. 
- //__weak __typeof__(self) weakSelf = self; - //typeof(self) __weak weakSelf = self; - __unsafe_unretained typeof(self) weakSelf = self; - if (@available(macOS 10.12, iOS 10.0, *)) {} - else { - _timeObserver = [_player addPeriodicTimeObserverForInterval:CMTimeMake(200, 1000) - queue:nil - usingBlock:^(CMTime time) { - [weakSelf checkForDiscontinuity]; - } - ]; - } - } - // Initialise the AVQueuePlayer with items. - [self enqueueFrom:0]; - // Notify each IndexedAudioSource that it's been attached to the player. - for (int i = 0; i < [_indexedAudioSources count]; i++) { - [_indexedAudioSources[i] attach:_player]; - } - - if (_player.currentItem.status == AVPlayerItemStatusReadyToPlay) { - _loadResult(@([self getDuration])); - _loadResult = nil; - } else { - // We send result after the playerItem is ready in observeValueForKeyPath. - } - [self broadcastPlaybackEvent]; -} - -- (void)updateOrder { - if (_shuffleModeEnabled) { - [_audioSource shuffle:0 currentIndex: _index]; - } - _orderInv = [NSMutableArray arrayWithCapacity:[_indexedAudioSources count]]; - for (int i = 0; i < [_indexedAudioSources count]; i++) { - [_orderInv addObject:@(0)]; - } - if (_shuffleModeEnabled) { - _order = [_audioSource getShuffleOrder]; - } else { - NSMutableArray *order = [[NSMutableArray alloc] init]; - for (int i = 0; i < [_indexedAudioSources count]; i++) { - [order addObject:@(i)]; - } - _order = order; - } - for (int i = 0; i < [_indexedAudioSources count]; i++) { - _orderInv[[_order[i] intValue]] = @(i); - } -} - -- (void)onItemStalled:(NSNotification *)notification { - IndexedPlayerItem *playerItem = (IndexedPlayerItem *)notification.object; - NSLog(@"onItemStalled"); -} - -- (void)onFailToComplete:(NSNotification *)notification { - IndexedPlayerItem *playerItem = (IndexedPlayerItem *)notification.object; - NSLog(@"onFailToComplete"); -} - -- (void)onComplete:(NSNotification *)notification { - NSLog(@"onComplete"); - if (_loopMode == loopOne) { - [self seek:kCMTimeZero 
index:@(_index) completionHandler:^(BOOL finished) { - // XXX: Not necessary? - [self play]; - }]; - } else { - IndexedPlayerItem *endedPlayerItem = (IndexedPlayerItem *)notification.object; - IndexedAudioSource *endedSource = endedPlayerItem.audioSource; - // When an item ends, seek back to its beginning. - [endedSource seek:kCMTimeZero]; - - if ([_orderInv[_index] intValue] + 1 < [_order count]) { - // account for automatic move to next item - _index = [_order[[_orderInv[_index] intValue] + 1] intValue]; - NSLog(@"advance to next: index = %d", _index); - [self broadcastPlaybackEvent]; - } else { - // reached end of playlist - if (_loopMode == loopAll) { - NSLog(@"Loop back to first item"); - // Loop back to the beginning - // TODO: Currently there will be a gap at the loop point. - // Maybe we can do something clever by temporarily adding the - // first playlist item at the end of the queue, although this - // will affect any code that assumes the queue always - // corresponds to a contiguous region of the indexed audio - // sources. - // For now we just do a seek back to the start. - if ([_order count] == 1) { - [self seek:kCMTimeZero index:[NSNull null] completionHandler:^(BOOL finished) { - // XXX: Necessary? - [self play]; - }]; - } else { - [self seek:kCMTimeZero index:_order[0] completionHandler:^(BOOL finished) { - // XXX: Necessary? 
- [self play]; - }]; - } - } else { - [self complete]; - } - } - } -} - -- (void)observeValueForKeyPath:(NSString *)keyPath - ofObject:(id)object - change:(NSDictionary *)change - context:(void *)context { - - if ([keyPath isEqualToString:@"status"]) { - IndexedPlayerItem *playerItem = (IndexedPlayerItem *)object; - AVPlayerItemStatus status = AVPlayerItemStatusUnknown; - NSNumber *statusNumber = change[NSKeyValueChangeNewKey]; - if ([statusNumber isKindOfClass:[NSNumber class]]) { - status = statusNumber.intValue; - } - switch (status) { - case AVPlayerItemStatusReadyToPlay: { - if (playerItem != _player.currentItem) return; - // Detect buffering in different ways depending on whether we're playing - if (_playing) { - if (@available(macOS 10.12, iOS 10.0, *)) { - if (_player.timeControlStatus == AVPlayerTimeControlStatusWaitingToPlayAtSpecifiedRate) { - [self enterBuffering:@"ready to play: playing, waitingToPlay"]; - } else { - [self leaveBuffering:@"ready to play: playing, !waitingToPlay"]; - } - [self updatePosition]; - } else { - // If this happens when we're playing, check whether buffer is confirmed - if (_bufferUnconfirmed && !_player.currentItem.playbackBufferFull) { - // Stay in bufering - XXX Test - [self enterBuffering:@"ready to play: playing, bufferUnconfirmed && !playbackBufferFull"]; - } else { - if (_player.currentItem.playbackBufferEmpty) { - // !_player.currentItem.playbackLikelyToKeepUp; - [self enterBuffering:@"ready to play: playing, playbackBufferEmpty"]; - } else { - [self leaveBuffering:@"ready to play: playing, !playbackBufferEmpty"]; - } - [self updatePosition]; - } - } - } else { - if (_player.currentItem.playbackBufferEmpty) { - [self enterBuffering:@"ready to play: !playing, playbackBufferEmpty"]; - // || !_player.currentItem.playbackLikelyToKeepUp; - } else { - [self leaveBuffering:@"ready to play: !playing, !playbackBufferEmpty"]; - } - [self updatePosition]; - } - [self broadcastPlaybackEvent]; - if (_loadResult) { - 
_loadResult(@([self getDuration])); - _loadResult = nil; - } - break; - } - case AVPlayerItemStatusFailed: { - NSLog(@"AVPlayerItemStatusFailed"); - [self sendErrorForItem:playerItem]; - break; - } - case AVPlayerItemStatusUnknown: - break; - } - } else if ([keyPath isEqualToString:@"playbackBufferEmpty"] || [keyPath isEqualToString:@"playbackBufferFull"]) { - // Use these values to detect buffering. - IndexedPlayerItem *playerItem = (IndexedPlayerItem *)object; - if (playerItem != _player.currentItem) return; - // If there's a seek in progress, these values are unreliable - if (CMTIME_IS_VALID(_seekPos)) return; - // Detect buffering in different ways depending on whether we're playing - if (_playing) { - if (@available(macOS 10.12, iOS 10.0, *)) { - // We handle this with timeControlStatus instead. - } else { - if (_bufferUnconfirmed && playerItem.playbackBufferFull) { - _bufferUnconfirmed = NO; - [self leaveBuffering:@"playing, _bufferUnconfirmed && playbackBufferFull"]; - [self updatePosition]; - NSLog(@"Buffering confirmed! 
leaving buffering"); - [self broadcastPlaybackEvent]; - } - } - } else { - if (playerItem.playbackBufferEmpty) { - [self enterBuffering:@"!playing, playbackBufferEmpty"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - } else if (!playerItem.playbackBufferEmpty || playerItem.playbackBufferFull) { - _processingState = ready; - [self leaveBuffering:@"!playing, !playbackBufferEmpty || playbackBufferFull"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - } - } - /* } else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"]) { */ - } else if ([keyPath isEqualToString:@"timeControlStatus"]) { - if (@available(macOS 10.12, iOS 10.0, *)) { - AVPlayerTimeControlStatus status = AVPlayerTimeControlStatusPaused; - NSNumber *statusNumber = change[NSKeyValueChangeNewKey]; - if ([statusNumber isKindOfClass:[NSNumber class]]) { - status = statusNumber.intValue; - } - switch (status) { - case AVPlayerTimeControlStatusPaused: - //NSLog(@"AVPlayerTimeControlStatusPaused"); - break; - case AVPlayerTimeControlStatusWaitingToPlayAtSpecifiedRate: - //NSLog(@"AVPlayerTimeControlStatusWaitingToPlayAtSpecifiedRate"); - if (_processingState != completed) { - [self enterBuffering:@"timeControlStatus"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - } else { - NSLog(@"Ignoring wait signal because we reached the end"); - } - break; - case AVPlayerTimeControlStatusPlaying: - [self leaveBuffering:@"timeControlStatus"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - break; - } - } - } else if ([keyPath isEqualToString:@"currentItem"] && _player.currentItem) { - if (_player.currentItem.status == AVPlayerItemStatusFailed) { - if ([_orderInv[_index] intValue] + 1 < [_order count]) { - // account for automatic move to next item - _index = [_order[[_orderInv[_index] intValue] + 1] intValue]; - NSLog(@"advance to next on error: index = %d", _index); - [self broadcastPlaybackEvent]; - } else { - NSLog(@"error on last item"); - } - return; - } else { - 
int expectedIndex = [self indexForItem:_player.currentItem]; - if (_index != expectedIndex) { - // AVQueuePlayer will sometimes skip over error items without - // notifying this observer. - NSLog(@"Queue change detected. Adjusting index from %d -> %d", _index, expectedIndex); - _index = expectedIndex; - [self broadcastPlaybackEvent]; - } - } - //NSLog(@"currentItem changed. _index=%d", _index); - _bufferUnconfirmed = YES; - // If we've skipped or transitioned to a new item and we're not - // currently in the middle of a seek - if (CMTIME_IS_INVALID(_seekPos) && _player.currentItem.status == AVPlayerItemStatusReadyToPlay) { - [self updatePosition]; - IndexedAudioSource *source = ((IndexedPlayerItem *)_player.currentItem).audioSource; - // We should already be at position zero but for - // ClippingAudioSource it might be off by some milliseconds so we - // consider anything <= 100 as close enough. - if ((int)(1000 * CMTimeGetSeconds(source.position)) > 100) { - NSLog(@"On currentItem change, seeking back to zero"); - BOOL shouldResumePlayback = NO; - AVPlayerActionAtItemEnd originalEndAction = _player.actionAtItemEnd; - if (_playing && CMTimeGetSeconds(CMTimeSubtract(source.position, source.duration)) >= 0) { - NSLog(@"Need to pause while rewinding because we're at the end"); - shouldResumePlayback = YES; - _player.actionAtItemEnd = AVPlayerActionAtItemEndPause; - [_player pause]; - } - [self enterBuffering:@"currentItem changed, seeking"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - [source seek:kCMTimeZero completionHandler:^(BOOL finished) { - [self leaveBuffering:@"currentItem changed, finished seek"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - if (shouldResumePlayback) { - _player.actionAtItemEnd = originalEndAction; - // TODO: This logic is almost duplicated in seek. See if we can reuse this code. - [_player play]; - } - }]; - } else { - // Already at zero, no need to seek. 
- } - } - } else if ([keyPath isEqualToString:@"loadedTimeRanges"]) { - IndexedPlayerItem *playerItem = (IndexedPlayerItem *)object; - if (playerItem != _player.currentItem) return; - int pos = [self getBufferedPosition]; - if (pos != _bufferedPosition) { - _bufferedPosition = pos; - [self broadcastPlaybackEvent]; - } - } -} - -- (void)sendErrorForItem:(IndexedPlayerItem *)playerItem { - FlutterError *flutterError = [FlutterError errorWithCode:[NSString stringWithFormat:@"%d", playerItem.error.code] - message:playerItem.error.localizedDescription - details:nil]; - [self sendError:flutterError playerItem:playerItem]; -} - -- (void)sendError:(FlutterError *)flutterError playerItem:(IndexedPlayerItem *)playerItem { - NSLog(@"sendError"); - if (_loadResult && playerItem == _player.currentItem) { - _loadResult(flutterError); - _loadResult = nil; - } - if (_eventSink) { - // Broadcast all errors even if they aren't on the current item. - _eventSink(flutterError); - } -} - -- (void)abortExistingConnection { - FlutterError *flutterError = [FlutterError errorWithCode:@"abort" - message:@"Connection aborted" - details:nil]; - [self sendError:flutterError playerItem:nil]; -} - -- (int)indexForItem:(IndexedPlayerItem *)playerItem { - for (int i = 0; i < _indexedAudioSources.count; i++) { - if (_indexedAudioSources[i].playerItem == playerItem) { - return i; - } - } - return -1; -} - -- (void)play { - [self play:nil]; -} - -- (void)play:(FlutterResult)result { - if (result) { - if (_playResult) { - NSLog(@"INTERRUPTING PLAY"); - _playResult(nil); - } - _playResult = result; - } - _playing = YES; -#if TARGET_OS_IPHONE - if (_configuredSession) { - [[AVAudioSession sharedInstance] setActive:YES error:nil]; - } -#endif - [_player play]; - [self updatePosition]; - if (@available(macOS 10.12, iOS 10.0, *)) {} - else { - if (_bufferUnconfirmed && !_player.currentItem.playbackBufferFull) { - [self enterBuffering:@"play, _bufferUnconfirmed && !playbackBufferFull"]; - [self 
broadcastPlaybackEvent]; - } - } -} - -- (void)pause { - _playing = NO; - [_player pause]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - if (_playResult) { - NSLog(@"PLAY FINISHED DUE TO PAUSE"); - _playResult(nil); - _playResult = nil; - } -} - -- (void)complete { - [self updatePosition]; - _processingState = completed; - [self broadcastPlaybackEvent]; - if (_playResult) { - NSLog(@"PLAY FINISHED DUE TO COMPLETE"); - _playResult(nil); - _playResult = nil; - } -} - -- (void)setVolume:(float)volume { - [_player setVolume:volume]; -} - -- (void)setSpeed:(float)speed { - if (speed == 1.0 - || (speed < 1.0 && _player.currentItem.canPlaySlowForward) - || (speed > 1.0 && _player.currentItem.canPlayFastForward)) { - _player.rate = speed; - } - [self updatePosition]; -} - -- (void)setLoopMode:(int)loopMode { - _loopMode = loopMode; - if (_player) { - switch (_loopMode) { - case loopOne: - _player.actionAtItemEnd = AVPlayerActionAtItemEndPause; // AVPlayerActionAtItemEndNone - break; - default: - _player.actionAtItemEnd = AVPlayerActionAtItemEndAdvance; - } - } -} - -- (void)setShuffleModeEnabled:(BOOL)shuffleModeEnabled { - NSLog(@"setShuffleModeEnabled: %d", shuffleModeEnabled); - _shuffleModeEnabled = shuffleModeEnabled; - if (!_audioSource) return; - - [self updateOrder]; - - [self enqueueFrom:_index]; -} - -- (void)dumpQueue { - for (int i = 0; i < _player.items.count; i++) { - IndexedPlayerItem *playerItem = _player.items[i]; - for (int j = 0; j < _indexedAudioSources.count; j++) { - IndexedAudioSource *source = _indexedAudioSources[j]; - if (source.playerItem == playerItem) { - NSLog(@"- %d", j); - break; - } - } - } -} - -- (void)setAutomaticallyWaitsToMinimizeStalling:(bool)automaticallyWaitsToMinimizeStalling { - _automaticallyWaitsToMinimizeStalling = automaticallyWaitsToMinimizeStalling; - if (@available(macOS 10.12, iOS 10.0, *)) { - if(_player) { - _player.automaticallyWaitsToMinimizeStalling = automaticallyWaitsToMinimizeStalling; - } - } -} - 
-- (void)seek:(CMTime)position index:(NSNumber *)newIndex completionHandler:(void (^)(BOOL))completionHandler { - int index = _index; - if (newIndex != [NSNull null]) { - index = [newIndex intValue]; - } - if (index != _index) { - // Jump to a new item - /* if (_playing && index == _index + 1) { */ - /* // Special case for jumping to the very next item */ - /* NSLog(@"seek to next item: %d -> %d", _index, index); */ - /* [_indexedAudioSources[_index] seek:kCMTimeZero]; */ - /* _index = index; */ - /* [_player advanceToNextItem]; */ - /* [self broadcastPlaybackEvent]; */ - /* } else */ - { - // Jump to a distant item - //NSLog(@"seek# jump to distant item: %d -> %d", _index, index); - if (_playing) { - [_player pause]; - } - [_indexedAudioSources[_index] seek:kCMTimeZero]; - // The "currentItem" key observer will respect that a seek is already in progress - _seekPos = position; - [self updatePosition]; - [self enqueueFrom:index]; - IndexedAudioSource *source = _indexedAudioSources[_index]; - if (abs((int)(1000 * CMTimeGetSeconds(CMTimeSubtract(source.position, position)))) > 100) { - [self enterBuffering:@"seek to index"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - [source seek:position completionHandler:^(BOOL finished) { - if (@available(macOS 10.12, iOS 10.0, *)) { - if (_playing) { - // Handled by timeControlStatus - } else { - if (_bufferUnconfirmed && !_player.currentItem.playbackBufferFull) { - // Stay in buffering - } else if (source.playerItem.status == AVPlayerItemStatusReadyToPlay) { - [self leaveBuffering:@"seek to index finished, (!bufferUnconfirmed || playbackBufferFull) && ready to play"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - } - } - } else { - if (_bufferUnconfirmed && !_player.currentItem.playbackBufferFull) { - // Stay in buffering - } else if (source.playerItem.status == AVPlayerItemStatusReadyToPlay) { - [self leaveBuffering:@"seek to index finished, (!bufferUnconfirmed || playbackBufferFull) && ready to 
play"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - } - } - if (_playing) { - [_player play]; - } - _seekPos = kCMTimeInvalid; - [self broadcastPlaybackEvent]; - if (completionHandler) { - completionHandler(finished); - } - }]; - } else { - _seekPos = kCMTimeInvalid; - if (_playing) { - [_player play]; - } - } - } - } else { - // Seek within an item - if (_playing) { - [_player pause]; - } - _seekPos = position; - //NSLog(@"seek. enter buffering. pos = %d", (int)(1000*CMTimeGetSeconds(_indexedAudioSources[_index].position))); - // TODO: Move this into a separate method so it can also - // be used in skip. - [self enterBuffering:@"seek"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - [_indexedAudioSources[_index] seek:position completionHandler:^(BOOL finished) { - [self updatePosition]; - if (_playing) { - // If playing, buffering will be detected either by: - // 1. checkForDiscontinuity - // 2. timeControlStatus - [_player play]; - } else { - // If not playing, there is no reliable way to detect - // when buffering has completed, so we use - // !playbackBufferEmpty. Although this always seems to - // be full even right after a seek. 
- if (_player.currentItem.playbackBufferEmpty) { - [self enterBuffering:@"seek finished, playbackBufferEmpty"]; - } else { - [self leaveBuffering:@"seek finished, !playbackBufferEmpty"]; - } - [self updatePosition]; - if (_processingState != buffering) { - [self broadcastPlaybackEvent]; - } - } - _seekPos = kCMTimeInvalid; - [self broadcastPlaybackEvent]; - if (completionHandler) { - completionHandler(finished); - } - }]; - } -} - -- (void)dispose { - if (_processingState != none) { - [_player pause]; - _processingState = none; - [self broadcastPlaybackEvent]; - } - if (_timeObserver) { - [_player removeTimeObserver:_timeObserver]; - _timeObserver = 0; - } - if (_indexedAudioSources) { - for (int i = 0; i < [_indexedAudioSources count]; i++) { - [self removeItemObservers:_indexedAudioSources[i].playerItem]; - } - } - if (_player) { - [_player removeObserver:self forKeyPath:@"currentItem"]; - if (@available(macOS 10.12, iOS 10.0, *)) { - [_player removeObserver:self forKeyPath:@"timeControlStatus"]; - } - _player = nil; - } - // Untested: - // [_eventChannel setStreamHandler:nil]; - // [_methodChannel setMethodHandler:nil]; -} - -@end diff --git a/just_audio/ios/Classes/AudioSource.h b/just_audio/ios/Classes/AudioSource.h deleted file mode 100644 index c192f33..0000000 --- a/just_audio/ios/Classes/AudioSource.h +++ /dev/null @@ -1,13 +0,0 @@ -#import - -@interface AudioSource : NSObject - -@property (readonly, nonatomic) NSString* sourceId; - -- (instancetype)initWithId:(NSString *)sid; -- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex; -- (void)findById:(NSString *)sourceId matches:(NSMutableArray *)matches; -- (NSArray *)getShuffleOrder; -- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex; - -@end diff --git a/just_audio/ios/Classes/AudioSource.m b/just_audio/ios/Classes/AudioSource.m deleted file mode 100644 index 81534f1..0000000 --- a/just_audio/ios/Classes/AudioSource.m +++ /dev/null @@ -1,37 +0,0 @@ -#import "AudioSource.h" 
-#import - -@implementation AudioSource { - NSString *_sourceId; -} - -- (instancetype)initWithId:(NSString *)sid { - self = [super init]; - NSAssert(self, @"super init cannot be nil"); - _sourceId = sid; - return self; -} - -- (NSString *)sourceId { - return _sourceId; -} - -- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex { - return 0; -} - -- (void)findById:(NSString *)sourceId matches:(NSMutableArray *)matches { - if ([_sourceId isEqualToString:sourceId]) { - [matches addObject:self]; - } -} - -- (NSArray *)getShuffleOrder { - return @[]; -} - -- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex { - return 0; -} - -@end diff --git a/just_audio/ios/Classes/ClippingAudioSource.h b/just_audio/ios/Classes/ClippingAudioSource.h deleted file mode 100644 index 127019e..0000000 --- a/just_audio/ios/Classes/ClippingAudioSource.h +++ /dev/null @@ -1,11 +0,0 @@ -#import "AudioSource.h" -#import "UriAudioSource.h" -#import - -@interface ClippingAudioSource : IndexedAudioSource - -@property (readonly, nonatomic) UriAudioSource* audioSource; - -- (instancetype)initWithId:(NSString *)sid audioSource:(UriAudioSource *)audioSource start:(NSNumber *)start end:(NSNumber *)end; - -@end diff --git a/just_audio/ios/Classes/ClippingAudioSource.m b/just_audio/ios/Classes/ClippingAudioSource.m deleted file mode 100644 index 2f3b174..0000000 --- a/just_audio/ios/Classes/ClippingAudioSource.m +++ /dev/null @@ -1,79 +0,0 @@ -#import "AudioSource.h" -#import "ClippingAudioSource.h" -#import "IndexedPlayerItem.h" -#import "UriAudioSource.h" -#import - -@implementation ClippingAudioSource { - UriAudioSource *_audioSource; - CMTime _start; - CMTime _end; -} - -- (instancetype)initWithId:(NSString *)sid audioSource:(UriAudioSource *)audioSource start:(NSNumber *)start end:(NSNumber *)end { - self = [super initWithId:sid]; - NSAssert(self, @"super init cannot be nil"); - _audioSource = audioSource; - _start = start == [NSNull null] ? 
kCMTimeZero : CMTimeMake([start intValue], 1000); - _end = end == [NSNull null] ? kCMTimeInvalid : CMTimeMake([end intValue], 1000); - return self; -} - -- (UriAudioSource *)audioSource { - return _audioSource; -} - -- (void)findById:(NSString *)sourceId matches:(NSMutableArray *)matches { - [super findById:sourceId matches:matches]; - [_audioSource findById:sourceId matches:matches]; -} - -- (void)attach:(AVQueuePlayer *)player { - [super attach:player]; - _audioSource.playerItem.forwardPlaybackEndTime = _end; - // XXX: Not needed since currentItem observer handles it? - [self seek:kCMTimeZero]; -} - -- (IndexedPlayerItem *)playerItem { - return _audioSource.playerItem; -} - -- (NSArray *)getShuffleOrder { - return @[@(0)]; -} - -- (void)play:(AVQueuePlayer *)player { -} - -- (void)pause:(AVQueuePlayer *)player { -} - -- (void)stop:(AVQueuePlayer *)player { -} - -- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler { - if (!completionHandler || (self.playerItem.status == AVPlayerItemStatusReadyToPlay)) { - CMTime absPosition = CMTimeAdd(_start, position); - [_audioSource.playerItem seekToTime:absPosition toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero completionHandler:completionHandler]; - } -} - -- (CMTime)duration { - return CMTimeSubtract(CMTIME_IS_INVALID(_end) ? self.playerItem.duration : _end, _start); -} - -- (void)setDuration:(CMTime)duration { -} - -- (CMTime)position { - return CMTimeSubtract(self.playerItem.currentTime, _start); -} - -- (CMTime)bufferedPosition { - CMTime pos = CMTimeSubtract(_audioSource.bufferedPosition, _start); - CMTime dur = [self duration]; - return CMTimeCompare(pos, dur) >= 0 ? 
dur : pos; -} - -@end diff --git a/just_audio/ios/Classes/ConcatenatingAudioSource.h b/just_audio/ios/Classes/ConcatenatingAudioSource.h deleted file mode 100644 index 2c2350a..0000000 --- a/just_audio/ios/Classes/ConcatenatingAudioSource.h +++ /dev/null @@ -1,13 +0,0 @@ -#import "AudioSource.h" -#import - -@interface ConcatenatingAudioSource : AudioSource - -@property (readonly, nonatomic) int count; - -- (instancetype)initWithId:(NSString *)sid audioSources:(NSMutableArray *)audioSources; -- (void)insertSource:(AudioSource *)audioSource atIndex:(int)index; -- (void)removeSourcesFromIndex:(int)start toIndex:(int)end; -- (void)moveSourceFromIndex:(int)currentIndex toIndex:(int)newIndex; - -@end diff --git a/just_audio/ios/Classes/ConcatenatingAudioSource.m b/just_audio/ios/Classes/ConcatenatingAudioSource.m deleted file mode 100644 index bd7b713..0000000 --- a/just_audio/ios/Classes/ConcatenatingAudioSource.m +++ /dev/null @@ -1,109 +0,0 @@ -#import "AudioSource.h" -#import "ConcatenatingAudioSource.h" -#import -#import - -@implementation ConcatenatingAudioSource { - NSMutableArray *_audioSources; - NSMutableArray *_shuffleOrder; -} - -- (instancetype)initWithId:(NSString *)sid audioSources:(NSMutableArray *)audioSources { - self = [super initWithId:sid]; - NSAssert(self, @"super init cannot be nil"); - _audioSources = audioSources; - return self; -} - -- (int)count { - return _audioSources.count; -} - -- (void)insertSource:(AudioSource *)audioSource atIndex:(int)index { - [_audioSources insertObject:audioSource atIndex:index]; -} - -- (void)removeSourcesFromIndex:(int)start toIndex:(int)end { - if (end == -1) end = _audioSources.count; - for (int i = start; i < end; i++) { - [_audioSources removeObjectAtIndex:start]; - } -} - -- (void)moveSourceFromIndex:(int)currentIndex toIndex:(int)newIndex { - AudioSource *source = _audioSources[currentIndex]; - [_audioSources removeObjectAtIndex:currentIndex]; - [_audioSources insertObject:source atIndex:newIndex]; -} - -- 
(int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex { - for (int i = 0; i < [_audioSources count]; i++) { - treeIndex = [_audioSources[i] buildSequence:sequence treeIndex:treeIndex]; - } - return treeIndex; -} - -- (void)findById:(NSString *)sourceId matches:(NSMutableArray *)matches { - [super findById:sourceId matches:matches]; - for (int i = 0; i < [_audioSources count]; i++) { - [_audioSources[i] findById:sourceId matches:matches]; - } -} - -- (NSArray *)getShuffleOrder { - NSMutableArray *order = [NSMutableArray new]; - int offset = [order count]; - NSMutableArray *childOrders = [NSMutableArray new]; // array of array of ints - for (int i = 0; i < [_audioSources count]; i++) { - AudioSource *audioSource = _audioSources[i]; - NSArray *childShuffleOrder = [audioSource getShuffleOrder]; - NSMutableArray *offsetChildShuffleOrder = [NSMutableArray new]; - for (int j = 0; j < [childShuffleOrder count]; j++) { - [offsetChildShuffleOrder addObject:@([childShuffleOrder[j] integerValue] + offset)]; - } - [childOrders addObject:offsetChildShuffleOrder]; - offset += [childShuffleOrder count]; - } - for (int i = 0; i < [_audioSources count]; i++) { - [order addObjectsFromArray:childOrders[[_shuffleOrder[i] integerValue]]]; - } - return order; -} - -- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex { - int currentChildIndex = -1; - for (int i = 0; i < [_audioSources count]; i++) { - int indexBefore = treeIndex; - AudioSource *child = _audioSources[i]; - treeIndex = [child shuffle:treeIndex currentIndex:currentIndex]; - if (currentIndex >= indexBefore && currentIndex < treeIndex) { - currentChildIndex = i; - } else {} - } - // Shuffle so that the current child is first in the shuffle order - _shuffleOrder = [NSMutableArray arrayWithCapacity:[_audioSources count]]; - for (int i = 0; i < [_audioSources count]; i++) { - [_shuffleOrder addObject:@(0)]; - } - NSLog(@"shuffle: audioSources.count=%d and shuffleOrder.count=%d", [_audioSources count], 
[_shuffleOrder count]); - // First generate a random shuffle - for (int i = 0; i < [_audioSources count]; i++) { - int j = arc4random_uniform(i + 1); - _shuffleOrder[i] = _shuffleOrder[j]; - _shuffleOrder[j] = @(i); - } - // Then bring currentIndex to the front - if (currentChildIndex != -1) { - for (int i = 1; i < [_audioSources count]; i++) { - if ([_shuffleOrder[i] integerValue] == currentChildIndex) { - NSNumber *v = _shuffleOrder[0]; - _shuffleOrder[0] = _shuffleOrder[i]; - _shuffleOrder[i] = v; - break; - } - } - } - return treeIndex; -} - -@end diff --git a/just_audio/ios/Classes/IndexedAudioSource.h b/just_audio/ios/Classes/IndexedAudioSource.h deleted file mode 100644 index a308a4f..0000000 --- a/just_audio/ios/Classes/IndexedAudioSource.h +++ /dev/null @@ -1,21 +0,0 @@ -#import "AudioSource.h" -#import "IndexedPlayerItem.h" -#import -#import - -@interface IndexedAudioSource : AudioSource - -@property (readonly, nonatomic) IndexedPlayerItem *playerItem; -@property (readwrite, nonatomic) CMTime duration; -@property (readonly, nonatomic) CMTime position; -@property (readonly, nonatomic) CMTime bufferedPosition; -@property (readonly, nonatomic) BOOL isAttached; - -- (void)attach:(AVQueuePlayer *)player; -- (void)play:(AVQueuePlayer *)player; -- (void)pause:(AVQueuePlayer *)player; -- (void)stop:(AVQueuePlayer *)player; -- (void)seek:(CMTime)position; -- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler; - -@end diff --git a/just_audio/ios/Classes/IndexedAudioSource.m b/just_audio/ios/Classes/IndexedAudioSource.m deleted file mode 100644 index 316f900..0000000 --- a/just_audio/ios/Classes/IndexedAudioSource.m +++ /dev/null @@ -1,68 +0,0 @@ -#import "IndexedAudioSource.h" -#import "IndexedPlayerItem.h" -#import - -@implementation IndexedAudioSource { - BOOL _isAttached; -} - -- (instancetype)initWithId:(NSString *)sid { - self = [super init]; - NSAssert(self, @"super init cannot be nil"); - _isAttached = NO; - return self; -} - -- 
(IndexedPlayerItem *)playerItem { - return nil; -} - -- (BOOL)isAttached { - return _isAttached; -} - -- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex { - [sequence addObject:self]; - return treeIndex + 1; -} - -- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex { - return treeIndex + 1; -} - -- (void)attach:(AVQueuePlayer *)player { - _isAttached = YES; -} - -- (void)play:(AVQueuePlayer *)player { -} - -- (void)pause:(AVQueuePlayer *)player { -} - -- (void)stop:(AVQueuePlayer *)player { -} - -- (void)seek:(CMTime)position { - [self seek:position completionHandler:nil]; -} - -- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler { -} - -- (CMTime)duration { - return kCMTimeInvalid; -} - -- (void)setDuration:(CMTime)duration { -} - -- (CMTime)position { - return kCMTimeInvalid; -} - -- (CMTime)bufferedPosition { - return kCMTimeInvalid; -} - -@end diff --git a/just_audio/ios/Classes/IndexedPlayerItem.h b/just_audio/ios/Classes/IndexedPlayerItem.h deleted file mode 100644 index 5d4a11c..0000000 --- a/just_audio/ios/Classes/IndexedPlayerItem.h +++ /dev/null @@ -1,9 +0,0 @@ -#import - -@class IndexedAudioSource; - -@interface IndexedPlayerItem : AVPlayerItem - -@property (readwrite, nonatomic) IndexedAudioSource *audioSource; - -@end diff --git a/just_audio/ios/Classes/IndexedPlayerItem.m b/just_audio/ios/Classes/IndexedPlayerItem.m deleted file mode 100644 index 87fafe0..0000000 --- a/just_audio/ios/Classes/IndexedPlayerItem.m +++ /dev/null @@ -1,16 +0,0 @@ -#import "IndexedPlayerItem.h" -#import "IndexedAudioSource.h" - -@implementation IndexedPlayerItem { - IndexedAudioSource *_audioSource; -} - --(void)setAudioSource:(IndexedAudioSource *)audioSource { - _audioSource = audioSource; -} - --(IndexedAudioSource *)audioSource { - return _audioSource; -} - -@end diff --git a/just_audio/ios/Classes/JustAudioPlugin.h b/just_audio/ios/Classes/JustAudioPlugin.h deleted file mode 100644 index a694322..0000000 --- 
a/just_audio/ios/Classes/JustAudioPlugin.h +++ /dev/null @@ -1,4 +0,0 @@ -#import - -@interface JustAudioPlugin : NSObject -@end diff --git a/just_audio/ios/Classes/JustAudioPlugin.m b/just_audio/ios/Classes/JustAudioPlugin.m deleted file mode 100644 index 982a260..0000000 --- a/just_audio/ios/Classes/JustAudioPlugin.m +++ /dev/null @@ -1,55 +0,0 @@ -#import "JustAudioPlugin.h" -#import "AudioPlayer.h" -#import -#include - -@implementation JustAudioPlugin { - NSObject* _registrar; - BOOL _configuredSession; -} - -+ (void)registerWithRegistrar:(NSObject*)registrar { - FlutterMethodChannel* channel = [FlutterMethodChannel - methodChannelWithName:@"com.ryanheise.just_audio.methods" - binaryMessenger:[registrar messenger]]; - JustAudioPlugin* instance = [[JustAudioPlugin alloc] initWithRegistrar:registrar]; - [registrar addMethodCallDelegate:instance channel:channel]; -} - -- (instancetype)initWithRegistrar:(NSObject *)registrar { - self = [super init]; - NSAssert(self, @"super init cannot be nil"); - _registrar = registrar; - return self; -} - -- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result { - if ([@"init" isEqualToString:call.method]) { - NSArray* args = (NSArray*)call.arguments; - NSString* playerId = args[0]; - /*AudioPlayer* player =*/ [[AudioPlayer alloc] initWithRegistrar:_registrar playerId:playerId configuredSession:_configuredSession]; - result(nil); - } else if ([@"setIosCategory" isEqualToString:call.method]) { -#if TARGET_OS_IPHONE - NSNumber* categoryIndex = (NSNumber*)call.arguments; - AVAudioSessionCategory category = nil; - switch (categoryIndex.integerValue) { - case 0: category = AVAudioSessionCategoryAmbient; break; - case 1: category = AVAudioSessionCategorySoloAmbient; break; - case 2: category = AVAudioSessionCategoryPlayback; break; - case 3: category = AVAudioSessionCategoryRecord; break; - case 4: category = AVAudioSessionCategoryPlayAndRecord; break; - case 5: category = AVAudioSessionCategoryMultiRoute; 
break; - } - if (category) { - _configuredSession = YES; - } - [[AVAudioSession sharedInstance] setCategory:category error:nil]; -#endif - result(nil); - } else { - result(FlutterMethodNotImplemented); - } -} - -@end diff --git a/just_audio/ios/Classes/LoopingAudioSource.h b/just_audio/ios/Classes/LoopingAudioSource.h deleted file mode 100644 index 7c524a9..0000000 --- a/just_audio/ios/Classes/LoopingAudioSource.h +++ /dev/null @@ -1,8 +0,0 @@ -#import "AudioSource.h" -#import - -@interface LoopingAudioSource : AudioSource - -- (instancetype)initWithId:(NSString *)sid audioSources:(NSArray *)audioSources; - -@end diff --git a/just_audio/ios/Classes/LoopingAudioSource.m b/just_audio/ios/Classes/LoopingAudioSource.m deleted file mode 100644 index ba4b52b..0000000 --- a/just_audio/ios/Classes/LoopingAudioSource.m +++ /dev/null @@ -1,53 +0,0 @@ -#import "AudioSource.h" -#import "LoopingAudioSource.h" -#import - -@implementation LoopingAudioSource { - // An array of duplicates - NSArray *_audioSources; // -} - -- (instancetype)initWithId:(NSString *)sid audioSources:(NSArray *)audioSources { - self = [super initWithId:sid]; - NSAssert(self, @"super init cannot be nil"); - _audioSources = audioSources; - return self; -} - -- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex { - for (int i = 0; i < [_audioSources count]; i++) { - treeIndex = [_audioSources[i] buildSequence:sequence treeIndex:treeIndex]; - } - return treeIndex; -} - -- (void)findById:(NSString *)sourceId matches:(NSMutableArray *)matches { - [super findById:sourceId matches:matches]; - for (int i = 0; i < [_audioSources count]; i++) { - [_audioSources[i] findById:sourceId matches:matches]; - } -} - -- (NSArray *)getShuffleOrder { - NSMutableArray *order = [NSMutableArray new]; - int offset = (int)[order count]; - for (int i = 0; i < [_audioSources count]; i++) { - AudioSource *audioSource = _audioSources[i]; - NSArray *childShuffleOrder = [audioSource getShuffleOrder]; - for (int j = 
0; j < [childShuffleOrder count]; j++) { - [order addObject:@([childShuffleOrder[j] integerValue] + offset)]; - } - offset += [childShuffleOrder count]; - } - return order; -} - -- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex { - // TODO: This should probably shuffle the same way on all duplicates. - for (int i = 0; i < [_audioSources count]; i++) { - treeIndex = [_audioSources[i] shuffle:treeIndex currentIndex:currentIndex]; - } - return treeIndex; -} - -@end diff --git a/just_audio/ios/Classes/UriAudioSource.h b/just_audio/ios/Classes/UriAudioSource.h deleted file mode 100644 index 6ee3c2e..0000000 --- a/just_audio/ios/Classes/UriAudioSource.h +++ /dev/null @@ -1,8 +0,0 @@ -#import "IndexedAudioSource.h" -#import - -@interface UriAudioSource : IndexedAudioSource - -- (instancetype)initWithId:(NSString *)sid uri:(NSString *)uri; - -@end diff --git a/just_audio/ios/Classes/UriAudioSource.m b/just_audio/ios/Classes/UriAudioSource.m deleted file mode 100644 index 91321d4..0000000 --- a/just_audio/ios/Classes/UriAudioSource.m +++ /dev/null @@ -1,79 +0,0 @@ -#import "UriAudioSource.h" -#import "IndexedAudioSource.h" -#import "IndexedPlayerItem.h" -#import - -@implementation UriAudioSource { - NSString *_uri; - IndexedPlayerItem *_playerItem; - /* CMTime _duration; */ -} - -- (instancetype)initWithId:(NSString *)sid uri:(NSString *)uri { - self = [super initWithId:sid]; - NSAssert(self, @"super init cannot be nil"); - _uri = uri; - if ([_uri hasPrefix:@"file://"]) { - _playerItem = [[IndexedPlayerItem alloc] initWithURL:[NSURL fileURLWithPath:[_uri substringFromIndex:7]]]; - } else { - _playerItem = [[IndexedPlayerItem alloc] initWithURL:[NSURL URLWithString:_uri]]; - } - if (@available(macOS 10.13, iOS 11.0, *)) { - // This does the best at reducing distortion on voice with speeds below 1.0 - _playerItem.audioTimePitchAlgorithm = AVAudioTimePitchAlgorithmTimeDomain; - } - /* NSKeyValueObservingOptions options = */ - /* NSKeyValueObservingOptionOld | 
NSKeyValueObservingOptionNew; */ - /* [_playerItem addObserver:self */ - /* forKeyPath:@"duration" */ - /* options:options */ - /* context:nil]; */ - return self; -} - -- (IndexedPlayerItem *)playerItem { - return _playerItem; -} - -- (NSArray *)getShuffleOrder { - return @[@(0)]; -} - -- (void)play:(AVQueuePlayer *)player { -} - -- (void)pause:(AVQueuePlayer *)player { -} - -- (void)stop:(AVQueuePlayer *)player { -} - -- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler { - if (!completionHandler || (_playerItem.status == AVPlayerItemStatusReadyToPlay)) { - [_playerItem seekToTime:position toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero completionHandler:completionHandler]; - } -} - -- (CMTime)duration { - return _playerItem.duration; -} - -- (void)setDuration:(CMTime)duration { -} - -- (CMTime)position { - return _playerItem.currentTime; -} - -- (CMTime)bufferedPosition { - NSValue *last = _playerItem.loadedTimeRanges.lastObject; - if (last) { - CMTimeRange timeRange = [last CMTimeRangeValue]; - return CMTimeAdd(timeRange.start, timeRange.duration); - } else { - return _playerItem.currentTime; - } - return kCMTimeInvalid; -} - -@end diff --git a/just_audio/ios/just_audio.podspec b/just_audio/ios/just_audio.podspec deleted file mode 100644 index ba5c7d2..0000000 --- a/just_audio/ios/just_audio.podspec +++ /dev/null @@ -1,21 +0,0 @@ -# -# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html -# -Pod::Spec.new do |s| - s.name = 'just_audio' - s.version = '0.0.1' - s.summary = 'A new flutter plugin project.' - s.description = <<-DESC -A new flutter plugin project. - DESC - s.homepage = 'http://example.com' - s.license = { :file => '../LICENSE' } - s.author = { 'Your Company' => 'email@example.com' } - s.source = { :path => '.' 
} - s.source_files = 'Classes/**/*' - s.public_header_files = 'Classes/**/*.h' - s.dependency 'Flutter' - s.platform = :ios, '8.0' - s.pod_target_xcconfig = { 'DEFINES_MODULE' => 'YES', 'VALID_ARCHS[sdk=iphonesimulator*]' => 'x86_64' } -end - diff --git a/just_audio/lib/just_audio.dart b/just_audio/lib/just_audio.dart deleted file mode 100644 index 15f1b6f..0000000 --- a/just_audio/lib/just_audio.dart +++ /dev/null @@ -1,1293 +0,0 @@ -import 'dart:async'; -import 'dart:io'; - -import 'package:flutter/foundation.dart'; -import 'package:flutter/services.dart'; -import 'package:flutter/widgets.dart'; -import 'package:path/path.dart' as p; -import 'package:path_provider/path_provider.dart'; -import 'package:rxdart/rxdart.dart'; -import 'package:uuid/uuid.dart'; - -final _uuid = Uuid(); - -/// An object to manage playing audio from a URL, a locale file or an asset. -/// -/// ``` -/// final player = AudioPlayer(); -/// await player.setUrl('https://foo.com/bar.mp3'); -/// player.play(); -/// await player.pause(); -/// await player.setClip(start: Duration(seconds: 10), end: Duration(seconds: 20)); -/// await player.play(); -/// await player.setUrl('https://foo.com/baz.mp3'); -/// await player.seek(Duration(minutes: 5)); -/// player.play(); -/// await player.pause(); -/// await player.dispose(); -/// ``` -/// -/// You must call [dispose] to release the resources used by this player, -/// including any temporary files created to cache assets. -class AudioPlayer { - static final _mainChannel = MethodChannel('com.ryanheise.just_audio.methods'); - - static Future _init(String id) async { - await _mainChannel.invokeMethod('init', [id]); - return MethodChannel('com.ryanheise.just_audio.methods.$id'); - } - - /// Configure the audio session category on iOS. This method should be called - /// before playing any audio. It has no effect on Android or Flutter for Web. 
- /// - /// Note that the default category on iOS is [IosCategory.soloAmbient], but - /// for a typical media app, Apple recommends setting this to - /// [IosCategory.playback]. If you don't call this method, `just_audio` will - /// respect any prior category that was already set on your app's audio - /// session and will leave it alone. If it hasn't been previously set, this - /// will be [IosCategory.soloAmbient]. But if another audio plugin in your - /// app has configured a particular category, that will also be left alone. - /// - /// Note: If you use other audio plugins in conjunction with this one, it is - /// possible that each of those audio plugins may override the setting you - /// choose here. (You may consider asking the developers of the other plugins - /// to provide similar configurability so that you have complete control over - /// setting the overall category that you want for your app.) - static Future setIosCategory(IosCategory category) async { - await _mainChannel.invokeMethod('setIosCategory', category.index); - } - - final Future _channel; - final String _id; - _ProxyHttpServer _proxy; - Stream _eventChannelStream; - AudioSource _audioSource; - Map _audioSources = {}; - - PlaybackEvent _playbackEvent; - StreamSubscription _eventChannelStreamSubscription; - final _playbackEventSubject = BehaviorSubject(); - Future _durationFuture; - final _durationSubject = BehaviorSubject(); - final _processingStateSubject = BehaviorSubject(); - final _playingSubject = BehaviorSubject.seeded(false); - final _volumeSubject = BehaviorSubject.seeded(1.0); - final _speedSubject = BehaviorSubject.seeded(1.0); - final _bufferedPositionSubject = BehaviorSubject(); - final _icyMetadataSubject = BehaviorSubject(); - final _playerStateSubject = BehaviorSubject(); - final _currentIndexSubject = BehaviorSubject(); - final _loopModeSubject = BehaviorSubject(); - final _shuffleModeEnabledSubject = BehaviorSubject(); - BehaviorSubject _positionSubject; - bool 
_automaticallyWaitsToMinimizeStalling = true; - - /// Creates an [AudioPlayer]. - factory AudioPlayer() => AudioPlayer._internal(_uuid.v4()); - - AudioPlayer._internal(this._id) : _channel = _init(_id) { - _playbackEvent = PlaybackEvent( - processingState: ProcessingState.none, - updatePosition: Duration.zero, - updateTime: DateTime.now(), - bufferedPosition: Duration.zero, - duration: null, - icyMetadata: null, - currentIndex: null, - ); - _playbackEventSubject.add(_playbackEvent); - _eventChannelStream = EventChannel('com.ryanheise.just_audio.events.$_id') - .receiveBroadcastStream() - .map((data) { - try { - //print("received raw event: $data"); - final duration = (data['duration'] ?? -1) < 0 - ? null - : Duration(milliseconds: data['duration']); - _durationFuture = Future.value(duration); - if (duration != _playbackEvent.duration) { - _durationSubject.add(duration); - } - _playbackEvent = PlaybackEvent( - processingState: ProcessingState.values[data['processingState']], - updatePosition: Duration(milliseconds: data['updatePosition']), - updateTime: DateTime.fromMillisecondsSinceEpoch(data['updateTime']), - bufferedPosition: Duration(milliseconds: data['bufferedPosition']), - duration: duration, - icyMetadata: data['icyMetadata'] == null - ? 
null - : IcyMetadata.fromJson(data['icyMetadata']), - currentIndex: data['currentIndex'], - ); - //print("created event object with state: ${_playbackEvent.state}"); - return _playbackEvent; - } catch (e, stacktrace) { - print("Error parsing event: $e"); - print("$stacktrace"); - rethrow; - } - }); - _eventChannelStreamSubscription = _eventChannelStream.listen( - _playbackEventSubject.add, - onError: _playbackEventSubject.addError, - ); - _processingStateSubject.addStream(playbackEventStream - .map((event) => event.processingState) - .distinct() - .handleError((err, stack) {/* noop */})); - _bufferedPositionSubject.addStream(playbackEventStream - .map((event) => event.bufferedPosition) - .distinct() - .handleError((err, stack) {/* noop */})); - _icyMetadataSubject.addStream(playbackEventStream - .map((event) => event.icyMetadata) - .distinct() - .handleError((err, stack) {/* noop */})); - _currentIndexSubject.addStream(playbackEventStream - .map((event) => event.currentIndex) - .distinct() - .handleError((err, stack) {/* noop */})); - _playerStateSubject.addStream( - Rx.combineLatest2( - playingStream, - playbackEventStream, - (playing, event) => PlayerState(playing, event.processingState)) - .distinct() - .handleError((err, stack) {/* noop */})); - } - - /// The latest [PlaybackEvent]. - PlaybackEvent get playbackEvent => _playbackEvent; - - /// A stream of [PlaybackEvent]s. - Stream get playbackEventStream => _playbackEventSubject.stream; - - /// The duration of the current audio or null if unknown. - Duration get duration => _playbackEvent.duration; - - /// The duration of the current audio or null if unknown. - Future get durationFuture => _durationFuture; - - /// The duration of the current audio. - Stream get durationStream => _durationSubject.stream; - - /// The current [ProcessingState]. - ProcessingState get processingState => _playbackEvent.processingState; - - /// A stream of [ProcessingState]s. 
- Stream get processingStateStream => - _processingStateSubject.stream; - - /// Whether the player is playing. - bool get playing => _playingSubject.value; - - /// A stream of changing [playing] states. - Stream get playingStream => _playingSubject.stream; - - /// The current volume of the player. - double get volume => _volumeSubject.value; - - /// A stream of [volume] changes. - Stream get volumeStream => _volumeSubject.stream; - - /// The current speed of the player. - double get speed => _speedSubject.value; - - /// A stream of current speed values. - Stream get speedStream => _speedSubject.stream; - - /// The position up to which buffered audio is available. - Duration get bufferedPosition => _bufferedPositionSubject.value; - - /// A stream of buffered positions. - Stream get bufferedPositionStream => - _bufferedPositionSubject.stream; - - /// The latest ICY metadata received through the audio source. - IcyMetadata get icyMetadata => _playbackEvent.icyMetadata; - - /// A stream of ICY metadata received through the audio source. - Stream get icyMetadataStream => _icyMetadataSubject.stream; - - /// The current player state containing only the processing and playing - /// states. - PlayerState get playerState => _playerStateSubject.value; - - /// A stream of [PlayerState]s. - Stream get playerStateStream => _playerStateSubject.stream; - - /// The index of the current item. - int get currentIndex => _currentIndexSubject.value; - - /// A stream broadcasting the current item. - Stream get currentIndexStream => _currentIndexSubject.stream; - - /// Whether there is another item after the current index. - bool get hasNext => - _audioSource != null && - currentIndex != null && - currentIndex + 1 < _audioSource.sequence.length; - - /// Whether there is another item before the current index. - bool get hasPrevious => - _audioSource != null && currentIndex != null && currentIndex > 0; - - /// The current loop mode. 
- LoopMode get loopMode => _loopModeSubject.value; - - /// A stream of [LoopMode]s. - Stream get loopModeStream => _loopModeSubject.stream; - - /// Whether shuffle mode is currently enabled. - bool get shuffleModeEnabled => _shuffleModeEnabledSubject.value; - - /// A stream of the shuffle mode status. - Stream get shuffleModeEnabledStream => - _shuffleModeEnabledSubject.stream; - - /// Whether the player should automatically delay playback in order to - /// minimize stalling. (iOS 10.0 or later only) - bool get automaticallyWaitsToMinimizeStalling => - _automaticallyWaitsToMinimizeStalling; - - /// The current position of the player. - Duration get position { - if (playing && processingState == ProcessingState.ready) { - final result = _playbackEvent.updatePosition + - (DateTime.now().difference(_playbackEvent.updateTime)) * speed; - return _playbackEvent.duration == null || - result <= _playbackEvent.duration - ? result - : _playbackEvent.duration; - } else { - return _playbackEvent.updatePosition; - } - } - - /// A stream tracking the current position of this player, suitable for - /// animating a seek bar. To ensure a smooth animation, this stream emits - /// values more frequently on short items where the seek bar moves more - /// quickly, and less frequenly on long items where the seek bar moves more - /// slowly. The interval between each update will be no quicker than once - /// every 16ms and no slower than once every 200ms. - /// - /// See [createPositionStream] for more control over the stream parameters. - Stream get positionStream { - if (_positionSubject == null) { - _positionSubject = BehaviorSubject(); - _positionSubject.addStream(createPositionStream( - steps: 800, - minPeriod: Duration(milliseconds: 16), - maxPeriod: Duration(milliseconds: 200))); - } - return _positionSubject.stream; - } - - /// Creates a new stream periodically tracking the current position of this - /// player. 
The stream will aim to emit [steps] position updates from the - /// beginning to the end of the current audio source, at intervals of - /// [duration] / [steps]. This interval will be clipped between [minPeriod] - /// and [maxPeriod]. This stream will not emit values while audio playback is - /// paused or stalled. - /// - /// Note: each time this method is called, a new stream is created. If you - /// intend to use this stream multiple times, you should hold a reference to - /// the returned stream and close it once you are done. - Stream createPositionStream({ - int steps = 800, - Duration minPeriod = const Duration(milliseconds: 200), - Duration maxPeriod = const Duration(milliseconds: 200), - }) { - assert(minPeriod <= maxPeriod); - assert(minPeriod > Duration.zero); - Duration duration() => this.duration ?? Duration.zero; - Duration step() { - var s = duration() ~/ steps; - if (s < minPeriod) s = minPeriod; - if (s > maxPeriod) s = maxPeriod; - return s; - } - - StreamController controller = StreamController.broadcast(); - Timer currentTimer; - StreamSubscription durationSubscription; - StreamSubscription playbackEventSubscription; - void yieldPosition(Timer timer) { - if (controller.isClosed) { - timer.cancel(); - durationSubscription?.cancel(); - playbackEventSubscription?.cancel(); - return; - } - if (_durationSubject.isClosed) { - timer.cancel(); - durationSubscription?.cancel(); - playbackEventSubscription?.cancel(); - controller.close(); - return; - } - controller.add(position); - } - - currentTimer = Timer.periodic(step(), yieldPosition); - durationSubscription = durationStream.listen((duration) { - currentTimer.cancel(); - currentTimer = Timer.periodic(step(), yieldPosition); - }); - playbackEventSubscription = playbackEventStream.listen((event) { - controller.add(position); - }); - return controller.stream.distinct(); - } - - /// Convenience method to load audio from a URL with optional headers, - /// equivalent to: - /// - /// ``` - /// 
load(AudioSource.uri(Uri.parse(url), headers: headers)); - /// ``` - /// - /// - Future setUrl(String url, {Map headers}) => - load(AudioSource.uri(Uri.parse(url), headers: headers)); - - /// Convenience method to load audio from a file, equivalent to: - /// - /// ``` - /// load(AudioSource.uri(Uri.file(filePath))); - /// ``` - Future setFilePath(String filePath) => - load(AudioSource.uri(Uri.file(filePath))); - - /// Convenience method to load audio from an asset, equivalent to: - /// - /// ``` - /// load(AudioSource.uri(Uri.parse('asset://$filePath'))); - /// ``` - Future setAsset(String assetPath) => - load(AudioSource.uri(Uri.parse('asset://$assetPath'))); - - /// Loads audio from an [AudioSource] and completes when the audio is ready - /// to play with the duration of that audio, or null if the duration is unknown. - /// - /// This method throws: - /// - /// * [PlayerException] if the audio source was unable to be loaded. - /// * [PlayerInterruptedException] if another call to [load] happened before - /// this call completed. - Future load(AudioSource source) async { - try { - _audioSource = source; - final duration = await _load(source); - // Wait for loading state to pass. - await processingStateStream - .firstWhere((state) => state != ProcessingState.loading); - return duration; - } catch (e) { - _audioSource = null; - _audioSources.clear(); - rethrow; - } - } - - _registerAudioSource(AudioSource source) { - _audioSources[source._id] = source; - } - - Future _load(AudioSource source) async { - try { - if (!kIsWeb && source._requiresHeaders) { - if (_proxy == null) { - _proxy = _ProxyHttpServer(); - await _proxy.start(); - } - } - await source._setup(this); - _durationFuture = _invokeMethod('load', [source.toJson()]).then( - (ms) => (ms == null || ms < 0) ? 
null : Duration(milliseconds: ms)); - final duration = await _durationFuture; - _durationSubject.add(duration); - return duration; - } on PlatformException catch (e) { - try { - throw PlayerException(int.parse(e.code), e.message); - } on FormatException catch (_) { - if (e.code == 'abort') { - throw PlayerInterruptedException(e.message); - } else { - throw PlayerException(9999999, e.message); - } - } - } - } - - /// Clips the current [AudioSource] to the given [start] and [end] - /// timestamps. If [start] is null, it will be reset to the start of the - /// original [AudioSource]. If [end] is null, it will be reset to the end of - /// the original [AudioSource]. This method cannot be called from the - /// [AudioPlaybackState.none] state. - Future setClip({Duration start, Duration end}) async { - final duration = await _load(start == null && end == null - ? _audioSource - : ClippingAudioSource( - child: _audioSource, - start: start, - end: end, - )); - // Wait for loading state to pass. - await processingStateStream - .firstWhere((state) => state != ProcessingState.loading); - return duration; - } - - /// Tells the player to play audio as soon as an audio source is loaded and - /// ready to play. The [Future] returned by this method completes when the - /// playback completes or is paused or stopped. If the player is already - /// playing, this method completes immediately. - /// - /// This method causes [playing] to become true, and it will remain true - /// until [pause] or [stop] is called. This means that if playback completes, - /// and then you [seek] to an earlier position in the audio, playback will - /// continue playing from that position. If you instead wish to [pause] or - /// [stop] playback on completion, you can call either method as soon as - /// [processingState] becomes [ProcessingState.completed] by listening to - /// [processingStateStream]. 
- Future play() async { - if (playing) return; - _playingSubject.add(true); - await _invokeMethod('play'); - } - - /// Pauses the currently playing media. This method does nothing if - /// ![playing]. - Future pause() async { - if (!playing) return; - // Update local state immediately so that queries aren't surprised. - _playbackEvent = _playbackEvent.copyWith( - updatePosition: position, - updateTime: DateTime.now(), - ); - _playbackEventSubject.add(_playbackEvent); - _playingSubject.add(false); - // TODO: perhaps modify platform side to ensure new state is broadcast - // before this method returns. - await _invokeMethod('pause'); - } - - /// Convenience method to pause and seek to zero. - Future stop() async { - await pause(); - await seek(Duration.zero); - } - - /// Sets the volume of this player, where 1.0 is normal volume. - Future setVolume(final double volume) async { - _volumeSubject.add(volume); - await _invokeMethod('setVolume', [volume]); - } - - /// Sets the playback speed of this player, where 1.0 is normal speed. - Future setSpeed(final double speed) async { - _playbackEvent = _playbackEvent.copyWith( - updatePosition: position, - updateTime: DateTime.now(), - ); - _playbackEventSubject.add(_playbackEvent); - _speedSubject.add(speed); - await _invokeMethod('setSpeed', [speed]); - } - - /// Sets the [LoopMode]. The gapless looping support is as follows: - /// - /// * Android: supported - /// * iOS/macOS: not supported, however, gapless looping can be achieved by - /// using [LoopingAudioSource]. - /// * Web: not supported - Future setLoopMode(LoopMode mode) async { - _loopModeSubject.add(mode); - await _invokeMethod('setLoopMode', [mode.index]); - } - - /// Sets whether shuffle mode is enabled. - Future setShuffleModeEnabled(bool enabled) async { - _shuffleModeEnabledSubject.add(enabled); - await _invokeMethod('setShuffleModeEnabled', [enabled]); - } - - /// Sets automaticallyWaitsToMinimizeStalling for AVPlayer in iOS 10.0 or later, defaults to true. 
- /// Has no effect on Android clients - Future setAutomaticallyWaitsToMinimizeStalling( - final bool automaticallyWaitsToMinimizeStalling) async { - _automaticallyWaitsToMinimizeStalling = - automaticallyWaitsToMinimizeStalling; - await _invokeMethod('setAutomaticallyWaitsToMinimizeStalling', - [automaticallyWaitsToMinimizeStalling]); - } - - /// Seeks to a particular [position]. If a composition of multiple - /// [AudioSource]s has been loaded, you may also specify [index] to seek to a - /// particular item within that sequence. This method has no effect unless - /// an audio source has been loaded. - Future seek(final Duration position, {int index}) async { - switch (processingState) { - case ProcessingState.none: - case ProcessingState.loading: - return; - default: - _playbackEvent = _playbackEvent.copyWith( - updatePosition: position, - updateTime: DateTime.now(), - ); - _playbackEventSubject.add(_playbackEvent); - await _invokeMethod('seek', [position?.inMilliseconds, index]); - } - } - - /// Seek to the next item. - Future seekToNext() async { - if (hasNext) { - await seek(Duration.zero, index: currentIndex + 1); - } - } - - /// Seek to the previous item. - Future seekToPrevious() async { - if (hasPrevious) { - await seek(Duration.zero, index: currentIndex - 1); - } - } - - /// Release all resources associated with this player. You must invoke this - /// after you are done with the player. 
- Future dispose() async { - await _invokeMethod('dispose'); - _audioSource = null; - _audioSources.values.forEach((s) => s._dispose()); - _audioSources.clear(); - _proxy?.stop(); - await _durationSubject.close(); - await _eventChannelStreamSubscription.cancel(); - await _loopModeSubject.close(); - await _shuffleModeEnabledSubject.close(); - await _playingSubject.close(); - await _volumeSubject.close(); - await _speedSubject.close(); - if (_positionSubject != null) { - await _positionSubject.close(); - } - } - - Future _invokeMethod(String method, [dynamic args]) async => - (await _channel).invokeMethod(method, args); -} - -/// Captures the details of any error accessing, loading or playing an audio -/// source, including an invalid or inaccessible URL, or an audio encoding that -/// could not be understood. -class PlayerException { - /// On iOS and macOS, maps to `NSError.code`. On Android, maps to - /// `ExoPlaybackException.type`. On Web, maps to `MediaError.code`. - final int code; - - /// On iOS and macOS, maps to `NSError.localizedDescription`. On Android, - /// maps to `ExoPlaybackException.getMessage()`. On Web, a generic message - /// is provided. - final String message; - - PlayerException(this.code, this.message); - - @override - String toString() => "($code) $message"; -} - -/// An error that occurs when one operation on the player has been interrupted -/// (e.g. by another simultaneous operation). -class PlayerInterruptedException { - final String message; - - PlayerInterruptedException(this.message); - - @override - String toString() => "$message"; -} - -/// Encapsulates the playback state and current position of the player. -class PlaybackEvent { - /// The current processing state. - final ProcessingState processingState; - - /// When the last time a position discontinuity happened, as measured in time - /// since the epoch. - final DateTime updateTime; - - /// The position at [updateTime]. 
- final Duration updatePosition; - - /// The buffer position. - final Duration bufferedPosition; - - /// The media duration, or null if unknown. - final Duration duration; - - /// The latest ICY metadata received through the audio stream. - final IcyMetadata icyMetadata; - - /// The index of the currently playing item. - final int currentIndex; - - PlaybackEvent({ - @required this.processingState, - @required this.updateTime, - @required this.updatePosition, - @required this.bufferedPosition, - @required this.duration, - @required this.icyMetadata, - @required this.currentIndex, - }); - - PlaybackEvent copyWith({ - ProcessingState processingState, - DateTime updateTime, - Duration updatePosition, - Duration bufferedPosition, - double speed, - Duration duration, - IcyMetadata icyMetadata, - UriAudioSource currentIndex, - }) => - PlaybackEvent( - processingState: processingState ?? this.processingState, - updateTime: updateTime ?? this.updateTime, - updatePosition: updatePosition ?? this.updatePosition, - bufferedPosition: bufferedPosition ?? this.bufferedPosition, - duration: duration ?? this.duration, - icyMetadata: icyMetadata ?? this.icyMetadata, - currentIndex: currentIndex ?? this.currentIndex, - ); - - @override - String toString() => - "{processingState=$processingState, updateTime=$updateTime, updatePosition=$updatePosition}"; -} - -/// Enumerates the different processing states of a player. -enum ProcessingState { - /// The player has not loaded an [AudioSource]. - none, - - /// The player is loading an [AudioSource]. - loading, - - /// The player is buffering audio and unable to play. - buffering, - - /// The player is has enough audio buffered and is able to play. - ready, - - /// The player has reached the end of the audio. - completed, -} - -/// Encapsulates the playing and processing states. 
/// These two states vary
/// orthogonally, and so if [processingState] is [ProcessingState.buffering],
/// you can check [playing] to determine whether the buffering occurred while
/// the player was playing or while the player was paused.
class PlayerState {
  /// Whether the player will play when [processingState] is
  /// [ProcessingState.ready].
  final bool playing;

  /// The current processing state of the player.
  final ProcessingState processingState;

  PlayerState(this.playing, this.processingState);

  @override
  String toString() => 'playing=$playing,processingState=$processingState';

  @override
  int get hashCode => toString().hashCode;

  @override
  bool operator ==(dynamic other) =>
      other is PlayerState &&
      other.playing == playing &&
      other.processingState == processingState;
}

/// A single ICY metadata update (stream title and URL).
class IcyInfo {
  final String title;
  final String url;

  IcyInfo({@required this.title, @required this.url});

  IcyInfo.fromJson(Map json) : this(title: json['title'], url: json['url']);

  @override
  String toString() => 'title=$title,url=$url';

  @override
  int get hashCode => toString().hashCode;

  @override
  bool operator ==(dynamic other) =>
      other is IcyInfo && other.toString() == toString();
}

/// The ICY headers sent at the start of an ICY stream.
class IcyHeaders {
  final int bitrate;
  final String genre;
  final String name;
  final int metadataInterval;
  final String url;
  final bool isPublic;

  IcyHeaders({
    @required this.bitrate,
    @required this.genre,
    @required this.name,
    @required this.metadataInterval,
    @required this.url,
    @required this.isPublic,
  });

  IcyHeaders.fromJson(Map json)
      : this(
          bitrate: json['bitrate'],
          genre: json['genre'],
          name: json['name'],
          metadataInterval: json['metadataInterval'],
          url: json['url'],
          isPublic: json['isPublic'],
        );

  @override
  String toString() =>
      'bitrate=$bitrate,genre=$genre,name=$name,metadataInterval=$metadataInterval,url=$url,isPublic=$isPublic';

  @override
  int get hashCode =>
toString().hashCode; - - @override - bool operator ==(dynamic other) => - other is IcyHeaders && other?.toString() == toString(); -} - -class IcyMetadata { - final IcyInfo info; - final IcyHeaders headers; - - IcyMetadata({@required this.info, @required this.headers}); - - IcyMetadata.fromJson(Map json) - : this(info: json['info'], headers: json['headers']); - - @override - int get hashCode => info.hashCode ^ headers.hashCode; - - @override - bool operator ==(dynamic other) => - other is IcyMetadata && other?.info == info && other?.headers == headers; -} - -/// The audio session categories on iOS, to be used with -/// [AudioPlayer.setIosCategory]. -enum IosCategory { - ambient, - soloAmbient, - playback, - record, - playAndRecord, - multiRoute, -} - -/// A local proxy HTTP server for making remote GET requests with headers. -/// -/// TODO: Recursively attach headers to items in playlists like m3u8. -class _ProxyHttpServer { - HttpServer _server; - - /// Maps request keys to [_ProxyRequest]s. - final Map _uriMap = {}; - - /// The port this server is bound to on localhost. This is set only after - /// [start] has completed. - int get port => _server.port; - - /// Associate headers with a URL. This may be called only after [start] has - /// completed. - Uri addUrl(Uri url, Map headers) { - final path = _requestKey(url); - _uriMap[path] = _ProxyRequest(url, headers); - return url.replace( - scheme: 'http', - host: InternetAddress.loopbackIPv4.address, - port: port, - ); - } - - /// A unique key for each request that can be processed by this proxy, - /// made up of the URL path and query string. It is not possible to - /// simultaneously track requests that have the same URL path and query - /// but differ in other respects such as the port or headers. - String _requestKey(Uri uri) => '${uri.path}?${uri.query}'; - - /// Starts the server. 
- Future start() async { - _server = await HttpServer.bind(InternetAddress.loopbackIPv4, 0); - _server.listen((request) async { - if (request.method == 'GET') { - final path = _requestKey(request.uri); - final proxyRequest = _uriMap[path]; - final originRequest = await HttpClient().getUrl(proxyRequest.uri); - - // Rewrite request headers - final host = originRequest.headers.value('host'); - originRequest.headers.clear(); - request.headers.forEach((name, value) { - originRequest.headers.set(name, value); - }); - for (var name in proxyRequest.headers.keys) { - originRequest.headers.set(name, proxyRequest.headers[name]); - } - originRequest.headers.set('host', host); - - // Try to make normal request - try { - final originResponse = await originRequest.close(); - - request.response.headers.clear(); - originResponse.headers.forEach((name, value) { - request.response.headers.set(name, value); - }); - - // Pipe response - await originResponse.pipe(request.response); - } on HttpException { - // We likely are dealing with a streaming protocol - if (proxyRequest.uri.scheme == 'http') { - // Try parsing HTTP 0.9 response - //request.response.headers.clear(); - final socket = await Socket.connect( - proxyRequest.uri.host, proxyRequest.uri.port); - final clientSocket = - await request.response.detachSocket(writeHeaders: false); - Completer done = Completer(); - socket.listen( - clientSocket.add, - onDone: () async { - await clientSocket.flush(); - socket.close(); - clientSocket.close(); - done.complete(); - }, - ); - // Rewrite headers - final headers = {}; - request.headers.forEach((name, value) { - if (name.toLowerCase() != 'host') { - headers[name] = value.join(","); - } - }); - for (var name in proxyRequest.headers.keys) { - headers[name] = proxyRequest.headers[name]; - } - socket.write("GET ${proxyRequest.uri.path} HTTP/1.1\n"); - if (host != null) { - socket.write("Host: $host\n"); - } - for (var name in headers.keys) { - socket.write("$name: ${headers[name]}\n"); - } - 
socket.write("\n"); - await socket.flush(); - await done.future; - } - } - } - }); - } - - /// Stops the server - Future stop() => _server.close(); -} - -/// A request for a URL and headers made by a [_ProxyHttpServer]. -class _ProxyRequest { - final Uri uri; - final Map headers; - - _ProxyRequest(this.uri, this.headers); -} - -/// Specifies a source of audio to be played. Audio sources are composable -/// using the subclasses of this class. The same [AudioSource] instance should -/// not be used simultaneously by more than one [AudioPlayer]. -abstract class AudioSource { - final String _id; - AudioPlayer _player; - - /// Creates an [AudioSource] from a [Uri] with optional headers by - /// attempting to guess the type of stream. On iOS, this uses Apple's SDK to - /// automatically detect the stream type. On Android, the type of stream will - /// be guessed from the extension. - /// - /// If you are loading DASH or HLS streams that do not have standard "mpd" or - /// "m3u8" extensions in their URIs, this method will fail to detect the - /// stream type on Android. If you know in advance what type of audio stream - /// it is, you should instantiate [DashAudioSource] or [HlsAudioSource] - /// directly. 
- static AudioSource uri(Uri uri, {Map headers, Object tag}) { - bool hasExtension(Uri uri, String extension) => - uri.path.toLowerCase().endsWith('.$extension') || - uri.fragment.toLowerCase().endsWith('.$extension'); - if (hasExtension(uri, 'mpd')) { - return DashAudioSource(uri, headers: headers, tag: tag); - } else if (hasExtension(uri, 'm3u8')) { - return HlsAudioSource(uri, headers: headers, tag: tag); - } else { - return ProgressiveAudioSource(uri, headers: headers, tag: tag); - } - } - - static AudioSource fromJson(Map json) { - switch (json['type']) { - case 'progressive': - return ProgressiveAudioSource(Uri.parse(json['uri']), - headers: json['headers']); - case "dash": - return DashAudioSource(Uri.parse(json['uri']), - headers: json['headers']); - case "hls": - return HlsAudioSource(Uri.parse(json['uri']), headers: json['headers']); - case "concatenating": - return ConcatenatingAudioSource( - children: (json['audioSources'] as List) - .map((s) => AudioSource.fromJson(s)) - .toList()); - case "clipping": - return ClippingAudioSource( - child: AudioSource.fromJson(json['audioSource']), - start: Duration(milliseconds: json['start']), - end: Duration(milliseconds: json['end'])); - default: - throw Exception("Unknown AudioSource type: " + json['type']); - } - } - - AudioSource() : _id = _uuid.v4(); - - @mustCallSuper - Future _setup(AudioPlayer player) async { - _player = player; - player._registerAudioSource(this); - } - - @mustCallSuper - void _dispose() { - _player = null; - } - - bool get _requiresHeaders; - - List get sequence; - - Map toJson(); - - @override - int get hashCode => _id.hashCode; - - @override - bool operator ==(dynamic other) => other is AudioSource && other._id == _id; -} - -/// An [AudioSource] that can appear in a sequence. 
abstract class IndexedAudioSource extends AudioSource {
  final Object tag;

  IndexedAudioSource(this.tag);

  @override
  List get sequence => [this];
}

/// An abstract class representing audio sources that are loaded from a URI.
abstract class UriAudioSource extends IndexedAudioSource {
  final Uri uri;
  final Map headers;
  final String _type;
  Uri _overrideUri;
  File _cacheFile;

  UriAudioSource(this.uri, {this.headers, Object tag, @required String type})
      : _type = type,
        super(tag);

  @override
  Future _setup(AudioPlayer player) async {
    await super._setup(player);
    if (uri.scheme == 'asset') {
      // Assets are copied to a temp file so the platform player can read them.
      _overrideUri = Uri.file((await _loadAsset(uri.path)).path);
    } else if (headers != null) {
      // Headered requests are routed through the player's local proxy.
      _overrideUri = player._proxy.addUrl(uri, headers);
    }
  }

  @override
  void _dispose() {
    if (_cacheFile?.existsSync() == true) {
      _cacheFile?.deleteSync();
    }
    super._dispose();
  }

  // Copies the asset into the cache file (only once) and returns that file.
  Future _loadAsset(String assetPath) async {
    final file = await _getCacheFile(assetPath);
    _cacheFile = file;
    if (!file.existsSync()) {
      await file.create(recursive: true);
      await file.writeAsBytes(
          (await rootBundle.load(assetPath)).buffer.asUint8List());
    }
    return file;
  }

  /// Get file for caching asset media with proper extension
  Future _getCacheFile(final String assetPath) async => File(p.join(
      (await getTemporaryDirectory()).path,
      'just_audio_asset_cache',
      '${_player._id}_$_id${p.extension(assetPath)}'));

  @override
  bool get _requiresHeaders => headers != null;

  @override
  Map toJson() => {
        'id': _id,
        'type': _type,
        'uri': (_overrideUri ?? uri).toString(),
        'headers': headers,
      };
}

/// An [AudioSource] representing a regular media file such as an MP3 or M4A
/// file. The following URI schemes are supported:
///
/// * file: loads from a local file (provided you give your app permission to
///   access that file).
/// * asset: loads from a Flutter asset (not supported on Web).
/// * http(s): loads from an HTTP(S) resource.
///
/// On platforms except for the web, the supplied [headers] will be passed with
/// the HTTP(S) request.
class ProgressiveAudioSource extends UriAudioSource {
  ProgressiveAudioSource(Uri uri, {Map headers, Object tag})
      : super(uri, headers: headers, tag: tag, type: 'progressive');
}

/// An [AudioSource] representing a DASH stream.
///
/// On platforms except for the web, the supplied [headers] will be passed with
/// the HTTP(S) request. Currently headers are not applied recursively.
class DashAudioSource extends UriAudioSource {
  DashAudioSource(Uri uri, {Map headers, Object tag})
      : super(uri, headers: headers, tag: tag, type: 'dash');
}

/// An [AudioSource] representing an HLS stream.
///
/// On platforms except for the web, the supplied [headers] will be passed with
/// the HTTP(S) request. Currently headers are not applied recursively.
class HlsAudioSource extends UriAudioSource {
  HlsAudioSource(Uri uri, {Map headers, Object tag})
      : super(uri, headers: headers, tag: tag, type: 'hls');
}

/// An [AudioSource] representing a concatenation of multiple audio sources to
/// be played in succession. This can be used to create playlists. Playback
/// between items will be gapless on Android, iOS and macOS, while there will
/// be a slight gap on Web.
///
/// (Untested) Audio sources can be dynamically added, removed and reordered
/// while the audio is playing.
class ConcatenatingAudioSource extends AudioSource {
  final List children;
  final bool useLazyPreparation;

  ConcatenatingAudioSource({
    @required this.children,
    this.useLazyPreparation = false,
  });

  @override
  Future _setup(AudioPlayer player) async {
    await super._setup(player);
    for (var source in children) {
      await source._setup(player);
    }
  }

  /// (Untested) Appends an [AudioSource].
- Future add(AudioSource audioSource) async { - children.add(audioSource); - if (_player != null) { - await _player - ._invokeMethod('concatenating.add', [_id, audioSource.toJson()]); - } - } - - /// (Untested) Inserts an [AudioSource] at [index]. - Future insert(int index, AudioSource audioSource) async { - children.insert(index, audioSource); - if (_player != null) { - await _player._invokeMethod( - 'concatenating.insert', [_id, index, audioSource.toJson()]); - } - } - - /// (Untested) Appends multiple [AudioSource]s. - Future addAll(List children) async { - this.children.addAll(children); - if (_player != null) { - await _player._invokeMethod('concatenating.addAll', - [_id, children.map((s) => s.toJson()).toList()]); - } - } - - /// (Untested) Insert multiple [AudioSource]s at [index]. - Future insertAll(int index, List children) async { - this.children.insertAll(index, children); - if (_player != null) { - await _player._invokeMethod('concatenating.insertAll', - [_id, index, children.map((s) => s.toJson()).toList()]); - } - } - - /// (Untested) Dynmaically remove an [AudioSource] at [index] after this - /// [ConcatenatingAudioSource] has already been loaded. - Future removeAt(int index) async { - children.removeAt(index); - if (_player != null) { - await _player._invokeMethod('concatenating.removeAt', [_id, index]); - } - } - - /// (Untested) Removes a range of [AudioSource]s from index [start] inclusive - /// to [end] exclusive. - Future removeRange(int start, int end) async { - children.removeRange(start, end); - if (_player != null) { - await _player - ._invokeMethod('concatenating.removeRange', [_id, start, end]); - } - } - - /// (Untested) Moves an [AudioSource] from [currentIndex] to [newIndex]. 
- Future move(int currentIndex, int newIndex) async { - children.insert(newIndex, children.removeAt(currentIndex)); - if (_player != null) { - await _player - ._invokeMethod('concatenating.move', [_id, currentIndex, newIndex]); - } - } - - /// (Untested) Removes all [AudioSources]. - Future clear() async { - children.clear(); - if (_player != null) { - await _player._invokeMethod('concatenating.clear', [_id]); - } - } - - /// The number of [AudioSource]s. - int get length => children.length; - - operator [](int index) => children[index]; - - @override - List get sequence => - children.expand((s) => s.sequence).toList(); - - @override - bool get _requiresHeaders => - children.any((source) => source._requiresHeaders); - - @override - Map toJson() => { - 'id': _id, - 'type': 'concatenating', - 'audioSources': children.map((source) => source.toJson()).toList(), - 'useLazyPreparation': useLazyPreparation, - }; -} - -/// An [AudioSource] that clips the audio of a [UriAudioSource] between a -/// certain start and end time. -class ClippingAudioSource extends IndexedAudioSource { - final UriAudioSource child; - final Duration start; - final Duration end; - - /// Creates an audio source that clips [child] to the range [start]..[end], - /// where [start] and [end] default to the beginning and end of the original - /// [child] source. - ClippingAudioSource({ - @required this.child, - this.start, - this.end, - Object tag, - }) : super(tag); - - @override - Future _setup(AudioPlayer player) async { - await super._setup(player); - await child._setup(player); - } - - @override - bool get _requiresHeaders => child._requiresHeaders; - - @override - Map toJson() => { - 'id': _id, - 'type': 'clipping', - 'audioSource': child.toJson(), - 'start': start?.inMilliseconds, - 'end': end?.inMilliseconds, - }; -} - -// An [AudioSource] that loops a nested [AudioSource] a finite number of times. -// NOTE: this can be inefficient when using a large loop count. 
// If you wish to
// loop an infinite number of times, use [AudioPlayer.setLoopMode].
//
// On iOS and macOS, note that [LoopingAudioSource] will provide gapless
// playback while [AudioPlayer.setLoopMode] will not. (This will be supported
// in a future release.)
class LoopingAudioSource extends AudioSource {
  AudioSource child;
  final int count;

  LoopingAudioSource({
    @required this.child,
    this.count,
  }) : super();

  // Fix: previously [child] was never set up, so it was not registered with
  // the player and any headers it carried were never routed through the
  // proxy. This mirrors ClippingAudioSource and ConcatenatingAudioSource,
  // both of which set up their children.
  @override
  Future _setup(AudioPlayer player) async {
    await super._setup(player);
    await child._setup(player);
  }

  @override
  List get sequence =>
      List.generate(count, (i) => child).expand((s) => s.sequence).toList();

  @override
  bool get _requiresHeaders => child._requiresHeaders;

  @override
  Map toJson() => {
        'id': _id,
        'type': 'looping',
        'audioSource': child.toJson(),
        'count': count,
      };
}

enum LoopMode { off, one, all }
diff --git a/just_audio/lib/just_audio_web.dart b/just_audio/lib/just_audio_web.dart
deleted file mode 100644
index 1df5735..0000000
--- a/just_audio/lib/just_audio_web.dart
+++ /dev/null
@@ -1,957 +0,0 @@
import 'dart:async';
import 'dart:html';
import 'dart:math';

import 'package:flutter/services.dart';
import 'package:flutter/widgets.dart';
import 'package:flutter_web_plugins/flutter_web_plugins.dart';
import 'package:just_audio/just_audio.dart';

final Random _random = Random();

/// The entry point of the web implementation, registered by the Flutter web
/// plugin registrant. Creates one [Html5AudioPlayer] per 'init' call.
class JustAudioPlugin {
  static void registerWith(Registrar registrar) {
    final MethodChannel channel = MethodChannel(
        'com.ryanheise.just_audio.methods',
        const StandardMethodCodec(),
        registrar.messenger);
    final JustAudioPlugin instance = JustAudioPlugin(registrar);
    channel.setMethodCallHandler(instance.handleMethodCall);
  }

  final Registrar registrar;

  JustAudioPlugin(this.registrar);

  Future handleMethodCall(MethodCall call) async {
    switch (call.method) {
      case 'init':
        final String id = call.arguments[0];
        // The player registers its own channels keyed by [id]; no reference
        // needs to be retained here.
        Html5AudioPlayer(id: id, registrar: registrar);
        return null;
      case 'setIosCategory':
        // No-op on web.
        return null;
      default:
        throw PlatformException(code: 'Unimplemented');
    }
  }
}
abstract class JustAudioPlayer {
  final String id;
  final Registrar registrar;
  final MethodChannel methodChannel;
  final PluginEventChannel eventChannel;
  final StreamController eventController = StreamController();
  ProcessingState _processingState = ProcessingState.none;
  bool _playing = false;
  int _index;

  JustAudioPlayer({@required this.id, @required this.registrar})
      : methodChannel = MethodChannel('com.ryanheise.just_audio.methods.$id',
            const StandardMethodCodec(), registrar.messenger),
        eventChannel = PluginEventChannel('com.ryanheise.just_audio.events.$id',
            const StandardMethodCodec(), registrar.messenger) {
    methodChannel.setMethodCallHandler(_methodHandler);
    eventChannel.controller = eventController;
  }

  // Dispatches a platform method call to the corresponding abstract member.
  Future _methodHandler(MethodCall call) async {
    try {
      final args = call.arguments;
      switch (call.method) {
        case 'load':
          return await load(args[0]);
        case 'play':
          return await play();
        case 'pause':
          return await pause();
        case 'setVolume':
          return await setVolume(args[0]);
        case 'setSpeed':
          return await setSpeed(args[0]);
        case 'setLoopMode':
          return await setLoopMode(args[0]);
        case 'setShuffleModeEnabled':
          return await setShuffleModeEnabled(args[0]);
        case 'setAutomaticallyWaitsToMinimizeStalling':
          // Not applicable on web.
          return null;
        case 'seek':
          return await seek(args[0], args[1]);
        case 'dispose':
          return dispose();
        case 'concatenating.add':
          return await concatenatingAdd(args[0], args[1]);
        case "concatenating.insert":
          return await concatenatingInsert(args[0], args[1], args[2]);
        case "concatenating.addAll":
          return await concatenatingAddAll(args[0], args[1]);
        case "concatenating.insertAll":
          return await concatenatingInsertAll(args[0], args[1], args[2]);
        case "concatenating.removeAt":
          return await concatenatingRemoveAt(args[0], args[1]);
        case "concatenating.removeRange":
          return await concatenatingRemoveRange(args[0], args[1], args[2]);
        case "concatenating.move":
          return await concatenatingMove(args[0], args[1], args[2]);
        case "concatenating.clear":
          return await concatenatingClear(args[0]);
        default:
          throw PlatformException(code: 'Unimplemented');
      }
    } catch (e, stacktrace) {
      print("$stacktrace");
      rethrow;
    }
  }

  Future load(Map source);

  Future play();

  Future pause();

  Future setVolume(double volume);

  Future setSpeed(double speed);

  Future setLoopMode(int mode);

  Future setShuffleModeEnabled(bool enabled);

  Future seek(int position, int index);

  @mustCallSuper
  void dispose() {
    eventController.close();
  }

  Duration getCurrentPosition();

  Duration getBufferedPosition();

  Duration getDuration();

  concatenatingAdd(String playerId, Map source);

  concatenatingInsert(String playerId, int index, Map source);

  concatenatingAddAll(String playerId, List sources);

  concatenatingInsertAll(String playerId, int index, List sources);

  concatenatingRemoveAt(String playerId, int index);

  concatenatingRemoveRange(String playerId, int start, int end);

  concatenatingMove(String playerId, int currentIndex, int newIndex);

  concatenatingClear(String playerId);

  // Pushes the current playback state over the event channel.
  broadcastPlaybackEvent() {
    var updateTime = DateTime.now().millisecondsSinceEpoch;
    eventController.add({
      'processingState': _processingState.index,
      'updatePosition': getCurrentPosition()?.inMilliseconds,
      'updateTime': updateTime,
      'bufferedPosition': getBufferedPosition()?.inMilliseconds,
      // TODO: Icy Metadata
      'icyMetadata': null,
      'duration': getDuration()?.inMilliseconds,
      'currentIndex': _index,
    });
  }

  // Updates the processing state and broadcasts the new event.
  transition(ProcessingState processingState) {
    _processingState = processingState;
    broadcastPlaybackEvent();
  }
}

class Html5AudioPlayer extends JustAudioPlayer {
  AudioElement _audioElement = AudioElement();
  Completer _durationCompleter;
  AudioSourcePlayer _audioSourcePlayer;
  LoopMode _loopMode = LoopMode.off;
  bool _shuffleModeEnabled = false;
  final Map _audioSourcePlayers =
{}; - - Html5AudioPlayer({@required String id, @required Registrar registrar}) - : super(id: id, registrar: registrar) { - _audioElement.addEventListener('durationchange', (event) { - _durationCompleter?.complete(); - broadcastPlaybackEvent(); - }); - _audioElement.addEventListener('error', (event) { - _durationCompleter?.completeError(_audioElement.error); - }); - _audioElement.addEventListener('ended', (event) async { - _currentAudioSourcePlayer.complete(); - }); - _audioElement.addEventListener('timeupdate', (event) { - _currentAudioSourcePlayer.timeUpdated(_audioElement.currentTime); - }); - _audioElement.addEventListener('loadstart', (event) { - transition(ProcessingState.buffering); - }); - _audioElement.addEventListener('waiting', (event) { - transition(ProcessingState.buffering); - }); - _audioElement.addEventListener('stalled', (event) { - transition(ProcessingState.buffering); - }); - _audioElement.addEventListener('canplaythrough', (event) { - transition(ProcessingState.ready); - }); - _audioElement.addEventListener('progress', (event) { - broadcastPlaybackEvent(); - }); - } - - List get order { - final sequence = _audioSourcePlayer.sequence; - List order = List(sequence.length); - if (_shuffleModeEnabled) { - order = _audioSourcePlayer.shuffleOrder; - } else { - for (var i = 0; i < order.length; i++) { - order[i] = i; - } - } - return order; - } - - List getInv(List order) { - List orderInv = List(order.length); - for (var i = 0; i < order.length; i++) { - orderInv[order[i]] = i; - } - return orderInv; - } - - onEnded() async { - if (_loopMode == LoopMode.one) { - await seek(0, null); - play(); - } else { - final order = this.order; - final orderInv = getInv(order); - if (orderInv[_index] + 1 < order.length) { - // move to next item - _index = order[orderInv[_index] + 1]; - await _currentAudioSourcePlayer.load(); - // Should always be true... 
- if (_playing) { - play(); - } - } else { - // reached end of playlist - if (_loopMode == LoopMode.all) { - // Loop back to the beginning - if (order.length == 1) { - await seek(0, null); - play(); - } else { - _index = order[0]; - await _currentAudioSourcePlayer.load(); - // Should always be true... - if (_playing) { - play(); - } - } - } else { - transition(ProcessingState.completed); - } - } - } - } - - // TODO: Improve efficiency. - IndexedAudioSourcePlayer get _currentAudioSourcePlayer => - _audioSourcePlayer != null && _index < _audioSourcePlayer.sequence.length - ? _audioSourcePlayer.sequence[_index] - : null; - - @override - Future load(Map source) async { - _currentAudioSourcePlayer?.pause(); - _audioSourcePlayer = getAudioSource(source); - _index = 0; - if (_shuffleModeEnabled) { - _audioSourcePlayer?.shuffle(0, _index); - } - return (await _currentAudioSourcePlayer.load()).inMilliseconds; - } - - Future loadUri(final Uri uri) async { - transition(ProcessingState.loading); - final src = uri.toString(); - if (src != _audioElement.src) { - _durationCompleter = Completer(); - _audioElement.src = src; - _audioElement.preload = 'auto'; - _audioElement.load(); - try { - await _durationCompleter.future; - } on MediaError catch (e) { - throw PlatformException( - code: "${e.code}", message: "Failed to load URL"); - } finally { - _durationCompleter = null; - } - } - transition(ProcessingState.ready); - final seconds = _audioElement.duration; - return seconds.isFinite - ? 
Duration(milliseconds: (seconds * 1000).toInt()) - : null; - } - - @override - Future play() async { - _playing = true; - await _currentAudioSourcePlayer.play(); - } - - @override - Future pause() async { - _playing = false; - _currentAudioSourcePlayer.pause(); - } - - @override - Future setVolume(double volume) async { - _audioElement.volume = volume; - } - - @override - Future setSpeed(double speed) async { - _audioElement.playbackRate = speed; - } - - @override - Future setLoopMode(int mode) async { - _loopMode = LoopMode.values[mode]; - } - - @override - Future setShuffleModeEnabled(bool enabled) async { - _shuffleModeEnabled = enabled; - if (enabled) { - _audioSourcePlayer?.shuffle(0, _index); - } - } - - @override - Future seek(int position, int newIndex) async { - int index = newIndex ?? _index; - if (index != _index) { - _currentAudioSourcePlayer.pause(); - _index = index; - await _currentAudioSourcePlayer.load(); - await _currentAudioSourcePlayer.seek(position); - if (_playing) { - _currentAudioSourcePlayer.play(); - } - } else { - await _currentAudioSourcePlayer.seek(position); - } - } - - ConcatenatingAudioSourcePlayer _concatenating(String playerId) => - _audioSourcePlayers[playerId] as ConcatenatingAudioSourcePlayer; - - concatenatingAdd(String playerId, Map source) { - final playlist = _concatenating(playerId); - playlist.add(getAudioSource(source)); - } - - concatenatingInsert(String playerId, int index, Map source) { - _concatenating(playerId).insert(index, getAudioSource(source)); - if (index <= _index) { - _index++; - } - } - - concatenatingAddAll(String playerId, List sources) { - _concatenating(playerId).addAll(getAudioSources(sources)); - } - - concatenatingInsertAll(String playerId, int index, List sources) { - _concatenating(playerId).insertAll(index, getAudioSources(sources)); - if (index <= _index) { - _index += sources.length; - } - } - - concatenatingRemoveAt(String playerId, int index) async { - // Pause if removing current item - if 
(_index == index && _playing) { - _currentAudioSourcePlayer.pause(); - } - _concatenating(playerId).removeAt(index); - if (_index == index) { - // Skip backward if there's nothing after this - if (index == _audioSourcePlayer.sequence.length) { - _index--; - } - // Resume playback at the new item (if it exists) - if (_playing && _currentAudioSourcePlayer != null) { - await _currentAudioSourcePlayer.load(); - _currentAudioSourcePlayer.play(); - } - } else if (index < _index) { - // Reflect that the current item has shifted its position - _index--; - } - } - - concatenatingRemoveRange(String playerId, int start, int end) async { - if (_index >= start && _index < end && _playing) { - // Pause if removing current item - _currentAudioSourcePlayer.pause(); - } - _concatenating(playerId).removeRange(start, end); - if (_index >= start && _index < end) { - // Skip backward if there's nothing after this - if (start >= _audioSourcePlayer.sequence.length) { - _index = start - 1; - } else { - _index = start; - } - // Resume playback at the new item (if it exists) - if (_playing && _currentAudioSourcePlayer != null) { - await _currentAudioSourcePlayer.load(); - _currentAudioSourcePlayer.play(); - } - } else if (end <= _index) { - // Reflect that the current item has shifted its position - _index -= (end - start); - } - } - - concatenatingMove(String playerId, int currentIndex, int newIndex) { - _concatenating(playerId).move(currentIndex, newIndex); - if (currentIndex == _index) { - _index = newIndex; - } else if (currentIndex < _index && newIndex >= _index) { - _index--; - } else if (currentIndex > _index && newIndex <= _index) { - _index++; - } - } - - concatenatingClear(String playerId) { - _currentAudioSourcePlayer.pause(); - _concatenating(playerId).clear(); - } - - @override - Duration getCurrentPosition() => _currentAudioSourcePlayer?.position; - - @override - Duration getBufferedPosition() => _currentAudioSourcePlayer?.bufferedPosition; - - @override - Duration 
getDuration() => _currentAudioSourcePlayer?.duration; - - @override - void dispose() { - _currentAudioSourcePlayer?.pause(); - _audioElement.removeAttribute('src'); - _audioElement.load(); - transition(ProcessingState.none); - super.dispose(); - } - - List getAudioSources(List json) => - json.map((s) => getAudioSource(s)).toList(); - - AudioSourcePlayer getAudioSource(Map json) { - final String id = json['id']; - var audioSourcePlayer = _audioSourcePlayers[id]; - if (audioSourcePlayer == null) { - audioSourcePlayer = decodeAudioSource(json); - _audioSourcePlayers[id] = audioSourcePlayer; - } - return audioSourcePlayer; - } - - AudioSourcePlayer decodeAudioSource(Map json) { - try { - switch (json['type']) { - case 'progressive': - return ProgressiveAudioSourcePlayer( - this, json['id'], Uri.parse(json['uri']), json['headers']); - case "dash": - return DashAudioSourcePlayer( - this, json['id'], Uri.parse(json['uri']), json['headers']); - case "hls": - return HlsAudioSourcePlayer( - this, json['id'], Uri.parse(json['uri']), json['headers']); - case "concatenating": - return ConcatenatingAudioSourcePlayer( - this, - json['id'], - getAudioSources(json['audioSources']), - json['useLazyPreparation']); - case "clipping": - return ClippingAudioSourcePlayer( - this, - json['id'], - getAudioSource(json['audioSource']), - Duration(milliseconds: json['start']), - Duration(milliseconds: json['end'])); - case "looping": - return LoopingAudioSourcePlayer(this, json['id'], - getAudioSource(json['audioSource']), json['count']); - default: - throw Exception("Unknown AudioSource type: " + json['type']); - } - } catch (e, stacktrace) { - print("$stacktrace"); - rethrow; - } - } -} - -abstract class AudioSourcePlayer { - Html5AudioPlayer html5AudioPlayer; - final String id; - - AudioSourcePlayer(this.html5AudioPlayer, this.id); - - List get sequence; - - List get shuffleOrder; - - int shuffle(int treeIndex, int currentIndex); -} - -abstract class IndexedAudioSourcePlayer extends 
AudioSourcePlayer { - IndexedAudioSourcePlayer(Html5AudioPlayer html5AudioPlayer, String id) - : super(html5AudioPlayer, id); - - Future load(); - - Future play(); - - Future pause(); - - Future seek(int position); - - Future complete(); - - Future timeUpdated(double seconds) async {} - - Duration get duration; - - Duration get position; - - Duration get bufferedPosition; - - AudioElement get _audioElement => html5AudioPlayer._audioElement; - - @override - int shuffle(int treeIndex, int currentIndex) => treeIndex + 1; - - @override - String toString() => "${this.runtimeType}"; -} - -abstract class UriAudioSourcePlayer extends IndexedAudioSourcePlayer { - final Uri uri; - final Map headers; - double _resumePos; - Duration _duration; - Completer _completer; - - UriAudioSourcePlayer( - Html5AudioPlayer html5AudioPlayer, String id, this.uri, this.headers) - : super(html5AudioPlayer, id); - - @override - List get sequence => [this]; - - @override - List get shuffleOrder => [0]; - - @override - Future load() async { - _resumePos = 0.0; - return _duration = await html5AudioPlayer.loadUri(uri); - } - - @override - Future play() async { - _audioElement.currentTime = _resumePos; - _audioElement.play(); - _completer = Completer(); - await _completer.future; - _completer = null; - } - - @override - Future pause() async { - _resumePos = _audioElement.currentTime; - _audioElement.pause(); - _interruptPlay(); - } - - @override - Future seek(int position) async { - _audioElement.currentTime = _resumePos = position / 1000.0; - } - - @override - Future complete() async { - _interruptPlay(); - html5AudioPlayer.onEnded(); - } - - _interruptPlay() { - if (_completer?.isCompleted == false) { - _completer.complete(); - } - } - - @override - Duration get duration { - return _duration; - //final seconds = _audioElement.duration; - //return seconds.isFinite - // ? 
Duration(milliseconds: (seconds * 1000).toInt()) - // : null; - } - - @override - Duration get position { - double seconds = _audioElement.currentTime; - return Duration(milliseconds: (seconds * 1000).toInt()); - } - - @override - Duration get bufferedPosition { - if (_audioElement.buffered.length > 0) { - return Duration( - milliseconds: - (_audioElement.buffered.end(_audioElement.buffered.length - 1) * - 1000) - .toInt()); - } else { - return Duration.zero; - } - } -} - -class ProgressiveAudioSourcePlayer extends UriAudioSourcePlayer { - ProgressiveAudioSourcePlayer( - Html5AudioPlayer html5AudioPlayer, String id, Uri uri, Map headers) - : super(html5AudioPlayer, id, uri, headers); -} - -class DashAudioSourcePlayer extends UriAudioSourcePlayer { - DashAudioSourcePlayer( - Html5AudioPlayer html5AudioPlayer, String id, Uri uri, Map headers) - : super(html5AudioPlayer, id, uri, headers); -} - -class HlsAudioSourcePlayer extends UriAudioSourcePlayer { - HlsAudioSourcePlayer( - Html5AudioPlayer html5AudioPlayer, String id, Uri uri, Map headers) - : super(html5AudioPlayer, id, uri, headers); -} - -class ConcatenatingAudioSourcePlayer extends AudioSourcePlayer { - static List generateShuffleOrder(int length, [int firstIndex]) { - final shuffleOrder = List(length); - for (var i = 0; i < length; i++) { - final j = _random.nextInt(i + 1); - shuffleOrder[i] = shuffleOrder[j]; - shuffleOrder[j] = i; - } - if (firstIndex != null) { - for (var i = 1; i < length; i++) { - if (shuffleOrder[i] == firstIndex) { - final v = shuffleOrder[0]; - shuffleOrder[0] = shuffleOrder[i]; - shuffleOrder[i] = v; - break; - } - } - } - return shuffleOrder; - } - - final List audioSourcePlayers; - final bool useLazyPreparation; - List _shuffleOrder; - - ConcatenatingAudioSourcePlayer(Html5AudioPlayer html5AudioPlayer, String id, - this.audioSourcePlayers, this.useLazyPreparation) - : _shuffleOrder = generateShuffleOrder(audioSourcePlayers.length), - super(html5AudioPlayer, id); - - @override - 
List get sequence => - audioSourcePlayers.expand((p) => p.sequence).toList(); - - @override - List get shuffleOrder { - final order = []; - var offset = order.length; - final childOrders = >[]; - for (var audioSourcePlayer in audioSourcePlayers) { - final childShuffleOrder = audioSourcePlayer.shuffleOrder; - childOrders.add(childShuffleOrder.map((i) => i + offset).toList()); - offset += childShuffleOrder.length; - } - for (var i = 0; i < childOrders.length; i++) { - order.addAll(childOrders[_shuffleOrder[i]]); - } - return order; - } - - @override - int shuffle(int treeIndex, int currentIndex) { - int currentChildIndex; - for (var i = 0; i < audioSourcePlayers.length; i++) { - final indexBefore = treeIndex; - final child = audioSourcePlayers[i]; - treeIndex = child.shuffle(treeIndex, currentIndex); - if (currentIndex >= indexBefore && currentIndex < treeIndex) { - currentChildIndex = i; - } else {} - } - // Shuffle so that the current child is first in the shuffle order - _shuffleOrder = - generateShuffleOrder(audioSourcePlayers.length, currentChildIndex); - return treeIndex; - } - - add(AudioSourcePlayer player) { - audioSourcePlayers.add(player); - _shuffleOrder.add(audioSourcePlayers.length - 1); - } - - insert(int index, AudioSourcePlayer player) { - audioSourcePlayers.insert(index, player); - for (var i = 0; i < audioSourcePlayers.length; i++) { - if (_shuffleOrder[i] >= index) { - _shuffleOrder[i]++; - } - } - _shuffleOrder.add(index); - } - - addAll(List players) { - audioSourcePlayers.addAll(players); - _shuffleOrder.addAll( - List.generate(players.length, (i) => audioSourcePlayers.length + i) - .toList() - ..shuffle()); - } - - insertAll(int index, List players) { - audioSourcePlayers.insertAll(index, players); - for (var i = 0; i < audioSourcePlayers.length; i++) { - if (_shuffleOrder[i] >= index) { - _shuffleOrder[i] += players.length; - } - } - _shuffleOrder.addAll( - List.generate(players.length, (i) => index + i).toList()..shuffle()); - } - - 
removeAt(int index) { - audioSourcePlayers.removeAt(index); - // 0 1 2 3 - // 3 2 0 1 - for (var i = 0; i < audioSourcePlayers.length; i++) { - if (_shuffleOrder[i] > index) { - _shuffleOrder[i]--; - } - } - _shuffleOrder.removeWhere((i) => i == index); - } - - removeRange(int start, int end) { - audioSourcePlayers.removeRange(start, end); - for (var i = 0; i < audioSourcePlayers.length; i++) { - if (_shuffleOrder[i] >= end) { - _shuffleOrder[i] -= (end - start); - } - } - _shuffleOrder.removeWhere((i) => i >= start && i < end); - } - - move(int currentIndex, int newIndex) { - audioSourcePlayers.insert( - newIndex, audioSourcePlayers.removeAt(currentIndex)); - } - - clear() { - audioSourcePlayers.clear(); - _shuffleOrder.clear(); - } -} - -class ClippingAudioSourcePlayer extends IndexedAudioSourcePlayer { - final UriAudioSourcePlayer audioSourcePlayer; - final Duration start; - final Duration end; - Completer _completer; - double _resumePos; - Duration _duration; - - ClippingAudioSourcePlayer(Html5AudioPlayer html5AudioPlayer, String id, - this.audioSourcePlayer, this.start, this.end) - : super(html5AudioPlayer, id); - - @override - List get sequence => [this]; - - @override - List get shuffleOrder => [0]; - - @override - Future load() async { - _resumePos = (start ?? Duration.zero).inMilliseconds / 1000.0; - Duration fullDuration = - await html5AudioPlayer.loadUri(audioSourcePlayer.uri); - _audioElement.currentTime = _resumePos; - _duration = Duration( - milliseconds: min((end ?? fullDuration).inMilliseconds, - fullDuration.inMilliseconds) - - (start ?? 
Duration.zero).inMilliseconds); - return _duration; - } - - double get remaining => end.inMilliseconds / 1000 - _audioElement.currentTime; - - @override - Future play() async { - _interruptPlay(ClipInterruptReason.simultaneous); - _audioElement.currentTime = _resumePos; - _audioElement.play(); - _completer = Completer(); - ClipInterruptReason reason; - while ((reason = await _completer.future) == ClipInterruptReason.seek) { - _completer = Completer(); - } - if (reason == ClipInterruptReason.end) { - html5AudioPlayer.onEnded(); - } - _completer = null; - } - - @override - Future pause() async { - _interruptPlay(ClipInterruptReason.pause); - _resumePos = _audioElement.currentTime; - _audioElement.pause(); - } - - @override - Future seek(int position) async { - _interruptPlay(ClipInterruptReason.seek); - _audioElement.currentTime = - _resumePos = start.inMilliseconds / 1000.0 + position / 1000.0; - } - - @override - Future complete() async { - _interruptPlay(ClipInterruptReason.end); - } - - @override - Future timeUpdated(double seconds) async { - if (end != null) { - if (seconds >= end.inMilliseconds / 1000) { - _interruptPlay(ClipInterruptReason.end); - } - } - } - - @override - Duration get duration { - return _duration; - } - - @override - Duration get position { - double seconds = _audioElement.currentTime; - var position = Duration(milliseconds: (seconds * 1000).toInt()); - if (start != null) { - position -= start; - } - if (position < Duration.zero) { - position = Duration.zero; - } - return position; - } - - @override - Duration get bufferedPosition { - if (_audioElement.buffered.length > 0) { - var seconds = - _audioElement.buffered.end(_audioElement.buffered.length - 1); - var position = Duration(milliseconds: (seconds * 1000).toInt()); - if (start != null) { - position -= start; - } - if (position < Duration.zero) { - position = Duration.zero; - } - if (duration != null && position > duration) { - position = duration; - } - return position; - } else { - 
return Duration.zero; - } - } - - _interruptPlay(ClipInterruptReason reason) { - if (_completer?.isCompleted == false) { - _completer.complete(reason); - } - } -} - -enum ClipInterruptReason { end, pause, seek, simultaneous } - -class LoopingAudioSourcePlayer extends AudioSourcePlayer { - final AudioSourcePlayer audioSourcePlayer; - final int count; - - LoopingAudioSourcePlayer(Html5AudioPlayer html5AudioPlayer, String id, - this.audioSourcePlayer, this.count) - : super(html5AudioPlayer, id); - - @override - List get sequence => - List.generate(count, (i) => audioSourcePlayer) - .expand((p) => p.sequence) - .toList(); - - @override - List get shuffleOrder { - final order = []; - var offset = order.length; - for (var i = 0; i < count; i++) { - final childShuffleOrder = audioSourcePlayer.shuffleOrder; - order.addAll(childShuffleOrder.map((i) => i + offset).toList()); - offset += childShuffleOrder.length; - } - return order; - } - - @override - int shuffle(int treeIndex, int currentIndex) { - for (var i = 0; i < count; i++) { - treeIndex = audioSourcePlayer.shuffle(treeIndex, currentIndex); - } - return treeIndex; - } -} diff --git a/just_audio/macos/.gitignore b/just_audio/macos/.gitignore deleted file mode 100644 index aa479fd..0000000 --- a/just_audio/macos/.gitignore +++ /dev/null @@ -1,37 +0,0 @@ -.idea/ -.vagrant/ -.sconsign.dblite -.svn/ - -.DS_Store -*.swp -profile - -DerivedData/ -build/ -GeneratedPluginRegistrant.h -GeneratedPluginRegistrant.m - -.generated/ - -*.pbxuser -*.mode1v3 -*.mode2v3 -*.perspectivev3 - -!default.pbxuser -!default.mode1v3 -!default.mode2v3 -!default.perspectivev3 - -xcuserdata - -*.moved-aside - -*.pyc -*sync/ -Icon? 
-.tags* - -/Flutter/Generated.xcconfig -/Flutter/flutter_export_environment.sh \ No newline at end of file diff --git a/just_audio/macos/Assets/.gitkeep b/just_audio/macos/Assets/.gitkeep deleted file mode 100644 index e69de29..0000000 diff --git a/just_audio/macos/Classes/AudioPlayer.h b/just_audio/macos/Classes/AudioPlayer.h deleted file mode 100644 index d64e13d..0000000 --- a/just_audio/macos/Classes/AudioPlayer.h +++ /dev/null @@ -1,21 +0,0 @@ -#import - -@interface AudioPlayer : NSObject - -- (instancetype)initWithRegistrar:(NSObject *)registrar playerId:(NSString*)idParam configuredSession:(BOOL)configuredSession; - -@end - -enum ProcessingState { - none, - loading, - buffering, - ready, - completed -}; - -enum LoopMode { - loopOff, - loopOne, - loopAll -}; diff --git a/just_audio/macos/Classes/AudioPlayer.m b/just_audio/macos/Classes/AudioPlayer.m deleted file mode 100644 index ccbfdea..0000000 --- a/just_audio/macos/Classes/AudioPlayer.m +++ /dev/null @@ -1,1138 +0,0 @@ -#import "AudioPlayer.h" -#import "AudioSource.h" -#import "IndexedAudioSource.h" -#import "UriAudioSource.h" -#import "ConcatenatingAudioSource.h" -#import "LoopingAudioSource.h" -#import "ClippingAudioSource.h" -#import -#import -#include - -// TODO: Check for and report invalid state transitions. 
-// TODO: Apply Apple's guidance on seeking: https://developer.apple.com/library/archive/qa/qa1820/_index.html -@implementation AudioPlayer { - NSObject* _registrar; - FlutterMethodChannel *_methodChannel; - FlutterEventChannel *_eventChannel; - FlutterEventSink _eventSink; - NSString *_playerId; - AVQueuePlayer *_player; - AudioSource *_audioSource; - NSMutableArray *_indexedAudioSources; - NSMutableArray *_order; - NSMutableArray *_orderInv; - int _index; - enum ProcessingState _processingState; - enum LoopMode _loopMode; - BOOL _shuffleModeEnabled; - long long _updateTime; - int _updatePosition; - int _lastPosition; - int _bufferedPosition; - // Set when the current item hasn't been played yet so we aren't sure whether sufficient audio has been buffered. - BOOL _bufferUnconfirmed; - CMTime _seekPos; - FlutterResult _loadResult; - FlutterResult _playResult; - id _timeObserver; - BOOL _automaticallyWaitsToMinimizeStalling; - BOOL _configuredSession; - BOOL _playing; -} - -- (instancetype)initWithRegistrar:(NSObject *)registrar playerId:(NSString*)idParam configuredSession:(BOOL)configuredSession { - self = [super init]; - NSAssert(self, @"super init cannot be nil"); - _registrar = registrar; - _playerId = idParam; - _configuredSession = configuredSession; - _methodChannel = - [FlutterMethodChannel methodChannelWithName:[NSMutableString stringWithFormat:@"com.ryanheise.just_audio.methods.%@", _playerId] - binaryMessenger:[registrar messenger]]; - _eventChannel = - [FlutterEventChannel eventChannelWithName:[NSMutableString stringWithFormat:@"com.ryanheise.just_audio.events.%@", _playerId] - binaryMessenger:[registrar messenger]]; - [_eventChannel setStreamHandler:self]; - _index = 0; - _processingState = none; - _loopMode = loopOff; - _shuffleModeEnabled = NO; - _player = nil; - _audioSource = nil; - _indexedAudioSources = nil; - _order = nil; - _orderInv = nil; - _seekPos = kCMTimeInvalid; - _timeObserver = 0; - _updatePosition = 0; - _updateTime = 0; - 
_lastPosition = 0; - _bufferedPosition = 0; - _bufferUnconfirmed = NO; - _playing = NO; - _loadResult = nil; - _playResult = nil; - _automaticallyWaitsToMinimizeStalling = YES; - __weak __typeof__(self) weakSelf = self; - [_methodChannel setMethodCallHandler:^(FlutterMethodCall* call, FlutterResult result) { - [weakSelf handleMethodCall:call result:result]; - }]; - return self; -} - -- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result { - NSArray* args = (NSArray*)call.arguments; - if ([@"load" isEqualToString:call.method]) { - [self load:args[0] result:result]; - } else if ([@"play" isEqualToString:call.method]) { - [self play:result]; - } else if ([@"pause" isEqualToString:call.method]) { - [self pause]; - result(nil); - } else if ([@"setVolume" isEqualToString:call.method]) { - [self setVolume:(float)[args[0] doubleValue]]; - result(nil); - } else if ([@"setSpeed" isEqualToString:call.method]) { - [self setSpeed:(float)[args[0] doubleValue]]; - result(nil); - } else if ([@"setLoopMode" isEqualToString:call.method]) { - [self setLoopMode:[args[0] intValue]]; - result(nil); - } else if ([@"setShuffleModeEnabled" isEqualToString:call.method]) { - [self setShuffleModeEnabled:(BOOL)[args[0] boolValue]]; - result(nil); - } else if ([@"setAutomaticallyWaitsToMinimizeStalling" isEqualToString:call.method]) { - [self setAutomaticallyWaitsToMinimizeStalling:(BOOL)[args[0] boolValue]]; - result(nil); - } else if ([@"seek" isEqualToString:call.method]) { - CMTime position = args[0] == [NSNull null] ? 
kCMTimePositiveInfinity : CMTimeMake([args[0] intValue], 1000); - [self seek:position index:args[1] completionHandler:^(BOOL finished) { - result(nil); - }]; - result(nil); - } else if ([@"dispose" isEqualToString:call.method]) { - [self dispose]; - result(nil); - } else if ([@"concatenating.add" isEqualToString:call.method]) { - [self concatenatingAdd:(NSString*)args[0] source:(NSDictionary*)args[1]]; - result(nil); - } else if ([@"concatenating.insert" isEqualToString:call.method]) { - [self concatenatingInsert:(NSString*)args[0] index:[args[1] intValue] source:(NSDictionary*)args[2]]; - result(nil); - } else if ([@"concatenating.addAll" isEqualToString:call.method]) { - [self concatenatingAddAll:(NSString*)args[0] sources:(NSArray*)args[1]]; - result(nil); - } else if ([@"concatenating.insertAll" isEqualToString:call.method]) { - [self concatenatingInsertAll:(NSString*)args[0] index:[args[1] intValue] sources:(NSArray*)args[2]]; - result(nil); - } else if ([@"concatenating.removeAt" isEqualToString:call.method]) { - [self concatenatingRemoveAt:(NSString*)args[0] index:(int)args[1]]; - result(nil); - } else if ([@"concatenating.removeRange" isEqualToString:call.method]) { - [self concatenatingRemoveRange:(NSString*)args[0] start:[args[1] intValue] end:[args[2] intValue]]; - result(nil); - } else if ([@"concatenating.move" isEqualToString:call.method]) { - [self concatenatingMove:(NSString*)args[0] currentIndex:[args[1] intValue] newIndex:[args[2] intValue]]; - result(nil); - } else if ([@"concatenating.clear" isEqualToString:call.method]) { - [self concatenatingClear:(NSString*)args[0]]; - result(nil); - } else { - result(FlutterMethodNotImplemented); - } -} - -// Untested -- (void)concatenatingAdd:(NSString *)catId source:(NSDictionary *)source { - [self concatenatingInsertAll:catId index:-1 sources:@[source]]; -} - -// Untested -- (void)concatenatingInsert:(NSString *)catId index:(int)index source:(NSDictionary *)source { - [self concatenatingInsertAll:catId 
index:index sources:@[source]]; -} - -// Untested -- (void)concatenatingAddAll:(NSString *)catId sources:(NSArray *)sources { - [self concatenatingInsertAll:catId index:-1 sources:sources]; -} - -// Untested -- (void)concatenatingInsertAll:(NSString *)catId index:(int)index sources:(NSArray *)sources { - // Find all duplicates of the identified ConcatenatingAudioSource. - NSMutableArray *matches = [[NSMutableArray alloc] init]; - [_audioSource findById:catId matches:matches]; - // Add each new source to each match. - for (int i = 0; i < matches.count; i++) { - ConcatenatingAudioSource *catSource = (ConcatenatingAudioSource *)matches[i]; - int idx = index >= 0 ? index : catSource.count; - NSMutableArray *audioSources = [self decodeAudioSources:sources]; - for (int j = 0; j < audioSources.count; j++) { - AudioSource *audioSource = audioSources[j]; - [catSource insertSource:audioSource atIndex:(idx + j)]; - } - } - // Index the new audio sources. - _indexedAudioSources = [[NSMutableArray alloc] init]; - [_audioSource buildSequence:_indexedAudioSources treeIndex:0]; - for (int i = 0; i < [_indexedAudioSources count]; i++) { - IndexedAudioSource *audioSource = _indexedAudioSources[i]; - if (!audioSource.isAttached) { - audioSource.playerItem.audioSource = audioSource; - [self addItemObservers:audioSource.playerItem]; - } - } - [self updateOrder]; - if (_player.currentItem) { - _index = [self indexForItem:_player.currentItem]; - } else { - _index = 0; - } - [self enqueueFrom:_index]; - // Notify each new IndexedAudioSource that it's been attached to the player. 
- for (int i = 0; i < [_indexedAudioSources count]; i++) { - if (!_indexedAudioSources[i].isAttached) { - [_indexedAudioSources[i] attach:_player]; - } - } - [self broadcastPlaybackEvent]; -} - -// Untested -- (void)concatenatingRemoveAt:(NSString *)catId index:(int)index { - [self concatenatingRemoveRange:catId start:index end:(index + 1)]; -} - -// Untested -- (void)concatenatingRemoveRange:(NSString *)catId start:(int)start end:(int)end { - // Find all duplicates of the identified ConcatenatingAudioSource. - NSMutableArray *matches = [[NSMutableArray alloc] init]; - [_audioSource findById:catId matches:matches]; - // Remove range from each match. - for (int i = 0; i < matches.count; i++) { - ConcatenatingAudioSource *catSource = (ConcatenatingAudioSource *)matches[i]; - int endIndex = end >= 0 ? end : catSource.count; - [catSource removeSourcesFromIndex:start toIndex:endIndex]; - } - // Re-index the remaining audio sources. - NSArray *oldIndexedAudioSources = _indexedAudioSources; - _indexedAudioSources = [[NSMutableArray alloc] init]; - [_audioSource buildSequence:_indexedAudioSources treeIndex:0]; - for (int i = 0, j = 0; i < _indexedAudioSources.count; i++, j++) { - IndexedAudioSource *audioSource = _indexedAudioSources[i]; - while (audioSource != oldIndexedAudioSources[j]) { - [self removeItemObservers:oldIndexedAudioSources[j].playerItem]; - if (j < _index) { - _index--; - } else if (j == _index) { - // The currently playing item was removed. - } - j++; - } - } - [self updateOrder]; - if (_index >= _indexedAudioSources.count) _index = _indexedAudioSources.count - 1; - if (_index < 0) _index = 0; - [self enqueueFrom:_index]; - [self broadcastPlaybackEvent]; -} - -// Untested -- (void)concatenatingMove:(NSString *)catId currentIndex:(int)currentIndex newIndex:(int)newIndex { - // Find all duplicates of the identified ConcatenatingAudioSource. 
- NSMutableArray *matches = [[NSMutableArray alloc] init]; - [_audioSource findById:catId matches:matches]; - // Move range within each match. - for (int i = 0; i < matches.count; i++) { - ConcatenatingAudioSource *catSource = (ConcatenatingAudioSource *)matches[i]; - [catSource moveSourceFromIndex:currentIndex toIndex:newIndex]; - } - // Re-index the audio sources. - _indexedAudioSources = [[NSMutableArray alloc] init]; - [_audioSource buildSequence:_indexedAudioSources treeIndex:0]; - _index = [self indexForItem:_player.currentItem]; - [self broadcastPlaybackEvent]; -} - -// Untested -- (void)concatenatingClear:(NSString *)catId { - [self concatenatingRemoveRange:catId start:0 end:-1]; -} - -- (FlutterError*)onListenWithArguments:(id)arguments eventSink:(FlutterEventSink)eventSink { - _eventSink = eventSink; - return nil; -} - -- (FlutterError*)onCancelWithArguments:(id)arguments { - _eventSink = nil; - return nil; -} - -- (void)checkForDiscontinuity { - if (!_eventSink) return; - if (!_playing || CMTIME_IS_VALID(_seekPos) || _processingState == completed) return; - int position = [self getCurrentPosition]; - if (_processingState == buffering) { - if (position > _lastPosition) { - [self leaveBuffering:@"stall ended"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - } - } else { - long long now = (long long)([[NSDate date] timeIntervalSince1970] * 1000.0); - long long timeSinceLastUpdate = now - _updateTime; - long long expectedPosition = _updatePosition + (long long)(timeSinceLastUpdate * _player.rate); - long long drift = position - expectedPosition; - //NSLog(@"position: %d, drift: %lld", position, drift); - // Update if we've drifted or just started observing - if (_updateTime == 0L) { - [self broadcastPlaybackEvent]; - } else if (drift < -100) { - [self enterBuffering:@"stalling"]; - NSLog(@"Drift: %lld", drift); - [self updatePosition]; - [self broadcastPlaybackEvent]; - } - } - _lastPosition = position; -} - -- (void)enterBuffering:(NSString 
*)reason { - NSLog(@"ENTER BUFFERING: %@", reason); - _processingState = buffering; -} - -- (void)leaveBuffering:(NSString *)reason { - NSLog(@"LEAVE BUFFERING: %@", reason); - _processingState = ready; -} - -- (void)broadcastPlaybackEvent { - if (!_eventSink) return; - _eventSink(@{ - @"processingState": @(_processingState), - @"updatePosition": @(_updatePosition), - @"updateTime": @(_updateTime), - // TODO: buffer position - @"bufferedPosition": @(_updatePosition), - // TODO: Icy Metadata - @"icyMetadata": [NSNull null], - @"duration": @([self getDuration]), - @"currentIndex": @(_index), - }); -} - -- (int)getCurrentPosition { - if (_processingState == none || _processingState == loading) { - return 0; - } else if (CMTIME_IS_VALID(_seekPos)) { - return (int)(1000 * CMTimeGetSeconds(_seekPos)); - } else if (_indexedAudioSources) { - int ms = (int)(1000 * CMTimeGetSeconds(_indexedAudioSources[_index].position)); - if (ms < 0) ms = 0; - return ms; - } else { - return 0; - } -} - -- (int)getBufferedPosition { - if (_processingState == none || _processingState == loading) { - return 0; - } else if (_indexedAudioSources) { - int ms = (int)(1000 * CMTimeGetSeconds(_indexedAudioSources[_index].bufferedPosition)); - if (ms < 0) ms = 0; - return ms; - } else { - return 0; - } -} - -- (int)getDuration { - if (_processingState == none) { - return -1; - } else if (_indexedAudioSources) { - int v = (int)(1000 * CMTimeGetSeconds(_indexedAudioSources[_index].duration)); - return v; - } else { - return 0; - } -} - -- (void)removeItemObservers:(AVPlayerItem *)playerItem { - [playerItem removeObserver:self forKeyPath:@"status"]; - [playerItem removeObserver:self forKeyPath:@"playbackBufferEmpty"]; - [playerItem removeObserver:self forKeyPath:@"playbackBufferFull"]; - //[playerItem removeObserver:self forKeyPath:@"playbackLikelyToKeepUp"]; - [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemDidPlayToEndTimeNotification object:playerItem]; - 
[[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemFailedToPlayToEndTimeNotification object:playerItem]; - [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemPlaybackStalledNotification object:playerItem]; -} - -- (void)addItemObservers:(AVPlayerItem *)playerItem { - // Get notified when the item is loaded or had an error loading - [playerItem addObserver:self forKeyPath:@"status" options:NSKeyValueObservingOptionNew context:nil]; - // Get notified of the buffer state - [playerItem addObserver:self forKeyPath:@"playbackBufferEmpty" options:NSKeyValueObservingOptionNew context:nil]; - [playerItem addObserver:self forKeyPath:@"playbackBufferFull" options:NSKeyValueObservingOptionNew context:nil]; - [playerItem addObserver:self forKeyPath:@"loadedTimeRanges" options:NSKeyValueObservingOptionNew context:nil]; - //[playerItem addObserver:self forKeyPath:@"playbackLikelyToKeepUp" options:NSKeyValueObservingOptionNew context:nil]; - // Get notified when playback has reached the end - [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(onComplete:) name:AVPlayerItemDidPlayToEndTimeNotification object:playerItem]; - // Get notified when playback stops due to a failure (currently unused) - [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(onFailToComplete:) name:AVPlayerItemFailedToPlayToEndTimeNotification object:playerItem]; - // Get notified when playback stalls (currently unused) - [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(onItemStalled:) name:AVPlayerItemPlaybackStalledNotification object:playerItem]; -} - -- (NSMutableArray *)decodeAudioSources:(NSArray *)data { - NSMutableArray *array = [[NSMutableArray alloc] init]; - for (int i = 0; i < [data count]; i++) { - AudioSource *source = [self decodeAudioSource:data[i]]; - [array addObject:source]; - } - return array; -} - -- (AudioSource *)decodeAudioSource:(NSDictionary *)data { - NSString *type = 
data[@"type"]; - if ([@"progressive" isEqualToString:type]) { - return [[UriAudioSource alloc] initWithId:data[@"id"] uri:data[@"uri"]]; - } else if ([@"dash" isEqualToString:type]) { - return [[UriAudioSource alloc] initWithId:data[@"id"] uri:data[@"uri"]]; - } else if ([@"hls" isEqualToString:type]) { - return [[UriAudioSource alloc] initWithId:data[@"id"] uri:data[@"uri"]]; - } else if ([@"concatenating" isEqualToString:type]) { - return [[ConcatenatingAudioSource alloc] initWithId:data[@"id"] - audioSources:[self decodeAudioSources:data[@"audioSources"]]]; - } else if ([@"clipping" isEqualToString:type]) { - return [[ClippingAudioSource alloc] initWithId:data[@"id"] - audioSource:[self decodeAudioSource:data[@"audioSource"]] - start:data[@"start"] - end:data[@"end"]]; - } else if ([@"looping" isEqualToString:type]) { - NSMutableArray *childSources = [NSMutableArray new]; - int count = [data[@"count"] intValue]; - for (int i = 0; i < count; i++) { - [childSources addObject:[self decodeAudioSource:data[@"audioSource"]]]; - } - return [[LoopingAudioSource alloc] initWithId:data[@"id"] audioSources:childSources]; - } else { - return nil; - } -} - -- (void)enqueueFrom:(int)index { - int oldIndex = _index; - _index = index; - - // Update the queue while keeping the currently playing item untouched. - - /* NSLog(@"before reorder: _player.items.count: ", _player.items.count); */ - /* [self dumpQueue]; */ - - // First, remove all _player items except for the currently playing one (if any). - IndexedPlayerItem *oldItem = _player.currentItem; - IndexedPlayerItem *existingItem = nil; - NSArray *oldPlayerItems = [NSArray arrayWithArray:_player.items]; - // In the first pass, preserve the old and new items. - for (int i = 0; i < oldPlayerItems.count; i++) { - if (oldPlayerItems[i] == _indexedAudioSources[_index].playerItem) { - // Preserve and tag new item if it is already in the queue. 
- existingItem = oldPlayerItems[i]; - } else if (oldPlayerItems[i] == oldItem) { - // Temporarily preserve old item, just to avoid jumping to - // intermediate queue positions unnecessarily. We only want to jump - // once to _index. - } else { - [_player removeItem:oldPlayerItems[i]]; - } - } - // In the second pass, remove the old item (if different from new item). - if (_index != oldIndex) { - [_player removeItem:oldItem]; - } - - /* NSLog(@"inter order: _player.items.count: ", _player.items.count); */ - /* [self dumpQueue]; */ - - // Regenerate queue - BOOL include = NO; - for (int i = 0; i < [_order count]; i++) { - int si = [_order[i] intValue]; - if (si == _index) include = YES; - if (include && _indexedAudioSources[si].playerItem != existingItem) { - [_player insertItem:_indexedAudioSources[si].playerItem afterItem:nil]; - } - } - - /* NSLog(@"after reorder: _player.items.count: ", _player.items.count); */ - /* [self dumpQueue]; */ - - if (_processingState != loading && oldItem != _indexedAudioSources[_index].playerItem) { - // || !_player.currentItem.playbackLikelyToKeepUp; - if (_player.currentItem.playbackBufferEmpty) { - [self enterBuffering:@"enqueueFrom playbackBufferEmpty"]; - } else { - [self leaveBuffering:@"enqueueFrom !playbackBufferEmpty"]; - } - [self updatePosition]; - } -} - -- (void)updatePosition { - _updatePosition = [self getCurrentPosition]; - _updateTime = (long long)([[NSDate date] timeIntervalSince1970] * 1000.0); -} - -- (void)load:(NSDictionary *)source result:(FlutterResult)result { - if (!_playing) { - [_player pause]; - } - if (_processingState == loading) { - [self abortExistingConnection]; - } - _loadResult = result; - _index = 0; - [self updatePosition]; - _processingState = loading; - [self broadcastPlaybackEvent]; - // Remove previous observers - if (_indexedAudioSources) { - for (int i = 0; i < [_indexedAudioSources count]; i++) { - [self removeItemObservers:_indexedAudioSources[i].playerItem]; - } - } - // Decode audio 
source - if (_audioSource && [@"clipping" isEqualToString:source[@"type"]]) { - // Check if we're clipping an audio source that was previously loaded. - UriAudioSource *child = nil; - if ([_audioSource isKindOfClass:[ClippingAudioSource class]]) { - ClippingAudioSource *clipper = (ClippingAudioSource *)_audioSource; - child = clipper.audioSource; - } else if ([_audioSource isKindOfClass:[UriAudioSource class]]) { - child = (UriAudioSource *)_audioSource; - } - if (child) { - _audioSource = [[ClippingAudioSource alloc] initWithId:source[@"id"] - audioSource:child - start:source[@"start"] - end:source[@"end"]]; - } else { - _audioSource = [self decodeAudioSource:source]; - } - } else { - _audioSource = [self decodeAudioSource:source]; - } - _indexedAudioSources = [[NSMutableArray alloc] init]; - [_audioSource buildSequence:_indexedAudioSources treeIndex:0]; - for (int i = 0; i < [_indexedAudioSources count]; i++) { - IndexedAudioSource *source = _indexedAudioSources[i]; - [self addItemObservers:source.playerItem]; - source.playerItem.audioSource = source; - } - [self updateOrder]; - // Set up an empty player - if (!_player) { - _player = [[AVQueuePlayer alloc] initWithItems:@[]]; - if (@available(macOS 10.12, iOS 10.0, *)) { - _player.automaticallyWaitsToMinimizeStalling = _automaticallyWaitsToMinimizeStalling; - // TODO: Remove these observers in dispose. - [_player addObserver:self - forKeyPath:@"timeControlStatus" - options:NSKeyValueObservingOptionNew - context:nil]; - } - [_player addObserver:self - forKeyPath:@"currentItem" - options:NSKeyValueObservingOptionNew - context:nil]; - // TODO: learn about the different ways to define weakSelf. 
- //__weak __typeof__(self) weakSelf = self; - //typeof(self) __weak weakSelf = self; - __unsafe_unretained typeof(self) weakSelf = self; - if (@available(macOS 10.12, iOS 10.0, *)) {} - else { - _timeObserver = [_player addPeriodicTimeObserverForInterval:CMTimeMake(200, 1000) - queue:nil - usingBlock:^(CMTime time) { - [weakSelf checkForDiscontinuity]; - } - ]; - } - } - // Initialise the AVQueuePlayer with items. - [self enqueueFrom:0]; - // Notify each IndexedAudioSource that it's been attached to the player. - for (int i = 0; i < [_indexedAudioSources count]; i++) { - [_indexedAudioSources[i] attach:_player]; - } - - if (_player.currentItem.status == AVPlayerItemStatusReadyToPlay) { - _loadResult(@([self getDuration])); - _loadResult = nil; - } else { - // We send result after the playerItem is ready in observeValueForKeyPath. - } - [self broadcastPlaybackEvent]; -} - -- (void)updateOrder { - if (_shuffleModeEnabled) { - [_audioSource shuffle:0 currentIndex: _index]; - } - _orderInv = [NSMutableArray arrayWithCapacity:[_indexedAudioSources count]]; - for (int i = 0; i < [_indexedAudioSources count]; i++) { - [_orderInv addObject:@(0)]; - } - if (_shuffleModeEnabled) { - _order = [_audioSource getShuffleOrder]; - } else { - NSMutableArray *order = [[NSMutableArray alloc] init]; - for (int i = 0; i < [_indexedAudioSources count]; i++) { - [order addObject:@(i)]; - } - _order = order; - } - for (int i = 0; i < [_indexedAudioSources count]; i++) { - _orderInv[[_order[i] intValue]] = @(i); - } -} - -- (void)onItemStalled:(NSNotification *)notification { - IndexedPlayerItem *playerItem = (IndexedPlayerItem *)notification.object; - NSLog(@"onItemStalled"); -} - -- (void)onFailToComplete:(NSNotification *)notification { - IndexedPlayerItem *playerItem = (IndexedPlayerItem *)notification.object; - NSLog(@"onFailToComplete"); -} - -- (void)onComplete:(NSNotification *)notification { - NSLog(@"onComplete"); - if (_loopMode == loopOne) { - [self seek:kCMTimeZero 
index:@(_index) completionHandler:^(BOOL finished) { - // XXX: Not necessary? - [self play]; - }]; - } else { - IndexedPlayerItem *endedPlayerItem = (IndexedPlayerItem *)notification.object; - IndexedAudioSource *endedSource = endedPlayerItem.audioSource; - // When an item ends, seek back to its beginning. - [endedSource seek:kCMTimeZero]; - - if ([_orderInv[_index] intValue] + 1 < [_order count]) { - // account for automatic move to next item - _index = [_order[[_orderInv[_index] intValue] + 1] intValue]; - NSLog(@"advance to next: index = %d", _index); - [self broadcastPlaybackEvent]; - } else { - // reached end of playlist - if (_loopMode == loopAll) { - NSLog(@"Loop back to first item"); - // Loop back to the beginning - // TODO: Currently there will be a gap at the loop point. - // Maybe we can do something clever by temporarily adding the - // first playlist item at the end of the queue, although this - // will affect any code that assumes the queue always - // corresponds to a contiguous region of the indexed audio - // sources. - // For now we just do a seek back to the start. - if ([_order count] == 1) { - [self seek:kCMTimeZero index:[NSNull null] completionHandler:^(BOOL finished) { - // XXX: Necessary? - [self play]; - }]; - } else { - [self seek:kCMTimeZero index:_order[0] completionHandler:^(BOOL finished) { - // XXX: Necessary? 
- [self play]; - }]; - } - } else { - [self complete]; - } - } - } -} - -- (void)observeValueForKeyPath:(NSString *)keyPath - ofObject:(id)object - change:(NSDictionary *)change - context:(void *)context { - - if ([keyPath isEqualToString:@"status"]) { - IndexedPlayerItem *playerItem = (IndexedPlayerItem *)object; - AVPlayerItemStatus status = AVPlayerItemStatusUnknown; - NSNumber *statusNumber = change[NSKeyValueChangeNewKey]; - if ([statusNumber isKindOfClass:[NSNumber class]]) { - status = statusNumber.intValue; - } - switch (status) { - case AVPlayerItemStatusReadyToPlay: { - if (playerItem != _player.currentItem) return; - // Detect buffering in different ways depending on whether we're playing - if (_playing) { - if (@available(macOS 10.12, iOS 10.0, *)) { - if (_player.timeControlStatus == AVPlayerTimeControlStatusWaitingToPlayAtSpecifiedRate) { - [self enterBuffering:@"ready to play: playing, waitingToPlay"]; - } else { - [self leaveBuffering:@"ready to play: playing, !waitingToPlay"]; - } - [self updatePosition]; - } else { - // If this happens when we're playing, check whether buffer is confirmed - if (_bufferUnconfirmed && !_player.currentItem.playbackBufferFull) { - // Stay in bufering - XXX Test - [self enterBuffering:@"ready to play: playing, bufferUnconfirmed && !playbackBufferFull"]; - } else { - if (_player.currentItem.playbackBufferEmpty) { - // !_player.currentItem.playbackLikelyToKeepUp; - [self enterBuffering:@"ready to play: playing, playbackBufferEmpty"]; - } else { - [self leaveBuffering:@"ready to play: playing, !playbackBufferEmpty"]; - } - [self updatePosition]; - } - } - } else { - if (_player.currentItem.playbackBufferEmpty) { - [self enterBuffering:@"ready to play: !playing, playbackBufferEmpty"]; - // || !_player.currentItem.playbackLikelyToKeepUp; - } else { - [self leaveBuffering:@"ready to play: !playing, !playbackBufferEmpty"]; - } - [self updatePosition]; - } - [self broadcastPlaybackEvent]; - if (_loadResult) { - 
_loadResult(@([self getDuration])); - _loadResult = nil; - } - break; - } - case AVPlayerItemStatusFailed: { - NSLog(@"AVPlayerItemStatusFailed"); - [self sendErrorForItem:playerItem]; - break; - } - case AVPlayerItemStatusUnknown: - break; - } - } else if ([keyPath isEqualToString:@"playbackBufferEmpty"] || [keyPath isEqualToString:@"playbackBufferFull"]) { - // Use these values to detect buffering. - IndexedPlayerItem *playerItem = (IndexedPlayerItem *)object; - if (playerItem != _player.currentItem) return; - // If there's a seek in progress, these values are unreliable - if (CMTIME_IS_VALID(_seekPos)) return; - // Detect buffering in different ways depending on whether we're playing - if (_playing) { - if (@available(macOS 10.12, iOS 10.0, *)) { - // We handle this with timeControlStatus instead. - } else { - if (_bufferUnconfirmed && playerItem.playbackBufferFull) { - _bufferUnconfirmed = NO; - [self leaveBuffering:@"playing, _bufferUnconfirmed && playbackBufferFull"]; - [self updatePosition]; - NSLog(@"Buffering confirmed! 
leaving buffering"); - [self broadcastPlaybackEvent]; - } - } - } else { - if (playerItem.playbackBufferEmpty) { - [self enterBuffering:@"!playing, playbackBufferEmpty"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - } else if (!playerItem.playbackBufferEmpty || playerItem.playbackBufferFull) { - _processingState = ready; - [self leaveBuffering:@"!playing, !playbackBufferEmpty || playbackBufferFull"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - } - } - /* } else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"]) { */ - } else if ([keyPath isEqualToString:@"timeControlStatus"]) { - if (@available(macOS 10.12, iOS 10.0, *)) { - AVPlayerTimeControlStatus status = AVPlayerTimeControlStatusPaused; - NSNumber *statusNumber = change[NSKeyValueChangeNewKey]; - if ([statusNumber isKindOfClass:[NSNumber class]]) { - status = statusNumber.intValue; - } - switch (status) { - case AVPlayerTimeControlStatusPaused: - //NSLog(@"AVPlayerTimeControlStatusPaused"); - break; - case AVPlayerTimeControlStatusWaitingToPlayAtSpecifiedRate: - //NSLog(@"AVPlayerTimeControlStatusWaitingToPlayAtSpecifiedRate"); - if (_processingState != completed) { - [self enterBuffering:@"timeControlStatus"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - } else { - NSLog(@"Ignoring wait signal because we reached the end"); - } - break; - case AVPlayerTimeControlStatusPlaying: - [self leaveBuffering:@"timeControlStatus"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - break; - } - } - } else if ([keyPath isEqualToString:@"currentItem"] && _player.currentItem) { - if (_player.currentItem.status == AVPlayerItemStatusFailed) { - if ([_orderInv[_index] intValue] + 1 < [_order count]) { - // account for automatic move to next item - _index = [_order[[_orderInv[_index] intValue] + 1] intValue]; - NSLog(@"advance to next on error: index = %d", _index); - [self broadcastPlaybackEvent]; - } else { - NSLog(@"error on last item"); - } - return; - } else { - 
int expectedIndex = [self indexForItem:_player.currentItem]; - if (_index != expectedIndex) { - // AVQueuePlayer will sometimes skip over error items without - // notifying this observer. - NSLog(@"Queue change detected. Adjusting index from %d -> %d", _index, expectedIndex); - _index = expectedIndex; - [self broadcastPlaybackEvent]; - } - } - //NSLog(@"currentItem changed. _index=%d", _index); - _bufferUnconfirmed = YES; - // If we've skipped or transitioned to a new item and we're not - // currently in the middle of a seek - if (CMTIME_IS_INVALID(_seekPos) && _player.currentItem.status == AVPlayerItemStatusReadyToPlay) { - [self updatePosition]; - IndexedAudioSource *source = ((IndexedPlayerItem *)_player.currentItem).audioSource; - // We should already be at position zero but for - // ClippingAudioSource it might be off by some milliseconds so we - // consider anything <= 100 as close enough. - if ((int)(1000 * CMTimeGetSeconds(source.position)) > 100) { - NSLog(@"On currentItem change, seeking back to zero"); - BOOL shouldResumePlayback = NO; - AVPlayerActionAtItemEnd originalEndAction = _player.actionAtItemEnd; - if (_playing && CMTimeGetSeconds(CMTimeSubtract(source.position, source.duration)) >= 0) { - NSLog(@"Need to pause while rewinding because we're at the end"); - shouldResumePlayback = YES; - _player.actionAtItemEnd = AVPlayerActionAtItemEndPause; - [_player pause]; - } - [self enterBuffering:@"currentItem changed, seeking"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - [source seek:kCMTimeZero completionHandler:^(BOOL finished) { - [self leaveBuffering:@"currentItem changed, finished seek"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - if (shouldResumePlayback) { - _player.actionAtItemEnd = originalEndAction; - // TODO: This logic is almost duplicated in seek. See if we can reuse this code. - [_player play]; - } - }]; - } else { - // Already at zero, no need to seek. 
- } - } - } else if ([keyPath isEqualToString:@"loadedTimeRanges"]) { - IndexedPlayerItem *playerItem = (IndexedPlayerItem *)object; - if (playerItem != _player.currentItem) return; - int pos = [self getBufferedPosition]; - if (pos != _bufferedPosition) { - _bufferedPosition = pos; - [self broadcastPlaybackEvent]; - } - } -} - -- (void)sendErrorForItem:(IndexedPlayerItem *)playerItem { - FlutterError *flutterError = [FlutterError errorWithCode:[NSString stringWithFormat:@"%d", playerItem.error.code] - message:playerItem.error.localizedDescription - details:nil]; - [self sendError:flutterError playerItem:playerItem]; -} - -- (void)sendError:(FlutterError *)flutterError playerItem:(IndexedPlayerItem *)playerItem { - NSLog(@"sendError"); - if (_loadResult && playerItem == _player.currentItem) { - _loadResult(flutterError); - _loadResult = nil; - } - if (_eventSink) { - // Broadcast all errors even if they aren't on the current item. - _eventSink(flutterError); - } -} - -- (void)abortExistingConnection { - FlutterError *flutterError = [FlutterError errorWithCode:@"abort" - message:@"Connection aborted" - details:nil]; - [self sendError:flutterError playerItem:nil]; -} - -- (int)indexForItem:(IndexedPlayerItem *)playerItem { - for (int i = 0; i < _indexedAudioSources.count; i++) { - if (_indexedAudioSources[i].playerItem == playerItem) { - return i; - } - } - return -1; -} - -- (void)play { - [self play:nil]; -} - -- (void)play:(FlutterResult)result { - if (result) { - if (_playResult) { - NSLog(@"INTERRUPTING PLAY"); - _playResult(nil); - } - _playResult = result; - } - _playing = YES; -#if TARGET_OS_IPHONE - if (_configuredSession) { - [[AVAudioSession sharedInstance] setActive:YES error:nil]; - } -#endif - [_player play]; - [self updatePosition]; - if (@available(macOS 10.12, iOS 10.0, *)) {} - else { - if (_bufferUnconfirmed && !_player.currentItem.playbackBufferFull) { - [self enterBuffering:@"play, _bufferUnconfirmed && !playbackBufferFull"]; - [self 
broadcastPlaybackEvent]; - } - } -} - -- (void)pause { - _playing = NO; - [_player pause]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - if (_playResult) { - NSLog(@"PLAY FINISHED DUE TO PAUSE"); - _playResult(nil); - _playResult = nil; - } -} - -- (void)complete { - [self updatePosition]; - _processingState = completed; - [self broadcastPlaybackEvent]; - if (_playResult) { - NSLog(@"PLAY FINISHED DUE TO COMPLETE"); - _playResult(nil); - _playResult = nil; - } -} - -- (void)setVolume:(float)volume { - [_player setVolume:volume]; -} - -- (void)setSpeed:(float)speed { - if (speed == 1.0 - || (speed < 1.0 && _player.currentItem.canPlaySlowForward) - || (speed > 1.0 && _player.currentItem.canPlayFastForward)) { - _player.rate = speed; - } - [self updatePosition]; -} - -- (void)setLoopMode:(int)loopMode { - _loopMode = loopMode; - if (_player) { - switch (_loopMode) { - case loopOne: - _player.actionAtItemEnd = AVPlayerActionAtItemEndPause; // AVPlayerActionAtItemEndNone - break; - default: - _player.actionAtItemEnd = AVPlayerActionAtItemEndAdvance; - } - } -} - -- (void)setShuffleModeEnabled:(BOOL)shuffleModeEnabled { - NSLog(@"setShuffleModeEnabled: %d", shuffleModeEnabled); - _shuffleModeEnabled = shuffleModeEnabled; - if (!_audioSource) return; - - [self updateOrder]; - - [self enqueueFrom:_index]; -} - -- (void)dumpQueue { - for (int i = 0; i < _player.items.count; i++) { - IndexedPlayerItem *playerItem = _player.items[i]; - for (int j = 0; j < _indexedAudioSources.count; j++) { - IndexedAudioSource *source = _indexedAudioSources[j]; - if (source.playerItem == playerItem) { - NSLog(@"- %d", j); - break; - } - } - } -} - -- (void)setAutomaticallyWaitsToMinimizeStalling:(bool)automaticallyWaitsToMinimizeStalling { - _automaticallyWaitsToMinimizeStalling = automaticallyWaitsToMinimizeStalling; - if (@available(macOS 10.12, iOS 10.0, *)) { - if(_player) { - _player.automaticallyWaitsToMinimizeStalling = automaticallyWaitsToMinimizeStalling; - } - } -} - 
-- (void)seek:(CMTime)position index:(NSNumber *)newIndex completionHandler:(void (^)(BOOL))completionHandler { - int index = _index; - if (newIndex != [NSNull null]) { - index = [newIndex intValue]; - } - if (index != _index) { - // Jump to a new item - /* if (_playing && index == _index + 1) { */ - /* // Special case for jumping to the very next item */ - /* NSLog(@"seek to next item: %d -> %d", _index, index); */ - /* [_indexedAudioSources[_index] seek:kCMTimeZero]; */ - /* _index = index; */ - /* [_player advanceToNextItem]; */ - /* [self broadcastPlaybackEvent]; */ - /* } else */ - { - // Jump to a distant item - //NSLog(@"seek# jump to distant item: %d -> %d", _index, index); - if (_playing) { - [_player pause]; - } - [_indexedAudioSources[_index] seek:kCMTimeZero]; - // The "currentItem" key observer will respect that a seek is already in progress - _seekPos = position; - [self updatePosition]; - [self enqueueFrom:index]; - IndexedAudioSource *source = _indexedAudioSources[_index]; - if (abs((int)(1000 * CMTimeGetSeconds(CMTimeSubtract(source.position, position)))) > 100) { - [self enterBuffering:@"seek to index"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - [source seek:position completionHandler:^(BOOL finished) { - if (@available(macOS 10.12, iOS 10.0, *)) { - if (_playing) { - // Handled by timeControlStatus - } else { - if (_bufferUnconfirmed && !_player.currentItem.playbackBufferFull) { - // Stay in buffering - } else if (source.playerItem.status == AVPlayerItemStatusReadyToPlay) { - [self leaveBuffering:@"seek to index finished, (!bufferUnconfirmed || playbackBufferFull) && ready to play"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - } - } - } else { - if (_bufferUnconfirmed && !_player.currentItem.playbackBufferFull) { - // Stay in buffering - } else if (source.playerItem.status == AVPlayerItemStatusReadyToPlay) { - [self leaveBuffering:@"seek to index finished, (!bufferUnconfirmed || playbackBufferFull) && ready to 
play"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - } - } - if (_playing) { - [_player play]; - } - _seekPos = kCMTimeInvalid; - [self broadcastPlaybackEvent]; - if (completionHandler) { - completionHandler(finished); - } - }]; - } else { - _seekPos = kCMTimeInvalid; - if (_playing) { - [_player play]; - } - } - } - } else { - // Seek within an item - if (_playing) { - [_player pause]; - } - _seekPos = position; - //NSLog(@"seek. enter buffering. pos = %d", (int)(1000*CMTimeGetSeconds(_indexedAudioSources[_index].position))); - // TODO: Move this into a separate method so it can also - // be used in skip. - [self enterBuffering:@"seek"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - [_indexedAudioSources[_index] seek:position completionHandler:^(BOOL finished) { - [self updatePosition]; - if (_playing) { - // If playing, buffering will be detected either by: - // 1. checkForDiscontinuity - // 2. timeControlStatus - [_player play]; - } else { - // If not playing, there is no reliable way to detect - // when buffering has completed, so we use - // !playbackBufferEmpty. Although this always seems to - // be full even right after a seek. 
- if (_player.currentItem.playbackBufferEmpty) { - [self enterBuffering:@"seek finished, playbackBufferEmpty"]; - } else { - [self leaveBuffering:@"seek finished, !playbackBufferEmpty"]; - } - [self updatePosition]; - if (_processingState != buffering) { - [self broadcastPlaybackEvent]; - } - } - _seekPos = kCMTimeInvalid; - [self broadcastPlaybackEvent]; - if (completionHandler) { - completionHandler(finished); - } - }]; - } -} - -- (void)dispose { - if (_processingState != none) { - [_player pause]; - _processingState = none; - [self broadcastPlaybackEvent]; - } - if (_timeObserver) { - [_player removeTimeObserver:_timeObserver]; - _timeObserver = 0; - } - if (_indexedAudioSources) { - for (int i = 0; i < [_indexedAudioSources count]; i++) { - [self removeItemObservers:_indexedAudioSources[i].playerItem]; - } - } - if (_player) { - [_player removeObserver:self forKeyPath:@"currentItem"]; - if (@available(macOS 10.12, iOS 10.0, *)) { - [_player removeObserver:self forKeyPath:@"timeControlStatus"]; - } - _player = nil; - } - // Untested: - // [_eventChannel setStreamHandler:nil]; - // [_methodChannel setMethodHandler:nil]; -} - -@end diff --git a/just_audio/macos/Classes/AudioSource.h b/just_audio/macos/Classes/AudioSource.h deleted file mode 100644 index 3dd1bf5..0000000 --- a/just_audio/macos/Classes/AudioSource.h +++ /dev/null @@ -1,13 +0,0 @@ -#import - -@interface AudioSource : NSObject - -@property (readonly, nonatomic) NSString* sourceId; - -- (instancetype)initWithId:(NSString *)sid; -- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex; -- (void)findById:(NSString *)sourceId matches:(NSMutableArray *)matches; -- (NSArray *)getShuffleOrder; -- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex; - -@end diff --git a/just_audio/macos/Classes/AudioSource.m b/just_audio/macos/Classes/AudioSource.m deleted file mode 100644 index 81534f1..0000000 --- a/just_audio/macos/Classes/AudioSource.m +++ /dev/null @@ -1,37 +0,0 @@ -#import 
"AudioSource.h" -#import - -@implementation AudioSource { - NSString *_sourceId; -} - -- (instancetype)initWithId:(NSString *)sid { - self = [super init]; - NSAssert(self, @"super init cannot be nil"); - _sourceId = sid; - return self; -} - -- (NSString *)sourceId { - return _sourceId; -} - -- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex { - return 0; -} - -- (void)findById:(NSString *)sourceId matches:(NSMutableArray *)matches { - if ([_sourceId isEqualToString:sourceId]) { - [matches addObject:self]; - } -} - -- (NSArray *)getShuffleOrder { - return @[]; -} - -- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex { - return 0; -} - -@end diff --git a/just_audio/macos/Classes/ClippingAudioSource.h b/just_audio/macos/Classes/ClippingAudioSource.h deleted file mode 100644 index 8122e3a..0000000 --- a/just_audio/macos/Classes/ClippingAudioSource.h +++ /dev/null @@ -1,11 +0,0 @@ -#import "AudioSource.h" -#import "UriAudioSource.h" -#import - -@interface ClippingAudioSource : IndexedAudioSource - -@property (readonly, nonatomic) UriAudioSource* audioSource; - -- (instancetype)initWithId:(NSString *)sid audioSource:(UriAudioSource *)audioSource start:(NSNumber *)start end:(NSNumber *)end; - -@end diff --git a/just_audio/macos/Classes/ClippingAudioSource.m b/just_audio/macos/Classes/ClippingAudioSource.m deleted file mode 100644 index 2f3b174..0000000 --- a/just_audio/macos/Classes/ClippingAudioSource.m +++ /dev/null @@ -1,79 +0,0 @@ -#import "AudioSource.h" -#import "ClippingAudioSource.h" -#import "IndexedPlayerItem.h" -#import "UriAudioSource.h" -#import - -@implementation ClippingAudioSource { - UriAudioSource *_audioSource; - CMTime _start; - CMTime _end; -} - -- (instancetype)initWithId:(NSString *)sid audioSource:(UriAudioSource *)audioSource start:(NSNumber *)start end:(NSNumber *)end { - self = [super initWithId:sid]; - NSAssert(self, @"super init cannot be nil"); - _audioSource = audioSource; - _start = start == [NSNull null] ? 
kCMTimeZero : CMTimeMake([start intValue], 1000); - _end = end == [NSNull null] ? kCMTimeInvalid : CMTimeMake([end intValue], 1000); - return self; -} - -- (UriAudioSource *)audioSource { - return _audioSource; -} - -- (void)findById:(NSString *)sourceId matches:(NSMutableArray *)matches { - [super findById:sourceId matches:matches]; - [_audioSource findById:sourceId matches:matches]; -} - -- (void)attach:(AVQueuePlayer *)player { - [super attach:player]; - _audioSource.playerItem.forwardPlaybackEndTime = _end; - // XXX: Not needed since currentItem observer handles it? - [self seek:kCMTimeZero]; -} - -- (IndexedPlayerItem *)playerItem { - return _audioSource.playerItem; -} - -- (NSArray *)getShuffleOrder { - return @[@(0)]; -} - -- (void)play:(AVQueuePlayer *)player { -} - -- (void)pause:(AVQueuePlayer *)player { -} - -- (void)stop:(AVQueuePlayer *)player { -} - -- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler { - if (!completionHandler || (self.playerItem.status == AVPlayerItemStatusReadyToPlay)) { - CMTime absPosition = CMTimeAdd(_start, position); - [_audioSource.playerItem seekToTime:absPosition toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero completionHandler:completionHandler]; - } -} - -- (CMTime)duration { - return CMTimeSubtract(CMTIME_IS_INVALID(_end) ? self.playerItem.duration : _end, _start); -} - -- (void)setDuration:(CMTime)duration { -} - -- (CMTime)position { - return CMTimeSubtract(self.playerItem.currentTime, _start); -} - -- (CMTime)bufferedPosition { - CMTime pos = CMTimeSubtract(_audioSource.bufferedPosition, _start); - CMTime dur = [self duration]; - return CMTimeCompare(pos, dur) >= 0 ? 
dur : pos; -} - -@end diff --git a/just_audio/macos/Classes/ConcatenatingAudioSource.h b/just_audio/macos/Classes/ConcatenatingAudioSource.h deleted file mode 100644 index 68455af..0000000 --- a/just_audio/macos/Classes/ConcatenatingAudioSource.h +++ /dev/null @@ -1,13 +0,0 @@ -#import "AudioSource.h" -#import - -@interface ConcatenatingAudioSource : AudioSource - -@property (readonly, nonatomic) int count; - -- (instancetype)initWithId:(NSString *)sid audioSources:(NSMutableArray *)audioSources; -- (void)insertSource:(AudioSource *)audioSource atIndex:(int)index; -- (void)removeSourcesFromIndex:(int)start toIndex:(int)end; -- (void)moveSourceFromIndex:(int)currentIndex toIndex:(int)newIndex; - -@end diff --git a/just_audio/macos/Classes/ConcatenatingAudioSource.m b/just_audio/macos/Classes/ConcatenatingAudioSource.m deleted file mode 100644 index bd7b713..0000000 --- a/just_audio/macos/Classes/ConcatenatingAudioSource.m +++ /dev/null @@ -1,109 +0,0 @@ -#import "AudioSource.h" -#import "ConcatenatingAudioSource.h" -#import -#import - -@implementation ConcatenatingAudioSource { - NSMutableArray *_audioSources; - NSMutableArray *_shuffleOrder; -} - -- (instancetype)initWithId:(NSString *)sid audioSources:(NSMutableArray *)audioSources { - self = [super initWithId:sid]; - NSAssert(self, @"super init cannot be nil"); - _audioSources = audioSources; - return self; -} - -- (int)count { - return _audioSources.count; -} - -- (void)insertSource:(AudioSource *)audioSource atIndex:(int)index { - [_audioSources insertObject:audioSource atIndex:index]; -} - -- (void)removeSourcesFromIndex:(int)start toIndex:(int)end { - if (end == -1) end = _audioSources.count; - for (int i = start; i < end; i++) { - [_audioSources removeObjectAtIndex:start]; - } -} - -- (void)moveSourceFromIndex:(int)currentIndex toIndex:(int)newIndex { - AudioSource *source = _audioSources[currentIndex]; - [_audioSources removeObjectAtIndex:currentIndex]; - [_audioSources insertObject:source 
atIndex:newIndex]; -} - -- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex { - for (int i = 0; i < [_audioSources count]; i++) { - treeIndex = [_audioSources[i] buildSequence:sequence treeIndex:treeIndex]; - } - return treeIndex; -} - -- (void)findById:(NSString *)sourceId matches:(NSMutableArray *)matches { - [super findById:sourceId matches:matches]; - for (int i = 0; i < [_audioSources count]; i++) { - [_audioSources[i] findById:sourceId matches:matches]; - } -} - -- (NSArray *)getShuffleOrder { - NSMutableArray *order = [NSMutableArray new]; - int offset = [order count]; - NSMutableArray *childOrders = [NSMutableArray new]; // array of array of ints - for (int i = 0; i < [_audioSources count]; i++) { - AudioSource *audioSource = _audioSources[i]; - NSArray *childShuffleOrder = [audioSource getShuffleOrder]; - NSMutableArray *offsetChildShuffleOrder = [NSMutableArray new]; - for (int j = 0; j < [childShuffleOrder count]; j++) { - [offsetChildShuffleOrder addObject:@([childShuffleOrder[j] integerValue] + offset)]; - } - [childOrders addObject:offsetChildShuffleOrder]; - offset += [childShuffleOrder count]; - } - for (int i = 0; i < [_audioSources count]; i++) { - [order addObjectsFromArray:childOrders[[_shuffleOrder[i] integerValue]]]; - } - return order; -} - -- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex { - int currentChildIndex = -1; - for (int i = 0; i < [_audioSources count]; i++) { - int indexBefore = treeIndex; - AudioSource *child = _audioSources[i]; - treeIndex = [child shuffle:treeIndex currentIndex:currentIndex]; - if (currentIndex >= indexBefore && currentIndex < treeIndex) { - currentChildIndex = i; - } else {} - } - // Shuffle so that the current child is first in the shuffle order - _shuffleOrder = [NSMutableArray arrayWithCapacity:[_audioSources count]]; - for (int i = 0; i < [_audioSources count]; i++) { - [_shuffleOrder addObject:@(0)]; - } - NSLog(@"shuffle: audioSources.count=%d and 
shuffleOrder.count=%d", [_audioSources count], [_shuffleOrder count]); - // First generate a random shuffle - for (int i = 0; i < [_audioSources count]; i++) { - int j = arc4random_uniform(i + 1); - _shuffleOrder[i] = _shuffleOrder[j]; - _shuffleOrder[j] = @(i); - } - // Then bring currentIndex to the front - if (currentChildIndex != -1) { - for (int i = 1; i < [_audioSources count]; i++) { - if ([_shuffleOrder[i] integerValue] == currentChildIndex) { - NSNumber *v = _shuffleOrder[0]; - _shuffleOrder[0] = _shuffleOrder[i]; - _shuffleOrder[i] = v; - break; - } - } - } - return treeIndex; -} - -@end diff --git a/just_audio/macos/Classes/IndexedAudioSource.h b/just_audio/macos/Classes/IndexedAudioSource.h deleted file mode 100644 index 7d343d8..0000000 --- a/just_audio/macos/Classes/IndexedAudioSource.h +++ /dev/null @@ -1,21 +0,0 @@ -#import "AudioSource.h" -#import "IndexedPlayerItem.h" -#import -#import - -@interface IndexedAudioSource : AudioSource - -@property (readonly, nonatomic) IndexedPlayerItem *playerItem; -@property (readwrite, nonatomic) CMTime duration; -@property (readonly, nonatomic) CMTime position; -@property (readonly, nonatomic) CMTime bufferedPosition; -@property (readonly, nonatomic) BOOL isAttached; - -- (void)attach:(AVQueuePlayer *)player; -- (void)play:(AVQueuePlayer *)player; -- (void)pause:(AVQueuePlayer *)player; -- (void)stop:(AVQueuePlayer *)player; -- (void)seek:(CMTime)position; -- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler; - -@end diff --git a/just_audio/macos/Classes/IndexedAudioSource.m b/just_audio/macos/Classes/IndexedAudioSource.m deleted file mode 100644 index 316f900..0000000 --- a/just_audio/macos/Classes/IndexedAudioSource.m +++ /dev/null @@ -1,68 +0,0 @@ -#import "IndexedAudioSource.h" -#import "IndexedPlayerItem.h" -#import - -@implementation IndexedAudioSource { - BOOL _isAttached; -} - -- (instancetype)initWithId:(NSString *)sid { - self = [super init]; - NSAssert(self, @"super init 
cannot be nil"); - _isAttached = NO; - return self; -} - -- (IndexedPlayerItem *)playerItem { - return nil; -} - -- (BOOL)isAttached { - return _isAttached; -} - -- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex { - [sequence addObject:self]; - return treeIndex + 1; -} - -- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex { - return treeIndex + 1; -} - -- (void)attach:(AVQueuePlayer *)player { - _isAttached = YES; -} - -- (void)play:(AVQueuePlayer *)player { -} - -- (void)pause:(AVQueuePlayer *)player { -} - -- (void)stop:(AVQueuePlayer *)player { -} - -- (void)seek:(CMTime)position { - [self seek:position completionHandler:nil]; -} - -- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler { -} - -- (CMTime)duration { - return kCMTimeInvalid; -} - -- (void)setDuration:(CMTime)duration { -} - -- (CMTime)position { - return kCMTimeInvalid; -} - -- (CMTime)bufferedPosition { - return kCMTimeInvalid; -} - -@end diff --git a/just_audio/macos/Classes/IndexedPlayerItem.h b/just_audio/macos/Classes/IndexedPlayerItem.h deleted file mode 100644 index 5d4a11c..0000000 --- a/just_audio/macos/Classes/IndexedPlayerItem.h +++ /dev/null @@ -1,9 +0,0 @@ -#import - -@class IndexedAudioSource; - -@interface IndexedPlayerItem : AVPlayerItem - -@property (readwrite, nonatomic) IndexedAudioSource *audioSource; - -@end diff --git a/just_audio/macos/Classes/IndexedPlayerItem.m b/just_audio/macos/Classes/IndexedPlayerItem.m deleted file mode 100644 index 87fafe0..0000000 --- a/just_audio/macos/Classes/IndexedPlayerItem.m +++ /dev/null @@ -1,16 +0,0 @@ -#import "IndexedPlayerItem.h" -#import "IndexedAudioSource.h" - -@implementation IndexedPlayerItem { - IndexedAudioSource *_audioSource; -} - --(void)setAudioSource:(IndexedAudioSource *)audioSource { - _audioSource = audioSource; -} - --(IndexedAudioSource *)audioSource { - return _audioSource; -} - -@end diff --git a/just_audio/macos/Classes/JustAudioPlugin.h 
b/just_audio/macos/Classes/JustAudioPlugin.h deleted file mode 100644 index 3f4068d..0000000 --- a/just_audio/macos/Classes/JustAudioPlugin.h +++ /dev/null @@ -1,4 +0,0 @@ -#import - -@interface JustAudioPlugin : NSObject -@end diff --git a/just_audio/macos/Classes/JustAudioPlugin.m b/just_audio/macos/Classes/JustAudioPlugin.m deleted file mode 100644 index 982a260..0000000 --- a/just_audio/macos/Classes/JustAudioPlugin.m +++ /dev/null @@ -1,55 +0,0 @@ -#import "JustAudioPlugin.h" -#import "AudioPlayer.h" -#import -#include - -@implementation JustAudioPlugin { - NSObject* _registrar; - BOOL _configuredSession; -} - -+ (void)registerWithRegistrar:(NSObject*)registrar { - FlutterMethodChannel* channel = [FlutterMethodChannel - methodChannelWithName:@"com.ryanheise.just_audio.methods" - binaryMessenger:[registrar messenger]]; - JustAudioPlugin* instance = [[JustAudioPlugin alloc] initWithRegistrar:registrar]; - [registrar addMethodCallDelegate:instance channel:channel]; -} - -- (instancetype)initWithRegistrar:(NSObject *)registrar { - self = [super init]; - NSAssert(self, @"super init cannot be nil"); - _registrar = registrar; - return self; -} - -- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result { - if ([@"init" isEqualToString:call.method]) { - NSArray* args = (NSArray*)call.arguments; - NSString* playerId = args[0]; - /*AudioPlayer* player =*/ [[AudioPlayer alloc] initWithRegistrar:_registrar playerId:playerId configuredSession:_configuredSession]; - result(nil); - } else if ([@"setIosCategory" isEqualToString:call.method]) { -#if TARGET_OS_IPHONE - NSNumber* categoryIndex = (NSNumber*)call.arguments; - AVAudioSessionCategory category = nil; - switch (categoryIndex.integerValue) { - case 0: category = AVAudioSessionCategoryAmbient; break; - case 1: category = AVAudioSessionCategorySoloAmbient; break; - case 2: category = AVAudioSessionCategoryPlayback; break; - case 3: category = AVAudioSessionCategoryRecord; break; - case 4: category = 
AVAudioSessionCategoryPlayAndRecord; break; - case 5: category = AVAudioSessionCategoryMultiRoute; break; - } - if (category) { - _configuredSession = YES; - } - [[AVAudioSession sharedInstance] setCategory:category error:nil]; -#endif - result(nil); - } else { - result(FlutterMethodNotImplemented); - } -} - -@end diff --git a/just_audio/macos/Classes/LoopingAudioSource.h b/just_audio/macos/Classes/LoopingAudioSource.h deleted file mode 100644 index a77636b..0000000 --- a/just_audio/macos/Classes/LoopingAudioSource.h +++ /dev/null @@ -1,8 +0,0 @@ -#import "AudioSource.h" -#import - -@interface LoopingAudioSource : AudioSource - -- (instancetype)initWithId:(NSString *)sid audioSources:(NSArray *)audioSources; - -@end diff --git a/just_audio/macos/Classes/LoopingAudioSource.m b/just_audio/macos/Classes/LoopingAudioSource.m deleted file mode 100644 index ba4b52b..0000000 --- a/just_audio/macos/Classes/LoopingAudioSource.m +++ /dev/null @@ -1,53 +0,0 @@ -#import "AudioSource.h" -#import "LoopingAudioSource.h" -#import - -@implementation LoopingAudioSource { - // An array of duplicates - NSArray *_audioSources; // -} - -- (instancetype)initWithId:(NSString *)sid audioSources:(NSArray *)audioSources { - self = [super initWithId:sid]; - NSAssert(self, @"super init cannot be nil"); - _audioSources = audioSources; - return self; -} - -- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex { - for (int i = 0; i < [_audioSources count]; i++) { - treeIndex = [_audioSources[i] buildSequence:sequence treeIndex:treeIndex]; - } - return treeIndex; -} - -- (void)findById:(NSString *)sourceId matches:(NSMutableArray *)matches { - [super findById:sourceId matches:matches]; - for (int i = 0; i < [_audioSources count]; i++) { - [_audioSources[i] findById:sourceId matches:matches]; - } -} - -- (NSArray *)getShuffleOrder { - NSMutableArray *order = [NSMutableArray new]; - int offset = (int)[order count]; - for (int i = 0; i < [_audioSources count]; i++) { - AudioSource 
*audioSource = _audioSources[i]; - NSArray *childShuffleOrder = [audioSource getShuffleOrder]; - for (int j = 0; j < [childShuffleOrder count]; j++) { - [order addObject:@([childShuffleOrder[j] integerValue] + offset)]; - } - offset += [childShuffleOrder count]; - } - return order; -} - -- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex { - // TODO: This should probably shuffle the same way on all duplicates. - for (int i = 0; i < [_audioSources count]; i++) { - treeIndex = [_audioSources[i] shuffle:treeIndex currentIndex:currentIndex]; - } - return treeIndex; -} - -@end diff --git a/just_audio/macos/Classes/UriAudioSource.h b/just_audio/macos/Classes/UriAudioSource.h deleted file mode 100644 index 9b74125..0000000 --- a/just_audio/macos/Classes/UriAudioSource.h +++ /dev/null @@ -1,8 +0,0 @@ -#import "IndexedAudioSource.h" -#import - -@interface UriAudioSource : IndexedAudioSource - -- (instancetype)initWithId:(NSString *)sid uri:(NSString *)uri; - -@end diff --git a/just_audio/macos/Classes/UriAudioSource.m b/just_audio/macos/Classes/UriAudioSource.m deleted file mode 100644 index 91321d4..0000000 --- a/just_audio/macos/Classes/UriAudioSource.m +++ /dev/null @@ -1,79 +0,0 @@ -#import "UriAudioSource.h" -#import "IndexedAudioSource.h" -#import "IndexedPlayerItem.h" -#import - -@implementation UriAudioSource { - NSString *_uri; - IndexedPlayerItem *_playerItem; - /* CMTime _duration; */ -} - -- (instancetype)initWithId:(NSString *)sid uri:(NSString *)uri { - self = [super initWithId:sid]; - NSAssert(self, @"super init cannot be nil"); - _uri = uri; - if ([_uri hasPrefix:@"file://"]) { - _playerItem = [[IndexedPlayerItem alloc] initWithURL:[NSURL fileURLWithPath:[_uri substringFromIndex:7]]]; - } else { - _playerItem = [[IndexedPlayerItem alloc] initWithURL:[NSURL URLWithString:_uri]]; - } - if (@available(macOS 10.13, iOS 11.0, *)) { - // This does the best at reducing distortion on voice with speeds below 1.0 - _playerItem.audioTimePitchAlgorithm = 
AVAudioTimePitchAlgorithmTimeDomain; - } - /* NSKeyValueObservingOptions options = */ - /* NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew; */ - /* [_playerItem addObserver:self */ - /* forKeyPath:@"duration" */ - /* options:options */ - /* context:nil]; */ - return self; -} - -- (IndexedPlayerItem *)playerItem { - return _playerItem; -} - -- (NSArray *)getShuffleOrder { - return @[@(0)]; -} - -- (void)play:(AVQueuePlayer *)player { -} - -- (void)pause:(AVQueuePlayer *)player { -} - -- (void)stop:(AVQueuePlayer *)player { -} - -- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler { - if (!completionHandler || (_playerItem.status == AVPlayerItemStatusReadyToPlay)) { - [_playerItem seekToTime:position toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero completionHandler:completionHandler]; - } -} - -- (CMTime)duration { - return _playerItem.duration; -} - -- (void)setDuration:(CMTime)duration { -} - -- (CMTime)position { - return _playerItem.currentTime; -} - -- (CMTime)bufferedPosition { - NSValue *last = _playerItem.loadedTimeRanges.lastObject; - if (last) { - CMTimeRange timeRange = [last CMTimeRangeValue]; - return CMTimeAdd(timeRange.start, timeRange.duration); - } else { - return _playerItem.currentTime; - } - return kCMTimeInvalid; -} - -@end diff --git a/just_audio/macos/just_audio.podspec b/just_audio/macos/just_audio.podspec deleted file mode 100644 index ff946e8..0000000 --- a/just_audio/macos/just_audio.podspec +++ /dev/null @@ -1,21 +0,0 @@ -# -# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html -# -Pod::Spec.new do |s| - s.name = 'just_audio' - s.version = '0.0.1' - s.summary = 'A new flutter plugin project.' - s.description = <<-DESC -A new flutter plugin project. - DESC - s.homepage = 'http://example.com' - s.license = { :file => '../LICENSE' } - s.author = { 'Your Company' => 'email@example.com' } - s.source = { :path => '.' 
} - s.source_files = 'Classes/**/*' - s.public_header_files = 'Classes/**/*.h' - s.dependency 'FlutterMacOS' - s.platform = :osx, '10.11' - s.pod_target_xcconfig = { 'DEFINES_MODULE' => 'YES' } -end - diff --git a/just_audio/pubspec.lock b/just_audio/pubspec.lock deleted file mode 100644 index 6c8bd0b..0000000 --- a/just_audio/pubspec.lock +++ /dev/null @@ -1,250 +0,0 @@ -# Generated by pub -# See https://dart.dev/tools/pub/glossary#lockfile -packages: - async: - dependency: "direct main" - description: - name: async - url: "https://pub.dartlang.org" - source: hosted - version: "2.4.2" - boolean_selector: - dependency: transitive - description: - name: boolean_selector - url: "https://pub.dartlang.org" - source: hosted - version: "2.0.0" - characters: - dependency: transitive - description: - name: characters - url: "https://pub.dartlang.org" - source: hosted - version: "1.0.0" - charcode: - dependency: transitive - description: - name: charcode - url: "https://pub.dartlang.org" - source: hosted - version: "1.1.3" - clock: - dependency: transitive - description: - name: clock - url: "https://pub.dartlang.org" - source: hosted - version: "1.0.1" - collection: - dependency: transitive - description: - name: collection - url: "https://pub.dartlang.org" - source: hosted - version: "1.14.13" - convert: - dependency: transitive - description: - name: convert - url: "https://pub.dartlang.org" - source: hosted - version: "2.1.1" - crypto: - dependency: transitive - description: - name: crypto - url: "https://pub.dartlang.org" - source: hosted - version: "2.1.4" - fake_async: - dependency: transitive - description: - name: fake_async - url: "https://pub.dartlang.org" - source: hosted - version: "1.1.0" - file: - dependency: transitive - description: - name: file - url: "https://pub.dartlang.org" - source: hosted - version: "5.1.0" - flutter: - dependency: "direct main" - description: flutter - source: sdk - version: "0.0.0" - flutter_test: - dependency: "direct dev" - 
description: flutter - source: sdk - version: "0.0.0" - flutter_web_plugins: - dependency: "direct main" - description: flutter - source: sdk - version: "0.0.0" - intl: - dependency: transitive - description: - name: intl - url: "https://pub.dartlang.org" - source: hosted - version: "0.16.1" - matcher: - dependency: transitive - description: - name: matcher - url: "https://pub.dartlang.org" - source: hosted - version: "0.12.8" - meta: - dependency: transitive - description: - name: meta - url: "https://pub.dartlang.org" - source: hosted - version: "1.1.8" - path: - dependency: "direct main" - description: - name: path - url: "https://pub.dartlang.org" - source: hosted - version: "1.7.0" - path_provider: - dependency: "direct main" - description: - name: path_provider - url: "https://pub.dartlang.org" - source: hosted - version: "1.6.10" - path_provider_linux: - dependency: transitive - description: - name: path_provider_linux - url: "https://pub.dartlang.org" - source: hosted - version: "0.0.1+1" - path_provider_macos: - dependency: transitive - description: - name: path_provider_macos - url: "https://pub.dartlang.org" - source: hosted - version: "0.0.4+3" - path_provider_platform_interface: - dependency: transitive - description: - name: path_provider_platform_interface - url: "https://pub.dartlang.org" - source: hosted - version: "1.0.2" - platform: - dependency: transitive - description: - name: platform - url: "https://pub.dartlang.org" - source: hosted - version: "2.2.1" - plugin_platform_interface: - dependency: transitive - description: - name: plugin_platform_interface - url: "https://pub.dartlang.org" - source: hosted - version: "1.0.2" - process: - dependency: transitive - description: - name: process - url: "https://pub.dartlang.org" - source: hosted - version: "3.0.13" - rxdart: - dependency: "direct main" - description: - name: rxdart - url: "https://pub.dartlang.org" - source: hosted - version: "0.24.1" - sky_engine: - dependency: transitive - 
description: flutter - source: sdk - version: "0.0.99" - source_span: - dependency: transitive - description: - name: source_span - url: "https://pub.dartlang.org" - source: hosted - version: "1.7.0" - stack_trace: - dependency: transitive - description: - name: stack_trace - url: "https://pub.dartlang.org" - source: hosted - version: "1.9.5" - stream_channel: - dependency: transitive - description: - name: stream_channel - url: "https://pub.dartlang.org" - source: hosted - version: "2.0.0" - string_scanner: - dependency: transitive - description: - name: string_scanner - url: "https://pub.dartlang.org" - source: hosted - version: "1.0.5" - term_glyph: - dependency: transitive - description: - name: term_glyph - url: "https://pub.dartlang.org" - source: hosted - version: "1.1.0" - test_api: - dependency: transitive - description: - name: test_api - url: "https://pub.dartlang.org" - source: hosted - version: "0.2.17" - typed_data: - dependency: transitive - description: - name: typed_data - url: "https://pub.dartlang.org" - source: hosted - version: "1.2.0" - uuid: - dependency: "direct main" - description: - name: uuid - url: "https://pub.dartlang.org" - source: hosted - version: "2.2.0" - vector_math: - dependency: transitive - description: - name: vector_math - url: "https://pub.dartlang.org" - source: hosted - version: "2.0.8" - xdg_directories: - dependency: transitive - description: - name: xdg_directories - url: "https://pub.dartlang.org" - source: hosted - version: "0.1.0" -sdks: - dart: ">=2.9.0-14.0.dev <3.0.0" - flutter: ">=1.12.13+hotfix.5 <2.0.0" diff --git a/just_audio/pubspec.yaml b/just_audio/pubspec.yaml deleted file mode 100644 index 12616f4..0000000 --- a/just_audio/pubspec.yaml +++ /dev/null @@ -1,37 +0,0 @@ -name: just_audio -description: Flutter plugin to play audio from streams, files, assets, DASH/HLS streams and playlists. Works with audio_service to play audio in the background. 
-version: 0.3.1 -homepage: https://github.com/ryanheise/just_audio - -environment: - sdk: '>=2.6.0 <3.0.0' - flutter: ">=1.12.8 <2.0.0" - -dependencies: - rxdart: ^0.24.1 - path: ^1.6.4 - path_provider: ^1.6.10 - async: ^2.4.1 - uuid: ^2.2.0 - flutter: - sdk: flutter - flutter_web_plugins: - sdk: flutter - -dev_dependencies: - flutter_test: - sdk: flutter - -flutter: - plugin: - platforms: - android: - package: com.ryanheise.just_audio - pluginClass: JustAudioPlugin - ios: - pluginClass: JustAudioPlugin - macos: - pluginClass: JustAudioPlugin - web: - pluginClass: JustAudioPlugin - fileName: just_audio_web.dart diff --git a/just_audio/test/just_audio_test.dart b/just_audio/test/just_audio_test.dart deleted file mode 100644 index 14c6a7a..0000000 --- a/just_audio/test/just_audio_test.dart +++ /dev/null @@ -1,21 +0,0 @@ -import 'package:flutter/services.dart'; -import 'package:flutter_test/flutter_test.dart'; -import 'package:just_audio/just_audio.dart'; - -void main() { - const MethodChannel channel = MethodChannel('just_audio'); - - setUp(() { - channel.setMockMethodCallHandler((MethodCall methodCall) async { - return '42'; - }); - }); - - tearDown(() { - channel.setMockMethodCallHandler(null); - }); - -// test('getPlatformVersion', () async { -// expect(await AudioPlayer.platformVersion, '42'); -// }); -}