Pre 0.4.0

This commit is contained in:
exttex 2020-09-18 19:28:56 +02:00
parent 24e598fe99
commit 275840ea4c
79 changed files with 4 additions and 9742 deletions

3
.gitmodules vendored Normal file

@@ -0,0 +1,3 @@
[submodule "audio_service"]
path = audio_service
url = https://notabug.org/exttex/audio_service

1
audio_service Submodule

@@ -0,0 +1 @@
Subproject commit 73fce9905f9ffeec0270f7c89b70cd0eaa762fb6


@@ -1 +0,0 @@
github: ryanheise


@@ -1,53 +0,0 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: 1 backlog, bug
assignees: ryanheise
---
<!-- ALL SECTIONS BELOW MUST BE COMPLETED -->
**Which API doesn't behave as documented, and how does it misbehave?**
Name here the specific methods or fields that are not behaving as documented, and explain clearly what is happening.
**Minimal reproduction project**
Provide a link here using one of two options:
1. Fork this repository, modify the example to reproduce the bug, and link to your fork.
2. If the unmodified official example already reproduces the bug, just write "The example".
**To Reproduce (i.e. user steps, not code)**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
**Error messages**
```
If applicable, copy & paste error message here, within the triple quotes to preserve formatting.
```
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots**
If applicable, add screenshots to help explain your problem.
**Desktop (please complete the following information):**
- OS: [e.g. MacOS + version]
- Browser: [e.g. Chrome, Safari + version]
**Smartphone (please complete the following information):**
- Device: [e.g. iPhone6]
- OS: [e.g. iOS8.1]
**Flutter SDK version**
```
insert output of "flutter doctor" here
```
**Additional context**
Add any other context about the problem here.


@@ -1,8 +0,0 @@
blank_issues_enabled: false
contact_links:
- name: Community Support
url: https://stackoverflow.com/search?q=just_audio
about: Ask for help on Stack Overflow.
- name: New to Flutter?
url: https://gitter.im/flutter/flutter
about: Chat with other Flutter developers on Gitter.


@@ -1,39 +0,0 @@
---
name: Documentation request
about: Suggest an improvement to the documentation
title: ''
labels: 1 backlog, documentation
assignees: ryanheise
---
<!--
PLEASE READ CAREFULLY!
FOR YOUR DOCUMENTATION REQUEST TO BE PROCESSED, YOU WILL NEED
TO FILL IN ALL SECTIONS BELOW. DON'T DELETE THE HEADINGS.
THANK YOU :-D
-->
**To which pages does your suggestion apply?**
- Direct URL 1
- Direct URL 2
- ...
**Quote the sentence(s) from the documentation to be improved (if any)**
> Insert here. (Skip if you are proposing an entirely new section.)
**Describe your suggestion**
...


@@ -1,37 +0,0 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: 1 backlog, enhancement
assignees: ryanheise
---
<!--
PLEASE READ CAREFULLY!
FOR YOUR FEATURE REQUEST TO BE PROCESSED, YOU WILL NEED
TO FILL IN ALL SECTIONS BELOW. DON'T DELETE THE HEADINGS.
THANK YOU :-D
-->
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.

70
just_audio/.gitignore vendored

@@ -1,70 +0,0 @@
# Miscellaneous
*.class
*.log
*.pyc
*.swp
.DS_Store
.atom/
.buildlog/
.history
.svn/
# IntelliJ related
*.iml
*.ipr
*.iws
.idea/
# Visual Studio Code related
.vscode/
# Flutter/Dart/Pub related
**/doc/api/
.dart_tool/
.flutter-plugins
.packages
.pub-cache/
.pub/
/build/
# Android related
**/android/**/gradle-wrapper.jar
**/android/.gradle
**/android/captures/
**/android/gradlew
**/android/gradlew.bat
**/android/local.properties
**/android/**/GeneratedPluginRegistrant.java
# iOS/XCode related
**/ios/**/*.mode1v3
**/ios/**/*.mode2v3
**/ios/**/*.moved-aside
**/ios/**/*.pbxuser
**/ios/**/*.perspectivev3
**/ios/**/*sync/
**/ios/**/.sconsign.dblite
**/ios/**/.tags*
**/ios/**/.vagrant/
**/ios/**/DerivedData/
**/ios/**/Icon?
**/ios/**/Pods/
**/ios/**/.symlinks/
**/ios/**/profile
**/ios/**/xcuserdata
**/ios/.generated/
**/ios/Flutter/App.framework
**/ios/Flutter/Flutter.framework
**/ios/Flutter/Generated.xcconfig
**/ios/Flutter/app.flx
**/ios/Flutter/app.zip
**/ios/Flutter/flutter_assets/
**/ios/ServiceDefinitions.json
**/ios/Runner/GeneratedPluginRegistrant.*
# Exceptions to above rules.
!**/ios/**/default.mode1v3
!**/ios/**/default.mode2v3
!**/ios/**/default.pbxuser
!**/ios/**/default.perspectivev3
!/packages/flutter_tools/test/data/dart_dependencies_test/**/.packages


@@ -1,10 +0,0 @@
# This file tracks properties of this Flutter project.
# Used by Flutter tool to assess capabilities and perform upgrades etc.
#
# This file should be version controlled and should not be manually edited.
version:
revision: 68587a0916366e9512a78df22c44163d041dd5f3
channel: stable
project_type: plugin


@@ -1,114 +0,0 @@
## 0.3.1
* Prevent hang in dispose
## 0.3.0
* Playlists
* Looping
* Shuffling
* Composing
* Clipping support added for iOS/macOS
* New player state model consisting of:
* playing: true/false
* processingState: none/loading/buffering/ready/completed
* Feature complete on iOS and macOS (except for DASH)
* Improved example
* Exception classes
## 0.2.2
* Fix dependencies for stable channel.
## 0.2.1
* Improve handling of headers.
* Report setUrl errors and duration on web.
## 0.2.0
* Support dynamic duration
* Support seeking to end of live streams
* Support request headers
* V2 implementation
* Report setUrl errors on iOS
* setUrl throws exception if interrupted
* Return null when duration is unknown
## 0.1.10
* Option to set audio session category on iOS.
## 0.1.9
* Bug fixes.
## 0.1.8
* Reduce distortion at slow speeds on iOS
## 0.1.7
* Minor bug fixes.
## 0.1.6
* Eliminate event lag over method channels.
* Report setUrl errors on Android.
* Report Icy Metadata on Android.
* Bug fixes.
## 0.1.5
* Update dependencies and documentation.
## 0.1.4
* Add MacOS implementation.
* Support cross-platform redirects on Android.
* Bug fixes.
## 0.1.3
* Fix bug in web implementation.
## 0.1.2
* Broadcast how much audio has been buffered.
## 0.1.1
* Web implementation.
* iOS option to minimize stalling.
* Fix setAsset on iOS.
## 0.1.0
* Separate buffering state from PlaybackState.
* More permissive state transitions.
* Support playing local files on iOS.
## 0.0.6
* Bug fixes.
## 0.0.5
* API change for audio clipping.
* Performance improvements and bug fixes on Android.
## 0.0.4
* Remove reseeking hack.
## 0.0.3
* Feature to change audio speed.
## 0.0.2
* iOS implementation for testing (may not work).
## 0.0.1
* Initial release with Android implementation.


@@ -1,229 +0,0 @@
MIT License
Copyright (c) 2019-2020 Ryan Heise.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
==============================================================================
This software includes the ExoPlayer library which is licensed under the Apache
License, Version 2.0.
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.


@@ -1,222 +0,0 @@
# just_audio
This Flutter plugin plays audio from URLs, files, assets, DASH/HLS streams and playlists. Furthermore, it can clip, concatenate, loop, shuffle and compose audio into complex arrangements with gapless playback. This plugin can be used with [audio_service](https://pub.dev/packages/audio_service) to play audio in the background and control playback from the lock screen, Android notifications, the iOS Control Center, and headset buttons.
## Features
| Feature | Android | iOS | MacOS | Web |
| ------- | :-------: | :-----: | :-----: | :-----: |
| read from URL | ✅ | ✅ | ✅ | ✅ |
| read from file | ✅ | ✅ | ✅ | |
| read from asset | ✅ | ✅ | ✅ | |
| request headers | ✅ | ✅ | ✅ | |
| DASH | ✅ | | | |
| HLS | ✅ | ✅ | ✅ | |
| buffer status/position | ✅ | ✅ | ✅ | ✅ |
| play/pause/seek | ✅ | ✅ | ✅ | ✅ |
| set volume | ✅ | ✅ | ✅ | ✅ |
| set speed | ✅ | ✅ | ✅ | ✅ |
| clip audio | ✅ | ✅ | ✅ | ✅ |
| playlists | ✅ | ✅ | ✅ | ✅ |
| looping | ✅ | ✅ | ✅ | ✅ |
| shuffle | ✅ | ✅ | ✅ | ✅ |
| compose audio | ✅ | ✅ | ✅ | ✅ |
| gapless playback | ✅ | ✅ | ✅ | |
| report player errors | ✅ | ✅ | ✅ | ✅ |
Please consider reporting any bugs you encounter [here](https://github.com/ryanheise/just_audio/issues) or submitting pull requests [here](https://github.com/ryanheise/just_audio/pulls).
## Example
![just_audio](https://user-images.githubusercontent.com/19899190/89558581-bf369080-d857-11ea-9376-3a5055284bab.png)
Initialisation:
```dart
final player = AudioPlayer();
var duration = await player.setUrl('https://foo.com/bar.mp3');
```
Standard controls:
```dart
player.play(); // Usually you don't want to wait for playback to finish.
await player.seek(Duration(seconds: 10));
await player.pause();
```
Clipping audio:
```dart
await player.setClip(start: Duration(seconds: 10), end: Duration(seconds: 20));
await player.play(); // Waits until the clip has finished playing
```
Adjusting audio:
```dart
await player.setSpeed(2.0); // Double speed
await player.setVolume(0.5); // Halve volume
```
Gapless playlists:
```dart
await player.load(
ConcatenatingAudioSource(
children: [
AudioSource.uri(Uri.parse("https://example.com/track1.mp3")),
AudioSource.uri(Uri.parse("https://example.com/track2.mp3")),
AudioSource.uri(Uri.parse("https://example.com/track3.mp3")),
],
),
);
player.seekToNext();
player.seekToPrevious();
// Jump to the beginning of track3.mp3.
player.seek(Duration(milliseconds: 0), index: 2);
```
Looping and shuffling:
```dart
player.setLoopMode(LoopMode.off); // no looping (default)
player.setLoopMode(LoopMode.all); // loop playlist
player.setLoopMode(LoopMode.one); // loop current item
player.setShuffleModeEnabled(true); // shuffle except for current item
```
Composing audio sources:
```dart
player.load(
// Loop child 4 times
LoopingAudioSource(
count: 4,
// Play children one after the other
child: ConcatenatingAudioSource(
children: [
// Play a regular media file
ProgressiveAudioSource(Uri.parse("https://example.com/foo.mp3")),
// Play a DASH stream
DashAudioSource(Uri.parse("https://example.com/audio.mpd")),
// Play an HLS stream
HlsAudioSource(Uri.parse("https://example.com/audio.m3u8")),
// Play a segment of the child
ClippingAudioSource(
child: ProgressiveAudioSource(Uri.parse("https://w.xyz/p.mp3")),
start: Duration(seconds: 25),
end: Duration(seconds: 30),
),
],
),
),
);
```
Releasing resources:
```dart
await player.dispose();
```
Catching player errors:
```dart
try {
await player.setUrl("https://s3.amazonaws.com/404-file.mp3");
} catch (e) {
print("Error: $e");
}
```
Listening to state changes:
```dart
player.playerStateStream.listen((state) {
if (state.playing) ... else ...
switch (state.processingState) {
case ProcessingState.none: ...
case ProcessingState.loading: ...
case ProcessingState.buffering: ...
case ProcessingState.ready: ...
case ProcessingState.completed: ...
}
});
// See also:
// - durationStream
// - positionStream
// - bufferedPositionStream
// - currentIndexStream
// - icyMetadataStream
// - playingStream
// - processingStateStream
// - loopModeStream
// - shuffleModeEnabledStream
// - volumeStream
// - speedStream
// - playbackEventStream
```
## Platform specific configuration
### Android
If you wish to connect to non-HTTPS URLs, add the following attribute to the `application` element of your `AndroidManifest.xml` file:
```xml
<application ... android:usesCleartextTraffic="true">
```
### iOS
If you wish to connect to non-HTTPS URLs, add the following to your `Info.plist` file:
```xml
<key>NSAppTransportSecurity</key>
<dict>
<key>NSAllowsArbitraryLoads</key>
<true/>
<key>NSAllowsArbitraryLoadsForMedia</key>
<true/>
</dict>
```
By default, iOS will mute your app's audio when your phone is switched to
silent mode. Depending on the requirements of your app, you can change the
default audio session category using `AudioPlayer.setIosCategory`. For example,
if you are writing a media app, Apple recommends that you set the category to
`AVAudioSessionCategoryPlayback`, which you can achieve by adding the following
code to your app's initialisation:
```dart
AudioPlayer.setIosCategory(IosCategory.playback);
```
Note: If your app uses several audio plugins in combination, e.g. for audio recording, text to speech, or background audio, those plugins may internally override the audio session category you choose here. Consider asking the developers of the other plugins you use to provide a similar method, so that the same audio session category can be configured consistently across all of them.
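As a rough sketch (an illustration rather than part of the original documentation), one way to make this more predictable is to set the category once during app start-up, before any players or other audio plugins are initialised; the `MaterialApp` passed to `runApp` below is just a placeholder:
```dart
import 'package:flutter/material.dart';
import 'package:just_audio/just_audio.dart';

void main() {
  // Required because a platform channel is used before runApp().
  WidgetsFlutterBinding.ensureInitialized();
  // Configure the audio session category once, up front (same call as above).
  AudioPlayer.setIosCategory(IosCategory.playback);
  runApp(MaterialApp(home: Scaffold(body: Center(child: Text('just_audio')))));
}
```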
### MacOS
To allow your MacOS application to access audio files on the Internet, add the following to your `DebugProfile.entitlements` and `Release.entitlements` files:
```xml
<key>com.apple.security.network.client</key>
<true/>
```
If you wish to connect to non-HTTPS URLs, add the following to your `Info.plist` file:
```xml
<key>NSAppTransportSecurity</key>
<dict>
<key>NSAllowsArbitraryLoads</key>
<true/>
<key>NSAllowsArbitraryLoadsForMedia</key>
<true/>
</dict>
```


@@ -1,8 +0,0 @@
*.iml
.gradle
/local.properties
/.idea/workspace.xml
/.idea/libraries
.DS_Store
/build
/captures


@@ -1,48 +0,0 @@
group 'com.ryanheise.just_audio'
version '1.0'
buildscript {
repositories {
google()
jcenter()
}
dependencies {
classpath 'com.android.tools.build:gradle:3.6.3'
}
}
rootProject.allprojects {
repositories {
google()
jcenter()
}
}
apply plugin: 'com.android.library'
android {
compileSdkVersion 28
defaultConfig {
minSdkVersion 16
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
}
lintOptions {
disable 'InvalidPackage'
}
compileOptions {
sourceCompatibility 1.8
targetCompatibility 1.8
}
}
dependencies {
implementation 'com.google.android.exoplayer:exoplayer-core:2.11.4'
implementation 'com.google.android.exoplayer:exoplayer-dash:2.11.4'
implementation 'com.google.android.exoplayer:exoplayer-hls:2.11.4'
implementation 'com.google.android.exoplayer:exoplayer-smoothstreaming:2.11.4'
implementation files('libs/extension-flac.aar')
}


@@ -1,4 +0,0 @@
org.gradle.jvmargs=-Xmx1536M
android.enableR8=true
android.useAndroidX=true
android.enableJetifier=true


@@ -1,6 +0,0 @@
#Mon Aug 10 13:15:44 CEST 2020
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.4-all.zip


@@ -1 +0,0 @@
rootProject.name = 'just_audio'


@@ -1,3 +0,0 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.ryanheise.just_audio">
</manifest>


@@ -1,724 +0,0 @@
package com.ryanheise.just_audio;
import android.content.Context;
import android.net.Uri;
import android.os.Handler;
import android.util.Log;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.PlaybackParameters;
import com.google.android.exoplayer2.Player;
import com.google.android.exoplayer2.SimpleExoPlayer;
import com.google.android.exoplayer2.Timeline;
import com.google.android.exoplayer2.metadata.Metadata;
import com.google.android.exoplayer2.metadata.MetadataOutput;
import com.google.android.exoplayer2.metadata.icy.IcyHeaders;
import com.google.android.exoplayer2.metadata.icy.IcyInfo;
import com.google.android.exoplayer2.source.ClippingMediaSource;
import com.google.android.exoplayer2.source.ConcatenatingMediaSource;
import com.google.android.exoplayer2.source.LoopingMediaSource;
import com.google.android.exoplayer2.source.MediaSource;
import com.google.android.exoplayer2.source.ProgressiveMediaSource;
import com.google.android.exoplayer2.source.ShuffleOrder;
import com.google.android.exoplayer2.source.ShuffleOrder.DefaultShuffleOrder;
import com.google.android.exoplayer2.source.TrackGroup;
import com.google.android.exoplayer2.source.TrackGroupArray;
import com.google.android.exoplayer2.source.dash.DashMediaSource;
import com.google.android.exoplayer2.source.hls.HlsMediaSource;
import com.google.android.exoplayer2.trackselection.TrackSelectionArray;
import com.google.android.exoplayer2.upstream.DataSource;
import com.google.android.exoplayer2.upstream.DefaultDataSourceFactory;
import com.google.android.exoplayer2.upstream.DefaultHttpDataSource;
import com.google.android.exoplayer2.upstream.DefaultHttpDataSourceFactory;
import com.google.android.exoplayer2.upstream.HttpDataSource;
import com.google.android.exoplayer2.util.Util;
import io.flutter.plugin.common.BinaryMessenger;
import io.flutter.plugin.common.EventChannel;
import io.flutter.plugin.common.EventChannel.EventSink;
import io.flutter.plugin.common.MethodCall;
import io.flutter.plugin.common.MethodChannel;
import io.flutter.plugin.common.MethodChannel.MethodCallHandler;
import io.flutter.plugin.common.MethodChannel.Result;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.stream.Collectors;
import com.ryanheise.just_audio.DeezerDataSource;
public class AudioPlayer implements MethodCallHandler, Player.EventListener, MetadataOutput {
static final String TAG = "AudioPlayer";
private static Random random = new Random();
private final Context context;
private final MethodChannel methodChannel;
private final EventChannel eventChannel;
private EventSink eventSink;
private ProcessingState processingState;
private long updateTime;
private long updatePosition;
private long bufferedPosition;
private long duration;
private Long start;
private Long end;
private Long seekPos;
private Result prepareResult;
private Result playResult;
private Result seekResult;
private boolean seekProcessed;
private boolean playing;
private Map<String, MediaSource> mediaSources = new HashMap<String, MediaSource>();
private IcyInfo icyInfo;
private IcyHeaders icyHeaders;
private int errorCount;
private SimpleExoPlayer player;
private MediaSource mediaSource;
private Integer currentIndex;
private Map<LoopingMediaSource, MediaSource> loopingChildren = new HashMap<>();
private Map<LoopingMediaSource, Integer> loopingCounts = new HashMap<>();
private final Handler handler = new Handler();
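// Polls the buffered position and rebroadcasts the playback event when it
// changes; run() reschedules itself every 200ms while buffering, 500ms while
// playing and 1000ms while paused.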
private final Runnable bufferWatcher = new Runnable() {
@Override
public void run() {
if (player == null) {
return;
}
long newBufferedPosition = player.getBufferedPosition();
if (newBufferedPosition != bufferedPosition) {
bufferedPosition = newBufferedPosition;
broadcastPlaybackEvent();
}
switch (processingState) {
case buffering:
handler.postDelayed(this, 200);
break;
case ready:
if (playing) {
handler.postDelayed(this, 500);
} else {
handler.postDelayed(this, 1000);
}
break;
}
}
};
private final Runnable onDispose;
public AudioPlayer(final Context applicationContext, final BinaryMessenger messenger,
final String id, final Runnable onDispose) {
this.context = applicationContext;
this.onDispose = onDispose;
methodChannel = new MethodChannel(messenger, "com.ryanheise.just_audio.methods." + id);
methodChannel.setMethodCallHandler(this);
eventChannel = new EventChannel(messenger, "com.ryanheise.just_audio.events." + id);
eventChannel.setStreamHandler(new EventChannel.StreamHandler() {
@Override
public void onListen(final Object arguments, final EventSink eventSink) {
AudioPlayer.this.eventSink = eventSink;
}
@Override
public void onCancel(final Object arguments) {
eventSink = null;
}
});
processingState = ProcessingState.none;
}
private void startWatchingBuffer() {
handler.removeCallbacks(bufferWatcher);
handler.post(bufferWatcher);
}
@Override
public void onMetadata(Metadata metadata) {
for (int i = 0; i < metadata.length(); i++) {
final Metadata.Entry entry = metadata.get(i);
if (entry instanceof IcyInfo) {
icyInfo = (IcyInfo) entry;
broadcastPlaybackEvent();
}
}
}
@Override
public void onTracksChanged(TrackGroupArray trackGroups, TrackSelectionArray trackSelections) {
for (int i = 0; i < trackGroups.length; i++) {
TrackGroup trackGroup = trackGroups.get(i);
for (int j = 0; j < trackGroup.length; j++) {
Metadata metadata = trackGroup.getFormat(j).metadata;
if (metadata != null) {
for (int k = 0; k < metadata.length(); k++) {
final Metadata.Entry entry = metadata.get(k);
if (entry instanceof IcyHeaders) {
icyHeaders = (IcyHeaders) entry;
broadcastPlaybackEvent();
}
}
}
}
}
}
@Override
public void onPositionDiscontinuity(int reason) {
switch (reason) {
case Player.DISCONTINUITY_REASON_PERIOD_TRANSITION:
case Player.DISCONTINUITY_REASON_SEEK:
onItemMayHaveChanged();
break;
}
}
@Override
public void onTimelineChanged(Timeline timeline, int reason) {
if (reason == Player.TIMELINE_CHANGE_REASON_DYNAMIC) {
onItemMayHaveChanged();
}
}
private void onItemMayHaveChanged() {
Integer newIndex = player.getCurrentWindowIndex();
if (!newIndex.equals(currentIndex)) {
currentIndex = newIndex;
}
broadcastPlaybackEvent();
}
@Override
public void onPlayerStateChanged(boolean playWhenReady, int playbackState) {
switch (playbackState) {
case Player.STATE_READY:
if (prepareResult != null) {
duration = getDuration();
transition(ProcessingState.ready);
prepareResult.success(duration);
prepareResult = null;
} else {
transition(ProcessingState.ready);
}
if (seekProcessed) {
completeSeek();
}
break;
case Player.STATE_BUFFERING:
if (processingState != ProcessingState.buffering) {
transition(ProcessingState.buffering);
startWatchingBuffer();
}
break;
case Player.STATE_ENDED:
if (processingState != ProcessingState.completed) {
transition(ProcessingState.completed);
}
if (playResult != null) {
playResult.success(null);
playResult = null;
}
break;
}
}
@Override
public void onPlayerError(ExoPlaybackException error) {
switch (error.type) {
case ExoPlaybackException.TYPE_SOURCE:
Log.e(TAG, "TYPE_SOURCE: " + error.getSourceException().getMessage());
break;
case ExoPlaybackException.TYPE_RENDERER:
Log.e(TAG, "TYPE_RENDERER: " + error.getRendererException().getMessage());
break;
case ExoPlaybackException.TYPE_UNEXPECTED:
Log.e(TAG, "TYPE_UNEXPECTED: " + error.getUnexpectedException().getMessage());
break;
default:
Log.e(TAG, "default: " + error.getUnexpectedException().getMessage());
}
sendError(String.valueOf(error.type), error.getMessage());
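// After reporting the error, try to skip to the next playlist item, giving up
// once 5 errors have accumulated since the last load().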
errorCount++;
if (player.hasNext() && currentIndex != null && errorCount <= 5) {
int nextIndex = currentIndex + 1;
player.prepare(mediaSource);
player.seekTo(nextIndex, 0);
}
}
@Override
public void onSeekProcessed() {
if (seekResult != null) {
seekProcessed = true;
if (player.getPlaybackState() == Player.STATE_READY) {
completeSeek();
}
}
}
private void completeSeek() {
seekProcessed = false;
seekPos = null;
seekResult.success(null);
seekResult = null;
}
@Override
public void onMethodCall(final MethodCall call, final Result result) {
ensurePlayerInitialized();
final List<?> args = (List<?>) call.arguments;
try {
switch (call.method) {
case "load":
load(getAudioSource(args.get(0)), result);
break;
case "play":
play(result);
break;
case "pause":
pause();
result.success(null);
break;
case "setVolume":
setVolume((float) ((double) ((Double) args.get(0))));
result.success(null);
break;
case "setSpeed":
setSpeed((float) ((double) ((Double) args.get(0))));
result.success(null);
break;
case "setLoopMode":
setLoopMode((Integer) args.get(0));
result.success(null);
break;
case "setShuffleModeEnabled":
setShuffleModeEnabled((Boolean) args.get(0));
result.success(null);
break;
case "setAutomaticallyWaitsToMinimizeStalling":
result.success(null);
break;
case "seek":
Long position = getLong(args.get(0));
Integer index = (Integer)args.get(1);
seek(position == null ? C.TIME_UNSET : position, result, index);
break;
case "dispose":
dispose();
result.success(null);
break;
case "concatenating.add":
concatenating(args.get(0))
.addMediaSource(getAudioSource(args.get(1)), handler, () -> result.success(null));
break;
case "concatenating.insert":
concatenating(args.get(0))
.addMediaSource((Integer)args.get(1), getAudioSource(args.get(2)), handler, () -> result.success(null));
break;
case "concatenating.addAll":
concatenating(args.get(0))
.addMediaSources(getAudioSources(args.get(1)), handler, () -> result.success(null));
break;
case "concatenating.insertAll":
concatenating(args.get(0))
.addMediaSources((Integer)args.get(1), getAudioSources(args.get(2)), handler, () -> result.success(null));
break;
case "concatenating.removeAt":
concatenating(args.get(0))
.removeMediaSource((Integer)args.get(1), handler, () -> result.success(null));
break;
case "concatenating.removeRange":
concatenating(args.get(0))
.removeMediaSourceRange((Integer)args.get(1), (Integer)args.get(2), handler, () -> result.success(null));
break;
case "concatenating.move":
concatenating(args.get(0))
.moveMediaSource((Integer)args.get(1), (Integer)args.get(2), handler, () -> result.success(null));
break;
case "concatenating.clear":
concatenating(args.get(0)).clear(handler, () -> result.success(null));
break;
default:
result.notImplemented();
break;
}
} catch (IllegalStateException e) {
e.printStackTrace();
result.error("Illegal state: " + e.getMessage(), null, null);
} catch (Exception e) {
e.printStackTrace();
result.error("Error: " + e, null, null);
}
}
// Set the shuffle order for mediaSource, with currentIndex at
// the first position. Traverse the tree incrementing index at each
// node.
private int setShuffleOrder(MediaSource mediaSource, int index) {
if (mediaSource instanceof ConcatenatingMediaSource) {
final ConcatenatingMediaSource source = (ConcatenatingMediaSource)mediaSource;
// Find which child is current
Integer currentChildIndex = null;
for (int i = 0; i < source.getSize(); i++) {
final int indexBefore = index;
final MediaSource child = source.getMediaSource(i);
index = setShuffleOrder(child, index);
// If currentIndex falls within this child, make this child come first.
if (currentIndex >= indexBefore && currentIndex < index) {
currentChildIndex = i;
}
}
// Shuffle so that the current child is first in the shuffle order
source.setShuffleOrder(createShuffleOrder(source.getSize(), currentChildIndex));
} else if (mediaSource instanceof LoopingMediaSource) {
final LoopingMediaSource source = (LoopingMediaSource)mediaSource;
// The ExoPlayer API doesn't provide accessors for these so we have
// to index them ourselves.
MediaSource child = loopingChildren.get(source);
int count = loopingCounts.get(source);
for (int i = 0; i < count; i++) {
index = setShuffleOrder(child, index);
}
} else {
// An actual media item takes up one spot in the playlist.
index++;
}
return index;
}
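// Generate a random permutation of [0, length) using an inside-out
// Fisher-Yates shuffle; if firstIndex is non-null, move it to position 0.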
private static int[] shuffle(int length, Integer firstIndex) {
final int[] shuffleOrder = new int[length];
for (int i = 0; i < length; i++) {
final int j = random.nextInt(i + 1);
shuffleOrder[i] = shuffleOrder[j];
shuffleOrder[j] = i;
}
if (firstIndex != null) {
for (int i = 1; i < length; i++) {
if (shuffleOrder[i] == firstIndex) {
final int v = shuffleOrder[0];
shuffleOrder[0] = shuffleOrder[i];
shuffleOrder[i] = v;
break;
}
}
}
return shuffleOrder;
}
// Create a shuffle order optionally fixing the first index.
private ShuffleOrder createShuffleOrder(int length, Integer firstIndex) {
int[] shuffleIndices = shuffle(length, firstIndex);
return new DefaultShuffleOrder(shuffleIndices, random.nextLong());
}
private ConcatenatingMediaSource concatenating(final Object index) {
return (ConcatenatingMediaSource)mediaSources.get((String)index);
}
private MediaSource getAudioSource(final Object json) {
Map<?, ?> map = (Map<?, ?>)json;
String id = (String)map.get("id");
MediaSource mediaSource = mediaSources.get(id);
if (mediaSource == null) {
mediaSource = decodeAudioSource(map);
mediaSources.put(id, mediaSource);
}
return mediaSource;
}
private MediaSource decodeAudioSource(final Object json) {
Map<?, ?> map = (Map<?, ?>)json;
String id = (String)map.get("id");
switch ((String)map.get("type")) {
case "progressive":
Uri uri = Uri.parse((String)map.get("uri"));
//Deezer
if (uri.getHost() != null && uri.getHost().contains("dzcdn.net")) {
//Track id is stored in URL fragment (after #)
String fragment = uri.getFragment();
uri = Uri.parse(((String)map.get("uri")).replace("#" + fragment, ""));
return new ProgressiveMediaSource.Factory(
() -> {
HttpDataSource deezerDataSource = new DeezerDataSource(fragment);
return deezerDataSource;
}
).setTag(id).createMediaSource(uri);
}
return new ProgressiveMediaSource.Factory(buildDataSourceFactory())
.setTag(id)
.createMediaSource(uri);
case "dash":
return new DashMediaSource.Factory(buildDataSourceFactory())
.setTag(id)
.createMediaSource(Uri.parse((String)map.get("uri")));
case "hls":
return new HlsMediaSource.Factory(buildDataSourceFactory())
.setTag(id)
.createMediaSource(Uri.parse((String)map.get("uri")));
case "concatenating":
List<Object> audioSources = (List<Object>)map.get("audioSources");
return new ConcatenatingMediaSource(
false, // isAtomic
(Boolean)map.get("useLazyPreparation"),
new DefaultShuffleOrder(audioSources.size()),
audioSources
.stream()
.map(s -> getAudioSource(s))
.toArray(MediaSource[]::new));
case "clipping":
Long start = getLong(map.get("start"));
Long end = getLong(map.get("end"));
return new ClippingMediaSource(getAudioSource(map.get("audioSource")),
(start != null ? start : 0) * 1000L,
(end != null ? end : C.TIME_END_OF_SOURCE) * 1000L);
case "looping":
Integer count = (Integer)map.get("count");
MediaSource looperChild = getAudioSource(map.get("audioSource"));
LoopingMediaSource looper = new LoopingMediaSource(looperChild, count);
// TODO: store both in a single map
loopingChildren.put(looper, looperChild);
loopingCounts.put(looper, count);
return looper;
default:
throw new IllegalArgumentException("Unknown AudioSource type: " + map.get("type"));
}
}
private List<MediaSource> getAudioSources(final Object json) {
return ((List<Object>)json)
.stream()
.map(s -> getAudioSource(s))
.collect(Collectors.toList());
}
private DataSource.Factory buildDataSourceFactory() {
String userAgent = Util.getUserAgent(context, "just_audio");
DataSource.Factory httpDataSourceFactory = new DefaultHttpDataSourceFactory(
userAgent,
DefaultHttpDataSource.DEFAULT_CONNECT_TIMEOUT_MILLIS,
DefaultHttpDataSource.DEFAULT_READ_TIMEOUT_MILLIS,
true
);
return new DefaultDataSourceFactory(context, httpDataSourceFactory);
}
private void load(final MediaSource mediaSource, final Result result) {
switch (processingState) {
case none:
break;
case loading:
abortExistingConnection();
player.stop();
break;
default:
player.stop();
break;
}
errorCount = 0;
prepareResult = result;
transition(ProcessingState.loading);
if (player.getShuffleModeEnabled()) {
setShuffleOrder(mediaSource, 0);
}
this.mediaSource = mediaSource;
player.prepare(mediaSource);
}
private void ensurePlayerInitialized() {
if (player == null) {
player = new SimpleExoPlayer.Builder(context).build();
player.addMetadataOutput(this);
player.addListener(this);
}
}
private void broadcastPlaybackEvent() {
final Map<String, Object> event = new HashMap<String, Object>();
event.put("processingState", processingState.ordinal());
event.put("updatePosition", updatePosition = getCurrentPosition());
event.put("updateTime", updateTime = System.currentTimeMillis());
event.put("bufferedPosition", Math.max(updatePosition, bufferedPosition));
event.put("icyMetadata", collectIcyMetadata());
event.put("duration", duration = getDuration());
event.put("currentIndex", currentIndex);
if (eventSink != null) {
eventSink.success(event);
}
}
private Map<String, Object> collectIcyMetadata() {
final Map<String, Object> icyData = new HashMap<>();
if (icyInfo != null) {
final Map<String, String> info = new HashMap<>();
info.put("title", icyInfo.title);
info.put("url", icyInfo.url);
icyData.put("info", info);
}
if (icyHeaders != null) {
final Map<String, Object> headers = new HashMap<>();
headers.put("bitrate", icyHeaders.bitrate);
headers.put("genre", icyHeaders.genre);
headers.put("name", icyHeaders.name);
headers.put("metadataInterval", icyHeaders.metadataInterval);
headers.put("url", icyHeaders.url);
headers.put("isPublic", icyHeaders.isPublic);
icyData.put("headers", headers);
}
return icyData;
}
private long getCurrentPosition() {
if (processingState == ProcessingState.none || processingState == ProcessingState.loading) {
return 0;
} else if (seekPos != null && seekPos != C.TIME_UNSET) {
return seekPos;
} else {
return player.getCurrentPosition();
}
}
private long getDuration() {
if (processingState == ProcessingState.none || processingState == ProcessingState.loading) {
return C.TIME_UNSET;
} else {
return player.getDuration();
}
}
private void sendError(String errorCode, String errorMsg) {
if (prepareResult != null) {
prepareResult.error(errorCode, errorMsg, null);
prepareResult = null;
}
if (eventSink != null) {
eventSink.error(errorCode, errorMsg, null);
}
}
private void transition(final ProcessingState newState) {
processingState = newState;
broadcastPlaybackEvent();
}
private String getLowerCaseExtension(Uri uri) {
// Until ExoPlayer provides automatic detection of media source types, we
// rely on the file extension. When this is absent, as a temporary
// workaround we allow the app to supply a fake extension in the URL
// fragment. e.g. https://somewhere.com/somestream?x=etc#.m3u8
String fragment = uri.getFragment();
String filename = fragment != null && fragment.contains(".") ? fragment : uri.getPath();
return filename.replaceAll("^.*\\.", "").toLowerCase();
}
public void play(Result result) {
if (player.getPlayWhenReady()) return;
if (playResult != null) {
playResult.success(null);
}
playResult = result;
startWatchingBuffer();
player.setPlayWhenReady(true);
if (processingState == ProcessingState.completed && playResult != null) {
playResult.success(null);
playResult = null;
}
}
public void pause() {
if (!player.getPlayWhenReady()) return;
player.setPlayWhenReady(false);
if (playResult != null) {
playResult.success(null);
playResult = null;
}
}
public void setVolume(final float volume) {
player.setVolume(volume);
}
public void setSpeed(final float speed) {
player.setPlaybackParameters(new PlaybackParameters(speed));
broadcastPlaybackEvent();
}
public void setLoopMode(final int mode) {
player.setRepeatMode(mode);
}
public void setShuffleModeEnabled(final boolean enabled) {
if (enabled) {
setShuffleOrder(mediaSource, 0);
}
player.setShuffleModeEnabled(enabled);
}
public void seek(final long position, final Result result, final Integer index) {
if (processingState == ProcessingState.none || processingState == ProcessingState.loading) {
return;
}
abortSeek();
seekPos = position;
seekResult = result;
seekProcessed = false;
int windowIndex = index != null ? index : player.getCurrentWindowIndex();
player.seekTo(windowIndex, position);
}
public void dispose() {
mediaSources.clear();
mediaSource = null;
loopingChildren.clear();
if (player != null) {
player.release();
player = null;
transition(ProcessingState.none);
}
if (eventSink != null) {
eventSink.endOfStream();
}
onDispose.run();
}
private void abortSeek() {
if (seekResult != null) {
seekResult.success(null);
seekResult = null;
seekPos = null;
seekProcessed = false;
}
}
private void abortExistingConnection() {
sendError("abort", "Connection aborted");
}
public static Long getLong(Object o) {
return (o == null || o instanceof Long) ? (Long)o : Long.valueOf(((Integer)o).longValue());
}
enum ProcessingState {
none,
loading,
buffering,
ready,
completed
}
}


@@ -1,264 +0,0 @@
package com.ryanheise.just_audio;
import android.net.Uri;
import android.util.Log;
import com.google.android.exoplayer2.upstream.DataSpec;
import com.google.android.exoplayer2.upstream.HttpDataSource;
import com.google.android.exoplayer2.upstream.TransferListener;
import java.io.BufferedInputStream;
import java.io.ByteArrayOutputStream;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.security.MessageDigest;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import javax.crypto.Cipher;
import javax.crypto.spec.SecretKeySpec;
public class DeezerDataSource implements HttpDataSource {
HttpURLConnection connection;
InputStream inputStream;
int counter = 0;
byte[] key;
DataSpec dataSpec;
//Quality fallback stuff
String trackId;
int quality = 0;
String md5origin;
String mediaVersion;
public DeezerDataSource(String trackId) {
this.trackId = trackId;
this.key = getKey(trackId);
}
@Override
public long open(DataSpec dataSpec) throws HttpDataSource.HttpDataSourceException {
this.dataSpec = dataSpec;
try {
//Check if real url or placeholder for quality fallback
URL url = new URL(dataSpec.uri.toString());
String[] qp = url.getQuery().split("&");
//A real dzcdn URL doesn't have query params
if (qp.length >= 3) {
//Parse query parameters
for (int i = 0; i < qp.length; i++) {
String p = qp[i].replace("?", "");
if (p.startsWith("md5")) {
this.md5origin = p.replace("md5=", "");
}
if (p.startsWith("mv")) {
this.mediaVersion = p.replace("mv=", "");
}
if (p.startsWith("q")) {
if (this.quality == 0) {
this.quality = Integer.parseInt(p.replace("q=", ""));
}
}
}
//Get real url
url = new URL(this.getTrackUrl(trackId, md5origin, mediaVersion, quality));
}
this.connection = (HttpURLConnection) url.openConnection();
this.connection.setChunkedStreamingMode(2048);
if (dataSpec.position > 0) {
this.counter = (int) (dataSpec.position/2048);
this.connection.setRequestProperty("Range",
"bytes=" + Long.toString(this.counter*2048) + "-");
}
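// Wrap the response so data is consumed in full 2048-byte chunks; every third
// chunk (starting from the first) is decrypted via decryptChunk().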
InputStream is = this.connection.getInputStream();
this.inputStream = new BufferedInputStream(new FilterInputStream(is) {
@Override
public int read(byte buffer[], int offset, int len) throws IOException {
byte[] b = new byte[2048];
int t = 0;
int read = 0;
while (read != -1 && t != 2048) {
t += read = in.read(b, t, 2048-t);
}
if (counter % 3 == 0) {
byte[] dec = decryptChunk(key, b);
System.arraycopy(dec, 0, buffer, offset, 2048);
} else {
System.arraycopy(b, 0, buffer, offset, 2048);
}
counter++;
return t;
}
},2048);
} catch (Exception e) {
//Quality fallback
if (this.quality == 1) {
Log.e("E", e.toString());
throw new HttpDataSourceException("Error loading URL", dataSpec, HttpDataSourceException.TYPE_OPEN);
}
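// Fall back to the next lower quality (9 -> 3 -> 1) and retry by recursing
// into open(); a failure at quality 1 is reported above.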
if (this.quality == 3) this.quality = 1;
if (this.quality == 9) this.quality = 3;
// r e c u r s i o n
return this.open(dataSpec);
}
String size = this.connection.getHeaderField("Content-Length");
return Long.parseLong(size);
}
@Override
public int read(byte[] buffer, int offset, int length) throws HttpDataSourceException {
int read = 0;
try {
read = this.inputStream.read(buffer, offset, length);
} catch (Exception e) {
Log.e("E", e.toString());
//throw new HttpDataSourceException("Error reading from stream", this.dataSpec, HttpDataSourceException.TYPE_READ);
}
return read;
}
@Override
public void close() {
try {
if (this.inputStream != null) this.inputStream.close();
if (this.connection != null) this.connection.disconnect();
} catch (Exception e) {
Log.e("E", e.toString());
}
}
@Override
public void setRequestProperty(String name, String value) {
Log.d("D", "setRequestProperty");
}
@Override
public void clearRequestProperty(String name) {
Log.d("D", "clearRequestProperty");
}
@Override
public void clearAllRequestProperties() {
Log.d("D", "clearAllRequestProperties");
}
@Override
public int getResponseCode() {
Log.d("D", "getResponseCode");
return 0;
}
@Override
public Map<String, List<String>> getResponseHeaders() {
return this.connection.getHeaderFields();
}
public final void addTransferListener(TransferListener transferListener) {
Log.d("D", "addTransferListener");
}
@Override
public Uri getUri() {
return Uri.parse(this.connection.getURL().toString());
}
public static String bytesToHex(byte[] bytes) {
final char[] HEX_ARRAY = "0123456789ABCDEF".toCharArray();
char[] hexChars = new char[bytes.length * 2];
for (int j = 0; j < bytes.length; j++) {
int v = bytes[j] & 0xFF;
hexChars[j * 2] = HEX_ARRAY[v >>> 4];
hexChars[j * 2 + 1] = HEX_ARRAY[v & 0x0F];
}
return new String(hexChars);
}
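// Derive the per-track Blowfish key: XOR the two 16-character halves of the
// lowercase MD5 hex digest of the track id with a static secret.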
byte[] getKey(String id) {
String secret = "g4el58wc0zvf9na1";
try {
MessageDigest md5 = MessageDigest.getInstance("MD5");
md5.update(id.getBytes());
byte[] md5id = md5.digest();
String idmd5 = bytesToHex(md5id).toLowerCase();
String key = "";
for(int i=0; i<16; i++) {
int s0 = idmd5.charAt(i);
int s1 = idmd5.charAt(i+16);
int s2 = secret.charAt(i);
key += (char)(s0^s1^s2);
}
return key.getBytes();
} catch (Exception e) {
Log.e("E", e.toString());
return new byte[0];
}
}
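// Decrypt a single 2048-byte chunk using Blowfish/CBC/NoPadding with a fixed
// 8-byte IV of 0..7.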
byte[] decryptChunk(byte[] key, byte[] data) {
try {
byte[] IV = {00, 01, 02, 03, 04, 05, 06, 07};
SecretKeySpec Skey = new SecretKeySpec(key, "Blowfish");
Cipher cipher = Cipher.getInstance("Blowfish/CBC/NoPadding");
cipher.init(Cipher.DECRYPT_MODE, Skey, new javax.crypto.spec.IvParameterSpec(IV));
return cipher.doFinal(data);
}catch (Exception e) {
Log.e("D", e.toString());
return new byte[0];
}
}
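// Build the CDN URL for a track: join md5origin, quality, track id and media
// version with the 0xA4 separator, prepend the MD5 hex of that payload, pad
// with '.' to a multiple of 16 bytes, AES-ECB encrypt in 16-byte blocks and
// hex-encode the result into the e-cdns-proxy URL.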
public String getTrackUrl(String trackId, String md5origin, String mediaVersion, int quality) {
try {
int magic = 164;
ByteArrayOutputStream step1 = new ByteArrayOutputStream();
step1.write(md5origin.getBytes());
step1.write(magic);
step1.write(Integer.toString(quality).getBytes());
step1.write(magic);
step1.write(trackId.getBytes());
step1.write(magic);
step1.write(mediaVersion.getBytes());
//Get MD5
MessageDigest md5 = MessageDigest.getInstance("MD5");
md5.update(step1.toByteArray());
byte[] digest = md5.digest();
String md5hex = bytesToHex(digest).toLowerCase();
ByteArrayOutputStream step2 = new ByteArrayOutputStream();
step2.write(md5hex.getBytes());
step2.write(magic);
step2.write(step1.toByteArray());
step2.write(magic);
//Pad step2 with dots, to get correct length
while(step2.size()%16 > 0) step2.write(46);
//Prepare AES encryption
Cipher cipher = Cipher.getInstance("AES/ECB/NoPadding");
SecretKeySpec key = new SecretKeySpec("jo6aey6haid2Teih".getBytes(), "AES");
cipher.init(Cipher.ENCRYPT_MODE, key);
//Encrypt
StringBuilder step3 = new StringBuilder();
for (int i=0; i<step2.size()/16; i++) {
byte[] b = Arrays.copyOfRange(step2.toByteArray(), i*16, (i+1)*16);
step3.append(bytesToHex(cipher.doFinal(b)).toLowerCase());
}
//Join to URL
return "https://e-cdns-proxy-" + md5origin.charAt(0) + ".dzcdn.net/mobile/1/" + step3.toString();
} catch (Exception e) {
e.printStackTrace();
}
return null;
}
}


@@ -1,57 +0,0 @@
package com.ryanheise.just_audio;
import android.content.Context;
import androidx.annotation.NonNull;
import io.flutter.embedding.engine.plugins.FlutterPlugin;
import io.flutter.plugin.common.BinaryMessenger;
import io.flutter.plugin.common.MethodChannel;
import io.flutter.plugin.common.PluginRegistry.Registrar;
/**
* JustAudioPlugin
*/
public class JustAudioPlugin implements FlutterPlugin {
private MethodChannel channel;
private MainMethodCallHandler methodCallHandler;
public JustAudioPlugin() {
}
/**
* v1 plugin registration.
*/
public static void registerWith(Registrar registrar) {
final JustAudioPlugin plugin = new JustAudioPlugin();
plugin.startListening(registrar.context(), registrar.messenger());
registrar.addViewDestroyListener(
view -> {
plugin.stopListening();
return false;
});
}
@Override
public void onAttachedToEngine(@NonNull FlutterPluginBinding binding) {
startListening(binding.getApplicationContext(), binding.getBinaryMessenger());
}
@Override
public void onDetachedFromEngine(@NonNull FlutterPluginBinding binding) {
stopListening();
}
private void startListening(Context applicationContext, BinaryMessenger messenger) {
methodCallHandler = new MainMethodCallHandler(applicationContext, messenger);
channel = new MethodChannel(messenger, "com.ryanheise.just_audio.methods");
channel.setMethodCallHandler(methodCallHandler);
}
private void stopListening() {
methodCallHandler.dispose();
methodCallHandler = null;
channel.setMethodCallHandler(null);
}
}


@@ -1,52 +0,0 @@
package com.ryanheise.just_audio;
import android.content.Context;
import androidx.annotation.NonNull;
import io.flutter.plugin.common.BinaryMessenger;
import io.flutter.plugin.common.MethodCall;
import io.flutter.plugin.common.MethodChannel.MethodCallHandler;
import io.flutter.plugin.common.MethodChannel.Result;
import java.util.HashMap;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
public class MainMethodCallHandler implements MethodCallHandler {
private final Context applicationContext;
private final BinaryMessenger messenger;
private final Map<String, AudioPlayer> players = new HashMap<>();
public MainMethodCallHandler(Context applicationContext,
BinaryMessenger messenger) {
this.applicationContext = applicationContext;
this.messenger = messenger;
}
@Override
public void onMethodCall(MethodCall call, @NonNull Result result) {
switch (call.method) {
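// Each Dart-side AudioPlayer instance sends "init" with a unique id; create
// one platform AudioPlayer per id and remove it from the map on dispose.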
case "init":
final List<String> ids = call.arguments();
String id = ids.get(0);
players.put(id, new AudioPlayer(applicationContext, messenger, id,
() -> players.remove(id)
));
result.success(null);
break;
case "setIosCategory":
result.success(null);
break;
default:
result.notImplemented();
break;
}
}
void dispose() {
for (AudioPlayer player : new ArrayList<AudioPlayer>(players.values())) {
player.dispose();
}
}
}

File diff suppressed because it is too large


@@ -1,37 +0,0 @@
#import "AudioSource.h"
#import <AVFoundation/AVFoundation.h>
@implementation AudioSource {
NSString *_sourceId;
}
- (instancetype)initWithId:(NSString *)sid {
self = [super init];
NSAssert(self, @"super init cannot be nil");
_sourceId = sid;
return self;
}
- (NSString *)sourceId {
return _sourceId;
}
- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex {
return 0;
}
- (void)findById:(NSString *)sourceId matches:(NSMutableArray<AudioSource *> *)matches {
if ([_sourceId isEqualToString:sourceId]) {
[matches addObject:self];
}
}
- (NSArray *)getShuffleOrder {
return @[];
}
- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex {
return 0;
}
@end

View File

@ -1,79 +0,0 @@
#import "AudioSource.h"
#import "ClippingAudioSource.h"
#import "IndexedPlayerItem.h"
#import "UriAudioSource.h"
#import <AVFoundation/AVFoundation.h>
@implementation ClippingAudioSource {
UriAudioSource *_audioSource;
CMTime _start;
CMTime _end;
}
- (instancetype)initWithId:(NSString *)sid audioSource:(UriAudioSource *)audioSource start:(NSNumber *)start end:(NSNumber *)end {
self = [super initWithId:sid];
NSAssert(self, @"super init cannot be nil");
_audioSource = audioSource;
_start = start == [NSNull null] ? kCMTimeZero : CMTimeMake([start intValue], 1000);
_end = end == [NSNull null] ? kCMTimeInvalid : CMTimeMake([end intValue], 1000);
return self;
}
- (UriAudioSource *)audioSource {
return _audioSource;
}
- (void)findById:(NSString *)sourceId matches:(NSMutableArray<AudioSource *> *)matches {
[super findById:sourceId matches:matches];
[_audioSource findById:sourceId matches:matches];
}
- (void)attach:(AVQueuePlayer *)player {
[super attach:player];
_audioSource.playerItem.forwardPlaybackEndTime = _end;
// XXX: Not needed since currentItem observer handles it?
[self seek:kCMTimeZero];
}
- (IndexedPlayerItem *)playerItem {
return _audioSource.playerItem;
}
- (NSArray *)getShuffleOrder {
return @[@(0)];
}
- (void)play:(AVQueuePlayer *)player {
}
- (void)pause:(AVQueuePlayer *)player {
}
- (void)stop:(AVQueuePlayer *)player {
}
- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler {
if (!completionHandler || (self.playerItem.status == AVPlayerItemStatusReadyToPlay)) {
CMTime absPosition = CMTimeAdd(_start, position);
[_audioSource.playerItem seekToTime:absPosition toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero completionHandler:completionHandler];
}
}
- (CMTime)duration {
return CMTimeSubtract(CMTIME_IS_INVALID(_end) ? self.playerItem.duration : _end, _start);
}
- (void)setDuration:(CMTime)duration {
}
- (CMTime)position {
return CMTimeSubtract(self.playerItem.currentTime, _start);
}
- (CMTime)bufferedPosition {
CMTime pos = CMTimeSubtract(_audioSource.bufferedPosition, _start);
CMTime dur = [self duration];
return CMTimeCompare(pos, dur) >= 0 ? dur : pos;
}
@end

View File

@ -1,109 +0,0 @@
#import "AudioSource.h"
#import "ConcatenatingAudioSource.h"
#import <AVFoundation/AVFoundation.h>
#import <stdlib.h>
@implementation ConcatenatingAudioSource {
NSMutableArray<AudioSource *> *_audioSources;
NSMutableArray<NSNumber *> *_shuffleOrder;
}
- (instancetype)initWithId:(NSString *)sid audioSources:(NSMutableArray<AudioSource *> *)audioSources {
self = [super initWithId:sid];
NSAssert(self, @"super init cannot be nil");
_audioSources = audioSources;
return self;
}
- (int)count {
return _audioSources.count;
}
- (void)insertSource:(AudioSource *)audioSource atIndex:(int)index {
[_audioSources insertObject:audioSource atIndex:index];
}
- (void)removeSourcesFromIndex:(int)start toIndex:(int)end {
if (end == -1) end = _audioSources.count;
for (int i = start; i < end; i++) {
[_audioSources removeObjectAtIndex:start];
}
}
- (void)moveSourceFromIndex:(int)currentIndex toIndex:(int)newIndex {
AudioSource *source = _audioSources[currentIndex];
[_audioSources removeObjectAtIndex:currentIndex];
[_audioSources insertObject:source atIndex:newIndex];
}
- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex {
for (int i = 0; i < [_audioSources count]; i++) {
treeIndex = [_audioSources[i] buildSequence:sequence treeIndex:treeIndex];
}
return treeIndex;
}
- (void)findById:(NSString *)sourceId matches:(NSMutableArray<AudioSource *> *)matches {
[super findById:sourceId matches:matches];
for (int i = 0; i < [_audioSources count]; i++) {
[_audioSources[i] findById:sourceId matches:matches];
}
}
- (NSArray *)getShuffleOrder {
NSMutableArray *order = [NSMutableArray new];
int offset = [order count];
NSMutableArray *childOrders = [NSMutableArray new]; // array of array of ints
for (int i = 0; i < [_audioSources count]; i++) {
AudioSource *audioSource = _audioSources[i];
NSArray *childShuffleOrder = [audioSource getShuffleOrder];
NSMutableArray *offsetChildShuffleOrder = [NSMutableArray new];
for (int j = 0; j < [childShuffleOrder count]; j++) {
[offsetChildShuffleOrder addObject:@([childShuffleOrder[j] integerValue] + offset)];
}
[childOrders addObject:offsetChildShuffleOrder];
offset += [childShuffleOrder count];
}
for (int i = 0; i < [_audioSources count]; i++) {
[order addObjectsFromArray:childOrders[[_shuffleOrder[i] integerValue]]];
}
return order;
}
- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex {
int currentChildIndex = -1;
for (int i = 0; i < [_audioSources count]; i++) {
int indexBefore = treeIndex;
AudioSource *child = _audioSources[i];
treeIndex = [child shuffle:treeIndex currentIndex:currentIndex];
if (currentIndex >= indexBefore && currentIndex < treeIndex) {
currentChildIndex = i;
}
}
// Shuffle so that the current child is first in the shuffle order
_shuffleOrder = [NSMutableArray arrayWithCapacity:[_audioSources count]];
for (int i = 0; i < [_audioSources count]; i++) {
[_shuffleOrder addObject:@(0)];
}
NSLog(@"shuffle: audioSources.count=%d and shuffleOrder.count=%d", (int)[_audioSources count], (int)[_shuffleOrder count]);
// First generate a random shuffle
for (int i = 0; i < [_audioSources count]; i++) {
int j = arc4random_uniform(i + 1);
_shuffleOrder[i] = _shuffleOrder[j];
_shuffleOrder[j] = @(i);
}
// Then bring currentIndex to the front
if (currentChildIndex != -1) {
for (int i = 1; i < [_audioSources count]; i++) {
if ([_shuffleOrder[i] integerValue] == currentChildIndex) {
NSNumber *v = _shuffleOrder[0];
_shuffleOrder[0] = _shuffleOrder[i];
_shuffleOrder[i] = v;
break;
}
}
}
return treeIndex;
}
@end
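The shuffle method above first builds an inside-out Fisher–Yates permutation of the children and then swaps the current child's slot to the front of the order. A standalone sketch of the same technique in Java (a hypothetical illustration under those assumptions, not code from the plugin):

import java.util.Random;

class ShuffleSketch {
    // Build a random permutation of 0..length-1 and make sure firstIndex ends up at position 0.
    static int[] shuffleWithFirst(int length, int firstIndex, Random random) {
        int[] order = new int[length];
        // Inside-out Fisher–Yates: after step i, order[0..i] is a permutation of 0..i.
        for (int i = 0; i < length; i++) {
            int j = random.nextInt(i + 1);
            order[i] = order[j];
            order[j] = i;
        }
        // Bring the current child to the front, mirroring the swap loop above.
        if (firstIndex >= 0) {
            for (int i = 1; i < length; i++) {
                if (order[i] == firstIndex) {
                    int v = order[0];
                    order[0] = order[i];
                    order[i] = v;
                    break;
                }
            }
        }
        return order;
    }
}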

View File

@ -1,68 +0,0 @@
#import "IndexedAudioSource.h"
#import "IndexedPlayerItem.h"
#import <AVFoundation/AVFoundation.h>
@implementation IndexedAudioSource {
BOOL _isAttached;
}
- (instancetype)initWithId:(NSString *)sid {
self = [super init];
NSAssert(self, @"super init cannot be nil");
_isAttached = NO;
return self;
}
- (IndexedPlayerItem *)playerItem {
return nil;
}
- (BOOL)isAttached {
return _isAttached;
}
- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex {
[sequence addObject:self];
return treeIndex + 1;
}
- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex {
return treeIndex + 1;
}
- (void)attach:(AVQueuePlayer *)player {
_isAttached = YES;
}
- (void)play:(AVQueuePlayer *)player {
}
- (void)pause:(AVQueuePlayer *)player {
}
- (void)stop:(AVQueuePlayer *)player {
}
- (void)seek:(CMTime)position {
[self seek:position completionHandler:nil];
}
- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler {
}
- (CMTime)duration {
return kCMTimeInvalid;
}
- (void)setDuration:(CMTime)duration {
}
- (CMTime)position {
return kCMTimeInvalid;
}
- (CMTime)bufferedPosition {
return kCMTimeInvalid;
}
@end

View File

@ -1,16 +0,0 @@
#import "IndexedPlayerItem.h"
#import "IndexedAudioSource.h"
@implementation IndexedPlayerItem {
IndexedAudioSource *_audioSource;
}
-(void)setAudioSource:(IndexedAudioSource *)audioSource {
_audioSource = audioSource;
}
-(IndexedAudioSource *)audioSource {
return _audioSource;
}
@end

View File

@ -1,55 +0,0 @@
#import "JustAudioPlugin.h"
#import "AudioPlayer.h"
#import <AVFoundation/AVFoundation.h>
#include <TargetConditionals.h>
@implementation JustAudioPlugin {
NSObject<FlutterPluginRegistrar>* _registrar;
BOOL _configuredSession;
}
+ (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar>*)registrar {
FlutterMethodChannel* channel = [FlutterMethodChannel
methodChannelWithName:@"com.ryanheise.just_audio.methods"
binaryMessenger:[registrar messenger]];
JustAudioPlugin* instance = [[JustAudioPlugin alloc] initWithRegistrar:registrar];
[registrar addMethodCallDelegate:instance channel:channel];
}
- (instancetype)initWithRegistrar:(NSObject<FlutterPluginRegistrar> *)registrar {
self = [super init];
NSAssert(self, @"super init cannot be nil");
_registrar = registrar;
return self;
}
- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result {
if ([@"init" isEqualToString:call.method]) {
NSArray* args = (NSArray*)call.arguments;
NSString* playerId = args[0];
/*AudioPlayer* player =*/ [[AudioPlayer alloc] initWithRegistrar:_registrar playerId:playerId configuredSession:_configuredSession];
result(nil);
} else if ([@"setIosCategory" isEqualToString:call.method]) {
#if TARGET_OS_IPHONE
NSNumber* categoryIndex = (NSNumber*)call.arguments;
AVAudioSessionCategory category = nil;
switch (categoryIndex.integerValue) {
case 0: category = AVAudioSessionCategoryAmbient; break;
case 1: category = AVAudioSessionCategorySoloAmbient; break;
case 2: category = AVAudioSessionCategoryPlayback; break;
case 3: category = AVAudioSessionCategoryRecord; break;
case 4: category = AVAudioSessionCategoryPlayAndRecord; break;
case 5: category = AVAudioSessionCategoryMultiRoute; break;
}
if (category) {
_configuredSession = YES;
}
[[AVAudioSession sharedInstance] setCategory:category error:nil];
#endif
result(nil);
} else {
result(FlutterMethodNotImplemented);
}
}
@end

View File

@ -1,53 +0,0 @@
#import "AudioSource.h"
#import "LoopingAudioSource.h"
#import <AVFoundation/AVFoundation.h>
@implementation LoopingAudioSource {
// An array of duplicates
NSArray<AudioSource *> *_audioSources; // <AudioSource *>
}
- (instancetype)initWithId:(NSString *)sid audioSources:(NSArray<AudioSource *> *)audioSources {
self = [super initWithId:sid];
NSAssert(self, @"super init cannot be nil");
_audioSources = audioSources;
return self;
}
- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex {
for (int i = 0; i < [_audioSources count]; i++) {
treeIndex = [_audioSources[i] buildSequence:sequence treeIndex:treeIndex];
}
return treeIndex;
}
- (void)findById:(NSString *)sourceId matches:(NSMutableArray<AudioSource *> *)matches {
[super findById:sourceId matches:matches];
for (int i = 0; i < [_audioSources count]; i++) {
[_audioSources[i] findById:sourceId matches:matches];
}
}
- (NSArray *)getShuffleOrder {
NSMutableArray *order = [NSMutableArray new];
int offset = (int)[order count];
for (int i = 0; i < [_audioSources count]; i++) {
AudioSource *audioSource = _audioSources[i];
NSArray *childShuffleOrder = [audioSource getShuffleOrder];
for (int j = 0; j < [childShuffleOrder count]; j++) {
[order addObject:@([childShuffleOrder[j] integerValue] + offset)];
}
offset += [childShuffleOrder count];
}
return order;
}
- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex {
// TODO: This should probably shuffle the same way on all duplicates.
for (int i = 0; i < [_audioSources count]; i++) {
treeIndex = [_audioSources[i] shuffle:treeIndex currentIndex:currentIndex];
}
return treeIndex;
}
@end

View File

@ -1,79 +0,0 @@
#import "UriAudioSource.h"
#import "IndexedAudioSource.h"
#import "IndexedPlayerItem.h"
#import <AVFoundation/AVFoundation.h>
@implementation UriAudioSource {
NSString *_uri;
IndexedPlayerItem *_playerItem;
/* CMTime _duration; */
}
- (instancetype)initWithId:(NSString *)sid uri:(NSString *)uri {
self = [super initWithId:sid];
NSAssert(self, @"super init cannot be nil");
_uri = uri;
if ([_uri hasPrefix:@"file://"]) {
_playerItem = [[IndexedPlayerItem alloc] initWithURL:[NSURL fileURLWithPath:[_uri substringFromIndex:7]]];
} else {
_playerItem = [[IndexedPlayerItem alloc] initWithURL:[NSURL URLWithString:_uri]];
}
if (@available(macOS 10.13, iOS 11.0, *)) {
// This does the best at reducing distortion on voice with speeds below 1.0
_playerItem.audioTimePitchAlgorithm = AVAudioTimePitchAlgorithmTimeDomain;
}
/* NSKeyValueObservingOptions options = */
/* NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew; */
/* [_playerItem addObserver:self */
/* forKeyPath:@"duration" */
/* options:options */
/* context:nil]; */
return self;
}
- (IndexedPlayerItem *)playerItem {
return _playerItem;
}
- (NSArray *)getShuffleOrder {
return @[@(0)];
}
- (void)play:(AVQueuePlayer *)player {
}
- (void)pause:(AVQueuePlayer *)player {
}
- (void)stop:(AVQueuePlayer *)player {
}
- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler {
if (!completionHandler || (_playerItem.status == AVPlayerItemStatusReadyToPlay)) {
[_playerItem seekToTime:position toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero completionHandler:completionHandler];
}
}
- (CMTime)duration {
return _playerItem.duration;
}
- (void)setDuration:(CMTime)duration {
}
- (CMTime)position {
return _playerItem.currentTime;
}
- (CMTime)bufferedPosition {
NSValue *last = _playerItem.loadedTimeRanges.lastObject;
if (last) {
CMTimeRange timeRange = [last CMTimeRangeValue];
return CMTimeAdd(timeRange.start, timeRange.duration);
} else {
return _playerItem.currentTime;
}
}
@end

View File

@ -1,37 +0,0 @@
.idea/
.vagrant/
.sconsign.dblite
.svn/
.DS_Store
*.swp
profile
DerivedData/
build/
GeneratedPluginRegistrant.h
GeneratedPluginRegistrant.m
.generated/
*.pbxuser
*.mode1v3
*.mode2v3
*.perspectivev3
!default.pbxuser
!default.mode1v3
!default.mode2v3
!default.perspectivev3
xcuserdata
*.moved-aside
*.pyc
*sync/
Icon?
.tags*
/Flutter/Generated.xcconfig
/Flutter/flutter_export_environment.sh

View File

@ -1,21 +0,0 @@
#import <Flutter/Flutter.h>
@interface AudioPlayer : NSObject<FlutterStreamHandler>
- (instancetype)initWithRegistrar:(NSObject<FlutterPluginRegistrar> *)registrar playerId:(NSString*)idParam configuredSession:(BOOL)configuredSession;
@end
enum ProcessingState {
none,
loading,
buffering,
ready,
completed
};
enum LoopMode {
loopOff,
loopOne,
loopAll
};

File diff suppressed because it is too large.

View File

@ -1,13 +0,0 @@
#import <Flutter/Flutter.h>
@interface AudioSource : NSObject
@property (readonly, nonatomic) NSString* sourceId;
- (instancetype)initWithId:(NSString *)sid;
- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex;
- (void)findById:(NSString *)sourceId matches:(NSMutableArray<AudioSource *> *)matches;
- (NSArray *)getShuffleOrder;
- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex;
@end

View File

@ -1,37 +0,0 @@
#import "AudioSource.h"
#import <AVFoundation/AVFoundation.h>
@implementation AudioSource {
NSString *_sourceId;
}
- (instancetype)initWithId:(NSString *)sid {
self = [super init];
NSAssert(self, @"super init cannot be nil");
_sourceId = sid;
return self;
}
- (NSString *)sourceId {
return _sourceId;
}
- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex {
return 0;
}
- (void)findById:(NSString *)sourceId matches:(NSMutableArray<AudioSource *> *)matches {
if ([_sourceId isEqualToString:sourceId]) {
[matches addObject:self];
}
}
- (NSArray *)getShuffleOrder {
return @[];
}
- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex {
return 0;
}
@end

View File

@ -1,11 +0,0 @@
#import "AudioSource.h"
#import "UriAudioSource.h"
#import <Flutter/Flutter.h>
@interface ClippingAudioSource : IndexedAudioSource
@property (readonly, nonatomic) UriAudioSource* audioSource;
- (instancetype)initWithId:(NSString *)sid audioSource:(UriAudioSource *)audioSource start:(NSNumber *)start end:(NSNumber *)end;
@end

View File

@ -1,79 +0,0 @@
#import "AudioSource.h"
#import "ClippingAudioSource.h"
#import "IndexedPlayerItem.h"
#import "UriAudioSource.h"
#import <AVFoundation/AVFoundation.h>
@implementation ClippingAudioSource {
UriAudioSource *_audioSource;
CMTime _start;
CMTime _end;
}
- (instancetype)initWithId:(NSString *)sid audioSource:(UriAudioSource *)audioSource start:(NSNumber *)start end:(NSNumber *)end {
self = [super initWithId:sid];
NSAssert(self, @"super init cannot be nil");
_audioSource = audioSource;
_start = start == [NSNull null] ? kCMTimeZero : CMTimeMake([start intValue], 1000);
_end = end == [NSNull null] ? kCMTimeInvalid : CMTimeMake([end intValue], 1000);
return self;
}
- (UriAudioSource *)audioSource {
return _audioSource;
}
- (void)findById:(NSString *)sourceId matches:(NSMutableArray<AudioSource *> *)matches {
[super findById:sourceId matches:matches];
[_audioSource findById:sourceId matches:matches];
}
- (void)attach:(AVQueuePlayer *)player {
[super attach:player];
_audioSource.playerItem.forwardPlaybackEndTime = _end;
// XXX: Not needed since currentItem observer handles it?
[self seek:kCMTimeZero];
}
- (IndexedPlayerItem *)playerItem {
return _audioSource.playerItem;
}
- (NSArray *)getShuffleOrder {
return @[@(0)];
}
- (void)play:(AVQueuePlayer *)player {
}
- (void)pause:(AVQueuePlayer *)player {
}
- (void)stop:(AVQueuePlayer *)player {
}
- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler {
if (!completionHandler || (self.playerItem.status == AVPlayerItemStatusReadyToPlay)) {
CMTime absPosition = CMTimeAdd(_start, position);
[_audioSource.playerItem seekToTime:absPosition toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero completionHandler:completionHandler];
}
}
- (CMTime)duration {
return CMTimeSubtract(CMTIME_IS_INVALID(_end) ? self.playerItem.duration : _end, _start);
}
- (void)setDuration:(CMTime)duration {
}
- (CMTime)position {
return CMTimeSubtract(self.playerItem.currentTime, _start);
}
- (CMTime)bufferedPosition {
CMTime pos = CMTimeSubtract(_audioSource.bufferedPosition, _start);
CMTime dur = [self duration];
return CMTimeCompare(pos, dur) >= 0 ? dur : pos;
}
@end

View File

@ -1,13 +0,0 @@
#import "AudioSource.h"
#import <Flutter/Flutter.h>
@interface ConcatenatingAudioSource : AudioSource
@property (readonly, nonatomic) int count;
- (instancetype)initWithId:(NSString *)sid audioSources:(NSMutableArray<AudioSource *> *)audioSources;
- (void)insertSource:(AudioSource *)audioSource atIndex:(int)index;
- (void)removeSourcesFromIndex:(int)start toIndex:(int)end;
- (void)moveSourceFromIndex:(int)currentIndex toIndex:(int)newIndex;
@end

View File

@ -1,109 +0,0 @@
#import "AudioSource.h"
#import "ConcatenatingAudioSource.h"
#import <AVFoundation/AVFoundation.h>
#import <stdlib.h>
@implementation ConcatenatingAudioSource {
NSMutableArray<AudioSource *> *_audioSources;
NSMutableArray<NSNumber *> *_shuffleOrder;
}
- (instancetype)initWithId:(NSString *)sid audioSources:(NSMutableArray<AudioSource *> *)audioSources {
self = [super initWithId:sid];
NSAssert(self, @"super init cannot be nil");
_audioSources = audioSources;
return self;
}
- (int)count {
return _audioSources.count;
}
- (void)insertSource:(AudioSource *)audioSource atIndex:(int)index {
[_audioSources insertObject:audioSource atIndex:index];
}
- (void)removeSourcesFromIndex:(int)start toIndex:(int)end {
if (end == -1) end = _audioSources.count;
for (int i = start; i < end; i++) {
[_audioSources removeObjectAtIndex:start];
}
}
- (void)moveSourceFromIndex:(int)currentIndex toIndex:(int)newIndex {
AudioSource *source = _audioSources[currentIndex];
[_audioSources removeObjectAtIndex:currentIndex];
[_audioSources insertObject:source atIndex:newIndex];
}
- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex {
for (int i = 0; i < [_audioSources count]; i++) {
treeIndex = [_audioSources[i] buildSequence:sequence treeIndex:treeIndex];
}
return treeIndex;
}
- (void)findById:(NSString *)sourceId matches:(NSMutableArray<AudioSource *> *)matches {
[super findById:sourceId matches:matches];
for (int i = 0; i < [_audioSources count]; i++) {
[_audioSources[i] findById:sourceId matches:matches];
}
}
- (NSArray *)getShuffleOrder {
NSMutableArray *order = [NSMutableArray new];
int offset = [order count];
NSMutableArray *childOrders = [NSMutableArray new]; // array of array of ints
for (int i = 0; i < [_audioSources count]; i++) {
AudioSource *audioSource = _audioSources[i];
NSArray *childShuffleOrder = [audioSource getShuffleOrder];
NSMutableArray *offsetChildShuffleOrder = [NSMutableArray new];
for (int j = 0; j < [childShuffleOrder count]; j++) {
[offsetChildShuffleOrder addObject:@([childShuffleOrder[j] integerValue] + offset)];
}
[childOrders addObject:offsetChildShuffleOrder];
offset += [childShuffleOrder count];
}
for (int i = 0; i < [_audioSources count]; i++) {
[order addObjectsFromArray:childOrders[[_shuffleOrder[i] integerValue]]];
}
return order;
}
- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex {
int currentChildIndex = -1;
for (int i = 0; i < [_audioSources count]; i++) {
int indexBefore = treeIndex;
AudioSource *child = _audioSources[i];
treeIndex = [child shuffle:treeIndex currentIndex:currentIndex];
if (currentIndex >= indexBefore && currentIndex < treeIndex) {
currentChildIndex = i;
}
}
// Shuffle so that the current child is first in the shuffle order
_shuffleOrder = [NSMutableArray arrayWithCapacity:[_audioSources count]];
for (int i = 0; i < [_audioSources count]; i++) {
[_shuffleOrder addObject:@(0)];
}
NSLog(@"shuffle: audioSources.count=%d and shuffleOrder.count=%d", (int)[_audioSources count], (int)[_shuffleOrder count]);
// First generate a random shuffle
for (int i = 0; i < [_audioSources count]; i++) {
int j = arc4random_uniform(i + 1);
_shuffleOrder[i] = _shuffleOrder[j];
_shuffleOrder[j] = @(i);
}
// Then bring currentIndex to the front
if (currentChildIndex != -1) {
for (int i = 1; i < [_audioSources count]; i++) {
if ([_shuffleOrder[i] integerValue] == currentChildIndex) {
NSNumber *v = _shuffleOrder[0];
_shuffleOrder[0] = _shuffleOrder[i];
_shuffleOrder[i] = v;
break;
}
}
}
return treeIndex;
}
@end

View File

@ -1,21 +0,0 @@
#import "AudioSource.h"
#import "IndexedPlayerItem.h"
#import <Flutter/Flutter.h>
#import <AVFoundation/AVFoundation.h>
@interface IndexedAudioSource : AudioSource
@property (readonly, nonatomic) IndexedPlayerItem *playerItem;
@property (readwrite, nonatomic) CMTime duration;
@property (readonly, nonatomic) CMTime position;
@property (readonly, nonatomic) CMTime bufferedPosition;
@property (readonly, nonatomic) BOOL isAttached;
- (void)attach:(AVQueuePlayer *)player;
- (void)play:(AVQueuePlayer *)player;
- (void)pause:(AVQueuePlayer *)player;
- (void)stop:(AVQueuePlayer *)player;
- (void)seek:(CMTime)position;
- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler;
@end

View File

@ -1,68 +0,0 @@
#import "IndexedAudioSource.h"
#import "IndexedPlayerItem.h"
#import <AVFoundation/AVFoundation.h>
@implementation IndexedAudioSource {
BOOL _isAttached;
}
- (instancetype)initWithId:(NSString *)sid {
self = [super init];
NSAssert(self, @"super init cannot be nil");
_isAttached = NO;
return self;
}
- (IndexedPlayerItem *)playerItem {
return nil;
}
- (BOOL)isAttached {
return _isAttached;
}
- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex {
[sequence addObject:self];
return treeIndex + 1;
}
- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex {
return treeIndex + 1;
}
- (void)attach:(AVQueuePlayer *)player {
_isAttached = YES;
}
- (void)play:(AVQueuePlayer *)player {
}
- (void)pause:(AVQueuePlayer *)player {
}
- (void)stop:(AVQueuePlayer *)player {
}
- (void)seek:(CMTime)position {
[self seek:position completionHandler:nil];
}
- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler {
}
- (CMTime)duration {
return kCMTimeInvalid;
}
- (void)setDuration:(CMTime)duration {
}
- (CMTime)position {
return kCMTimeInvalid;
}
- (CMTime)bufferedPosition {
return kCMTimeInvalid;
}
@end

View File

@ -1,9 +0,0 @@
#import <AVFoundation/AVFoundation.h>
@class IndexedAudioSource;
@interface IndexedPlayerItem : AVPlayerItem
@property (readwrite, nonatomic) IndexedAudioSource *audioSource;
@end

View File

@ -1,16 +0,0 @@
#import "IndexedPlayerItem.h"
#import "IndexedAudioSource.h"
@implementation IndexedPlayerItem {
IndexedAudioSource *_audioSource;
}
-(void)setAudioSource:(IndexedAudioSource *)audioSource {
_audioSource = audioSource;
}
-(IndexedAudioSource *)audioSource {
return _audioSource;
}
@end

View File

@ -1,4 +0,0 @@
#import <Flutter/Flutter.h>
@interface JustAudioPlugin : NSObject<FlutterPlugin>
@end

View File

@ -1,55 +0,0 @@
#import "JustAudioPlugin.h"
#import "AudioPlayer.h"
#import <AVFoundation/AVFoundation.h>
#include <TargetConditionals.h>
@implementation JustAudioPlugin {
NSObject<FlutterPluginRegistrar>* _registrar;
BOOL _configuredSession;
}
+ (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar>*)registrar {
FlutterMethodChannel* channel = [FlutterMethodChannel
methodChannelWithName:@"com.ryanheise.just_audio.methods"
binaryMessenger:[registrar messenger]];
JustAudioPlugin* instance = [[JustAudioPlugin alloc] initWithRegistrar:registrar];
[registrar addMethodCallDelegate:instance channel:channel];
}
- (instancetype)initWithRegistrar:(NSObject<FlutterPluginRegistrar> *)registrar {
self = [super init];
NSAssert(self, @"super init cannot be nil");
_registrar = registrar;
return self;
}
- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result {
if ([@"init" isEqualToString:call.method]) {
NSArray* args = (NSArray*)call.arguments;
NSString* playerId = args[0];
/*AudioPlayer* player =*/ [[AudioPlayer alloc] initWithRegistrar:_registrar playerId:playerId configuredSession:_configuredSession];
result(nil);
} else if ([@"setIosCategory" isEqualToString:call.method]) {
#if TARGET_OS_IPHONE
NSNumber* categoryIndex = (NSNumber*)call.arguments;
AVAudioSessionCategory category = nil;
switch (categoryIndex.integerValue) {
case 0: category = AVAudioSessionCategoryAmbient; break;
case 1: category = AVAudioSessionCategorySoloAmbient; break;
case 2: category = AVAudioSessionCategoryPlayback; break;
case 3: category = AVAudioSessionCategoryRecord; break;
case 4: category = AVAudioSessionCategoryPlayAndRecord; break;
case 5: category = AVAudioSessionCategoryMultiRoute; break;
}
if (category) {
_configuredSession = YES;
}
[[AVAudioSession sharedInstance] setCategory:category error:nil];
#endif
result(nil);
} else {
result(FlutterMethodNotImplemented);
}
}
@end

View File

@ -1,8 +0,0 @@
#import "AudioSource.h"
#import <Flutter/Flutter.h>
@interface LoopingAudioSource : AudioSource
- (instancetype)initWithId:(NSString *)sid audioSources:(NSArray<AudioSource *> *)audioSources;
@end

View File

@ -1,53 +0,0 @@
#import "AudioSource.h"
#import "LoopingAudioSource.h"
#import <AVFoundation/AVFoundation.h>
@implementation LoopingAudioSource {
// An array of duplicates
NSArray<AudioSource *> *_audioSources; // <AudioSource *>
}
- (instancetype)initWithId:(NSString *)sid audioSources:(NSArray<AudioSource *> *)audioSources {
self = [super initWithId:sid];
NSAssert(self, @"super init cannot be nil");
_audioSources = audioSources;
return self;
}
- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex {
for (int i = 0; i < [_audioSources count]; i++) {
treeIndex = [_audioSources[i] buildSequence:sequence treeIndex:treeIndex];
}
return treeIndex;
}
- (void)findById:(NSString *)sourceId matches:(NSMutableArray<AudioSource *> *)matches {
[super findById:sourceId matches:matches];
for (int i = 0; i < [_audioSources count]; i++) {
[_audioSources[i] findById:sourceId matches:matches];
}
}
- (NSArray *)getShuffleOrder {
NSMutableArray *order = [NSMutableArray new];
int offset = (int)[order count];
for (int i = 0; i < [_audioSources count]; i++) {
AudioSource *audioSource = _audioSources[i];
NSArray *childShuffleOrder = [audioSource getShuffleOrder];
for (int j = 0; j < [childShuffleOrder count]; j++) {
[order addObject:@([childShuffleOrder[j] integerValue] + offset)];
}
offset += [childShuffleOrder count];
}
return order;
}
- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex {
// TODO: This should probably shuffle the same way on all duplicates.
for (int i = 0; i < [_audioSources count]; i++) {
treeIndex = [_audioSources[i] shuffle:treeIndex currentIndex:currentIndex];
}
return treeIndex;
}
@end

View File

@ -1,8 +0,0 @@
#import "IndexedAudioSource.h"
#import <Flutter/Flutter.h>
@interface UriAudioSource : IndexedAudioSource
- (instancetype)initWithId:(NSString *)sid uri:(NSString *)uri;
@end

View File

@ -1,79 +0,0 @@
#import "UriAudioSource.h"
#import "IndexedAudioSource.h"
#import "IndexedPlayerItem.h"
#import <AVFoundation/AVFoundation.h>
@implementation UriAudioSource {
NSString *_uri;
IndexedPlayerItem *_playerItem;
/* CMTime _duration; */
}
- (instancetype)initWithId:(NSString *)sid uri:(NSString *)uri {
self = [super initWithId:sid];
NSAssert(self, @"super init cannot be nil");
_uri = uri;
if ([_uri hasPrefix:@"file://"]) {
_playerItem = [[IndexedPlayerItem alloc] initWithURL:[NSURL fileURLWithPath:[_uri substringFromIndex:7]]];
} else {
_playerItem = [[IndexedPlayerItem alloc] initWithURL:[NSURL URLWithString:_uri]];
}
if (@available(macOS 10.13, iOS 11.0, *)) {
// This does the best at reducing distortion on voice with speeds below 1.0
_playerItem.audioTimePitchAlgorithm = AVAudioTimePitchAlgorithmTimeDomain;
}
/* NSKeyValueObservingOptions options = */
/* NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew; */
/* [_playerItem addObserver:self */
/* forKeyPath:@"duration" */
/* options:options */
/* context:nil]; */
return self;
}
- (IndexedPlayerItem *)playerItem {
return _playerItem;
}
- (NSArray *)getShuffleOrder {
return @[@(0)];
}
- (void)play:(AVQueuePlayer *)player {
}
- (void)pause:(AVQueuePlayer *)player {
}
- (void)stop:(AVQueuePlayer *)player {
}
- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler {
if (!completionHandler || (_playerItem.status == AVPlayerItemStatusReadyToPlay)) {
[_playerItem seekToTime:position toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero completionHandler:completionHandler];
}
}
- (CMTime)duration {
return _playerItem.duration;
}
- (void)setDuration:(CMTime)duration {
}
- (CMTime)position {
return _playerItem.currentTime;
}
- (CMTime)bufferedPosition {
NSValue *last = _playerItem.loadedTimeRanges.lastObject;
if (last) {
CMTimeRange timeRange = [last CMTimeRangeValue];
return CMTimeAdd(timeRange.start, timeRange.duration);
} else {
return _playerItem.currentTime;
}
}
@end

View File

@ -1,21 +0,0 @@
#
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
s.name = 'just_audio'
s.version = '0.0.1'
s.summary = 'A new flutter plugin project.'
s.description = <<-DESC
A new flutter plugin project.
DESC
s.homepage = 'http://example.com'
s.license = { :file => '../LICENSE' }
s.author = { 'Your Company' => 'email@example.com' }
s.source = { :path => '.' }
s.source_files = 'Classes/**/*'
s.public_header_files = 'Classes/**/*.h'
s.dependency 'Flutter'
s.platform = :ios, '8.0'
s.pod_target_xcconfig = { 'DEFINES_MODULE' => 'YES', 'VALID_ARCHS[sdk=iphonesimulator*]' => 'x86_64' }
end

File diff suppressed because it is too large.

View File

@ -1,957 +0,0 @@
import 'dart:async';
import 'dart:html';
import 'dart:math';
import 'package:flutter/services.dart';
import 'package:flutter/widgets.dart';
import 'package:flutter_web_plugins/flutter_web_plugins.dart';
import 'package:just_audio/just_audio.dart';
final Random _random = Random();
class JustAudioPlugin {
static void registerWith(Registrar registrar) {
final MethodChannel channel = MethodChannel(
'com.ryanheise.just_audio.methods',
const StandardMethodCodec(),
registrar.messenger);
final JustAudioPlugin instance = JustAudioPlugin(registrar);
channel.setMethodCallHandler(instance.handleMethodCall);
}
final Registrar registrar;
JustAudioPlugin(this.registrar);
Future<dynamic> handleMethodCall(MethodCall call) async {
switch (call.method) {
case 'init':
final String id = call.arguments[0];
new Html5AudioPlayer(id: id, registrar: registrar);
return null;
case 'setIosCategory':
return null;
default:
throw PlatformException(code: 'Unimplemented');
}
}
}
abstract class JustAudioPlayer {
final String id;
final Registrar registrar;
final MethodChannel methodChannel;
final PluginEventChannel eventChannel;
final StreamController eventController = StreamController();
ProcessingState _processingState = ProcessingState.none;
bool _playing = false;
int _index;
JustAudioPlayer({@required this.id, @required this.registrar})
: methodChannel = MethodChannel('com.ryanheise.just_audio.methods.$id',
const StandardMethodCodec(), registrar.messenger),
eventChannel = PluginEventChannel('com.ryanheise.just_audio.events.$id',
const StandardMethodCodec(), registrar.messenger) {
methodChannel.setMethodCallHandler(_methodHandler);
eventChannel.controller = eventController;
}
Future<dynamic> _methodHandler(MethodCall call) async {
try {
final args = call.arguments;
switch (call.method) {
case 'load':
return await load(args[0]);
case 'play':
return await play();
case 'pause':
return await pause();
case 'setVolume':
return await setVolume(args[0]);
case 'setSpeed':
return await setSpeed(args[0]);
case 'setLoopMode':
return await setLoopMode(args[0]);
case 'setShuffleModeEnabled':
return await setShuffleModeEnabled(args[0]);
case 'setAutomaticallyWaitsToMinimizeStalling':
return null;
case 'seek':
return await seek(args[0], args[1]);
case 'dispose':
return dispose();
case 'concatenating.add':
return await concatenatingAdd(args[0], args[1]);
case "concatenating.insert":
return await concatenatingInsert(args[0], args[1], args[2]);
case "concatenating.addAll":
return await concatenatingAddAll(args[0], args[1]);
case "concatenating.insertAll":
return await concatenatingInsertAll(args[0], args[1], args[2]);
case "concatenating.removeAt":
return await concatenatingRemoveAt(args[0], args[1]);
case "concatenating.removeRange":
return await concatenatingRemoveRange(args[0], args[1], args[2]);
case "concatenating.move":
return await concatenatingMove(args[0], args[1], args[2]);
case "concatenating.clear":
return await concatenatingClear(args[0]);
default:
throw PlatformException(code: 'Unimplemented');
}
} catch (e, stacktrace) {
print("$stacktrace");
rethrow;
}
}
Future<int> load(Map source);
Future<void> play();
Future<void> pause();
Future<void> setVolume(double volume);
Future<void> setSpeed(double speed);
Future<void> setLoopMode(int mode);
Future<void> setShuffleModeEnabled(bool enabled);
Future<void> seek(int position, int index);
@mustCallSuper
void dispose() {
eventController.close();
}
Duration getCurrentPosition();
Duration getBufferedPosition();
Duration getDuration();
concatenatingAdd(String playerId, Map source);
concatenatingInsert(String playerId, int index, Map source);
concatenatingAddAll(String playerId, List sources);
concatenatingInsertAll(String playerId, int index, List sources);
concatenatingRemoveAt(String playerId, int index);
concatenatingRemoveRange(String playerId, int start, int end);
concatenatingMove(String playerId, int currentIndex, int newIndex);
concatenatingClear(String playerId);
broadcastPlaybackEvent() {
var updateTime = DateTime.now().millisecondsSinceEpoch;
eventController.add({
'processingState': _processingState.index,
'updatePosition': getCurrentPosition()?.inMilliseconds,
'updateTime': updateTime,
'bufferedPosition': getBufferedPosition()?.inMilliseconds,
// TODO: Icy Metadata
'icyMetadata': null,
'duration': getDuration()?.inMilliseconds,
'currentIndex': _index,
});
}
transition(ProcessingState processingState) {
_processingState = processingState;
broadcastPlaybackEvent();
}
}
class Html5AudioPlayer extends JustAudioPlayer {
AudioElement _audioElement = AudioElement();
Completer _durationCompleter;
AudioSourcePlayer _audioSourcePlayer;
LoopMode _loopMode = LoopMode.off;
bool _shuffleModeEnabled = false;
final Map<String, AudioSourcePlayer> _audioSourcePlayers = {};
Html5AudioPlayer({@required String id, @required Registrar registrar})
: super(id: id, registrar: registrar) {
_audioElement.addEventListener('durationchange', (event) {
_durationCompleter?.complete();
broadcastPlaybackEvent();
});
_audioElement.addEventListener('error', (event) {
_durationCompleter?.completeError(_audioElement.error);
});
_audioElement.addEventListener('ended', (event) async {
_currentAudioSourcePlayer.complete();
});
_audioElement.addEventListener('timeupdate', (event) {
_currentAudioSourcePlayer.timeUpdated(_audioElement.currentTime);
});
_audioElement.addEventListener('loadstart', (event) {
transition(ProcessingState.buffering);
});
_audioElement.addEventListener('waiting', (event) {
transition(ProcessingState.buffering);
});
_audioElement.addEventListener('stalled', (event) {
transition(ProcessingState.buffering);
});
_audioElement.addEventListener('canplaythrough', (event) {
transition(ProcessingState.ready);
});
_audioElement.addEventListener('progress', (event) {
broadcastPlaybackEvent();
});
}
List<int> get order {
final sequence = _audioSourcePlayer.sequence;
List<int> order = List<int>(sequence.length);
if (_shuffleModeEnabled) {
order = _audioSourcePlayer.shuffleOrder;
} else {
for (var i = 0; i < order.length; i++) {
order[i] = i;
}
}
return order;
}
List<int> getInv(List<int> order) {
List<int> orderInv = List<int>(order.length);
for (var i = 0; i < order.length; i++) {
orderInv[order[i]] = i;
}
return orderInv;
}
onEnded() async {
if (_loopMode == LoopMode.one) {
await seek(0, null);
play();
} else {
final order = this.order;
final orderInv = getInv(order);
if (orderInv[_index] + 1 < order.length) {
// move to next item
_index = order[orderInv[_index] + 1];
await _currentAudioSourcePlayer.load();
// Should always be true...
if (_playing) {
play();
}
} else {
// reached end of playlist
if (_loopMode == LoopMode.all) {
// Loop back to the beginning
if (order.length == 1) {
await seek(0, null);
play();
} else {
_index = order[0];
await _currentAudioSourcePlayer.load();
// Should always be true...
if (_playing) {
play();
}
}
} else {
transition(ProcessingState.completed);
}
}
}
}
// TODO: Improve efficiency.
IndexedAudioSourcePlayer get _currentAudioSourcePlayer =>
_audioSourcePlayer != null && _index < _audioSourcePlayer.sequence.length
? _audioSourcePlayer.sequence[_index]
: null;
@override
Future<int> load(Map source) async {
_currentAudioSourcePlayer?.pause();
_audioSourcePlayer = getAudioSource(source);
_index = 0;
if (_shuffleModeEnabled) {
_audioSourcePlayer?.shuffle(0, _index);
}
return (await _currentAudioSourcePlayer.load()).inMilliseconds;
}
Future<Duration> loadUri(final Uri uri) async {
transition(ProcessingState.loading);
final src = uri.toString();
if (src != _audioElement.src) {
_durationCompleter = Completer<num>();
_audioElement.src = src;
_audioElement.preload = 'auto';
_audioElement.load();
try {
await _durationCompleter.future;
} on MediaError catch (e) {
throw PlatformException(
code: "${e.code}", message: "Failed to load URL");
} finally {
_durationCompleter = null;
}
}
transition(ProcessingState.ready);
final seconds = _audioElement.duration;
return seconds.isFinite
? Duration(milliseconds: (seconds * 1000).toInt())
: null;
}
@override
Future<void> play() async {
_playing = true;
await _currentAudioSourcePlayer.play();
}
@override
Future<void> pause() async {
_playing = false;
_currentAudioSourcePlayer.pause();
}
@override
Future<void> setVolume(double volume) async {
_audioElement.volume = volume;
}
@override
Future<void> setSpeed(double speed) async {
_audioElement.playbackRate = speed;
}
@override
Future<void> setLoopMode(int mode) async {
_loopMode = LoopMode.values[mode];
}
@override
Future<void> setShuffleModeEnabled(bool enabled) async {
_shuffleModeEnabled = enabled;
if (enabled) {
_audioSourcePlayer?.shuffle(0, _index);
}
}
@override
Future<void> seek(int position, int newIndex) async {
int index = newIndex ?? _index;
if (index != _index) {
_currentAudioSourcePlayer.pause();
_index = index;
await _currentAudioSourcePlayer.load();
await _currentAudioSourcePlayer.seek(position);
if (_playing) {
_currentAudioSourcePlayer.play();
}
} else {
await _currentAudioSourcePlayer.seek(position);
}
}
ConcatenatingAudioSourcePlayer _concatenating(String playerId) =>
_audioSourcePlayers[playerId] as ConcatenatingAudioSourcePlayer;
concatenatingAdd(String playerId, Map source) {
final playlist = _concatenating(playerId);
playlist.add(getAudioSource(source));
}
concatenatingInsert(String playerId, int index, Map source) {
_concatenating(playerId).insert(index, getAudioSource(source));
if (index <= _index) {
_index++;
}
}
concatenatingAddAll(String playerId, List sources) {
_concatenating(playerId).addAll(getAudioSources(sources));
}
concatenatingInsertAll(String playerId, int index, List sources) {
_concatenating(playerId).insertAll(index, getAudioSources(sources));
if (index <= _index) {
_index += sources.length;
}
}
concatenatingRemoveAt(String playerId, int index) async {
// Pause if removing current item
if (_index == index && _playing) {
_currentAudioSourcePlayer.pause();
}
_concatenating(playerId).removeAt(index);
if (_index == index) {
// Skip backward if there's nothing after this
if (index == _audioSourcePlayer.sequence.length) {
_index--;
}
// Resume playback at the new item (if it exists)
if (_playing && _currentAudioSourcePlayer != null) {
await _currentAudioSourcePlayer.load();
_currentAudioSourcePlayer.play();
}
} else if (index < _index) {
// Reflect that the current item has shifted its position
_index--;
}
}
concatenatingRemoveRange(String playerId, int start, int end) async {
if (_index >= start && _index < end && _playing) {
// Pause if removing current item
_currentAudioSourcePlayer.pause();
}
_concatenating(playerId).removeRange(start, end);
if (_index >= start && _index < end) {
// Skip backward if there's nothing after this
if (start >= _audioSourcePlayer.sequence.length) {
_index = start - 1;
} else {
_index = start;
}
// Resume playback at the new item (if it exists)
if (_playing && _currentAudioSourcePlayer != null) {
await _currentAudioSourcePlayer.load();
_currentAudioSourcePlayer.play();
}
} else if (end <= _index) {
// Reflect that the current item has shifted its position
_index -= (end - start);
}
}
concatenatingMove(String playerId, int currentIndex, int newIndex) {
_concatenating(playerId).move(currentIndex, newIndex);
if (currentIndex == _index) {
_index = newIndex;
} else if (currentIndex < _index && newIndex >= _index) {
_index--;
} else if (currentIndex > _index && newIndex <= _index) {
_index++;
}
}
concatenatingClear(String playerId) {
_currentAudioSourcePlayer.pause();
_concatenating(playerId).clear();
}
@override
Duration getCurrentPosition() => _currentAudioSourcePlayer?.position;
@override
Duration getBufferedPosition() => _currentAudioSourcePlayer?.bufferedPosition;
@override
Duration getDuration() => _currentAudioSourcePlayer?.duration;
@override
void dispose() {
_currentAudioSourcePlayer?.pause();
_audioElement.removeAttribute('src');
_audioElement.load();
transition(ProcessingState.none);
super.dispose();
}
List<AudioSourcePlayer> getAudioSources(List json) =>
json.map((s) => getAudioSource(s)).toList();
AudioSourcePlayer getAudioSource(Map json) {
final String id = json['id'];
var audioSourcePlayer = _audioSourcePlayers[id];
if (audioSourcePlayer == null) {
audioSourcePlayer = decodeAudioSource(json);
_audioSourcePlayers[id] = audioSourcePlayer;
}
return audioSourcePlayer;
}
AudioSourcePlayer decodeAudioSource(Map json) {
try {
switch (json['type']) {
case 'progressive':
return ProgressiveAudioSourcePlayer(
this, json['id'], Uri.parse(json['uri']), json['headers']);
case "dash":
return DashAudioSourcePlayer(
this, json['id'], Uri.parse(json['uri']), json['headers']);
case "hls":
return HlsAudioSourcePlayer(
this, json['id'], Uri.parse(json['uri']), json['headers']);
case "concatenating":
return ConcatenatingAudioSourcePlayer(
this,
json['id'],
getAudioSources(json['audioSources']),
json['useLazyPreparation']);
case "clipping":
return ClippingAudioSourcePlayer(
this,
json['id'],
getAudioSource(json['audioSource']),
Duration(milliseconds: json['start']),
Duration(milliseconds: json['end']));
case "looping":
return LoopingAudioSourcePlayer(this, json['id'],
getAudioSource(json['audioSource']), json['count']);
default:
throw Exception("Unknown AudioSource type: " + json['type']);
}
} catch (e, stacktrace) {
print("$stacktrace");
rethrow;
}
}
}
abstract class AudioSourcePlayer {
Html5AudioPlayer html5AudioPlayer;
final String id;
AudioSourcePlayer(this.html5AudioPlayer, this.id);
List<IndexedAudioSourcePlayer> get sequence;
List<int> get shuffleOrder;
int shuffle(int treeIndex, int currentIndex);
}
abstract class IndexedAudioSourcePlayer extends AudioSourcePlayer {
IndexedAudioSourcePlayer(Html5AudioPlayer html5AudioPlayer, String id)
: super(html5AudioPlayer, id);
Future<Duration> load();
Future<void> play();
Future<void> pause();
Future<void> seek(int position);
Future<void> complete();
Future<void> timeUpdated(double seconds) async {}
Duration get duration;
Duration get position;
Duration get bufferedPosition;
AudioElement get _audioElement => html5AudioPlayer._audioElement;
@override
int shuffle(int treeIndex, int currentIndex) => treeIndex + 1;
@override
String toString() => "${this.runtimeType}";
}
abstract class UriAudioSourcePlayer extends IndexedAudioSourcePlayer {
final Uri uri;
final Map headers;
double _resumePos;
Duration _duration;
Completer _completer;
UriAudioSourcePlayer(
Html5AudioPlayer html5AudioPlayer, String id, this.uri, this.headers)
: super(html5AudioPlayer, id);
@override
List<IndexedAudioSourcePlayer> get sequence => [this];
@override
List<int> get shuffleOrder => [0];
@override
Future<Duration> load() async {
_resumePos = 0.0;
return _duration = await html5AudioPlayer.loadUri(uri);
}
@override
Future<void> play() async {
_audioElement.currentTime = _resumePos;
_audioElement.play();
_completer = Completer();
await _completer.future;
_completer = null;
}
@override
Future<void> pause() async {
_resumePos = _audioElement.currentTime;
_audioElement.pause();
_interruptPlay();
}
@override
Future<void> seek(int position) async {
_audioElement.currentTime = _resumePos = position / 1000.0;
}
@override
Future<void> complete() async {
_interruptPlay();
html5AudioPlayer.onEnded();
}
_interruptPlay() {
if (_completer?.isCompleted == false) {
_completer.complete();
}
}
@override
Duration get duration {
return _duration;
//final seconds = _audioElement.duration;
//return seconds.isFinite
// ? Duration(milliseconds: (seconds * 1000).toInt())
// : null;
}
@override
Duration get position {
double seconds = _audioElement.currentTime;
return Duration(milliseconds: (seconds * 1000).toInt());
}
@override
Duration get bufferedPosition {
if (_audioElement.buffered.length > 0) {
return Duration(
milliseconds:
(_audioElement.buffered.end(_audioElement.buffered.length - 1) *
1000)
.toInt());
} else {
return Duration.zero;
}
}
}
class ProgressiveAudioSourcePlayer extends UriAudioSourcePlayer {
ProgressiveAudioSourcePlayer(
Html5AudioPlayer html5AudioPlayer, String id, Uri uri, Map headers)
: super(html5AudioPlayer, id, uri, headers);
}
class DashAudioSourcePlayer extends UriAudioSourcePlayer {
DashAudioSourcePlayer(
Html5AudioPlayer html5AudioPlayer, String id, Uri uri, Map headers)
: super(html5AudioPlayer, id, uri, headers);
}
class HlsAudioSourcePlayer extends UriAudioSourcePlayer {
HlsAudioSourcePlayer(
Html5AudioPlayer html5AudioPlayer, String id, Uri uri, Map headers)
: super(html5AudioPlayer, id, uri, headers);
}
class ConcatenatingAudioSourcePlayer extends AudioSourcePlayer {
static List<int> generateShuffleOrder(int length, [int firstIndex]) {
final shuffleOrder = List<int>(length);
for (var i = 0; i < length; i++) {
final j = _random.nextInt(i + 1);
shuffleOrder[i] = shuffleOrder[j];
shuffleOrder[j] = i;
}
if (firstIndex != null) {
for (var i = 1; i < length; i++) {
if (shuffleOrder[i] == firstIndex) {
final v = shuffleOrder[0];
shuffleOrder[0] = shuffleOrder[i];
shuffleOrder[i] = v;
break;
}
}
}
return shuffleOrder;
}
final List<AudioSourcePlayer> audioSourcePlayers;
final bool useLazyPreparation;
List<int> _shuffleOrder;
ConcatenatingAudioSourcePlayer(Html5AudioPlayer html5AudioPlayer, String id,
this.audioSourcePlayers, this.useLazyPreparation)
: _shuffleOrder = generateShuffleOrder(audioSourcePlayers.length),
super(html5AudioPlayer, id);
@override
List<IndexedAudioSourcePlayer> get sequence =>
audioSourcePlayers.expand((p) => p.sequence).toList();
@override
List<int> get shuffleOrder {
final order = <int>[];
var offset = order.length;
final childOrders = <List<int>>[];
for (var audioSourcePlayer in audioSourcePlayers) {
final childShuffleOrder = audioSourcePlayer.shuffleOrder;
childOrders.add(childShuffleOrder.map((i) => i + offset).toList());
offset += childShuffleOrder.length;
}
for (var i = 0; i < childOrders.length; i++) {
order.addAll(childOrders[_shuffleOrder[i]]);
}
return order;
}
@override
int shuffle(int treeIndex, int currentIndex) {
int currentChildIndex;
for (var i = 0; i < audioSourcePlayers.length; i++) {
final indexBefore = treeIndex;
final child = audioSourcePlayers[i];
treeIndex = child.shuffle(treeIndex, currentIndex);
if (currentIndex >= indexBefore && currentIndex < treeIndex) {
currentChildIndex = i;
}
}
// Shuffle so that the current child is first in the shuffle order
_shuffleOrder =
generateShuffleOrder(audioSourcePlayers.length, currentChildIndex);
return treeIndex;
}
add(AudioSourcePlayer player) {
audioSourcePlayers.add(player);
_shuffleOrder.add(audioSourcePlayers.length - 1);
}
insert(int index, AudioSourcePlayer player) {
audioSourcePlayers.insert(index, player);
for (var i = 0; i < audioSourcePlayers.length; i++) {
if (_shuffleOrder[i] >= index) {
_shuffleOrder[i]++;
}
}
_shuffleOrder.add(index);
}
addAll(List<AudioSourcePlayer> players) {
audioSourcePlayers.addAll(players);
_shuffleOrder.addAll(
List.generate(players.length, (i) => audioSourcePlayers.length + i)
.toList()
..shuffle());
}
insertAll(int index, List<AudioSourcePlayer> players) {
audioSourcePlayers.insertAll(index, players);
for (var i = 0; i < audioSourcePlayers.length; i++) {
if (_shuffleOrder[i] >= index) {
_shuffleOrder[i] += players.length;
}
}
_shuffleOrder.addAll(
List.generate(players.length, (i) => index + i).toList()..shuffle());
}
removeAt(int index) {
audioSourcePlayers.removeAt(index);
// Example: for items at indices 0 1 2 3, _shuffleOrder might be 3 2 0 1;
// entries greater than the removed index are shifted down by one below.
for (var i = 0; i < audioSourcePlayers.length; i++) {
if (_shuffleOrder[i] > index) {
_shuffleOrder[i]--;
}
}
_shuffleOrder.removeWhere((i) => i == index);
}
removeRange(int start, int end) {
audioSourcePlayers.removeRange(start, end);
for (var i = 0; i < audioSourcePlayers.length; i++) {
if (_shuffleOrder[i] >= end) {
_shuffleOrder[i] -= (end - start);
}
}
_shuffleOrder.removeWhere((i) => i >= start && i < end);
}
move(int currentIndex, int newIndex) {
audioSourcePlayers.insert(
newIndex, audioSourcePlayers.removeAt(currentIndex));
}
clear() {
audioSourcePlayers.clear();
_shuffleOrder.clear();
}
}
class ClippingAudioSourcePlayer extends IndexedAudioSourcePlayer {
final UriAudioSourcePlayer audioSourcePlayer;
final Duration start;
final Duration end;
Completer<ClipInterruptReason> _completer;
double _resumePos;
Duration _duration;
ClippingAudioSourcePlayer(Html5AudioPlayer html5AudioPlayer, String id,
this.audioSourcePlayer, this.start, this.end)
: super(html5AudioPlayer, id);
@override
List<IndexedAudioSourcePlayer> get sequence => [this];
@override
List<int> get shuffleOrder => [0];
@override
Future<Duration> load() async {
_resumePos = (start ?? Duration.zero).inMilliseconds / 1000.0;
Duration fullDuration =
await html5AudioPlayer.loadUri(audioSourcePlayer.uri);
_audioElement.currentTime = _resumePos;
_duration = Duration(
milliseconds: min((end ?? fullDuration).inMilliseconds,
fullDuration.inMilliseconds) -
(start ?? Duration.zero).inMilliseconds);
return _duration;
}
double get remaining => end.inMilliseconds / 1000 - _audioElement.currentTime;
@override
Future<void> play() async {
_interruptPlay(ClipInterruptReason.simultaneous);
_audioElement.currentTime = _resumePos;
_audioElement.play();
_completer = Completer<ClipInterruptReason>();
ClipInterruptReason reason;
while ((reason = await _completer.future) == ClipInterruptReason.seek) {
_completer = Completer<ClipInterruptReason>();
}
if (reason == ClipInterruptReason.end) {
html5AudioPlayer.onEnded();
}
_completer = null;
}
@override
Future<void> pause() async {
_interruptPlay(ClipInterruptReason.pause);
_resumePos = _audioElement.currentTime;
_audioElement.pause();
}
@override
Future<void> seek(int position) async {
_interruptPlay(ClipInterruptReason.seek);
_audioElement.currentTime =
_resumePos = start.inMilliseconds / 1000.0 + position / 1000.0;
}
@override
Future<void> complete() async {
_interruptPlay(ClipInterruptReason.end);
}
@override
Future<void> timeUpdated(double seconds) async {
if (end != null) {
if (seconds >= end.inMilliseconds / 1000) {
_interruptPlay(ClipInterruptReason.end);
}
}
}
@override
Duration get duration {
return _duration;
}
@override
Duration get position {
double seconds = _audioElement.currentTime;
var position = Duration(milliseconds: (seconds * 1000).toInt());
if (start != null) {
position -= start;
}
if (position < Duration.zero) {
position = Duration.zero;
}
return position;
}
@override
Duration get bufferedPosition {
if (_audioElement.buffered.length > 0) {
var seconds =
_audioElement.buffered.end(_audioElement.buffered.length - 1);
var position = Duration(milliseconds: (seconds * 1000).toInt());
if (start != null) {
position -= start;
}
if (position < Duration.zero) {
position = Duration.zero;
}
if (duration != null && position > duration) {
position = duration;
}
return position;
} else {
return Duration.zero;
}
}
_interruptPlay(ClipInterruptReason reason) {
if (_completer?.isCompleted == false) {
_completer.complete(reason);
}
}
}
enum ClipInterruptReason { end, pause, seek, simultaneous }
class LoopingAudioSourcePlayer extends AudioSourcePlayer {
final AudioSourcePlayer audioSourcePlayer;
final int count;
LoopingAudioSourcePlayer(Html5AudioPlayer html5AudioPlayer, String id,
this.audioSourcePlayer, this.count)
: super(html5AudioPlayer, id);
@override
List<IndexedAudioSourcePlayer> get sequence =>
List.generate(count, (i) => audioSourcePlayer)
.expand((p) => p.sequence)
.toList();
@override
List<int> get shuffleOrder {
final order = <int>[];
var offset = order.length;
for (var i = 0; i < count; i++) {
final childShuffleOrder = audioSourcePlayer.shuffleOrder;
order.addAll(childShuffleOrder.map((i) => i + offset).toList());
offset += childShuffleOrder.length;
}
return order;
}
@override
int shuffle(int treeIndex, int currentIndex) {
for (var i = 0; i < count; i++) {
treeIndex = audioSourcePlayer.shuffle(treeIndex, currentIndex);
}
return treeIndex;
}
}

View File

@ -1,37 +0,0 @@
.idea/
.vagrant/
.sconsign.dblite
.svn/
.DS_Store
*.swp
profile
DerivedData/
build/
GeneratedPluginRegistrant.h
GeneratedPluginRegistrant.m
.generated/
*.pbxuser
*.mode1v3
*.mode2v3
*.perspectivev3
!default.pbxuser
!default.mode1v3
!default.mode2v3
!default.perspectivev3
xcuserdata
*.moved-aside
*.pyc
*sync/
Icon?
.tags*
/Flutter/Generated.xcconfig
/Flutter/flutter_export_environment.sh

View File

@ -1,21 +0,0 @@
#import <FlutterMacOS/FlutterMacOS.h>
@interface AudioPlayer : NSObject<FlutterStreamHandler>
- (instancetype)initWithRegistrar:(NSObject<FlutterPluginRegistrar> *)registrar playerId:(NSString*)idParam configuredSession:(BOOL)configuredSession;
@end
enum ProcessingState {
none,
loading,
buffering,
ready,
completed
};
enum LoopMode {
loopOff,
loopOne,
loopAll
};

File diff suppressed because it is too large.

View File

@ -1,13 +0,0 @@
#import <FlutterMacOS/FlutterMacOS.h>
@interface AudioSource : NSObject
@property (readonly, nonatomic) NSString* sourceId;
- (instancetype)initWithId:(NSString *)sid;
- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex;
- (void)findById:(NSString *)sourceId matches:(NSMutableArray<AudioSource *> *)matches;
- (NSArray *)getShuffleOrder;
- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex;
@end

View File

@ -1,37 +0,0 @@
#import "AudioSource.h"
#import <AVFoundation/AVFoundation.h>
@implementation AudioSource {
NSString *_sourceId;
}
- (instancetype)initWithId:(NSString *)sid {
self = [super init];
NSAssert(self, @"super init cannot be nil");
_sourceId = sid;
return self;
}
- (NSString *)sourceId {
return _sourceId;
}
- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex {
return 0;
}
- (void)findById:(NSString *)sourceId matches:(NSMutableArray<AudioSource *> *)matches {
if ([_sourceId isEqualToString:sourceId]) {
[matches addObject:self];
}
}
- (NSArray *)getShuffleOrder {
return @[];
}
- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex {
return 0;
}
@end

View File

@ -1,11 +0,0 @@
#import "AudioSource.h"
#import "UriAudioSource.h"
#import <FlutterMacOS/FlutterMacOS.h>
@interface ClippingAudioSource : IndexedAudioSource
@property (readonly, nonatomic) UriAudioSource* audioSource;
- (instancetype)initWithId:(NSString *)sid audioSource:(UriAudioSource *)audioSource start:(NSNumber *)start end:(NSNumber *)end;
@end

View File

@ -1,79 +0,0 @@
#import "AudioSource.h"
#import "ClippingAudioSource.h"
#import "IndexedPlayerItem.h"
#import "UriAudioSource.h"
#import <AVFoundation/AVFoundation.h>
@implementation ClippingAudioSource {
UriAudioSource *_audioSource;
CMTime _start;
CMTime _end;
}
- (instancetype)initWithId:(NSString *)sid audioSource:(UriAudioSource *)audioSource start:(NSNumber *)start end:(NSNumber *)end {
self = [super initWithId:sid];
NSAssert(self, @"super init cannot be nil");
_audioSource = audioSource;
_start = start == [NSNull null] ? kCMTimeZero : CMTimeMake([start intValue], 1000);
_end = end == [NSNull null] ? kCMTimeInvalid : CMTimeMake([end intValue], 1000);
return self;
}
- (UriAudioSource *)audioSource {
return _audioSource;
}
- (void)findById:(NSString *)sourceId matches:(NSMutableArray<AudioSource *> *)matches {
[super findById:sourceId matches:matches];
[_audioSource findById:sourceId matches:matches];
}
- (void)attach:(AVQueuePlayer *)player {
[super attach:player];
_audioSource.playerItem.forwardPlaybackEndTime = _end;
// XXX: Not needed since currentItem observer handles it?
[self seek:kCMTimeZero];
}
- (IndexedPlayerItem *)playerItem {
return _audioSource.playerItem;
}
- (NSArray *)getShuffleOrder {
return @[@(0)];
}
- (void)play:(AVQueuePlayer *)player {
}
- (void)pause:(AVQueuePlayer *)player {
}
- (void)stop:(AVQueuePlayer *)player {
}
- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler {
if (!completionHandler || (self.playerItem.status == AVPlayerItemStatusReadyToPlay)) {
CMTime absPosition = CMTimeAdd(_start, position);
[_audioSource.playerItem seekToTime:absPosition toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero completionHandler:completionHandler];
}
}
- (CMTime)duration {
return CMTimeSubtract(CMTIME_IS_INVALID(_end) ? self.playerItem.duration : _end, _start);
}
- (void)setDuration:(CMTime)duration {
}
- (CMTime)position {
return CMTimeSubtract(self.playerItem.currentTime, _start);
}
- (CMTime)bufferedPosition {
CMTime pos = CMTimeSubtract(_audioSource.bufferedPosition, _start);
CMTime dur = [self duration];
return CMTimeCompare(pos, dur) >= 0 ? dur : pos;
}
@end
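
Both the web player earlier in this diff and the macOS ClippingAudioSource above map between clip-relative and absolute positions the same way: seeks add `start` to the requested position, while reported positions subtract `start` and are clamped to the clip's bounds. A small plain-Dart sketch of that arithmetic (hypothetical names, for illustration only):

```
// Maps an absolute element position to a clip-relative position, clamped to
// the clip's [0, duration] bounds, mirroring the web and macOS sources above.
// Hypothetical helper for illustration only.
Duration clipPosition(Duration absolute, Duration start, Duration duration) {
  var position = absolute - start;
  if (position < Duration.zero) position = Duration.zero;
  if (position > duration) position = duration;
  return position;
}

// The inverse mapping used when seeking within the clip.
Duration absoluteSeekTarget(Duration clipRelative, Duration start) =>
    start + clipRelative;

void main() {
  const start = Duration(seconds: 10);
  const duration = Duration(seconds: 30);
  print(clipPosition(const Duration(seconds: 25), start, duration)); // 15s
  print(absoluteSeekTarget(const Duration(seconds: 5), start)); // 15s
}
```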

View File

@ -1,13 +0,0 @@
#import "AudioSource.h"
#import <FlutterMacOS/FlutterMacOS.h>
@interface ConcatenatingAudioSource : AudioSource
@property (readonly, nonatomic) int count;
- (instancetype)initWithId:(NSString *)sid audioSources:(NSMutableArray<AudioSource *> *)audioSources;
- (void)insertSource:(AudioSource *)audioSource atIndex:(int)index;
- (void)removeSourcesFromIndex:(int)start toIndex:(int)end;
- (void)moveSourceFromIndex:(int)currentIndex toIndex:(int)newIndex;
@end

View File

@ -1,109 +0,0 @@
#import "AudioSource.h"
#import "ConcatenatingAudioSource.h"
#import <AVFoundation/AVFoundation.h>
#import <stdlib.h>
@implementation ConcatenatingAudioSource {
NSMutableArray<AudioSource *> *_audioSources;
NSMutableArray<NSNumber *> *_shuffleOrder;
}
- (instancetype)initWithId:(NSString *)sid audioSources:(NSMutableArray<AudioSource *> *)audioSources {
self = [super initWithId:sid];
NSAssert(self, @"super init cannot be nil");
_audioSources = audioSources;
return self;
}
- (int)count {
return (int)_audioSources.count;
}
- (void)insertSource:(AudioSource *)audioSource atIndex:(int)index {
[_audioSources insertObject:audioSource atIndex:index];
}
- (void)removeSourcesFromIndex:(int)start toIndex:(int)end {
if (end == -1) end = _audioSources.count;
for (int i = start; i < end; i++) {
[_audioSources removeObjectAtIndex:start];
}
}
- (void)moveSourceFromIndex:(int)currentIndex toIndex:(int)newIndex {
AudioSource *source = _audioSources[currentIndex];
[_audioSources removeObjectAtIndex:currentIndex];
[_audioSources insertObject:source atIndex:newIndex];
}
- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex {
for (int i = 0; i < [_audioSources count]; i++) {
treeIndex = [_audioSources[i] buildSequence:sequence treeIndex:treeIndex];
}
return treeIndex;
}
- (void)findById:(NSString *)sourceId matches:(NSMutableArray<AudioSource *> *)matches {
[super findById:sourceId matches:matches];
for (int i = 0; i < [_audioSources count]; i++) {
[_audioSources[i] findById:sourceId matches:matches];
}
}
- (NSArray *)getShuffleOrder {
NSMutableArray *order = [NSMutableArray new];
int offset = [order count];
NSMutableArray *childOrders = [NSMutableArray new]; // array of array of ints
for (int i = 0; i < [_audioSources count]; i++) {
AudioSource *audioSource = _audioSources[i];
NSArray *childShuffleOrder = [audioSource getShuffleOrder];
NSMutableArray *offsetChildShuffleOrder = [NSMutableArray new];
for (int j = 0; j < [childShuffleOrder count]; j++) {
[offsetChildShuffleOrder addObject:@([childShuffleOrder[j] integerValue] + offset)];
}
[childOrders addObject:offsetChildShuffleOrder];
offset += [childShuffleOrder count];
}
for (int i = 0; i < [_audioSources count]; i++) {
[order addObjectsFromArray:childOrders[[_shuffleOrder[i] integerValue]]];
}
return order;
}
- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex {
int currentChildIndex = -1;
for (int i = 0; i < [_audioSources count]; i++) {
int indexBefore = treeIndex;
AudioSource *child = _audioSources[i];
treeIndex = [child shuffle:treeIndex currentIndex:currentIndex];
if (currentIndex >= indexBefore && currentIndex < treeIndex) {
currentChildIndex = i;
}
}
// Shuffle so that the current child is first in the shuffle order
_shuffleOrder = [NSMutableArray arrayWithCapacity:[_audioSources count]];
for (int i = 0; i < [_audioSources count]; i++) {
[_shuffleOrder addObject:@(0)];
}
NSLog(@"shuffle: audioSources.count=%lu and shuffleOrder.count=%lu", (unsigned long)[_audioSources count], (unsigned long)[_shuffleOrder count]);
// First generate a random shuffle
for (int i = 0; i < [_audioSources count]; i++) {
int j = arc4random_uniform(i + 1);
_shuffleOrder[i] = _shuffleOrder[j];
_shuffleOrder[j] = @(i);
}
// Then bring currentIndex to the front
if (currentChildIndex != -1) {
for (int i = 1; i < [_audioSources count]; i++) {
if ([_shuffleOrder[i] integerValue] == currentChildIndex) {
NSNumber *v = _shuffleOrder[0];
_shuffleOrder[0] = _shuffleOrder[i];
_shuffleOrder[i] = v;
break;
}
}
}
return treeIndex;
}
@end
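
The concatenating source's shuffle generates a random permutation of its children and then swaps the child containing the current index to the front, so playback continues from the current item after a shuffle. A plain-Dart sketch of that two-step approach (hypothetical helper for illustration; it mirrors the Objective-C above rather than quoting the plugin's Dart-side code):

```
import 'dart:math';

// Returns a random permutation of [0, count) with `currentChildIndex`
// (if any) moved to the front, so playback continues from the current child.
// Hypothetical helper for illustration only.
List<int> shuffleWithCurrentFirst(int count, int currentChildIndex) {
  final random = Random();
  // Inside-out Fisher-Yates shuffle.
  final order = List<int>.filled(count, 0);
  for (var i = 0; i < count; i++) {
    final j = random.nextInt(i + 1);
    order[i] = order[j];
    order[j] = i;
  }
  // Bring the child containing the current item to the front.
  if (currentChildIndex != -1) {
    final i = order.indexOf(currentChildIndex);
    if (i > 0) {
      order[i] = order[0];
      order[0] = currentChildIndex;
    }
  }
  return order;
}
```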

View File

@ -1,21 +0,0 @@
#import "AudioSource.h"
#import "IndexedPlayerItem.h"
#import <FlutterMacOS/FlutterMacOS.h>
#import <AVFoundation/AVFoundation.h>
@interface IndexedAudioSource : AudioSource
@property (readonly, nonatomic) IndexedPlayerItem *playerItem;
@property (readwrite, nonatomic) CMTime duration;
@property (readonly, nonatomic) CMTime position;
@property (readonly, nonatomic) CMTime bufferedPosition;
@property (readonly, nonatomic) BOOL isAttached;
- (void)attach:(AVQueuePlayer *)player;
- (void)play:(AVQueuePlayer *)player;
- (void)pause:(AVQueuePlayer *)player;
- (void)stop:(AVQueuePlayer *)player;
- (void)seek:(CMTime)position;
- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler;
@end

View File

@ -1,68 +0,0 @@
#import "IndexedAudioSource.h"
#import "IndexedPlayerItem.h"
#import <AVFoundation/AVFoundation.h>
@implementation IndexedAudioSource {
BOOL _isAttached;
}
- (instancetype)initWithId:(NSString *)sid {
self = [super init];
NSAssert(self, @"super init cannot be nil");
_isAttached = NO;
return self;
}
- (IndexedPlayerItem *)playerItem {
return nil;
}
- (BOOL)isAttached {
return _isAttached;
}
- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex {
[sequence addObject:self];
return treeIndex + 1;
}
- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex {
return treeIndex + 1;
}
- (void)attach:(AVQueuePlayer *)player {
_isAttached = YES;
}
- (void)play:(AVQueuePlayer *)player {
}
- (void)pause:(AVQueuePlayer *)player {
}
- (void)stop:(AVQueuePlayer *)player {
}
- (void)seek:(CMTime)position {
[self seek:position completionHandler:nil];
}
- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler {
}
- (CMTime)duration {
return kCMTimeInvalid;
}
- (void)setDuration:(CMTime)duration {
}
- (CMTime)position {
return kCMTimeInvalid;
}
- (CMTime)bufferedPosition {
return kCMTimeInvalid;
}
@end

View File

@ -1,9 +0,0 @@
#import <AVFoundation/AVFoundation.h>
@class IndexedAudioSource;
@interface IndexedPlayerItem : AVPlayerItem
@property (readwrite, nonatomic) IndexedAudioSource *audioSource;
@end

View File

@ -1,16 +0,0 @@
#import "IndexedPlayerItem.h"
#import "IndexedAudioSource.h"
@implementation IndexedPlayerItem {
IndexedAudioSource *_audioSource;
}
-(void)setAudioSource:(IndexedAudioSource *)audioSource {
_audioSource = audioSource;
}
-(IndexedAudioSource *)audioSource {
return _audioSource;
}
@end

View File

@ -1,4 +0,0 @@
#import <FlutterMacOS/FlutterMacOS.h>
@interface JustAudioPlugin : NSObject<FlutterPlugin>
@end

View File

@ -1,55 +0,0 @@
#import "JustAudioPlugin.h"
#import "AudioPlayer.h"
#import <AVFoundation/AVFoundation.h>
#include <TargetConditionals.h>
@implementation JustAudioPlugin {
NSObject<FlutterPluginRegistrar>* _registrar;
BOOL _configuredSession;
}
+ (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar>*)registrar {
FlutterMethodChannel* channel = [FlutterMethodChannel
methodChannelWithName:@"com.ryanheise.just_audio.methods"
binaryMessenger:[registrar messenger]];
JustAudioPlugin* instance = [[JustAudioPlugin alloc] initWithRegistrar:registrar];
[registrar addMethodCallDelegate:instance channel:channel];
}
- (instancetype)initWithRegistrar:(NSObject<FlutterPluginRegistrar> *)registrar {
self = [super init];
NSAssert(self, @"super init cannot be nil");
_registrar = registrar;
return self;
}
- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result {
if ([@"init" isEqualToString:call.method]) {
NSArray* args = (NSArray*)call.arguments;
NSString* playerId = args[0];
/*AudioPlayer* player =*/ [[AudioPlayer alloc] initWithRegistrar:_registrar playerId:playerId configuredSession:_configuredSession];
result(nil);
} else if ([@"setIosCategory" isEqualToString:call.method]) {
#if TARGET_OS_IPHONE
NSNumber* categoryIndex = (NSNumber*)call.arguments;
AVAudioSessionCategory category = nil;
switch (categoryIndex.integerValue) {
case 0: category = AVAudioSessionCategoryAmbient; break;
case 1: category = AVAudioSessionCategorySoloAmbient; break;
case 2: category = AVAudioSessionCategoryPlayback; break;
case 3: category = AVAudioSessionCategoryRecord; break;
case 4: category = AVAudioSessionCategoryPlayAndRecord; break;
case 5: category = AVAudioSessionCategoryMultiRoute; break;
}
      if (category) {
        _configuredSession = YES;
        [[AVAudioSession sharedInstance] setCategory:category error:nil];
      }
#endif
result(nil);
} else {
result(FlutterMethodNotImplemented);
}
}
@end
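
On the Dart side, the category index handled above corresponds to a call along these lines (a sketch against the 0.3.x API; the `setIosCategory` method and `IosCategory` enum names are assumptions if your version differs):

```
import 'package:just_audio/just_audio.dart';

Future<void> configureIosAudioSession() async {
  // Index 2 in the switch above maps to AVAudioSessionCategoryPlayback.
  // Sketch based on the 0.3.x API; names are assumptions if they differ.
  await AudioPlayer.setIosCategory(IosCategory.playback);
}
```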

View File

@ -1,8 +0,0 @@
#import "AudioSource.h"
#import <FlutterMacOS/FlutterMacOS.h>
@interface LoopingAudioSource : AudioSource
- (instancetype)initWithId:(NSString *)sid audioSources:(NSArray<AudioSource *> *)audioSources;
@end

View File

@ -1,53 +0,0 @@
#import "AudioSource.h"
#import "LoopingAudioSource.h"
#import <AVFoundation/AVFoundation.h>
@implementation LoopingAudioSource {
// An array of duplicates
NSArray<AudioSource *> *_audioSources; // <AudioSource *>
}
- (instancetype)initWithId:(NSString *)sid audioSources:(NSArray<AudioSource *> *)audioSources {
self = [super initWithId:sid];
NSAssert(self, @"super init cannot be nil");
_audioSources = audioSources;
return self;
}
- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex {
for (int i = 0; i < [_audioSources count]; i++) {
treeIndex = [_audioSources[i] buildSequence:sequence treeIndex:treeIndex];
}
return treeIndex;
}
- (void)findById:(NSString *)sourceId matches:(NSMutableArray<AudioSource *> *)matches {
[super findById:sourceId matches:matches];
for (int i = 0; i < [_audioSources count]; i++) {
[_audioSources[i] findById:sourceId matches:matches];
}
}
- (NSArray *)getShuffleOrder {
NSMutableArray *order = [NSMutableArray new];
int offset = (int)[order count];
for (int i = 0; i < [_audioSources count]; i++) {
AudioSource *audioSource = _audioSources[i];
NSArray *childShuffleOrder = [audioSource getShuffleOrder];
for (int j = 0; j < [childShuffleOrder count]; j++) {
[order addObject:@([childShuffleOrder[j] integerValue] + offset)];
}
offset += [childShuffleOrder count];
}
return order;
}
- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex {
// TODO: This should probably shuffle the same way on all duplicates.
for (int i = 0; i < [_audioSources count]; i++) {
treeIndex = [_audioSources[i] shuffle:treeIndex currentIndex:currentIndex];
}
return treeIndex;
}
@end

View File

@ -1,8 +0,0 @@
#import "IndexedAudioSource.h"
#import <FlutterMacOS/FlutterMacOS.h>
@interface UriAudioSource : IndexedAudioSource
- (instancetype)initWithId:(NSString *)sid uri:(NSString *)uri;
@end

View File

@ -1,79 +0,0 @@
#import "UriAudioSource.h"
#import "IndexedAudioSource.h"
#import "IndexedPlayerItem.h"
#import <AVFoundation/AVFoundation.h>
@implementation UriAudioSource {
NSString *_uri;
IndexedPlayerItem *_playerItem;
/* CMTime _duration; */
}
- (instancetype)initWithId:(NSString *)sid uri:(NSString *)uri {
self = [super initWithId:sid];
NSAssert(self, @"super init cannot be nil");
_uri = uri;
if ([_uri hasPrefix:@"file://"]) {
_playerItem = [[IndexedPlayerItem alloc] initWithURL:[NSURL fileURLWithPath:[_uri substringFromIndex:7]]];
} else {
_playerItem = [[IndexedPlayerItem alloc] initWithURL:[NSURL URLWithString:_uri]];
}
if (@available(macOS 10.13, iOS 11.0, *)) {
    // This time-pitch algorithm best reduces distortion on voice at speeds below 1.0
_playerItem.audioTimePitchAlgorithm = AVAudioTimePitchAlgorithmTimeDomain;
}
/* NSKeyValueObservingOptions options = */
/* NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew; */
/* [_playerItem addObserver:self */
/* forKeyPath:@"duration" */
/* options:options */
/* context:nil]; */
return self;
}
- (IndexedPlayerItem *)playerItem {
return _playerItem;
}
- (NSArray *)getShuffleOrder {
return @[@(0)];
}
- (void)play:(AVQueuePlayer *)player {
}
- (void)pause:(AVQueuePlayer *)player {
}
- (void)stop:(AVQueuePlayer *)player {
}
- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler {
if (!completionHandler || (_playerItem.status == AVPlayerItemStatusReadyToPlay)) {
[_playerItem seekToTime:position toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero completionHandler:completionHandler];
}
}
- (CMTime)duration {
return _playerItem.duration;
}
- (void)setDuration:(CMTime)duration {
}
- (CMTime)position {
return _playerItem.currentTime;
}
- (CMTime)bufferedPosition {
NSValue *last = _playerItem.loadedTimeRanges.lastObject;
if (last) {
CMTimeRange timeRange = [last CMTimeRangeValue];
return CMTimeAdd(timeRange.start, timeRange.duration);
} else {
return _playerItem.currentTime;
}
}
@end

View File

@ -1,21 +0,0 @@
#
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
s.name = 'just_audio'
s.version = '0.0.1'
s.summary = 'A new flutter plugin project.'
s.description = <<-DESC
A new flutter plugin project.
DESC
s.homepage = 'http://example.com'
s.license = { :file => '../LICENSE' }
s.author = { 'Your Company' => 'email@example.com' }
s.source = { :path => '.' }
s.source_files = 'Classes/**/*'
s.public_header_files = 'Classes/**/*.h'
s.dependency 'FlutterMacOS'
s.platform = :osx, '10.11'
s.pod_target_xcconfig = { 'DEFINES_MODULE' => 'YES' }
end

View File

@ -1,250 +0,0 @@
# Generated by pub
# See https://dart.dev/tools/pub/glossary#lockfile
packages:
async:
dependency: "direct main"
description:
name: async
url: "https://pub.dartlang.org"
source: hosted
version: "2.4.2"
boolean_selector:
dependency: transitive
description:
name: boolean_selector
url: "https://pub.dartlang.org"
source: hosted
version: "2.0.0"
characters:
dependency: transitive
description:
name: characters
url: "https://pub.dartlang.org"
source: hosted
version: "1.0.0"
charcode:
dependency: transitive
description:
name: charcode
url: "https://pub.dartlang.org"
source: hosted
version: "1.1.3"
clock:
dependency: transitive
description:
name: clock
url: "https://pub.dartlang.org"
source: hosted
version: "1.0.1"
collection:
dependency: transitive
description:
name: collection
url: "https://pub.dartlang.org"
source: hosted
version: "1.14.13"
convert:
dependency: transitive
description:
name: convert
url: "https://pub.dartlang.org"
source: hosted
version: "2.1.1"
crypto:
dependency: transitive
description:
name: crypto
url: "https://pub.dartlang.org"
source: hosted
version: "2.1.4"
fake_async:
dependency: transitive
description:
name: fake_async
url: "https://pub.dartlang.org"
source: hosted
version: "1.1.0"
file:
dependency: transitive
description:
name: file
url: "https://pub.dartlang.org"
source: hosted
version: "5.1.0"
flutter:
dependency: "direct main"
description: flutter
source: sdk
version: "0.0.0"
flutter_test:
dependency: "direct dev"
description: flutter
source: sdk
version: "0.0.0"
flutter_web_plugins:
dependency: "direct main"
description: flutter
source: sdk
version: "0.0.0"
intl:
dependency: transitive
description:
name: intl
url: "https://pub.dartlang.org"
source: hosted
version: "0.16.1"
matcher:
dependency: transitive
description:
name: matcher
url: "https://pub.dartlang.org"
source: hosted
version: "0.12.8"
meta:
dependency: transitive
description:
name: meta
url: "https://pub.dartlang.org"
source: hosted
version: "1.1.8"
path:
dependency: "direct main"
description:
name: path
url: "https://pub.dartlang.org"
source: hosted
version: "1.7.0"
path_provider:
dependency: "direct main"
description:
name: path_provider
url: "https://pub.dartlang.org"
source: hosted
version: "1.6.10"
path_provider_linux:
dependency: transitive
description:
name: path_provider_linux
url: "https://pub.dartlang.org"
source: hosted
version: "0.0.1+1"
path_provider_macos:
dependency: transitive
description:
name: path_provider_macos
url: "https://pub.dartlang.org"
source: hosted
version: "0.0.4+3"
path_provider_platform_interface:
dependency: transitive
description:
name: path_provider_platform_interface
url: "https://pub.dartlang.org"
source: hosted
version: "1.0.2"
platform:
dependency: transitive
description:
name: platform
url: "https://pub.dartlang.org"
source: hosted
version: "2.2.1"
plugin_platform_interface:
dependency: transitive
description:
name: plugin_platform_interface
url: "https://pub.dartlang.org"
source: hosted
version: "1.0.2"
process:
dependency: transitive
description:
name: process
url: "https://pub.dartlang.org"
source: hosted
version: "3.0.13"
rxdart:
dependency: "direct main"
description:
name: rxdart
url: "https://pub.dartlang.org"
source: hosted
version: "0.24.1"
sky_engine:
dependency: transitive
description: flutter
source: sdk
version: "0.0.99"
source_span:
dependency: transitive
description:
name: source_span
url: "https://pub.dartlang.org"
source: hosted
version: "1.7.0"
stack_trace:
dependency: transitive
description:
name: stack_trace
url: "https://pub.dartlang.org"
source: hosted
version: "1.9.5"
stream_channel:
dependency: transitive
description:
name: stream_channel
url: "https://pub.dartlang.org"
source: hosted
version: "2.0.0"
string_scanner:
dependency: transitive
description:
name: string_scanner
url: "https://pub.dartlang.org"
source: hosted
version: "1.0.5"
term_glyph:
dependency: transitive
description:
name: term_glyph
url: "https://pub.dartlang.org"
source: hosted
version: "1.1.0"
test_api:
dependency: transitive
description:
name: test_api
url: "https://pub.dartlang.org"
source: hosted
version: "0.2.17"
typed_data:
dependency: transitive
description:
name: typed_data
url: "https://pub.dartlang.org"
source: hosted
version: "1.2.0"
uuid:
dependency: "direct main"
description:
name: uuid
url: "https://pub.dartlang.org"
source: hosted
version: "2.2.0"
vector_math:
dependency: transitive
description:
name: vector_math
url: "https://pub.dartlang.org"
source: hosted
version: "2.0.8"
xdg_directories:
dependency: transitive
description:
name: xdg_directories
url: "https://pub.dartlang.org"
source: hosted
version: "0.1.0"
sdks:
dart: ">=2.9.0-14.0.dev <3.0.0"
flutter: ">=1.12.13+hotfix.5 <2.0.0"

View File

@ -1,37 +0,0 @@
name: just_audio
description: Flutter plugin to play audio from streams, files, assets, DASH/HLS streams and playlists. Works with audio_service to play audio in the background.
version: 0.3.1
homepage: https://github.com/ryanheise/just_audio
environment:
sdk: '>=2.6.0 <3.0.0'
flutter: ">=1.12.8 <2.0.0"
dependencies:
rxdart: ^0.24.1
path: ^1.6.4
path_provider: ^1.6.10
async: ^2.4.1
uuid: ^2.2.0
flutter:
sdk: flutter
flutter_web_plugins:
sdk: flutter
dev_dependencies:
flutter_test:
sdk: flutter
flutter:
plugin:
platforms:
android:
package: com.ryanheise.just_audio
pluginClass: JustAudioPlugin
ios:
pluginClass: JustAudioPlugin
macos:
pluginClass: JustAudioPlugin
web:
pluginClass: JustAudioPlugin
fileName: just_audio_web.dart
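
For reference, a minimal use of the plugin declared above looks roughly like this (a sketch against the 0.3.x API; the URL is a placeholder):

```
import 'package:just_audio/just_audio.dart';

Future<void> playExample() async {
  final player = AudioPlayer();
  // setUrl returns the duration once the source is loaded (0.3.x API).
  final duration = await player.setUrl('https://example.com/track.mp3');
  print('Loaded track of length $duration');
  await player.play();
  await player.dispose();
}
```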

View File

@ -1,21 +0,0 @@
import 'package:flutter/services.dart';
import 'package:flutter_test/flutter_test.dart';
import 'package:just_audio/just_audio.dart';
void main() {
const MethodChannel channel = MethodChannel('just_audio');
setUp(() {
channel.setMockMethodCallHandler((MethodCall methodCall) async {
return '42';
});
});
tearDown(() {
channel.setMockMethodCallHandler(null);
});
// test('getPlatformVersion', () async {
// expect(await AudioPlayer.platformVersion, '42');
// });
}