For Freezer 0.5.0

parent 884bc7a269, commit ae319b9689

12  .github/workflows/auto-close.yml  (vendored, new file)
@@ -0,0 +1,12 @@
+name: Autocloser
+on: [issues]
+jobs:
+  autoclose:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Autoclose issues that did not follow issue template
+        uses: roots/issue-closer-action@v1.1
+        with:
+          repo-token: ${{ secrets.GITHUB_TOKEN }}
+          issue-close-message: "This issue was automatically closed because it did not follow the issue template."
+          issue-pattern: "Which API(.|[\\r\\n])*Minimal reproduction project(.|[\\r\\n])*To Reproduce|To which pages(.|[\\r\\n])*Describe your suggestion|Is your feature request(.|[\\r\\n])*Describe the solution you'd like"
41  CHANGELOG.md
@@ -1,3 +1,44 @@
+## 0.4.4
+
+* Fix crash when disposing of positionStream controller.
+* Handle interruptions correctly when willPauseWhenDucked is set.
+* Correct seek/position/duration in HLS streams (@snaeji).
+* Resume at correct speed after seek on iOS (@subhash279).
+
+## 0.4.3
+
+* Add section to README on configuring the audio session.
+
+## 0.4.2
+
+* Make default audio session settings compatible with iOS control center.
+* Update README to mention NSMicrophoneUsageDescription key in Info.plist.
+
+## 0.4.1
+
+* Fix setSpeed bug on iOS.
+
+## 0.4.0
+
+* Handles audio focus/interruptions via audio_session
+* Bug fixes
+
+## 0.3.4
+
+* Fix bug in icy metadata
+* Allow Android AudioAttributes to be set
+* Provide access to Android audio session ID
+
+## 0.3.3
+
+* Remove dependency on Java streams API
+
+## 0.3.2
+
+* Fix dynamic methods on ConcatenatingAudioSource for iOS/Android
+* Add sequenceStream/sequenceStateStream
+* Change asset URI from asset:// to asset:///
+
 ## 0.3.1
 
 * Prevent hang in dispose
2  LICENSE
@@ -1,6 +1,6 @@
 MIT License
 
-Copyright (c) 2019-2020 Ryan Heise.
+Copyright (c) 2019-2020 Ryan Heise and the project contributors.
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
94  README.md
@@ -4,25 +4,26 @@ This Flutter plugin plays audio from URLs, files, assets, DASH/HLS streams and playlists.
 
 ## Features
 
-| Feature | Android | iOS | MacOS | Web |
+| Feature | Android | iOS | macOS | Web |
 | ------- | :-------: | :-----: | :-----: | :-----: |
 | read from URL | ✅ | ✅ | ✅ | ✅ |
 | read from file | ✅ | ✅ | ✅ |  |
 | read from asset | ✅ | ✅ | ✅ |  |
 | request headers | ✅ | ✅ | ✅ |  |
 | DASH | ✅ |  |  |  |
 | HLS | ✅ | ✅ | ✅ |  |
 | buffer status/position | ✅ | ✅ | ✅ | ✅ |
 | play/pause/seek | ✅ | ✅ | ✅ | ✅ |
 | set volume | ✅ | ✅ | ✅ | ✅ |
 | set speed | ✅ | ✅ | ✅ | ✅ |
 | clip audio | ✅ | ✅ | ✅ | ✅ |
 | playlists | ✅ | ✅ | ✅ | ✅ |
 | looping | ✅ | ✅ | ✅ | ✅ |
 | shuffle | ✅ | ✅ | ✅ | ✅ |
 | compose audio | ✅ | ✅ | ✅ | ✅ |
 | gapless playback | ✅ | ✅ | ✅ |  |
 | report player errors | ✅ | ✅ | ✅ | ✅ |
+| handle phone call interruptions | ✅ | ✅ |  |  |
 
 Please consider reporting any bugs you encounter [here](https://github.com/ryanheise/just_audio/issues) or submitting pull requests [here](https://github.com/ryanheise/just_audio/pulls).
 
@@ -35,6 +36,8 @@ Initialisation:
 ```dart
 final player = AudioPlayer();
 var duration = await player.setUrl('https://foo.com/bar.mp3');
+var duration = await player.setFilePath('/path/to/file.mp3');
+var duration = await player.setAsset('path/to/asset.mp3');
 ```
 
 Standard controls:
@@ -147,6 +150,8 @@ player.playerStateStream.listen((state) {
 // - durationStream
 // - positionStream
 // - bufferedPositionStream
+// - sequenceStateStream
+// - sequenceStream
 // - currentIndexStream
 // - icyMetadataStream
 // - playingStream
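For context, the two streams added above are consumed like the existing ones; a minimal Dart sketch (the element type of `sequenceStream` is an assumption based on its name, not something this diff confirms):

```dart
player.sequenceStream.listen((sequence) {
  // Assumption: emits the flattened list of audio sources in the
  // playlist, and may emit null before anything has been loaded.
  print('playlist length: ${sequence?.length}');
});
```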
@@ -158,6 +163,21 @@ player.playerStateStream.listen((state) {
 // - playbackEventStream
 ```
 
+## Configuring the audio session
+
+If your app uses audio, you should tell the operating system what kind of usage scenario your app has and how your app will interact with other audio apps on the device. Different audio apps often have unique requirements. For example, when a navigator app speaks driving instructions, a music player should duck its audio while a podcast player should pause its audio. Depending on which one of these three apps you are building, you will need to configure your app's audio settings and callbacks to appropriately handle these interactions.
+
+just_audio will by default choose settings that are appropriate for a music player app, which means that it will automatically duck audio when a navigator starts speaking, but pause when a phone call or another music player starts. If you are building a podcast player or audio book reader, this behaviour would not be appropriate: while the user may be able to comprehend the navigator instructions while ducked music is playing in the background, it would be much more difficult to understand the navigator instructions while simultaneously listening to an audio book or podcast.
+
+You can use the [audio_session](https://pub.dev/packages/audio_session) package to change the default audio session configuration for your app. E.g. for a podcast player, you may use:
+
+```dart
+final session = await AudioSession.instance;
+await session.configure(AudioSessionConfiguration.speech());
+```
+
+Note: If your app uses a number of different audio plugins, e.g. for audio recording, or text to speech, or background audio, it is possible that those plugins may internally override each other's audio session settings, so it is recommended that you apply your own preferred configuration using audio_session after all other audio plugins have loaded. You may consider asking the developer of each audio plugin you use to provide an option to not overwrite these global settings and allow them to be managed externally.
+
 ## Platform specific configuration
 
 ### Android
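As the note above recommends, the configuration is best applied once at startup, after other audio plugins have initialised. A minimal sketch of that ordering (`MyApp` is a placeholder, and the startup structure is an assumption, not part of this commit):

```dart
import 'package:audio_session/audio_session.dart';
import 'package:flutter/widgets.dart';

Future<void> main() async {
  WidgetsFlutterBinding.ensureInitialized();
  // ... initialise any other audio plugins here first ...

  // Apply the preferred configuration last so it is not overwritten
  // by defaults installed by other plugins.
  final session = await AudioSession.instance;
  await session.configure(AudioSessionConfiguration.speech());

  runApp(MyApp()); // MyApp is a placeholder widget.
}
```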
@@ -168,8 +188,17 @@ If you wish to connect to non-HTTPS URLS, add the following attribute to the `application` element of your `AndroidManifest.xml` file:
 <application ... android:usesCleartextTraffic="true">
 ```
 
+If you need access to the player's AudioSession ID, you can listen to `AudioPlayer.androidAudioSessionIdStream`. Note that the AudioSession ID will change whenever you set new AudioAttributes.
+
 ### iOS
 
+Regardless of whether your app uses the microphone, Apple will require you to add the following key to your `Info.plist` file. The message will simply be ignored if your app doesn't use the microphone:
+
+```xml
+<key>NSMicrophoneUsageDescription</key>
+<string>... explain why you use (or don't use) the microphone ...</string>
+```
+
 If you wish to connect to non-HTTPS URLS, add the following to your `Info.plist` file:
 
 ```xml
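The stream mentioned in the new paragraph can be consumed directly from Dart; a minimal sketch (assuming the getter is named exactly as in the README note and emits null while the ID is unset):

```dart
player.androidAudioSessionIdStream.listen((sessionId) {
  if (sessionId != null) {
    // The ID could be handed to a platform channel to attach Android
    // audio effects (e.g. an equalizer) to this player's output.
    print('Android audio session ID: $sessionId');
  }
});
```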
@@ -182,27 +211,9 @@ If you wish to connect to non-HTTPS URLS, add the following to your `Info.plist` file:
 </dict>
 ```
 
-By default, iOS will mute your app's audio when your phone is switched to
-silent mode. Depending on the requirements of your app, you can change the
-default audio session category using `AudioPlayer.setIosCategory`. For example,
-if you are writing a media app, Apple recommends that you set the category to
-`AVAudioSessionCategoryPlayback`, which you can achieve by adding the following
-code to your app's initialisation:
-
-```dart
-AudioPlayer.setIosCategory(IosCategory.playback);
-```
-
-Note: If your app uses a number of different audio plugins in combination, e.g.
-for audio recording, or text to speech, or background audio, it is possible
-that those plugins may internally override the setting you choose here. You may
-consider asking the developer of each other plugin you use to provide a similar
-method so that you can configure the same audio session category universally
-across all plugins you use.
-
-### MacOS
-
-To allow your MacOS application to access audio files on the Internet, add the following to your `DebugProfile.entitlements` and `Release.entitlements` files:
-
+### macOS
+
+To allow your macOS application to access audio files on the Internet, add the following to your `DebugProfile.entitlements` and `Release.entitlements` files:
+
 ```xml
 <key>com.apple.security.network.client</key>
@@ -220,3 +231,8 @@ If you wish to connect to non-HTTPS URLS, add the following to your `Info.plist` file:
 <true/>
 </dict>
 ```
+
+## Related plugins
+
+* [audio_service](https://pub.dev/packages/audio_service): play any audio in the background and control playback from the lock screen, Android notifications, the iOS Control Center, and headset buttons.
+* [audio_session](https://pub.dev/packages/audio_session): configure your app's audio category (e.g. music vs speech) and configure how your app interacts with other audio apps (e.g. audio focus, ducking, mixing).
android/build.gradle
@@ -8,7 +8,7 @@ buildscript {
     }
 
     dependencies {
-        classpath 'com.android.tools.build:gradle:3.6.3'
+        classpath 'com.android.tools.build:gradle:3.5.0'
     }
 }
 
@@ -40,9 +40,9 @@ android {
 }
 
 dependencies {
-    implementation 'com.google.android.exoplayer:exoplayer-core:2.11.4'
-    implementation 'com.google.android.exoplayer:exoplayer-dash:2.11.4'
-    implementation 'com.google.android.exoplayer:exoplayer-hls:2.11.4'
-    implementation 'com.google.android.exoplayer:exoplayer-smoothstreaming:2.11.4'
+    implementation 'com.google.android.exoplayer:exoplayer-core:2.11.7'
+    implementation 'com.google.android.exoplayer:exoplayer-dash:2.11.7'
+    implementation 'com.google.android.exoplayer:exoplayer-hls:2.11.7'
+    implementation 'com.google.android.exoplayer:exoplayer-smoothstreaming:2.11.7'
     compile files('libs/extension-flac.aar')
 }

android/gradle/wrapper/gradle-wrapper.properties
@@ -1,6 +1,5 @@
-#Mon Aug 10 13:15:44 CEST 2020
 distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists
 zipStoreBase=GRADLE_USER_HOME
 zipStorePath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.4-all.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.2-all.zip

Binary file not shown.
android/src/main/java/com/ryanheise/just_audio/AudioPlayer.java
@@ -3,20 +3,21 @@ package com.ryanheise.just_audio;
 import android.content.Context;
 import android.net.Uri;
 import android.os.Handler;
-import android.util.Log;
 
 import com.google.android.exoplayer2.C;
 import com.google.android.exoplayer2.ExoPlaybackException;
-import com.google.android.exoplayer2.Format;
 import com.google.android.exoplayer2.PlaybackParameters;
 import com.google.android.exoplayer2.Player;
 import com.google.android.exoplayer2.SimpleExoPlayer;
 import com.google.android.exoplayer2.Timeline;
+import com.google.android.exoplayer2.audio.AudioAttributes;
+import com.google.android.exoplayer2.audio.AudioListener;
 import com.google.android.exoplayer2.metadata.Metadata;
 import com.google.android.exoplayer2.metadata.MetadataOutput;
 import com.google.android.exoplayer2.metadata.icy.IcyHeaders;
 import com.google.android.exoplayer2.metadata.icy.IcyInfo;
 import com.google.android.exoplayer2.source.ClippingMediaSource;
+import com.google.android.exoplayer2.source.MaskingMediaSource;
+import com.google.android.exoplayer2.upstream.HttpDataSource;
 import com.google.android.exoplayer2.source.ConcatenatingMediaSource;
 import com.google.android.exoplayer2.source.LoopingMediaSource;
 import com.google.android.exoplayer2.source.MediaSource;
@@ -32,8 +33,8 @@ import com.google.android.exoplayer2.upstream.DataSource;
 import com.google.android.exoplayer2.upstream.DefaultDataSourceFactory;
 import com.google.android.exoplayer2.upstream.DefaultHttpDataSource;
 import com.google.android.exoplayer2.upstream.DefaultHttpDataSourceFactory;
-import com.google.android.exoplayer2.upstream.HttpDataSource;
 import com.google.android.exoplayer2.util.Util;
+import io.flutter.Log;
 import io.flutter.plugin.common.BinaryMessenger;
 import io.flutter.plugin.common.EventChannel;
 import io.flutter.plugin.common.EventChannel.EventSink;
@@ -41,17 +42,19 @@ import io.flutter.plugin.common.MethodCall;
 import io.flutter.plugin.common.MethodChannel;
 import io.flutter.plugin.common.MethodChannel.MethodCallHandler;
 import io.flutter.plugin.common.MethodChannel.Result;
 
+import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Random;
-import java.util.stream.Collectors;
 import com.ryanheise.just_audio.DeezerDataSource;
 
-public class AudioPlayer implements MethodCallHandler, Player.EventListener, MetadataOutput {
+public class AudioPlayer implements MethodCallHandler, Player.EventListener, AudioListener, MetadataOutput {
 
 	static final String TAG = "AudioPlayer";
 
@@ -81,12 +84,12 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, MetadataOutput {
 	private int errorCount;
 
 	private SimpleExoPlayer player;
+	private Integer audioSessionId;
 	private MediaSource mediaSource;
 	private Integer currentIndex;
 	private Map<LoopingMediaSource, MediaSource> loopingChildren = new HashMap<>();
 	private Map<LoopingMediaSource, Integer> loopingCounts = new HashMap<>();
 	private final Handler handler = new Handler();
 
 	private final Runnable bufferWatcher = new Runnable() {
 		@Override
 		public void run() {
@@ -142,6 +145,15 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, MetadataOutput {
 		handler.post(bufferWatcher);
 	}
 
+	@Override
+	public void onAudioSessionId(int audioSessionId) {
+		if (audioSessionId == C.AUDIO_SESSION_ID_UNSET) {
+			this.audioSessionId = null;
+		} else {
+			this.audioSessionId = audioSessionId;
+		}
+	}
+
 	@Override
 	public void onMetadata(Metadata metadata) {
 		for (int i = 0; i < metadata.length(); i++) {
@@ -353,6 +365,10 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, MetadataOutput {
case "concatenating.clear":
|
case "concatenating.clear":
|
||||||
concatenating(args.get(0)).clear(handler, () -> result.success(null));
|
concatenating(args.get(0)).clear(handler, () -> result.success(null));
|
||||||
break;
|
break;
|
||||||
|
case "setAndroidAudioAttributes":
|
||||||
|
setAudioAttributes((Map<?, ?>)args.get(0));
|
||||||
|
result.success(null);
|
||||||
|
break;
|
||||||
default:
|
default:
|
||||||
result.notImplemented();
|
result.notImplemented();
|
||||||
break;
|
break;
|
||||||
@ -446,68 +462,72 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, Met
|
|||||||
Map<?, ?> map = (Map<?, ?>)json;
|
Map<?, ?> map = (Map<?, ?>)json;
|
||||||
String id = (String)map.get("id");
|
String id = (String)map.get("id");
|
||||||
switch ((String)map.get("type")) {
|
switch ((String)map.get("type")) {
|
||||||
case "progressive":
|
case "progressive":
|
||||||
Uri uri = Uri.parse((String)map.get("uri"));
|
Uri uri = Uri.parse((String)map.get("uri"));
|
||||||
//Deezer
|
//Deezer
|
||||||
if (uri.getHost() != null && uri.getHost().contains("dzcdn.net")) {
|
if (uri.getHost() != null && uri.getHost().contains("dzcdn.net")) {
|
||||||
//Track id is stored in URL fragment (after #)
|
//Track id is stored in URL fragment (after #)
|
||||||
String fragment = uri.getFragment();
|
String fragment = uri.getFragment();
|
||||||
uri = Uri.parse(((String)map.get("uri")).replace("#" + fragment, ""));
|
//Stream
|
||||||
return new ProgressiveMediaSource.Factory(
|
uri = Uri.parse(((String)map.get("uri")).replace("#" + fragment, ""));
|
||||||
() -> {
|
return new ProgressiveMediaSource.Factory(
|
||||||
HttpDataSource deezerDataSource = new DeezerDataSource(fragment);
|
() -> {
|
||||||
return deezerDataSource;
|
HttpDataSource deezerDataSource = new DeezerDataSource(fragment);
|
||||||
}
|
return deezerDataSource;
|
||||||
).setTag(id).createMediaSource(uri);
|
}
|
||||||
}
|
).setTag(id).createMediaSource(uri);
|
||||||
|
}
|
||||||
|
return new ProgressiveMediaSource.Factory(buildDataSourceFactory())
|
||||||
|
.setTag(id)
|
||||||
return new ProgressiveMediaSource.Factory(buildDataSourceFactory())
|
.createMediaSource(uri);
|
||||||
.setTag(id)
|
case "dash":
|
||||||
.createMediaSource(uri);
|
return new DashMediaSource.Factory(buildDataSourceFactory())
|
||||||
case "dash":
|
.setTag(id)
|
||||||
return new DashMediaSource.Factory(buildDataSourceFactory())
|
.createMediaSource(Uri.parse((String)map.get("uri")));
|
||||||
.setTag(id)
|
case "hls":
|
||||||
.createMediaSource(Uri.parse((String)map.get("uri")));
|
return new HlsMediaSource.Factory(buildDataSourceFactory())
|
||||||
case "hls":
|
.setTag(id)
|
||||||
return new HlsMediaSource.Factory(buildDataSourceFactory())
|
.createMediaSource(Uri.parse((String)map.get("uri")));
|
||||||
.setTag(id)
|
case "concatenating":
|
||||||
.createMediaSource(Uri.parse((String)map.get("uri")));
|
MediaSource[] mediaSources = getAudioSourcesArray(map.get("audioSources"));
|
||||||
case "concatenating":
|
return new ConcatenatingMediaSource(
|
||||||
List<Object> audioSources = (List<Object>)map.get("audioSources");
|
false, // isAtomic
|
||||||
return new ConcatenatingMediaSource(
|
(Boolean)map.get("useLazyPreparation"),
|
||||||
false, // isAtomic
|
new DefaultShuffleOrder(mediaSources.length),
|
||||||
(Boolean)map.get("useLazyPreparation"),
|
mediaSources);
|
||||||
new DefaultShuffleOrder(audioSources.size()),
|
case "clipping":
|
||||||
audioSources
|
Long start = getLong(map.get("start"));
|
||||||
.stream()
|
Long end = getLong(map.get("end"));
|
||||||
.map(s -> getAudioSource(s))
|
return new ClippingMediaSource(getAudioSource(map.get("audioSource")),
|
||||||
.toArray(MediaSource[]::new));
|
(start != null ? start : 0) * 1000L,
|
||||||
case "clipping":
|
(end != null ? end : C.TIME_END_OF_SOURCE) * 1000L);
|
||||||
Long start = getLong(map.get("start"));
|
case "looping":
|
||||||
Long end = getLong(map.get("end"));
|
Integer count = (Integer)map.get("count");
|
||||||
return new ClippingMediaSource(getAudioSource(map.get("audioSource")),
|
MediaSource looperChild = getAudioSource(map.get("audioSource"));
|
||||||
(start != null ? start : 0) * 1000L,
|
LoopingMediaSource looper = new LoopingMediaSource(looperChild, count);
|
||||||
(end != null ? end : C.TIME_END_OF_SOURCE) * 1000L);
|
// TODO: store both in a single map
|
||||||
case "looping":
|
loopingChildren.put(looper, looperChild);
|
||||||
Integer count = (Integer)map.get("count");
|
loopingCounts.put(looper, count);
|
||||||
MediaSource looperChild = getAudioSource(map.get("audioSource"));
|
return looper;
|
||||||
LoopingMediaSource looper = new LoopingMediaSource(looperChild, count);
|
default:
|
||||||
// TODO: store both in a single map
|
throw new IllegalArgumentException("Unknown AudioSource type: " + map.get("type"));
|
||||||
loopingChildren.put(looper, looperChild);
|
|
||||||
loopingCounts.put(looper, count);
|
|
||||||
return looper;
|
|
||||||
default:
|
|
||||||
throw new IllegalArgumentException("Unknown AudioSource type: " + map.get("type"));
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private MediaSource[] getAudioSourcesArray(final Object json) {
|
||||||
|
List<MediaSource> mediaSources = getAudioSources(json);
|
||||||
|
MediaSource[] mediaSourcesArray = new MediaSource[mediaSources.size()];
|
||||||
|
mediaSources.toArray(mediaSourcesArray);
|
||||||
|
return mediaSourcesArray;
|
||||||
|
}
|
||||||
|
|
||||||
private List<MediaSource> getAudioSources(final Object json) {
|
private List<MediaSource> getAudioSources(final Object json) {
|
||||||
return ((List<Object>)json)
|
List<Object> audioSources = (List<Object>)json;
|
||||||
.stream()
|
List<MediaSource> mediaSources = new ArrayList<MediaSource>();
|
||||||
.map(s -> getAudioSource(s))
|
for (int i = 0 ; i < audioSources.size(); i++) {
|
||||||
.collect(Collectors.toList());
|
mediaSources.add(getAudioSource(audioSources.get(i)));
|
||||||
|
}
|
||||||
|
return mediaSources;
|
||||||
}
|
}
|
||||||
|
|
||||||
private DataSource.Factory buildDataSourceFactory() {
|
private DataSource.Factory buildDataSourceFactory() {
|
||||||
@@ -548,9 +568,20 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, MetadataOutput {
 			player = new SimpleExoPlayer.Builder(context).build();
 			player.addMetadataOutput(this);
 			player.addListener(this);
+			player.addAudioListener(this);
 		}
 	}
 
+	private void setAudioAttributes(Map<?, ?> json) {
+		ensurePlayerInitialized();
+		AudioAttributes.Builder builder = new AudioAttributes.Builder();
+		builder.setContentType((Integer)json.get("contentType"));
+		builder.setFlags((Integer)json.get("flags"));
+		builder.setUsage((Integer)json.get("usage"));
+		//builder.setAllowedCapturePolicy((Integer)json.get("allowedCapturePolicy"));
+		player.setAudioAttributes(builder.build());
+	}
+
 	private void broadcastPlaybackEvent() {
 		final Map<String, Object> event = new HashMap<String, Object>();
 		event.put("processingState", processingState.ordinal());
@@ -560,7 +591,18 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, MetadataOutput {
event.put("icyMetadata", collectIcyMetadata());
|
event.put("icyMetadata", collectIcyMetadata());
|
||||||
event.put("duration", duration = getDuration());
|
event.put("duration", duration = getDuration());
|
||||||
event.put("currentIndex", currentIndex);
|
event.put("currentIndex", currentIndex);
|
||||||
event.put("qualityString", null);
|
event.put("androidAudioSessionId", audioSessionId);
|
||||||
|
|
||||||
|
|
||||||
|
String qualityString = "";
|
||||||
|
if (player != null && player.getAudioFormat() != null && player.getAudioFormat().sampleMimeType != null) {
|
||||||
|
if (player.getAudioFormat().sampleMimeType.equals("audio/mpeg")) {
|
||||||
|
qualityString = "MP3";
|
||||||
|
} else {
|
||||||
|
qualityString = "FLAC";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
event.put("qualityString", qualityString);
|
||||||
|
|
||||||
if (eventSink != null) {
|
if (eventSink != null) {
|
||||||
eventSink.success(event);
|
eventSink.success(event);
|
||||||
|
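The new `androidAudioSessionId` and `qualityString` fields travel to Dart inside the playback event; a hypothetical sketch of reading them (the Dart-side field names are assumptions; `qualityString` is this fork's addition and not part of upstream just_audio):

```dart
player.playbackEventStream.listen((event) {
  // Assumption: the fork exposes the new event fields under these names.
  print('quality: ${event.qualityString}');
});
```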
android/src/main/java/com/ryanheise/just_audio/DeezerDataSource.java
@@ -10,6 +10,7 @@ import java.io.ByteArrayOutputStream;
 import java.io.FilterInputStream;
 import java.io.IOException;
 import java.io.InputStream;
+import java.io.InterruptedIOException;
 import java.net.HttpURLConnection;
 import java.net.URL;
 import java.security.MessageDigest;
@@ -71,9 +72,9 @@ public class DeezerDataSource implements HttpDataSource {
 			this.connection = (HttpURLConnection) url.openConnection();
 			this.connection.setChunkedStreamingMode(2048);
 			if (dataSpec.position > 0) {
-				this.counter = (int) (dataSpec.position/2048);
+				this.counter = (int) (dataSpec.position / 2048);
 				this.connection.setRequestProperty("Range",
-					"bytes=" + Long.toString(this.counter*2048) + "-");
+					"bytes=" + Long.toString(this.counter * 2048) + "-");
 			}
 
 			InputStream is = this.connection.getInputStream();
@@ -84,7 +85,7 @@ public class DeezerDataSource implements HttpDataSource {
 					int t = 0;
 					int read = 0;
 					while (read != -1 && t != 2048) {
-						t += read = in.read(b, t, 2048-t);
+						t += read = in.read(b, t, 2048 - t);
 					}
 
 					if (counter % 3 == 0) {
@@ -98,9 +99,12 @@ public class DeezerDataSource implements HttpDataSource {
 					return t;
 
 				}
-			},2048);
+			}, 2048);
 
+		} catch (InterruptedIOException e) {
+			//Interrupted, do nothing
+			return -1;
 		} catch (Exception e) {
 			//Quality fallback
 			if (this.quality == 1) {
@@ -125,6 +129,8 @@ public class DeezerDataSource implements HttpDataSource {
 
 	@Override
 	public int read(byte[] buffer, int offset, int length) throws HttpDataSourceException {
+		if (this.inputStream == null) throw new HttpDataSourceException("Input stream null!", this.dataSpec, HttpDataSourceException.TYPE_READ);
+
 		int read = 0;
 		try {
 			read = this.inputStream.read(buffer, offset, length);
darwin/Classes/AudioPlayer.m
@@ -39,6 +39,7 @@
 	BOOL _automaticallyWaitsToMinimizeStalling;
 	BOOL _configuredSession;
 	BOOL _playing;
+	float _speed;
 }
 
 - (instancetype)initWithRegistrar:(NSObject<FlutterPluginRegistrar> *)registrar playerId:(NSString*)idParam configuredSession:(BOOL)configuredSession {
@@ -74,6 +75,7 @@
 	_loadResult = nil;
 	_playResult = nil;
 	_automaticallyWaitsToMinimizeStalling = YES;
+	_speed = 1.0f;
 	__weak __typeof__(self) weakSelf = self;
 	[_methodChannel setMethodCallHandler:^(FlutterMethodCall* call, FlutterResult result) {
 		[weakSelf handleMethodCall:call result:result];
@@ -127,7 +129,7 @@
 		[self concatenatingInsertAll:(NSString*)args[0] index:[args[1] intValue] sources:(NSArray*)args[2]];
 		result(nil);
 	} else if ([@"concatenating.removeAt" isEqualToString:call.method]) {
-		[self concatenatingRemoveAt:(NSString*)args[0] index:(int)args[1]];
+		[self concatenatingRemoveAt:(NSString*)args[0] index:[args[1] intValue]];
 		result(nil);
 	} else if ([@"concatenating.removeRange" isEqualToString:call.method]) {
 		[self concatenatingRemoveRange:(NSString*)args[0] start:[args[1] intValue] end:[args[2] intValue]];
@@ -138,6 +140,8 @@
} else if ([@"concatenating.clear" isEqualToString:call.method]) {
|
} else if ([@"concatenating.clear" isEqualToString:call.method]) {
|
||||||
[self concatenatingClear:(NSString*)args[0]];
|
[self concatenatingClear:(NSString*)args[0]];
|
||||||
result(nil);
|
result(nil);
|
||||||
|
} else if ([@"setAndroidAudioAttributes" isEqualToString:call.method]) {
|
||||||
|
result(nil);
|
||||||
} else {
|
} else {
|
||||||
result(FlutterMethodNotImplemented);
|
result(FlutterMethodNotImplemented);
|
||||||
}
|
}
|
||||||
@@ -251,7 +255,8 @@
 	// Re-index the audio sources.
 	_indexedAudioSources = [[NSMutableArray alloc] init];
 	[_audioSource buildSequence:_indexedAudioSources treeIndex:0];
-	_index = [self indexForItem:_player.currentItem];
+	[self updateOrder];
+	[self enqueueFrom:[self indexForItem:_player.currentItem]];
 	[self broadcastPlaybackEvent];
 }
 
@@ -329,7 +334,7 @@
 		return 0;
 	} else if (CMTIME_IS_VALID(_seekPos)) {
 		return (int)(1000 * CMTimeGetSeconds(_seekPos));
-	} else if (_indexedAudioSources) {
+	} else if (_indexedAudioSources && _indexedAudioSources.count > 0) {
 		int ms = (int)(1000 * CMTimeGetSeconds(_indexedAudioSources[_index].position));
 		if (ms < 0) ms = 0;
 		return ms;
@@ -341,7 +346,7 @@
 - (int)getBufferedPosition {
 	if (_processingState == none || _processingState == loading) {
 		return 0;
-	} else if (_indexedAudioSources) {
+	} else if (_indexedAudioSources && _indexedAudioSources.count > 0) {
 		int ms = (int)(1000 * CMTimeGetSeconds(_indexedAudioSources[_index].bufferedPosition));
 		if (ms < 0) ms = 0;
 		return ms;
@@ -353,7 +358,7 @@
 - (int)getDuration {
 	if (_processingState == none) {
 		return -1;
-	} else if (_indexedAudioSources) {
+	} else if (_indexedAudioSources && _indexedAudioSources.count > 0) {
 		int v = (int)(1000 * CMTimeGetSeconds(_indexedAudioSources[_index].duration));
 		return v;
 	} else {
@@ -425,7 +430,6 @@
 }
 
 - (void)enqueueFrom:(int)index {
-	int oldIndex = _index;
 	_index = index;
 
 	// Update the queue while keeping the currently playing item untouched.
@@ -436,22 +440,27 @@
 	// First, remove all _player items except for the currently playing one (if any).
 	IndexedPlayerItem *oldItem = _player.currentItem;
 	IndexedPlayerItem *existingItem = nil;
+	IndexedPlayerItem *newItem = _indexedAudioSources.count > 0 ? _indexedAudioSources[_index].playerItem : nil;
 	NSArray *oldPlayerItems = [NSArray arrayWithArray:_player.items];
 	// In the first pass, preserve the old and new items.
 	for (int i = 0; i < oldPlayerItems.count; i++) {
-		if (oldPlayerItems[i] == _indexedAudioSources[_index].playerItem) {
+		if (oldPlayerItems[i] == newItem) {
 			// Preserve and tag new item if it is already in the queue.
 			existingItem = oldPlayerItems[i];
+			//NSLog(@"Preserving existing item %d", [self indexForItem:existingItem]);
 		} else if (oldPlayerItems[i] == oldItem) {
+			//NSLog(@"Preserving old item %d", [self indexForItem:oldItem]);
 			// Temporarily preserve old item, just to avoid jumping to
 			// intermediate queue positions unnecessarily. We only want to jump
 			// once to _index.
 		} else {
+			//NSLog(@"Removing item %d", [self indexForItem:oldPlayerItems[i]]);
 			[_player removeItem:oldPlayerItems[i]];
 		}
 	}
 	// In the second pass, remove the old item (if different from new item).
-	if (_index != oldIndex) {
+	if (oldItem && newItem != oldItem) {
+		//NSLog(@"removing old item %d", [self indexForItem:oldItem]);
 		[_player removeItem:oldItem];
 	}
 
@@ -464,6 +473,7 @@
 		int si = [_order[i] intValue];
 		if (si == _index) include = YES;
 		if (include && _indexedAudioSources[si].playerItem != existingItem) {
+			//NSLog(@"inserting item %d", si);
 			[_player insertItem:_indexedAudioSources[si].playerItem afterItem:nil];
 		}
 	}
@@ -471,7 +481,7 @@
/* NSLog(@"after reorder: _player.items.count: ", _player.items.count); */
|
/* NSLog(@"after reorder: _player.items.count: ", _player.items.count); */
|
||||||
/* [self dumpQueue]; */
|
/* [self dumpQueue]; */
|
||||||
|
|
||||||
if (_processingState != loading && oldItem != _indexedAudioSources[_index].playerItem) {
|
if (_processingState != loading && oldItem != newItem) {
|
||||||
// || !_player.currentItem.playbackLikelyToKeepUp;
|
// || !_player.currentItem.playbackLikelyToKeepUp;
|
||||||
if (_player.currentItem.playbackBufferEmpty) {
|
if (_player.currentItem.playbackBufferEmpty) {
|
||||||
[self enterBuffering:@"enqueueFrom playbackBufferEmpty"];
|
[self enterBuffering:@"enqueueFrom playbackBufferEmpty"];
|
||||||
@@ -480,6 +490,8 @@
 		}
 		[self updatePosition];
 	}
+
+	[self updateEndAction];
 }
 
 - (void)updatePosition {
@@ -621,13 +633,14 @@
 	} else {
 		IndexedPlayerItem *endedPlayerItem = (IndexedPlayerItem *)notification.object;
 		IndexedAudioSource *endedSource = endedPlayerItem.audioSource;
-		// When an item ends, seek back to its beginning.
-		[endedSource seek:kCMTimeZero];
 
 		if ([_orderInv[_index] intValue] + 1 < [_order count]) {
+			// When an item ends, seek back to its beginning.
+			[endedSource seek:kCMTimeZero];
 			// account for automatic move to next item
 			_index = [_order[[_orderInv[_index] intValue] + 1] intValue];
 			NSLog(@"advance to next: index = %d", _index);
+			[self updateEndAction];
 			[self broadcastPlaybackEvent];
 		} else {
 			// reached end of playlist
@@ -642,11 +655,13 @@
 			// sources.
 			// For now we just do a seek back to the start.
 			if ([_order count] == 1) {
-				[self seek:kCMTimeZero index:[NSNull null] completionHandler:^(BOOL finished) {
+				[self seek:kCMTimeZero index:_order[0] completionHandler:^(BOOL finished) {
 					// XXX: Necessary?
 					[self play];
 				}];
 			} else {
+				// When an item ends, seek back to its beginning.
+				[endedSource seek:kCMTimeZero];
 				[self seek:kCMTimeZero index:_order[0] completionHandler:^(BOOL finished) {
 					// XXX: Necessary?
 					[self play];
@@ -788,6 +803,7 @@
 			// account for automatic move to next item
 			_index = [_order[[_orderInv[_index] intValue] + 1] intValue];
 			NSLog(@"advance to next on error: index = %d", _index);
+			[self updateEndAction];
 			[self broadcastPlaybackEvent];
 		} else {
 			NSLog(@"error on last item");
@@ -800,6 +816,7 @@
 			// notifying this observer.
 			NSLog(@"Queue change detected. Adjusting index from %d -> %d", _index, expectedIndex);
 			_index = expectedIndex;
+			[self updateEndAction];
 			[self broadcastPlaybackEvent];
 		}
 	}
@@ -833,7 +850,7 @@
 		if (shouldResumePlayback) {
 			_player.actionAtItemEnd = originalEndAction;
 			// TODO: This logic is almost duplicated in seek. See if we can reuse this code.
-			[_player play];
+			_player.rate = _speed;
 		}
 	}];
 } else {
@@ -904,7 +921,7 @@
 		[[AVAudioSession sharedInstance] setActive:YES error:nil];
 	}
 #endif
-	[_player play];
+	_player.rate = _speed;
 	[self updatePosition];
 	if (@available(macOS 10.12, iOS 10.0, *)) {}
 	else {
@@ -946,21 +963,26 @@
 	if (speed == 1.0
 			|| (speed < 1.0 && _player.currentItem.canPlaySlowForward)
 			|| (speed > 1.0 && _player.currentItem.canPlayFastForward)) {
-		_player.rate = speed;
+		_speed = speed;
+		if (_playing) {
+			_player.rate = speed;
+		}
 	}
 	[self updatePosition];
 }
 
 - (void)setLoopMode:(int)loopMode {
 	_loopMode = loopMode;
-	if (_player) {
-		switch (_loopMode) {
-			case loopOne:
-				_player.actionAtItemEnd = AVPlayerActionAtItemEndPause; // AVPlayerActionAtItemEndNone
-				break;
-			default:
-				_player.actionAtItemEnd = AVPlayerActionAtItemEndAdvance;
-		}
-	}
+	[self updateEndAction];
+}
+
+- (void)updateEndAction {
+	// Should update this whenever the audio source changes and whenever _index changes.
+	if (!_player) return;
+	if (_audioSource && [_orderInv[_index] intValue] + 1 < [_order count] && _loopMode != loopOne) {
+		_player.actionAtItemEnd = AVPlayerActionAtItemEndAdvance;
+	} else {
+		_player.actionAtItemEnd = AVPlayerActionAtItemEndPause; // AVPlayerActionAtItemEndNone
+	}
 }
 
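The effect of routing playback through the stored `_speed` (instead of calling `[_player play]`, which always resets AVPlayer's rate to 1.0) is that a speed chosen while paused survives the next play. A small Dart sketch of the intended behaviour:

```dart
await player.setSpeed(1.5); // while paused, only records the speed
player.play();              // resumes at 1.5x rather than 1.0x
```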
@@ -1050,7 +1072,7 @@
 			}
 		}
 		if (_playing) {
-			[_player play];
+			_player.rate = _speed;
 		}
 		_seekPos = kCMTimeInvalid;
 		[self broadcastPlaybackEvent];
@@ -1061,7 +1083,15 @@
 	} else {
 		_seekPos = kCMTimeInvalid;
 		if (_playing) {
-			[_player play];
+			if (@available(iOS 10.0, *)) {
+				// NOTE: Re-enable this line only after figuring out
+				// how to detect buffering when buffered audio is not
+				// immediately available.
+				//[_player playImmediatelyAtRate:_speed];
+				_player.rate = _speed;
+			} else {
+				_player.rate = _speed;
+			}
 		}
 	}
 }
@@ -1083,7 +1113,15 @@
 		// If playing, buffering will be detected either by:
 		// 1. checkForDiscontinuity
 		// 2. timeControlStatus
-		[_player play];
+		if (@available(iOS 10.0, *)) {
+			// NOTE: Re-enable this line only after figuring out how to
+			// detect buffering when buffered audio is not immediately
+			// available.
+			//[_player playImmediatelyAtRate:_speed];
+			_player.rate = _speed;
+		} else {
+			_player.rate = _speed;
+		}
 	} else {
 		// If not playing, there is no reliable way to detect
 		// when buffering has completed, so we use
darwin/Classes/UriAudioSource.m
@@ -50,19 +50,36 @@
 
 - (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler {
 	if (!completionHandler || (_playerItem.status == AVPlayerItemStatusReadyToPlay)) {
-		[_playerItem seekToTime:position toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero completionHandler:completionHandler];
+		CMTimeRange seekableRange = [_playerItem.seekableTimeRanges.lastObject CMTimeRangeValue];
+		CMTime relativePosition = CMTimeAdd(position, seekableRange.start);
+		[_playerItem seekToTime:relativePosition toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero completionHandler:completionHandler];
 	}
 }
 
 - (CMTime)duration {
-	return _playerItem.duration;
+	NSValue *seekableRange = _playerItem.seekableTimeRanges.lastObject;
+	if (seekableRange) {
+		CMTimeRange seekableDuration = [seekableRange CMTimeRangeValue];
+		return seekableDuration.duration;
+	} else {
+		return _playerItem.duration;
+	}
 }
 
 - (void)setDuration:(CMTime)duration {
 }
 
 - (CMTime)position {
-	return _playerItem.currentTime;
+	NSValue *seekableRange = _playerItem.seekableTimeRanges.lastObject;
+	if (seekableRange) {
+		CMTimeRange range = [seekableRange CMTimeRangeValue];
+		return CMTimeSubtract(_playerItem.currentTime, range.start);
+	} else {
+		return _playerItem.currentTime;
+	}
 }
 
 - (CMTime)bufferedPosition {
File diff suppressed because it is too large

1  ios/Classes/AudioPlayer.m  (symbolic link)
@@ -0,0 +1 @@
+../../darwin/Classes/AudioPlayer.m

ios/Classes/AudioSource.m  (old file contents, replaced by the symbolic link below)
@@ -1,37 +0,0 @@
#import "AudioSource.h"
|
|
||||||
#import <AVFoundation/AVFoundation.h>
|
|
||||||
|
|
||||||
@implementation AudioSource {
|
|
||||||
NSString *_sourceId;
|
|
||||||
}
|
|
||||||
|
|
||||||
- (instancetype)initWithId:(NSString *)sid {
|
|
||||||
self = [super init];
|
|
||||||
NSAssert(self, @"super init cannot be nil");
|
|
||||||
_sourceId = sid;
|
|
||||||
return self;
|
|
||||||
}
|
|
||||||
|
|
||||||
- (NSString *)sourceId {
|
|
||||||
return _sourceId;
|
|
||||||
}
|
|
||||||
|
|
||||||
- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex {
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
- (void)findById:(NSString *)sourceId matches:(NSMutableArray<AudioSource *> *)matches {
|
|
||||||
if ([_sourceId isEqualToString:sourceId]) {
|
|
||||||
[matches addObject:self];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
- (NSArray *)getShuffleOrder {
|
|
||||||
return @[];
|
|
||||||
}
|
|
||||||
|
|
||||||
- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex {
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
@end
|
|
1  ios/Classes/AudioSource.m  (symbolic link)
@@ -0,0 +1 @@
+../../darwin/Classes/AudioSource.m

ios/Classes/ClippingAudioSource.m  (old file contents, replaced by the symbolic link below)
@@ -1,79 +0,0 @@
#import "AudioSource.h"
|
|
||||||
#import "ClippingAudioSource.h"
|
|
||||||
#import "IndexedPlayerItem.h"
|
|
||||||
#import "UriAudioSource.h"
|
|
||||||
#import <AVFoundation/AVFoundation.h>
|
|
||||||
|
|
||||||
@implementation ClippingAudioSource {
|
|
||||||
UriAudioSource *_audioSource;
|
|
||||||
CMTime _start;
|
|
||||||
CMTime _end;
|
|
||||||
}
|
|
||||||
|
|
||||||
- (instancetype)initWithId:(NSString *)sid audioSource:(UriAudioSource *)audioSource start:(NSNumber *)start end:(NSNumber *)end {
|
|
||||||
self = [super initWithId:sid];
|
|
||||||
NSAssert(self, @"super init cannot be nil");
|
|
||||||
_audioSource = audioSource;
|
|
||||||
_start = start == [NSNull null] ? kCMTimeZero : CMTimeMake([start intValue], 1000);
|
|
||||||
_end = end == [NSNull null] ? kCMTimeInvalid : CMTimeMake([end intValue], 1000);
|
|
||||||
return self;
|
|
||||||
}
|
|
||||||
|
|
||||||
- (UriAudioSource *)audioSource {
|
|
||||||
return _audioSource;
|
|
||||||
}
|
|
||||||
|
|
||||||
- (void)findById:(NSString *)sourceId matches:(NSMutableArray<AudioSource *> *)matches {
|
|
||||||
[super findById:sourceId matches:matches];
|
|
||||||
[_audioSource findById:sourceId matches:matches];
|
|
||||||
}
|
|
||||||
|
|
||||||
- (void)attach:(AVQueuePlayer *)player {
|
|
||||||
[super attach:player];
|
|
||||||
_audioSource.playerItem.forwardPlaybackEndTime = _end;
|
|
||||||
// XXX: Not needed since currentItem observer handles it?
|
|
||||||
[self seek:kCMTimeZero];
|
|
||||||
}
|
|
||||||
|
|
||||||
- (IndexedPlayerItem *)playerItem {
|
|
||||||
return _audioSource.playerItem;
|
|
||||||
}
|
|
||||||
|
|
||||||
- (NSArray *)getShuffleOrder {
|
|
||||||
return @[@(0)];
|
|
||||||
}
|
|
||||||
|
|
||||||
- (void)play:(AVQueuePlayer *)player {
|
|
||||||
}
|
|
||||||
|
|
||||||
- (void)pause:(AVQueuePlayer *)player {
|
|
||||||
}
|
|
||||||
|
|
||||||
- (void)stop:(AVQueuePlayer *)player {
|
|
||||||
}
|
|
||||||
|
|
||||||
- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler {
|
|
||||||
if (!completionHandler || (self.playerItem.status == AVPlayerItemStatusReadyToPlay)) {
|
|
||||||
CMTime absPosition = CMTimeAdd(_start, position);
|
|
||||||
[_audioSource.playerItem seekToTime:absPosition toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero completionHandler:completionHandler];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
- (CMTime)duration {
|
|
||||||
return CMTimeSubtract(CMTIME_IS_INVALID(_end) ? self.playerItem.duration : _end, _start);
|
|
||||||
}
|
|
||||||
|
|
||||||
- (void)setDuration:(CMTime)duration {
|
|
||||||
}
|
|
||||||
|
|
||||||
- (CMTime)position {
|
|
||||||
return CMTimeSubtract(self.playerItem.currentTime, _start);
|
|
||||||
}
|
|
||||||
|
|
||||||
- (CMTime)bufferedPosition {
|
|
||||||
CMTime pos = CMTimeSubtract(_audioSource.bufferedPosition, _start);
|
|
||||||
CMTime dur = [self duration];
|
|
||||||
return CMTimeCompare(pos, dur) >= 0 ? dur : pos;
|
|
||||||
}
|
|
||||||
|
|
||||||
@end
|
|
1  ios/Classes/ClippingAudioSource.m  (symbolic link)
@@ -0,0 +1 @@
+../../darwin/Classes/ClippingAudioSource.m

ios/Classes/ConcatenatingAudioSource.m  (old file contents, removed)
@@ -1,109 +0,0 @@
#import "AudioSource.h"
|
|
||||||
#import "ConcatenatingAudioSource.h"
|
|
||||||
#import <AVFoundation/AVFoundation.h>
|
|
||||||
#import <stdlib.h>
|
|
||||||
|
|
||||||
@implementation ConcatenatingAudioSource {
|
|
||||||
NSMutableArray<AudioSource *> *_audioSources;
|
|
||||||
NSMutableArray<NSNumber *> *_shuffleOrder;
|
|
||||||
}
|
|
||||||
|
|
||||||
- (instancetype)initWithId:(NSString *)sid audioSources:(NSMutableArray<AudioSource *> *)audioSources {
|
|
||||||
self = [super initWithId:sid];
|
|
||||||
NSAssert(self, @"super init cannot be nil");
|
|
||||||
_audioSources = audioSources;
|
|
||||||
return self;
|
|
||||||
}
|
|
||||||
|
|
||||||
- (int)count {
|
|
||||||
return _audioSources.count;
|
|
||||||
}
|
|
||||||
|
|
||||||
- (void)insertSource:(AudioSource *)audioSource atIndex:(int)index {
|
|
||||||
[_audioSources insertObject:audioSource atIndex:index];
|
|
||||||
}
|
|
||||||
|
|
||||||
- (void)removeSourcesFromIndex:(int)start toIndex:(int)end {
|
|
||||||
if (end == -1) end = _audioSources.count;
|
|
||||||
for (int i = start; i < end; i++) {
|
|
||||||
[_audioSources removeObjectAtIndex:start];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
- (void)moveSourceFromIndex:(int)currentIndex toIndex:(int)newIndex {
|
|
||||||
AudioSource *source = _audioSources[currentIndex];
|
|
||||||
[_audioSources removeObjectAtIndex:currentIndex];
|
|
||||||
[_audioSources insertObject:source atIndex:newIndex];
|
|
||||||
}
|
|
||||||
|
|
||||||
- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex {
|
|
||||||
for (int i = 0; i < [_audioSources count]; i++) {
|
|
||||||
treeIndex = [_audioSources[i] buildSequence:sequence treeIndex:treeIndex];
|
|
||||||
}
|
|
||||||
return treeIndex;
|
|
||||||
}
|
|
||||||
|
|
||||||
- (void)findById:(NSString *)sourceId matches:(NSMutableArray<AudioSource *> *)matches {
|
|
||||||
[super findById:sourceId matches:matches];
|
|
||||||
for (int i = 0; i < [_audioSources count]; i++) {
|
|
||||||
[_audioSources[i] findById:sourceId matches:matches];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
- (NSArray *)getShuffleOrder {
|
|
||||||
NSMutableArray *order = [NSMutableArray new];
|
|
||||||
int offset = [order count];
|
|
||||||
NSMutableArray *childOrders = [NSMutableArray new]; // array of array of ints
|
|
||||||
for (int i = 0; i < [_audioSources count]; i++) {
|
|
||||||
AudioSource *audioSource = _audioSources[i];
|
|
||||||
NSArray *childShuffleOrder = [audioSource getShuffleOrder];
|
|
||||||
NSMutableArray *offsetChildShuffleOrder = [NSMutableArray new];
|
|
||||||
for (int j = 0; j < [childShuffleOrder count]; j++) {
|
|
||||||
[offsetChildShuffleOrder addObject:@([childShuffleOrder[j] integerValue] + offset)];
|
|
||||||
}
|
|
||||||
[childOrders addObject:offsetChildShuffleOrder];
|
|
||||||
offset += [childShuffleOrder count];
|
|
||||||
}
|
|
||||||
for (int i = 0; i < [_audioSources count]; i++) {
|
|
||||||
[order addObjectsFromArray:childOrders[[_shuffleOrder[i] integerValue]]];
|
|
||||||
}
|
|
||||||
return order;
|
|
||||||
}
|
|
||||||
|
|
||||||
- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex {
|
|
||||||
int currentChildIndex = -1;
|
|
||||||
for (int i = 0; i < [_audioSources count]; i++) {
|
|
||||||
int indexBefore = treeIndex;
|
|
||||||
AudioSource *child = _audioSources[i];
|
|
||||||
treeIndex = [child shuffle:treeIndex currentIndex:currentIndex];
|
|
||||||
if (currentIndex >= indexBefore && currentIndex < treeIndex) {
|
|
||||||
currentChildIndex = i;
|
|
||||||
} else {}
|
|
||||||
}
|
|
||||||
// Shuffle so that the current child is first in the shuffle order
|
|
||||||
_shuffleOrder = [NSMutableArray arrayWithCapacity:[_audioSources count]];
|
|
||||||
for (int i = 0; i < [_audioSources count]; i++) {
|
|
||||||
[_shuffleOrder addObject:@(0)];
|
|
||||||
}
|
|
||||||
NSLog(@"shuffle: audioSources.count=%d and shuffleOrder.count=%d", [_audioSources count], [_shuffleOrder count]);
|
|
||||||
// First generate a random shuffle
|
|
||||||
for (int i = 0; i < [_audioSources count]; i++) {
|
|
||||||
int j = arc4random_uniform(i + 1);
|
|
||||||
_shuffleOrder[i] = _shuffleOrder[j];
|
|
||||||
_shuffleOrder[j] = @(i);
|
|
||||||
}
|
|
||||||
// Then bring currentIndex to the front
|
|
||||||
if (currentChildIndex != -1) {
|
|
||||||
for (int i = 1; i < [_audioSources count]; i++) {
|
|
||||||
if ([_shuffleOrder[i] integerValue] == currentChildIndex) {
|
|
||||||
NSNumber *v = _shuffleOrder[0];
|
|
||||||
_shuffleOrder[0] = _shuffleOrder[i];
|
|
||||||
_shuffleOrder[i] = v;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return treeIndex;
|
|
||||||
}
|
|
||||||
|
|
||||||
@end
|
|
1
ios/Classes/ConcatenatingAudioSource.m
Symbolic link
1
ios/Classes/ConcatenatingAudioSource.m
Symbolic link
@ -0,0 +1 @@
|
|||||||
|
../../darwin/Classes/ConcatenatingAudioSource.m
|
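The shuffle method above first performs a Fisher-Yates-style pass with `arc4random_uniform` and then swaps the child containing the current index to the front, so reshuffling never jumps away from the playing item. A Dart-side sketch of the playlist this backs (URLs are placeholders):

```dart
Future<void> loadShuffledPlaylist(AudioPlayer player) async {
  final playlist = ConcatenatingAudioSource(children: [
    AudioSource.uri(Uri.parse('https://example.com/a.mp3')),
    AudioSource.uri(Uri.parse('https://example.com/b.mp3')),
    AudioSource.uri(Uri.parse('https://example.com/c.mp3')),
  ]);
  await player.load(playlist);
  // Triggers the native shuffle above; the child containing the current
  // index is moved to the front of the shuffle order.
  await player.setShuffleModeEnabled(true);
}
```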
@@ -1,68 +0,0 @@
#import "IndexedAudioSource.h"
#import "IndexedPlayerItem.h"
#import <AVFoundation/AVFoundation.h>

@implementation IndexedAudioSource {
    BOOL _isAttached;
}

- (instancetype)initWithId:(NSString *)sid {
    self = [super init];
    NSAssert(self, @"super init cannot be nil");
    _isAttached = NO;
    return self;
}

- (IndexedPlayerItem *)playerItem {
    return nil;
}

- (BOOL)isAttached {
    return _isAttached;
}

- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex {
    [sequence addObject:self];
    return treeIndex + 1;
}

- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex {
    return treeIndex + 1;
}

- (void)attach:(AVQueuePlayer *)player {
    _isAttached = YES;
}

- (void)play:(AVQueuePlayer *)player {
}

- (void)pause:(AVQueuePlayer *)player {
}

- (void)stop:(AVQueuePlayer *)player {
}

- (void)seek:(CMTime)position {
    [self seek:position completionHandler:nil];
}

- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler {
}

- (CMTime)duration {
    return kCMTimeInvalid;
}

- (void)setDuration:(CMTime)duration {
}

- (CMTime)position {
    return kCMTimeInvalid;
}

- (CMTime)bufferedPosition {
    return kCMTimeInvalid;
}

@end
1
ios/Classes/IndexedAudioSource.m
Symbolic link
@@ -0,0 +1 @@
../../darwin/Classes/IndexedAudioSource.m
@@ -1,16 +0,0 @@
#import "IndexedPlayerItem.h"
#import "IndexedAudioSource.h"

@implementation IndexedPlayerItem {
    IndexedAudioSource *_audioSource;
}

- (void)setAudioSource:(IndexedAudioSource *)audioSource {
    _audioSource = audioSource;
}

- (IndexedAudioSource *)audioSource {
    return _audioSource;
}

@end
1
ios/Classes/IndexedPlayerItem.m
Symbolic link
@@ -0,0 +1 @@
../../darwin/Classes/IndexedPlayerItem.m
@@ -1,55 +0,0 @@
#import "JustAudioPlugin.h"
#import "AudioPlayer.h"
#import <AVFoundation/AVFoundation.h>
#include <TargetConditionals.h>

@implementation JustAudioPlugin {
    NSObject<FlutterPluginRegistrar>* _registrar;
    BOOL _configuredSession;
}

+ (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar>*)registrar {
    FlutterMethodChannel* channel = [FlutterMethodChannel
        methodChannelWithName:@"com.ryanheise.just_audio.methods"
              binaryMessenger:[registrar messenger]];
    JustAudioPlugin* instance = [[JustAudioPlugin alloc] initWithRegistrar:registrar];
    [registrar addMethodCallDelegate:instance channel:channel];
}

- (instancetype)initWithRegistrar:(NSObject<FlutterPluginRegistrar> *)registrar {
    self = [super init];
    NSAssert(self, @"super init cannot be nil");
    _registrar = registrar;
    return self;
}

- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result {
    if ([@"init" isEqualToString:call.method]) {
        NSArray* args = (NSArray*)call.arguments;
        NSString* playerId = args[0];
        /*AudioPlayer* player =*/ [[AudioPlayer alloc] initWithRegistrar:_registrar playerId:playerId configuredSession:_configuredSession];
        result(nil);
    } else if ([@"setIosCategory" isEqualToString:call.method]) {
#if TARGET_OS_IPHONE
        NSNumber* categoryIndex = (NSNumber*)call.arguments;
        AVAudioSessionCategory category = nil;
        switch (categoryIndex.integerValue) {
            case 0: category = AVAudioSessionCategoryAmbient; break;
            case 1: category = AVAudioSessionCategorySoloAmbient; break;
            case 2: category = AVAudioSessionCategoryPlayback; break;
            case 3: category = AVAudioSessionCategoryRecord; break;
            case 4: category = AVAudioSessionCategoryPlayAndRecord; break;
            case 5: category = AVAudioSessionCategoryMultiRoute; break;
        }
        if (category) {
            _configuredSession = YES;
        }
        [[AVAudioSession sharedInstance] setCategory:category error:nil];
#endif
        result(nil);
    } else {
        result(FlutterMethodNotImplemented);
    }
}

@end
1
ios/Classes/JustAudioPlugin.m
Symbolic link
@@ -0,0 +1 @@
../../darwin/Classes/JustAudioPlugin.m
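The handler above maps category indices 0-5 onto the corresponding `AVAudioSession` categories. From this release the Dart API drops `setIosCategory` and the session is configured through the audio_session package instead; a sketch, assuming audio_session's `configure` API and its `music` preset:

```dart
import 'package:audio_session/audio_session.dart';

Future<void> configureSession() async {
  final session = await AudioSession.instance;
  // Roughly equivalent to the old setIosCategory(IosCategory.playback):
  // the music preset uses AVAudioSessionCategoryPlayback on iOS.
  await session.configure(AudioSessionConfiguration.music());
}
```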
@@ -1,53 +0,0 @@
#import "AudioSource.h"
#import "LoopingAudioSource.h"
#import <AVFoundation/AVFoundation.h>

@implementation LoopingAudioSource {
    // An array of duplicates
    NSArray<AudioSource *> *_audioSources; // <AudioSource *>
}

- (instancetype)initWithId:(NSString *)sid audioSources:(NSArray<AudioSource *> *)audioSources {
    self = [super initWithId:sid];
    NSAssert(self, @"super init cannot be nil");
    _audioSources = audioSources;
    return self;
}

- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex {
    for (int i = 0; i < [_audioSources count]; i++) {
        treeIndex = [_audioSources[i] buildSequence:sequence treeIndex:treeIndex];
    }
    return treeIndex;
}

- (void)findById:(NSString *)sourceId matches:(NSMutableArray<AudioSource *> *)matches {
    [super findById:sourceId matches:matches];
    for (int i = 0; i < [_audioSources count]; i++) {
        [_audioSources[i] findById:sourceId matches:matches];
    }
}

- (NSArray *)getShuffleOrder {
    NSMutableArray *order = [NSMutableArray new];
    int offset = (int)[order count];
    for (int i = 0; i < [_audioSources count]; i++) {
        AudioSource *audioSource = _audioSources[i];
        NSArray *childShuffleOrder = [audioSource getShuffleOrder];
        for (int j = 0; j < [childShuffleOrder count]; j++) {
            [order addObject:@([childShuffleOrder[j] integerValue] + offset)];
        }
        offset += [childShuffleOrder count];
    }
    return order;
}

- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex {
    // TODO: This should probably shuffle the same way on all duplicates.
    for (int i = 0; i < [_audioSources count]; i++) {
        treeIndex = [_audioSources[i] shuffle:treeIndex currentIndex:currentIndex];
    }
    return treeIndex;
}

@end
1
ios/Classes/LoopingAudioSource.m
Symbolic link
@@ -0,0 +1 @@
../../darwin/Classes/LoopingAudioSource.m
@@ -1,79 +0,0 @@
#import "UriAudioSource.h"
#import "IndexedAudioSource.h"
#import "IndexedPlayerItem.h"
#import <AVFoundation/AVFoundation.h>

@implementation UriAudioSource {
    NSString *_uri;
    IndexedPlayerItem *_playerItem;
    /* CMTime _duration; */
}

- (instancetype)initWithId:(NSString *)sid uri:(NSString *)uri {
    self = [super initWithId:sid];
    NSAssert(self, @"super init cannot be nil");
    _uri = uri;
    if ([_uri hasPrefix:@"file://"]) {
        _playerItem = [[IndexedPlayerItem alloc] initWithURL:[NSURL fileURLWithPath:[_uri substringFromIndex:7]]];
    } else {
        _playerItem = [[IndexedPlayerItem alloc] initWithURL:[NSURL URLWithString:_uri]];
    }
    if (@available(macOS 10.13, iOS 11.0, *)) {
        // This does the best at reducing distortion on voice with speeds below 1.0
        _playerItem.audioTimePitchAlgorithm = AVAudioTimePitchAlgorithmTimeDomain;
    }
    /* NSKeyValueObservingOptions options =
           NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew;
       [_playerItem addObserver:self
                     forKeyPath:@"duration"
                        options:options
                        context:nil]; */
    return self;
}

- (IndexedPlayerItem *)playerItem {
    return _playerItem;
}

- (NSArray *)getShuffleOrder {
    return @[@(0)];
}

- (void)play:(AVQueuePlayer *)player {
}

- (void)pause:(AVQueuePlayer *)player {
}

- (void)stop:(AVQueuePlayer *)player {
}

- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler {
    if (!completionHandler || (_playerItem.status == AVPlayerItemStatusReadyToPlay)) {
        [_playerItem seekToTime:position toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero completionHandler:completionHandler];
    }
}

- (CMTime)duration {
    return _playerItem.duration;
}

- (void)setDuration:(CMTime)duration {
}

- (CMTime)position {
    return _playerItem.currentTime;
}

- (CMTime)bufferedPosition {
    NSValue *last = _playerItem.loadedTimeRanges.lastObject;
    if (last) {
        CMTimeRange timeRange = [last CMTimeRangeValue];
        return CMTimeAdd(timeRange.start, timeRange.duration);
    } else {
        return _playerItem.currentTime;
    }
}

@end
1
ios/Classes/UriAudioSource.m
Symbolic link
@@ -0,0 +1 @@
../../darwin/Classes/UriAudioSource.m
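`UriAudioSource` above strips the `file://` prefix and builds the player item with `fileURLWithPath`, while other URIs go through `URLWithString`; it also selects the time-domain pitch algorithm to reduce voice distortion at speeds below 1.0. The corresponding Dart convenience calls, sketched with example paths:

```dart
Future<void> loadExamples(AudioPlayer player) async {
  // A remote progressive stream; the native side uses URLWithString:
  await player.setUrl('https://example.com/song.mp3');
  // A local file; the native side strips file:// and uses fileURLWithPath:
  await player.setFilePath('/path/to/song.mp3');
}
```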
@@ -1,6 +1,8 @@
 import 'dart:async';
 import 'dart:io';
+import 'dart:math';
 
+import 'package:audio_session/audio_session.dart';
 import 'package:flutter/foundation.dart';
 import 'package:flutter/services.dart';
 import 'package:flutter/widgets.dart';
@@ -37,26 +39,6 @@ class AudioPlayer {
     return MethodChannel('com.ryanheise.just_audio.methods.$id');
   }
 
-  /// Configure the audio session category on iOS. This method should be called
-  /// before playing any audio. It has no effect on Android or Flutter for Web.
-  ///
-  /// Note that the default category on iOS is [IosCategory.soloAmbient], but
-  /// for a typical media app, Apple recommends setting this to
-  /// [IosCategory.playback]. If you don't call this method, `just_audio` will
-  /// respect any prior category that was already set on your app's audio
-  /// session and will leave it alone. If it hasn't been previously set, this
-  /// will be [IosCategory.soloAmbient]. But if another audio plugin in your
-  /// app has configured a particular category, that will also be left alone.
-  ///
-  /// Note: If you use other audio plugins in conjunction with this one, it is
-  /// possible that each of those audio plugins may override the setting you
-  /// choose here. (You may consider asking the developers of the other plugins
-  /// to provide similar configurability so that you have complete control over
-  /// setting the overall category that you want for your app.)
-  static Future<void> setIosCategory(IosCategory category) async {
-    await _mainChannel.invokeMethod('setIosCategory', category.index);
-  }
-
   final Future<MethodChannel> _channel;
   final String _id;
   _ProxyHttpServer _proxy;
@@ -76,16 +58,27 @@ class AudioPlayer {
   final _bufferedPositionSubject = BehaviorSubject<Duration>();
   final _icyMetadataSubject = BehaviorSubject<IcyMetadata>();
   final _playerStateSubject = BehaviorSubject<PlayerState>();
+  final _sequenceSubject = BehaviorSubject<List<IndexedAudioSource>>();
   final _currentIndexSubject = BehaviorSubject<int>();
+  final _sequenceStateSubject = BehaviorSubject<SequenceState>();
   final _loopModeSubject = BehaviorSubject<LoopMode>();
   final _shuffleModeEnabledSubject = BehaviorSubject<bool>();
+  final _androidAudioSessionIdSubject = BehaviorSubject<int>();
   BehaviorSubject<Duration> _positionSubject;
   bool _automaticallyWaitsToMinimizeStalling = true;
+  bool _playInterrupted = false;
 
-  /// Creates an [AudioPlayer].
-  factory AudioPlayer() => AudioPlayer._internal(_uuid.v4());
+  /// Creates an [AudioPlayer]. The player will automatically pause/duck and
+  /// resume/unduck when audio interruptions occur (e.g. a phone call) or when
+  /// headphones are unplugged. If you wish to handle audio interruptions
+  /// manually, set [handleInterruptions] to `false` and interface directly
+  /// with the audio session via the
+  /// [audio_session](https://pub.dev/packages/audio_session) package.
+  factory AudioPlayer({bool handleInterruptions = true}) =>
+      AudioPlayer._internal(_uuid.v4(), handleInterruptions);
 
-  AudioPlayer._internal(this._id) : _channel = _init(_id) {
+  AudioPlayer._internal(this._id, bool handleInterruptions)
+      : _channel = _init(_id) {
     _playbackEvent = PlaybackEvent(
       processingState: ProcessingState.none,
       updatePosition: Duration.zero,
@@ -94,6 +87,8 @@ class AudioPlayer {
       duration: null,
       icyMetadata: null,
       currentIndex: null,
+      androidAudioSessionId: null,
+      qualityString: ''
     );
     _playbackEventSubject.add(_playbackEvent);
     _eventChannelStream = EventChannel('com.ryanheise.just_audio.events.$_id')
@@ -118,6 +113,7 @@ class AudioPlayer {
             ? null
             : IcyMetadata.fromJson(data['icyMetadata']),
         currentIndex: data['currentIndex'],
+        androidAudioSessionId: data['androidAudioSessionId'],
         qualityString: data['qualityString']
       );
       //print("created event object with state: ${_playbackEvent.state}");
@@ -128,10 +124,6 @@ class AudioPlayer {
         rethrow;
       }
     });
-    _eventChannelStreamSubscription = _eventChannelStream.listen(
-      _playbackEventSubject.add,
-      onError: _playbackEventSubject.addError,
-    );
     _processingStateSubject.addStream(playbackEventStream
         .map((event) => event.processingState)
         .distinct()
@@ -148,6 +140,21 @@ class AudioPlayer {
         .map((event) => event.currentIndex)
         .distinct()
         .handleError((err, stack) {/* noop */}));
+    _androidAudioSessionIdSubject.addStream(playbackEventStream
+        .map((event) => event.androidAudioSessionId)
+        .distinct()
+        .handleError((err, stack) {/* noop */}));
+    _sequenceStateSubject.addStream(
+        Rx.combineLatest2<List<IndexedAudioSource>, int, SequenceState>(
+      sequenceStream,
+      currentIndexStream,
+      (sequence, currentIndex) {
+        if (sequence == null) return null;
+        if (currentIndex == null) currentIndex = 0;
+        currentIndex = min(sequence.length - 1, max(0, currentIndex));
+        return SequenceState(sequence, currentIndex);
+      },
+    ).distinct().handleError((err, stack) {/* noop */}));
     _playerStateSubject.addStream(
         Rx.combineLatest2<bool, PlaybackEvent, PlayerState>(
             playingStream,
@@ -155,6 +162,62 @@ class AudioPlayer {
             (playing, event) => PlayerState(playing, event.processingState))
         .distinct()
         .handleError((err, stack) {/* noop */}));
+    _eventChannelStreamSubscription = _eventChannelStream.listen(
+      _playbackEventSubject.add,
+      onError: _playbackEventSubject.addError,
+    );
+    _sequenceSubject.add(null);
+    // Respond to changes to AndroidAudioAttributes configuration.
+    AudioSession.instance.then((audioSession) {
+      audioSession.configurationStream
+          .map((conf) => conf?.androidAudioAttributes)
+          .where((attributes) => attributes != null)
+          .distinct()
+          .listen(setAndroidAudioAttributes);
+    });
+    if (handleInterruptions) {
+      AudioSession.instance.then((session) {
+        session.becomingNoisyEventStream.listen((_) {
+          pause();
+        });
+        session.interruptionEventStream.listen((event) {
+          if (event.begin) {
+            switch (event.type) {
+              case AudioInterruptionType.duck:
+                if (session.androidAudioAttributes.usage ==
+                    AndroidAudioUsage.game) {
+                  setVolume(volume / 2);
+                }
+                _playInterrupted = false;
+                break;
+              case AudioInterruptionType.pause:
+              case AudioInterruptionType.unknown:
+                if (playing) {
+                  pause();
+                  // Although pause is async and sets _playInterrupted = false,
+                  // this is done in the sync portion.
+                  _playInterrupted = true;
+                }
+                break;
+            }
+          } else {
+            switch (event.type) {
+              case AudioInterruptionType.duck:
+                setVolume(min(1.0, volume * 2));
+                _playInterrupted = false;
+                break;
+              case AudioInterruptionType.pause:
+                if (_playInterrupted) play();
+                _playInterrupted = false;
+                break;
+              case AudioInterruptionType.unknown:
+                _playInterrupted = false;
+                break;
+            }
+          }
+        });
+      });
+    }
   }
 
   /// The latest [PlaybackEvent].
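With the default `handleInterruptions: true`, the constructor wires up the pause/duck behaviour shown above; opting out leaves interruption policy entirely to the app. A sketch of manual handling using the same audio_session streams (the policy here is illustrative, not the plugin's default):

```dart
Future<AudioPlayer> createManualPlayer() async {
  final player = AudioPlayer(handleInterruptions: false);
  final session = await AudioSession.instance;
  session.interruptionEventStream.listen((event) {
    // Pause on interruptions but deliberately never auto-resume.
    if (event.begin && event.type == AudioInterruptionType.pause) {
      player.pause();
    }
  });
  return player;
}
```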
@@ -217,17 +280,31 @@ class AudioPlayer {
   /// A stream of [PlayerState]s.
   Stream<PlayerState> get playerStateStream => _playerStateSubject.stream;
 
+  /// The current sequence of indexed audio sources.
+  List<IndexedAudioSource> get sequence => _sequenceSubject.value;
+
+  /// A stream broadcasting the current sequence of indexed audio sources.
+  Stream<List<IndexedAudioSource>> get sequenceStream =>
+      _sequenceSubject.stream;
+
   /// The index of the current item.
   int get currentIndex => _currentIndexSubject.value;
 
   /// A stream broadcasting the current item.
   Stream<int> get currentIndexStream => _currentIndexSubject.stream;
 
+  /// The current [SequenceState], or `null` if either [sequence] or
+  /// [currentIndex] is `null`.
+  SequenceState get sequenceState => _sequenceStateSubject.value;
+
+  /// A stream broadcasting the current [SequenceState].
+  Stream<SequenceState> get sequenceStateStream => _sequenceStateSubject.stream;
+
   /// Whether there is another item after the current index.
   bool get hasNext =>
       _audioSource != null &&
       currentIndex != null &&
-      currentIndex + 1 < _audioSource.sequence.length;
+      currentIndex + 1 < sequence.length;
 
   /// Whether there is another item before the current index.
   bool get hasPrevious =>
@@ -246,6 +323,13 @@ class AudioPlayer {
   Stream<bool> get shuffleModeEnabledStream =>
       _shuffleModeEnabledSubject.stream;
 
+  /// The current Android AudioSession ID or `null` if not set.
+  int get androidAudioSessionId => _playbackEvent.androidAudioSessionId;
+
+  /// Broadcasts the current Android AudioSession ID or `null` if not set.
+  Stream<int> get androidAudioSessionIdStream =>
+      _androidAudioSessionIdSubject.stream;
+
   /// Whether the player should automatically delay playback in order to
   /// minimize stalling. (iOS 10.0 or later only)
   bool get automaticallyWaitsToMinimizeStalling =>
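A short sketch of the new getters in use; both streams come straight from the API added above:

```dart
void observePlayer(AudioPlayer player) {
  // React to playlist and current-item changes together:
  player.sequenceStateStream.listen((state) {
    if (state == null) return;
    print('Item ${state.currentIndex + 1} of ${state.sequence.length}');
  });
  // On Android, e.g. an equalizer could attach to the session ID:
  player.androidAudioSessionIdStream.listen((id) {
    if (id != null) print('Android audio session ID: $id');
  });
}
```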
@@ -324,6 +408,7 @@ class AudioPlayer {
           timer.cancel();
           durationSubscription?.cancel();
           playbackEventSubscription?.cancel();
+          // This will in turn close _positionSubject.
           controller.close();
           return;
         }
@@ -363,10 +448,10 @@ class AudioPlayer {
   /// Convenience method to load audio from an asset, equivalent to:
   ///
   /// ```
-  /// load(AudioSource.uri(Uri.parse('asset://$filePath')));
+  /// load(AudioSource.uri(Uri.parse('asset:///$assetPath')));
   /// ```
   Future<Duration> setAsset(String assetPath) =>
-      load(AudioSource.uri(Uri.parse('asset://$assetPath')));
+      load(AudioSource.uri(Uri.parse('asset:///$assetPath')));
 
   /// Loads audio from an [AudioSource] and completes when the audio is ready
   /// to play with the duration of that audio, or null if the duration is unknown.
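The scheme change from `asset://` to `asset:///` makes the asset path parse as the URI path, with a leading slash that `_setup` strips later in this diff. The two calls below are equivalent after this change (the asset path is an example):

```dart
Future<void> loadIntro(AudioPlayer player) async {
  await player.setAsset('audio/intro.mp3');
  // ...is equivalent to:
  await player.load(AudioSource.uri(Uri.parse('asset:///audio/intro.mp3')));
}
```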
@@ -379,6 +464,7 @@ class AudioPlayer {
   Future<Duration> load(AudioSource source) async {
     try {
       _audioSource = source;
+      _broadcastSequence();
       final duration = await _load(source);
       // Wait for loading state to pass.
       await processingStateStream
@@ -386,11 +472,14 @@ class AudioPlayer {
       return duration;
     } catch (e) {
       _audioSource = null;
-      _audioSources.clear();
       rethrow;
     }
   }
 
+  void _broadcastSequence() {
+    _sequenceSubject.add(_audioSource?.sequence);
+  }
+
   _registerAudioSource(AudioSource source) {
     _audioSources[source._id] = source;
   }
@@ -453,16 +542,24 @@ class AudioPlayer {
   /// [stop] playback on completion, you can call either method as soon as
   /// [processingState] becomes [ProcessingState.completed] by listening to
   /// [processingStateStream].
+  ///
+  /// This method activates the audio session before playback, and will do
+  /// nothing if activation of the audio session fails for any reason.
   Future<void> play() async {
     if (playing) return;
-    _playingSubject.add(true);
-    await _invokeMethod('play');
+    _playInterrupted = false;
+    final audioSession = await AudioSession.instance;
+    if (await audioSession.setActive(true)) {
+      _playingSubject.add(true);
+      await _invokeMethod('play');
+    }
   }
 
   /// Pauses the currently playing media. This method does nothing if
   /// ![playing].
   Future<void> pause() async {
     if (!playing) return;
+    _playInterrupted = false;
     // Update local state immediately so that queries aren't surprised.
     _playbackEvent = _playbackEvent.copyWith(
       updatePosition: position,
@@ -558,6 +655,15 @@ class AudioPlayer {
     }
   }
 
+  /// Set the Android audio attributes for this player. Has no effect on other
+  /// platforms. This will cause a new Android AudioSession ID to be generated.
+  Future<void> setAndroidAudioAttributes(
+      AndroidAudioAttributes audioAttributes) async {
+    if (audioAttributes == null) return;
+    await _invokeMethod(
+        'setAndroidAudioAttributes', [audioAttributes.toJson()]);
+  }
+
   /// Release all resources associated with this player. You must invoke this
   /// after you are done with the player.
   Future<void> dispose() async {
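A usage sketch for the new method, assuming audio_session's `AndroidAudioAttributes` constructor with its `contentType` and `usage` named parameters:

```dart
Future<void> useMediaAttributes(AudioPlayer player) async {
  // No effect off Android. Note: per the doc above, this regenerates
  // the Android AudioSession ID.
  await player.setAndroidAudioAttributes(AndroidAudioAttributes(
    contentType: AndroidAudioContentType.music,
    usage: AndroidAudioUsage.media,
  ));
}
```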
@@ -573,9 +679,7 @@ class AudioPlayer {
     await _playingSubject.close();
     await _volumeSubject.close();
     await _speedSubject.close();
-    if (_positionSubject != null) {
-      await _positionSubject.close();
-    }
+    await _sequenceSubject.close();
   }
 
   Future<dynamic> _invokeMethod(String method, [dynamic args]) async =>
@@ -636,8 +740,10 @@ class PlaybackEvent {
   /// The index of the currently playing item.
   final int currentIndex;
 
-  /// Quality info of current track
-  final String qualityString;
+  /// The current Android AudioSession ID.
+  final int androidAudioSessionId;
+
+  String qualityString;
 
   PlaybackEvent({
     @required this.processingState,
@@ -647,6 +753,7 @@ class PlaybackEvent {
     @required this.duration,
     @required this.icyMetadata,
     @required this.currentIndex,
+    @required this.androidAudioSessionId,
     this.qualityString
   });
 
@@ -659,7 +766,7 @@ class PlaybackEvent {
     Duration duration,
     IcyMetadata icyMetadata,
     UriAudioSource currentIndex,
-    String qualityString
+    int androidAudioSessionId,
   }) =>
       PlaybackEvent(
         processingState: processingState ?? this.processingState,
@@ -669,7 +776,9 @@ class PlaybackEvent {
         duration: duration ?? this.duration,
         icyMetadata: icyMetadata ?? this.icyMetadata,
         currentIndex: currentIndex ?? this.currentIndex,
-        qualityString: qualityString ?? this.qualityString
+        androidAudioSessionId:
+            androidAudioSessionId ?? this.androidAudioSessionId,
+        qualityString: this.qualityString
       );
 
   @override
@@ -787,7 +896,12 @@ class IcyMetadata {
   IcyMetadata({@required this.info, @required this.headers});
 
   IcyMetadata.fromJson(Map json)
-      : this(info: json['info'], headers: json['headers']);
+      : this(
+          info: json['info'] == null ? null : IcyInfo.fromJson(json['info']),
+          headers: json['headers'] == null
+              ? null
+              : IcyHeaders.fromJson(json['headers']),
+        );
 
   @override
   int get hashCode => info.hashCode ^ headers.hashCode;
@@ -797,15 +911,21 @@ class IcyMetadata {
       other is IcyMetadata && other?.info == info && other?.headers == headers;
 }
 
-/// The audio session categories on iOS, to be used with
-/// [AudioPlayer.setIosCategory].
-enum IosCategory {
-  ambient,
-  soloAmbient,
-  playback,
-  record,
-  playAndRecord,
-  multiRoute,
+/// Encapsulates the [sequence] and [currentIndex] state and ensures
+/// consistency such that [currentIndex] is within the range of
+/// [sequence.length]. If [sequence.length] is 0, then [currentIndex] is also
+/// 0.
+class SequenceState {
+  /// The sequence of the current [AudioSource].
+  final List<IndexedAudioSource> sequence;
+
+  /// The index of the current source in the sequence.
+  final int currentIndex;
+
+  SequenceState(this.sequence, this.currentIndex);
+
+  /// The current source in the sequence.
+  IndexedAudioSource get currentSource => sequence[currentIndex];
 }
 
 /// A local proxy HTTP server for making remote GET requests with headers.
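Because the `combineLatest2` transform earlier in this diff clamps `currentIndex` into range before a `SequenceState` is emitted, `currentSource` is safe to read whenever the state is non-null; a brief sketch:

```dart
void printPosition(AudioPlayer player) {
  final state = player.sequenceState;
  if (state != null) {
    // currentIndex is already clamped to [0, sequence.length - 1].
    print('Playing ${state.currentIndex + 1} of ${state.sequence.length}');
  }
}
```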
@@ -944,7 +1064,7 @@ abstract class AudioSource {
   /// stream type on Android. If you know in advance what type of audio stream
   /// it is, you should instantiate [DashAudioSource] or [HlsAudioSource]
   /// directly.
-  static AudioSource uri(Uri uri, {Map headers, Object tag}) {
+  static AudioSource uri(Uri uri, {Map headers, dynamic tag}) {
     bool hasExtension(Uri uri, String extension) =>
         uri.path.toLowerCase().endsWith('.$extension') ||
         uri.fragment.toLowerCase().endsWith('.$extension');
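`hasExtension` checks both the path and the fragment, so the factory can sniff the stream type even when the extension is hidden behind a fragment. A sketch, assuming the usual `.mpd`/`.m3u8` extension dispatch used by this factory (URLs are placeholders):

```dart
// Each call is dispatched to the matching subtype by extension:
final dash = AudioSource.uri(Uri.parse('https://example.com/stream.mpd'));
final hls = AudioSource.uri(Uri.parse('https://example.com/stream.m3u8'));
final mp3 = AudioSource.uri(Uri.parse('https://example.com/track.mp3'));
```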
@@ -1010,7 +1130,7 @@ abstract class AudioSource {
 
 /// An [AudioSource] that can appear in a sequence.
 abstract class IndexedAudioSource extends AudioSource {
-  final Object tag;
+  final dynamic tag;
 
   IndexedAudioSource(this.tag);
 
@@ -1026,7 +1146,7 @@ abstract class UriAudioSource extends IndexedAudioSource {
   Uri _overrideUri;
   File _cacheFile;
 
-  UriAudioSource(this.uri, {this.headers, Object tag, @required String type})
+  UriAudioSource(this.uri, {this.headers, dynamic tag, @required String type})
       : _type = type,
         super(tag);
 
@@ -1034,7 +1154,8 @@ abstract class UriAudioSource extends IndexedAudioSource {
   Future<void> _setup(AudioPlayer player) async {
     await super._setup(player);
     if (uri.scheme == 'asset') {
-      _overrideUri = Uri.file((await _loadAsset(uri.path)).path);
+      _overrideUri = Uri.file(
+          (await _loadAsset(uri.path.replaceFirst(RegExp(r'^/'), ''))).path);
     } else if (headers != null) {
       _overrideUri = player._proxy.addUrl(uri, headers);
     }
@@ -1077,7 +1198,7 @@ abstract class UriAudioSource extends IndexedAudioSource {
   };
 }
 
-/// An [AudioSource] representing a regular media file such asn an MP3 or M4A
+/// An [AudioSource] representing a regular media file such as an MP3 or M4A
 /// file. The following URI schemes are supported:
 ///
 /// * file: loads from a local file (provided you give your app permission to
@@ -1088,26 +1209,38 @@ abstract class UriAudioSource extends IndexedAudioSource {
 /// On platforms except for the web, the supplied [headers] will be passed with
 /// the HTTP(S) request.
 class ProgressiveAudioSource extends UriAudioSource {
-  ProgressiveAudioSource(Uri uri, {Map headers, Object tag})
+  ProgressiveAudioSource(Uri uri, {Map headers, dynamic tag})
       : super(uri, headers: headers, tag: tag, type: 'progressive');
 }
 
-/// An [AudioSource] representing a DASH stream.
+/// An [AudioSource] representing a DASH stream. The following URI schemes are
+/// supported:
+///
+/// * file: loads from a local file (provided you give your app permission to
+///   access that file).
+/// * asset: loads from a Flutter asset (not supported on Web).
+/// * http(s): loads from an HTTP(S) resource.
 ///
 /// On platforms except for the web, the supplied [headers] will be passed with
-/// the HTTP(S) request. Currently headers are not recursively applied to items
+/// the HTTP(S) request. Currently headers are not applied recursively.
 class DashAudioSource extends UriAudioSource {
-  DashAudioSource(Uri uri, {Map headers, Object tag})
+  DashAudioSource(Uri uri, {Map headers, dynamic tag})
       : super(uri, headers: headers, tag: tag, type: 'dash');
 }
 
-/// An [AudioSource] representing an HLS stream.
+/// An [AudioSource] representing an HLS stream. The following URI schemes are
+/// supported:
+///
+/// * file: loads from a local file (provided you give your app permission to
+///   access that file).
+/// * asset: loads from a Flutter asset (not supported on Web).
+/// * http(s): loads from an HTTP(S) resource.
 ///
 /// On platforms except for the web, the supplied [headers] will be passed with
 /// the HTTP(S) request. Currently headers are not applied recursively.
 class HlsAudioSource extends UriAudioSource {
-  HlsAudioSource(Uri uri, {Map headers, Object tag})
+  HlsAudioSource(Uri uri, {Map headers, dynamic tag})
       : super(uri, headers: headers, tag: tag, type: 'hls');
 }
@@ -1138,6 +1271,7 @@ class ConcatenatingAudioSource extends AudioSource {
   /// (Untested) Appends an [AudioSource].
   Future<void> add(AudioSource audioSource) async {
     children.add(audioSource);
+    _player._broadcastSequence();
     if (_player != null) {
       await _player
           ._invokeMethod('concatenating.add', [_id, audioSource.toJson()]);
@@ -1147,6 +1281,7 @@ class ConcatenatingAudioSource extends AudioSource {
   /// (Untested) Inserts an [AudioSource] at [index].
   Future<void> insert(int index, AudioSource audioSource) async {
     children.insert(index, audioSource);
+    _player._broadcastSequence();
     if (_player != null) {
       await _player._invokeMethod(
           'concatenating.insert', [_id, index, audioSource.toJson()]);
@@ -1156,6 +1291,7 @@ class ConcatenatingAudioSource extends AudioSource {
   /// (Untested) Appends multiple [AudioSource]s.
   Future<void> addAll(List<AudioSource> children) async {
     this.children.addAll(children);
+    _player._broadcastSequence();
     if (_player != null) {
       await _player._invokeMethod('concatenating.addAll',
           [_id, children.map((s) => s.toJson()).toList()]);
@@ -1165,6 +1301,7 @@ class ConcatenatingAudioSource extends AudioSource {
   /// (Untested) Insert multiple [AudioSource]s at [index].
   Future<void> insertAll(int index, List<AudioSource> children) async {
     this.children.insertAll(index, children);
+    _player._broadcastSequence();
     if (_player != null) {
       await _player._invokeMethod('concatenating.insertAll',
           [_id, index, children.map((s) => s.toJson()).toList()]);
@@ -1175,6 +1312,7 @@ class ConcatenatingAudioSource extends AudioSource {
   /// [ConcatenatingAudioSource] has already been loaded.
   Future<void> removeAt(int index) async {
     children.removeAt(index);
+    _player._broadcastSequence();
     if (_player != null) {
       await _player._invokeMethod('concatenating.removeAt', [_id, index]);
     }
@@ -1184,6 +1322,7 @@ class ConcatenatingAudioSource extends AudioSource {
   /// to [end] exclusive.
   Future<void> removeRange(int start, int end) async {
     children.removeRange(start, end);
+    _player._broadcastSequence();
     if (_player != null) {
       await _player
           ._invokeMethod('concatenating.removeRange', [_id, start, end]);
@@ -1193,6 +1332,7 @@ class ConcatenatingAudioSource extends AudioSource {
   /// (Untested) Moves an [AudioSource] from [currentIndex] to [newIndex].
   Future<void> move(int currentIndex, int newIndex) async {
     children.insert(newIndex, children.removeAt(currentIndex));
+    _player._broadcastSequence();
     if (_player != null) {
       await _player
           ._invokeMethod('concatenating.move', [_id, currentIndex, newIndex]);
@@ -1202,6 +1342,7 @@ class ConcatenatingAudioSource extends AudioSource {
   /// (Untested) Removes all [AudioSources].
   Future<void> clear() async {
     children.clear();
+    _player._broadcastSequence();
     if (_player != null) {
       await _player._invokeMethod('concatenating.clear', [_id]);
     }
@@ -1243,7 +1384,7 @@ class ClippingAudioSource extends IndexedAudioSource {
     @required this.child,
     this.start,
     this.end,
-    Object tag,
+    dynamic tag,
   }) : super(tag);
 
   @override
@@ -1281,6 +1422,12 @@ class LoopingAudioSource extends AudioSource {
     this.count,
   }) : super();
 
+  @override
+  Future<void> _setup(AudioPlayer player) async {
+    await super._setup(player);
+    await child._setup(player);
+  }
+
   @override
   List<IndexedAudioSource> get sequence =>
       List.generate(count, (i) => child).expand((s) => s.sequence).toList();
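Each mutation above now calls `_player._broadcastSequence()`, so `sequenceStream` listeners observe edits to a live playlist immediately. A sketch (the URL is a placeholder):

```dart
Future<void> editPlaylist(ConcatenatingAudioSource playlist) async {
  await playlist.add(AudioSource.uri(Uri.parse('https://example.com/d.mp3')));
  await playlist.move(0, 2);
  await playlist.removeAt(1);
}
```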
@@ -96,6 +96,8 @@ abstract class JustAudioPlayer {
         return await concatenatingMove(args[0], args[1], args[2]);
       case "concatenating.clear":
         return await concatenatingClear(args[0]);
+      case "setAndroidAudioAttributes":
+        return null;
       default:
         throw PlatformException(code: 'Unimplemented');
     }
@@ -280,7 +282,7 @@ class Html5AudioPlayer extends JustAudioPlayer {
     if (_shuffleModeEnabled) {
       _audioSourcePlayer?.shuffle(0, _index);
     }
-    return (await _currentAudioSourcePlayer.load()).inMilliseconds;
+    return (await _currentAudioSourcePlayer.load())?.inMilliseconds;
   }
 
   Future<Duration> loadUri(final Uri uri) async {
File diff suppressed because it is too large
1
macos/Classes/AudioPlayer.m
Symbolic link
@@ -0,0 +1 @@
../../darwin/Classes/AudioPlayer.m
@@ -1,37 +0,0 @@
#import "AudioSource.h"
#import <AVFoundation/AVFoundation.h>

@implementation AudioSource {
    NSString *_sourceId;
}

- (instancetype)initWithId:(NSString *)sid {
    self = [super init];
    NSAssert(self, @"super init cannot be nil");
    _sourceId = sid;
    return self;
}

- (NSString *)sourceId {
    return _sourceId;
}

- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex {
    return 0;
}

- (void)findById:(NSString *)sourceId matches:(NSMutableArray<AudioSource *> *)matches {
    if ([_sourceId isEqualToString:sourceId]) {
        [matches addObject:self];
    }
}

- (NSArray *)getShuffleOrder {
    return @[];
}

- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex {
    return 0;
}

@end
1
macos/Classes/AudioSource.m
Symbolic link
@@ -0,0 +1 @@
../../darwin/Classes/AudioSource.m
@@ -1,79 +0,0 @@
#import "AudioSource.h"
#import "ClippingAudioSource.h"
#import "IndexedPlayerItem.h"
#import "UriAudioSource.h"
#import <AVFoundation/AVFoundation.h>

@implementation ClippingAudioSource {
    UriAudioSource *_audioSource;
    CMTime _start;
    CMTime _end;
}

- (instancetype)initWithId:(NSString *)sid audioSource:(UriAudioSource *)audioSource start:(NSNumber *)start end:(NSNumber *)end {
    self = [super initWithId:sid];
    NSAssert(self, @"super init cannot be nil");
    _audioSource = audioSource;
    _start = start == [NSNull null] ? kCMTimeZero : CMTimeMake([start intValue], 1000);
    _end = end == [NSNull null] ? kCMTimeInvalid : CMTimeMake([end intValue], 1000);
    return self;
}

- (UriAudioSource *)audioSource {
    return _audioSource;
}

- (void)findById:(NSString *)sourceId matches:(NSMutableArray<AudioSource *> *)matches {
    [super findById:sourceId matches:matches];
    [_audioSource findById:sourceId matches:matches];
}

- (void)attach:(AVQueuePlayer *)player {
    [super attach:player];
    _audioSource.playerItem.forwardPlaybackEndTime = _end;
    // XXX: Not needed since currentItem observer handles it?
    [self seek:kCMTimeZero];
}

- (IndexedPlayerItem *)playerItem {
    return _audioSource.playerItem;
}

- (NSArray *)getShuffleOrder {
    return @[@(0)];
}

- (void)play:(AVQueuePlayer *)player {
}

- (void)pause:(AVQueuePlayer *)player {
}

- (void)stop:(AVQueuePlayer *)player {
}

- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler {
    if (!completionHandler || (self.playerItem.status == AVPlayerItemStatusReadyToPlay)) {
        CMTime absPosition = CMTimeAdd(_start, position);
        [_audioSource.playerItem seekToTime:absPosition toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero completionHandler:completionHandler];
    }
}

- (CMTime)duration {
    return CMTimeSubtract(CMTIME_IS_INVALID(_end) ? self.playerItem.duration : _end, _start);
}

- (void)setDuration:(CMTime)duration {
}

- (CMTime)position {
    return CMTimeSubtract(self.playerItem.currentTime, _start);
}

- (CMTime)bufferedPosition {
    CMTime pos = CMTimeSubtract(_audioSource.bufferedPosition, _start);
    CMTime dur = [self duration];
    return CMTimeCompare(pos, dur) >= 0 ? dur : pos;
}

@end
1
macos/Classes/ClippingAudioSource.m
Symbolic link
@@ -0,0 +1 @@
../../darwin/Classes/ClippingAudioSource.m
@ -1,109 +0,0 @@
|
|||||||
#import "AudioSource.h"
|
|
||||||
#import "ConcatenatingAudioSource.h"
|
|
||||||
#import <AVFoundation/AVFoundation.h>
|
|
||||||
#import <stdlib.h>
|
|
||||||
|
|
||||||
@implementation ConcatenatingAudioSource {
|
|
||||||
NSMutableArray<AudioSource *> *_audioSources;
|
|
||||||
NSMutableArray<NSNumber *> *_shuffleOrder;
|
|
||||||
}
|
|
||||||
|
|
||||||
- (instancetype)initWithId:(NSString *)sid audioSources:(NSMutableArray<AudioSource *> *)audioSources {
|
|
||||||
self = [super initWithId:sid];
|
|
||||||
NSAssert(self, @"super init cannot be nil");
|
|
||||||
_audioSources = audioSources;
|
|
||||||
return self;
|
|
||||||
}
|
|
||||||
|
|
||||||
- (int)count {
|
|
||||||
return _audioSources.count;
|
|
||||||
}
|
|
||||||
|
|
||||||
- (void)insertSource:(AudioSource *)audioSource atIndex:(int)index {
|
|
||||||
[_audioSources insertObject:audioSource atIndex:index];
|
|
||||||
}
|
|
||||||
|
|
||||||
- (void)removeSourcesFromIndex:(int)start toIndex:(int)end {
|
|
||||||
if (end == -1) end = _audioSources.count;
|
|
||||||
for (int i = start; i < end; i++) {
|
|
||||||
[_audioSources removeObjectAtIndex:start];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
- (void)moveSourceFromIndex:(int)currentIndex toIndex:(int)newIndex {
|
|
||||||
AudioSource *source = _audioSources[currentIndex];
|
|
||||||
[_audioSources removeObjectAtIndex:currentIndex];
|
|
||||||
[_audioSources insertObject:source atIndex:newIndex];
|
|
||||||
}
|
|
||||||
|
|
||||||
- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex {
|
|
||||||
for (int i = 0; i < [_audioSources count]; i++) {
|
|
||||||
treeIndex = [_audioSources[i] buildSequence:sequence treeIndex:treeIndex];
|
|
||||||
}
|
|
||||||
return treeIndex;
|
|
||||||
}
|
|
||||||
|
|
||||||
- (void)findById:(NSString *)sourceId matches:(NSMutableArray<AudioSource *> *)matches {
|
|
||||||
[super findById:sourceId matches:matches];
|
|
||||||
for (int i = 0; i < [_audioSources count]; i++) {
|
|
||||||
[_audioSources[i] findById:sourceId matches:matches];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
- (NSArray *)getShuffleOrder {
|
|
||||||
NSMutableArray *order = [NSMutableArray new];
|
|
||||||
int offset = [order count];
|
|
||||||
NSMutableArray *childOrders = [NSMutableArray new]; // array of array of ints
|
|
||||||
for (int i = 0; i < [_audioSources count]; i++) {
|
|
||||||
AudioSource *audioSource = _audioSources[i];
|
|
||||||
NSArray *childShuffleOrder = [audioSource getShuffleOrder];
|
|
||||||
NSMutableArray *offsetChildShuffleOrder = [NSMutableArray new];
|
|
||||||
for (int j = 0; j < [childShuffleOrder count]; j++) {
|
|
||||||
[offsetChildShuffleOrder addObject:@([childShuffleOrder[j] integerValue] + offset)];
|
|
||||||
}
|
|
||||||
[childOrders addObject:offsetChildShuffleOrder];
|
|
||||||
offset += [childShuffleOrder count];
|
|
||||||
}
|
|
||||||
for (int i = 0; i < [_audioSources count]; i++) {
|
|
||||||
[order addObjectsFromArray:childOrders[[_shuffleOrder[i] integerValue]]];
|
|
||||||
}
|
|
||||||
return order;
|
|
||||||
}
|
|
||||||
|
|
||||||
- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex {
|
|
||||||
int currentChildIndex = -1;
|
|
||||||
for (int i = 0; i < [_audioSources count]; i++) {
|
|
||||||
int indexBefore = treeIndex;
|
|
||||||
AudioSource *child = _audioSources[i];
|
|
||||||
treeIndex = [child shuffle:treeIndex currentIndex:currentIndex];
|
|
||||||
if (currentIndex >= indexBefore && currentIndex < treeIndex) {
|
|
||||||
currentChildIndex = i;
|
|
||||||
} else {}
|
|
||||||
}
|
|
||||||
// Shuffle so that the current child is first in the shuffle order
|
|
||||||
_shuffleOrder = [NSMutableArray arrayWithCapacity:[_audioSources count]];
|
|
||||||
for (int i = 0; i < [_audioSources count]; i++) {
|
|
||||||
[_shuffleOrder addObject:@(0)];
|
|
||||||
}
|
|
||||||
NSLog(@"shuffle: audioSources.count=%d and shuffleOrder.count=%d", [_audioSources count], [_shuffleOrder count]);
|
|
||||||
// First generate a random shuffle
|
|
||||||
for (int i = 0; i < [_audioSources count]; i++) {
|
|
||||||
int j = arc4random_uniform(i + 1);
|
|
||||||
_shuffleOrder[i] = _shuffleOrder[j];
|
|
||||||
_shuffleOrder[j] = @(i);
|
|
||||||
}
|
|
||||||
// Then bring currentIndex to the front
|
|
||||||
if (currentChildIndex != -1) {
|
|
||||||
for (int i = 1; i < [_audioSources count]; i++) {
|
|
||||||
if ([_shuffleOrder[i] integerValue] == currentChildIndex) {
|
|
||||||
NSNumber *v = _shuffleOrder[0];
|
|
||||||
_shuffleOrder[0] = _shuffleOrder[i];
|
|
||||||
_shuffleOrder[i] = v;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return treeIndex;
|
|
||||||
}
|
|
||||||
|
|
||||||
@end
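The random permutation generated in shuffle: above is the classic Fisher-Yates shuffle in its "inside-out" form. A standalone sketch of the same technique (hypothetical helper, not part of the plugin):

#include <stdlib.h>
#include <stdint.h>

// Inside-out Fisher-Yates: fills order[0..n-1] with a uniformly random
// permutation of 0..n-1, mirroring the loop in shuffle: above.
static void fisherYatesOrder(int *order, int n) {
    for (int i = 0; i < n; i++) {
        int j = (int)arc4random_uniform((uint32_t)(i + 1));
        order[i] = order[j];  // copy the entry at j up to i...
        order[j] = i;         // ...then place the new element i at j.
    }
}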
1
macos/Classes/ConcatenatingAudioSource.m
Symbolic link
@ -0,0 +1 @@
../../darwin/Classes/ConcatenatingAudioSource.m
@ -1,68 +0,0 @@
#import "IndexedAudioSource.h"
|
|
||||||
#import "IndexedPlayerItem.h"
|
|
||||||
#import <AVFoundation/AVFoundation.h>
|
|
||||||
|
|
||||||
@implementation IndexedAudioSource {
|
|
||||||
BOOL _isAttached;
|
|
||||||
}
|
|
||||||
|
|
||||||
- (instancetype)initWithId:(NSString *)sid {
|
|
||||||
self = [super init];
|
|
||||||
NSAssert(self, @"super init cannot be nil");
|
|
||||||
_isAttached = NO;
|
|
||||||
return self;
|
|
||||||
}
|
|
||||||
|
|
||||||
- (IndexedPlayerItem *)playerItem {
|
|
||||||
return nil;
|
|
||||||
}
|
|
||||||
|
|
||||||
- (BOOL)isAttached {
|
|
||||||
return _isAttached;
|
|
||||||
}
|
|
||||||
|
|
||||||
- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex {
|
|
||||||
[sequence addObject:self];
|
|
||||||
return treeIndex + 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex {
|
|
||||||
return treeIndex + 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
- (void)attach:(AVQueuePlayer *)player {
|
|
||||||
_isAttached = YES;
|
|
||||||
}
|
|
||||||
|
|
||||||
- (void)play:(AVQueuePlayer *)player {
|
|
||||||
}
|
|
||||||
|
|
||||||
- (void)pause:(AVQueuePlayer *)player {
|
|
||||||
}
|
|
||||||
|
|
||||||
- (void)stop:(AVQueuePlayer *)player {
|
|
||||||
}
|
|
||||||
|
|
||||||
- (void)seek:(CMTime)position {
|
|
||||||
[self seek:position completionHandler:nil];
|
|
||||||
}
|
|
||||||
|
|
||||||
- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler {
|
|
||||||
}
|
|
||||||
|
|
||||||
- (CMTime)duration {
|
|
||||||
return kCMTimeInvalid;
|
|
||||||
}
|
|
||||||
|
|
||||||
- (void)setDuration:(CMTime)duration {
|
|
||||||
}
|
|
||||||
|
|
||||||
- (CMTime)position {
|
|
||||||
return kCMTimeInvalid;
|
|
||||||
}
|
|
||||||
|
|
||||||
- (CMTime)bufferedPosition {
|
|
||||||
return kCMTimeInvalid;
|
|
||||||
}
|
|
||||||
|
|
||||||
@end
1
macos/Classes/IndexedAudioSource.m
Symbolic link
@ -0,0 +1 @@
../../darwin/Classes/IndexedAudioSource.m
@ -1,16 +0,0 @@
#import "IndexedPlayerItem.h"
|
|
||||||
#import "IndexedAudioSource.h"
|
|
||||||
|
|
||||||
@implementation IndexedPlayerItem {
|
|
||||||
IndexedAudioSource *_audioSource;
|
|
||||||
}
|
|
||||||
|
|
||||||
-(void)setAudioSource:(IndexedAudioSource *)audioSource {
|
|
||||||
_audioSource = audioSource;
|
|
||||||
}
|
|
||||||
|
|
||||||
-(IndexedAudioSource *)audioSource {
|
|
||||||
return _audioSource;
|
|
||||||
}
|
|
||||||
|
|
||||||
@end
1
macos/Classes/IndexedPlayerItem.m
Symbolic link
@ -0,0 +1 @@
../../darwin/Classes/IndexedPlayerItem.m
@ -1,55 +0,0 @@
#import "JustAudioPlugin.h"
|
|
||||||
#import "AudioPlayer.h"
|
|
||||||
#import <AVFoundation/AVFoundation.h>
|
|
||||||
#include <TargetConditionals.h>
|
|
||||||
|
|
||||||
@implementation JustAudioPlugin {
|
|
||||||
NSObject<FlutterPluginRegistrar>* _registrar;
|
|
||||||
BOOL _configuredSession;
|
|
||||||
}
|
|
||||||
|
|
||||||
+ (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar>*)registrar {
|
|
||||||
FlutterMethodChannel* channel = [FlutterMethodChannel
|
|
||||||
methodChannelWithName:@"com.ryanheise.just_audio.methods"
|
|
||||||
binaryMessenger:[registrar messenger]];
|
|
||||||
JustAudioPlugin* instance = [[JustAudioPlugin alloc] initWithRegistrar:registrar];
|
|
||||||
[registrar addMethodCallDelegate:instance channel:channel];
|
|
||||||
}
|
|
||||||
|
|
||||||
- (instancetype)initWithRegistrar:(NSObject<FlutterPluginRegistrar> *)registrar {
|
|
||||||
self = [super init];
|
|
||||||
NSAssert(self, @"super init cannot be nil");
|
|
||||||
_registrar = registrar;
|
|
||||||
return self;
|
|
||||||
}
|
|
||||||
|
|
||||||
- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result {
|
|
||||||
if ([@"init" isEqualToString:call.method]) {
|
|
||||||
NSArray* args = (NSArray*)call.arguments;
|
|
||||||
NSString* playerId = args[0];
|
|
||||||
/*AudioPlayer* player =*/ [[AudioPlayer alloc] initWithRegistrar:_registrar playerId:playerId configuredSession:_configuredSession];
|
|
||||||
result(nil);
|
|
||||||
} else if ([@"setIosCategory" isEqualToString:call.method]) {
|
|
||||||
#if TARGET_OS_IPHONE
|
|
||||||
NSNumber* categoryIndex = (NSNumber*)call.arguments;
|
|
||||||
AVAudioSessionCategory category = nil;
|
|
||||||
switch (categoryIndex.integerValue) {
|
|
||||||
case 0: category = AVAudioSessionCategoryAmbient; break;
|
|
||||||
case 1: category = AVAudioSessionCategorySoloAmbient; break;
|
|
||||||
case 2: category = AVAudioSessionCategoryPlayback; break;
|
|
||||||
case 3: category = AVAudioSessionCategoryRecord; break;
|
|
||||||
case 4: category = AVAudioSessionCategoryPlayAndRecord; break;
|
|
||||||
case 5: category = AVAudioSessionCategoryMultiRoute; break;
|
|
||||||
}
|
|
||||||
if (category) {
|
|
||||||
_configuredSession = YES;
|
|
||||||
}
|
|
||||||
[[AVAudioSession sharedInstance] setCategory:category error:nil];
|
|
||||||
#endif
|
|
||||||
result(nil);
|
|
||||||
} else {
|
|
||||||
result(FlutterMethodNotImplemented);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
@end
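As an aside, the index-to-category switch above could equally be written as a lookup table. A sketch under the same TARGET_OS_IPHONE assumption (helper name hypothetical, not the plugin's code):

#import <AVFoundation/AVFoundation.h>

// Sketch: table-driven equivalent of the switch above. Returns nil for an
// out-of-range index so the caller can skip configuring the session.
static AVAudioSessionCategory categoryForIndex(NSInteger index) {
    NSArray<AVAudioSessionCategory> *categories = @[
        AVAudioSessionCategoryAmbient,
        AVAudioSessionCategorySoloAmbient,
        AVAudioSessionCategoryPlayback,
        AVAudioSessionCategoryRecord,
        AVAudioSessionCategoryPlayAndRecord,
        AVAudioSessionCategoryMultiRoute,
    ];
    return (index >= 0 && index < (NSInteger)categories.count) ? categories[index] : nil;
}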
1
macos/Classes/JustAudioPlugin.m
Symbolic link
@ -0,0 +1 @@
../../darwin/Classes/JustAudioPlugin.m
@ -1,53 +0,0 @@
#import "AudioSource.h"
|
|
||||||
#import "LoopingAudioSource.h"
|
|
||||||
#import <AVFoundation/AVFoundation.h>
|
|
||||||
|
|
||||||
@implementation LoopingAudioSource {
|
|
||||||
// An array of duplicates
|
|
||||||
NSArray<AudioSource *> *_audioSources; // <AudioSource *>
|
|
||||||
}
|
|
||||||
|
|
||||||
- (instancetype)initWithId:(NSString *)sid audioSources:(NSArray<AudioSource *> *)audioSources {
|
|
||||||
self = [super initWithId:sid];
|
|
||||||
NSAssert(self, @"super init cannot be nil");
|
|
||||||
_audioSources = audioSources;
|
|
||||||
return self;
|
|
||||||
}
|
|
||||||
|
|
||||||
- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex {
|
|
||||||
for (int i = 0; i < [_audioSources count]; i++) {
|
|
||||||
treeIndex = [_audioSources[i] buildSequence:sequence treeIndex:treeIndex];
|
|
||||||
}
|
|
||||||
return treeIndex;
|
|
||||||
}
|
|
||||||
|
|
||||||
- (void)findById:(NSString *)sourceId matches:(NSMutableArray<AudioSource *> *)matches {
|
|
||||||
[super findById:sourceId matches:matches];
|
|
||||||
for (int i = 0; i < [_audioSources count]; i++) {
|
|
||||||
[_audioSources[i] findById:sourceId matches:matches];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
- (NSArray *)getShuffleOrder {
|
|
||||||
NSMutableArray *order = [NSMutableArray new];
|
|
||||||
int offset = (int)[order count];
|
|
||||||
for (int i = 0; i < [_audioSources count]; i++) {
|
|
||||||
AudioSource *audioSource = _audioSources[i];
|
|
||||||
NSArray *childShuffleOrder = [audioSource getShuffleOrder];
|
|
||||||
for (int j = 0; j < [childShuffleOrder count]; j++) {
|
|
||||||
[order addObject:@([childShuffleOrder[j] integerValue] + offset)];
|
|
||||||
}
|
|
||||||
offset += [childShuffleOrder count];
|
|
||||||
}
|
|
||||||
return order;
|
|
||||||
}
|
|
||||||
|
|
||||||
- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex {
|
|
||||||
// TODO: This should probably shuffle the same way on all duplicates.
|
|
||||||
for (int i = 0; i < [_audioSources count]; i++) {
|
|
||||||
treeIndex = [_audioSources[i] shuffle:treeIndex currentIndex:currentIndex];
|
|
||||||
}
|
|
||||||
return treeIndex;
|
|
||||||
}
|
|
||||||
|
|
||||||
@end
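getShuffleOrder above offsets each duplicate's local order by the number of leaves that precede it, so the combined order indexes the flattened sequence. A self-contained sketch of that composition (data hypothetical):

#import <Foundation/Foundation.h>

// Sketch: composing child shuffle orders with running offsets, as in
// getShuffleOrder above. Two children of sizes 2 and 1, each with an
// identity local order, compose to [0, 1, 2].
static NSArray<NSNumber *> *composeOrders(NSArray<NSArray<NSNumber *> *> *childOrders) {
    NSMutableArray<NSNumber *> *order = [NSMutableArray new];
    int offset = 0;
    for (NSArray<NSNumber *> *child in childOrders) {
        for (NSNumber *j in child) {
            [order addObject:@(j.intValue + offset)];
        }
        offset += (int)child.count;
    }
    return order;
}
// Example: composeOrders(@[@[@0, @1], @[@0]]) -> [0, 1, 2]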
1
macos/Classes/LoopingAudioSource.m
Symbolic link
@ -0,0 +1 @@
../../darwin/Classes/LoopingAudioSource.m
@ -1,79 +0,0 @@
#import "UriAudioSource.h"
|
|
||||||
#import "IndexedAudioSource.h"
|
|
||||||
#import "IndexedPlayerItem.h"
|
|
||||||
#import <AVFoundation/AVFoundation.h>
|
|
||||||
|
|
||||||
@implementation UriAudioSource {
|
|
||||||
NSString *_uri;
|
|
||||||
IndexedPlayerItem *_playerItem;
|
|
||||||
/* CMTime _duration; */
|
|
||||||
}
|
|
||||||
|
|
||||||
- (instancetype)initWithId:(NSString *)sid uri:(NSString *)uri {
|
|
||||||
self = [super initWithId:sid];
|
|
||||||
NSAssert(self, @"super init cannot be nil");
|
|
||||||
_uri = uri;
|
|
||||||
if ([_uri hasPrefix:@"file://"]) {
|
|
||||||
_playerItem = [[IndexedPlayerItem alloc] initWithURL:[NSURL fileURLWithPath:[_uri substringFromIndex:7]]];
|
|
||||||
} else {
|
|
||||||
_playerItem = [[IndexedPlayerItem alloc] initWithURL:[NSURL URLWithString:_uri]];
|
|
||||||
}
|
|
||||||
if (@available(macOS 10.13, iOS 11.0, *)) {
|
|
||||||
// This does the best at reducing distortion on voice with speeds below 1.0
|
|
||||||
_playerItem.audioTimePitchAlgorithm = AVAudioTimePitchAlgorithmTimeDomain;
|
|
||||||
}
|
|
||||||
/* NSKeyValueObservingOptions options = */
|
|
||||||
/* NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew; */
|
|
||||||
/* [_playerItem addObserver:self */
|
|
||||||
/* forKeyPath:@"duration" */
|
|
||||||
/* options:options */
|
|
||||||
/* context:nil]; */
|
|
||||||
return self;
|
|
||||||
}
|
|
||||||
|
|
||||||
- (IndexedPlayerItem *)playerItem {
|
|
||||||
return _playerItem;
|
|
||||||
}
|
|
||||||
|
|
||||||
- (NSArray *)getShuffleOrder {
|
|
||||||
return @[@(0)];
|
|
||||||
}
|
|
||||||
|
|
||||||
- (void)play:(AVQueuePlayer *)player {
|
|
||||||
}
|
|
||||||
|
|
||||||
- (void)pause:(AVQueuePlayer *)player {
|
|
||||||
}
|
|
||||||
|
|
||||||
- (void)stop:(AVQueuePlayer *)player {
|
|
||||||
}
|
|
||||||
|
|
||||||
- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler {
|
|
||||||
if (!completionHandler || (_playerItem.status == AVPlayerItemStatusReadyToPlay)) {
|
|
||||||
[_playerItem seekToTime:position toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero completionHandler:completionHandler];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
- (CMTime)duration {
|
|
||||||
return _playerItem.duration;
|
|
||||||
}
|
|
||||||
|
|
||||||
- (void)setDuration:(CMTime)duration {
|
|
||||||
}
|
|
||||||
|
|
||||||
- (CMTime)position {
|
|
||||||
return _playerItem.currentTime;
|
|
||||||
}
|
|
||||||
|
|
||||||
- (CMTime)bufferedPosition {
|
|
||||||
NSValue *last = _playerItem.loadedTimeRanges.lastObject;
|
|
||||||
if (last) {
|
|
||||||
CMTimeRange timeRange = [last CMTimeRangeValue];
|
|
||||||
return CMTimeAdd(timeRange.start, timeRange.duration);
|
|
||||||
} else {
|
|
||||||
return _playerItem.currentTime;
|
|
||||||
}
|
|
||||||
return kCMTimeInvalid;
|
|
||||||
}
|
|
||||||
|
|
||||||
@end
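For reference, the initializer above distinguishes two URI forms: file:// URIs become file URLs built from the path, everything else is parsed as a regular URL. A small sketch of that branch (helper name and URLs hypothetical):

#import <Foundation/Foundation.h>

// Sketch: the two URI forms handled in initWithId:uri: above.
static NSURL *urlForUri(NSString *uri) {
    if ([uri hasPrefix:@"file://"]) {
        // Strip the scheme and build a file URL, as the initializer does.
        return [NSURL fileURLWithPath:[uri substringFromIndex:7]];
    }
    return [NSURL URLWithString:uri];
}
// urlForUri(@"file:///tmp/track.mp3")         -> file URL with path /tmp/track.mp3
// urlForUri(@"https://example.com/track.mp3") -> remote URL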
1
macos/Classes/UriAudioSource.m
Symbolic link
@ -0,0 +1 @@
../../darwin/Classes/UriAudioSource.m
7
pubspec.lock
@ -8,6 +8,13 @@ packages:
       url: "https://pub.dartlang.org"
     source: hosted
     version: "2.4.2"
+  audio_session:
+    dependency: "direct main"
+    description:
+      name: audio_session
+      url: "https://pub.dartlang.org"
+    source: hosted
+    version: "0.0.7"
   boolean_selector:
     dependency: transitive
     description:
7
pubspec.yaml
@ -1,13 +1,14 @@
 name: just_audio
 description: Flutter plugin to play audio from streams, files, assets, DASH/HLS streams and playlists. Works with audio_service to play audio in the background.
-version: 0.3.1
+version: 0.4.4
 homepage: https://github.com/ryanheise/just_audio
 
 environment:
-  sdk: '>=2.6.0 <3.0.0'
-  flutter: ">=1.12.8 <2.0.0"
+  sdk: ">=2.7.0 <3.0.0"
+  flutter: ">=1.12.13+hotfix.5"
 
 dependencies:
+  audio_session: ^0.0.7
   rxdart: ^0.24.1
   path: ^1.6.4
   path_provider: ^1.6.10