diff --git a/.github/workflows/auto-close.yml b/.github/workflows/auto-close.yml new file mode 100644 index 0000000..6e572b9 --- /dev/null +++ b/.github/workflows/auto-close.yml @@ -0,0 +1,12 @@ +name: Autocloser +on: [issues] +jobs: + autoclose: + runs-on: ubuntu-latest + steps: + - name: Autoclose issues that did not follow issue template + uses: roots/issue-closer-action@v1.1 + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + issue-close-message: "This issue was automatically closed because it did not follow the issue template." + issue-pattern: "Which API(.|[\\r\\n])*Minimal reproduction project(.|[\\r\\n])*To Reproduce|To which pages(.|[\\r\\n])*Describe your suggestion|Is your feature request(.|[\\r\\n])*Describe the solution you'd like" diff --git a/CHANGELOG.md b/CHANGELOG.md index fc87c15..e9499fe 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,44 @@ +## 0.4.4 + +* Fix crash when disposing of positionStream controller. +* Handle interruptions correctly when willPauseWhenDucked is set. +* Correct seek/position/duration in HLS streams (@snaeji). +* Resume at correct speed after seek on iOS (@subhash279). + +## 0.4.3 + +* Add section to README on configuring the audio session. + +## 0.4.2 + +* Make default audio session settings compatible with iOS control center. +* Update README to mention NSMicrophoneUsageDescription key in Info.plist. + +## 0.4.1 + +* Fix setSpeed bug on iOS. + +## 0.4.0 + +* Handles audio focus/interruptions via audio_session +* Bug fixes + +## 0.3.4 + +* Fix bug in icy metadata +* Allow Android AudioAttributes to be set +* Provide access to Android audio session ID + +## 0.3.3 + +* Remove dependency on Java streams API + +## 0.3.2 + +* Fix dynamic methods on ConcatenatingAudioSource for iOS/Android +* Add sequenceStream/sequenceStateStream +* Change asset URI from asset:// to asset:/// + ## 0.3.1 * Prevent hang in dispose diff --git a/LICENSE b/LICENSE index 27a8b32..6948574 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ MIT License -Copyright (c) 2019-2020 Ryan Heise. +Copyright (c) 2019-2020 Ryan Heise and the project contributors. 
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/README.md b/README.md index 60fb32f..6e413dc 100644 --- a/README.md +++ b/README.md @@ -4,25 +4,26 @@ This Flutter plugin plays audio from URLs, files, assets, DASH/HLS streams and p ## Features -| Feature | Android | iOS | MacOS | Web | -| ------- | :-------: | :-----: | :-----: | :-----: | -| read from URL | ✅ | ✅ | ✅ | ✅ | -| read from file | ✅ | ✅ | ✅ | | -| read from asset | ✅ | ✅ | ✅ | | -| request headers | ✅ | ✅ | ✅ | | -| DASH | ✅ | | | | -| HLS | ✅ | ✅ | ✅ | | -| buffer status/position | ✅ | ✅ | ✅ | ✅ | -| play/pause/seek | ✅ | ✅ | ✅ | ✅ | -| set volume | ✅ | ✅ | ✅ | ✅ | -| set speed | ✅ | ✅ | ✅ | ✅ | -| clip audio | ✅ | ✅ | ✅ | ✅ | -| playlists | ✅ | ✅ | ✅ | ✅ | -| looping | ✅ | ✅ | ✅ | ✅ | -| shuffle | ✅ | ✅ | ✅ | ✅ | -| compose audio | ✅ | ✅ | ✅ | ✅ | -| gapless playback | ✅ | ✅ | ✅ | | -| report player errors | ✅ | ✅ | ✅ | ✅ | +| Feature | Android | iOS | macOS | Web | +| ------- | :-------: | :-----: | :-----: | :-----: | +| read from URL | ✅ | ✅ | ✅ | ✅ | +| read from file | ✅ | ✅ | ✅ | | +| read from asset | ✅ | ✅ | ✅ | | +| request headers | ✅ | ✅ | ✅ | | +| DASH | ✅ | | | | +| HLS | ✅ | ✅ | ✅ | | +| buffer status/position | ✅ | ✅ | ✅ | ✅ | +| play/pause/seek | ✅ | ✅ | ✅ | ✅ | +| set volume | ✅ | ✅ | ✅ | ✅ | +| set speed | ✅ | ✅ | ✅ | ✅ | +| clip audio | ✅ | ✅ | ✅ | ✅ | +| playlists | ✅ | ✅ | ✅ | ✅ | +| looping | ✅ | ✅ | ✅ | ✅ | +| shuffle | ✅ | ✅ | ✅ | ✅ | +| compose audio | ✅ | ✅ | ✅ | ✅ | +| gapless playback | ✅ | ✅ | ✅ | | +| report player errors | ✅ | ✅ | ✅ | ✅ | +| Handle phonecall interruptions | ✅ | ✅ | | | Please consider reporting any bugs you encounter [here](https://github.com/ryanheise/just_audio/issues) or submitting pull requests [here](https://github.com/ryanheise/just_audio/pulls). @@ -35,6 +36,8 @@ Initialisation: ```dart final player = AudioPlayer(); var duration = await player.setUrl('https://foo.com/bar.mp3'); +var duration = await player.setFilePath('/path/to/file.mp3'); +var duration = await player.setAsset('path/to/asset.mp3'); ``` Standard controls: @@ -147,6 +150,8 @@ player.playerStateStream.listen((state) { // - durationStream // - positionStream // - bufferedPositionStream +// - sequenceStateStream +// - sequenceStream // - currentIndexStream // - icyMetadataStream // - playingStream @@ -158,6 +163,21 @@ player.playerStateStream.listen((state) { // - playbackEventStream ``` +## Configuring the audio session + +If your app uses audio, you should tell the operating system what kind of usage scenario your app has and how your app will interact with other audio apps on the device. Different audio apps often have unique requirements. For example, when a navigator app speaks driving instructions, a music player should duck its audio while a podcast player should pause its audio. Depending on which one of these three apps you are building, you will need to configure your app's audio settings and callbacks to appropriately handle these interactions. + +just_audio will by default choose settings that are appropriate for a music player app which means that it will automatically duck audio when a navigator starts speaking, but should pause when a phone call or another music player starts. If you are building a podcast player or audio book reader, this behaviour would not be appropriate. 
While the user may be able to comprehend the navigator instructions while ducked music is playing in the background, it would be much more difficult to understand the navigator instructions while simultaneously listening to an audio book or podcast. + +You can use the [audio_session](https://pub.dev/packages/audio_session) package to change the default audio session configuration for your app. E.g. for a podcast player, you may use: + +```dart +final session = await AudioSession.instance; +await session.configure(AudioSessionConfiguration.speech()); +``` + +Note: If your app uses a number of different audio plugins, e.g. for audio recording, or text to speech, or background audio, it is possible that those plugins may internally override each other's audio session settings, so it is recommended that you apply your own preferred configuration using audio_session after all other audio plugins have loaded. You may consider asking the developer of each audio plugin you use to provide an option to not overwrite these global settings and allow them be managed externally. + ## Platform specific configuration ### Android @@ -168,8 +188,17 @@ If you wish to connect to non-HTTPS URLS, add the following attribute to the `ap ``` +If you need access to the player's AudioSession ID, you can listen to `AudioPlayer.androidAudioSessionIdStream`. Note that the AudioSession ID will change whenever you set new AudioAttributes. + ### iOS +Regardless of whether your app uses the microphone, Apple will require you to add the following key to your `Info.plist` file. The message will simply be ignored if your app doesn't use the microphone: + +```xml +NSMicrophoneUsageDescription +... explain why you use (or don't use) the microphone ... +``` + If you wish to connect to non-HTTPS URLS, add the following to your `Info.plist` file: ```xml @@ -182,27 +211,9 @@ If you wish to connect to non-HTTPS URLS, add the following to your `Info.plist` ``` -By default, iOS will mute your app's audio when your phone is switched to -silent mode. Depending on the requirements of your app, you can change the -default audio session category using `AudioPlayer.setIosCategory`. For example, -if you are writing a media app, Apple recommends that you set the category to -`AVAudioSessionCategoryPlayback`, which you can achieve by adding the following -code to your app's initialisation: +### macOS -```dart -AudioPlayer.setIosCategory(IosCategory.playback); -``` - -Note: If your app uses a number of different audio plugins in combination, e.g. -for audio recording, or text to speech, or background audio, it is possible -that those plugins may internally override the setting you choose here. You may -consider asking the developer of each other plugin you use to provide a similar -method so that you can configure the same audio session category universally -across all plugins you use. 
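A minimal Dart sketch of how an app might consume the streams and the Android audio-attributes call documented in this change set. The names `androidAudioSessionIdStream`, `sequenceStream` and `setAndroidAudioAttributes` come from the README and platform code above; the attribute values and handler bodies are illustrative assumptions only.

```dart
import 'package:audio_session/audio_session.dart';
import 'package:just_audio/just_audio.dart';

Future<void> configurePlayer(AudioPlayer player) async {
  // Android only: emits the platform audio session ID, which changes
  // whenever new AudioAttributes are applied (see the README note above).
  player.androidAudioSessionIdStream.listen((id) {
    print('Android audio session ID: $id');
  });

  // The flattened sequence of audio sources currently loaded into the player.
  player.sequenceStream.listen((sequence) {
    print('Playlist length: ${sequence?.length}');
  });

  // Assumed Dart counterpart of the "setAndroidAudioAttributes" platform call
  // added in this diff; AndroidAudioAttributes comes from the audio_session package.
  await player.setAndroidAudioAttributes(AndroidAudioAttributes(
    contentType: AndroidAudioContentType.music,
    usage: AndroidAudioUsage.media,
  ));
}
```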
- -### MacOS - -To allow your MacOS application to access audio files on the Internet, add the following to your `DebugProfile.entitlements` and `Release.entitlements` files: +To allow your macOS application to access audio files on the Internet, add the following to your `DebugProfile.entitlements` and `Release.entitlements` files: ```xml com.apple.security.network.client @@ -220,3 +231,8 @@ If you wish to connect to non-HTTPS URLS, add the following to your `Info.plist` ``` + +## Related plugins + +* [audio_service](https://pub.dev/packages/audio_service): play any audio in the background and control playback from the lock screen, Android notifications, the iOS Control Center, and headset buttons. +* [audio_session](https://pub.dev/packages/audio_session): configure your app's audio category (e.g. music vs speech) and configure how your app interacts with other audio apps (e.g. audio focus, ducking, mixing). diff --git a/android/build.gradle b/android/build.gradle index d63baca..0c2896d 100644 --- a/android/build.gradle +++ b/android/build.gradle @@ -8,7 +8,7 @@ buildscript { } dependencies { - classpath 'com.android.tools.build:gradle:3.6.3' + classpath 'com.android.tools.build:gradle:3.5.0' } } @@ -40,9 +40,9 @@ android { } dependencies { - implementation 'com.google.android.exoplayer:exoplayer-core:2.11.4' - implementation 'com.google.android.exoplayer:exoplayer-dash:2.11.4' - implementation 'com.google.android.exoplayer:exoplayer-hls:2.11.4' - implementation 'com.google.android.exoplayer:exoplayer-smoothstreaming:2.11.4' + implementation 'com.google.android.exoplayer:exoplayer-core:2.11.7' + implementation 'com.google.android.exoplayer:exoplayer-dash:2.11.7' + implementation 'com.google.android.exoplayer:exoplayer-hls:2.11.7' + implementation 'com.google.android.exoplayer:exoplayer-smoothstreaming:2.11.7' compile files('libs/extension-flac.aar') } diff --git a/android/gradle/wrapper/gradle-wrapper.properties b/android/gradle/wrapper/gradle-wrapper.properties index 212deb2..01a286e 100644 --- a/android/gradle/wrapper/gradle-wrapper.properties +++ b/android/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,5 @@ -#Mon Aug 10 13:15:44 CEST 2020 distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.4-all.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.2-all.zip diff --git a/android/libs/extension-flac.aar b/android/libs/extension-flac.aar index 62d38a2..982c1c6 100644 Binary files a/android/libs/extension-flac.aar and b/android/libs/extension-flac.aar differ diff --git a/android/src/main/java/com/ryanheise/just_audio/AudioPlayer.java b/android/src/main/java/com/ryanheise/just_audio/AudioPlayer.java index bdf5bb7..12d9709 100644 --- a/android/src/main/java/com/ryanheise/just_audio/AudioPlayer.java +++ b/android/src/main/java/com/ryanheise/just_audio/AudioPlayer.java @@ -3,20 +3,21 @@ package com.ryanheise.just_audio; import android.content.Context; import android.net.Uri; import android.os.Handler; -import android.util.Log; - import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.ExoPlaybackException; -import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.PlaybackParameters; import com.google.android.exoplayer2.Player; import com.google.android.exoplayer2.SimpleExoPlayer; import com.google.android.exoplayer2.Timeline; +import 
com.google.android.exoplayer2.audio.AudioAttributes; +import com.google.android.exoplayer2.audio.AudioListener; import com.google.android.exoplayer2.metadata.Metadata; import com.google.android.exoplayer2.metadata.MetadataOutput; import com.google.android.exoplayer2.metadata.icy.IcyHeaders; import com.google.android.exoplayer2.metadata.icy.IcyInfo; import com.google.android.exoplayer2.source.ClippingMediaSource; +import com.google.android.exoplayer2.source.MaskingMediaSource; +import com.google.android.exoplayer2.upstream.HttpDataSource; import com.google.android.exoplayer2.source.ConcatenatingMediaSource; import com.google.android.exoplayer2.source.LoopingMediaSource; import com.google.android.exoplayer2.source.MediaSource; @@ -32,8 +33,8 @@ import com.google.android.exoplayer2.upstream.DataSource; import com.google.android.exoplayer2.upstream.DefaultDataSourceFactory; import com.google.android.exoplayer2.upstream.DefaultHttpDataSource; import com.google.android.exoplayer2.upstream.DefaultHttpDataSourceFactory; -import com.google.android.exoplayer2.upstream.HttpDataSource; import com.google.android.exoplayer2.util.Util; +import io.flutter.Log; import io.flutter.plugin.common.BinaryMessenger; import io.flutter.plugin.common.EventChannel; import io.flutter.plugin.common.EventChannel.EventSink; @@ -41,17 +42,19 @@ import io.flutter.plugin.common.MethodCall; import io.flutter.plugin.common.MethodChannel; import io.flutter.plugin.common.MethodChannel.MethodCallHandler; import io.flutter.plugin.common.MethodChannel.Result; + +import java.io.File; import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Random; -import java.util.stream.Collectors; import com.ryanheise.just_audio.DeezerDataSource; -public class AudioPlayer implements MethodCallHandler, Player.EventListener, MetadataOutput { +public class AudioPlayer implements MethodCallHandler, Player.EventListener, AudioListener, MetadataOutput { static final String TAG = "AudioPlayer"; @@ -81,12 +84,12 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, Met private int errorCount; private SimpleExoPlayer player; + private Integer audioSessionId; private MediaSource mediaSource; private Integer currentIndex; private Map loopingChildren = new HashMap<>(); private Map loopingCounts = new HashMap<>(); private final Handler handler = new Handler(); - private final Runnable bufferWatcher = new Runnable() { @Override public void run() { @@ -142,6 +145,15 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, Met handler.post(bufferWatcher); } + @Override + public void onAudioSessionId(int audioSessionId) { + if (audioSessionId == C.AUDIO_SESSION_ID_UNSET) { + this.audioSessionId = null; + } else { + this.audioSessionId = audioSessionId; + } + } + @Override public void onMetadata(Metadata metadata) { for (int i = 0; i < metadata.length(); i++) { @@ -353,6 +365,10 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, Met case "concatenating.clear": concatenating(args.get(0)).clear(handler, () -> result.success(null)); break; + case "setAndroidAudioAttributes": + setAudioAttributes((Map)args.get(0)); + result.success(null); + break; default: result.notImplemented(); break; @@ -446,68 +462,72 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, Met Map map = (Map)json; String id = (String)map.get("id"); switch 
((String)map.get("type")) { - case "progressive": - Uri uri = Uri.parse((String)map.get("uri")); - //Deezer - if (uri.getHost() != null && uri.getHost().contains("dzcdn.net")) { - //Track id is stored in URL fragment (after #) - String fragment = uri.getFragment(); - uri = Uri.parse(((String)map.get("uri")).replace("#" + fragment, "")); - return new ProgressiveMediaSource.Factory( - () -> { - HttpDataSource deezerDataSource = new DeezerDataSource(fragment); - return deezerDataSource; - } - ).setTag(id).createMediaSource(uri); - } - - - - return new ProgressiveMediaSource.Factory(buildDataSourceFactory()) - .setTag(id) - .createMediaSource(uri); - case "dash": - return new DashMediaSource.Factory(buildDataSourceFactory()) - .setTag(id) - .createMediaSource(Uri.parse((String)map.get("uri"))); - case "hls": - return new HlsMediaSource.Factory(buildDataSourceFactory()) - .setTag(id) - .createMediaSource(Uri.parse((String)map.get("uri"))); - case "concatenating": - List audioSources = (List)map.get("audioSources"); - return new ConcatenatingMediaSource( - false, // isAtomic - (Boolean)map.get("useLazyPreparation"), - new DefaultShuffleOrder(audioSources.size()), - audioSources - .stream() - .map(s -> getAudioSource(s)) - .toArray(MediaSource[]::new)); - case "clipping": - Long start = getLong(map.get("start")); - Long end = getLong(map.get("end")); - return new ClippingMediaSource(getAudioSource(map.get("audioSource")), - (start != null ? start : 0) * 1000L, - (end != null ? end : C.TIME_END_OF_SOURCE) * 1000L); - case "looping": - Integer count = (Integer)map.get("count"); - MediaSource looperChild = getAudioSource(map.get("audioSource")); - LoopingMediaSource looper = new LoopingMediaSource(looperChild, count); - // TODO: store both in a single map - loopingChildren.put(looper, looperChild); - loopingCounts.put(looper, count); - return looper; - default: - throw new IllegalArgumentException("Unknown AudioSource type: " + map.get("type")); + case "progressive": + Uri uri = Uri.parse((String)map.get("uri")); + //Deezer + if (uri.getHost() != null && uri.getHost().contains("dzcdn.net")) { + //Track id is stored in URL fragment (after #) + String fragment = uri.getFragment(); + //Stream + uri = Uri.parse(((String)map.get("uri")).replace("#" + fragment, "")); + return new ProgressiveMediaSource.Factory( + () -> { + HttpDataSource deezerDataSource = new DeezerDataSource(fragment); + return deezerDataSource; + } + ).setTag(id).createMediaSource(uri); + } + return new ProgressiveMediaSource.Factory(buildDataSourceFactory()) + .setTag(id) + .createMediaSource(uri); + case "dash": + return new DashMediaSource.Factory(buildDataSourceFactory()) + .setTag(id) + .createMediaSource(Uri.parse((String)map.get("uri"))); + case "hls": + return new HlsMediaSource.Factory(buildDataSourceFactory()) + .setTag(id) + .createMediaSource(Uri.parse((String)map.get("uri"))); + case "concatenating": + MediaSource[] mediaSources = getAudioSourcesArray(map.get("audioSources")); + return new ConcatenatingMediaSource( + false, // isAtomic + (Boolean)map.get("useLazyPreparation"), + new DefaultShuffleOrder(mediaSources.length), + mediaSources); + case "clipping": + Long start = getLong(map.get("start")); + Long end = getLong(map.get("end")); + return new ClippingMediaSource(getAudioSource(map.get("audioSource")), + (start != null ? start : 0) * 1000L, + (end != null ? 
end : C.TIME_END_OF_SOURCE) * 1000L); + case "looping": + Integer count = (Integer)map.get("count"); + MediaSource looperChild = getAudioSource(map.get("audioSource")); + LoopingMediaSource looper = new LoopingMediaSource(looperChild, count); + // TODO: store both in a single map + loopingChildren.put(looper, looperChild); + loopingCounts.put(looper, count); + return looper; + default: + throw new IllegalArgumentException("Unknown AudioSource type: " + map.get("type")); } } + private MediaSource[] getAudioSourcesArray(final Object json) { + List mediaSources = getAudioSources(json); + MediaSource[] mediaSourcesArray = new MediaSource[mediaSources.size()]; + mediaSources.toArray(mediaSourcesArray); + return mediaSourcesArray; + } + private List getAudioSources(final Object json) { - return ((List)json) - .stream() - .map(s -> getAudioSource(s)) - .collect(Collectors.toList()); + List audioSources = (List)json; + List mediaSources = new ArrayList(); + for (int i = 0 ; i < audioSources.size(); i++) { + mediaSources.add(getAudioSource(audioSources.get(i))); + } + return mediaSources; } private DataSource.Factory buildDataSourceFactory() { @@ -548,9 +568,20 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, Met player = new SimpleExoPlayer.Builder(context).build(); player.addMetadataOutput(this); player.addListener(this); + player.addAudioListener(this); } } + private void setAudioAttributes(Map json) { + ensurePlayerInitialized(); + AudioAttributes.Builder builder = new AudioAttributes.Builder(); + builder.setContentType((Integer)json.get("contentType")); + builder.setFlags((Integer)json.get("flags")); + builder.setUsage((Integer)json.get("usage")); + //builder.setAllowedCapturePolicy((Integer)json.get("allowedCapturePolicy")); + player.setAudioAttributes(builder.build()); + } + private void broadcastPlaybackEvent() { final Map event = new HashMap(); event.put("processingState", processingState.ordinal()); @@ -560,7 +591,18 @@ public class AudioPlayer implements MethodCallHandler, Player.EventListener, Met event.put("icyMetadata", collectIcyMetadata()); event.put("duration", duration = getDuration()); event.put("currentIndex", currentIndex); - event.put("qualityString", null); + event.put("androidAudioSessionId", audioSessionId); + + + String qualityString = ""; + if (player != null && player.getAudioFormat() != null && player.getAudioFormat().sampleMimeType != null) { + if (player.getAudioFormat().sampleMimeType.equals("audio/mpeg")) { + qualityString = "MP3"; + } else { + qualityString = "FLAC"; + } + } + event.put("qualityString", qualityString); if (eventSink != null) { eventSink.success(event); diff --git a/android/src/main/java/com/ryanheise/just_audio/DeezerDataSource.java b/android/src/main/java/com/ryanheise/just_audio/DeezerDataSource.java index be617d0..9254fab 100644 --- a/android/src/main/java/com/ryanheise/just_audio/DeezerDataSource.java +++ b/android/src/main/java/com/ryanheise/just_audio/DeezerDataSource.java @@ -10,6 +10,7 @@ import java.io.ByteArrayOutputStream; import java.io.FilterInputStream; import java.io.IOException; import java.io.InputStream; +import java.io.InterruptedIOException; import java.net.HttpURLConnection; import java.net.URL; import java.security.MessageDigest; @@ -71,9 +72,9 @@ public class DeezerDataSource implements HttpDataSource { this.connection = (HttpURLConnection) url.openConnection(); this.connection.setChunkedStreamingMode(2048); if (dataSpec.position > 0) { - this.counter = (int) (dataSpec.position/2048); + 
this.counter = (int) (dataSpec.position / 2048); this.connection.setRequestProperty("Range", - "bytes=" + Long.toString(this.counter*2048) + "-"); + "bytes=" + Long.toString(this.counter * 2048) + "-"); } InputStream is = this.connection.getInputStream(); @@ -84,7 +85,7 @@ public class DeezerDataSource implements HttpDataSource { int t = 0; int read = 0; while (read != -1 && t != 2048) { - t += read = in.read(b, t, 2048-t); + t += read = in.read(b, t, 2048 - t); } if (counter % 3 == 0) { @@ -98,9 +99,12 @@ public class DeezerDataSource implements HttpDataSource { return t; } - },2048); + }, 2048); + } catch (InterruptedIOException e) { + //Interrupted, do nothing + return -1; } catch (Exception e) { //Quality fallback if (this.quality == 1) { @@ -125,6 +129,8 @@ public class DeezerDataSource implements HttpDataSource { @Override public int read(byte[] buffer, int offset, int length) throws HttpDataSourceException { + if (this.inputStream == null) throw new HttpDataSourceException("Input stream null!", this.dataSpec, HttpDataSourceException.TYPE_READ); + int read = 0; try { read = this.inputStream.read(buffer, offset, length); diff --git a/darwin/Classes/AudioPlayer.m b/darwin/Classes/AudioPlayer.m index ccbfdea..c53ad79 100644 --- a/darwin/Classes/AudioPlayer.m +++ b/darwin/Classes/AudioPlayer.m @@ -39,6 +39,7 @@ BOOL _automaticallyWaitsToMinimizeStalling; BOOL _configuredSession; BOOL _playing; + float _speed; } - (instancetype)initWithRegistrar:(NSObject *)registrar playerId:(NSString*)idParam configuredSession:(BOOL)configuredSession { @@ -74,6 +75,7 @@ _loadResult = nil; _playResult = nil; _automaticallyWaitsToMinimizeStalling = YES; + _speed = 1.0f; __weak __typeof__(self) weakSelf = self; [_methodChannel setMethodCallHandler:^(FlutterMethodCall* call, FlutterResult result) { [weakSelf handleMethodCall:call result:result]; @@ -127,7 +129,7 @@ [self concatenatingInsertAll:(NSString*)args[0] index:[args[1] intValue] sources:(NSArray*)args[2]]; result(nil); } else if ([@"concatenating.removeAt" isEqualToString:call.method]) { - [self concatenatingRemoveAt:(NSString*)args[0] index:(int)args[1]]; + [self concatenatingRemoveAt:(NSString*)args[0] index:[args[1] intValue]]; result(nil); } else if ([@"concatenating.removeRange" isEqualToString:call.method]) { [self concatenatingRemoveRange:(NSString*)args[0] start:[args[1] intValue] end:[args[2] intValue]]; @@ -138,6 +140,8 @@ } else if ([@"concatenating.clear" isEqualToString:call.method]) { [self concatenatingClear:(NSString*)args[0]]; result(nil); + } else if ([@"setAndroidAudioAttributes" isEqualToString:call.method]) { + result(nil); } else { result(FlutterMethodNotImplemented); } @@ -251,7 +255,8 @@ // Re-index the audio sources. 
_indexedAudioSources = [[NSMutableArray alloc] init]; [_audioSource buildSequence:_indexedAudioSources treeIndex:0]; - _index = [self indexForItem:_player.currentItem]; + [self updateOrder]; + [self enqueueFrom:[self indexForItem:_player.currentItem]]; [self broadcastPlaybackEvent]; } @@ -329,7 +334,7 @@ return 0; } else if (CMTIME_IS_VALID(_seekPos)) { return (int)(1000 * CMTimeGetSeconds(_seekPos)); - } else if (_indexedAudioSources) { + } else if (_indexedAudioSources && _indexedAudioSources.count > 0) { int ms = (int)(1000 * CMTimeGetSeconds(_indexedAudioSources[_index].position)); if (ms < 0) ms = 0; return ms; @@ -341,7 +346,7 @@ - (int)getBufferedPosition { if (_processingState == none || _processingState == loading) { return 0; - } else if (_indexedAudioSources) { + } else if (_indexedAudioSources && _indexedAudioSources.count > 0) { int ms = (int)(1000 * CMTimeGetSeconds(_indexedAudioSources[_index].bufferedPosition)); if (ms < 0) ms = 0; return ms; @@ -353,7 +358,7 @@ - (int)getDuration { if (_processingState == none) { return -1; - } else if (_indexedAudioSources) { + } else if (_indexedAudioSources && _indexedAudioSources.count > 0) { int v = (int)(1000 * CMTimeGetSeconds(_indexedAudioSources[_index].duration)); return v; } else { @@ -425,7 +430,6 @@ } - (void)enqueueFrom:(int)index { - int oldIndex = _index; _index = index; // Update the queue while keeping the currently playing item untouched. @@ -436,22 +440,27 @@ // First, remove all _player items except for the currently playing one (if any). IndexedPlayerItem *oldItem = _player.currentItem; IndexedPlayerItem *existingItem = nil; + IndexedPlayerItem *newItem = _indexedAudioSources.count > 0 ? _indexedAudioSources[_index].playerItem : nil; NSArray *oldPlayerItems = [NSArray arrayWithArray:_player.items]; // In the first pass, preserve the old and new items. for (int i = 0; i < oldPlayerItems.count; i++) { - if (oldPlayerItems[i] == _indexedAudioSources[_index].playerItem) { + if (oldPlayerItems[i] == newItem) { // Preserve and tag new item if it is already in the queue. existingItem = oldPlayerItems[i]; + //NSLog(@"Preserving existing item %d", [self indexForItem:existingItem]); } else if (oldPlayerItems[i] == oldItem) { + //NSLog(@"Preserving old item %d", [self indexForItem:oldItem]); // Temporarily preserve old item, just to avoid jumping to // intermediate queue positions unnecessarily. We only want to jump // once to _index. } else { + //NSLog(@"Removing item %d", [self indexForItem:oldPlayerItems[i]]); [_player removeItem:oldPlayerItems[i]]; } } // In the second pass, remove the old item (if different from new item). 
- if (_index != oldIndex) { + if (oldItem && newItem != oldItem) { + //NSLog(@"removing old item %d", [self indexForItem:oldItem]); [_player removeItem:oldItem]; } @@ -464,6 +473,7 @@ int si = [_order[i] intValue]; if (si == _index) include = YES; if (include && _indexedAudioSources[si].playerItem != existingItem) { + //NSLog(@"inserting item %d", si); [_player insertItem:_indexedAudioSources[si].playerItem afterItem:nil]; } } @@ -471,7 +481,7 @@ /* NSLog(@"after reorder: _player.items.count: ", _player.items.count); */ /* [self dumpQueue]; */ - if (_processingState != loading && oldItem != _indexedAudioSources[_index].playerItem) { + if (_processingState != loading && oldItem != newItem) { // || !_player.currentItem.playbackLikelyToKeepUp; if (_player.currentItem.playbackBufferEmpty) { [self enterBuffering:@"enqueueFrom playbackBufferEmpty"]; @@ -480,6 +490,8 @@ } [self updatePosition]; } + + [self updateEndAction]; } - (void)updatePosition { @@ -621,13 +633,14 @@ } else { IndexedPlayerItem *endedPlayerItem = (IndexedPlayerItem *)notification.object; IndexedAudioSource *endedSource = endedPlayerItem.audioSource; - // When an item ends, seek back to its beginning. - [endedSource seek:kCMTimeZero]; if ([_orderInv[_index] intValue] + 1 < [_order count]) { + // When an item ends, seek back to its beginning. + [endedSource seek:kCMTimeZero]; // account for automatic move to next item _index = [_order[[_orderInv[_index] intValue] + 1] intValue]; NSLog(@"advance to next: index = %d", _index); + [self updateEndAction]; [self broadcastPlaybackEvent]; } else { // reached end of playlist @@ -642,11 +655,13 @@ // sources. // For now we just do a seek back to the start. if ([_order count] == 1) { - [self seek:kCMTimeZero index:[NSNull null] completionHandler:^(BOOL finished) { + [self seek:kCMTimeZero index:_order[0] completionHandler:^(BOOL finished) { // XXX: Necessary? [self play]; }]; } else { + // When an item ends, seek back to its beginning. + [endedSource seek:kCMTimeZero]; [self seek:kCMTimeZero index:_order[0] completionHandler:^(BOOL finished) { // XXX: Necessary? [self play]; @@ -788,6 +803,7 @@ // account for automatic move to next item _index = [_order[[_orderInv[_index] intValue] + 1] intValue]; NSLog(@"advance to next on error: index = %d", _index); + [self updateEndAction]; [self broadcastPlaybackEvent]; } else { NSLog(@"error on last item"); @@ -800,6 +816,7 @@ // notifying this observer. NSLog(@"Queue change detected. Adjusting index from %d -> %d", _index, expectedIndex); _index = expectedIndex; + [self updateEndAction]; [self broadcastPlaybackEvent]; } } @@ -833,7 +850,7 @@ if (shouldResumePlayback) { _player.actionAtItemEnd = originalEndAction; // TODO: This logic is almost duplicated in seek. See if we can reuse this code. 
- [_player play]; + _player.rate = _speed; } }]; } else { @@ -904,7 +921,7 @@ [[AVAudioSession sharedInstance] setActive:YES error:nil]; } #endif - [_player play]; + _player.rate = _speed; [self updatePosition]; if (@available(macOS 10.12, iOS 10.0, *)) {} else { @@ -946,21 +963,26 @@ if (speed == 1.0 || (speed < 1.0 && _player.currentItem.canPlaySlowForward) || (speed > 1.0 && _player.currentItem.canPlayFastForward)) { - _player.rate = speed; + _speed = speed; + if (_playing) { + _player.rate = speed; + } } [self updatePosition]; } - (void)setLoopMode:(int)loopMode { _loopMode = loopMode; - if (_player) { - switch (_loopMode) { - case loopOne: - _player.actionAtItemEnd = AVPlayerActionAtItemEndPause; // AVPlayerActionAtItemEndNone - break; - default: - _player.actionAtItemEnd = AVPlayerActionAtItemEndAdvance; - } + [self updateEndAction]; +} + +- (void)updateEndAction { + // Should update this whenever the audio source changes and whenever _index changes. + if (!_player) return; + if (_audioSource && [_orderInv[_index] intValue] + 1 < [_order count] && _loopMode != loopOne) { + _player.actionAtItemEnd = AVPlayerActionAtItemEndAdvance; + } else { + _player.actionAtItemEnd = AVPlayerActionAtItemEndPause; // AVPlayerActionAtItemEndNone } } @@ -1050,7 +1072,7 @@ } } if (_playing) { - [_player play]; + _player.rate = _speed; } _seekPos = kCMTimeInvalid; [self broadcastPlaybackEvent]; @@ -1061,7 +1083,15 @@ } else { _seekPos = kCMTimeInvalid; if (_playing) { - [_player play]; + if (@available(iOS 10.0, *)) { + // NOTE: Re-enable this line only after figuring out + // how to detect buffering when buffered audio is not + // immediately available. + //[_player playImmediatelyAtRate:_speed]; + _player.rate = _speed; + } else { + _player.rate = _speed; + } } } } @@ -1083,7 +1113,15 @@ // If playing, buffering will be detected either by: // 1. checkForDiscontinuity // 2. timeControlStatus - [_player play]; + if (@available(iOS 10.0, *)) { + // NOTE: Re-enable this line only after figuring out how to + // detect buffering when buffered audio is not immediately + // available. 
+ //[_player playImmediatelyAtRate:_speed]; + _player.rate = _speed; + } else { + _player.rate = _speed; + } } else { // If not playing, there is no reliable way to detect // when buffering has completed, so we use diff --git a/darwin/Classes/UriAudioSource.m b/darwin/Classes/UriAudioSource.m index 91321d4..9945a0a 100644 --- a/darwin/Classes/UriAudioSource.m +++ b/darwin/Classes/UriAudioSource.m @@ -50,19 +50,36 @@ - (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler { if (!completionHandler || (_playerItem.status == AVPlayerItemStatusReadyToPlay)) { - [_playerItem seekToTime:position toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero completionHandler:completionHandler]; + CMTimeRange seekableRange = [_playerItem.seekableTimeRanges.lastObject CMTimeRangeValue]; + CMTime relativePosition = CMTimeAdd(position, seekableRange.start); + [_playerItem seekToTime:relativePosition toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero completionHandler:completionHandler]; } } - (CMTime)duration { - return _playerItem.duration; + NSValue *seekableRange = _playerItem.seekableTimeRanges.lastObject; + if (seekableRange) { + CMTimeRange seekableDuration = [seekableRange CMTimeRangeValue];; + return seekableDuration.duration; + } + else { + return _playerItem.duration; + } + return kCMTimeInvalid; } - (void)setDuration:(CMTime)duration { } - (CMTime)position { - return _playerItem.currentTime; + NSValue *seekableRange = _playerItem.seekableTimeRanges.lastObject; + if (seekableRange) { + CMTimeRange range = [seekableRange CMTimeRangeValue]; + return CMTimeSubtract(_playerItem.currentTime, range.start); + } else { + return _playerItem.currentTime; + } + } - (CMTime)bufferedPosition { diff --git a/ios/Classes/AudioPlayer.m b/ios/Classes/AudioPlayer.m deleted file mode 100644 index ccbfdea..0000000 --- a/ios/Classes/AudioPlayer.m +++ /dev/null @@ -1,1138 +0,0 @@ -#import "AudioPlayer.h" -#import "AudioSource.h" -#import "IndexedAudioSource.h" -#import "UriAudioSource.h" -#import "ConcatenatingAudioSource.h" -#import "LoopingAudioSource.h" -#import "ClippingAudioSource.h" -#import -#import -#include - -// TODO: Check for and report invalid state transitions. -// TODO: Apply Apple's guidance on seeking: https://developer.apple.com/library/archive/qa/qa1820/_index.html -@implementation AudioPlayer { - NSObject* _registrar; - FlutterMethodChannel *_methodChannel; - FlutterEventChannel *_eventChannel; - FlutterEventSink _eventSink; - NSString *_playerId; - AVQueuePlayer *_player; - AudioSource *_audioSource; - NSMutableArray *_indexedAudioSources; - NSMutableArray *_order; - NSMutableArray *_orderInv; - int _index; - enum ProcessingState _processingState; - enum LoopMode _loopMode; - BOOL _shuffleModeEnabled; - long long _updateTime; - int _updatePosition; - int _lastPosition; - int _bufferedPosition; - // Set when the current item hasn't been played yet so we aren't sure whether sufficient audio has been buffered. 
- BOOL _bufferUnconfirmed; - CMTime _seekPos; - FlutterResult _loadResult; - FlutterResult _playResult; - id _timeObserver; - BOOL _automaticallyWaitsToMinimizeStalling; - BOOL _configuredSession; - BOOL _playing; -} - -- (instancetype)initWithRegistrar:(NSObject *)registrar playerId:(NSString*)idParam configuredSession:(BOOL)configuredSession { - self = [super init]; - NSAssert(self, @"super init cannot be nil"); - _registrar = registrar; - _playerId = idParam; - _configuredSession = configuredSession; - _methodChannel = - [FlutterMethodChannel methodChannelWithName:[NSMutableString stringWithFormat:@"com.ryanheise.just_audio.methods.%@", _playerId] - binaryMessenger:[registrar messenger]]; - _eventChannel = - [FlutterEventChannel eventChannelWithName:[NSMutableString stringWithFormat:@"com.ryanheise.just_audio.events.%@", _playerId] - binaryMessenger:[registrar messenger]]; - [_eventChannel setStreamHandler:self]; - _index = 0; - _processingState = none; - _loopMode = loopOff; - _shuffleModeEnabled = NO; - _player = nil; - _audioSource = nil; - _indexedAudioSources = nil; - _order = nil; - _orderInv = nil; - _seekPos = kCMTimeInvalid; - _timeObserver = 0; - _updatePosition = 0; - _updateTime = 0; - _lastPosition = 0; - _bufferedPosition = 0; - _bufferUnconfirmed = NO; - _playing = NO; - _loadResult = nil; - _playResult = nil; - _automaticallyWaitsToMinimizeStalling = YES; - __weak __typeof__(self) weakSelf = self; - [_methodChannel setMethodCallHandler:^(FlutterMethodCall* call, FlutterResult result) { - [weakSelf handleMethodCall:call result:result]; - }]; - return self; -} - -- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result { - NSArray* args = (NSArray*)call.arguments; - if ([@"load" isEqualToString:call.method]) { - [self load:args[0] result:result]; - } else if ([@"play" isEqualToString:call.method]) { - [self play:result]; - } else if ([@"pause" isEqualToString:call.method]) { - [self pause]; - result(nil); - } else if ([@"setVolume" isEqualToString:call.method]) { - [self setVolume:(float)[args[0] doubleValue]]; - result(nil); - } else if ([@"setSpeed" isEqualToString:call.method]) { - [self setSpeed:(float)[args[0] doubleValue]]; - result(nil); - } else if ([@"setLoopMode" isEqualToString:call.method]) { - [self setLoopMode:[args[0] intValue]]; - result(nil); - } else if ([@"setShuffleModeEnabled" isEqualToString:call.method]) { - [self setShuffleModeEnabled:(BOOL)[args[0] boolValue]]; - result(nil); - } else if ([@"setAutomaticallyWaitsToMinimizeStalling" isEqualToString:call.method]) { - [self setAutomaticallyWaitsToMinimizeStalling:(BOOL)[args[0] boolValue]]; - result(nil); - } else if ([@"seek" isEqualToString:call.method]) { - CMTime position = args[0] == [NSNull null] ? 
kCMTimePositiveInfinity : CMTimeMake([args[0] intValue], 1000); - [self seek:position index:args[1] completionHandler:^(BOOL finished) { - result(nil); - }]; - result(nil); - } else if ([@"dispose" isEqualToString:call.method]) { - [self dispose]; - result(nil); - } else if ([@"concatenating.add" isEqualToString:call.method]) { - [self concatenatingAdd:(NSString*)args[0] source:(NSDictionary*)args[1]]; - result(nil); - } else if ([@"concatenating.insert" isEqualToString:call.method]) { - [self concatenatingInsert:(NSString*)args[0] index:[args[1] intValue] source:(NSDictionary*)args[2]]; - result(nil); - } else if ([@"concatenating.addAll" isEqualToString:call.method]) { - [self concatenatingAddAll:(NSString*)args[0] sources:(NSArray*)args[1]]; - result(nil); - } else if ([@"concatenating.insertAll" isEqualToString:call.method]) { - [self concatenatingInsertAll:(NSString*)args[0] index:[args[1] intValue] sources:(NSArray*)args[2]]; - result(nil); - } else if ([@"concatenating.removeAt" isEqualToString:call.method]) { - [self concatenatingRemoveAt:(NSString*)args[0] index:(int)args[1]]; - result(nil); - } else if ([@"concatenating.removeRange" isEqualToString:call.method]) { - [self concatenatingRemoveRange:(NSString*)args[0] start:[args[1] intValue] end:[args[2] intValue]]; - result(nil); - } else if ([@"concatenating.move" isEqualToString:call.method]) { - [self concatenatingMove:(NSString*)args[0] currentIndex:[args[1] intValue] newIndex:[args[2] intValue]]; - result(nil); - } else if ([@"concatenating.clear" isEqualToString:call.method]) { - [self concatenatingClear:(NSString*)args[0]]; - result(nil); - } else { - result(FlutterMethodNotImplemented); - } -} - -// Untested -- (void)concatenatingAdd:(NSString *)catId source:(NSDictionary *)source { - [self concatenatingInsertAll:catId index:-1 sources:@[source]]; -} - -// Untested -- (void)concatenatingInsert:(NSString *)catId index:(int)index source:(NSDictionary *)source { - [self concatenatingInsertAll:catId index:index sources:@[source]]; -} - -// Untested -- (void)concatenatingAddAll:(NSString *)catId sources:(NSArray *)sources { - [self concatenatingInsertAll:catId index:-1 sources:sources]; -} - -// Untested -- (void)concatenatingInsertAll:(NSString *)catId index:(int)index sources:(NSArray *)sources { - // Find all duplicates of the identified ConcatenatingAudioSource. - NSMutableArray *matches = [[NSMutableArray alloc] init]; - [_audioSource findById:catId matches:matches]; - // Add each new source to each match. - for (int i = 0; i < matches.count; i++) { - ConcatenatingAudioSource *catSource = (ConcatenatingAudioSource *)matches[i]; - int idx = index >= 0 ? index : catSource.count; - NSMutableArray *audioSources = [self decodeAudioSources:sources]; - for (int j = 0; j < audioSources.count; j++) { - AudioSource *audioSource = audioSources[j]; - [catSource insertSource:audioSource atIndex:(idx + j)]; - } - } - // Index the new audio sources. 
- _indexedAudioSources = [[NSMutableArray alloc] init]; - [_audioSource buildSequence:_indexedAudioSources treeIndex:0]; - for (int i = 0; i < [_indexedAudioSources count]; i++) { - IndexedAudioSource *audioSource = _indexedAudioSources[i]; - if (!audioSource.isAttached) { - audioSource.playerItem.audioSource = audioSource; - [self addItemObservers:audioSource.playerItem]; - } - } - [self updateOrder]; - if (_player.currentItem) { - _index = [self indexForItem:_player.currentItem]; - } else { - _index = 0; - } - [self enqueueFrom:_index]; - // Notify each new IndexedAudioSource that it's been attached to the player. - for (int i = 0; i < [_indexedAudioSources count]; i++) { - if (!_indexedAudioSources[i].isAttached) { - [_indexedAudioSources[i] attach:_player]; - } - } - [self broadcastPlaybackEvent]; -} - -// Untested -- (void)concatenatingRemoveAt:(NSString *)catId index:(int)index { - [self concatenatingRemoveRange:catId start:index end:(index + 1)]; -} - -// Untested -- (void)concatenatingRemoveRange:(NSString *)catId start:(int)start end:(int)end { - // Find all duplicates of the identified ConcatenatingAudioSource. - NSMutableArray *matches = [[NSMutableArray alloc] init]; - [_audioSource findById:catId matches:matches]; - // Remove range from each match. - for (int i = 0; i < matches.count; i++) { - ConcatenatingAudioSource *catSource = (ConcatenatingAudioSource *)matches[i]; - int endIndex = end >= 0 ? end : catSource.count; - [catSource removeSourcesFromIndex:start toIndex:endIndex]; - } - // Re-index the remaining audio sources. - NSArray *oldIndexedAudioSources = _indexedAudioSources; - _indexedAudioSources = [[NSMutableArray alloc] init]; - [_audioSource buildSequence:_indexedAudioSources treeIndex:0]; - for (int i = 0, j = 0; i < _indexedAudioSources.count; i++, j++) { - IndexedAudioSource *audioSource = _indexedAudioSources[i]; - while (audioSource != oldIndexedAudioSources[j]) { - [self removeItemObservers:oldIndexedAudioSources[j].playerItem]; - if (j < _index) { - _index--; - } else if (j == _index) { - // The currently playing item was removed. - } - j++; - } - } - [self updateOrder]; - if (_index >= _indexedAudioSources.count) _index = _indexedAudioSources.count - 1; - if (_index < 0) _index = 0; - [self enqueueFrom:_index]; - [self broadcastPlaybackEvent]; -} - -// Untested -- (void)concatenatingMove:(NSString *)catId currentIndex:(int)currentIndex newIndex:(int)newIndex { - // Find all duplicates of the identified ConcatenatingAudioSource. - NSMutableArray *matches = [[NSMutableArray alloc] init]; - [_audioSource findById:catId matches:matches]; - // Move range within each match. - for (int i = 0; i < matches.count; i++) { - ConcatenatingAudioSource *catSource = (ConcatenatingAudioSource *)matches[i]; - [catSource moveSourceFromIndex:currentIndex toIndex:newIndex]; - } - // Re-index the audio sources. 
- _indexedAudioSources = [[NSMutableArray alloc] init]; - [_audioSource buildSequence:_indexedAudioSources treeIndex:0]; - _index = [self indexForItem:_player.currentItem]; - [self broadcastPlaybackEvent]; -} - -// Untested -- (void)concatenatingClear:(NSString *)catId { - [self concatenatingRemoveRange:catId start:0 end:-1]; -} - -- (FlutterError*)onListenWithArguments:(id)arguments eventSink:(FlutterEventSink)eventSink { - _eventSink = eventSink; - return nil; -} - -- (FlutterError*)onCancelWithArguments:(id)arguments { - _eventSink = nil; - return nil; -} - -- (void)checkForDiscontinuity { - if (!_eventSink) return; - if (!_playing || CMTIME_IS_VALID(_seekPos) || _processingState == completed) return; - int position = [self getCurrentPosition]; - if (_processingState == buffering) { - if (position > _lastPosition) { - [self leaveBuffering:@"stall ended"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - } - } else { - long long now = (long long)([[NSDate date] timeIntervalSince1970] * 1000.0); - long long timeSinceLastUpdate = now - _updateTime; - long long expectedPosition = _updatePosition + (long long)(timeSinceLastUpdate * _player.rate); - long long drift = position - expectedPosition; - //NSLog(@"position: %d, drift: %lld", position, drift); - // Update if we've drifted or just started observing - if (_updateTime == 0L) { - [self broadcastPlaybackEvent]; - } else if (drift < -100) { - [self enterBuffering:@"stalling"]; - NSLog(@"Drift: %lld", drift); - [self updatePosition]; - [self broadcastPlaybackEvent]; - } - } - _lastPosition = position; -} - -- (void)enterBuffering:(NSString *)reason { - NSLog(@"ENTER BUFFERING: %@", reason); - _processingState = buffering; -} - -- (void)leaveBuffering:(NSString *)reason { - NSLog(@"LEAVE BUFFERING: %@", reason); - _processingState = ready; -} - -- (void)broadcastPlaybackEvent { - if (!_eventSink) return; - _eventSink(@{ - @"processingState": @(_processingState), - @"updatePosition": @(_updatePosition), - @"updateTime": @(_updateTime), - // TODO: buffer position - @"bufferedPosition": @(_updatePosition), - // TODO: Icy Metadata - @"icyMetadata": [NSNull null], - @"duration": @([self getDuration]), - @"currentIndex": @(_index), - }); -} - -- (int)getCurrentPosition { - if (_processingState == none || _processingState == loading) { - return 0; - } else if (CMTIME_IS_VALID(_seekPos)) { - return (int)(1000 * CMTimeGetSeconds(_seekPos)); - } else if (_indexedAudioSources) { - int ms = (int)(1000 * CMTimeGetSeconds(_indexedAudioSources[_index].position)); - if (ms < 0) ms = 0; - return ms; - } else { - return 0; - } -} - -- (int)getBufferedPosition { - if (_processingState == none || _processingState == loading) { - return 0; - } else if (_indexedAudioSources) { - int ms = (int)(1000 * CMTimeGetSeconds(_indexedAudioSources[_index].bufferedPosition)); - if (ms < 0) ms = 0; - return ms; - } else { - return 0; - } -} - -- (int)getDuration { - if (_processingState == none) { - return -1; - } else if (_indexedAudioSources) { - int v = (int)(1000 * CMTimeGetSeconds(_indexedAudioSources[_index].duration)); - return v; - } else { - return 0; - } -} - -- (void)removeItemObservers:(AVPlayerItem *)playerItem { - [playerItem removeObserver:self forKeyPath:@"status"]; - [playerItem removeObserver:self forKeyPath:@"playbackBufferEmpty"]; - [playerItem removeObserver:self forKeyPath:@"playbackBufferFull"]; - //[playerItem removeObserver:self forKeyPath:@"playbackLikelyToKeepUp"]; - [[NSNotificationCenter defaultCenter] removeObserver:self 
name:AVPlayerItemDidPlayToEndTimeNotification object:playerItem]; - [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemFailedToPlayToEndTimeNotification object:playerItem]; - [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemPlaybackStalledNotification object:playerItem]; -} - -- (void)addItemObservers:(AVPlayerItem *)playerItem { - // Get notified when the item is loaded or had an error loading - [playerItem addObserver:self forKeyPath:@"status" options:NSKeyValueObservingOptionNew context:nil]; - // Get notified of the buffer state - [playerItem addObserver:self forKeyPath:@"playbackBufferEmpty" options:NSKeyValueObservingOptionNew context:nil]; - [playerItem addObserver:self forKeyPath:@"playbackBufferFull" options:NSKeyValueObservingOptionNew context:nil]; - [playerItem addObserver:self forKeyPath:@"loadedTimeRanges" options:NSKeyValueObservingOptionNew context:nil]; - //[playerItem addObserver:self forKeyPath:@"playbackLikelyToKeepUp" options:NSKeyValueObservingOptionNew context:nil]; - // Get notified when playback has reached the end - [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(onComplete:) name:AVPlayerItemDidPlayToEndTimeNotification object:playerItem]; - // Get notified when playback stops due to a failure (currently unused) - [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(onFailToComplete:) name:AVPlayerItemFailedToPlayToEndTimeNotification object:playerItem]; - // Get notified when playback stalls (currently unused) - [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(onItemStalled:) name:AVPlayerItemPlaybackStalledNotification object:playerItem]; -} - -- (NSMutableArray *)decodeAudioSources:(NSArray *)data { - NSMutableArray *array = [[NSMutableArray alloc] init]; - for (int i = 0; i < [data count]; i++) { - AudioSource *source = [self decodeAudioSource:data[i]]; - [array addObject:source]; - } - return array; -} - -- (AudioSource *)decodeAudioSource:(NSDictionary *)data { - NSString *type = data[@"type"]; - if ([@"progressive" isEqualToString:type]) { - return [[UriAudioSource alloc] initWithId:data[@"id"] uri:data[@"uri"]]; - } else if ([@"dash" isEqualToString:type]) { - return [[UriAudioSource alloc] initWithId:data[@"id"] uri:data[@"uri"]]; - } else if ([@"hls" isEqualToString:type]) { - return [[UriAudioSource alloc] initWithId:data[@"id"] uri:data[@"uri"]]; - } else if ([@"concatenating" isEqualToString:type]) { - return [[ConcatenatingAudioSource alloc] initWithId:data[@"id"] - audioSources:[self decodeAudioSources:data[@"audioSources"]]]; - } else if ([@"clipping" isEqualToString:type]) { - return [[ClippingAudioSource alloc] initWithId:data[@"id"] - audioSource:[self decodeAudioSource:data[@"audioSource"]] - start:data[@"start"] - end:data[@"end"]]; - } else if ([@"looping" isEqualToString:type]) { - NSMutableArray *childSources = [NSMutableArray new]; - int count = [data[@"count"] intValue]; - for (int i = 0; i < count; i++) { - [childSources addObject:[self decodeAudioSource:data[@"audioSource"]]]; - } - return [[LoopingAudioSource alloc] initWithId:data[@"id"] audioSources:childSources]; - } else { - return nil; - } -} - -- (void)enqueueFrom:(int)index { - int oldIndex = _index; - _index = index; - - // Update the queue while keeping the currently playing item untouched. 
- - /* NSLog(@"before reorder: _player.items.count: ", _player.items.count); */ - /* [self dumpQueue]; */ - - // First, remove all _player items except for the currently playing one (if any). - IndexedPlayerItem *oldItem = _player.currentItem; - IndexedPlayerItem *existingItem = nil; - NSArray *oldPlayerItems = [NSArray arrayWithArray:_player.items]; - // In the first pass, preserve the old and new items. - for (int i = 0; i < oldPlayerItems.count; i++) { - if (oldPlayerItems[i] == _indexedAudioSources[_index].playerItem) { - // Preserve and tag new item if it is already in the queue. - existingItem = oldPlayerItems[i]; - } else if (oldPlayerItems[i] == oldItem) { - // Temporarily preserve old item, just to avoid jumping to - // intermediate queue positions unnecessarily. We only want to jump - // once to _index. - } else { - [_player removeItem:oldPlayerItems[i]]; - } - } - // In the second pass, remove the old item (if different from new item). - if (_index != oldIndex) { - [_player removeItem:oldItem]; - } - - /* NSLog(@"inter order: _player.items.count: ", _player.items.count); */ - /* [self dumpQueue]; */ - - // Regenerate queue - BOOL include = NO; - for (int i = 0; i < [_order count]; i++) { - int si = [_order[i] intValue]; - if (si == _index) include = YES; - if (include && _indexedAudioSources[si].playerItem != existingItem) { - [_player insertItem:_indexedAudioSources[si].playerItem afterItem:nil]; - } - } - - /* NSLog(@"after reorder: _player.items.count: ", _player.items.count); */ - /* [self dumpQueue]; */ - - if (_processingState != loading && oldItem != _indexedAudioSources[_index].playerItem) { - // || !_player.currentItem.playbackLikelyToKeepUp; - if (_player.currentItem.playbackBufferEmpty) { - [self enterBuffering:@"enqueueFrom playbackBufferEmpty"]; - } else { - [self leaveBuffering:@"enqueueFrom !playbackBufferEmpty"]; - } - [self updatePosition]; - } -} - -- (void)updatePosition { - _updatePosition = [self getCurrentPosition]; - _updateTime = (long long)([[NSDate date] timeIntervalSince1970] * 1000.0); -} - -- (void)load:(NSDictionary *)source result:(FlutterResult)result { - if (!_playing) { - [_player pause]; - } - if (_processingState == loading) { - [self abortExistingConnection]; - } - _loadResult = result; - _index = 0; - [self updatePosition]; - _processingState = loading; - [self broadcastPlaybackEvent]; - // Remove previous observers - if (_indexedAudioSources) { - for (int i = 0; i < [_indexedAudioSources count]; i++) { - [self removeItemObservers:_indexedAudioSources[i].playerItem]; - } - } - // Decode audio source - if (_audioSource && [@"clipping" isEqualToString:source[@"type"]]) { - // Check if we're clipping an audio source that was previously loaded. 
- UriAudioSource *child = nil; - if ([_audioSource isKindOfClass:[ClippingAudioSource class]]) { - ClippingAudioSource *clipper = (ClippingAudioSource *)_audioSource; - child = clipper.audioSource; - } else if ([_audioSource isKindOfClass:[UriAudioSource class]]) { - child = (UriAudioSource *)_audioSource; - } - if (child) { - _audioSource = [[ClippingAudioSource alloc] initWithId:source[@"id"] - audioSource:child - start:source[@"start"] - end:source[@"end"]]; - } else { - _audioSource = [self decodeAudioSource:source]; - } - } else { - _audioSource = [self decodeAudioSource:source]; - } - _indexedAudioSources = [[NSMutableArray alloc] init]; - [_audioSource buildSequence:_indexedAudioSources treeIndex:0]; - for (int i = 0; i < [_indexedAudioSources count]; i++) { - IndexedAudioSource *source = _indexedAudioSources[i]; - [self addItemObservers:source.playerItem]; - source.playerItem.audioSource = source; - } - [self updateOrder]; - // Set up an empty player - if (!_player) { - _player = [[AVQueuePlayer alloc] initWithItems:@[]]; - if (@available(macOS 10.12, iOS 10.0, *)) { - _player.automaticallyWaitsToMinimizeStalling = _automaticallyWaitsToMinimizeStalling; - // TODO: Remove these observers in dispose. - [_player addObserver:self - forKeyPath:@"timeControlStatus" - options:NSKeyValueObservingOptionNew - context:nil]; - } - [_player addObserver:self - forKeyPath:@"currentItem" - options:NSKeyValueObservingOptionNew - context:nil]; - // TODO: learn about the different ways to define weakSelf. - //__weak __typeof__(self) weakSelf = self; - //typeof(self) __weak weakSelf = self; - __unsafe_unretained typeof(self) weakSelf = self; - if (@available(macOS 10.12, iOS 10.0, *)) {} - else { - _timeObserver = [_player addPeriodicTimeObserverForInterval:CMTimeMake(200, 1000) - queue:nil - usingBlock:^(CMTime time) { - [weakSelf checkForDiscontinuity]; - } - ]; - } - } - // Initialise the AVQueuePlayer with items. - [self enqueueFrom:0]; - // Notify each IndexedAudioSource that it's been attached to the player. - for (int i = 0; i < [_indexedAudioSources count]; i++) { - [_indexedAudioSources[i] attach:_player]; - } - - if (_player.currentItem.status == AVPlayerItemStatusReadyToPlay) { - _loadResult(@([self getDuration])); - _loadResult = nil; - } else { - // We send result after the playerItem is ready in observeValueForKeyPath. 
- } - [self broadcastPlaybackEvent]; -} - -- (void)updateOrder { - if (_shuffleModeEnabled) { - [_audioSource shuffle:0 currentIndex: _index]; - } - _orderInv = [NSMutableArray arrayWithCapacity:[_indexedAudioSources count]]; - for (int i = 0; i < [_indexedAudioSources count]; i++) { - [_orderInv addObject:@(0)]; - } - if (_shuffleModeEnabled) { - _order = [_audioSource getShuffleOrder]; - } else { - NSMutableArray *order = [[NSMutableArray alloc] init]; - for (int i = 0; i < [_indexedAudioSources count]; i++) { - [order addObject:@(i)]; - } - _order = order; - } - for (int i = 0; i < [_indexedAudioSources count]; i++) { - _orderInv[[_order[i] intValue]] = @(i); - } -} - -- (void)onItemStalled:(NSNotification *)notification { - IndexedPlayerItem *playerItem = (IndexedPlayerItem *)notification.object; - NSLog(@"onItemStalled"); -} - -- (void)onFailToComplete:(NSNotification *)notification { - IndexedPlayerItem *playerItem = (IndexedPlayerItem *)notification.object; - NSLog(@"onFailToComplete"); -} - -- (void)onComplete:(NSNotification *)notification { - NSLog(@"onComplete"); - if (_loopMode == loopOne) { - [self seek:kCMTimeZero index:@(_index) completionHandler:^(BOOL finished) { - // XXX: Not necessary? - [self play]; - }]; - } else { - IndexedPlayerItem *endedPlayerItem = (IndexedPlayerItem *)notification.object; - IndexedAudioSource *endedSource = endedPlayerItem.audioSource; - // When an item ends, seek back to its beginning. - [endedSource seek:kCMTimeZero]; - - if ([_orderInv[_index] intValue] + 1 < [_order count]) { - // account for automatic move to next item - _index = [_order[[_orderInv[_index] intValue] + 1] intValue]; - NSLog(@"advance to next: index = %d", _index); - [self broadcastPlaybackEvent]; - } else { - // reached end of playlist - if (_loopMode == loopAll) { - NSLog(@"Loop back to first item"); - // Loop back to the beginning - // TODO: Currently there will be a gap at the loop point. - // Maybe we can do something clever by temporarily adding the - // first playlist item at the end of the queue, although this - // will affect any code that assumes the queue always - // corresponds to a contiguous region of the indexed audio - // sources. - // For now we just do a seek back to the start. - if ([_order count] == 1) { - [self seek:kCMTimeZero index:[NSNull null] completionHandler:^(BOOL finished) { - // XXX: Necessary? - [self play]; - }]; - } else { - [self seek:kCMTimeZero index:_order[0] completionHandler:^(BOOL finished) { - // XXX: Necessary? 
- [self play]; - }]; - } - } else { - [self complete]; - } - } - } -} - -- (void)observeValueForKeyPath:(NSString *)keyPath - ofObject:(id)object - change:(NSDictionary *)change - context:(void *)context { - - if ([keyPath isEqualToString:@"status"]) { - IndexedPlayerItem *playerItem = (IndexedPlayerItem *)object; - AVPlayerItemStatus status = AVPlayerItemStatusUnknown; - NSNumber *statusNumber = change[NSKeyValueChangeNewKey]; - if ([statusNumber isKindOfClass:[NSNumber class]]) { - status = statusNumber.intValue; - } - switch (status) { - case AVPlayerItemStatusReadyToPlay: { - if (playerItem != _player.currentItem) return; - // Detect buffering in different ways depending on whether we're playing - if (_playing) { - if (@available(macOS 10.12, iOS 10.0, *)) { - if (_player.timeControlStatus == AVPlayerTimeControlStatusWaitingToPlayAtSpecifiedRate) { - [self enterBuffering:@"ready to play: playing, waitingToPlay"]; - } else { - [self leaveBuffering:@"ready to play: playing, !waitingToPlay"]; - } - [self updatePosition]; - } else { - // If this happens when we're playing, check whether buffer is confirmed - if (_bufferUnconfirmed && !_player.currentItem.playbackBufferFull) { - // Stay in bufering - XXX Test - [self enterBuffering:@"ready to play: playing, bufferUnconfirmed && !playbackBufferFull"]; - } else { - if (_player.currentItem.playbackBufferEmpty) { - // !_player.currentItem.playbackLikelyToKeepUp; - [self enterBuffering:@"ready to play: playing, playbackBufferEmpty"]; - } else { - [self leaveBuffering:@"ready to play: playing, !playbackBufferEmpty"]; - } - [self updatePosition]; - } - } - } else { - if (_player.currentItem.playbackBufferEmpty) { - [self enterBuffering:@"ready to play: !playing, playbackBufferEmpty"]; - // || !_player.currentItem.playbackLikelyToKeepUp; - } else { - [self leaveBuffering:@"ready to play: !playing, !playbackBufferEmpty"]; - } - [self updatePosition]; - } - [self broadcastPlaybackEvent]; - if (_loadResult) { - _loadResult(@([self getDuration])); - _loadResult = nil; - } - break; - } - case AVPlayerItemStatusFailed: { - NSLog(@"AVPlayerItemStatusFailed"); - [self sendErrorForItem:playerItem]; - break; - } - case AVPlayerItemStatusUnknown: - break; - } - } else if ([keyPath isEqualToString:@"playbackBufferEmpty"] || [keyPath isEqualToString:@"playbackBufferFull"]) { - // Use these values to detect buffering. - IndexedPlayerItem *playerItem = (IndexedPlayerItem *)object; - if (playerItem != _player.currentItem) return; - // If there's a seek in progress, these values are unreliable - if (CMTIME_IS_VALID(_seekPos)) return; - // Detect buffering in different ways depending on whether we're playing - if (_playing) { - if (@available(macOS 10.12, iOS 10.0, *)) { - // We handle this with timeControlStatus instead. - } else { - if (_bufferUnconfirmed && playerItem.playbackBufferFull) { - _bufferUnconfirmed = NO; - [self leaveBuffering:@"playing, _bufferUnconfirmed && playbackBufferFull"]; - [self updatePosition]; - NSLog(@"Buffering confirmed! 
leaving buffering"); - [self broadcastPlaybackEvent]; - } - } - } else { - if (playerItem.playbackBufferEmpty) { - [self enterBuffering:@"!playing, playbackBufferEmpty"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - } else if (!playerItem.playbackBufferEmpty || playerItem.playbackBufferFull) { - _processingState = ready; - [self leaveBuffering:@"!playing, !playbackBufferEmpty || playbackBufferFull"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - } - } - /* } else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"]) { */ - } else if ([keyPath isEqualToString:@"timeControlStatus"]) { - if (@available(macOS 10.12, iOS 10.0, *)) { - AVPlayerTimeControlStatus status = AVPlayerTimeControlStatusPaused; - NSNumber *statusNumber = change[NSKeyValueChangeNewKey]; - if ([statusNumber isKindOfClass:[NSNumber class]]) { - status = statusNumber.intValue; - } - switch (status) { - case AVPlayerTimeControlStatusPaused: - //NSLog(@"AVPlayerTimeControlStatusPaused"); - break; - case AVPlayerTimeControlStatusWaitingToPlayAtSpecifiedRate: - //NSLog(@"AVPlayerTimeControlStatusWaitingToPlayAtSpecifiedRate"); - if (_processingState != completed) { - [self enterBuffering:@"timeControlStatus"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - } else { - NSLog(@"Ignoring wait signal because we reached the end"); - } - break; - case AVPlayerTimeControlStatusPlaying: - [self leaveBuffering:@"timeControlStatus"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - break; - } - } - } else if ([keyPath isEqualToString:@"currentItem"] && _player.currentItem) { - if (_player.currentItem.status == AVPlayerItemStatusFailed) { - if ([_orderInv[_index] intValue] + 1 < [_order count]) { - // account for automatic move to next item - _index = [_order[[_orderInv[_index] intValue] + 1] intValue]; - NSLog(@"advance to next on error: index = %d", _index); - [self broadcastPlaybackEvent]; - } else { - NSLog(@"error on last item"); - } - return; - } else { - int expectedIndex = [self indexForItem:_player.currentItem]; - if (_index != expectedIndex) { - // AVQueuePlayer will sometimes skip over error items without - // notifying this observer. - NSLog(@"Queue change detected. Adjusting index from %d -> %d", _index, expectedIndex); - _index = expectedIndex; - [self broadcastPlaybackEvent]; - } - } - //NSLog(@"currentItem changed. _index=%d", _index); - _bufferUnconfirmed = YES; - // If we've skipped or transitioned to a new item and we're not - // currently in the middle of a seek - if (CMTIME_IS_INVALID(_seekPos) && _player.currentItem.status == AVPlayerItemStatusReadyToPlay) { - [self updatePosition]; - IndexedAudioSource *source = ((IndexedPlayerItem *)_player.currentItem).audioSource; - // We should already be at position zero but for - // ClippingAudioSource it might be off by some milliseconds so we - // consider anything <= 100 as close enough. 
- if ((int)(1000 * CMTimeGetSeconds(source.position)) > 100) { - NSLog(@"On currentItem change, seeking back to zero"); - BOOL shouldResumePlayback = NO; - AVPlayerActionAtItemEnd originalEndAction = _player.actionAtItemEnd; - if (_playing && CMTimeGetSeconds(CMTimeSubtract(source.position, source.duration)) >= 0) { - NSLog(@"Need to pause while rewinding because we're at the end"); - shouldResumePlayback = YES; - _player.actionAtItemEnd = AVPlayerActionAtItemEndPause; - [_player pause]; - } - [self enterBuffering:@"currentItem changed, seeking"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - [source seek:kCMTimeZero completionHandler:^(BOOL finished) { - [self leaveBuffering:@"currentItem changed, finished seek"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - if (shouldResumePlayback) { - _player.actionAtItemEnd = originalEndAction; - // TODO: This logic is almost duplicated in seek. See if we can reuse this code. - [_player play]; - } - }]; - } else { - // Already at zero, no need to seek. - } - } - } else if ([keyPath isEqualToString:@"loadedTimeRanges"]) { - IndexedPlayerItem *playerItem = (IndexedPlayerItem *)object; - if (playerItem != _player.currentItem) return; - int pos = [self getBufferedPosition]; - if (pos != _bufferedPosition) { - _bufferedPosition = pos; - [self broadcastPlaybackEvent]; - } - } -} - -- (void)sendErrorForItem:(IndexedPlayerItem *)playerItem { - FlutterError *flutterError = [FlutterError errorWithCode:[NSString stringWithFormat:@"%d", playerItem.error.code] - message:playerItem.error.localizedDescription - details:nil]; - [self sendError:flutterError playerItem:playerItem]; -} - -- (void)sendError:(FlutterError *)flutterError playerItem:(IndexedPlayerItem *)playerItem { - NSLog(@"sendError"); - if (_loadResult && playerItem == _player.currentItem) { - _loadResult(flutterError); - _loadResult = nil; - } - if (_eventSink) { - // Broadcast all errors even if they aren't on the current item. 
- _eventSink(flutterError); - } -} - -- (void)abortExistingConnection { - FlutterError *flutterError = [FlutterError errorWithCode:@"abort" - message:@"Connection aborted" - details:nil]; - [self sendError:flutterError playerItem:nil]; -} - -- (int)indexForItem:(IndexedPlayerItem *)playerItem { - for (int i = 0; i < _indexedAudioSources.count; i++) { - if (_indexedAudioSources[i].playerItem == playerItem) { - return i; - } - } - return -1; -} - -- (void)play { - [self play:nil]; -} - -- (void)play:(FlutterResult)result { - if (result) { - if (_playResult) { - NSLog(@"INTERRUPTING PLAY"); - _playResult(nil); - } - _playResult = result; - } - _playing = YES; -#if TARGET_OS_IPHONE - if (_configuredSession) { - [[AVAudioSession sharedInstance] setActive:YES error:nil]; - } -#endif - [_player play]; - [self updatePosition]; - if (@available(macOS 10.12, iOS 10.0, *)) {} - else { - if (_bufferUnconfirmed && !_player.currentItem.playbackBufferFull) { - [self enterBuffering:@"play, _bufferUnconfirmed && !playbackBufferFull"]; - [self broadcastPlaybackEvent]; - } - } -} - -- (void)pause { - _playing = NO; - [_player pause]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - if (_playResult) { - NSLog(@"PLAY FINISHED DUE TO PAUSE"); - _playResult(nil); - _playResult = nil; - } -} - -- (void)complete { - [self updatePosition]; - _processingState = completed; - [self broadcastPlaybackEvent]; - if (_playResult) { - NSLog(@"PLAY FINISHED DUE TO COMPLETE"); - _playResult(nil); - _playResult = nil; - } -} - -- (void)setVolume:(float)volume { - [_player setVolume:volume]; -} - -- (void)setSpeed:(float)speed { - if (speed == 1.0 - || (speed < 1.0 && _player.currentItem.canPlaySlowForward) - || (speed > 1.0 && _player.currentItem.canPlayFastForward)) { - _player.rate = speed; - } - [self updatePosition]; -} - -- (void)setLoopMode:(int)loopMode { - _loopMode = loopMode; - if (_player) { - switch (_loopMode) { - case loopOne: - _player.actionAtItemEnd = AVPlayerActionAtItemEndPause; // AVPlayerActionAtItemEndNone - break; - default: - _player.actionAtItemEnd = AVPlayerActionAtItemEndAdvance; - } - } -} - -- (void)setShuffleModeEnabled:(BOOL)shuffleModeEnabled { - NSLog(@"setShuffleModeEnabled: %d", shuffleModeEnabled); - _shuffleModeEnabled = shuffleModeEnabled; - if (!_audioSource) return; - - [self updateOrder]; - - [self enqueueFrom:_index]; -} - -- (void)dumpQueue { - for (int i = 0; i < _player.items.count; i++) { - IndexedPlayerItem *playerItem = _player.items[i]; - for (int j = 0; j < _indexedAudioSources.count; j++) { - IndexedAudioSource *source = _indexedAudioSources[j]; - if (source.playerItem == playerItem) { - NSLog(@"- %d", j); - break; - } - } - } -} - -- (void)setAutomaticallyWaitsToMinimizeStalling:(bool)automaticallyWaitsToMinimizeStalling { - _automaticallyWaitsToMinimizeStalling = automaticallyWaitsToMinimizeStalling; - if (@available(macOS 10.12, iOS 10.0, *)) { - if(_player) { - _player.automaticallyWaitsToMinimizeStalling = automaticallyWaitsToMinimizeStalling; - } - } -} - -- (void)seek:(CMTime)position index:(NSNumber *)newIndex completionHandler:(void (^)(BOOL))completionHandler { - int index = _index; - if (newIndex != [NSNull null]) { - index = [newIndex intValue]; - } - if (index != _index) { - // Jump to a new item - /* if (_playing && index == _index + 1) { */ - /* // Special case for jumping to the very next item */ - /* NSLog(@"seek to next item: %d -> %d", _index, index); */ - /* [_indexedAudioSources[_index] seek:kCMTimeZero]; */ - /* _index = index; */ - /* [_player 
advanceToNextItem]; */ - /* [self broadcastPlaybackEvent]; */ - /* } else */ - { - // Jump to a distant item - //NSLog(@"seek# jump to distant item: %d -> %d", _index, index); - if (_playing) { - [_player pause]; - } - [_indexedAudioSources[_index] seek:kCMTimeZero]; - // The "currentItem" key observer will respect that a seek is already in progress - _seekPos = position; - [self updatePosition]; - [self enqueueFrom:index]; - IndexedAudioSource *source = _indexedAudioSources[_index]; - if (abs((int)(1000 * CMTimeGetSeconds(CMTimeSubtract(source.position, position)))) > 100) { - [self enterBuffering:@"seek to index"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - [source seek:position completionHandler:^(BOOL finished) { - if (@available(macOS 10.12, iOS 10.0, *)) { - if (_playing) { - // Handled by timeControlStatus - } else { - if (_bufferUnconfirmed && !_player.currentItem.playbackBufferFull) { - // Stay in buffering - } else if (source.playerItem.status == AVPlayerItemStatusReadyToPlay) { - [self leaveBuffering:@"seek to index finished, (!bufferUnconfirmed || playbackBufferFull) && ready to play"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - } - } - } else { - if (_bufferUnconfirmed && !_player.currentItem.playbackBufferFull) { - // Stay in buffering - } else if (source.playerItem.status == AVPlayerItemStatusReadyToPlay) { - [self leaveBuffering:@"seek to index finished, (!bufferUnconfirmed || playbackBufferFull) && ready to play"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - } - } - if (_playing) { - [_player play]; - } - _seekPos = kCMTimeInvalid; - [self broadcastPlaybackEvent]; - if (completionHandler) { - completionHandler(finished); - } - }]; - } else { - _seekPos = kCMTimeInvalid; - if (_playing) { - [_player play]; - } - } - } - } else { - // Seek within an item - if (_playing) { - [_player pause]; - } - _seekPos = position; - //NSLog(@"seek. enter buffering. pos = %d", (int)(1000*CMTimeGetSeconds(_indexedAudioSources[_index].position))); - // TODO: Move this into a separate method so it can also - // be used in skip. - [self enterBuffering:@"seek"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - [_indexedAudioSources[_index] seek:position completionHandler:^(BOOL finished) { - [self updatePosition]; - if (_playing) { - // If playing, buffering will be detected either by: - // 1. checkForDiscontinuity - // 2. timeControlStatus - [_player play]; - } else { - // If not playing, there is no reliable way to detect - // when buffering has completed, so we use - // !playbackBufferEmpty. Although this always seems to - // be full even right after a seek. 
- if (_player.currentItem.playbackBufferEmpty) { - [self enterBuffering:@"seek finished, playbackBufferEmpty"]; - } else { - [self leaveBuffering:@"seek finished, !playbackBufferEmpty"]; - } - [self updatePosition]; - if (_processingState != buffering) { - [self broadcastPlaybackEvent]; - } - } - _seekPos = kCMTimeInvalid; - [self broadcastPlaybackEvent]; - if (completionHandler) { - completionHandler(finished); - } - }]; - } -} - -- (void)dispose { - if (_processingState != none) { - [_player pause]; - _processingState = none; - [self broadcastPlaybackEvent]; - } - if (_timeObserver) { - [_player removeTimeObserver:_timeObserver]; - _timeObserver = 0; - } - if (_indexedAudioSources) { - for (int i = 0; i < [_indexedAudioSources count]; i++) { - [self removeItemObservers:_indexedAudioSources[i].playerItem]; - } - } - if (_player) { - [_player removeObserver:self forKeyPath:@"currentItem"]; - if (@available(macOS 10.12, iOS 10.0, *)) { - [_player removeObserver:self forKeyPath:@"timeControlStatus"]; - } - _player = nil; - } - // Untested: - // [_eventChannel setStreamHandler:nil]; - // [_methodChannel setMethodHandler:nil]; -} - -@end diff --git a/ios/Classes/AudioPlayer.m b/ios/Classes/AudioPlayer.m new file mode 120000 index 0000000..596ca1d --- /dev/null +++ b/ios/Classes/AudioPlayer.m @@ -0,0 +1 @@ +../../darwin/Classes/AudioPlayer.m \ No newline at end of file diff --git a/ios/Classes/AudioSource.m b/ios/Classes/AudioSource.m deleted file mode 100644 index 81534f1..0000000 --- a/ios/Classes/AudioSource.m +++ /dev/null @@ -1,37 +0,0 @@ -#import "AudioSource.h" -#import - -@implementation AudioSource { - NSString *_sourceId; -} - -- (instancetype)initWithId:(NSString *)sid { - self = [super init]; - NSAssert(self, @"super init cannot be nil"); - _sourceId = sid; - return self; -} - -- (NSString *)sourceId { - return _sourceId; -} - -- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex { - return 0; -} - -- (void)findById:(NSString *)sourceId matches:(NSMutableArray *)matches { - if ([_sourceId isEqualToString:sourceId]) { - [matches addObject:self]; - } -} - -- (NSArray *)getShuffleOrder { - return @[]; -} - -- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex { - return 0; -} - -@end diff --git a/ios/Classes/AudioSource.m b/ios/Classes/AudioSource.m new file mode 120000 index 0000000..16881d6 --- /dev/null +++ b/ios/Classes/AudioSource.m @@ -0,0 +1 @@ +../../darwin/Classes/AudioSource.m \ No newline at end of file diff --git a/ios/Classes/ClippingAudioSource.m b/ios/Classes/ClippingAudioSource.m deleted file mode 100644 index 2f3b174..0000000 --- a/ios/Classes/ClippingAudioSource.m +++ /dev/null @@ -1,79 +0,0 @@ -#import "AudioSource.h" -#import "ClippingAudioSource.h" -#import "IndexedPlayerItem.h" -#import "UriAudioSource.h" -#import - -@implementation ClippingAudioSource { - UriAudioSource *_audioSource; - CMTime _start; - CMTime _end; -} - -- (instancetype)initWithId:(NSString *)sid audioSource:(UriAudioSource *)audioSource start:(NSNumber *)start end:(NSNumber *)end { - self = [super initWithId:sid]; - NSAssert(self, @"super init cannot be nil"); - _audioSource = audioSource; - _start = start == [NSNull null] ? kCMTimeZero : CMTimeMake([start intValue], 1000); - _end = end == [NSNull null] ? 
kCMTimeInvalid : CMTimeMake([end intValue], 1000); - return self; -} - -- (UriAudioSource *)audioSource { - return _audioSource; -} - -- (void)findById:(NSString *)sourceId matches:(NSMutableArray *)matches { - [super findById:sourceId matches:matches]; - [_audioSource findById:sourceId matches:matches]; -} - -- (void)attach:(AVQueuePlayer *)player { - [super attach:player]; - _audioSource.playerItem.forwardPlaybackEndTime = _end; - // XXX: Not needed since currentItem observer handles it? - [self seek:kCMTimeZero]; -} - -- (IndexedPlayerItem *)playerItem { - return _audioSource.playerItem; -} - -- (NSArray *)getShuffleOrder { - return @[@(0)]; -} - -- (void)play:(AVQueuePlayer *)player { -} - -- (void)pause:(AVQueuePlayer *)player { -} - -- (void)stop:(AVQueuePlayer *)player { -} - -- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler { - if (!completionHandler || (self.playerItem.status == AVPlayerItemStatusReadyToPlay)) { - CMTime absPosition = CMTimeAdd(_start, position); - [_audioSource.playerItem seekToTime:absPosition toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero completionHandler:completionHandler]; - } -} - -- (CMTime)duration { - return CMTimeSubtract(CMTIME_IS_INVALID(_end) ? self.playerItem.duration : _end, _start); -} - -- (void)setDuration:(CMTime)duration { -} - -- (CMTime)position { - return CMTimeSubtract(self.playerItem.currentTime, _start); -} - -- (CMTime)bufferedPosition { - CMTime pos = CMTimeSubtract(_audioSource.bufferedPosition, _start); - CMTime dur = [self duration]; - return CMTimeCompare(pos, dur) >= 0 ? dur : pos; -} - -@end diff --git a/ios/Classes/ClippingAudioSource.m b/ios/Classes/ClippingAudioSource.m new file mode 120000 index 0000000..d561b1e --- /dev/null +++ b/ios/Classes/ClippingAudioSource.m @@ -0,0 +1 @@ +../../darwin/Classes/ClippingAudioSource.m \ No newline at end of file diff --git a/ios/Classes/ConcatenatingAudioSource.m b/ios/Classes/ConcatenatingAudioSource.m deleted file mode 100644 index bd7b713..0000000 --- a/ios/Classes/ConcatenatingAudioSource.m +++ /dev/null @@ -1,109 +0,0 @@ -#import "AudioSource.h" -#import "ConcatenatingAudioSource.h" -#import -#import - -@implementation ConcatenatingAudioSource { - NSMutableArray *_audioSources; - NSMutableArray *_shuffleOrder; -} - -- (instancetype)initWithId:(NSString *)sid audioSources:(NSMutableArray *)audioSources { - self = [super initWithId:sid]; - NSAssert(self, @"super init cannot be nil"); - _audioSources = audioSources; - return self; -} - -- (int)count { - return _audioSources.count; -} - -- (void)insertSource:(AudioSource *)audioSource atIndex:(int)index { - [_audioSources insertObject:audioSource atIndex:index]; -} - -- (void)removeSourcesFromIndex:(int)start toIndex:(int)end { - if (end == -1) end = _audioSources.count; - for (int i = start; i < end; i++) { - [_audioSources removeObjectAtIndex:start]; - } -} - -- (void)moveSourceFromIndex:(int)currentIndex toIndex:(int)newIndex { - AudioSource *source = _audioSources[currentIndex]; - [_audioSources removeObjectAtIndex:currentIndex]; - [_audioSources insertObject:source atIndex:newIndex]; -} - -- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex { - for (int i = 0; i < [_audioSources count]; i++) { - treeIndex = [_audioSources[i] buildSequence:sequence treeIndex:treeIndex]; - } - return treeIndex; -} - -- (void)findById:(NSString *)sourceId matches:(NSMutableArray *)matches { - [super findById:sourceId matches:matches]; - for (int i = 0; i < [_audioSources count]; i++) { - 
[_audioSources[i] findById:sourceId matches:matches]; - } -} - -- (NSArray *)getShuffleOrder { - NSMutableArray *order = [NSMutableArray new]; - int offset = [order count]; - NSMutableArray *childOrders = [NSMutableArray new]; // array of array of ints - for (int i = 0; i < [_audioSources count]; i++) { - AudioSource *audioSource = _audioSources[i]; - NSArray *childShuffleOrder = [audioSource getShuffleOrder]; - NSMutableArray *offsetChildShuffleOrder = [NSMutableArray new]; - for (int j = 0; j < [childShuffleOrder count]; j++) { - [offsetChildShuffleOrder addObject:@([childShuffleOrder[j] integerValue] + offset)]; - } - [childOrders addObject:offsetChildShuffleOrder]; - offset += [childShuffleOrder count]; - } - for (int i = 0; i < [_audioSources count]; i++) { - [order addObjectsFromArray:childOrders[[_shuffleOrder[i] integerValue]]]; - } - return order; -} - -- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex { - int currentChildIndex = -1; - for (int i = 0; i < [_audioSources count]; i++) { - int indexBefore = treeIndex; - AudioSource *child = _audioSources[i]; - treeIndex = [child shuffle:treeIndex currentIndex:currentIndex]; - if (currentIndex >= indexBefore && currentIndex < treeIndex) { - currentChildIndex = i; - } else {} - } - // Shuffle so that the current child is first in the shuffle order - _shuffleOrder = [NSMutableArray arrayWithCapacity:[_audioSources count]]; - for (int i = 0; i < [_audioSources count]; i++) { - [_shuffleOrder addObject:@(0)]; - } - NSLog(@"shuffle: audioSources.count=%d and shuffleOrder.count=%d", [_audioSources count], [_shuffleOrder count]); - // First generate a random shuffle - for (int i = 0; i < [_audioSources count]; i++) { - int j = arc4random_uniform(i + 1); - _shuffleOrder[i] = _shuffleOrder[j]; - _shuffleOrder[j] = @(i); - } - // Then bring currentIndex to the front - if (currentChildIndex != -1) { - for (int i = 1; i < [_audioSources count]; i++) { - if ([_shuffleOrder[i] integerValue] == currentChildIndex) { - NSNumber *v = _shuffleOrder[0]; - _shuffleOrder[0] = _shuffleOrder[i]; - _shuffleOrder[i] = v; - break; - } - } - } - return treeIndex; -} - -@end diff --git a/ios/Classes/ConcatenatingAudioSource.m b/ios/Classes/ConcatenatingAudioSource.m new file mode 120000 index 0000000..1e2adbb --- /dev/null +++ b/ios/Classes/ConcatenatingAudioSource.m @@ -0,0 +1 @@ +../../darwin/Classes/ConcatenatingAudioSource.m \ No newline at end of file diff --git a/ios/Classes/IndexedAudioSource.m b/ios/Classes/IndexedAudioSource.m deleted file mode 100644 index 316f900..0000000 --- a/ios/Classes/IndexedAudioSource.m +++ /dev/null @@ -1,68 +0,0 @@ -#import "IndexedAudioSource.h" -#import "IndexedPlayerItem.h" -#import - -@implementation IndexedAudioSource { - BOOL _isAttached; -} - -- (instancetype)initWithId:(NSString *)sid { - self = [super init]; - NSAssert(self, @"super init cannot be nil"); - _isAttached = NO; - return self; -} - -- (IndexedPlayerItem *)playerItem { - return nil; -} - -- (BOOL)isAttached { - return _isAttached; -} - -- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex { - [sequence addObject:self]; - return treeIndex + 1; -} - -- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex { - return treeIndex + 1; -} - -- (void)attach:(AVQueuePlayer *)player { - _isAttached = YES; -} - -- (void)play:(AVQueuePlayer *)player { -} - -- (void)pause:(AVQueuePlayer *)player { -} - -- (void)stop:(AVQueuePlayer *)player { -} - -- (void)seek:(CMTime)position { - [self seek:position completionHandler:nil]; -} - -- 
(void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler { -} - -- (CMTime)duration { - return kCMTimeInvalid; -} - -- (void)setDuration:(CMTime)duration { -} - -- (CMTime)position { - return kCMTimeInvalid; -} - -- (CMTime)bufferedPosition { - return kCMTimeInvalid; -} - -@end diff --git a/ios/Classes/IndexedAudioSource.m b/ios/Classes/IndexedAudioSource.m new file mode 120000 index 0000000..051d504 --- /dev/null +++ b/ios/Classes/IndexedAudioSource.m @@ -0,0 +1 @@ +../../darwin/Classes/IndexedAudioSource.m \ No newline at end of file diff --git a/ios/Classes/IndexedPlayerItem.m b/ios/Classes/IndexedPlayerItem.m deleted file mode 100644 index 87fafe0..0000000 --- a/ios/Classes/IndexedPlayerItem.m +++ /dev/null @@ -1,16 +0,0 @@ -#import "IndexedPlayerItem.h" -#import "IndexedAudioSource.h" - -@implementation IndexedPlayerItem { - IndexedAudioSource *_audioSource; -} - --(void)setAudioSource:(IndexedAudioSource *)audioSource { - _audioSource = audioSource; -} - --(IndexedAudioSource *)audioSource { - return _audioSource; -} - -@end diff --git a/ios/Classes/IndexedPlayerItem.m b/ios/Classes/IndexedPlayerItem.m new file mode 120000 index 0000000..04e55fc --- /dev/null +++ b/ios/Classes/IndexedPlayerItem.m @@ -0,0 +1 @@ +../../darwin/Classes/IndexedPlayerItem.m \ No newline at end of file diff --git a/ios/Classes/JustAudioPlugin.m b/ios/Classes/JustAudioPlugin.m deleted file mode 100644 index 982a260..0000000 --- a/ios/Classes/JustAudioPlugin.m +++ /dev/null @@ -1,55 +0,0 @@ -#import "JustAudioPlugin.h" -#import "AudioPlayer.h" -#import -#include - -@implementation JustAudioPlugin { - NSObject* _registrar; - BOOL _configuredSession; -} - -+ (void)registerWithRegistrar:(NSObject*)registrar { - FlutterMethodChannel* channel = [FlutterMethodChannel - methodChannelWithName:@"com.ryanheise.just_audio.methods" - binaryMessenger:[registrar messenger]]; - JustAudioPlugin* instance = [[JustAudioPlugin alloc] initWithRegistrar:registrar]; - [registrar addMethodCallDelegate:instance channel:channel]; -} - -- (instancetype)initWithRegistrar:(NSObject *)registrar { - self = [super init]; - NSAssert(self, @"super init cannot be nil"); - _registrar = registrar; - return self; -} - -- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result { - if ([@"init" isEqualToString:call.method]) { - NSArray* args = (NSArray*)call.arguments; - NSString* playerId = args[0]; - /*AudioPlayer* player =*/ [[AudioPlayer alloc] initWithRegistrar:_registrar playerId:playerId configuredSession:_configuredSession]; - result(nil); - } else if ([@"setIosCategory" isEqualToString:call.method]) { -#if TARGET_OS_IPHONE - NSNumber* categoryIndex = (NSNumber*)call.arguments; - AVAudioSessionCategory category = nil; - switch (categoryIndex.integerValue) { - case 0: category = AVAudioSessionCategoryAmbient; break; - case 1: category = AVAudioSessionCategorySoloAmbient; break; - case 2: category = AVAudioSessionCategoryPlayback; break; - case 3: category = AVAudioSessionCategoryRecord; break; - case 4: category = AVAudioSessionCategoryPlayAndRecord; break; - case 5: category = AVAudioSessionCategoryMultiRoute; break; - } - if (category) { - _configuredSession = YES; - } - [[AVAudioSession sharedInstance] setCategory:category error:nil]; -#endif - result(nil); - } else { - result(FlutterMethodNotImplemented); - } -} - -@end diff --git a/ios/Classes/JustAudioPlugin.m b/ios/Classes/JustAudioPlugin.m new file mode 120000 index 0000000..8583f76 --- /dev/null +++ b/ios/Classes/JustAudioPlugin.m @@ -0,0 
+1 @@ +../../darwin/Classes/JustAudioPlugin.m \ No newline at end of file diff --git a/ios/Classes/LoopingAudioSource.m b/ios/Classes/LoopingAudioSource.m deleted file mode 100644 index ba4b52b..0000000 --- a/ios/Classes/LoopingAudioSource.m +++ /dev/null @@ -1,53 +0,0 @@ -#import "AudioSource.h" -#import "LoopingAudioSource.h" -#import - -@implementation LoopingAudioSource { - // An array of duplicates - NSArray *_audioSources; // -} - -- (instancetype)initWithId:(NSString *)sid audioSources:(NSArray *)audioSources { - self = [super initWithId:sid]; - NSAssert(self, @"super init cannot be nil"); - _audioSources = audioSources; - return self; -} - -- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex { - for (int i = 0; i < [_audioSources count]; i++) { - treeIndex = [_audioSources[i] buildSequence:sequence treeIndex:treeIndex]; - } - return treeIndex; -} - -- (void)findById:(NSString *)sourceId matches:(NSMutableArray *)matches { - [super findById:sourceId matches:matches]; - for (int i = 0; i < [_audioSources count]; i++) { - [_audioSources[i] findById:sourceId matches:matches]; - } -} - -- (NSArray *)getShuffleOrder { - NSMutableArray *order = [NSMutableArray new]; - int offset = (int)[order count]; - for (int i = 0; i < [_audioSources count]; i++) { - AudioSource *audioSource = _audioSources[i]; - NSArray *childShuffleOrder = [audioSource getShuffleOrder]; - for (int j = 0; j < [childShuffleOrder count]; j++) { - [order addObject:@([childShuffleOrder[j] integerValue] + offset)]; - } - offset += [childShuffleOrder count]; - } - return order; -} - -- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex { - // TODO: This should probably shuffle the same way on all duplicates. - for (int i = 0; i < [_audioSources count]; i++) { - treeIndex = [_audioSources[i] shuffle:treeIndex currentIndex:currentIndex]; - } - return treeIndex; -} - -@end diff --git a/ios/Classes/LoopingAudioSource.m b/ios/Classes/LoopingAudioSource.m new file mode 120000 index 0000000..17c7958 --- /dev/null +++ b/ios/Classes/LoopingAudioSource.m @@ -0,0 +1 @@ +../../darwin/Classes/LoopingAudioSource.m \ No newline at end of file diff --git a/ios/Classes/UriAudioSource.m b/ios/Classes/UriAudioSource.m deleted file mode 100644 index 91321d4..0000000 --- a/ios/Classes/UriAudioSource.m +++ /dev/null @@ -1,79 +0,0 @@ -#import "UriAudioSource.h" -#import "IndexedAudioSource.h" -#import "IndexedPlayerItem.h" -#import - -@implementation UriAudioSource { - NSString *_uri; - IndexedPlayerItem *_playerItem; - /* CMTime _duration; */ -} - -- (instancetype)initWithId:(NSString *)sid uri:(NSString *)uri { - self = [super initWithId:sid]; - NSAssert(self, @"super init cannot be nil"); - _uri = uri; - if ([_uri hasPrefix:@"file://"]) { - _playerItem = [[IndexedPlayerItem alloc] initWithURL:[NSURL fileURLWithPath:[_uri substringFromIndex:7]]]; - } else { - _playerItem = [[IndexedPlayerItem alloc] initWithURL:[NSURL URLWithString:_uri]]; - } - if (@available(macOS 10.13, iOS 11.0, *)) { - // This does the best at reducing distortion on voice with speeds below 1.0 - _playerItem.audioTimePitchAlgorithm = AVAudioTimePitchAlgorithmTimeDomain; - } - /* NSKeyValueObservingOptions options = */ - /* NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew; */ - /* [_playerItem addObserver:self */ - /* forKeyPath:@"duration" */ - /* options:options */ - /* context:nil]; */ - return self; -} - -- (IndexedPlayerItem *)playerItem { - return _playerItem; -} - -- (NSArray *)getShuffleOrder { - return @[@(0)]; -} - -- 
(void)play:(AVQueuePlayer *)player { -} - -- (void)pause:(AVQueuePlayer *)player { -} - -- (void)stop:(AVQueuePlayer *)player { -} - -- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler { - if (!completionHandler || (_playerItem.status == AVPlayerItemStatusReadyToPlay)) { - [_playerItem seekToTime:position toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero completionHandler:completionHandler]; - } -} - -- (CMTime)duration { - return _playerItem.duration; -} - -- (void)setDuration:(CMTime)duration { -} - -- (CMTime)position { - return _playerItem.currentTime; -} - -- (CMTime)bufferedPosition { - NSValue *last = _playerItem.loadedTimeRanges.lastObject; - if (last) { - CMTimeRange timeRange = [last CMTimeRangeValue]; - return CMTimeAdd(timeRange.start, timeRange.duration); - } else { - return _playerItem.currentTime; - } - return kCMTimeInvalid; -} - -@end diff --git a/ios/Classes/UriAudioSource.m b/ios/Classes/UriAudioSource.m new file mode 120000 index 0000000..8effbd7 --- /dev/null +++ b/ios/Classes/UriAudioSource.m @@ -0,0 +1 @@ +../../darwin/Classes/UriAudioSource.m \ No newline at end of file diff --git a/lib/just_audio.dart b/lib/just_audio.dart index 4c7f813..9c50c11 100644 --- a/lib/just_audio.dart +++ b/lib/just_audio.dart @@ -1,6 +1,8 @@ import 'dart:async'; import 'dart:io'; +import 'dart:math'; +import 'package:audio_session/audio_session.dart'; import 'package:flutter/foundation.dart'; import 'package:flutter/services.dart'; import 'package:flutter/widgets.dart'; @@ -37,26 +39,6 @@ class AudioPlayer { return MethodChannel('com.ryanheise.just_audio.methods.$id'); } - /// Configure the audio session category on iOS. This method should be called - /// before playing any audio. It has no effect on Android or Flutter for Web. - /// - /// Note that the default category on iOS is [IosCategory.soloAmbient], but - /// for a typical media app, Apple recommends setting this to - /// [IosCategory.playback]. If you don't call this method, `just_audio` will - /// respect any prior category that was already set on your app's audio - /// session and will leave it alone. If it hasn't been previously set, this - /// will be [IosCategory.soloAmbient]. But if another audio plugin in your - /// app has configured a particular category, that will also be left alone. - /// - /// Note: If you use other audio plugins in conjunction with this one, it is - /// possible that each of those audio plugins may override the setting you - /// choose here. (You may consider asking the developers of the other plugins - /// to provide similar configurability so that you have complete control over - /// setting the overall category that you want for your app.) 
- static Future setIosCategory(IosCategory category) async { - await _mainChannel.invokeMethod('setIosCategory', category.index); - } - final Future _channel; final String _id; _ProxyHttpServer _proxy; @@ -76,16 +58,27 @@ class AudioPlayer { final _bufferedPositionSubject = BehaviorSubject(); final _icyMetadataSubject = BehaviorSubject(); final _playerStateSubject = BehaviorSubject(); + final _sequenceSubject = BehaviorSubject>(); final _currentIndexSubject = BehaviorSubject(); + final _sequenceStateSubject = BehaviorSubject(); final _loopModeSubject = BehaviorSubject(); final _shuffleModeEnabledSubject = BehaviorSubject(); + final _androidAudioSessionIdSubject = BehaviorSubject(); BehaviorSubject _positionSubject; bool _automaticallyWaitsToMinimizeStalling = true; + bool _playInterrupted = false; - /// Creates an [AudioPlayer]. - factory AudioPlayer() => AudioPlayer._internal(_uuid.v4()); + /// Creates an [AudioPlayer]. The player will automatically pause/duck and + /// resume/unduck when audio interruptions occur (e.g. a phone call) or when + /// headphones are unplugged. If you wish to handle audio interruptions + /// manually, set [handleInterruptions] to `false` and interface directly + /// with the audio session via the + /// [audio_session](https://pub.dev/packages/audio_session) package. + factory AudioPlayer({bool handleInterruptions = true}) => + AudioPlayer._internal(_uuid.v4(), handleInterruptions); - AudioPlayer._internal(this._id) : _channel = _init(_id) { + AudioPlayer._internal(this._id, bool handleInterruptions) + : _channel = _init(_id) { _playbackEvent = PlaybackEvent( processingState: ProcessingState.none, updatePosition: Duration.zero, @@ -94,6 +87,8 @@ class AudioPlayer { duration: null, icyMetadata: null, currentIndex: null, + androidAudioSessionId: null, + qualityString: '' ); _playbackEventSubject.add(_playbackEvent); _eventChannelStream = EventChannel('com.ryanheise.just_audio.events.$_id') @@ -118,6 +113,7 @@ class AudioPlayer { ? 
null : IcyMetadata.fromJson(data['icyMetadata']), currentIndex: data['currentIndex'], + androidAudioSessionId: data['androidAudioSessionId'], qualityString: data['qualityString'] ); //print("created event object with state: ${_playbackEvent.state}"); @@ -128,10 +124,6 @@ class AudioPlayer { rethrow; } }); - _eventChannelStreamSubscription = _eventChannelStream.listen( - _playbackEventSubject.add, - onError: _playbackEventSubject.addError, - ); _processingStateSubject.addStream(playbackEventStream .map((event) => event.processingState) .distinct() @@ -148,6 +140,21 @@ class AudioPlayer { .map((event) => event.currentIndex) .distinct() .handleError((err, stack) {/* noop */})); + _androidAudioSessionIdSubject.addStream(playbackEventStream + .map((event) => event.androidAudioSessionId) + .distinct() + .handleError((err, stack) {/* noop */})); + _sequenceStateSubject.addStream( + Rx.combineLatest2, int, SequenceState>( + sequenceStream, + currentIndexStream, + (sequence, currentIndex) { + if (sequence == null) return null; + if (currentIndex == null) currentIndex = 0; + currentIndex = min(sequence.length - 1, max(0, currentIndex)); + return SequenceState(sequence, currentIndex); + }, + ).distinct().handleError((err, stack) {/* noop */})); _playerStateSubject.addStream( Rx.combineLatest2( playingStream, @@ -155,6 +162,62 @@ class AudioPlayer { (playing, event) => PlayerState(playing, event.processingState)) .distinct() .handleError((err, stack) {/* noop */})); + _eventChannelStreamSubscription = _eventChannelStream.listen( + _playbackEventSubject.add, + onError: _playbackEventSubject.addError, + ); + _sequenceSubject.add(null); + // Respond to changes to AndroidAudioAttributes configuration. + AudioSession.instance.then((audioSession) { + audioSession.configurationStream + .map((conf) => conf?.androidAudioAttributes) + .where((attributes) => attributes != null) + .distinct() + .listen(setAndroidAudioAttributes); + }); + if (handleInterruptions) { + AudioSession.instance.then((session) { + session.becomingNoisyEventStream.listen((_) { + pause(); + }); + session.interruptionEventStream.listen((event) { + if (event.begin) { + switch (event.type) { + case AudioInterruptionType.duck: + if (session.androidAudioAttributes.usage == + AndroidAudioUsage.game) { + setVolume(volume / 2); + } + _playInterrupted = false; + break; + case AudioInterruptionType.pause: + case AudioInterruptionType.unknown: + if (playing) { + pause(); + // Although pause is async and sets _playInterrupted = false, + // this is done in the sync portion. + _playInterrupted = true; + } + break; + } + } else { + switch (event.type) { + case AudioInterruptionType.duck: + setVolume(min(1.0, volume * 2)); + _playInterrupted = false; + break; + case AudioInterruptionType.pause: + if (_playInterrupted) play(); + _playInterrupted = false; + break; + case AudioInterruptionType.unknown: + _playInterrupted = false; + break; + } + } + }); + }); + } } /// The latest [PlaybackEvent]. @@ -217,17 +280,31 @@ class AudioPlayer { /// A stream of [PlayerState]s. Stream get playerStateStream => _playerStateSubject.stream; + /// The current sequence of indexed audio sources. + List get sequence => _sequenceSubject.value; + + /// A stream broadcasting the current sequence of indexed audio sources. + Stream> get sequenceStream => + _sequenceSubject.stream; + /// The index of the current item. int get currentIndex => _currentIndexSubject.value; /// A stream broadcasting the current item. 
Stream get currentIndexStream => _currentIndexSubject.stream; + /// The current [SequenceState], or `null` if either [sequence]] or + /// [currentIndex] is `null`. + SequenceState get sequenceState => _sequenceStateSubject.value; + + /// A stream broadcasting the current [SequenceState]. + Stream get sequenceStateStream => _sequenceStateSubject.stream; + /// Whether there is another item after the current index. bool get hasNext => _audioSource != null && currentIndex != null && - currentIndex + 1 < _audioSource.sequence.length; + currentIndex + 1 < sequence.length; /// Whether there is another item before the current index. bool get hasPrevious => @@ -246,6 +323,13 @@ class AudioPlayer { Stream get shuffleModeEnabledStream => _shuffleModeEnabledSubject.stream; + /// The current Android AudioSession ID or `null` if not set. + int get androidAudioSessionId => _playbackEvent.androidAudioSessionId; + + /// Broadcasts the current Android AudioSession ID or `null` if not set. + Stream get androidAudioSessionIdStream => + _androidAudioSessionIdSubject.stream; + /// Whether the player should automatically delay playback in order to /// minimize stalling. (iOS 10.0 or later only) bool get automaticallyWaitsToMinimizeStalling => @@ -324,6 +408,7 @@ class AudioPlayer { timer.cancel(); durationSubscription?.cancel(); playbackEventSubscription?.cancel(); + // This will in turn close _positionSubject. controller.close(); return; } @@ -363,10 +448,10 @@ class AudioPlayer { /// Convenience method to load audio from an asset, equivalent to: /// /// ``` - /// load(AudioSource.uri(Uri.parse('asset://$filePath'))); + /// load(AudioSource.uri(Uri.parse('asset:///$assetPath'))); /// ``` Future setAsset(String assetPath) => - load(AudioSource.uri(Uri.parse('asset://$assetPath'))); + load(AudioSource.uri(Uri.parse('asset:///$assetPath'))); /// Loads audio from an [AudioSource] and completes when the audio is ready /// to play with the duration of that audio, or null if the duration is unknown. @@ -379,6 +464,7 @@ class AudioPlayer { Future load(AudioSource source) async { try { _audioSource = source; + _broadcastSequence(); final duration = await _load(source); // Wait for loading state to pass. await processingStateStream @@ -386,11 +472,14 @@ class AudioPlayer { return duration; } catch (e) { _audioSource = null; - _audioSources.clear(); rethrow; } } + void _broadcastSequence() { + _sequenceSubject.add(_audioSource?.sequence); + } + _registerAudioSource(AudioSource source) { _audioSources[source._id] = source; } @@ -453,16 +542,24 @@ class AudioPlayer { /// [stop] playback on completion, you can call either method as soon as /// [processingState] becomes [ProcessingState.completed] by listening to /// [processingStateStream]. + /// + /// This method activates the audio session before playback, and will do + /// nothing if activation of the audio session fails for any reason. Future play() async { if (playing) return; - _playingSubject.add(true); - await _invokeMethod('play'); + _playInterrupted = false; + final audioSession = await AudioSession.instance; + if (await audioSession.setActive(true)) { + _playingSubject.add(true); + await _invokeMethod('play'); + } } /// Pauses the currently playing media. This method does nothing if /// ![playing]. Future pause() async { if (!playing) return; + _playInterrupted = false; // Update local state immediately so that queries aren't surprised. 
_playbackEvent = _playbackEvent.copyWith( updatePosition: position, @@ -558,6 +655,15 @@ class AudioPlayer { } } + /// Set the Android audio attributes for this player. Has no effect on other + /// platforms. This will cause a new Android AudioSession ID to be generated. + Future setAndroidAudioAttributes( + AndroidAudioAttributes audioAttributes) async { + if (audioAttributes == null) return; + await _invokeMethod( + 'setAndroidAudioAttributes', [audioAttributes.toJson()]); + } + /// Release all resources associated with this player. You must invoke this /// after you are done with the player. Future dispose() async { @@ -573,9 +679,7 @@ class AudioPlayer { await _playingSubject.close(); await _volumeSubject.close(); await _speedSubject.close(); - if (_positionSubject != null) { - await _positionSubject.close(); - } + await _sequenceSubject.close(); } Future _invokeMethod(String method, [dynamic args]) async => @@ -636,8 +740,10 @@ class PlaybackEvent { /// The index of the currently playing item. final int currentIndex; - /// Quality info of current track - final String qualityString; + /// The current Android AudioSession ID. + final int androidAudioSessionId; + + String qualityString; PlaybackEvent({ @required this.processingState, @@ -647,6 +753,7 @@ class PlaybackEvent { @required this.duration, @required this.icyMetadata, @required this.currentIndex, + @required this.androidAudioSessionId, this.qualityString }); @@ -659,7 +766,7 @@ class PlaybackEvent { Duration duration, IcyMetadata icyMetadata, UriAudioSource currentIndex, - String qualityString + int androidAudioSessionId, }) => PlaybackEvent( processingState: processingState ?? this.processingState, @@ -669,7 +776,9 @@ class PlaybackEvent { duration: duration ?? this.duration, icyMetadata: icyMetadata ?? this.icyMetadata, currentIndex: currentIndex ?? this.currentIndex, - qualityString: qualityString ?? this.qualityString + androidAudioSessionId: + androidAudioSessionId ?? this.androidAudioSessionId, + qualityString: this.qualityString ); @override @@ -787,7 +896,12 @@ class IcyMetadata { IcyMetadata({@required this.info, @required this.headers}); IcyMetadata.fromJson(Map json) - : this(info: json['info'], headers: json['headers']); + : this( + info: json['info'] == null ? null : IcyInfo.fromJson(json['info']), + headers: json['headers'] == null + ? null + : IcyHeaders.fromJson(json['headers']), + ); @override int get hashCode => info.hashCode ^ headers.hashCode; @@ -797,15 +911,21 @@ class IcyMetadata { other is IcyMetadata && other?.info == info && other?.headers == headers; } -/// The audio session categories on iOS, to be used with -/// [AudioPlayer.setIosCategory]. -enum IosCategory { - ambient, - soloAmbient, - playback, - record, - playAndRecord, - multiRoute, +/// Encapsulates the [sequence] and [currentIndex] state and ensures +/// consistency such that [currentIndex] is within the range of +/// [sequence.length]. If [sequence.length] is 0, then [currentIndex] is also +/// 0. +class SequenceState { + /// The sequence of the current [AudioSource]. + final List sequence; + + /// The index of the current source in the sequence. + final int currentIndex; + + SequenceState(this.sequence, this.currentIndex); + + /// The current source in the sequence. + IndexedAudioSource get currentSource => sequence[currentIndex]; } /// A local proxy HTTP server for making remote GET requests with headers. @@ -944,7 +1064,7 @@ abstract class AudioSource { /// stream type on Android. 
If you know in advance what type of audio stream /// it is, you should instantiate [DashAudioSource] or [HlsAudioSource] /// directly. - static AudioSource uri(Uri uri, {Map headers, Object tag}) { + static AudioSource uri(Uri uri, {Map headers, dynamic tag}) { bool hasExtension(Uri uri, String extension) => uri.path.toLowerCase().endsWith('.$extension') || uri.fragment.toLowerCase().endsWith('.$extension'); @@ -1010,7 +1130,7 @@ abstract class AudioSource { /// An [AudioSource] that can appear in a sequence. abstract class IndexedAudioSource extends AudioSource { - final Object tag; + final dynamic tag; IndexedAudioSource(this.tag); @@ -1026,7 +1146,7 @@ abstract class UriAudioSource extends IndexedAudioSource { Uri _overrideUri; File _cacheFile; - UriAudioSource(this.uri, {this.headers, Object tag, @required String type}) + UriAudioSource(this.uri, {this.headers, dynamic tag, @required String type}) : _type = type, super(tag); @@ -1034,7 +1154,8 @@ abstract class UriAudioSource extends IndexedAudioSource { Future _setup(AudioPlayer player) async { await super._setup(player); if (uri.scheme == 'asset') { - _overrideUri = Uri.file((await _loadAsset(uri.path)).path); + _overrideUri = Uri.file( + (await _loadAsset(uri.path.replaceFirst(RegExp(r'^/'), ''))).path); } else if (headers != null) { _overrideUri = player._proxy.addUrl(uri, headers); } @@ -1077,7 +1198,7 @@ abstract class UriAudioSource extends IndexedAudioSource { }; } -/// An [AudioSource] representing a regular media file such asn an MP3 or M4A +/// An [AudioSource] representing a regular media file such as an MP3 or M4A /// file. The following URI schemes are supported: /// /// * file: loads from a local file (provided you give your app permission to @@ -1088,26 +1209,38 @@ abstract class UriAudioSource extends IndexedAudioSource { /// On platforms except for the web, the supplied [headers] will be passed with /// the HTTP(S) request. class ProgressiveAudioSource extends UriAudioSource { - ProgressiveAudioSource(Uri uri, {Map headers, Object tag}) + ProgressiveAudioSource(Uri uri, {Map headers, dynamic tag}) : super(uri, headers: headers, tag: tag, type: 'progressive'); } -/// An [AudioSource] representing a DASH stream. +/// An [AudioSource] representing a DASH stream. The following URI schemes are +/// supported: +/// +/// * file: loads from a local file (provided you give your app permission to +/// access that file). +/// * asset: loads from a Flutter asset (not supported on Web). +/// * http(s): loads from an HTTP(S) resource. /// /// On platforms except for the web, the supplied [headers] will be passed with /// the HTTP(S) request. Currently headers are not recursively applied to items /// the HTTP(S) request. Currently headers are not applied recursively. class DashAudioSource extends UriAudioSource { - DashAudioSource(Uri uri, {Map headers, Object tag}) + DashAudioSource(Uri uri, {Map headers, dynamic tag}) : super(uri, headers: headers, tag: tag, type: 'dash'); } -/// An [AudioSource] representing an HLS stream. +/// An [AudioSource] representing an HLS stream. The following URI schemes are +/// supported: +/// +/// * file: loads from a local file (provided you give your app permission to +/// access that file). +/// * asset: loads from a Flutter asset (not supported on Web). +/// * http(s): loads from an HTTP(S) resource. /// /// On platforms except for the web, the supplied [headers] will be passed with /// the HTTP(S) request. Currently headers are not applied recursively. 
class HlsAudioSource extends UriAudioSource { - HlsAudioSource(Uri uri, {Map headers, Object tag}) + HlsAudioSource(Uri uri, {Map headers, dynamic tag}) : super(uri, headers: headers, tag: tag, type: 'hls'); } @@ -1138,6 +1271,7 @@ class ConcatenatingAudioSource extends AudioSource { /// (Untested) Appends an [AudioSource]. Future add(AudioSource audioSource) async { children.add(audioSource); + _player._broadcastSequence(); if (_player != null) { await _player ._invokeMethod('concatenating.add', [_id, audioSource.toJson()]); @@ -1147,6 +1281,7 @@ class ConcatenatingAudioSource extends AudioSource { /// (Untested) Inserts an [AudioSource] at [index]. Future insert(int index, AudioSource audioSource) async { children.insert(index, audioSource); + _player._broadcastSequence(); if (_player != null) { await _player._invokeMethod( 'concatenating.insert', [_id, index, audioSource.toJson()]); @@ -1156,6 +1291,7 @@ class ConcatenatingAudioSource extends AudioSource { /// (Untested) Appends multiple [AudioSource]s. Future addAll(List children) async { this.children.addAll(children); + _player._broadcastSequence(); if (_player != null) { await _player._invokeMethod('concatenating.addAll', [_id, children.map((s) => s.toJson()).toList()]); @@ -1165,6 +1301,7 @@ class ConcatenatingAudioSource extends AudioSource { /// (Untested) Insert multiple [AudioSource]s at [index]. Future insertAll(int index, List children) async { this.children.insertAll(index, children); + _player._broadcastSequence(); if (_player != null) { await _player._invokeMethod('concatenating.insertAll', [_id, index, children.map((s) => s.toJson()).toList()]); @@ -1175,6 +1312,7 @@ class ConcatenatingAudioSource extends AudioSource { /// [ConcatenatingAudioSource] has already been loaded. Future removeAt(int index) async { children.removeAt(index); + _player._broadcastSequence(); if (_player != null) { await _player._invokeMethod('concatenating.removeAt', [_id, index]); } @@ -1184,6 +1322,7 @@ class ConcatenatingAudioSource extends AudioSource { /// to [end] exclusive. Future removeRange(int start, int end) async { children.removeRange(start, end); + _player._broadcastSequence(); if (_player != null) { await _player ._invokeMethod('concatenating.removeRange', [_id, start, end]); @@ -1193,6 +1332,7 @@ class ConcatenatingAudioSource extends AudioSource { /// (Untested) Moves an [AudioSource] from [currentIndex] to [newIndex]. Future move(int currentIndex, int newIndex) async { children.insert(newIndex, children.removeAt(currentIndex)); + _player._broadcastSequence(); if (_player != null) { await _player ._invokeMethod('concatenating.move', [_id, currentIndex, newIndex]); @@ -1202,6 +1342,7 @@ class ConcatenatingAudioSource extends AudioSource { /// (Untested) Removes all [AudioSources]. 
Future clear() async { children.clear(); + _player._broadcastSequence(); if (_player != null) { await _player._invokeMethod('concatenating.clear', [_id]); } @@ -1243,7 +1384,7 @@ class ClippingAudioSource extends IndexedAudioSource { @required this.child, this.start, this.end, - Object tag, + dynamic tag, }) : super(tag); @override @@ -1281,6 +1422,12 @@ class LoopingAudioSource extends AudioSource { this.count, }) : super(); + @override + Future _setup(AudioPlayer player) async { + await super._setup(player); + await child._setup(player); + } + @override List get sequence => List.generate(count, (i) => child).expand((s) => s.sequence).toList(); diff --git a/lib/just_audio_web.dart b/lib/just_audio_web.dart index 1df5735..9932cdd 100644 --- a/lib/just_audio_web.dart +++ b/lib/just_audio_web.dart @@ -96,6 +96,8 @@ abstract class JustAudioPlayer { return await concatenatingMove(args[0], args[1], args[2]); case "concatenating.clear": return await concatenatingClear(args[0]); + case "setAndroidAudioAttributes": + return null; default: throw PlatformException(code: 'Unimplemented'); } @@ -280,7 +282,7 @@ class Html5AudioPlayer extends JustAudioPlayer { if (_shuffleModeEnabled) { _audioSourcePlayer?.shuffle(0, _index); } - return (await _currentAudioSourcePlayer.load()).inMilliseconds; + return (await _currentAudioSourcePlayer.load())?.inMilliseconds; } Future loadUri(final Uri uri) async { diff --git a/macos/Classes/AudioPlayer.m b/macos/Classes/AudioPlayer.m deleted file mode 100644 index ccbfdea..0000000 --- a/macos/Classes/AudioPlayer.m +++ /dev/null @@ -1,1138 +0,0 @@ -#import "AudioPlayer.h" -#import "AudioSource.h" -#import "IndexedAudioSource.h" -#import "UriAudioSource.h" -#import "ConcatenatingAudioSource.h" -#import "LoopingAudioSource.h" -#import "ClippingAudioSource.h" -#import -#import -#include - -// TODO: Check for and report invalid state transitions. -// TODO: Apply Apple's guidance on seeking: https://developer.apple.com/library/archive/qa/qa1820/_index.html -@implementation AudioPlayer { - NSObject* _registrar; - FlutterMethodChannel *_methodChannel; - FlutterEventChannel *_eventChannel; - FlutterEventSink _eventSink; - NSString *_playerId; - AVQueuePlayer *_player; - AudioSource *_audioSource; - NSMutableArray *_indexedAudioSources; - NSMutableArray *_order; - NSMutableArray *_orderInv; - int _index; - enum ProcessingState _processingState; - enum LoopMode _loopMode; - BOOL _shuffleModeEnabled; - long long _updateTime; - int _updatePosition; - int _lastPosition; - int _bufferedPosition; - // Set when the current item hasn't been played yet so we aren't sure whether sufficient audio has been buffered. 
- BOOL _bufferUnconfirmed; - CMTime _seekPos; - FlutterResult _loadResult; - FlutterResult _playResult; - id _timeObserver; - BOOL _automaticallyWaitsToMinimizeStalling; - BOOL _configuredSession; - BOOL _playing; -} - -- (instancetype)initWithRegistrar:(NSObject *)registrar playerId:(NSString*)idParam configuredSession:(BOOL)configuredSession { - self = [super init]; - NSAssert(self, @"super init cannot be nil"); - _registrar = registrar; - _playerId = idParam; - _configuredSession = configuredSession; - _methodChannel = - [FlutterMethodChannel methodChannelWithName:[NSMutableString stringWithFormat:@"com.ryanheise.just_audio.methods.%@", _playerId] - binaryMessenger:[registrar messenger]]; - _eventChannel = - [FlutterEventChannel eventChannelWithName:[NSMutableString stringWithFormat:@"com.ryanheise.just_audio.events.%@", _playerId] - binaryMessenger:[registrar messenger]]; - [_eventChannel setStreamHandler:self]; - _index = 0; - _processingState = none; - _loopMode = loopOff; - _shuffleModeEnabled = NO; - _player = nil; - _audioSource = nil; - _indexedAudioSources = nil; - _order = nil; - _orderInv = nil; - _seekPos = kCMTimeInvalid; - _timeObserver = 0; - _updatePosition = 0; - _updateTime = 0; - _lastPosition = 0; - _bufferedPosition = 0; - _bufferUnconfirmed = NO; - _playing = NO; - _loadResult = nil; - _playResult = nil; - _automaticallyWaitsToMinimizeStalling = YES; - __weak __typeof__(self) weakSelf = self; - [_methodChannel setMethodCallHandler:^(FlutterMethodCall* call, FlutterResult result) { - [weakSelf handleMethodCall:call result:result]; - }]; - return self; -} - -- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result { - NSArray* args = (NSArray*)call.arguments; - if ([@"load" isEqualToString:call.method]) { - [self load:args[0] result:result]; - } else if ([@"play" isEqualToString:call.method]) { - [self play:result]; - } else if ([@"pause" isEqualToString:call.method]) { - [self pause]; - result(nil); - } else if ([@"setVolume" isEqualToString:call.method]) { - [self setVolume:(float)[args[0] doubleValue]]; - result(nil); - } else if ([@"setSpeed" isEqualToString:call.method]) { - [self setSpeed:(float)[args[0] doubleValue]]; - result(nil); - } else if ([@"setLoopMode" isEqualToString:call.method]) { - [self setLoopMode:[args[0] intValue]]; - result(nil); - } else if ([@"setShuffleModeEnabled" isEqualToString:call.method]) { - [self setShuffleModeEnabled:(BOOL)[args[0] boolValue]]; - result(nil); - } else if ([@"setAutomaticallyWaitsToMinimizeStalling" isEqualToString:call.method]) { - [self setAutomaticallyWaitsToMinimizeStalling:(BOOL)[args[0] boolValue]]; - result(nil); - } else if ([@"seek" isEqualToString:call.method]) { - CMTime position = args[0] == [NSNull null] ? 
kCMTimePositiveInfinity : CMTimeMake([args[0] intValue], 1000); - [self seek:position index:args[1] completionHandler:^(BOOL finished) { - result(nil); - }]; - result(nil); - } else if ([@"dispose" isEqualToString:call.method]) { - [self dispose]; - result(nil); - } else if ([@"concatenating.add" isEqualToString:call.method]) { - [self concatenatingAdd:(NSString*)args[0] source:(NSDictionary*)args[1]]; - result(nil); - } else if ([@"concatenating.insert" isEqualToString:call.method]) { - [self concatenatingInsert:(NSString*)args[0] index:[args[1] intValue] source:(NSDictionary*)args[2]]; - result(nil); - } else if ([@"concatenating.addAll" isEqualToString:call.method]) { - [self concatenatingAddAll:(NSString*)args[0] sources:(NSArray*)args[1]]; - result(nil); - } else if ([@"concatenating.insertAll" isEqualToString:call.method]) { - [self concatenatingInsertAll:(NSString*)args[0] index:[args[1] intValue] sources:(NSArray*)args[2]]; - result(nil); - } else if ([@"concatenating.removeAt" isEqualToString:call.method]) { - [self concatenatingRemoveAt:(NSString*)args[0] index:[args[1] intValue]]; - result(nil); - } else if ([@"concatenating.removeRange" isEqualToString:call.method]) { - [self concatenatingRemoveRange:(NSString*)args[0] start:[args[1] intValue] end:[args[2] intValue]]; - result(nil); - } else if ([@"concatenating.move" isEqualToString:call.method]) { - [self concatenatingMove:(NSString*)args[0] currentIndex:[args[1] intValue] newIndex:[args[2] intValue]]; - result(nil); - } else if ([@"concatenating.clear" isEqualToString:call.method]) { - [self concatenatingClear:(NSString*)args[0]]; - result(nil); - } else { - result(FlutterMethodNotImplemented); - } -} - -// Untested -- (void)concatenatingAdd:(NSString *)catId source:(NSDictionary *)source { - [self concatenatingInsertAll:catId index:-1 sources:@[source]]; -} - -// Untested -- (void)concatenatingInsert:(NSString *)catId index:(int)index source:(NSDictionary *)source { - [self concatenatingInsertAll:catId index:index sources:@[source]]; -} - -// Untested -- (void)concatenatingAddAll:(NSString *)catId sources:(NSArray *)sources { - [self concatenatingInsertAll:catId index:-1 sources:sources]; -} - -// Untested -- (void)concatenatingInsertAll:(NSString *)catId index:(int)index sources:(NSArray *)sources { - // Find all duplicates of the identified ConcatenatingAudioSource. - NSMutableArray *matches = [[NSMutableArray alloc] init]; - [_audioSource findById:catId matches:matches]; - // Add each new source to each match. - for (int i = 0; i < matches.count; i++) { - ConcatenatingAudioSource *catSource = (ConcatenatingAudioSource *)matches[i]; - int idx = index >= 0 ? index : catSource.count; - NSMutableArray *audioSources = [self decodeAudioSources:sources]; - for (int j = 0; j < audioSources.count; j++) { - AudioSource *audioSource = audioSources[j]; - [catSource insertSource:audioSource atIndex:(idx + j)]; - } - } - // Index the new audio sources.
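- // buildSequence: flattens the audio source tree into _indexedAudioSources, assigning each leaf a position in the playback sequence; the loop below then wires up any not-yet-attached sources (item observers plus a back-reference from the player item).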
- _indexedAudioSources = [[NSMutableArray alloc] init]; - [_audioSource buildSequence:_indexedAudioSources treeIndex:0]; - for (int i = 0; i < [_indexedAudioSources count]; i++) { - IndexedAudioSource *audioSource = _indexedAudioSources[i]; - if (!audioSource.isAttached) { - audioSource.playerItem.audioSource = audioSource; - [self addItemObservers:audioSource.playerItem]; - } - } - [self updateOrder]; - if (_player.currentItem) { - _index = [self indexForItem:_player.currentItem]; - } else { - _index = 0; - } - [self enqueueFrom:_index]; - // Notify each new IndexedAudioSource that it's been attached to the player. - for (int i = 0; i < [_indexedAudioSources count]; i++) { - if (!_indexedAudioSources[i].isAttached) { - [_indexedAudioSources[i] attach:_player]; - } - } - [self broadcastPlaybackEvent]; -} - -// Untested -- (void)concatenatingRemoveAt:(NSString *)catId index:(int)index { - [self concatenatingRemoveRange:catId start:index end:(index + 1)]; -} - -// Untested -- (void)concatenatingRemoveRange:(NSString *)catId start:(int)start end:(int)end { - // Find all duplicates of the identified ConcatenatingAudioSource. - NSMutableArray *matches = [[NSMutableArray alloc] init]; - [_audioSource findById:catId matches:matches]; - // Remove range from each match. - for (int i = 0; i < matches.count; i++) { - ConcatenatingAudioSource *catSource = (ConcatenatingAudioSource *)matches[i]; - int endIndex = end >= 0 ? end : catSource.count; - [catSource removeSourcesFromIndex:start toIndex:endIndex]; - } - // Re-index the remaining audio sources. - NSArray *oldIndexedAudioSources = _indexedAudioSources; - _indexedAudioSources = [[NSMutableArray alloc] init]; - [_audioSource buildSequence:_indexedAudioSources treeIndex:0]; - for (int i = 0, j = 0; i < _indexedAudioSources.count; i++, j++) { - IndexedAudioSource *audioSource = _indexedAudioSources[i]; - while (audioSource != oldIndexedAudioSources[j]) { - [self removeItemObservers:oldIndexedAudioSources[j].playerItem]; - if (j < _index) { - _index--; - } else if (j == _index) { - // The currently playing item was removed. - } - j++; - } - } - [self updateOrder]; - if (_index >= _indexedAudioSources.count) _index = _indexedAudioSources.count - 1; - if (_index < 0) _index = 0; - [self enqueueFrom:_index]; - [self broadcastPlaybackEvent]; -} - -// Untested -- (void)concatenatingMove:(NSString *)catId currentIndex:(int)currentIndex newIndex:(int)newIndex { - // Find all duplicates of the identified ConcatenatingAudioSource. - NSMutableArray *matches = [[NSMutableArray alloc] init]; - [_audioSource findById:catId matches:matches]; - // Move range within each match. - for (int i = 0; i < matches.count; i++) { - ConcatenatingAudioSource *catSource = (ConcatenatingAudioSource *)matches[i]; - [catSource moveSourceFromIndex:currentIndex toIndex:newIndex]; - } - // Re-index the audio sources. 
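- // A move adds or removes nothing, so it is enough to rebuild the flattened sequence and recompute which index the current item now occupies.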
- _indexedAudioSources = [[NSMutableArray alloc] init]; - [_audioSource buildSequence:_indexedAudioSources treeIndex:0]; - _index = [self indexForItem:_player.currentItem]; - [self broadcastPlaybackEvent]; -} - -// Untested -- (void)concatenatingClear:(NSString *)catId { - [self concatenatingRemoveRange:catId start:0 end:-1]; -} - -- (FlutterError*)onListenWithArguments:(id)arguments eventSink:(FlutterEventSink)eventSink { - _eventSink = eventSink; - return nil; -} - -- (FlutterError*)onCancelWithArguments:(id)arguments { - _eventSink = nil; - return nil; -} - -- (void)checkForDiscontinuity { - if (!_eventSink) return; - if (!_playing || CMTIME_IS_VALID(_seekPos) || _processingState == completed) return; - int position = [self getCurrentPosition]; - if (_processingState == buffering) { - if (position > _lastPosition) { - [self leaveBuffering:@"stall ended"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - } - } else { - long long now = (long long)([[NSDate date] timeIntervalSince1970] * 1000.0); - long long timeSinceLastUpdate = now - _updateTime; - long long expectedPosition = _updatePosition + (long long)(timeSinceLastUpdate * _player.rate); - long long drift = position - expectedPosition; - //NSLog(@"position: %d, drift: %lld", position, drift); - // Update if we've drifted or just started observing - if (_updateTime == 0L) { - [self broadcastPlaybackEvent]; - } else if (drift < -100) { - [self enterBuffering:@"stalling"]; - NSLog(@"Drift: %lld", drift); - [self updatePosition]; - [self broadcastPlaybackEvent]; - } - } - _lastPosition = position; -} - -- (void)enterBuffering:(NSString *)reason { - NSLog(@"ENTER BUFFERING: %@", reason); - _processingState = buffering; -} - -- (void)leaveBuffering:(NSString *)reason { - NSLog(@"LEAVE BUFFERING: %@", reason); - _processingState = ready; -} - -- (void)broadcastPlaybackEvent { - if (!_eventSink) return; - _eventSink(@{ - @"processingState": @(_processingState), - @"updatePosition": @(_updatePosition), - @"updateTime": @(_updateTime), - // TODO: buffer position - @"bufferedPosition": @(_updatePosition), - // TODO: Icy Metadata - @"icyMetadata": [NSNull null], - @"duration": @([self getDuration]), - @"currentIndex": @(_index), - }); -} - -- (int)getCurrentPosition { - if (_processingState == none || _processingState == loading) { - return 0; - } else if (CMTIME_IS_VALID(_seekPos)) { - return (int)(1000 * CMTimeGetSeconds(_seekPos)); - } else if (_indexedAudioSources) { - int ms = (int)(1000 * CMTimeGetSeconds(_indexedAudioSources[_index].position)); - if (ms < 0) ms = 0; - return ms; - } else { - return 0; - } -} - -- (int)getBufferedPosition { - if (_processingState == none || _processingState == loading) { - return 0; - } else if (_indexedAudioSources) { - int ms = (int)(1000 * CMTimeGetSeconds(_indexedAudioSources[_index].bufferedPosition)); - if (ms < 0) ms = 0; - return ms; - } else { - return 0; - } -} - -- (int)getDuration { - if (_processingState == none) { - return -1; - } else if (_indexedAudioSources) { - int v = (int)(1000 * CMTimeGetSeconds(_indexedAudioSources[_index].duration)); - return v; - } else { - return 0; - } -} - -- (void)removeItemObservers:(AVPlayerItem *)playerItem { - [playerItem removeObserver:self forKeyPath:@"status"]; - [playerItem removeObserver:self forKeyPath:@"playbackBufferEmpty"]; - [playerItem removeObserver:self forKeyPath:@"playbackBufferFull"]; - //[playerItem removeObserver:self forKeyPath:@"playbackLikelyToKeepUp"]; - [[NSNotificationCenter defaultCenter] removeObserver:self 
name:AVPlayerItemDidPlayToEndTimeNotification object:playerItem]; - [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemFailedToPlayToEndTimeNotification object:playerItem]; - [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemPlaybackStalledNotification object:playerItem]; - [playerItem removeObserver:self forKeyPath:@"loadedTimeRanges"]; -} - -- (void)addItemObservers:(AVPlayerItem *)playerItem { - // Get notified when the item is loaded or had an error loading - [playerItem addObserver:self forKeyPath:@"status" options:NSKeyValueObservingOptionNew context:nil]; - // Get notified of the buffer state - [playerItem addObserver:self forKeyPath:@"playbackBufferEmpty" options:NSKeyValueObservingOptionNew context:nil]; - [playerItem addObserver:self forKeyPath:@"playbackBufferFull" options:NSKeyValueObservingOptionNew context:nil]; - [playerItem addObserver:self forKeyPath:@"loadedTimeRanges" options:NSKeyValueObservingOptionNew context:nil]; - //[playerItem addObserver:self forKeyPath:@"playbackLikelyToKeepUp" options:NSKeyValueObservingOptionNew context:nil]; - // Get notified when playback has reached the end - [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(onComplete:) name:AVPlayerItemDidPlayToEndTimeNotification object:playerItem]; - // Get notified when playback stops due to a failure (currently unused) - [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(onFailToComplete:) name:AVPlayerItemFailedToPlayToEndTimeNotification object:playerItem]; - // Get notified when playback stalls (currently unused) - [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(onItemStalled:) name:AVPlayerItemPlaybackStalledNotification object:playerItem]; -} - -- (NSMutableArray *)decodeAudioSources:(NSArray *)data { - NSMutableArray *array = [[NSMutableArray alloc] init]; - for (int i = 0; i < [data count]; i++) { - AudioSource *source = [self decodeAudioSource:data[i]]; - [array addObject:source]; - } - return array; -} - -- (AudioSource *)decodeAudioSource:(NSDictionary *)data { - NSString *type = data[@"type"]; - if ([@"progressive" isEqualToString:type]) { - return [[UriAudioSource alloc] initWithId:data[@"id"] uri:data[@"uri"]]; - } else if ([@"dash" isEqualToString:type]) { - return [[UriAudioSource alloc] initWithId:data[@"id"] uri:data[@"uri"]]; - } else if ([@"hls" isEqualToString:type]) { - return [[UriAudioSource alloc] initWithId:data[@"id"] uri:data[@"uri"]]; - } else if ([@"concatenating" isEqualToString:type]) { - return [[ConcatenatingAudioSource alloc] initWithId:data[@"id"] - audioSources:[self decodeAudioSources:data[@"audioSources"]]]; - } else if ([@"clipping" isEqualToString:type]) { - return [[ClippingAudioSource alloc] initWithId:data[@"id"] - audioSource:[self decodeAudioSource:data[@"audioSource"]] - start:data[@"start"] - end:data[@"end"]]; - } else if ([@"looping" isEqualToString:type]) { - NSMutableArray *childSources = [NSMutableArray new]; - int count = [data[@"count"] intValue]; - for (int i = 0; i < count; i++) { - [childSources addObject:[self decodeAudioSource:data[@"audioSource"]]]; - } - return [[LoopingAudioSource alloc] initWithId:data[@"id"] audioSources:childSources]; - } else { - return nil; - } -} - -- (void)enqueueFrom:(int)index { - int oldIndex = _index; - _index = index; - - // Update the queue while keeping the currently playing item untouched.
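- // Strategy: keep the currently playing item in place, remove everything else from the queue, then re-insert the remainder of the play order starting from _index.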
- - /* NSLog(@"before reorder: _player.items.count: ", _player.items.count); */ - /* [self dumpQueue]; */ - - // First, remove all _player items except for the currently playing one (if any). - IndexedPlayerItem *oldItem = _player.currentItem; - IndexedPlayerItem *existingItem = nil; - NSArray *oldPlayerItems = [NSArray arrayWithArray:_player.items]; - // In the first pass, preserve the old and new items. - for (int i = 0; i < oldPlayerItems.count; i++) { - if (oldPlayerItems[i] == _indexedAudioSources[_index].playerItem) { - // Preserve and tag new item if it is already in the queue. - existingItem = oldPlayerItems[i]; - } else if (oldPlayerItems[i] == oldItem) { - // Temporarily preserve old item, just to avoid jumping to - // intermediate queue positions unnecessarily. We only want to jump - // once to _index. - } else { - [_player removeItem:oldPlayerItems[i]]; - } - } - // In the second pass, remove the old item (if different from new item). - if (_index != oldIndex) { - [_player removeItem:oldItem]; - } - - /* NSLog(@"inter order: _player.items.count: ", _player.items.count); */ - /* [self dumpQueue]; */ - - // Regenerate queue - BOOL include = NO; - for (int i = 0; i < [_order count]; i++) { - int si = [_order[i] intValue]; - if (si == _index) include = YES; - if (include && _indexedAudioSources[si].playerItem != existingItem) { - [_player insertItem:_indexedAudioSources[si].playerItem afterItem:nil]; - } - } - - /* NSLog(@"after reorder: _player.items.count: ", _player.items.count); */ - /* [self dumpQueue]; */ - - if (_processingState != loading && oldItem != _indexedAudioSources[_index].playerItem) { - // || !_player.currentItem.playbackLikelyToKeepUp; - if (_player.currentItem.playbackBufferEmpty) { - [self enterBuffering:@"enqueueFrom playbackBufferEmpty"]; - } else { - [self leaveBuffering:@"enqueueFrom !playbackBufferEmpty"]; - } - [self updatePosition]; - } -} - -- (void)updatePosition { - _updatePosition = [self getCurrentPosition]; - _updateTime = (long long)([[NSDate date] timeIntervalSince1970] * 1000.0); -} - -- (void)load:(NSDictionary *)source result:(FlutterResult)result { - if (!_playing) { - [_player pause]; - } - if (_processingState == loading) { - [self abortExistingConnection]; - } - _loadResult = result; - _index = 0; - [self updatePosition]; - _processingState = loading; - [self broadcastPlaybackEvent]; - // Remove previous observers - if (_indexedAudioSources) { - for (int i = 0; i < [_indexedAudioSources count]; i++) { - [self removeItemObservers:_indexedAudioSources[i].playerItem]; - } - } - // Decode audio source - if (_audioSource && [@"clipping" isEqualToString:source[@"type"]]) { - // Check if we're clipping an audio source that was previously loaded. 
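- // Reusing the previously prepared UriAudioSource below means only the clip boundaries change, avoiding a reload of the underlying player item.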
- UriAudioSource *child = nil; - if ([_audioSource isKindOfClass:[ClippingAudioSource class]]) { - ClippingAudioSource *clipper = (ClippingAudioSource *)_audioSource; - child = clipper.audioSource; - } else if ([_audioSource isKindOfClass:[UriAudioSource class]]) { - child = (UriAudioSource *)_audioSource; - } - if (child) { - _audioSource = [[ClippingAudioSource alloc] initWithId:source[@"id"] - audioSource:child - start:source[@"start"] - end:source[@"end"]]; - } else { - _audioSource = [self decodeAudioSource:source]; - } - } else { - _audioSource = [self decodeAudioSource:source]; - } - _indexedAudioSources = [[NSMutableArray alloc] init]; - [_audioSource buildSequence:_indexedAudioSources treeIndex:0]; - for (int i = 0; i < [_indexedAudioSources count]; i++) { - IndexedAudioSource *source = _indexedAudioSources[i]; - [self addItemObservers:source.playerItem]; - source.playerItem.audioSource = source; - } - [self updateOrder]; - // Set up an empty player - if (!_player) { - _player = [[AVQueuePlayer alloc] initWithItems:@[]]; - if (@available(macOS 10.12, iOS 10.0, *)) { - _player.automaticallyWaitsToMinimizeStalling = _automaticallyWaitsToMinimizeStalling; - // TODO: Remove these observers in dispose. - [_player addObserver:self - forKeyPath:@"timeControlStatus" - options:NSKeyValueObservingOptionNew - context:nil]; - } - [_player addObserver:self - forKeyPath:@"currentItem" - options:NSKeyValueObservingOptionNew - context:nil]; - // TODO: learn about the different ways to define weakSelf. - //__weak __typeof__(self) weakSelf = self; - //typeof(self) __weak weakSelf = self; - __unsafe_unretained typeof(self) weakSelf = self; - if (@available(macOS 10.12, iOS 10.0, *)) {} - else { - _timeObserver = [_player addPeriodicTimeObserverForInterval:CMTimeMake(200, 1000) - queue:nil - usingBlock:^(CMTime time) { - [weakSelf checkForDiscontinuity]; - } - ]; - } - } - // Initialise the AVQueuePlayer with items. - [self enqueueFrom:0]; - // Notify each IndexedAudioSource that it's been attached to the player. - for (int i = 0; i < [_indexedAudioSources count]; i++) { - [_indexedAudioSources[i] attach:_player]; - } - - if (_player.currentItem.status == AVPlayerItemStatusReadyToPlay) { - _loadResult(@([self getDuration])); - _loadResult = nil; - } else { - // We send result after the playerItem is ready in observeValueForKeyPath. 
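- // (See the AVPlayerItemStatusReadyToPlay branch of observeValueForKeyPath, which completes _loadResult with the duration.)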
- } - [self broadcastPlaybackEvent]; -} - -- (void)updateOrder { - if (_shuffleModeEnabled) { - [_audioSource shuffle:0 currentIndex: _index]; - } - _orderInv = [NSMutableArray arrayWithCapacity:[_indexedAudioSources count]]; - for (int i = 0; i < [_indexedAudioSources count]; i++) { - [_orderInv addObject:@(0)]; - } - if (_shuffleModeEnabled) { - _order = [_audioSource getShuffleOrder]; - } else { - NSMutableArray *order = [[NSMutableArray alloc] init]; - for (int i = 0; i < [_indexedAudioSources count]; i++) { - [order addObject:@(i)]; - } - _order = order; - } - for (int i = 0; i < [_indexedAudioSources count]; i++) { - _orderInv[[_order[i] intValue]] = @(i); - } -} - -- (void)onItemStalled:(NSNotification *)notification { - IndexedPlayerItem *playerItem = (IndexedPlayerItem *)notification.object; - NSLog(@"onItemStalled"); -} - -- (void)onFailToComplete:(NSNotification *)notification { - IndexedPlayerItem *playerItem = (IndexedPlayerItem *)notification.object; - NSLog(@"onFailToComplete"); -} - -- (void)onComplete:(NSNotification *)notification { - NSLog(@"onComplete"); - if (_loopMode == loopOne) { - [self seek:kCMTimeZero index:@(_index) completionHandler:^(BOOL finished) { - // XXX: Not necessary? - [self play]; - }]; - } else { - IndexedPlayerItem *endedPlayerItem = (IndexedPlayerItem *)notification.object; - IndexedAudioSource *endedSource = endedPlayerItem.audioSource; - // When an item ends, seek back to its beginning. - [endedSource seek:kCMTimeZero]; - - if ([_orderInv[_index] intValue] + 1 < [_order count]) { - // account for automatic move to next item - _index = [_order[[_orderInv[_index] intValue] + 1] intValue]; - NSLog(@"advance to next: index = %d", _index); - [self broadcastPlaybackEvent]; - } else { - // reached end of playlist - if (_loopMode == loopAll) { - NSLog(@"Loop back to first item"); - // Loop back to the beginning - // TODO: Currently there will be a gap at the loop point. - // Maybe we can do something clever by temporarily adding the - // first playlist item at the end of the queue, although this - // will affect any code that assumes the queue always - // corresponds to a contiguous region of the indexed audio - // sources. - // For now we just do a seek back to the start. - if ([_order count] == 1) { - [self seek:kCMTimeZero index:[NSNull null] completionHandler:^(BOOL finished) { - // XXX: Necessary? - [self play]; - }]; - } else { - [self seek:kCMTimeZero index:_order[0] completionHandler:^(BOOL finished) { - // XXX: Necessary? 
[self play]; - }]; - } - } else { - [self complete]; - } - } - } -} - -- (void)observeValueForKeyPath:(NSString *)keyPath - ofObject:(id)object - change:(NSDictionary *)change - context:(void *)context { - - if ([keyPath isEqualToString:@"status"]) { - IndexedPlayerItem *playerItem = (IndexedPlayerItem *)object; - AVPlayerItemStatus status = AVPlayerItemStatusUnknown; - NSNumber *statusNumber = change[NSKeyValueChangeNewKey]; - if ([statusNumber isKindOfClass:[NSNumber class]]) { - status = statusNumber.intValue; - } - switch (status) { - case AVPlayerItemStatusReadyToPlay: { - if (playerItem != _player.currentItem) return; - // Detect buffering in different ways depending on whether we're playing - if (_playing) { - if (@available(macOS 10.12, iOS 10.0, *)) { - if (_player.timeControlStatus == AVPlayerTimeControlStatusWaitingToPlayAtSpecifiedRate) { - [self enterBuffering:@"ready to play: playing, waitingToPlay"]; - } else { - [self leaveBuffering:@"ready to play: playing, !waitingToPlay"]; - } - [self updatePosition]; - } else { - // If this happens when we're playing, check whether buffer is confirmed - if (_bufferUnconfirmed && !_player.currentItem.playbackBufferFull) { - // Stay in buffering - XXX Test - [self enterBuffering:@"ready to play: playing, bufferUnconfirmed && !playbackBufferFull"]; - } else { - if (_player.currentItem.playbackBufferEmpty) { - // !_player.currentItem.playbackLikelyToKeepUp; - [self enterBuffering:@"ready to play: playing, playbackBufferEmpty"]; - } else { - [self leaveBuffering:@"ready to play: playing, !playbackBufferEmpty"]; - } - [self updatePosition]; - } - } - } else { - if (_player.currentItem.playbackBufferEmpty) { - [self enterBuffering:@"ready to play: !playing, playbackBufferEmpty"]; - // || !_player.currentItem.playbackLikelyToKeepUp; - } else { - [self leaveBuffering:@"ready to play: !playing, !playbackBufferEmpty"]; - } - [self updatePosition]; - } - [self broadcastPlaybackEvent]; - if (_loadResult) { - _loadResult(@([self getDuration])); - _loadResult = nil; - } - break; - } - case AVPlayerItemStatusFailed: { - NSLog(@"AVPlayerItemStatusFailed"); - [self sendErrorForItem:playerItem]; - break; - } - case AVPlayerItemStatusUnknown: - break; - } - } else if ([keyPath isEqualToString:@"playbackBufferEmpty"] || [keyPath isEqualToString:@"playbackBufferFull"]) { - // Use these values to detect buffering. - IndexedPlayerItem *playerItem = (IndexedPlayerItem *)object; - if (playerItem != _player.currentItem) return; - // If there's a seek in progress, these values are unreliable - if (CMTIME_IS_VALID(_seekPos)) return; - // Detect buffering in different ways depending on whether we're playing - if (_playing) { - if (@available(macOS 10.12, iOS 10.0, *)) { - // We handle this with timeControlStatus instead. - } else { - if (_bufferUnconfirmed && playerItem.playbackBufferFull) { - _bufferUnconfirmed = NO; - [self leaveBuffering:@"playing, _bufferUnconfirmed && playbackBufferFull"]; - [self updatePosition]; - NSLog(@"Buffering confirmed!
leaving buffering"); - [self broadcastPlaybackEvent]; - } - } - } else { - if (playerItem.playbackBufferEmpty) { - [self enterBuffering:@"!playing, playbackBufferEmpty"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - } else if (!playerItem.playbackBufferEmpty || playerItem.playbackBufferFull) { - _processingState = ready; - [self leaveBuffering:@"!playing, !playbackBufferEmpty || playbackBufferFull"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - } - } - /* } else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"]) { */ - } else if ([keyPath isEqualToString:@"timeControlStatus"]) { - if (@available(macOS 10.12, iOS 10.0, *)) { - AVPlayerTimeControlStatus status = AVPlayerTimeControlStatusPaused; - NSNumber *statusNumber = change[NSKeyValueChangeNewKey]; - if ([statusNumber isKindOfClass:[NSNumber class]]) { - status = statusNumber.intValue; - } - switch (status) { - case AVPlayerTimeControlStatusPaused: - //NSLog(@"AVPlayerTimeControlStatusPaused"); - break; - case AVPlayerTimeControlStatusWaitingToPlayAtSpecifiedRate: - //NSLog(@"AVPlayerTimeControlStatusWaitingToPlayAtSpecifiedRate"); - if (_processingState != completed) { - [self enterBuffering:@"timeControlStatus"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - } else { - NSLog(@"Ignoring wait signal because we reached the end"); - } - break; - case AVPlayerTimeControlStatusPlaying: - [self leaveBuffering:@"timeControlStatus"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - break; - } - } - } else if ([keyPath isEqualToString:@"currentItem"] && _player.currentItem) { - if (_player.currentItem.status == AVPlayerItemStatusFailed) { - if ([_orderInv[_index] intValue] + 1 < [_order count]) { - // account for automatic move to next item - _index = [_order[[_orderInv[_index] intValue] + 1] intValue]; - NSLog(@"advance to next on error: index = %d", _index); - [self broadcastPlaybackEvent]; - } else { - NSLog(@"error on last item"); - } - return; - } else { - int expectedIndex = [self indexForItem:_player.currentItem]; - if (_index != expectedIndex) { - // AVQueuePlayer will sometimes skip over error items without - // notifying this observer. - NSLog(@"Queue change detected. Adjusting index from %d -> %d", _index, expectedIndex); - _index = expectedIndex; - [self broadcastPlaybackEvent]; - } - } - //NSLog(@"currentItem changed. _index=%d", _index); - _bufferUnconfirmed = YES; - // If we've skipped or transitioned to a new item and we're not - // currently in the middle of a seek - if (CMTIME_IS_INVALID(_seekPos) && _player.currentItem.status == AVPlayerItemStatusReadyToPlay) { - [self updatePosition]; - IndexedAudioSource *source = ((IndexedPlayerItem *)_player.currentItem).audioSource; - // We should already be at position zero but for - // ClippingAudioSource it might be off by some milliseconds so we - // consider anything <= 100 as close enough. 
- if ((int)(1000 * CMTimeGetSeconds(source.position)) > 100) { - NSLog(@"On currentItem change, seeking back to zero"); - BOOL shouldResumePlayback = NO; - AVPlayerActionAtItemEnd originalEndAction = _player.actionAtItemEnd; - if (_playing && CMTimeGetSeconds(CMTimeSubtract(source.position, source.duration)) >= 0) { - NSLog(@"Need to pause while rewinding because we're at the end"); - shouldResumePlayback = YES; - _player.actionAtItemEnd = AVPlayerActionAtItemEndPause; - [_player pause]; - } - [self enterBuffering:@"currentItem changed, seeking"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - [source seek:kCMTimeZero completionHandler:^(BOOL finished) { - [self leaveBuffering:@"currentItem changed, finished seek"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - if (shouldResumePlayback) { - _player.actionAtItemEnd = originalEndAction; - // TODO: This logic is almost duplicated in seek. See if we can reuse this code. - [_player play]; - } - }]; - } else { - // Already at zero, no need to seek. - } - } - } else if ([keyPath isEqualToString:@"loadedTimeRanges"]) { - IndexedPlayerItem *playerItem = (IndexedPlayerItem *)object; - if (playerItem != _player.currentItem) return; - int pos = [self getBufferedPosition]; - if (pos != _bufferedPosition) { - _bufferedPosition = pos; - [self broadcastPlaybackEvent]; - } - } -} - -- (void)sendErrorForItem:(IndexedPlayerItem *)playerItem { - FlutterError *flutterError = [FlutterError errorWithCode:[NSString stringWithFormat:@"%d", playerItem.error.code] - message:playerItem.error.localizedDescription - details:nil]; - [self sendError:flutterError playerItem:playerItem]; -} - -- (void)sendError:(FlutterError *)flutterError playerItem:(IndexedPlayerItem *)playerItem { - NSLog(@"sendError"); - if (_loadResult && playerItem == _player.currentItem) { - _loadResult(flutterError); - _loadResult = nil; - } - if (_eventSink) { - // Broadcast all errors even if they aren't on the current item. 
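- // (For example, a queued item may fail while preloading, before it ever becomes the current item.)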
- _eventSink(flutterError); - } -} - -- (void)abortExistingConnection { - FlutterError *flutterError = [FlutterError errorWithCode:@"abort" - message:@"Connection aborted" - details:nil]; - [self sendError:flutterError playerItem:nil]; -} - -- (int)indexForItem:(IndexedPlayerItem *)playerItem { - for (int i = 0; i < _indexedAudioSources.count; i++) { - if (_indexedAudioSources[i].playerItem == playerItem) { - return i; - } - } - return -1; -} - -- (void)play { - [self play:nil]; -} - -- (void)play:(FlutterResult)result { - if (result) { - if (_playResult) { - NSLog(@"INTERRUPTING PLAY"); - _playResult(nil); - } - _playResult = result; - } - _playing = YES; -#if TARGET_OS_IPHONE - if (_configuredSession) { - [[AVAudioSession sharedInstance] setActive:YES error:nil]; - } -#endif - [_player play]; - [self updatePosition]; - if (@available(macOS 10.12, iOS 10.0, *)) {} - else { - if (_bufferUnconfirmed && !_player.currentItem.playbackBufferFull) { - [self enterBuffering:@"play, _bufferUnconfirmed && !playbackBufferFull"]; - [self broadcastPlaybackEvent]; - } - } -} - -- (void)pause { - _playing = NO; - [_player pause]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - if (_playResult) { - NSLog(@"PLAY FINISHED DUE TO PAUSE"); - _playResult(nil); - _playResult = nil; - } -} - -- (void)complete { - [self updatePosition]; - _processingState = completed; - [self broadcastPlaybackEvent]; - if (_playResult) { - NSLog(@"PLAY FINISHED DUE TO COMPLETE"); - _playResult(nil); - _playResult = nil; - } -} - -- (void)setVolume:(float)volume { - [_player setVolume:volume]; -} - -- (void)setSpeed:(float)speed { - if (speed == 1.0 - || (speed < 1.0 && _player.currentItem.canPlaySlowForward) - || (speed > 1.0 && _player.currentItem.canPlayFastForward)) { - _player.rate = speed; - } - [self updatePosition]; -} - -- (void)setLoopMode:(int)loopMode { - _loopMode = loopMode; - if (_player) { - switch (_loopMode) { - case loopOne: - _player.actionAtItemEnd = AVPlayerActionAtItemEndPause; // AVPlayerActionAtItemEndNone - break; - default: - _player.actionAtItemEnd = AVPlayerActionAtItemEndAdvance; - } - } -} - -- (void)setShuffleModeEnabled:(BOOL)shuffleModeEnabled { - NSLog(@"setShuffleModeEnabled: %d", shuffleModeEnabled); - _shuffleModeEnabled = shuffleModeEnabled; - if (!_audioSource) return; - - [self updateOrder]; - - [self enqueueFrom:_index]; -} - -- (void)dumpQueue { - for (int i = 0; i < _player.items.count; i++) { - IndexedPlayerItem *playerItem = _player.items[i]; - for (int j = 0; j < _indexedAudioSources.count; j++) { - IndexedAudioSource *source = _indexedAudioSources[j]; - if (source.playerItem == playerItem) { - NSLog(@"- %d", j); - break; - } - } - } -} - -- (void)setAutomaticallyWaitsToMinimizeStalling:(bool)automaticallyWaitsToMinimizeStalling { - _automaticallyWaitsToMinimizeStalling = automaticallyWaitsToMinimizeStalling; - if (@available(macOS 10.12, iOS 10.0, *)) { - if(_player) { - _player.automaticallyWaitsToMinimizeStalling = automaticallyWaitsToMinimizeStalling; - } - } -} - -- (void)seek:(CMTime)position index:(NSNumber *)newIndex completionHandler:(void (^)(BOOL))completionHandler { - int index = _index; - if (newIndex != [NSNull null]) { - index = [newIndex intValue]; - } - if (index != _index) { - // Jump to a new item - /* if (_playing && index == _index + 1) { */ - /* // Special case for jumping to the very next item */ - /* NSLog(@"seek to next item: %d -> %d", _index, index); */ - /* [_indexedAudioSources[_index] seek:kCMTimeZero]; */ - /* _index = index; */ - /* [_player 
advanceToNextItem]; */ - /* [self broadcastPlaybackEvent]; */ - /* } else */ - { - // Jump to a distant item - //NSLog(@"seek# jump to distant item: %d -> %d", _index, index); - if (_playing) { - [_player pause]; - } - [_indexedAudioSources[_index] seek:kCMTimeZero]; - // The "currentItem" key observer will respect that a seek is already in progress - _seekPos = position; - [self updatePosition]; - [self enqueueFrom:index]; - IndexedAudioSource *source = _indexedAudioSources[_index]; - if (abs((int)(1000 * CMTimeGetSeconds(CMTimeSubtract(source.position, position)))) > 100) { - [self enterBuffering:@"seek to index"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - [source seek:position completionHandler:^(BOOL finished) { - if (@available(macOS 10.12, iOS 10.0, *)) { - if (_playing) { - // Handled by timeControlStatus - } else { - if (_bufferUnconfirmed && !_player.currentItem.playbackBufferFull) { - // Stay in buffering - } else if (source.playerItem.status == AVPlayerItemStatusReadyToPlay) { - [self leaveBuffering:@"seek to index finished, (!bufferUnconfirmed || playbackBufferFull) && ready to play"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - } - } - } else { - if (_bufferUnconfirmed && !_player.currentItem.playbackBufferFull) { - // Stay in buffering - } else if (source.playerItem.status == AVPlayerItemStatusReadyToPlay) { - [self leaveBuffering:@"seek to index finished, (!bufferUnconfirmed || playbackBufferFull) && ready to play"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - } - } - if (_playing) { - [_player play]; - } - _seekPos = kCMTimeInvalid; - [self broadcastPlaybackEvent]; - if (completionHandler) { - completionHandler(finished); - } - }]; - } else { - _seekPos = kCMTimeInvalid; - if (_playing) { - [_player play]; - } - } - } - } else { - // Seek within an item - if (_playing) { - [_player pause]; - } - _seekPos = position; - //NSLog(@"seek. enter buffering. pos = %d", (int)(1000*CMTimeGetSeconds(_indexedAudioSources[_index].position))); - // TODO: Move this into a separate method so it can also - // be used in skip. - [self enterBuffering:@"seek"]; - [self updatePosition]; - [self broadcastPlaybackEvent]; - [_indexedAudioSources[_index] seek:position completionHandler:^(BOOL finished) { - [self updatePosition]; - if (_playing) { - // If playing, buffering will be detected either by: - // 1. checkForDiscontinuity - // 2. timeControlStatus - [_player play]; - } else { - // If not playing, there is no reliable way to detect - // when buffering has completed, so we use - // !playbackBufferEmpty. Although this always seems to - // be full even right after a seek. 
- if (_player.currentItem.playbackBufferEmpty) { - [self enterBuffering:@"seek finished, playbackBufferEmpty"]; - } else { - [self leaveBuffering:@"seek finished, !playbackBufferEmpty"]; - } - [self updatePosition]; - if (_processingState != buffering) { - [self broadcastPlaybackEvent]; - } - } - _seekPos = kCMTimeInvalid; - [self broadcastPlaybackEvent]; - if (completionHandler) { - completionHandler(finished); - } - }]; - } -} - -- (void)dispose { - if (_processingState != none) { - [_player pause]; - _processingState = none; - [self broadcastPlaybackEvent]; - } - if (_timeObserver) { - [_player removeTimeObserver:_timeObserver]; - _timeObserver = 0; - } - if (_indexedAudioSources) { - for (int i = 0; i < [_indexedAudioSources count]; i++) { - [self removeItemObservers:_indexedAudioSources[i].playerItem]; - } - } - if (_player) { - [_player removeObserver:self forKeyPath:@"currentItem"]; - if (@available(macOS 10.12, iOS 10.0, *)) { - [_player removeObserver:self forKeyPath:@"timeControlStatus"]; - } - _player = nil; - } - // Untested: - // [_eventChannel setStreamHandler:nil]; - // [_methodChannel setMethodHandler:nil]; -} - -@end diff --git a/macos/Classes/AudioPlayer.m b/macos/Classes/AudioPlayer.m new file mode 120000 index 0000000..596ca1d --- /dev/null +++ b/macos/Classes/AudioPlayer.m @@ -0,0 +1 @@ +../../darwin/Classes/AudioPlayer.m \ No newline at end of file diff --git a/macos/Classes/AudioSource.m b/macos/Classes/AudioSource.m deleted file mode 100644 index 81534f1..0000000 --- a/macos/Classes/AudioSource.m +++ /dev/null @@ -1,37 +0,0 @@ -#import "AudioSource.h" -#import - -@implementation AudioSource { - NSString *_sourceId; -} - -- (instancetype)initWithId:(NSString *)sid { - self = [super init]; - NSAssert(self, @"super init cannot be nil"); - _sourceId = sid; - return self; -} - -- (NSString *)sourceId { - return _sourceId; -} - -- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex { - return 0; -} - -- (void)findById:(NSString *)sourceId matches:(NSMutableArray *)matches { - if ([_sourceId isEqualToString:sourceId]) { - [matches addObject:self]; - } -} - -- (NSArray *)getShuffleOrder { - return @[]; -} - -- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex { - return 0; -} - -@end diff --git a/macos/Classes/AudioSource.m b/macos/Classes/AudioSource.m new file mode 120000 index 0000000..16881d6 --- /dev/null +++ b/macos/Classes/AudioSource.m @@ -0,0 +1 @@ +../../darwin/Classes/AudioSource.m \ No newline at end of file diff --git a/macos/Classes/ClippingAudioSource.m b/macos/Classes/ClippingAudioSource.m deleted file mode 100644 index 2f3b174..0000000 --- a/macos/Classes/ClippingAudioSource.m +++ /dev/null @@ -1,79 +0,0 @@ -#import "AudioSource.h" -#import "ClippingAudioSource.h" -#import "IndexedPlayerItem.h" -#import "UriAudioSource.h" -#import - -@implementation ClippingAudioSource { - UriAudioSource *_audioSource; - CMTime _start; - CMTime _end; -} - -- (instancetype)initWithId:(NSString *)sid audioSource:(UriAudioSource *)audioSource start:(NSNumber *)start end:(NSNumber *)end { - self = [super initWithId:sid]; - NSAssert(self, @"super init cannot be nil"); - _audioSource = audioSource; - _start = start == [NSNull null] ? kCMTimeZero : CMTimeMake([start intValue], 1000); - _end = end == [NSNull null] ? 
kCMTimeInvalid : CMTimeMake([end intValue], 1000); - return self; -} - -- (UriAudioSource *)audioSource { - return _audioSource; -} - -- (void)findById:(NSString *)sourceId matches:(NSMutableArray *)matches { - [super findById:sourceId matches:matches]; - [_audioSource findById:sourceId matches:matches]; -} - -- (void)attach:(AVQueuePlayer *)player { - [super attach:player]; - _audioSource.playerItem.forwardPlaybackEndTime = _end; - // XXX: Not needed since currentItem observer handles it? - [self seek:kCMTimeZero]; -} - -- (IndexedPlayerItem *)playerItem { - return _audioSource.playerItem; -} - -- (NSArray *)getShuffleOrder { - return @[@(0)]; -} - -- (void)play:(AVQueuePlayer *)player { -} - -- (void)pause:(AVQueuePlayer *)player { -} - -- (void)stop:(AVQueuePlayer *)player { -} - -- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler { - if (!completionHandler || (self.playerItem.status == AVPlayerItemStatusReadyToPlay)) { - CMTime absPosition = CMTimeAdd(_start, position); - [_audioSource.playerItem seekToTime:absPosition toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero completionHandler:completionHandler]; - } -} - -- (CMTime)duration { - return CMTimeSubtract(CMTIME_IS_INVALID(_end) ? self.playerItem.duration : _end, _start); -} - -- (void)setDuration:(CMTime)duration { -} - -- (CMTime)position { - return CMTimeSubtract(self.playerItem.currentTime, _start); -} - -- (CMTime)bufferedPosition { - CMTime pos = CMTimeSubtract(_audioSource.bufferedPosition, _start); - CMTime dur = [self duration]; - return CMTimeCompare(pos, dur) >= 0 ? dur : pos; -} - -@end diff --git a/macos/Classes/ClippingAudioSource.m b/macos/Classes/ClippingAudioSource.m new file mode 120000 index 0000000..d561b1e --- /dev/null +++ b/macos/Classes/ClippingAudioSource.m @@ -0,0 +1 @@ +../../darwin/Classes/ClippingAudioSource.m \ No newline at end of file diff --git a/macos/Classes/ConcatenatingAudioSource.m b/macos/Classes/ConcatenatingAudioSource.m deleted file mode 100644 index bd7b713..0000000 --- a/macos/Classes/ConcatenatingAudioSource.m +++ /dev/null @@ -1,109 +0,0 @@ -#import "AudioSource.h" -#import "ConcatenatingAudioSource.h" -#import -#import - -@implementation ConcatenatingAudioSource { - NSMutableArray *_audioSources; - NSMutableArray *_shuffleOrder; -} - -- (instancetype)initWithId:(NSString *)sid audioSources:(NSMutableArray *)audioSources { - self = [super initWithId:sid]; - NSAssert(self, @"super init cannot be nil"); - _audioSources = audioSources; - return self; -} - -- (int)count { - return _audioSources.count; -} - -- (void)insertSource:(AudioSource *)audioSource atIndex:(int)index { - [_audioSources insertObject:audioSource atIndex:index]; -} - -- (void)removeSourcesFromIndex:(int)start toIndex:(int)end { - if (end == -1) end = _audioSources.count; - for (int i = start; i < end; i++) { - [_audioSources removeObjectAtIndex:start]; - } -} - -- (void)moveSourceFromIndex:(int)currentIndex toIndex:(int)newIndex { - AudioSource *source = _audioSources[currentIndex]; - [_audioSources removeObjectAtIndex:currentIndex]; - [_audioSources insertObject:source atIndex:newIndex]; -} - -- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex { - for (int i = 0; i < [_audioSources count]; i++) { - treeIndex = [_audioSources[i] buildSequence:sequence treeIndex:treeIndex]; - } - return treeIndex; -} - -- (void)findById:(NSString *)sourceId matches:(NSMutableArray *)matches { - [super findById:sourceId matches:matches]; - for (int i = 0; i < [_audioSources count]; 
i++) { - [_audioSources[i] findById:sourceId matches:matches]; - } -} - -- (NSArray *)getShuffleOrder { - NSMutableArray *order = [NSMutableArray new]; - int offset = [order count]; - NSMutableArray *childOrders = [NSMutableArray new]; // array of array of ints - for (int i = 0; i < [_audioSources count]; i++) { - AudioSource *audioSource = _audioSources[i]; - NSArray *childShuffleOrder = [audioSource getShuffleOrder]; - NSMutableArray *offsetChildShuffleOrder = [NSMutableArray new]; - for (int j = 0; j < [childShuffleOrder count]; j++) { - [offsetChildShuffleOrder addObject:@([childShuffleOrder[j] integerValue] + offset)]; - } - [childOrders addObject:offsetChildShuffleOrder]; - offset += [childShuffleOrder count]; - } - for (int i = 0; i < [_audioSources count]; i++) { - [order addObjectsFromArray:childOrders[[_shuffleOrder[i] integerValue]]]; - } - return order; -} - -- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex { - int currentChildIndex = -1; - for (int i = 0; i < [_audioSources count]; i++) { - int indexBefore = treeIndex; - AudioSource *child = _audioSources[i]; - treeIndex = [child shuffle:treeIndex currentIndex:currentIndex]; - if (currentIndex >= indexBefore && currentIndex < treeIndex) { - currentChildIndex = i; - } else {} - } - // Shuffle so that the current child is first in the shuffle order - _shuffleOrder = [NSMutableArray arrayWithCapacity:[_audioSources count]]; - for (int i = 0; i < [_audioSources count]; i++) { - [_shuffleOrder addObject:@(0)]; - } - NSLog(@"shuffle: audioSources.count=%d and shuffleOrder.count=%d", [_audioSources count], [_shuffleOrder count]); - // First generate a random shuffle - for (int i = 0; i < [_audioSources count]; i++) { - int j = arc4random_uniform(i + 1); - _shuffleOrder[i] = _shuffleOrder[j]; - _shuffleOrder[j] = @(i); - } - // Then bring currentIndex to the front - if (currentChildIndex != -1) { - for (int i = 1; i < [_audioSources count]; i++) { - if ([_shuffleOrder[i] integerValue] == currentChildIndex) { - NSNumber *v = _shuffleOrder[0]; - _shuffleOrder[0] = _shuffleOrder[i]; - _shuffleOrder[i] = v; - break; - } - } - } - return treeIndex; -} - -@end diff --git a/macos/Classes/ConcatenatingAudioSource.m b/macos/Classes/ConcatenatingAudioSource.m new file mode 120000 index 0000000..1e2adbb --- /dev/null +++ b/macos/Classes/ConcatenatingAudioSource.m @@ -0,0 +1 @@ +../../darwin/Classes/ConcatenatingAudioSource.m \ No newline at end of file diff --git a/macos/Classes/IndexedAudioSource.m b/macos/Classes/IndexedAudioSource.m deleted file mode 100644 index 316f900..0000000 --- a/macos/Classes/IndexedAudioSource.m +++ /dev/null @@ -1,68 +0,0 @@ -#import "IndexedAudioSource.h" -#import "IndexedPlayerItem.h" -#import - -@implementation IndexedAudioSource { - BOOL _isAttached; -} - -- (instancetype)initWithId:(NSString *)sid { - self = [super init]; - NSAssert(self, @"super init cannot be nil"); - _isAttached = NO; - return self; -} - -- (IndexedPlayerItem *)playerItem { - return nil; -} - -- (BOOL)isAttached { - return _isAttached; -} - -- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex { - [sequence addObject:self]; - return treeIndex + 1; -} - -- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex { - return treeIndex + 1; -} - -- (void)attach:(AVQueuePlayer *)player { - _isAttached = YES; -} - -- (void)play:(AVQueuePlayer *)player { -} - -- (void)pause:(AVQueuePlayer *)player { -} - -- (void)stop:(AVQueuePlayer *)player { -} - -- (void)seek:(CMTime)position { - [self seek:position 
completionHandler:nil]; -} - -- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler { -} - -- (CMTime)duration { - return kCMTimeInvalid; -} - -- (void)setDuration:(CMTime)duration { -} - -- (CMTime)position { - return kCMTimeInvalid; -} - -- (CMTime)bufferedPosition { - return kCMTimeInvalid; -} - -@end diff --git a/macos/Classes/IndexedAudioSource.m b/macos/Classes/IndexedAudioSource.m new file mode 120000 index 0000000..051d504 --- /dev/null +++ b/macos/Classes/IndexedAudioSource.m @@ -0,0 +1 @@ +../../darwin/Classes/IndexedAudioSource.m \ No newline at end of file diff --git a/macos/Classes/IndexedPlayerItem.m b/macos/Classes/IndexedPlayerItem.m deleted file mode 100644 index 87fafe0..0000000 --- a/macos/Classes/IndexedPlayerItem.m +++ /dev/null @@ -1,16 +0,0 @@ -#import "IndexedPlayerItem.h" -#import "IndexedAudioSource.h" - -@implementation IndexedPlayerItem { - IndexedAudioSource *_audioSource; -} - --(void)setAudioSource:(IndexedAudioSource *)audioSource { - _audioSource = audioSource; -} - --(IndexedAudioSource *)audioSource { - return _audioSource; -} - -@end diff --git a/macos/Classes/IndexedPlayerItem.m b/macos/Classes/IndexedPlayerItem.m new file mode 120000 index 0000000..04e55fc --- /dev/null +++ b/macos/Classes/IndexedPlayerItem.m @@ -0,0 +1 @@ +../../darwin/Classes/IndexedPlayerItem.m \ No newline at end of file diff --git a/macos/Classes/JustAudioPlugin.m b/macos/Classes/JustAudioPlugin.m deleted file mode 100644 index 982a260..0000000 --- a/macos/Classes/JustAudioPlugin.m +++ /dev/null @@ -1,55 +0,0 @@ -#import "JustAudioPlugin.h" -#import "AudioPlayer.h" -#import -#include - -@implementation JustAudioPlugin { - NSObject* _registrar; - BOOL _configuredSession; -} - -+ (void)registerWithRegistrar:(NSObject*)registrar { - FlutterMethodChannel* channel = [FlutterMethodChannel - methodChannelWithName:@"com.ryanheise.just_audio.methods" - binaryMessenger:[registrar messenger]]; - JustAudioPlugin* instance = [[JustAudioPlugin alloc] initWithRegistrar:registrar]; - [registrar addMethodCallDelegate:instance channel:channel]; -} - -- (instancetype)initWithRegistrar:(NSObject *)registrar { - self = [super init]; - NSAssert(self, @"super init cannot be nil"); - _registrar = registrar; - return self; -} - -- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result { - if ([@"init" isEqualToString:call.method]) { - NSArray* args = (NSArray*)call.arguments; - NSString* playerId = args[0]; - /*AudioPlayer* player =*/ [[AudioPlayer alloc] initWithRegistrar:_registrar playerId:playerId configuredSession:_configuredSession]; - result(nil); - } else if ([@"setIosCategory" isEqualToString:call.method]) { -#if TARGET_OS_IPHONE - NSNumber* categoryIndex = (NSNumber*)call.arguments; - AVAudioSessionCategory category = nil; - switch (categoryIndex.integerValue) { - case 0: category = AVAudioSessionCategoryAmbient; break; - case 1: category = AVAudioSessionCategorySoloAmbient; break; - case 2: category = AVAudioSessionCategoryPlayback; break; - case 3: category = AVAudioSessionCategoryRecord; break; - case 4: category = AVAudioSessionCategoryPlayAndRecord; break; - case 5: category = AVAudioSessionCategoryMultiRoute; break; - } - if (category) { - _configuredSession = YES; - } - [[AVAudioSession sharedInstance] setCategory:category error:nil]; -#endif - result(nil); - } else { - result(FlutterMethodNotImplemented); - } -} - -@end diff --git a/macos/Classes/JustAudioPlugin.m b/macos/Classes/JustAudioPlugin.m new file mode 120000 index 
0000000..8583f76 --- /dev/null +++ b/macos/Classes/JustAudioPlugin.m @@ -0,0 +1 @@ +../../darwin/Classes/JustAudioPlugin.m \ No newline at end of file diff --git a/macos/Classes/LoopingAudioSource.m b/macos/Classes/LoopingAudioSource.m deleted file mode 100644 index ba4b52b..0000000 --- a/macos/Classes/LoopingAudioSource.m +++ /dev/null @@ -1,53 +0,0 @@ -#import "AudioSource.h" -#import "LoopingAudioSource.h" -#import - -@implementation LoopingAudioSource { - // An array of duplicates - NSArray *_audioSources; // -} - -- (instancetype)initWithId:(NSString *)sid audioSources:(NSArray *)audioSources { - self = [super initWithId:sid]; - NSAssert(self, @"super init cannot be nil"); - _audioSources = audioSources; - return self; -} - -- (int)buildSequence:(NSMutableArray *)sequence treeIndex:(int)treeIndex { - for (int i = 0; i < [_audioSources count]; i++) { - treeIndex = [_audioSources[i] buildSequence:sequence treeIndex:treeIndex]; - } - return treeIndex; -} - -- (void)findById:(NSString *)sourceId matches:(NSMutableArray *)matches { - [super findById:sourceId matches:matches]; - for (int i = 0; i < [_audioSources count]; i++) { - [_audioSources[i] findById:sourceId matches:matches]; - } -} - -- (NSArray *)getShuffleOrder { - NSMutableArray *order = [NSMutableArray new]; - int offset = (int)[order count]; - for (int i = 0; i < [_audioSources count]; i++) { - AudioSource *audioSource = _audioSources[i]; - NSArray *childShuffleOrder = [audioSource getShuffleOrder]; - for (int j = 0; j < [childShuffleOrder count]; j++) { - [order addObject:@([childShuffleOrder[j] integerValue] + offset)]; - } - offset += [childShuffleOrder count]; - } - return order; -} - -- (int)shuffle:(int)treeIndex currentIndex:(int)currentIndex { - // TODO: This should probably shuffle the same way on all duplicates. 
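- // (As written, each duplicate below is shuffled independently, so the copies can end up in different orders.)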
- for (int i = 0; i < [_audioSources count]; i++) { - treeIndex = [_audioSources[i] shuffle:treeIndex currentIndex:currentIndex]; - } - return treeIndex; -} - -@end diff --git a/macos/Classes/LoopingAudioSource.m b/macos/Classes/LoopingAudioSource.m new file mode 120000 index 0000000..17c7958 --- /dev/null +++ b/macos/Classes/LoopingAudioSource.m @@ -0,0 +1 @@ +../../darwin/Classes/LoopingAudioSource.m \ No newline at end of file diff --git a/macos/Classes/UriAudioSource.m b/macos/Classes/UriAudioSource.m deleted file mode 100644 index 91321d4..0000000 --- a/macos/Classes/UriAudioSource.m +++ /dev/null @@ -1,79 +0,0 @@ -#import "UriAudioSource.h" -#import "IndexedAudioSource.h" -#import "IndexedPlayerItem.h" -#import - -@implementation UriAudioSource { - NSString *_uri; - IndexedPlayerItem *_playerItem; - /* CMTime _duration; */ -} - -- (instancetype)initWithId:(NSString *)sid uri:(NSString *)uri { - self = [super initWithId:sid]; - NSAssert(self, @"super init cannot be nil"); - _uri = uri; - if ([_uri hasPrefix:@"file://"]) { - _playerItem = [[IndexedPlayerItem alloc] initWithURL:[NSURL fileURLWithPath:[_uri substringFromIndex:7]]]; - } else { - _playerItem = [[IndexedPlayerItem alloc] initWithURL:[NSURL URLWithString:_uri]]; - } - if (@available(macOS 10.13, iOS 11.0, *)) { - // This does the best at reducing distortion on voice with speeds below 1.0 - _playerItem.audioTimePitchAlgorithm = AVAudioTimePitchAlgorithmTimeDomain; - } - /* NSKeyValueObservingOptions options = */ - /* NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew; */ - /* [_playerItem addObserver:self */ - /* forKeyPath:@"duration" */ - /* options:options */ - /* context:nil]; */ - return self; -} - -- (IndexedPlayerItem *)playerItem { - return _playerItem; -} - -- (NSArray *)getShuffleOrder { - return @[@(0)]; -} - -- (void)play:(AVQueuePlayer *)player { -} - -- (void)pause:(AVQueuePlayer *)player { -} - -- (void)stop:(AVQueuePlayer *)player { -} - -- (void)seek:(CMTime)position completionHandler:(void (^)(BOOL))completionHandler { - if (!completionHandler || (_playerItem.status == AVPlayerItemStatusReadyToPlay)) { - [_playerItem seekToTime:position toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero completionHandler:completionHandler]; - } -} - -- (CMTime)duration { - return _playerItem.duration; -} - -- (void)setDuration:(CMTime)duration { -} - -- (CMTime)position { - return _playerItem.currentTime; -} - -- (CMTime)bufferedPosition { - NSValue *last = _playerItem.loadedTimeRanges.lastObject; - if (last) { - CMTimeRange timeRange = [last CMTimeRangeValue]; - return CMTimeAdd(timeRange.start, timeRange.duration); - } else { - return _playerItem.currentTime; - } - return kCMTimeInvalid; -} - -@end diff --git a/macos/Classes/UriAudioSource.m b/macos/Classes/UriAudioSource.m new file mode 120000 index 0000000..8effbd7 --- /dev/null +++ b/macos/Classes/UriAudioSource.m @@ -0,0 +1 @@ +../../darwin/Classes/UriAudioSource.m \ No newline at end of file diff --git a/pubspec.lock b/pubspec.lock index 6c8bd0b..20786c0 100644 --- a/pubspec.lock +++ b/pubspec.lock @@ -8,6 +8,13 @@ packages: url: "https://pub.dartlang.org" source: hosted version: "2.4.2" + audio_session: + dependency: "direct main" + description: + name: audio_session + url: "https://pub.dartlang.org" + source: hosted + version: "0.0.7" boolean_selector: dependency: transitive description: diff --git a/pubspec.yaml b/pubspec.yaml index 12616f4..8a4ea14 100644 --- a/pubspec.yaml +++ b/pubspec.yaml @@ -1,13 +1,14 @@ name: just_audio description: Flutter 
plugin to play audio from streams, files, assets, DASH/HLS streams and playlists. Works with audio_service to play audio in the background. -version: 0.3.1 +version: 0.4.4 homepage: https://github.com/ryanheise/just_audio environment: - sdk: '>=2.6.0 <3.0.0' - flutter: ">=1.12.8 <2.0.0" + sdk: ">=2.7.0 <3.0.0" + flutter: ">=1.12.13+hotfix.5" dependencies: + audio_session: ^0.0.7 rxdart: ^0.24.1 path: ^1.6.4 path_provider: ^1.6.10