Flutter AudioService plugin not working on iOS release - flutter

I'm using the AudioService plugin and it works fine on Android and in debug mode on iOS. But once I test it on a real iOS device (in release mode), it gives me exceptions.
First of all, this is my background task entrypoint:
void quranStartListeningPoint() => AudioServiceBackground.run(() => QuranAudioService());
Second, this is my play function:
Future<void> startListeningToAyah({model.Ayah ayah, model.Surah surah}) async {
if (AudioService.running) {
await AudioService.stop();
await Future.delayed(Duration(seconds: 1));
}
await AudioService.start(
androidNotificationColor: 0XFFB590EE,
backgroundTaskEntrypoint: quranStartListeningPoint,
params: {
'sheikhId': "$selectedReciter",
"ayahIndex": ayah.numberInSurah,
"surah": surah.number,
"quranModel": _quranDao.quranModelAsJson
},
);
}
Next, I convert quranModelAsJson from JSON back into a model, because I need the whole model in the service class and this is the only way to send it (as far as I know).
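For reference, here is a minimal sketch of that JSON round trip. It assumes QuranModel exposes toJson()/fromJson() (not shown in the question) and uses dart:convert; the params map passed to AudioService.start can only carry values a platform channel can encode (strings, numbers, lists, maps).
import 'dart:convert';

// Sending side: encode the model into a String before starting the service.
Map<String, dynamic> buildParams(QuranModel model) => {
      'quranModel': jsonEncode(model.toJson()),
    };

// Receiving side (inside onStart): decode the String back into the model.
QuranModel decodeQuranModel(Map<String, dynamic> params) =>
    QuranModel.fromJson(jsonDecode(params['quranModel'] as String));
The service class itself: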
class QuranAudioService extends BackgroundAudioTask {
final _audioPlayer = AudioPlayer();
final String baseUrl = "https://cdn.alquran.cloud/media/audio/ayah/";
String ayahUrl;
int surahNumber;
int ayahIndex;
int renewSurah = 0;
QuranModel model;
Surah surah;
String sheikhId;
@override
Future<void> onStart(Map<String, dynamic> params) async {
await _audioPlayer.setReleaseMode(ReleaseMode.STOP);
implementParams(params);
onCompleteListener();
AudioServiceBackground.setState(
systemActions: [MediaAction.seekTo],
controls: getPlayControllers(),
playing: true,
processingState: AudioProcessingState.connecting);
ayahUrl = "$baseUrl$sheikhId/${surah.ayahs[ayahIndex].number}";
await _audioPlayer.play("$ayahUrl");
setMediaItem();
AudioServiceBackground.setState(
controls: getPlayControllers(),
playing: true,
processingState: AudioProcessingState.ready,
systemActions: [MediaAction.seekTo],
);
}
void setMediaItem() {
AudioServiceBackground.setMediaItem(
MediaItem(
extras: {"surahIndex": surahNumber, "renewSurah": renewSurah},
id: "$ayahIndex",
album: "${surah.englishName}",
title: "${surah.name}",
),
);
}
@override
Future<void> onPause() async {
// Broadcast that we're paused, and what controls are available.
AudioServiceBackground.setState(
controls: getPauseControllers(),
systemActions: [MediaAction.seekTo],
playing: false,
processingState: AudioProcessingState.ready);
// Pause the audio.
_audioPlayer.pause();
}
@override
Future<void> onStop() async {
_audioPlayer.stop();
if (ayahIndex == surah.ayahs.length) {
await AudioServiceBackground.setState(
controls: [replayControl],
playing: false,
processingState: AudioProcessingState.stopped);
} else {
await AudioServiceBackground.setState(
controls: [],
playing: false,
processingState: AudioProcessingState.stopped);
return super.onStop();
}
}
@override
Future<void> onPlay() async {
setMediaItem();
AudioServiceBackground.setState(
controls: getPlayControllers(),
playing: true,
processingState: AudioProcessingState.ready,
systemActions: [MediaAction.seekTo],
);
await _audioPlayer.play(ayahUrl);
renewSurah = 0;
}
@override
Future<void> onSkipToNext() async {
playNext();
}
@override
Future<void> onSkipToPrevious() async {
playPrevious();
}
// @override
// void onRewind() {
// ayahIndex = 0;
// ayahUrl = "$baseUrl$sheikhId/${surah.ayahs[ayahIndex].number}";
// this.onPlay();
// }
void implementParams(Map<String, dynamic> params) {
surahNumber = params["surah"] - 1;
ayahIndex = params["ayahIndex"] - 1;
sheikhId = params["sheikhId"];
if (model == null) model = QuranModel.fromJson(params["quranModel"]);
surah = model.surahs[surahNumber];
}
void onCompleteListener() {
_audioPlayer.onPlayerCompletion.listen((event) {
playNext();
});
}
void playNext() async {
ayahIndex++;
if (ayahIndex < surah.ayahs.length) {
ayahUrl = "$baseUrl$sheikhId/${surah.ayahs[ayahIndex].number}";
this.onPlay();
} else
changeNextSurahIndex();
}
void changeNextSurahIndex() {
renewSurah = 1;
if (surahNumber == 113) {
surahNumber = 0;
} else
surahNumber++;
ayahIndex = 0;
surah = model.surahs[surahNumber];
ayahUrl = "$baseUrl$sheikhId/${surah.ayahs[ayahIndex].number}";
this.onPlay();
}
void changePreviousSurahIndex() {
if (surahNumber == 0) {
surahNumber = 113;
} else
surahNumber--;
ayahIndex = 0;
renewSurah = 1;
surah = model.surahs[surahNumber];
ayahUrl = "$baseUrl$sheikhId/${surah.ayahs[ayahIndex].number}";
this.onPlay();
}
void playPrevious() async {
if (ayahIndex > 0) {
ayahIndex--;
ayahUrl = "$baseUrl$sheikhId/${surah.ayahs[ayahIndex].number}";
this.onPlay();
} else {
changePreviousSurahIndex();
}
}
List<MediaControl> getPlayControllers() {
return [
skipToNextControl,
pauseControl,
skipToPreviousControl,
stopControl
];
}
List<MediaControl> getPauseControllers() {
return [skipToNextControl, playControl, skipToPreviousControl, stopControl];
}
}
It kept telling me that ayahs was called on null, which means my JSON was not converted to the model. So I deleted this implementation and just played a plain URL instead, and the exception was:
2020-08-31 17:58:08.458205-0400 Runner[700:75506] iOS => call startHeadlessService, playerId bb98efb6-a819-4ea7-a566-1dc6f0ff3df4
2020-08-31 17:58:08.471709-0400 Runner[700:76237] [VERBOSE-2:ui_dart_state.cc(166)] Unhandled Exception: NoSuchMethodError: The method '*' was called on null.
Receiver: null
Tried calling: *()
#0 AudioServiceBackground.run (package:audio_service/audio_service.dart:144)
<asynchronous suspension>

Related

Unhandled Exception: Bad state: Tried to use PaginationNotifier after `dispose` was called

I have a StateNotifierProvider that calls an async function which loads some images from the internal storage and adds them to the AsyncValue data:
//Provider declaration
final paginationImagesProvider = StateNotifierProvider.autoDispose<PaginationNotifier, AsyncValue<List<Uint8List?>>>((ref) {
return PaginationNotifier(folderId: ref.watch(localStorageSelectedFolderProvider), itemsPerBatch: 100, ref: ref);
});
//Actual class with AsyncValue as State
class PaginationNotifier extends StateNotifier<AsyncValue<List<Uint8List?>>> {
final int itemsPerBatch;
final String folderId;
final Ref ref;
int _numberOfItemsInFolder = 0;
bool _alreadyFetching = false;
bool _hasMoreItems = true;
PaginationNotifier({required this.itemsPerBatch, required this.folderId, required this.ref}) : super(const AsyncValue.loading()) {
log("PaginationNotifier created with folderId: $folderId, itemsPerBatch: $itemsPerBatch");
init();
}
final List<Uint8List?> _items = [];
void init() {
if (_items.isEmpty) {
log("fetchingFirstBatch");
_fetchFirstBatch();
}
}
Future<List<Uint8List?>> _fetchNextItems() async {
List<AssetEntity> images = (await (await PhotoManager.getAssetPathList())
.firstWhere((element) => element.id == folderId)
.getAssetListRange(start: _items.length, end: _items.length + itemsPerBatch));
List<Uint8List?> newItems = [];
for (AssetEntity image in images) {
newItems.add(await image.thumbnailData);
}
return newItems;
}
void _updateData(List<Uint8List?> result) {
if (result.isEmpty) {
state = AsyncValue.data(_items);
} else {
state = AsyncValue.data(_items..addAll(result));
}
_hasMoreItems = _numberOfItemsInFolder > _items.length;
}
Future<void> _fetchFirstBatch() async {
try {
_numberOfItemsInFolder = await (await PhotoManager.getAssetPathList()).firstWhere((element) => element.id == folderId).assetCountAsync;
state = const AsyncValue.loading();
final List<Uint8List?> result = await _fetchNextItems();
_updateData(result);
} catch (e, stk) {
state = AsyncValue.error(e, stk);
}
}
Future<void> fetchNextBatch() async {
if (_alreadyFetching || !_hasMoreItems) return;
_alreadyFetching = true;
log("data updated");
state = AsyncValue.data(_items);
try {
final result = await _fetchNextItems();
_updateData(result);
} catch (e, stk) {
state = AsyncValue.error(e, stk);
log("error catched");
}
_alreadyFetching = false;
}
}
Then I use a scroll controller attached to a CustomScrollView in order to call fetchNextBatch() when the scroll position changes:
@override
void initState() {
if (!controller.hasListeners && !controller.hasClients) {
log("listener added");
controller.addListener(() {
double maxScroll = controller.position.maxScrollExtent;
double position = controller.position.pixels;
if ((position > maxScroll * 0.2 || position == 0) && ref.read(paginationImagesProvider.notifier).mounted) {
ref.read(paginationImagesProvider.notifier).fetchNextBatch();
}
});
}
super.initState();
}
The problem is that when the StateNotifierProvider is fetching more data in the async function fetchNextBatch() and I go back on the navigator (like navigator.pop()), Flutter gives me an error:
[ERROR:flutter/runtime/dart_vm_initializer.cc(41)] Unhandled Exception: Bad state: Tried to use PaginationNotifier after dispose was called.
Consider checking mounted.
I think that the async function responsible for loading data completes after I've popped the page from the stack (which triggers a provider dispose).
I'm probably missing something and I still haven't found a fix for this error, any help is appreciated.
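A minimal sketch of one way to guard against this, assuming the only change needed is to stop touching state once the notifier is disposed (mounted comes from StateNotifier, as the error message suggests):
Future<void> fetchNextBatch() async {
  if (_alreadyFetching || !_hasMoreItems) return;
  _alreadyFetching = true;
  state = AsyncValue.data(_items);
  try {
    final result = await _fetchNextItems();
    if (!mounted) return; // the page was popped while we were fetching
    _updateData(result);
  } catch (e, stk) {
    if (mounted) state = AsyncValue.error(e, stk);
  } finally {
    _alreadyFetching = false;
  }
}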

Unhandled Exception: Invalid argument: is a regular instance: Instance of 'LocationDto'

Currently I am using this plugin to get the background location of a user. It works great on iOS but fails on Android after recent updates, and progress on the plugin has stalled, so I am now faced with the issue below.
So, in my main class I have this:
@override
void initState() {
super.initState();
WidgetsBinding.instance.addObserver(this);
// onLineStatus = UserSimplePreferences.getUsername() ?? ''
onLineStatus = UserSimplePrefences.getButtonStatus() ?? false;
displayToastMessage(onLineStatus.toString(), context);
osmController = MapController(
initMapWithUserPosition: true,
//initPosition: initPosition,
);
osmController.addObserver(this);
scaffoldKey = GlobalKey<ScaffoldState>();
if (IsolateNameServer.lookupPortByName(
LocationServiceRepository.isolateName) !=
null) {
IsolateNameServer.removePortNameMapping(
LocationServiceRepository.isolateName);
}
IsolateNameServer.registerPortWithName(
port.sendPort, LocationServiceRepository.isolateName);
port.listen(
(dynamic data) async {
if (data != null) await updateUI(data);
},
);
initPlatformState();
}
Future<void> updateUI(LocationDto data) async {
await _updateNotificationText(data);
}
Future<void> _updateNotificationText(LocationDto data) async {
await BackgroundLocator.updateNotificationText(
title: "Your location is updated",
msg: "${DateTime.now()}",
bigMsg: "${data.latitude}, ${data.longitude}");
}
Future<void> initPlatformState() async {
print('Initializing...');
await BackgroundLocator.initialize();
await BackgroundLocator.isServiceRunning();
}
Future<void> _startLocator() async {
Map<String, dynamic> data = {'countInit': 1};
return await BackgroundLocator.registerLocationUpdate(
LocationCallbackHandler.callback,
initCallback: LocationCallbackHandler.initCallback,
initDataCallback: data,
disposeCallback: LocationCallbackHandler.disposeCallback,
iosSettings: IOSSettings(
accuracy: LocationAccuracy.NAVIGATION,
distanceFilter: 0,
stopWithTerminate: true),
autoStop: false,
androidSettings: AndroidSettings(
accuracy: LocationAccuracy.NAVIGATION,
interval: 5,
distanceFilter: 0,
client: LocationClient.google,
androidNotificationSettings: AndroidNotificationSettings(
notificationChannelName: 'Location tracking',
notificationTitle: 'Start Location Tracking',
notificationMsg: 'Track location in background',
notificationBigMsg:
'Background location is on to keep the app up-to-date with your location. This is required for main features to work properly when the app is not running.',
notificationIconColor: Colors.grey,
notificationTapCallback:
LocationCallbackHandler.notificationCallback)));
}
This is the LocationServiceRepository class:
class LocationServiceRepository {
static LocationServiceRepository _instance = LocationServiceRepository._();
LocationServiceRepository._();
factory LocationServiceRepository() {
return _instance;
}
static const String isolateName = 'LocatorIsolate';
int _count = -1;
Future<void> init(Map<dynamic, dynamic> params) async {
//TODO change logs
print("***********Init callback handler");
if (params.containsKey('countInit')) {
dynamic tmpCount = params['countInit'];
if (tmpCount is double) {
_count = tmpCount.toInt();
} else if (tmpCount is String) {
_count = int.parse(tmpCount);
} else if (tmpCount is int) {
_count = tmpCount;
} else {
_count = -2;
}
} else {
_count = 0;
}
print("$_count");
await setLogLabel("start");
final SendPort? send = IsolateNameServer.lookupPortByName(isolateName);
send?.send(null);
}
Future<void> dispose() async {
print("***********Dispose callback handler");
print("$_count");
await setLogLabel("end");
final SendPort? send = IsolateNameServer.lookupPortByName(isolateName);
send?.send(null);
}
@pragma('vm:entry-point')
Future<void> callback(LocationDto locationDto) async {
print('$_count location in dart: ${locationDto.toString()}');
await setLogPosition(_count, locationDto);
final SendPort? send = IsolateNameServer.lookupPortByName(isolateName);
send?.send(locationDto);//error here
_count++;
}
static Future<void> setLogLabel(String label) async {
final date = DateTime.now();
// await FileManager.writeToLogFile(
// '------------\n$label: ${formatDateLog(date)}\n------------\n');
}
static Future<void> setLogPosition(int count, LocationDto data) async {
final date = DateTime.now();
// await FileManager.writeToLogFile(
// '$count : ${formatDateLog(date)} --> ${formatLog(data)} --- isMocked: ${data.isMocked}\n');
}
static double dp(double val, int places) {
num mod = pow(10.0, places);
return ((val * mod).round().toDouble() / mod);
}
static String formatDateLog(DateTime date) {
return date.hour.toString() +
":" +
date.minute.toString() +
":" +
date.second.toString();
}
static String formatLog(LocationDto locationDto) {
return dp(locationDto.latitude, 4).toString() +
" " +
dp(locationDto.longitude, 4).toString();
}
}
And this is the LocationCallbackHandler class:
class LocationCallbackHandler {
static Future<void> initCallback(Map<dynamic, dynamic> params) async {
LocationServiceRepository myLocationCallbackRepository =
LocationServiceRepository();
await myLocationCallbackRepository.init(params);
}
static Future<void> disposeCallback() async {
LocationServiceRepository myLocationCallbackRepository =
LocationServiceRepository();
await myLocationCallbackRepository.dispose();
}
@pragma('vm:entry-point')
static void callback(LocationDto locationDto) async {
LocationServiceRepository myLocationCallbackRepository =
LocationServiceRepository();
await myLocationCallbackRepository.callback(locationDto);
}
static Future<void> notificationCallback() async {
print('***notificationCallback');
}
}
Everything was working fine until this update to Flutter and Kotlin. Now I have the error:
[ERROR:flutter/runtime/dart_vm_initializer.cc(41)] Unhandled Exception: Invalid argument: is a regular instance: Instance of 'LocationDto'
E/flutter (26006): #0 _SendPort._sendInternal (dart:isolate-patch/isolate_patch.dart:249:43)
E/flutter (26006): #1 _SendPort.send (dart:isolate-patch/isolate_patch.dart:230:5)
E/flutter (26006): #2 LocationServiceRepository.callback
package:drivers_app/tabPages/location_service_reposirtory.dart:59
The error points to this code
@pragma('vm:entry-point')
Future<void> callback(LocationDto locationDto) async {
print('$_count location in dart: ${locationDto.toString()}');
await setLogPosition(_count, locationDto);
final SendPort? send = IsolateNameServer.lookupPortByName(isolateName);
send?.send(locationDto); //error here
_count++;
}
There is no reply from the plugin developer. Any help would be appreciated.
https://github.com/Yukams/background_locator_fixed/pull/53
There were some errors after updating Flutter to version 3. If you run into this error, please visit the link above for the fix.
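A common workaround for this kind of isolate-messaging error is to send plain, JSON-friendly data through the SendPort instead of the LocationDto instance itself. A minimal sketch, assuming LocationDto exposes toJson()/fromJson() (if it does not, copy the needed fields such as latitude and longitude into a Map by hand):
@pragma('vm:entry-point')
Future<void> callback(LocationDto locationDto) async {
  final SendPort? send = IsolateNameServer.lookupPortByName(isolateName);
  // A Map of primitives can cross the isolate boundary; the class instance cannot.
  send?.send(locationDto.toJson());
  _count++;
}

// And on the UI side, rebuild the object before using it:
port.listen(
  (dynamic data) async {
    if (data != null) await updateUI(LocationDto.fromJson(data));
  },
);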

How to use any state management for my bluetooth app?

I am trying to use a connected Bluetooth device on other pages, but I'm unable to do that. I tried to use Provider, but that did not work; parameter passing did not work either.
After testing, I am using the following.
I made a class ReactiveProvider:
class ReactiveProvider {
Stream<ConnectionStateUpdate> get currentConnectionStream {
return flutterReactiveBle.connectToAdvertisingDevice(
id: _foundBleUARTDevices[index].id,
prescanDuration: const Duration(seconds: 1),
withServices: [_uartUuid, _uartRx, _uartTx],
);
}
}
and set it up at startup:
void main() {
runApp(
MultiProvider(providers: [
StreamProvider<ConnectionStateUpdate>(
create: (context) => ReactiveProvider().currentConnectionStream,
initialData: const ConnectionStateUpdate(
deviceId: "",
connectionState: DeviceConnectionState.disconnected,
failure: null),
)
], child: const MainApp()),
);
}
and in the StatefulWidget:
final _currentConnectionStream = Provider.of<ConnectionStateUpdate>(context);
I got these errors:
The instance member 'context' can't be accessed in an initializer.
Try replacing the reference to the instance member with a different expression
and
The method 'listen' isn't defined for the type 'ConnectionStateUpdate'.
Try correcting the name to the name of an existing method, or defining a method named 'listen'.
in the following function:
_connection = _currentConnectionStream.listen((event) {});
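For what it's worth, the two errors come from different things: context is not available in a field initializer, and a StreamProvider<ConnectionStateUpdate> exposes the latest value rather than the stream itself, so there is nothing to call listen() on. A minimal sketch of two ways around this, assuming the provider package (context.watch/context.read are provider extensions):
// (1) Read the latest value inside build instead of in an initializer:
@override
Widget build(BuildContext context) {
  final update = context.watch<ConnectionStateUpdate>();
  return Text('${update.connectionState}');
}

// (2) If you really need to call listen(), provide the Stream itself:
Provider<Stream<ConnectionStateUpdate>>(
  create: (_) => ReactiveProvider().currentConnectionStream,
),
// and subscribe later, e.g. in didChangeDependencies:
// _connection = context.read<Stream<ConnectionStateUpdate>>().listen((event) {});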
I want to access the following variables on another page using some state management solution:
final flutterReactiveBle = FlutterReactiveBle();
List<DiscoveredDevice> _foundBleUARTDevices = [];
late StreamSubscription<DiscoveredDevice> _scanStream;
late Stream<ConnectionStateUpdate> _currentConnectionStream;
late StreamSubscription<ConnectionStateUpdate> _connection;
late QualifiedCharacteristic _txCharacteristic;
//late QualifiedCharacteristic _rxCharacteristic;
late Stream<List<int>> _receivedDataStream;
These are the other functions I am using:
void onNewReceivedData(List<int> data) {
_numberOfMessagesReceived += 1;
_receivedData
.add("$_numberOfMessagesReceived: ${String.fromCharCodes(data)}");
if (_receivedData.length > 10) {
_receivedData.removeAt(0);
}
}
void _disconnect() async {
await _connection.cancel();
_connected = false;
}
void _stopScan() async {
await _scanStream.cancel();
_scanning = false;
}
void _startScan() async {
_foundBleUARTDevices = [];
_scanning = true;
_scanStream = flutterReactiveBle
.scanForDevices(withServices: [_uartUuid]).listen((device) {
if (_foundBleUARTDevices.every((element) => element.id != device.id)) {
_foundBleUARTDevices.add(device);
}
}, onError: (Object error) {
_logTexts = "${_logTexts}ERROR while scanning:$error \n";
}, onDone: () async {
await _scanStream.cancel();
_scanning = false;
});
}
void onConnectDevice(index) {
_currentConnectionStream = flutterReactiveBle.connectToAdvertisingDevice(
id: _foundBleUARTDevices[index].id,
prescanDuration: const Duration(seconds: 1),
withServices: [_uartUuid, _uartRx, _uartTx],
);
_logTexts = "";
_connection = _currentConnectionStream.listen((event) {
var id = event.deviceId.toString();
switch (event.connectionState) {
case DeviceConnectionState.connecting:
{
_logTexts = "${_logTexts}Connecting to $id\n";
break;
}
case DeviceConnectionState.connected:
{
_connected = true;
_logTexts = "${_logTexts}Connected to $id\n";
_numberOfMessagesReceived = 0;
_receivedData = [];
_txCharacteristic = QualifiedCharacteristic(
serviceId: _uartUuid,
characteristicId: _uartTx,
deviceId: event.deviceId);
_receivedDataStream =
flutterReactiveBle.subscribeToCharacteristic(_txCharacteristic);
_receivedDataStream.listen((data) {
onNewReceivedData(data);
}, onError: (dynamic error) {
_logTexts = "${_logTexts}Error:$error$id\n";
});
break;
}
case DeviceConnectionState.disconnecting:
{
_connected = false;
_logTexts = "${_logTexts}Disconnecting from $id\n";
break;
}
case DeviceConnectionState.disconnected:
{
_logTexts = "${_logTexts}Disconnected from $id\n";
break;
}
}
});
}
Another question I have is how I can connect, or stay connected, using the void onConnectDevice(index) function, because with Provider you shouldn't need to pass the parameters around.

In Flutter, how can we use audio_service to fetch dynamic data

await AudioService.start(
backgroundTaskEntrypoint: _audioPlayerTaskEntrypoint,
androidNotificationChannelName: 'Audio Player',
androidNotificationColor: 0xFF2196f3,
androidNotificationIcon: 'mipmap/ic_launcher',
params: getParams(),
);
This is the code snippet where I am calling AudioService.start, but I am unable to start the service.
I am fetching the audio items from Firebase and want to load them as a list into audio_service, but I am unable to do that. Below is my class where I have defined an audio task extending the background task.
import 'package:audio_service/audio_service.dart';
import 'package:just_audio/just_audio.dart';
MediaControl playControl = MediaControl(
androidIcon: 'drawable/ic_action_play_arrow',
label: 'Play',
action: MediaAction.play,
);
MediaControl pauseControl = MediaControl(
androidIcon: 'drawable/ic_action_pause',
label: 'Pause',
action: MediaAction.pause,
);
MediaControl skipToNextControl = MediaControl(
androidIcon: 'drawable/ic_action_skip_next',
label: 'Next',
action: MediaAction.skipToNext,
);
MediaControl skipToPreviousControl = MediaControl(
androidIcon: 'drawable/ic_action_skip_previous',
label: 'Previous',
action: MediaAction.skipToPrevious,
);
MediaControl stopControl = MediaControl(
androidIcon: 'drawable/ic_action_stop',
label: 'Stop',
action: MediaAction.stop,
);
class AudioPlayerTask extends BackgroundAudioTask {
//
var _queue = <MediaItem>[];
int _queueIndex = -1;
AudioPlayer _audioPlayer = new AudioPlayer();
AudioProcessingState _skipState;
bool _playing;
bool get hasNext => _queueIndex + 1 < _queue.length;
bool get hasPrevious => _queueIndex > 0;
MediaItem get mediaItem => _queue[_queueIndex];
StreamSubscription<AudioPlaybackState> _playerStateSubscription;
StreamSubscription<AudioPlaybackEvent> _eventSubscription;
@override
void onStart(Map<String, dynamic> params) {
print("-------------------------------------started");
_queue.clear();
List mediaItems = params['data'];
for (int i = 0; i < mediaItems.length; i++) {
MediaItem mediaItem = MediaItem.fromJson(mediaItems[i]);
_queue.add(mediaItem);
}
_playerStateSubscription = _audioPlayer.playbackStateStream
.where((state) => state == AudioPlaybackState.completed)
.listen((state) {
_handlePlaybackCompleted();
});
_eventSubscription = _audioPlayer.playbackEventStream.listen((event) {
final bufferingState =
event.buffering ? AudioProcessingState.buffering : null;
switch (event.state) {
case AudioPlaybackState.paused:
_setState(
processingState: bufferingState ?? AudioProcessingState.ready,
position: event.position);
break;
case AudioPlaybackState.playing:
_setState(
processingState: bufferingState ?? AudioProcessingState.ready,
position: event.position);
break;
case AudioPlaybackState.connecting:
_setState(
processingState: _skipState ?? AudioProcessingState.connecting,
position: event.position);
break;
default:
}
});
AudioServiceBackground.setQueue(_queue);
onSkipToNext();
}
@override
void onPlay() {
if (_skipState == null) {
_playing = true;
_audioPlayer.play();
}
}
@override
void onPause() {
_playing = false;
_audioPlayer.pause();
}
@override
void onSkipToNext() async {
skip(1);
}
@override
void onSkipToPrevious() {
skip(-1);
}
void skip(int offset) async {
int newPos = _queueIndex + offset;
if (!(newPos >= 0 && newPos < _queue.length)) {
return;
}
if (null == _playing) {
_playing = true;
} else if (_playing) {
await _audioPlayer.stop();
}
_queueIndex = newPos;
_skipState = offset > 0
? AudioProcessingState.skippingToNext
: AudioProcessingState.skippingToPrevious;
AudioServiceBackground.setMediaItem(mediaItem);
await _audioPlayer.setUrl(mediaItem.id);
print(mediaItem.id);
_skipState = null;
if (_playing) {
onPlay();
} else {
_setState(processingState: AudioProcessingState.ready);
}
}
@override
Future<void> onStop() async {
_playing = false;
await _audioPlayer.stop();
await _audioPlayer.dispose();
_playerStateSubscription.cancel();
_eventSubscription.cancel();
return await super.onStop();
}
@override
void onSeekTo(Duration position) {
_audioPlayer.seek(position);
}
@override
void onClick(MediaButton button) {
playPause();
}
@override
Future<void> onFastForward() async {
await _seekRelative(fastForwardInterval);
}
@override
Future<void> onRewind() async {
await _seekRelative(rewindInterval);
}
Future<void> _seekRelative(Duration offset) async {
var newPosition = _audioPlayer.playbackEvent.position + offset;
if (newPosition < Duration.zero) {
newPosition = Duration.zero;
}
if (newPosition > mediaItem.duration) {
newPosition = mediaItem.duration;
}
await _audioPlayer.seek(_audioPlayer.playbackEvent.position + offset);
}
_handlePlaybackCompleted() {
if (hasNext) {
onSkipToNext();
} else {
onStop();
}
}
void playPause() {
if (AudioServiceBackground.state.playing)
onPause();
else
onPlay();
}
Future<void> _setState({
AudioProcessingState processingState,
Duration position,
Duration bufferedPosition,
}) async {
print('SetState $processingState');
if (position == null) {
position = _audioPlayer.playbackEvent.position;
}
await AudioServiceBackground.setState(
controls: getControls(),
systemActions: [MediaAction.seekTo],
processingState:
processingState ?? AudioServiceBackground.state.processingState,
playing: _playing,
position: position,
bufferedPosition: bufferedPosition ?? position,
speed: _audioPlayer.speed,
);
}
List<MediaControl> getControls() {
if (_playing) {
return [
skipToPreviousControl,
pauseControl,
stopControl,
skipToNextControl
];
} else {
return [
skipToPreviousControl,
playControl,
stopControl,
skipToNextControl
];
}
}
}
class AudioState {
final List<MediaItem> queue;
final MediaItem mediaItem;
final PlaybackState playbackState;
AudioState(this.queue, this.mediaItem, this.playbackState);
}
You should override this method in the `AudioPlayerTask` class:
@override
Future<void> onAddQueueItem(MediaItem mediaItem) async {
// queue.add(mediaItem); or something like this to update your queue
await AudioServiceBackground.setQueue(queue);
try {
await _player.load(ConcatenatingAudioSource(
children:
queue.map((item) => AudioSource.uri(Uri.parse(item.id))).toList(),
));
// In this example, we automatically start playing on start.
onPlay();
} catch (e) {
print("Error: $e");
onStop();
}
}
Then you can call this function in your UI:
await AudioService.start(
backgroundTaskEntrypoint: audioPlayerTaskEntrypoint,
androidNotificationChannelName:
'Audio Service Demo',
// Enable this if you want the Android service to exit the foreground state on pause.
//androidStopForegroundOnPause: true,
androidNotificationColor: 0xFF2196f3,
androidNotificationIcon: 'mipmap/ic_launcher',
androidEnableQueue: true,
);
// call this func:
await AudioService.addQueueItem(m);
According to the author here, the plugin originally didn't allow passing any arguments to the background task directly. It was designed to let the background task internally query what to play from internal storage via SQLite or tools like shared_preferences.
However, today the plugin lets you pass media data down to the background task in three different ways that I identified:
AudioService.addQueueItem(item) combined with onAddQueueItem as mentioned in the above answer as well as this one
AudioService.customAction('url', url) combined with onCustomAction as explained here (sketched below)
Use the Map<String, dynamic> params argument of the AudioService.start method. It is received as an argument of the background task's onStart method as explained here
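For completeness, a rough sketch of option 2 using the same pre-0.18 audio_service API as the question (the 'url' action name and the example URL are placeholders):
// In the UI, after AudioService.start(...):
await AudioService.customAction('url', 'https://example.com/track.mp3');

// In the BackgroundAudioTask subclass:
@override
Future<dynamic> onCustomAction(String name, dynamic arguments) async {
  if (name == 'url') {
    await _audioPlayer.setUrl(arguments as String); // just_audio player from the task
    onPlay();
  }
}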

Flutter WebRTC audio but no video

So I'm building a video-calling application using Flutter, Flutter Web, and the WebRTC package.
I have a spring boot server sitting in the middle to pass the messages between the two clients.
Each side shows the local video, but neither shows the remote. Audio does work, though; I got some nasty feedback loops, and testing with headphones confirmed that audio does indeed work.
My signaling code:
typedef void StreamStateCallback(MediaStream stream);
class CallingService {
String sendToUserId;
String currentUserId;
final String authToken;
final StompClient _client;
final StreamStateCallback onAddRemoteStream;
final StreamStateCallback onRemoveRemoteStream;
final StreamStateCallback onAddLocalStream;
RTCPeerConnection _peerConnection;
List<RTCIceCandidate> _remoteCandidates = [];
String destination;
var hasOffer = false;
var isNegotiating = false;
MediaStream _localStream;
final Map<String, dynamic> _constraints = {
'mandatory': {
'OfferToReceiveAudio': true,
'OfferToReceiveVideo': true,
},
'optional': [],
};
CallingService(
this._client,
this.sendToUserId,
this.currentUserId,
this.authToken,
this.onAddRemoteStream,
this.onRemoveRemoteStream,
this.onAddLocalStream) {
destination = '/app/start-call/$sendToUserId';
print("destination $destination");
_client.subscribe(
destination: destination,
headers: {'Authorization': "$authToken"},
callback: (StompFrame frame) => processMessage(jsonDecode(frame.body)));
}
Future<void> startCall() async {
await processRemoteStream();
RTCSessionDescription description =
await _peerConnection.createOffer(_constraints);
await _peerConnection.setLocalDescription(description);
var message = RtcMessage(RtcMessageType.OFFER, currentUserId, {
'description': {'sdp': description.sdp, 'type': description.type},
});
sendMessage(message);
}
Future<void> processMessage(Map<String, dynamic> messageJson) async {
var message = RtcMessage.fromJson(messageJson);
if (message.from == currentUserId) {
return;
}
print("processing ${message.messageType.toString()}");
switch (message.messageType) {
case RtcMessageType.BYE:
// TODO: Handle this case.
break;
case RtcMessageType.LEAVE:
// TODO: Handle this case.
break;
case RtcMessageType.CANDIDATE:
await processCandidate(message);
break;
case RtcMessageType.ANSWER:
await processAnswer(message);
break;
case RtcMessageType.OFFER:
await processOffer(message);
break;
}
}
Future<void> processCandidate(RtcMessage candidate) async {
Map<String, dynamic> map = candidate.data['candidate'];
var rtcCandidate = RTCIceCandidate(
map['candidate'],
map['sdpMid'],
map['sdpMLineIndex'],
);
if (_peerConnection != null) {
_peerConnection.addCandidate(rtcCandidate);
} else {
_remoteCandidates.add(rtcCandidate);
}
}
Future<void> processAnswer(RtcMessage answer) async {
if (isNegotiating) {
return;
}
isNegotiating = true;
var description = answer.data['description'];
if (_peerConnection == null) {
return;
}
await _peerConnection.setRemoteDescription(
RTCSessionDescription(description['sdp'], description['type']));
}
Future<void> processOffer(RtcMessage offer) async {
await processRemoteStream();
var description = offer.data['description'];
await _peerConnection.setRemoteDescription(
new RTCSessionDescription(description['sdp'], description['type']));
var answerDescription = await _peerConnection.createAnswer(_constraints);
await _peerConnection.setLocalDescription(answerDescription);
var answerMessage = RtcMessage(RtcMessageType.ANSWER, currentUserId, {
'description': {
'sdp': answerDescription.sdp,
'type': answerDescription.type
},
});
sendMessage(answerMessage);
if (_remoteCandidates.isNotEmpty) {
_remoteCandidates
.forEach((candidate) => _peerConnection.addCandidate(candidate));
_remoteCandidates.clear();
}
}
Future<void> processRemoteStream() async {
_localStream = await createStream();
_peerConnection = await createPeerConnection(_iceServers, _config);
_peerConnection.addStream(_localStream);
_peerConnection.onSignalingState = (state) {
//isNegotiating = state != RTCSignalingState.RTCSignalingStateStable;
};
_peerConnection.onAddStream = (MediaStream stream) {
this.onAddRemoteStream(stream);
};
_peerConnection.onRemoveStream =
(MediaStream stream) => this.onRemoveRemoteStream(stream);
_peerConnection.onIceCandidate = (RTCIceCandidate candidate) {
var data = {
'candidate': {
'sdpMLineIndex': candidate.sdpMlineIndex,
'sdpMid': candidate.sdpMid,
'candidate': candidate.candidate,
},
};
var message = RtcMessage(RtcMessageType.CANDIDATE, currentUserId, data);
sendMessage(message);
};
}
void sendMessage(RtcMessage message) {
_client.send(
destination: destination,
headers: {'Authorization': "$authToken"},
body: jsonEncode(message.toJson()));
}
Map<String, dynamic> _iceServers = {
'iceServers': [
{'urls': 'stun:stun.l.google.com:19302'},
/*
* turn server configuration example.
{
'url': 'turn:123.45.67.89:3478',
'username': 'change_to_real_user',
'credential': 'change_to_real_secret'
},
*/
]
};
final Map<String, dynamic> _config = {
'mandatory': {},
'optional': [
{'DtlsSrtpKeyAgreement': true},
],
};
Future<MediaStream> createStream() async {
final Map<String, dynamic> mediaConstraints = {
'audio': true,
'video': {
'mandatory': {
'minWidth': '640',
'minHeight': '480',
'minFrameRate': '30',
},
'facingMode': 'user',
'optional': [],
}
};
MediaStream stream = await navigator.getUserMedia(mediaConstraints);
if (this.onAddLocalStream != null) {
this.onAddLocalStream(stream);
}
return stream;
}
}
Here are my widgets
class _CallScreenState extends State<CallScreen> {
StompClient _client;
CallingService _callingService;
RTCVideoRenderer _localRenderer = new RTCVideoRenderer();
RTCVideoRenderer _remoteRenderer = new RTCVideoRenderer();
final UserService userService = GetIt.instance.get<UserService>();
void onConnectCallback(StompClient client, StompFrame connectFrame) async {
var currentUser = await userService.getCurrentUser();
_callingService = CallingService(
_client,
widget.intent.toUserId.toString(),
currentUser.id.toString(),
widget.intent.authToken,
onAddRemoteStream,
onRemoveRemoteStream,
onAddLocalStream);
if (widget.intent.initialMessage != null) {
_callingService.processMessage(jsonDecode(widget.intent.initialMessage));
} else {
_callingService.startCall();
}
}
void onAddRemoteStream(MediaStream stream) {
_remoteRenderer.srcObject = stream;
}
void onRemoveRemoteStream(MediaStream steam) {
_remoteRenderer.srcObject = null;
}
void onAddLocalStream(MediaStream stream) {
_localRenderer.srcObject = stream;
}
@override
void initState() {
super.initState();
_localRenderer.initialize();
_remoteRenderer.initialize();
_client = StompClient(
config: StompConfig(
url: 'ws://${DomainService.getDomainBase()}/stomp',
onConnect: onConnectCallback,
onWebSocketError: (dynamic error) => print(error.toString()),
stompConnectHeaders: {'Authorization': "${widget.intent.authToken}"},
onDisconnect: (message) => print("disconnected ${message.body}"),),
);
_client.activate();
}
@override
Widget build(BuildContext context) {
return PlatformScaffold(
pageTitle: "",
child: Flex(
direction: Axis.vertical,
children: [
Flexible(
flex: 1,
child: RTCVideoView(_localRenderer),
),
Flexible(
flex: 1,
child: RTCVideoView(_remoteRenderer),
)
],
),
);
}
}
I put a print statement in the widget's onAddRemoteStream callback, and it's getting called, so some kind of stream is being received. I'm just not sure why the video isn't showing.
So my problem was that I wasn't adding queued candidates to the caller.
I added
sendMessage(answerMessage);
if (_remoteCandidates.isNotEmpty) {
_remoteCandidates
.forEach((candidate) => _peerConnection.addCandidate(candidate));
_remoteCandidates.clear();
}
to the processAnswer method and it works just fine!
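For clarity, processAnswer with that change merged in looks roughly like this (the sendMessage(answerMessage) line in the snippet above belongs to processOffer; the new part for processAnswer is the candidate flush):
Future<void> processAnswer(RtcMessage answer) async {
  if (isNegotiating) return;
  isNegotiating = true;
  var description = answer.data['description'];
  if (_peerConnection == null) return;
  await _peerConnection.setRemoteDescription(
      RTCSessionDescription(description['sdp'], description['type']));
  // Flush any ICE candidates that arrived before the remote description was set.
  if (_remoteCandidates.isNotEmpty) {
    _remoteCandidates
        .forEach((candidate) => _peerConnection.addCandidate(candidate));
    _remoteCandidates.clear();
  }
}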