I am trying to use a connected Bluetooth device on other pages, but I'm unable to do that. I tried using a provider, but that did not work; passing parameters did not work either.
After testing, I am using the following. I made a ReactiveProvider class:
class ReactiveProvider {
Stream<ConnectionStateUpdate> get currentConnectionStream {
return flutterReactiveBle.connectToAdvertisingDevice(
id: _foundBleUARTDevices[index].id,
prescanDuration: const Duration(seconds: 1),
withServices: [_uartUuid, _uartRx, _uartTx],
);
}
}
and set it up at startup:
void main() {
runApp(
MultiProvider(providers: [
StreamProvider<ConnectionStateUpdate>(
create: (context) => ReactiveProvider().currentConnectionStream,
initialData: const ConnectionStateUpdate(
deviceId: "",
connectionState: DeviceConnectionState.disconnected,
failure: null),
)
], child: const MainApp()),
);
}
and in a StatefulWidget:
final _currentConnectionStream = Provider.of<ConnectionStateUpdate>(context);
I get the following errors:
The instance member 'context' can't be accessed in an initializer.
Try replacing the reference to the instance member with a different expression
and
The method 'listen' isn't defined for the type 'ConnectionStateUpdate'.
Try correcting the name to the name of an existing method, or defining a method named 'listen'.
In the following code:
_connection = _currentConnectionStream.listen((event) {});
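For reference, a minimal sketch (not taken from the question) of what the two errors mean: Provider.of needs a BuildContext, so it can only be called inside build (or didChangeDependencies), not in a field initializer; and Provider.of<ConnectionStateUpdate> returns the latest value emitted by the StreamProvider, which is plain data with no listen method, not a Stream:
import 'package:flutter/material.dart';
import 'package:flutter_reactive_ble/flutter_reactive_ble.dart';
import 'package:provider/provider.dart';
// Hypothetical widget: read the provided value inside build instead of in a
// field initializer, and display it directly instead of calling listen on it.
class ConnectionStateLabel extends StatelessWidget {
  const ConnectionStateLabel({super.key});
  @override
  Widget build(BuildContext context) {
    final update = Provider.of<ConnectionStateUpdate>(context);
    return Text('Connection state: ${update.connectionState}');
  }
}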
I want to access the following fields on another page, using any state-management solution:
final flutterReactiveBle = FlutterReactiveBle();
List<DiscoveredDevice> _foundBleUARTDevices = [];
late StreamSubscription<DiscoveredDevice> _scanStream;
late Stream<ConnectionStateUpdate> _currentConnectionStream;
late StreamSubscription<ConnectionStateUpdate> _connection;
late QualifiedCharacteristic _txCharacteristic;
//late QualifiedCharacteristic _rxCharacteristic;
late Stream<List<int>> _receivedDataStream;
These are the other functions I am using:
void onNewReceivedData(List<int> data) {
_numberOfMessagesReceived += 1;
_receivedData
.add("$_numberOfMessagesReceived: ${String.fromCharCodes(data)}");
if (_receivedData.length > 10) {
_receivedData.removeAt(0);
}
}
void _disconnect() async {
await _connection.cancel();
_connected = false;
}
void _stopScan() async {
await _scanStream.cancel();
_scanning = false;
}
void _startScan() async {
_foundBleUARTDevices = [];
_scanning = true;
_scanStream = flutterReactiveBle
.scanForDevices(withServices: [_uartUuid]).listen((device) {
if (_foundBleUARTDevices.every((element) => element.id != device.id)) {
_foundBleUARTDevices.add(device);
}
}, onError: (Object error) {
_logTexts = "${_logTexts}ERROR while scanning:$error \n";
}, onDone: () async {
await _scanStream.cancel();
_scanning = false;
});
}
void onConnectDevice(index) {
_currentConnectionStream = flutterReactiveBle.connectToAdvertisingDevice(
id: _foundBleUARTDevices[index].id,
prescanDuration: const Duration(seconds: 1),
withServices: [_uartUuid, _uartRx, _uartTx],
);
_logTexts = "";
_connection = _currentConnectionStream.listen((event) {
var id = event.deviceId.toString();
switch (event.connectionState) {
case DeviceConnectionState.connecting:
{
_logTexts = "${_logTexts}Connecting to $id\n";
break;
}
case DeviceConnectionState.connected:
{
_connected = true;
_logTexts = "${_logTexts}Connected to $id\n";
_numberOfMessagesReceived = 0;
_receivedData = [];
_txCharacteristic = QualifiedCharacteristic(
serviceId: _uartUuid,
characteristicId: _uartTx,
deviceId: event.deviceId);
_receivedDataStream =
flutterReactiveBle.subscribeToCharacteristic(_txCharacteristic);
_receivedDataStream.listen((data) {
onNewReceivedData(data);
}, onError: (dynamic error) {
_logTexts = "${_logTexts}Error:$error$id\n";
});
break;
}
case DeviceConnectionState.disconnecting:
{
_connected = false;
_logTexts = "${_logTexts}Disconnecting from $id\n";
break;
}
case DeviceConnectionState.disconnected:
{
_logTexts = "${_logTexts}Disconnected from $id\n";
break;
}
}
});
}
Another question I have is how I can connect, and stay connected, using the void onConnectDevice(index) function, because with a provider you shouldn't need to pass the parameters around.
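One way to restructure this, as a hedged sketch (BleService and its method names are my assumptions, not from the question): keep a single service object that owns flutterReactiveBle and the current connection stream, expose it with a plain Provider, and read it inside build on whichever page needs it, so nothing has to be passed between pages:
import 'package:flutter/material.dart';
import 'package:flutter_reactive_ble/flutter_reactive_ble.dart';
import 'package:provider/provider.dart';
// Hypothetical shared service: every page talks to the same instance, so the
// connection started on one page stays available on the others.
class BleService {
  final flutterReactiveBle = FlutterReactiveBle();
  Stream<ConnectionStateUpdate>? currentConnectionStream;
  void connect(DiscoveredDevice device, List<Uuid> services) {
    currentConnectionStream = flutterReactiveBle.connectToAdvertisingDevice(
      id: device.id,
      prescanDuration: const Duration(seconds: 1),
      withServices: services,
    );
  }
}
void main() {
  runApp(
    Provider<BleService>(
      create: (_) => BleService(),
      child: const MainApp(), // MainApp as in the question
    ),
  );
}
// On any page, inside build:
//   final ble = Provider.of<BleService>(context, listen: false);
//   ble.connect(selectedDevice, [uartUuid, uartRx, uartTx]);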
I have a StateNotifierProvider that calls an async function which loads some images from the internal storage and adds them to the AsyncValue data:
//Provider declaration
final paginationImagesProvider = StateNotifierProvider.autoDispose<PaginationNotifier, AsyncValue<List<Uint8List?>>>((ref) {
return PaginationNotifier(folderId: ref.watch(localStorageSelectedFolderProvider), itemsPerBatch: 100, ref: ref);
});
//Actual class with AsyncValue as State
class PaginationNotifier extends StateNotifier<AsyncValue<List<Uint8List?>>> {
final int itemsPerBatch;
final String folderId;
final Ref ref;
int _numberOfItemsInFolder = 0;
bool _alreadyFetching = false;
bool _hasMoreItems = true;
PaginationNotifier({required this.itemsPerBatch, required this.folderId, required this.ref}) : super(const AsyncValue.loading()) {
log("PaginationNotifier created with folderId: $folderId, itemsPerBatch: $itemsPerBatch");
init();
}
final List<Uint8List?> _items = [];
void init() {
if (_items.isEmpty) {
log("fetchingFirstBatch");
_fetchFirstBatch();
}
}
Future<List<Uint8List?>> _fetchNextItems() async {
List<AssetEntity> images = (await (await PhotoManager.getAssetPathList())
.firstWhere((element) => element.id == folderId)
.getAssetListRange(start: _items.length, end: _items.length + itemsPerBatch));
List<Uint8List?> newItems = [];
for (AssetEntity image in images) {
newItems.add(await image.thumbnailData);
}
return newItems;
}
void _updateData(List<Uint8List?> result) {
if (result.isEmpty) {
state = AsyncValue.data(_items);
} else {
state = AsyncValue.data(_items..addAll(result));
}
_hasMoreItems = _numberOfItemsInFolder > _items.length;
}
Future<void> _fetchFirstBatch() async {
try {
_numberOfItemsInFolder = await (await PhotoManager.getAssetPathList()).firstWhere((element) => element.id == folderId).assetCountAsync;
state = const AsyncValue.loading();
final List<Uint8List?> result = await _fetchNextItems();
_updateData(result);
} catch (e, stk) {
state = AsyncValue.error(e, stk);
}
}
Future<void> fetchNextBatch() async {
if (_alreadyFetching || !_hasMoreItems) return;
_alreadyFetching = true;
log("data updated");
state = AsyncValue.data(_items);
try {
final result = await _fetchNextItems();
_updateData(result);
} catch (e, stk) {
state = AsyncValue.error(e, stk);
log("error catched");
}
_alreadyFetching = false;
}
}
Then I use a scroll controller attached to a CustomScrollView in order to call fetchNextBatch() when the scroll position changes:
@override
void initState() {
if (!controller.hasListeners && !controller.hasClients) {
log("listener added");
controller.addListener(() {
double maxScroll = controller.position.maxScrollExtent;
double position = controller.position.pixels;
if ((position > maxScroll * 0.2 || position == 0) && ref.read(paginationImagesProvider.notifier).mounted) {
ref.read(paginationImagesProvider.notifier).fetchNextBatch();
}
});
}
super.initState();
}
The problem is that when the StateNotifierProvider is fetching more data in the async function fetchNextBatch() and I go back on the navigator (like navigator.pop()), Flutter gives me an error:
[ERROR:flutter/runtime/dart_vm_initializer.cc(41)] Unhandled Exception: Bad state: Tried to use PaginationNotifier after dispose was called.
Consider checking mounted.
I think the async function responsible for loading the data completes after I've popped the page from the stack (which triggers the provider's dispose).
I'm probably missing something and still haven't found a fix for this error; any help is appreciated.
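A minimal sketch of the guard the error message itself suggests: check StateNotifier's mounted flag after every await before touching state, so a fetch that finishes after the page was popped (and the autoDispose provider was disposed) simply bails out instead of throwing. Same fetchNextBatch as above, with the checks added:
Future<void> fetchNextBatch() async {
  if (_alreadyFetching || !_hasMoreItems) return;
  _alreadyFetching = true;
  state = AsyncValue.data(_items);
  try {
    final result = await _fetchNextItems();
    // The notifier may have been disposed (page popped) while awaiting.
    if (!mounted) return;
    _updateData(result);
  } catch (e, stk) {
    if (mounted) state = AsyncValue.error(e, stk);
  }
  _alreadyFetching = false;
}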
When the method reaches the await keyword, execution goes back to the line that called it, without running the lines after the await
class WordAndMeaning extends StatelessWidget {
WordAndMeaning({super.key});
final viewModel = locator<HomeViewModel>();
@override
Widget build(BuildContext context) {
return Observer(
builder: (_) {
if (viewModel.userDailyRight == null) {
return const CircularProgressIndicator();
} else if (viewModel.userDailyRight == 0) {
return const Text(
"Out of your limit",
style: TextStyle(color: Colors.red, fontSize: 20),
);
} else if (viewModel.userDailyRight != 0 && !viewModel.isWordsPulled) {
viewModel.runPullWord();
if (viewModel.isStartedWordPulling) {
return const CircularProgressIndicator();
}
I call runPullWord after checking userDailyRight and isWordsPulled in this stateless widget.
List<String> wordList = [
"pencil",
"headphone",
"bag",
"door",
"dress",
"bin",
];
final String _apiLink = "https://api.dictionaryapi.dev/api/v2/entries/en/";
@action
runPullWord() async {
isStartedWordPulling = true;
learnedWordKey = await pullLearnedWordsKeys();
wordAndMeanList = await pullWord();
setRandomWordIndexRightNow();
}
@action
Future<List<String>> pullLearnedWordsKeys() async {
return (await _firestore
.doc("students/$userID/learnedwords/learnedwords")
.get())
.data()!
.keys
.toList();
}
@action
Future<List<WordModel>> pullWord() async {
List<WordModel> localWordAndMeanList = [];
isWordsPulled = false;
try {
for (String word in learnedWordKey) {
wordList.remove(word);
}
for (int i = 0; i < wordList.length; i++) {
_response = await Dio().get(_apiLink + wordList[i]);
_pulledData = _response.data;
List<WordModel> currentWord =
((_pulledData as List).map((e) => WordModel.fromMap(e)).toList());
localWordAndMeanList.add(currentWord[0]);
}
isStartedWordPulling = false;
isWordsPulled = true;
return localWordAndMeanList;
} catch (e) {
debugPrint(e.toString());
return Future.error(e);
}
}
When the line with _response runs, the program goes back to the viewModel.runPullWord() line in the widget as soon as it hits the await keyword, and the pullWord method has not completed (_response is still null). As a result, the pullWord method is executed repeatedly.
This situation made me question my knowledge. Why does this happen, and how can I solve it?
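This is actually how await is supposed to behave: an async function runs synchronously only until its first await, then it suspends and hands a Future back to its caller, and the lines after the await run later when the awaited Future completes. A standalone sketch (not the asker's view model) showing the same control flow:
import 'dart:async';

Future<String> pullWordDemo() async {
  print('pullWordDemo: start');
  // Execution suspends here and control returns to the caller.
  await Future<void>.delayed(const Duration(seconds: 1));
  print('pullWordDemo: resumed after the await');
  return 'word';
}

Future<void> main() async {
  print('before the call');
  final future = pullWordDemo(); // runs only up to the first await
  print('after the call');       // printed while pullWordDemo is suspended
  final word = await future;     // the rest of pullWordDemo has now run
  print('got: $word');
}
The repeated executions most likely come from build() calling viewModel.runPullWord() again on every rebuild before isWordsPulled has flipped; guarding that call with isStartedWordPulling (or triggering it outside of build) would avoid the re-entry.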
I want to access BLE characteristic values from a single Dart file. What I am doing is connecting the device in one activity and then sending the device info to all the other activities. But to get the values I have to write the same code again and again in every activity/Dart file.
For example, I am connecting the device in an activity like this:
StreamBuilder<List<ScanResult>>(
stream: FlutterBlue.instance.scanResults,
initialData: [],
builder: (c, snapshot) => Column(
children: snapshot.data
.map(
(r) => ScanResultTile(
result: r,
onTap: () => Navigator.of(context)
.push(MaterialPageRoute(builder: (context) {
r.device.connect();
print('DEVICE CONNECTED');
return BluetoothConnectedSuccess(device: r.device);
Here device: r.device is the device that I have connected to my Flutter app. Now if I want to display the device data, I have to initialize these lines of code every time I jump to another screen/activity:
class BluetoothConnectedSuccess extends StatefulWidget {
const BluetoothConnectedSuccess({Key key, this.device}) : super(key: key);
final BluetoothDevice device;
@override
_BluetoothConnectedSuccessState createState() =>
_BluetoothConnectedSuccessState();
}
class _BluetoothConnectedSuccessState extends State<BluetoothConnectedSuccess> {
// BLE
final String SERVICE_UUID = "4fafc201-1fb5-459e-8fcc-c5c9c331914b";
final String CHARACTERISTIC_UUID = "beb5483e-36e1-4688-b7f5-ea07361b26a8";
bool isReady;
Stream<List<int>> stream;
List<int> lastValue;
List<double> traceDust = List();
@override
void initState() {
super.initState();
isReady = false;
connectToDevice();
}
connectToDevice() async {
await widget.device.connect();
discoverServices();
}
discoverServices() async {
List<BluetoothService> services = await widget.device.discoverServices();
services.forEach((service) {
if (service.uuid.toString() == SERVICE_UUID) {
service.characteristics.forEach((characteristic) {
if (characteristic.uuid.toString() == CHARACTERISTIC_UUID) {
characteristic.setNotifyValue(!characteristic.isNotifying);
stream = characteristic.value;
print(stream);
lastValue = characteristic.lastValue;
print(lastValue);
setState(() {
isReady = true;
});
}
});
}
});
}
_dataParser(List<int> data) {
var value = Uint8List.fromList(data);
print("stream.value: $value"); // stream.value: [33]
var hr = ByteData.sublistView(value, 0, 1);
print("Heart rate: ${hr.getUint8(0)}");
return hr.getUint8(0); // Heart rate: 33
}
It creates a lot of mess to write the same code again and again in every activity where the BLE data is needed.
Is there a way to only call this connected device from a single file instead of initializing the same code in every activity?
This is the link to my repo for a look at what I am doing on every activity/screen with BLE device data.
Please help me out as I am new to Flutter. Thank you
First, learn the basics of state management using Get; you can refer to my code here. Whenever something changes or updates, it is immediately shown in the UI through specific Get widgets like Obx and GetX, which listen to changes in values marked with .obs (observable).
For example:
Obx(
() => ble.isScan.value ? LinearProgressIndicator() : SizedBox(),
),
This will observe the changes in the isScan value.
class BleServices extends GetxController {
FlutterBlue blue = FlutterBlue.instance;
BluetoothDevice d;
var connectedDevices = List<BluetoothDevice>().obs;
var scanResults = List<ScanResult>().obs;
var bleState = BluetoothState.off.obs;
var isScan = false.obs;
var scanRequire = false.obs;
var bluetoothServices = List<BluetoothService>().obs;
var discoveringServices = false.obs;
var characteristics = List<BluetoothCharacteristic>().obs;
@override
void onInit() async {
super.onInit();
final perb = await Permission.bluetooth.status.isGranted;
final perL = await Permission.location.status.isGranted;
if (perb && perL) {
getConnectedDevices();
} else {
await Permission.bluetooth.request();
await Permission.location.request();
}
isScanning();
state();
}
isDiscovering() async {}
getConnectedDevices() async {
final connectedDevice = await blue.connectedDevices;
connectedDevices.value = connectedDevice;
AppLogger.print('connected devices : $connectedDevice');
if (connectedDevice.length == 0) {
scanRequire.value = true;
searchDevices();
}
return connectedDevice;
}
searchDevices() {
// AppLogger.print('pppppppppppppp');
blue
.scan(timeout: Duration(seconds: 20))
.distinct()
.asBroadcastStream()
.listen((event) {
AppLogger.print(event.toString());
scanResults.addIf(!scanResults.contains(event), event);
});
Future.delayed(Duration(seconds: 20), () {
blue.stopScan();
Get.showSnackbar(GetBar(
message: 'scan is finished',
));
});
}
isScanning() {
blue.isScanning.listen((event) {
AppLogger.print(event.toString());
isScan.value = event;
});
}
state() {
blue.state.listen((event) {
AppLogger.print(event.toString());
bleState.value = event;
});
}
}
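A hypothetical usage sketch (DevicesPage and MyApp are assumptions, not from the answer): register the controller once with Get.put, then any screen can look up the same instance with Get.find, so the BLE code only lives in this one file:
import 'package:flutter/material.dart';
import 'package:get/get.dart';

void main() {
  Get.put(BleServices()); // one shared instance for the whole app; onInit runs here
  runApp(MyApp());
}

class DevicesPage extends StatelessWidget {
  @override
  Widget build(BuildContext context) {
    final ble = Get.find<BleServices>(); // same controller on every page
    return Obx(() => ListView(
          children: ble.scanResults
              .map((r) => ListTile(title: Text(r.device.name)))
              .toList(),
        ));
  }
}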
await AudioService.start(
backgroundTaskEntrypoint: _audioPlayerTaskEntrypoint,
androidNotificationChannelName: 'Audio Player',
androidNotificationColor: 0xFF2196f3,
androidNotificationIcon: 'mipmap/ic_launcher',
params: getParams(),
);
This is my code snippet where I am calling AudioService.start, but I am unable to start the service.
I am fetching the audio items from Firebase and want to load them as a list into audio_service, but I am unable to do that. This is my class where I have defined an audio service extending the background task:
import 'package:audio_service/audio_service.dart';
import 'package:just_audio/just_audio.dart';
MediaControl playControl = MediaControl(
androidIcon: 'drawable/ic_action_play_arrow',
label: 'Play',
action: MediaAction.play,
);
MediaControl pauseControl = MediaControl(
androidIcon: 'drawable/ic_action_pause',
label: 'Pause',
action: MediaAction.pause,
);
MediaControl skipToNextControl = MediaControl(
androidIcon: 'drawable/ic_action_skip_next',
label: 'Next',
action: MediaAction.skipToNext,
);
MediaControl skipToPreviousControl = MediaControl(
androidIcon: 'drawable/ic_action_skip_previous',
label: 'Previous',
action: MediaAction.skipToPrevious,
);
MediaControl stopControl = MediaControl(
androidIcon: 'drawable/ic_action_stop',
label: 'Stop',
action: MediaAction.stop,
);
class AudioPlayerTask extends BackgroundAudioTask {
//
var _queue = <MediaItem>[];
int _queueIndex = -1;
AudioPlayer _audioPlayer = new AudioPlayer();
AudioProcessingState _skipState;
bool _playing;
bool get hasNext => _queueIndex + 1 < _queue.length;
bool get hasPrevious => _queueIndex > 0;
MediaItem get mediaItem => _queue[_queueIndex];
StreamSubscription<AudioPlaybackState> _playerStateSubscription;
StreamSubscription<AudioPlaybackEvent> _eventSubscription;
@override
void onStart(Map<String, dynamic> params) {
print("-------------------------------------started");
_queue.clear();
List mediaItems = params['data'];
for (int i = 0; i < mediaItems.length; i++) {
MediaItem mediaItem = MediaItem.fromJson(mediaItems[i]);
_queue.add(mediaItem);
}
_playerStateSubscription = _audioPlayer.playbackStateStream
.where((state) => state == AudioPlaybackState.completed)
.listen((state) {
_handlePlaybackCompleted();
});
_eventSubscription = _audioPlayer.playbackEventStream.listen((event) {
final bufferingState =
event.buffering ? AudioProcessingState.buffering : null;
switch (event.state) {
case AudioPlaybackState.paused:
_setState(
processingState: bufferingState ?? AudioProcessingState.ready,
position: event.position);
break;
case AudioPlaybackState.playing:
_setState(
processingState: bufferingState ?? AudioProcessingState.ready,
position: event.position);
break;
case AudioPlaybackState.connecting:
_setState(
processingState: _skipState ?? AudioProcessingState.connecting,
position: event.position);
break;
default:
}
});
AudioServiceBackground.setQueue(_queue);
onSkipToNext();
}
@override
void onPlay() {
if (_skipState == null) {
_playing = true;
_audioPlayer.play();
}
}
@override
void onPause() {
_playing = false;
_audioPlayer.pause();
}
@override
void onSkipToNext() async {
skip(1);
}
@override
void onSkipToPrevious() {
skip(-1);
}
void skip(int offset) async {
int newPos = _queueIndex + offset;
if (!(newPos >= 0 && newPos < _queue.length)) {
return;
}
if (null == _playing) {
_playing = true;
} else if (_playing) {
await _audioPlayer.stop();
}
_queueIndex = newPos;
_skipState = offset > 0
? AudioProcessingState.skippingToNext
: AudioProcessingState.skippingToPrevious;
AudioServiceBackground.setMediaItem(mediaItem);
await _audioPlayer.setUrl(mediaItem.id);
print(mediaItem.id);
_skipState = null;
if (_playing) {
onPlay();
} else {
_setState(processingState: AudioProcessingState.ready);
}
}
@override
Future<void> onStop() async {
_playing = false;
await _audioPlayer.stop();
await _audioPlayer.dispose();
_playerStateSubscription.cancel();
_eventSubscription.cancel();
return await super.onStop();
}
@override
void onSeekTo(Duration position) {
_audioPlayer.seek(position);
}
@override
void onClick(MediaButton button) {
playPause();
}
@override
Future<void> onFastForward() async {
await _seekRelative(fastForwardInterval);
}
@override
Future<void> onRewind() async {
await _seekRelative(rewindInterval);
}
Future<void> _seekRelative(Duration offset) async {
var newPosition = _audioPlayer.playbackEvent.position + offset;
if (newPosition < Duration.zero) {
newPosition = Duration.zero;
}
if (newPosition > mediaItem.duration) {
newPosition = mediaItem.duration;
}
await _audioPlayer.seek(newPosition);
}
_handlePlaybackCompleted() {
if (hasNext) {
onSkipToNext();
} else {
onStop();
}
}
void playPause() {
if (AudioServiceBackground.state.playing)
onPause();
else
onPlay();
}
Future<void> _setState({
AudioProcessingState processingState,
Duration position,
Duration bufferedPosition,
}) async {
print('SetState $processingState');
if (position == null) {
position = _audioPlayer.playbackEvent.position;
}
await AudioServiceBackground.setState(
controls: getControls(),
systemActions: [MediaAction.seekTo],
processingState:
processingState ?? AudioServiceBackground.state.processingState,
playing: _playing,
position: position,
bufferedPosition: bufferedPosition ?? position,
speed: _audioPlayer.speed,
);
}
List<MediaControl> getControls() {
if (_playing) {
return [
skipToPreviousControl,
pauseControl,
stopControl,
skipToNextControl
];
} else {
return [
skipToPreviousControl,
playControl,
stopControl,
skipToNextControl
];
}
}
}
class AudioState {
final List<MediaItem> queue;
final MediaItem mediaItem;
final PlaybackState playbackState;
AudioState(this.queue, this.mediaItem, this.playbackState);
}
You should override this method in the AudioPlayerTask class:
@override
Future<void> onAddQueueItem(MediaItem mediaItem) async {
  // queue.add(mediaItem); or something like this to update your queue
await AudioServiceBackground.setQueue(queue);
try {
await _player.load(ConcatenatingAudioSource(
children:
queue.map((item) => AudioSource.uri(Uri.parse(item.id))).toList(),
));
// In this example, we automatically start playing on start.
onPlay();
} catch (e) {
print("Error: $e");
onStop();
}
}
Then you can call this function in your UI:
await AudioService.start(
backgroundTaskEntrypoint: audioPlayerTaskEntrypoint,
androidNotificationChannelName:
'Audio Service Demo',
// Enable this if you want the Android service to exit the foreground state on pause.
//androidStopForegroundOnPause: true,
androidNotificationColor: 0xFF2196f3,
androidNotificationIcon: 'mipmap/ic_launcher',
androidEnableQueue: true,
);
// call this func:
await AudioService.addQueueItem(m);
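A hypothetical example of the item m in that call (the URL and metadata are made up); its id doubles as what the background task ends up playing via setUrl(mediaItem.id):
final m = MediaItem(
  id: 'https://example.com/audio/track1.mp3',
  album: 'My Album',
  title: 'Track 1',
);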
According to the author here the plugin originally didn't allow passing any arguments to the background task directly. It was designed to let the background task internally query what to play from internal storage via SQLite or tools like shared_preferences.
However, today the plugin allows passing media data down to the background task in three different ways that I identified:
AudioService.addQueueItem(item) combined with onAddQueueItem as mentioned in the above answer as well as this one
AudioService.customAction('url', url) combined with onCustomAction as explained here
Use the Map<String, dynamic> params argument of the AudioService.start method. It is received as an argument of the background task's onStart method as explained here
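A hedged sketch of the third option (the track list is hard-coded here but would come from Firebase in the question's case, and toJson is assumed as the counterpart of the MediaItem.fromJson call used in onStart above): build the params map so that onStart can rebuild its queue from params['data']:
Map<String, dynamic> getParams() {
  final items = [
    MediaItem(
      id: 'https://example.com/audio/track1.mp3',
      album: 'My Album',
      title: 'Track 1',
    ),
  ];
  // onStart receives this map and reads params['data'] with MediaItem.fromJson.
  return {'data': items.map((item) => item.toJson()).toList()};
}

Future<void> startPlayer() async {
  await AudioService.start(
    backgroundTaskEntrypoint: _audioPlayerTaskEntrypoint,
    androidNotificationChannelName: 'Audio Player',
    androidNotificationColor: 0xFF2196f3,
    androidNotificationIcon: 'mipmap/ic_launcher',
    params: getParams(),
  );
}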
So I'm building a video calling application using Flutter, Flutter Web, and the WebRTC package.
I have a Spring Boot server sitting in the middle to pass the messages between the two clients.
Each side shows the local video, but neither shows the remote. Audio does work, though; I got some nasty feedback loops, and testing with headphones confirmed that audio does indeed work.
My signaling code:
typedef void StreamStateCallback(MediaStream stream);
class CallingService {
String sendToUserId;
String currentUserId;
final String authToken;
final StompClient _client;
final StreamStateCallback onAddRemoteStream;
final StreamStateCallback onRemoveRemoteStream;
final StreamStateCallback onAddLocalStream;
RTCPeerConnection _peerConnection;
List<RTCIceCandidate> _remoteCandidates = [];
String destination;
var hasOffer = false;
var isNegotiating = false;
MediaStream _localStream;
final Map<String, dynamic> _constraints = {
'mandatory': {
'OfferToReceiveAudio': true,
'OfferToReceiveVideo': true,
},
'optional': [],
};
CallingService(
this._client,
this.sendToUserId,
this.currentUserId,
this.authToken,
this.onAddRemoteStream,
this.onRemoveRemoteStream,
this.onAddLocalStream) {
destination = '/app/start-call/$sendToUserId';
print("destination $destination");
_client.subscribe(
destination: destination,
headers: {'Authorization': "$authToken"},
callback: (StompFrame frame) => processMessage(jsonDecode(frame.body)));
}
Future<void> startCall() async {
await processRemoteStream();
RTCSessionDescription description =
await _peerConnection.createOffer(_constraints);
await _peerConnection.setLocalDescription(description);
var message = RtcMessage(RtcMessageType.OFFER, currentUserId, {
'description': {'sdp': description.sdp, 'type': description.type},
});
sendMessage(message);
}
Future<void> processMessage(Map<String, dynamic> messageJson) async {
var message = RtcMessage.fromJson(messageJson);
if (message.from == currentUserId) {
return;
}
print("processing ${message.messageType.toString()}");
switch (message.messageType) {
case RtcMessageType.BYE:
// TODO: Handle this case.
break;
case RtcMessageType.LEAVE:
// TODO: Handle this case.
break;
case RtcMessageType.CANDIDATE:
await processCandidate(message);
break;
case RtcMessageType.ANSWER:
await processAnswer(message);
break;
case RtcMessageType.OFFER:
await processOffer(message);
break;
}
}
Future<void> processCandidate(RtcMessage candidate) async {
Map<String, dynamic> map = candidate.data['candidate'];
var rtcCandidate = RTCIceCandidate(
map['candidate'],
map['sdpMid'],
map['sdpMLineIndex'],
);
if (_peerConnection != null) {
_peerConnection.addCandidate(rtcCandidate);
} else {
_remoteCandidates.add(rtcCandidate);
}
}
Future<void> processAnswer(RtcMessage answer) async {
if (isNegotiating) {
return;
}
isNegotiating = true;
var description = answer.data['description'];
if (_peerConnection == null) {
return;
}
await _peerConnection.setRemoteDescription(
RTCSessionDescription(description['sdp'], description['type']));
}
Future<void> processOffer(RtcMessage offer) async {
await processRemoteStream();
var description = offer.data['description'];
await _peerConnection.setRemoteDescription(
new RTCSessionDescription(description['sdp'], description['type']));
var answerDescription = await _peerConnection.createAnswer(_constraints);
await _peerConnection.setLocalDescription(answerDescription);
var answerMessage = RtcMessage(RtcMessageType.ANSWER, currentUserId, {
'description': {
'sdp': answerDescription.sdp,
'type': answerDescription.type
},
});
sendMessage(answerMessage);
if (_remoteCandidates.isNotEmpty) {
_remoteCandidates
.forEach((candidate) => _peerConnection.addCandidate(candidate));
_remoteCandidates.clear();
}
}
Future<void> processRemoteStream() async {
_localStream = await createStream();
_peerConnection = await createPeerConnection(_iceServers, _config);
_peerConnection.addStream(_localStream);
_peerConnection.onSignalingState = (state) {
//isNegotiating = state != RTCSignalingState.RTCSignalingStateStable;
};
_peerConnection.onAddStream = (MediaStream stream) {
this.onAddRemoteStream(stream);
};
_peerConnection.onRemoveStream =
(MediaStream stream) => this.onRemoveRemoteStream(stream);
_peerConnection.onIceCandidate = (RTCIceCandidate candidate) {
var data = {
'candidate': {
'sdpMLineIndex': candidate.sdpMlineIndex,
'sdpMid': candidate.sdpMid,
'candidate': candidate.candidate,
},
};
var message = RtcMessage(RtcMessageType.CANDIDATE, currentUserId, data);
sendMessage(message);
};
}
void sendMessage(RtcMessage message) {
_client.send(
destination: destination,
headers: {'Authorization': "$authToken"},
body: jsonEncode(message.toJson()));
}
Map<String, dynamic> _iceServers = {
'iceServers': [
{'urls': 'stun:stun.l.google.com:19302'},
/*
* turn server configuration example.
{
'url': 'turn:123.45.67.89:3478',
'username': 'change_to_real_user',
'credential': 'change_to_real_secret'
},
*/
]
};
final Map<String, dynamic> _config = {
'mandatory': {},
'optional': [
{'DtlsSrtpKeyAgreement': true},
],
};
Future<MediaStream> createStream() async {
final Map<String, dynamic> mediaConstraints = {
'audio': true,
'video': {
'mandatory': {
'minWidth': '640',
'minHeight': '480',
'minFrameRate': '30',
},
'facingMode': 'user',
'optional': [],
}
};
MediaStream stream = await navigator.getUserMedia(mediaConstraints);
if (this.onAddLocalStream != null) {
this.onAddLocalStream(stream);
}
return stream;
}
}
Here are my widgets
class _CallScreenState extends State<CallScreen> {
StompClient _client;
CallingService _callingService;
RTCVideoRenderer _localRenderer = new RTCVideoRenderer();
RTCVideoRenderer _remoteRenderer = new RTCVideoRenderer();
final UserService userService = GetIt.instance.get<UserService>();
void onConnectCallback(StompClient client, StompFrame connectFrame) async {
var currentUser = await userService.getCurrentUser();
_callingService = CallingService(
_client,
widget.intent.toUserId.toString(),
currentUser.id.toString(),
widget.intent.authToken,
onAddRemoteStream,
onRemoveRemoteStream,
onAddLocalStream);
if (widget.intent.initialMessage != null) {
_callingService.processMessage(jsonDecode(widget.intent.initialMessage));
} else {
_callingService.startCall();
}
}
void onAddRemoteStream(MediaStream stream) {
_remoteRenderer.srcObject = stream;
}
void onRemoveRemoteStream(MediaStream stream) {
_remoteRenderer.srcObject = null;
}
void onAddLocalStream(MediaStream stream) {
_localRenderer.srcObject = stream;
}
@override
void initState() {
super.initState();
_localRenderer.initialize();
_remoteRenderer.initialize();
_client = StompClient(
config: StompConfig(
url: 'ws://${DomainService.getDomainBase()}/stomp',
onConnect: onConnectCallback,
onWebSocketError: (dynamic error) => print(error.toString()),
stompConnectHeaders: {'Authorization': "${widget.intent.authToken}"},
onDisconnect: (message) => print("disconnected ${message.body}"),),
);
_client.activate();
}
@override
Widget build(BuildContext context) {
return PlatformScaffold(
pageTitle: "",
child: Flex(
direction: Axis.vertical,
children: [
Flexible(
flex: 1,
child: RTCVideoView(_localRenderer),
),
Flexible(
flex: 1,
child: RTCVideoView(_remoteRenderer),
)
],
),
);
}
}
I put a print statement in the widget's onAddRemoteStream callback, and it's getting called, so some kind of stream is being sent. I'm just not sure why the video isn't showing.
So my problem was that I wasn't adding the queued ICE candidates on the caller's side.
I added
sendMessage(answerMessage);
if (_remoteCandidates.isNotEmpty) {
_remoteCandidates
.forEach((candidate) => _peerConnection.addCandidate(candidate));
_remoteCandidates.clear();
}
to the processAnswer method and it works just fine!
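For context, a sketch of how processAnswer ends up looking with that flush folded in (same fields as the CallingService above): the queued candidates are handed to the peer connection right after setRemoteDescription, mirroring what processOffer already did on the callee side:
Future<void> processAnswer(RtcMessage answer) async {
  if (isNegotiating) return;
  isNegotiating = true;
  var description = answer.data['description'];
  if (_peerConnection == null) return;
  await _peerConnection.setRemoteDescription(
      RTCSessionDescription(description['sdp'], description['type']));
  // Flush ICE candidates that arrived before the remote description was set.
  if (_remoteCandidates.isNotEmpty) {
    _remoteCandidates
        .forEach((candidate) => _peerConnection.addCandidate(candidate));
    _remoteCandidates.clear();
  }
}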