agora.io flutter Access denied finding property "net.dns1" - flutter

// Get microphone permission
// NOTE(review): this is the legacy permission_handler API
// (PermissionHandler/PermissionGroup); newer plugin versions use
// Permission.microphone.request() instead — confirm the plugin version.
await PermissionHandler().requestPermissions(
[PermissionGroup.microphone],
);
// Create the RTC engine with the project's Agora App ID.
var engine = await RtcEngine.create(APP_ID);
// Audio-only call: enable the audio module (not awaited here).
engine.enableAudio();
// Define event handler
engine.setEventHandler(RtcEngineEventHandler(
// Local user successfully joined the channel: flip the joined flag.
joinChannelSuccess: (String channel, int uid, int elapsed) {
print('joinChannelSuccess ${channel} ${uid}');
setState(() {
_joined = true;
});
// A remote user joined: remember their uid so the UI can show them.
}, userJoined: (int uid, int elapsed) {
print('userJoined ${uid}');
setState(() {
_remoteUid = uid;
});
// The remote user left: clear the stored uid.
}, userOffline: (int uid, UserOfflineReason reason) {
print('userOffline ${uid}');
setState(() {
_remoteUid = null;
});
}));
// Join channel 123
// NOTE(review): uid 0 lets Agora assign a uid automatically.
await engine.joinChannel(Token, '123', null, 0);
I am trying to build an audio calling app using agora.io, but I keep getting this error saying "Access denied finding property "net.dns1"".

Related

Flutter How to check connection TCP IP

I want to perform an operation, but before that I want to check the connection status to the server whether it is still connected or not. If it is connected then perform the operation, if not then reconnect.
Here is my code for the button's onPressed handler:
/// onPressed handler for the "masuk" button.
///
/// If either the socket connection or the stream subscription is gone we
/// are effectively offline, so reconnect; otherwise resolve the device id
/// and send the pending payload over the live connection.
Future<void> masuk() async {
  // Both the "no connection" and "no subscription" branches of the
  // original logic do exactly the same thing: reconnect.
  if (_socketConnection == null || _subscription == null) {
    setState(() {
      connect();
    });
    return;
  }
  // Connection is alive: look up the device id, then send.
  _getId().then((id) {
    setState(() {
      deviceId = id;
      print("Sambungan Masih terhubung");
      send();
    });
  });
}
And here is the code for the connection:
/// Opens a socket connection to the development server and stores both the
/// connection and its stream subscription in state.
///
/// On cancel or error all connection state is cleared so [masuk] knows to
/// reconnect next time.
Future<void> connect() async {
  // 10.0.2.2 is the Android-emulator alias for the host machine.
  var host = "10.0.2.2";
  int port = 8000;

  // Shared by the cancel and error paths: forget the dead connection.
  void resetConnectionState() {
    setState(() {
      _subscription = null;
      _socketConnection = null;
    });
  }

  if (host.isNotEmpty) {
    final client = SocketClient(host, port);
    _subscription = client.connect().doOnCancel(resetConnectionState).listen(
      (connection) async {
        print("listen:$connection");
        setState(() {
          _socketConnection = connection;
        });
      },
      onError: (error) {
        _result = "Connect Error:$error";
        _subscription?.cancel();
        resetConnectionState();
      },
      cancelOnError: true,
    );
  }
  setState(() {});
}
Currently I have to restart the application before I can reconnect whenever the server has been stopped or restarted.

flutter google_speach response stream not working

// Authenticate against Google Cloud with an inline service-account key.
final serviceAccount = ServiceAccount.fromString(r'''{
Json private keys
}''');
final speechToText = SpeechToText.viaServiceAccount(serviceAccount);
// Recognition settings: 16 kHz LINEAR16 PCM, basic model, US English.
// NOTE(review): the mic sample rate below must match sampleRateHertz,
// and LINEAR16 expects raw 16-bit PCM — a mismatch yields empty results
// (see the troubleshooting link below).
final config = RecognitionConfig(
encoding: AudioEncoding.LINEAR16,
model: RecognitionModel.basic,
enableAutomaticPunctuation: true,
sampleRateHertz: 16000,
languageCode: 'en-US');
// interimResults: true streams partial transcripts as they are decoded.
final streamingConfig =
StreamingRecognitionConfig(config: config, interimResults: true);
// Microphone audio as a byte stream, fed directly into the recognizer.
Stream<List<int>> stream =
(await MicStream.microphone(sampleRate: 16000)) as Stream<List<int>>;
final responseStream =
speechToText.streamingRecognize(streamingConfig, stream);
// Toggle listening state; the subscription is only created when turning
// listening ON.
if (_isListining) {
setState(() {
_isListining = false;
});
} else {
// NOTE(review): subscribing to the stream (and the try/catch) inside a
// setState callback is unusual — consider moving the listen() outside
// and only flipping the flag here.
setState(() {
_isListining = true;
print('hello');
try {
responseStream.listen((data) {
print(data.results.length);
setState(() {
_sendController.text = data.results
.map((e) => e.alternatives.first.transcript)
// NOTE(review): '/n' is likely a typo for the newline escape '\n';
// as written it joins transcripts with a literal slash-n.
.join('/n');
});
// NOTE(review): `content` is not defined in this snippet — verify.
print(content);
}, onDone: () {
// NOTE(review): nested setState calls — the inner one is redundant.
setState(() {
setState(() {
_isListining = false;
});
});
}, onError: (e) {
print('errorr : ' + e);
setState(() {
_isListining = false;
});
});
print('streaming');
} catch (e) {
print('not streaming');
print(e);
}
});
}
Links to the packages used:
https://pub.dev/packages/google_speech
https://pub.dev/packages/mic_stream
So the problem is that the microphone streaming works fine, but the responseStream from the Google APIs is not printing or doing anything.
After reading the docs, I found this:
https://cloud.google.com/speech-to-text/docs/troubleshooting#returns_an_empty_response
and I don't know whether that is the problem or not.

I've got exception in agora when trying to parse remote user (host from web app) in flutter mobile app

I am using agora_rtc_engine on Flutter to create video call between mobile app and web app. But when I create call and try to connect to it from both applications, I get exception in mobile app:
[ERROR:flutter/lib/ui/ui_dart_state.cc(209)] Unhandled Exception: type 'Null' is not a subtype of type 'String' in type cast
E/flutter (17642): #0 _$UserInfoFromJson (package:agora_rtc_engine/src/classes.g.dart:11:27)
E/flutter (17642): #1 new UserInfo.fromJson (package:agora_rtc_engine/src/classes.dart:39:7)
E/flutter (17642): #2 RtcEngine.getUserInfoByUid.<anonymous closure> (package:agora_rtc_engine/src/rtc_engine.dart:314:23)
E/flutter (17642): #3 _rootRunUnary (dart:async/zone.dart:1436:47)
E/flutter (17642): #4 _CustomZone.runUnary (dart:async/zone.dart:1335:19)
E/flutter (17642): <asynchronous suspension>
It happens when I call the method getUserInfoByUid in the userJoined() callback.
Here is my code:
Engine initialising.
// Creates and configures the Agora engine, registers the local user
// account, and (for the non-broadcast path) joins the channel.
// NOTE(review): the paste appears to be missing the function's closing
// brace — the '}' on the last line closes the else-block, not _initEngine.
Future _initEngine() async {
_engine = await RtcEngine.create(widget.appId ?? APP_ID);
_addListeners();
// Composite account string: 'residentId|residentName|1'.
final userAccount = '${widget.residentId}|${widget.residentName}|1';
// NOTE(review): registerLocalUserAccount returns a Future that is not
// awaited here — confirm that is intentional before joinChannel runs.
_engine.registerLocalUserAccount(widget.appId ?? APP_ID, userAccount);
await _engine.enableVideo();
await _engine.disableAudio();
await _engine.startPreview();
// Volume callbacks every 300 ms, smoothing factor 3, report local vad.
await _engine.enableAudioVolumeIndication(300, 3, true);
_engine.enableFaceDetection(true);
if (widget.isBroadcast!) {
await _engine.setChannelProfile(ChannelProfile.LiveBroadcasting);
await _engine.setClientRole(ClientRole.Broadcaster);
} else {
await _engine.setChannelProfile(ChannelProfile.Communication);
_engine.setParameters('{"che.audio.live_for_comm": true}');
final VideoEncoderConfiguration configuration = VideoEncoderConfiguration();
// Keep frame rate and resolution balanced when bandwidth degrades.
configuration.degradationPrefer = DegradationPreference.MaintainBalanced;
_engine.setVideoEncoderConfiguration(configuration);
// Deliver user-account info synchronously with join callbacks.
_engine.setParameters('{"rtc.sync_user_account_callback": true}');
_joinChannel();
}
Channel joining:
/// Requests microphone and camera access, then joins the configured
/// channel, identifying ourselves with the composite user account
/// 'residentId|residentName|1'.
Future _joinChannel() async {
  await [Permission.microphone, Permission.camera].request();
  await _engine.joinChannel(null, widget.channelName!,
      '${widget.residentId}|${widget.residentName}|1', 0);
}
Here is callbacks overriding:
/// Registers all Agora engine callbacks that keep [connectedUsers],
/// [currentConnectedUser] and the joined flag in sync with the channel.
void _addListeners() {
  // Fall back to the low-quality remote video stream under poor network
  // conditions instead of dropping video entirely.
  _engine
      .setRemoteSubscribeFallbackOption(StreamFallbackOptions.VideoStreamLow);
  _engine.setEventHandler(
    RtcEngineEventHandler(
      // The local user joined: record ourselves as the current user.
      joinChannelSuccess: (channel, uid, elapsed) {
        currentConnectedUser = ConnectedUser(
          id: uid,
          name: widget.residentName,
          imageUrl: residentAvatarUrl,
          reshubId: widget.residentId,
          muteAudio: isAudioMuted,
          muteVideo: isVideoMuted,
        );
        setState(() {
          isJoined = true;
        });
      },
      error: (error) {
        print('agora error: ${error.toString()}');
      },
      // A remote user joined: resolve their user account into a
      // ConnectedUser entry.
      userJoined: (uid, elapsed) async {
        // BUGFIX: getUserInfoByUid throws "type 'Null' is not a subtype
        // of type 'String' in type cast" for users who joined via the
        // Agora Web SDK, because the web client registers no user
        // account. Treat that failure like an empty account instead of
        // crashing with an unhandled exception.
        UserInfo? account;
        try {
          account = await _engine.getUserInfoByUid(uid);
        } catch (e) {
          print('getUserInfoByUid failed for uid $uid: $e');
        }
        late ConnectedUser user;
        if (account == null || account.userAccount.isEmpty) {
          // No account metadata: show the numeric uid as the name.
          user = ConnectedUser(
            id: uid,
            name: uid.toString(),
            reshubId: uid.toString(),
            muteAudio: false,
            muteVideo: false,
          );
        } else {
          // userAccount is encoded as 'reshubId|name|version'.
          final List<String> accSplit = account.userAccount.split('|');
          user = ConnectedUser(
            id: uid,
            reshubId: accSplit[0],
            name: accSplit[1],
            muteAudio: false,
            muteVideo: false,
            imageUrl: avatars[accSplit[0]],
          );
        }
        setState(() {
          // Replace any stale entry for the same uid.
          connectedUsers.removeWhere((element) => element.id == user.id);
          connectedUsers.add(user);
        });
      },
      // Account metadata arrived (possibly after userJoined): refresh the
      // corresponding entry with the decoded name and avatar.
      userInfoUpdated: (int num, UserInfo user) async {
        final List<String> accSplit = user.userAccount.split('|');
        final ConnectedUser usr = ConnectedUser(
            id: user.uid,
            reshubId: accSplit[0],
            name: accSplit[1],
            muteAudio: false,
            muteVideo: false,
            imageUrl: avatars[accSplit[0]]);
        setState(() {
          connectedUsers.removeWhere((element) => element.id == usr.id);
          connectedUsers.add(usr);
        });
      },
      // Mirror the remote user's audio mute state into our model.
      remoteAudioStateChanged: (int uid, AudioRemoteState state,
          AudioRemoteStateReason reason, int elapsed) {
        if (reason == AudioRemoteStateReason.RemoteMuted) {
          setState(() {
            connectedUsers.forEach((element) {
              if (element.id == uid) {
                element.muteAudio = true;
                // A muted user cannot be speaking.
                element.isSpeaking = false;
              }
            });
          });
        } else if (reason == AudioRemoteStateReason.RemoteUnmuted) {
          setState(() {
            connectedUsers.forEach((element) {
              if (element.id == uid) {
                element.muteAudio = false;
              }
            });
          });
        }
      },
      // Mirror the remote user's video mute state into our model.
      remoteVideoStateChanged: (int uid, VideoRemoteState state,
          VideoRemoteStateReason reason, int elapsed) {
        if (reason == VideoRemoteStateReason.RemoteUnmuted) {
          setState(() {
            connectedUsers.forEach((element) {
              if (element.id == uid) {
                element.muteVideo = false;
              }
            });
          });
        }
        if (reason == VideoRemoteStateReason.RemoteMuted) {
          setState(() {
            connectedUsers.forEach((element) {
              if (element.id == uid) {
                element.muteVideo = true;
              }
            });
          });
        }
      },
      // Highlight whoever is currently talking (volume > 4). In this
      // callback uid 0 denotes the local user.
      audioVolumeIndication: (listParticipants, totalVolume) {
        listParticipants.forEach((participant) {
          final speaking = participant.volume > 4;
          if (participant.uid == 0) {
            currentConnectedUser.isSpeaking = speaking;
          } else {
            connectedUsers
                .firstWhereOrNull(
                    (element) => element.id == participant.uid)
                ?.isSpeaking = speaking;
          }
        });
        setState(() {});
      },
      // A remote user left the channel: drop them from the list.
      userOffline: (uid, reason) {
        setState(() {
          connectedUsers.removeWhere((element) => element.id == uid);
        });
      },
      // We left the channel: reset all call state.
      leaveChannel: (stats) {
        setState(() {
          isJoined = false;
          connectedUsers.clear();
        });
      },
    ),
  );
}
All this code works fine if I connect between the same platforms, e.g. web + web or mobile + mobile. I only have problems when I try to create a connection between two different platforms.
Perhaps somebody has a manual about multiplatform settings? Or some ideas about how to set up agora for calling between two different platforms?
You can't get UserInfo by uid for a user who is connected via the web app. In the Agora web SDK you can only operate with the uid (no additional information). So, if you need data about a connected user, you'll have to obtain it some other way.

Flutter Agora PlatformException(7, not initialized, null, null)

I am developing a voice call application using the Flutter Agora SDK. So far, I have been able to make voice calls comfortably without any errors. But today, while testing on my real device, it throws an error even though I haven't made any changes to the initialization process. Strangely, it works without any error on the emulator; the error occurs only on the real device.
/// Initialises the Agora voice engine, registers its event handlers, and —
/// when this device originated the call — joins the channel.
///
/// Any failure during setup is logged and surfaced via [failureSnackbar].
Future<void> initAgora() async {
  await [Permission.microphone].request();
  try {
    engine = await RtcEngine.createWithContext(RtcEngineContext(APP_ID));
    await engine.enableAudio();
    await engine.setChannelProfile(ChannelProfile.Communication);

    // Most handlers only log; microphoneEnabled mirrors the mic state into
    // the controller, and userOffline ends the call when the peer leaves.
    final handler = RtcEngineEventHandler(
      activeSpeaker: (i) => log("Active Speaker: $i"),
      microphoneEnabled: (enable) {
        log("Microphone: $enable");
        callingController.microphoneState.value = enable;
      },
      warning: (warningCode) => print(warningCode.toString()),
      rtcStats: (stats) => log("User Count: ${stats.userCount}"),
      connectionStateChanged: (state, reason) => log(
          "Connection Changed : ${state.toString()}, ${reason.toString()}"),
      joinChannelSuccess: (String channel, int uid, int elapsed) =>
          log('joinChannelSuccess $channel $uid'),
      userJoined: (int uid, int elapsed) => log('userJoined $uid'),
      userOffline: (int uid, UserOfflineReason reason) {
        log('userOffline $uid');
        callingController.finishCall(uid, "user_left");
      },
      error: (error) => log("ERROR: $error", name: "AGORA"),
    );
    engine.setEventHandler(handler);

    // Only the caller joins immediately; the callee joins on accept.
    if (callingController.isCallerMe) {
      await joinChannel();
    }
  } catch (e) {
    print(e);
    failureSnackbar(e.toString());
  }
}
The error that you shared occurs when you try calling a method before the SDK has been initialised properly. Can you please check your RtcEngine config. Also, if you have enabled tokens in your project please make sure that you pass it to the config.

Flutter Agora.io calling screen

I want to add in-app (video) calling like Messenger (Facebook) does. It works when one party creates channel and another one joins.
But is there a way to create calling screen where party B can accept or reject call? I am looking in Agora.io documentation but cannot find anything suitable for this.
This is my code though...
/// Sets up the Agora engine and joins the hard-coded 'Test' channel.
///
/// Bails out early — logging user-visible hints — when APP_ID has not been
/// filled in, since the engine cannot start without it.
Future<void> initialize() async {
  if (APP_ID.isEmpty) {
    setState(() {
      _infoStrings
        ..add(
          'APP_ID missing, please provide your APP_ID in settings.dart',
        )
        ..add('Agora Engine is not starting');
    });
    return;
  }
  await _initAgoraRtcEngine();
  _addAgoraEventHandlers();
  // Allow web SDK clients to interoperate with this native client.
  await AgoraRtcEngine.enableWebSdkInteroperability(true);
  // Configure the low-bitrate fallback stream.
  await AgoraRtcEngine.setParameters('''
{\"che.video.lowBitRateStreamParameter\":{\"width\":320,\"height\":180,\"frameRate\":15,\"bitRate\":140}}''');
  await AgoraRtcEngine.joinChannel(null, 'Test', null, 0);
}
/// Creates the Agora RTC engine and enables its video module.
///
/// BUGFIX: both calls return Futures that the original fired and forgot,
/// so initialize() could run enableWebSdkInteroperability/joinChannel
/// before the engine was actually created. Awaiting them guarantees the
/// engine is ready when this function's Future completes.
Future<void> _initAgoraRtcEngine() async {
  await AgoraRtcEngine.create(APP_ID);
  await AgoraRtcEngine.enableVideo();
}
/// Wires up the static AgoraRtcEngine callbacks used by this screen.
///
/// Each handler appends a human-readable line to [_infoStrings]; the
/// join/offline handlers additionally keep [_users] in sync with who is
/// currently in the channel.
void _addAgoraEventHandlers() {
  AgoraRtcEngine.onError = (dynamic code) {
    setState(() => _infoStrings.add('onError: $code'));
  };
  AgoraRtcEngine.onJoinChannelSuccess =
      (String channel, int uid, int elapsed) {
    setState(() => _infoStrings.add('onJoinChannel: $channel, uid: $uid'));
  };
  AgoraRtcEngine.onLeaveChannel = () {
    setState(() {
      _infoStrings.add('onLeaveChannel');
      _users.clear();
    });
  };
  AgoraRtcEngine.onUserJoined = (int uid, int elapsed) {
    setState(() {
      _infoStrings.add('userJoined: $uid');
      _users.add(uid);
    });
  };
  AgoraRtcEngine.onUserOffline = (int uid, int reason) {
    setState(() {
      _infoStrings.add('userOffline: $uid');
      _users.remove(uid);
    });
  };
  AgoraRtcEngine.onFirstRemoteVideoFrame =
      (int uid, int width, int height, int elapsed) {
    setState(() {
      _infoStrings.add('firstRemoteVideo: $uid ${width}x $height');
    });
  };
}
You will need to push the channelId to the other user's phone in this case.
The CS Guy has created a very useful YouTube video implementing this step as well as the calling screen:
https://www.youtube.com/watch?v=v9ngriCV0J0
You need to use Native ConnectionService for Android and Callkit of iOS.
You can find the official Agora samples for the above feature here: https://github.com/AgoraIO/Advanced-Video/tree/master/Calling-Interface, but I don't think Agora has call-interface sample in Flutter, you have to write the wrapper on your own for now.
widget.chatRoomId is the id specified for both of the users when you create a chatroom for them.
/// Requests camera and microphone access, then opens the call screen.
///
/// [widget.chatRoomId] doubles as the Agora channel name; nothing happens
/// when it is empty.
Future<void> onJoin() async {
  // update input validation
  if (widget.chatRoomId.isEmpty) return;
  // await for camera and mic permissions before pushing video page
  await _handleCameraAndMic();
  // push video page with given channel name
  final route = MaterialPageRoute(
    builder: (context) => CallPage(
      channelName: widget.chatRoomId,
      // TODO: set to _role
      role: ClientRole.Broadcaster,
    ),
  );
  await Navigator.push(context, route);
}