Whenever I run the app with the face detector in debug, profile, or release mode it works fine, but when I build the app, install it, and then start it, the app automatically closes right after the camera stream starts.
I'm using google_ml_kit: ^0.7.3 and camera: ^0.9.4+14.
This is the code I use to initialize the camera and start detecting on every frame:
void initCamera() async {
  final description = await availableCameras().then(
    (cameras) => cameras.firstWhere(
      (camera) => camera.lensDirection == CameraLensDirection.front,
    ),
  );
  cameraControllerNotifier.value = CameraController(
    description,
    ResolutionPreset.low,
    enableAudio: false,
  );
  await cameraControllerNotifier.value!.initialize();
  await Future.delayed(const Duration(milliseconds: 500));
  isDetecting = false;
  cameraControllerNotifier.value!.startImageStream((img) async {
    if (isDetecting) return;
    if (cameraControllerNotifier.value != null) {
      isDetecting = true;
      final image = InputImage.fromBytes(
        bytes: img.planes[0].bytes,
        inputImageData: InputImageData(
          inputImageFormat:
              InputImageFormatMethods.fromRawValue(img.format.raw)!,
          size: Size(img.width.toDouble(), img.height.toDouble()),
          imageRotation: MlHelper.rotationIntToImageRotation(
            description.sensorOrientation,
          ),
          planeData: null,
        ),
      );
      try {
        final faceDetector = GoogleMlKit.vision.faceDetector(
          const FaceDetectorOptions(
            mode: FaceDetectorMode.accurate,
            enableLandmarks: true,
          ),
        );
        List<Face> _faces = await faceDetector.processImage(image);
        if (_faces.isNotEmpty) {
          //..........
        } else {
          isClose.value = false;
        }
        isDetecting = false;
        // facesNotifier.value = _faces;
      } catch (e) {
        isClose.value = false;
        isDetecting = false;
        log("FaceKIt Error : $e");
      }
    }
  });
  if (mounted) {
    setState(() {});
  }
}
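Whether or not it is related to the crash, one thing the snippet above does is construct a new FaceDetector on every frame and never close it. A minimal sketch (assuming the same google_ml_kit 0.7.3 API used above, with the detector held as a field on the State class, which is not in the original snippet) of creating the detector once and releasing it in dispose():

// Hypothetical sketch: create the detector once instead of once per frame.
final FaceDetector faceDetector = GoogleMlKit.vision.faceDetector(
  const FaceDetectorOptions(
    mode: FaceDetectorMode.accurate,
    enableLandmarks: true,
  ),
);

// Inside the image stream callback, reuse the same instance:
// final faces = await faceDetector.processImage(image);

@override
void dispose() {
  // Release the native detector when the widget goes away.
  faceDetector.close();
  super.dispose();
}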
final serviceAccount = ServiceAccount.fromString(r'''{
  Json private keys
}''');
final speechToText = SpeechToText.viaServiceAccount(serviceAccount);
final config = RecognitionConfig(
    encoding: AudioEncoding.LINEAR16,
    model: RecognitionModel.basic,
    enableAutomaticPunctuation: true,
    sampleRateHertz: 16000,
    languageCode: 'en-US');
final streamingConfig =
    StreamingRecognitionConfig(config: config, interimResults: true);
Stream<List<int>> stream =
    (await MicStream.microphone(sampleRate: 16000)) as Stream<List<int>>;
final responseStream =
    speechToText.streamingRecognize(streamingConfig, stream);
if (_isListining) {
  setState(() {
    _isListining = false;
  });
} else {
  setState(() {
    _isListining = true;
    print('hello');
    try {
      responseStream.listen((data) {
        print(data.results.length);
        setState(() {
          _sendController.text = data.results
              .map((e) => e.alternatives.first.transcript)
              .join('\n');
        });
        print(content);
      }, onDone: () {
        setState(() {
          _isListining = false;
        });
      }, onError: (e) {
        print('error : $e');
        setState(() {
          _isListining = false;
        });
      });
      print('streaming');
    } catch (e) {
      print('not streaming');
      print(e);
    }
  });
}
Links to the packages used:
https://pub.dev/packages/google_speech
https://pub.dev/packages/mic_stream
So the problem is that the microphone streaming is working fine, but the responseStream from the Google API is not printing or doing anything.
After reading the docs I found this:
https://cloud.google.com/speech-to-text/docs/troubleshooting#returns_an_empty_response
and I don't know whether it is the problem or not.
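One way to narrow this down is to attach a listener that logs errors and empty results explicitly, since a silent responseStream usually means either an error being swallowed or responses whose results list is empty (the case described in the troubleshooting link above). A minimal diagnostic sketch, using only the google_speech objects already created in the snippet:

// Hypothetical diagnostic listener for the responseStream built above.
responseStream.listen(
  (data) {
    if (data.results.isEmpty) {
      // The API answered but recognized nothing - see the
      // "returns an empty response" troubleshooting entry.
      print('response received, but results is empty');
    } else {
      for (final result in data.results) {
        print('transcript: ${result.alternatives.first.transcript}');
      }
    }
  },
  onError: (e) => print('streamingRecognize error: $e'),
  onDone: () => print('streamingRecognize stream closed'),
);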
How do I change from the progress_dialog package to the sn_progress_dialog package? I'm trying to make a file downloader app with a progress dialog, but the progress_dialog package is not null safe.
Future _downloadAndSaveFileToStorage(String urlPath) async {
  final name = urlPdf.split('/').last;
  ProgressDialog pr;
  pr = ProgressDialog(context, type: ProgressDialogType.Normal);
  pr.style(message: "Download file ...");
  try {
    await pr.show();
    final Directory _documentDir =
        Directory('/storage/emulated/0/MyDocuments/$name');
    await dio!.download(urlPath, _documentDir.path,
        onReceiveProgress: (rec, total) {
      setState(() {
        _isLoading = true;
        progress = ((rec / total) * 100).toStringAsFixed(0) + " %";
        print(progress);
        pr.update(message: "Please wait : $progress");
      });
    });
    pr.hide();
    _fileFullPath = _documentDir.path;
  } catch (e) {
    print(e);
  }
  setState(() {
    _isLoading = false;
  });
}
And this is a screenshot of my app with the progress_dialog package.
Just do it like this:
Future _downloadAndSaveFileToStorage(String urlPath) async {
  final name = urlPdf.split('/').last;
  ProgressDialog pd = ProgressDialog(context: context);
  try {
    pd.show(max: 100, msg: 'Download file ...');
    final Directory _documentDir =
        Directory('/storage/emulated/0/MyDocuments/$name');
    await dio!.download(urlPath, _documentDir.path,
        onReceiveProgress: (rec, total) {
      setState(() {
        _isLoading = true;
        progress = ((rec / total) * 100).toStringAsFixed(0) + " %";
        print(progress);
        pd.update(progress);
      });
    });
    pd.close();
    _fileFullPath = _documentDir.path;
  } catch (e) {
    pd.close();
    print(e);
  }
  setState(() {
    _isLoading = false;
  });
}
And you can change the colors or the message in the show method like this:
pd.show(
    max: 100,
    msg: 'Preparing Download...',
    progressType: ProgressType.valuable,
    backgroundColor: Color(0xff212121),
    progressValueColor: Color(0xff3550B4),
    progressBgColor: Colors.white70,
    msgColor: Colors.white,
    valueColor: Colors.white);
It just needs a little tweaking:
Future _downloadAndSaveFileToStorage(String urlPath) async {
  final name = urlPdf.split('/').last;
  ProgressDialog pd = ProgressDialog(context: context);
  try {
    pd.show(
        max: 100,
        msg: 'Preparing Download...',
        progressType: ProgressType.valuable,
        backgroundColor: Color(0xff212121),
        progressValueColor: Color(0xff3550B4),
        progressBgColor: Colors.white70,
        msgColor: Colors.white,
        valueColor: Colors.white);
    final Directory _documentDir =
        Directory('/storage/emulated/0/MyDocuments/$name');
    await dio!.download(urlPath, _documentDir.path,
        onReceiveProgress: (rec, total) {
      setState(() {
        _isLoading = true;
        int progress = (((rec / total) * 100).toInt());
        print(progress);
        pd.update(value: progress, msg: 'File Downloading');
      });
    });
    pd.close();
    _fileFullPath = _documentDir.path;
  } catch (e) {
    pd.close();
    print(e);
  }
  setState(() {
    _isLoading = false;
  });
}
I am building a simple music-player-type app. I am facing an issue: when my audio has played to the end, it shows
'package:flutter/src/material/slider.dart': Failed assertion: line 166 pos 15: 'value >= min && value <= max': is not true.
My code
Expanded(
  child: Slider(
    activeColor: Color(0xffe7ad29),
    inactiveColor: Color(0xFF707070),
    value: model.playerBarValue,
    onChanged: (val) {
      model.seekFromBar(val);
    },
  ),
),
class PlayerProvider extends ChangeNotifier {
final player = AssetsAudioPlayer();
String link;
Duration playerTimeNow = Duration(seconds: 0);
Duration playerLength;
double playerBarValue = 0.0;
Episode episode;
Item podcastInfo;
String episodeName, episodeThumbnail;
bool isPlaying = false;
PlayerProvider() {
updateState();
}
play() async {
print("Started Playing");
// Stop previous playing
player.stop();
playerTimeNow = Duration(seconds: 0);
isPlaying = false;
// link = updateLinkToHttps(link);
print(link);
final audio = Audio.network(
link,
metas: Metas(
title: podcastInfo.collectionName,
artist: podcastInfo.artistName,
album: podcastInfo.trackName,
image: MetasImage.network(podcastInfo.artworkUrl600),
//can be MetasImage.network
),
);
var duration = await player.open(
audio,
showNotification: true,
notificationSettings: NotificationSettings(
//seekBarEnabled: false,
//stopEnabled: true,
//customStopAction: (player){
// player.stop();
//}
//prevEnabled: false,
customNextAction: (player) {
print("next1");
forward();
}, customPrevAction: (player) {
print("next2");
backword();
}
//customStopIcon: AndroidResDrawable(name: "ic_stop_custom"),
//customPauseIcon: AndroidResDrawable(name:"ic_pause_custom"),
//customPlayIcon: AndroidResDrawable(name:"ic_play_custom"),
),
);
isPlaying = true;
// player.play(); // Usually you don't want to wait for playback to finish.
print("started");
setState();
}
pause() async {
await player.pause();
isPlaying = false;
print("paused");
setState();
}
resume() async {
//TODO: Setup resume
await player.seek(playerTimeNow);
player.play();
isPlaying = true;
}
speed(double val) async {
print(val);
//TODO: Setup resume
await player.setPlaySpeed(val);
isPlaying = true;
}
updateState() {
player.currentPosition.listen((event) {
playerTimeNow = event;
updatePlayerBar();
});
}
updatePlayerBar() {
int totalLengthInMilliSeconds = playerLength.inMilliseconds;
int totalPlayedInMilliSeconds = playerTimeNow.inMilliseconds;
double newPlayerBarValue =
totalPlayedInMilliSeconds / totalLengthInMilliSeconds;
playerBarValue = newPlayerBarValue;
// print(playerBarValue);
// print(playerTimeNow);
// print(playerLength);
// print(playerLength);
// if (playerLength == playerTimeNow) {
// print('Finish');
// player.stop();
// }
notifyListeners();
}
forward() async {
//TODO: Check if at-least 10 seconds are left;
if (playerTimeNow + Duration(seconds: 10) < playerLength)
await player.seek(playerTimeNow + Duration(seconds: 10));
else
await player.seek(playerLength);
print("Forwarded 10 seconds");
}
backword() async {
Duration back = playerTimeNow.inSeconds > 10
? playerTimeNow - Duration(seconds: 10)
: Duration(seconds: 0);
await player.seek(back);
print("Backwarded 10 seconds");
}
seekFromBar(double val) async {
double totalMillis = playerLength.inMilliseconds * val;
int newMillis = totalMillis.toInt();
Duration newSeekLocations = Duration(milliseconds: newMillis);
await player.seek(newSeekLocations);
print("Seek from Bar");
}
setState() {
notifyListeners();
}
}
When the player's time is finished, it shows this error on a red screen. How do I fix this? I mean, when the time is finished the value should go back to 0 or something. The issue is in the slider, I think, because if I go back from the red screen my slider is at zero.
Check that the value of model.playerBarValue is neither NaN nor null, and set a max value for the slider.
Slider(
  value: model.playerBarValue.isNaN == true || model.playerBarValue == null
      ? 0
      : model.playerBarValue,
  min: 0.0,
  max: duration.inSeconds.toDouble() + 1.0,
  onChanged: (value) {
    . . .
  },
)
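Another option, since playerBarValue in the provider above is a 0–1 fraction, is to clamp it where it is computed so it can never exceed the Slider's default max of 1.0 (the assertion fires if the reported position briefly exceeds the total length). A small sketch of updatePlayerBar with that guard, reusing the same fields as the PlayerProvider above:

updatePlayerBar() {
  // Guard against a missing or zero length before dividing.
  if (playerLength == null || playerLength.inMilliseconds == 0) {
    playerBarValue = 0.0;
  } else {
    final fraction =
        playerTimeNow.inMilliseconds / playerLength.inMilliseconds;
    // Keep the value inside the Slider's default [0.0, 1.0] range.
    playerBarValue = fraction.clamp(0.0, 1.0).toDouble();
  }
  notifyListeners();
}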
I'm using the AudioService plugin and it works fine on Android and in debug mode on iOS. But once I test it on a real iOS device (in release mode) it gives me exceptions.
First of all:
void quranStartListeningPoint() => AudioServiceBackground.run(() => QuranAudioService());
Second, this is my play function:
void startListeningToAyah({model.Ayah ayah, model.Surah surah}) async {
  if (AudioService.running) {
    await AudioService.stop();
    await Future.delayed(Duration(seconds: 1));
  }
  await AudioService.start(
    androidNotificationColor: 0XFFB590EE,
    backgroundTaskEntrypoint: quranStartListeningPoint,
    params: {
      'sheikhId': "$selectedReciter",
      "ayahIndex": ayah.numberInSurah,
      "surah": surah.number,
      "quranModel": _quranDao.quranModelAsJson
    },
  );
}
Next, I convert quranModelAsJson from JSON back into a model, because I need the whole model in the service class and this is the only way I know of to send it:
class QuranAudioService extends BackgroundAudioTask {
final _audioPlayer = AudioPlayer();
final String baseUrl = "https://cdn.alquran.cloud/media/audio/ayah/";
String ayahUrl;
int surahNumber;
int ayahIndex;
int renewSurah = 0;
QuranModel model;
Surah surah;
String sheikhId;
@override
Future<void> onStart(Map<String, dynamic> params) async {
await _audioPlayer.setReleaseMode(ReleaseMode.STOP);
implementParams(params);
onCompleteListener();
AudioServiceBackground.setState(
systemActions: [MediaAction.seekTo],
controls: getPlayControllers(),
playing: true,
processingState: AudioProcessingState.connecting);
ayahUrl = "$baseUrl$sheikhId/${surah.ayahs[ayahIndex].number}";
await _audioPlayer.play("$ayahUrl");
setMediaItem();
AudioServiceBackground.setState(
controls: getPlayControllers(),
playing: true,
processingState: AudioProcessingState.ready,
systemActions: [MediaAction.seekTo],
);
}
void setMediaItem() {
AudioServiceBackground.setMediaItem(
MediaItem(
extras: {"surahIndex": surahNumber, "renewSurah": renewSurah},
id: "$ayahIndex",
album: "${surah.englishName}",
title: "${surah.name}",
),
);
}
@override
Future<void> onPause() async {
// Broadcast that we're paused, and what controls are available.
AudioServiceBackground.setState(
controls: getPauseControllers(),
systemActions: [MediaAction.seekTo],
playing: false,
processingState: AudioProcessingState.ready);
// Pause the audio.
_audioPlayer.pause();
}
@override
Future<void> onStop() async {
_audioPlayer.stop();
if (ayahIndex == surah.ayahs.length) {
await AudioServiceBackground.setState(
controls: [replayControl],
playing: false,
processingState: AudioProcessingState.stopped);
} else {
await AudioServiceBackground.setState(
controls: [],
playing: false,
processingState: AudioProcessingState.stopped);
return super.onStop();
}
}
@override
Future<void> onPlay() async {
setMediaItem();
AudioServiceBackground.setState(
controls: getPlayControllers(),
playing: true,
processingState: AudioProcessingState.ready,
systemActions: [MediaAction.seekTo],
);
await _audioPlayer.play(ayahUrl);
renewSurah = 0;
}
@override
Future<void> onSkipToNext() async {
playNext();
}
@override
Future<void> onSkipToPrevious() async {
playPrevious();
}
// @override
// void onRewind() {
// ayahIndex = 0;
// ayahUrl = "$baseUrl$sheikhId/${surah.ayahs[ayahIndex].number}";
// this.onPlay();
// }
void implementParams(Map<String, dynamic> params) {
surahNumber = params["surah"] - 1;
ayahIndex = params["ayahIndex"] - 1;
sheikhId = params["sheikhId"];
if (model == null) model = QuranModel.fromJson(params["quranModel"]);
surah = model.surahs[surahNumber];
}
void onCompleteListener() {
_audioPlayer.onPlayerCompletion.listen((event) {
playNext();
});
}
void playNext() async {
ayahIndex++;
if (ayahIndex < surah.ayahs.length) {
ayahUrl = "$baseUrl$sheikhId/${surah.ayahs[ayahIndex].number}";
this.onPlay();
} else
changeNextSurahIndex();
}
void changeNextSurahIndex() {
renewSurah = 1;
if (surahNumber == 113) {
surahNumber = 0;
} else
surahNumber++;
ayahIndex = 0;
surah = model.surahs[surahNumber];
ayahUrl = "$baseUrl$sheikhId/${surah.ayahs[ayahIndex].number}";
this.onPlay();
}
void changePreviousSurahIndex() {
if (surahNumber == 0) {
surahNumber = 113;
} else
surahNumber--;
ayahIndex = 0;
renewSurah = 1;
surah = model.surahs[surahNumber];
ayahUrl = "$baseUrl$sheikhId/${surah.ayahs[ayahIndex].number}";
this.onPlay();
}
void playPrevious() async {
if (ayahIndex > 0) {
ayahIndex--;
ayahUrl = "$baseUrl$sheikhId/${surah.ayahs[ayahIndex].number}";
this.onPlay();
} else {
changePreviousSurahIndex();
}
}
List<MediaControl> getPlayControllers() {
return [
skipToNextControl,
pauseControl,
skipToPreviousControl,
stopControl
];
}
List<MediaControl> getPauseControllers() {
return [skipToNextControl, playControl, skipToPreviousControl, stopControl];
}
}
It was always telling me that ayahs was called on null, which means my JSON wasn't converted to the model. So I deleted this implementation and just added a URL to play, and the exception was:
2020-08-31 17:58:08.458205-0400 Runner[700:75506] iOS => call startHeadlessService, playerId bb98efb6-a819-4ea7-a566-1dc6f0ff3df4
2020-08-31 17:58:08.471709-0400 Runner[700:76237] [VERBOSE-2:ui_dart_state.cc(166)] Unhandled Exception: NoSuchMethodError: The method '*' was called on null.
Receiver: null
Tried calling: *()
#0 AudioServiceBackground.run (package:audio_service/audio_service.dart:144)
<asynchronous suspension>
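For what it's worth, the params map passed to AudioService.start travels to the background isolate over a platform channel, so as far as I know it can only carry plain message-codec types (numbers, strings, booleans, lists, and maps). One way to make sure the whole model survives the trip, sketched below under the assumption that QuranModel has a toJson() and that dart:convert is imported on both sides, is to send it as an encoded string and decode it back inside implementParams:

// UI side (sketch): encode the model to a plain String before AudioService.start, e.g.
// "quranModel": jsonEncode(quranModel.toJson()),

// Background task side (sketch): decode it back inside implementParams.
void implementParams(Map<String, dynamic> params) {
  surahNumber = params["surah"] - 1;
  ayahIndex = params["ayahIndex"] - 1;
  sheikhId = params["sheikhId"];
  if (model == null) {
    model = QuranModel.fromJson(jsonDecode(params["quranModel"]));
  }
  surah = model.surahs[surahNumber];
}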
await AudioService.start(
  backgroundTaskEntrypoint: _audioPlayerTaskEntrypoint,
  androidNotificationChannelName: 'Audio Player',
  androidNotificationColor: 0xFF2196f3,
  androidNotificationIcon: 'mipmap/ic_launcher',
  params: getParams(),
);
This is the code snippet where I am calling AudioService.start, but I am unable to start the service.
I am fetching the audio items from Firebase and want to load them as a list into audio_service, but I am unable to do that. This is my class where I have defined an audio service extending the background task:
import 'package:audio_service/audio_service.dart';
import 'package:just_audio/just_audio.dart';
MediaControl playControl = MediaControl(
androidIcon: 'drawable/ic_action_play_arrow',
label: 'Play',
action: MediaAction.play,
);
MediaControl pauseControl = MediaControl(
androidIcon: 'drawable/ic_action_pause',
label: 'Pause',
action: MediaAction.pause,
);
MediaControl skipToNextControl = MediaControl(
androidIcon: 'drawable/ic_action_skip_next',
label: 'Next',
action: MediaAction.skipToNext,
);
MediaControl skipToPreviousControl = MediaControl(
androidIcon: 'drawable/ic_action_skip_previous',
label: 'Previous',
action: MediaAction.skipToPrevious,
);
MediaControl stopControl = MediaControl(
androidIcon: 'drawable/ic_action_stop',
label: 'Stop',
action: MediaAction.stop,
);
class AudioPlayerTask extends BackgroundAudioTask {
//
var _queue = <MediaItem>[];
int _queueIndex = -1;
AudioPlayer _audioPlayer = new AudioPlayer();
AudioProcessingState _skipState;
bool _playing;
bool get hasNext => _queueIndex + 1 < _queue.length;
bool get hasPrevious => _queueIndex > 0;
MediaItem get mediaItem => _queue[_queueIndex];
StreamSubscription<AudioPlaybackState> _playerStateSubscription;
StreamSubscription<AudioPlaybackEvent> _eventSubscription;
@override
void onStart(Map<String, dynamic> params) {
print("-------------------------------------started");
_queue.clear();
List mediaItems = params['data'];
for (int i = 0; i < mediaItems.length; i++) {
MediaItem mediaItem = MediaItem.fromJson(mediaItems[i]);
_queue.add(mediaItem);
}
_playerStateSubscription = _audioPlayer.playbackStateStream
.where((state) => state == AudioPlaybackState.completed)
.listen((state) {
_handlePlaybackCompleted();
});
_eventSubscription = _audioPlayer.playbackEventStream.listen((event) {
final bufferingState =
event.buffering ? AudioProcessingState.buffering : null;
switch (event.state) {
case AudioPlaybackState.paused:
_setState(
processingState: bufferingState ?? AudioProcessingState.ready,
position: event.position);
break;
case AudioPlaybackState.playing:
_setState(
processingState: bufferingState ?? AudioProcessingState.ready,
position: event.position);
break;
case AudioPlaybackState.connecting:
_setState(
processingState: _skipState ?? AudioProcessingState.connecting,
position: event.position);
break;
default:
}
});
AudioServiceBackground.setQueue(_queue);
onSkipToNext();
}
@override
void onPlay() {
if (_skipState == null) {
_playing = true;
_audioPlayer.play();
}
}
@override
void onPause() {
_playing = false;
_audioPlayer.pause();
}
@override
void onSkipToNext() async {
skip(1);
}
@override
void onSkipToPrevious() {
skip(-1);
}
void skip(int offset) async {
int newPos = _queueIndex + offset;
if (!(newPos >= 0 && newPos < _queue.length)) {
return;
}
if (null == _playing) {
_playing = true;
} else if (_playing) {
await _audioPlayer.stop();
}
_queueIndex = newPos;
_skipState = offset > 0
? AudioProcessingState.skippingToNext
: AudioProcessingState.skippingToPrevious;
AudioServiceBackground.setMediaItem(mediaItem);
await _audioPlayer.setUrl(mediaItem.id);
print(mediaItem.id);
_skipState = null;
if (_playing) {
onPlay();
} else {
_setState(processingState: AudioProcessingState.ready);
}
}
@override
Future<void> onStop() async {
_playing = false;
await _audioPlayer.stop();
await _audioPlayer.dispose();
_playerStateSubscription.cancel();
_eventSubscription.cancel();
return await super.onStop();
}
@override
void onSeekTo(Duration position) {
_audioPlayer.seek(position);
}
@override
void onClick(MediaButton button) {
playPause();
}
@override
Future<void> onFastForward() async {
await _seekRelative(fastForwardInterval);
}
@override
Future<void> onRewind() async {
await _seekRelative(rewindInterval);
}
Future<void> _seekRelative(Duration offset) async {
var newPosition = _audioPlayer.playbackEvent.position + offset;
if (newPosition < Duration.zero) {
newPosition = Duration.zero;
}
if (newPosition > mediaItem.duration) {
newPosition = mediaItem.duration;
}
await _audioPlayer.seek(_audioPlayer.playbackEvent.position + offset);
}
_handlePlaybackCompleted() {
if (hasNext) {
onSkipToNext();
} else {
onStop();
}
}
void playPause() {
if (AudioServiceBackground.state.playing)
onPause();
else
onPlay();
}
Future<void> _setState({
AudioProcessingState processingState,
Duration position,
Duration bufferedPosition,
}) async {
print('SetState $processingState');
if (position == null) {
position = _audioPlayer.playbackEvent.position;
}
await AudioServiceBackground.setState(
controls: getControls(),
systemActions: [MediaAction.seekTo],
processingState:
processingState ?? AudioServiceBackground.state.processingState,
playing: _playing,
position: position,
bufferedPosition: bufferedPosition ?? position,
speed: _audioPlayer.speed,
);
}
List<MediaControl> getControls() {
if (_playing) {
return [
skipToPreviousControl,
pauseControl,
stopControl,
skipToNextControl
];
} else {
return [
skipToPreviousControl,
playControl,
stopControl,
skipToNextControl
];
}
}
}
This is my class.
class AudioState {
final List<MediaItem> queue;
final MediaItem mediaItem;
final PlaybackState playbackState;
AudioState(this.queue, this.mediaItem, this.playbackState);
}
You should override this method in the AudioPlayerTask class:
@override
Future<void> onAddQueueItem(MediaItem mediaItem) async {
  // queue.add(mediaItem); or something like this to update your queue
  await AudioServiceBackground.setQueue(queue);
  try {
    await _player.load(ConcatenatingAudioSource(
      children:
          queue.map((item) => AudioSource.uri(Uri.parse(item.id))).toList(),
    ));
    // In this example, we automatically start playing on start.
    onPlay();
  } catch (e) {
    print("Error: $e");
    onStop();
  }
}
Then you can call this function in your UI:
await AudioService.start(
  backgroundTaskEntrypoint: audioPlayerTaskEntrypoint,
  androidNotificationChannelName: 'Audio Service Demo',
  // Enable this if you want the Android service to exit the foreground state on pause.
  //androidStopForegroundOnPause: true,
  androidNotificationColor: 0xFF2196f3,
  androidNotificationIcon: 'mipmap/ic_launcher',
  androidEnableQueue: true,
);
// call this func:
await AudioService.addQueueItem(m);
According to the author, the plugin originally didn't allow passing any arguments to the background task directly. It was designed to let the background task itself query what to play from internal storage via SQLite or tools like shared_preferences.
Today, however, the plugin lets you pass media data down to the background task in three different ways that I identified (a small sketch of the second one follows this list):
1. AudioService.addQueueItem(item) combined with onAddQueueItem, as shown in the answer above as well as this one.
2. AudioService.customAction('url', url) combined with onCustomAction.
3. The Map<String, dynamic> params argument of the AudioService.start method, which is received as the argument of the background task's onStart method.
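A minimal sketch of the customAction route (assuming an AudioPlayerTask like the one above, with _player being a just_audio AudioPlayer; the 'url' action name and the URL are just examples):

// UI side: send an arbitrary payload to the running background task.
await AudioService.customAction('url', 'https://example.com/audio.mp3');

// Background task side: handle it in the BackgroundAudioTask subclass.
@override
Future<dynamic> onCustomAction(String name, dynamic arguments) async {
  if (name == 'url') {
    // arguments is whatever the UI passed - here, a URL string.
    await _player.setUrl(arguments as String);
  }
}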