Right way to use static bool - flutter

I am using AudioPlayer in Flutter to play different audio files that are streamed from Firebase via a StreamBuilder. By making the AudioPlayer static I can stop one audio when another one starts playing, but I am unable to do the same with the play button: when a certain song plays, the play button remains in the "play" state even when the song changes.
Here is a snippet of my code:
class _RingtoneWidgetState extends State<RingtoneWidget> {
static AudioPlayer audioPlayer = AudioPlayer();
AuthenticationScreenController authenticationScreenController =
Get.put(AuthenticationScreenController());
@override
void dispose() {
// TODO: implement dispose
super.dispose();
audioPlayer.stop().whenComplete(() {
widget.isPlaying = false;
});
}
@override
Widget build(BuildContext context) {
return Card(
.....
title: CustomText(widget.ringtoneTitle as String, 18,
FontWeight.w500, AppColors.whiteColor),
subtitle: CustomText(
"x audio usado: ${widget.numberOfTimesRingtoneUsed}",
12,
FontWeight.w400,
AppColors.whiteColor),
trailing: GestureDetector(
onTap: () async {
widget.isPlaying
? await audioPlayer.stop().whenComplete(() {
setState(() {
widget.isPlaying = !widget.isPlaying;
});
})
: await audioPlayer
.play(UrlSource(widget.audioUrl as String))
.whenComplete(() {
setState(() {
widget.isPlaying = !widget.isPlaying;
});
});
},
child: widget.isPlaying
? const Icon(
Icons.pause_circle,
color: AppColors.whiteColor,
)
: const Icon(
Icons.play_circle_outlined,
color: AppColors.whiteColor,
),
),
),
],
),
);
}
}
Here is the code for the screen where this widget is listed:
Expanded(
child: Scaffold(
backgroundColor: AppColors.defaultColor,
body: StreamBuilder<QuerySnapshot>(
stream: FirebaseFirestore.instance
.collection('artistdata')
.doc(authInstance.currentUser!.email)
.collection('songs')
.snapshots(),
builder: (BuildContext context,
AsyncSnapshot<QuerySnapshot> snapshot) {
if (snapshot.connectionState ==
ConnectionState.waiting) {
return Text("Loading");
}
FirebaseFirestore.instance
.collection('artistdata')
.doc(authInstance.currentUser!.email)
.collection('songs')
.get()
.then(
(value) {
authenticationScreenController
.totalNumberOfSongs.value = value.size;
},
);
return ListView(
padding: EdgeInsets.only(bottom: 50),
children: snapshot.data!.docs.map((document) {
Map<String, dynamic> data =
document.data()! as Map<String, dynamic>;
return RingtoneWidget(
false,
audioUrl: data['audiourl'],
imageUrl: data['imageurl'],
numberOfTimesRingtoneUsed:
data['numberoftimesused'],
ringtoneTitle: data['songtitle'],
);
}).toList(),
);
},
),
),
),
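For context, a minimal sketch (not part of the original post) of one way to keep the icon in sync across all RingtoneWidget instances: store the currently playing URL in a single static ValueNotifier and rebuild every tile from it. The notifier name _currentUrl and the IconButton layout are assumptions.
class _RingtoneWidgetState extends State<RingtoneWidget> {
  static final AudioPlayer audioPlayer = AudioPlayer();
  // Shared across all instances: URL of the track that is currently playing.
  static final ValueNotifier<String?> _currentUrl = ValueNotifier<String?>(null);

  @override
  Widget build(BuildContext context) {
    return ValueListenableBuilder<String?>(
      valueListenable: _currentUrl,
      builder: (context, playingUrl, _) {
        final isPlaying = playingUrl == widget.audioUrl;
        return IconButton(
          icon: Icon(isPlaying ? Icons.pause_circle : Icons.play_circle_outlined),
          onPressed: () async {
            if (isPlaying) {
              await audioPlayer.stop();
              _currentUrl.value = null;
            } else {
              await audioPlayer.play(UrlSource(widget.audioUrl as String));
              _currentUrl.value = widget.audioUrl as String;
            }
          },
        );
      },
    );
  }
}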

Related

Using a Stack containing a QR code scanner & a CameraPreview leads to preview freezing

I initially tried to find a library that offers a single camera component that can do both QR code scanning and taking a normal picture. I also came across this post that went unanswered (if you have any suggestions, feel free to comment them below), so I decided to use two different components in a Stack and play with their visibility.
I'm using the mobile_scanner library for the QR code scanner and the official docs for the picture-taking functionality.
The idea (until a widget that can do both shows up) is that the user initially sees the CameraPreview() widget, and when they toggle the switch to the ON state, they see the MobileScanner() one. The issue is that although the MobileScanner widget handles its visibility changes normally (pausing and resuming the camera), the CameraPreview one does not: if I toggle the switch on and then off again, the camera preview is frozen. I'm having a hard time understanding whether I should be manually calling _controller.resumePreview(), and at what point exactly.
Here's the screen code:
class CameraComponent extends StatefulWidget {
const CameraComponent({Key? key}) : super(key: key);
@override
State<CameraComponent> createState() => _CameraComponentState();
}
class _CameraComponentState extends State<CameraComponent> {
CameraCubit get _cubit => context.read<CameraCubit>();
bool qrModeEnabled = false;
CameraController? _controller;
late Future<void> _initializeControllerFuture;
Photo? _artwork;
final ArtworkRepository _artworkRepository = getIt<ArtworkRepository>();
@override
void initState() {
super.initState();
_cubit.init();
}
@override
void dispose() {
// Dispose of the controller when the widget is disposed.
_controller?.dispose();
super.dispose();
}
@override
Widget build(BuildContext context) {
return Scaffold(
body: BlocConsumer<CameraCubit, CameraState>(
listener: (BuildContext context, CameraState state) {
state.maybeWhen(
initCamera: (List<CameraDescription> cameras) {
_controller = CameraController(
cameras.firstWhere(
(CameraDescription camera) =>
camera.lensDirection == CameraLensDirection.back,
orElse: () => cameras.first,
),
ResolutionPreset.max,
);
_initializeControllerFuture = _controller!.initialize();
},
orElse: () {},
);
},
builder: (BuildContext context, CameraState state) {
return state.maybeWhen(
initCamera: (_) {
return FutureBuilder<void>(
future: _initializeControllerFuture,
builder: (BuildContext context, AsyncSnapshot<void> snapshot) {
if (snapshot.connectionState == ConnectionState.done) {
return Stack(
children: <Widget>[
Visibility(
visible: qrModeEnabled,
child: MobileScanner(
onDetect: (Barcode barcode,
MobileScannerArguments? args) async {
if (barcode.rawValue == null) {
debugPrint('Failed to scan qr code');
} else {
try {
final int artworkId = int.parse(
barcode.rawValue!.split('_')[1]);
debugPrint(
'QR code found! ${barcode.rawValue!}');
_artwork = _artworkRepository.savedArtworks
.firstWhereOrNull(
(Photo element) => element.id == artworkId,
);
_artwork ??= await _artworkRepository
.getArtworkDetails(id: artworkId);
NavigatorUtils.goToArtworkViewScreen(
context,
artwork: _artwork!,
);
} catch (e) {
rethrow;
}
}
},
),
),
Visibility(
visible: !qrModeEnabled,
child: CameraPreview(_controller!),
),
Align(
child: Image.asset('assets/icons/b_logo_grey.png'),
),
Align(
alignment: Alignment.bottomCenter,
child: Padding(
padding: const EdgeInsets.only(bottom: 144),
child: Text(
'Scan a QR code or take a photo of the artwork',
style: montserratRegular15.copyWith(
color: AppColors.white,
),
),
),
),
Align(
alignment: Alignment.bottomCenter,
child: Padding(
padding: const EdgeInsets.only(bottom: 80),
child: GestureDetector(
onTap: () async {
final XFile image =
await _controller!.takePicture();
final Uint8List bytes =
await image.readAsBytes();
final String base64Image = base64Encode(bytes);
_cubit.addBase64Image(base64Image: base64Image);
},
child: Image.asset(
'assets/icons/shutter_icon.png',
height: 48,
),
),
),
),
Align(
alignment: Alignment.topRight,
child: Padding(
padding: const EdgeInsets.only(top: 10, right: 10),
child: Switch(
value: qrModeEnabled,
onChanged: (bool value) {
setState(() {
qrModeEnabled = !qrModeEnabled;
});
},
activeTrackColor: AppColors.red,
inactiveTrackColor: AppColors.white,
),
),
),
],
);
} else {
return const Center(child: CircularProgressIndicator());
}
},
);
},
orElse: () => const SizedBox.shrink(),
);
},
),
);
}
}
and the CameraCubit.dart file:
class CameraCubit extends Cubit<CameraState> {
CameraCubit({
required ClarifaiRepository clarifaiRepository,
}) : _clarifaiRepository = clarifaiRepository,
super(const CameraState.initial());
final ClarifaiRepository _clarifaiRepository;
Future<void> init() async {
try {
final List<CameraDescription> cameras = await availableCameras();
if (cameras.isNotEmpty) {
emit(CameraState.initCamera(cameras: cameras));
}
} catch (e) {
emit(CameraState.error(e));
}
}
Future<void> addBase64Image({required String base64Image}) async {
emit(const CameraState.loading());
try {
await _clarifaiRepository.addBase64Image(base64Image: base64Image);
emit(const CameraState.success());
} catch (e) {
emit(CameraState.error(e));
}
}
}
I haven't used the Mobile Scanner library before. Instead, I have used the qr_code_scanner dependency for QR codes and it has a reassemble method like this:
@override
void reassemble() {
super.reassemble();
if (Platform.isAndroid) {
controller!.pauseCamera();
} else if (Platform.isIOS) {
controller!.resumeCamera();
}
}
If this doesn't help, you can also pause and resume the preview manually when the switch is toggled, using the resumePreview() method you mentioned at the end.
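A minimal sketch of that manual approach (an assumption, not tested against your setup; it presumes your camera plugin version exposes pausePreview()/resumePreview()), applied to the Switch's onChanged callback:
// Pause the preview while the QR scanner is visible, resume it when coming back.
onChanged: (bool value) async {
  setState(() {
    qrModeEnabled = value;
  });
  if (value) {
    await _controller?.pausePreview();
  } else {
    await _controller?.resumePreview();
  }
},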

How do I get data from my firestore document as a variable and use that to direct the path my stream builder uses in flutter

The code below is what I am trying now. The page works and does everything I need, but now I need this database reference to use the loanuid, clientuid, and companyName to get to the right directory.
StreamBuilder<QuerySnapshot>(
stream: FirebaseFirestore.instance
.collection('prosperitybank')
.doc('OHViYK8Zz6XfKGJsSXRL')
.collection('Project Information')
.snapshots()
I need it from my collection('userCreationRequests').doc(loggedinuid) as shown in the picture. I cannot figure out how to do this without the StreamBuilders interfering; any help would be greatly appreciated. I tried following "How can you nest StreamBuilders in Flutter?" but it did not help, and I also tried looking at the documentation here: https://firebase.flutter.dev/docs/firestore/usage/.
Picture of Document I need data fields from
import 'package:cloud_firestore/cloud_firestore.dart';
import 'package:firebase_auth/firebase_auth.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:photoloanupdated/screens/mains/viewproperties.dart';
import 'package:provider/provider.dart';
class HomePage extends StatefulWidget {
const HomePage({Key? key}) : super(key: key);
@override
_HomePageState createState() => _HomePageState();
}
class _HomePageState extends State<HomePage> {
@override
Widget build(BuildContext context) {
final FirebaseAuth auth = FirebaseAuth.instance;
final user = auth.currentUser;
final uid = user?.uid;
var users = FirebaseFirestore.instance.collection('userCreationRequests');
var companyname = "";
return Scaffold(
appBar: AppBar(
title: Text(companyname),
),
body:
FutureBuilder<DocumentSnapshot>(
future: users.doc(uid).get(),
builder:
(BuildContext context, AsyncSnapshot<DocumentSnapshot> snapshot) {
if (snapshot.hasError) {
return Text("Something went wrong");
}
if (snapshot.hasData && !snapshot.data!.exists) {
return Text("Document does not exist");
}
if (snapshot.connectionState == ConnectionState.done) {
Map<String, dynamic> data =
snapshot.data!.data() as Map<String, dynamic>;
return Text("Full Name: ${data['companyName']} ${data['last_name']}");
}
return Text("loading");
},
);
StreamBuilder<QuerySnapshot>(
stream: FirebaseFirestore.instance
.collection('prosperitybank')
.doc('OHViYK8Zz6XfKGJsSXRL')
.collection('Project Information')
.snapshots(), //key spot fV or email fix
builder: (context, snapshot) {
if (snapshot.hasData) {
return ListView.builder(
itemCount: snapshot.data?.docs.length,
itemBuilder: (BuildContext context, int index) {
QueryDocumentSnapshot<Object?>? documentSnapshot =
snapshot.data?.docs[index];
//for date/time DateTime mydateTime = documentSnapshot['created'].toDate();
return InkWell(
onTap: () {
Navigator.of(context)
.push(
MaterialPageRoute(
builder: (context) => ViewProperties(documentSnapshot,
snapshot.data?.docs[index].reference)),
)
.then((value) {
setState(() {});
});
},
child: Card(
child: Container(
child: Padding(
padding: const EdgeInsets.all(15.0),
child: Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
Text(
"${documentSnapshot!['address']}",
style: TextStyle(
fontSize: 24.0,
fontFamily: "lato",
fontWeight: FontWeight.bold,
color: Colors.black),
),
Container(
alignment: Alignment.centerRight,
child: Text(
"${documentSnapshot!['projectcomplete'].toString() + "% Complete"}",
// for mydateTime.toString(),
style: TextStyle(
fontSize: 17.0,
fontFamily: "lato",
color: Colors.black87),
),
)
],
),
),
),
),
);
},
);
} else {
return Center(
child: Text("Loading..."),
);
}
},
),
);
}
}
String? uuid;
// `future` is assumed to be the Firestore query/collection reference you are
// already using (the original snippet does not show how it is created).
Future<List<Map<String, dynamic>>> _onQuery() {
  Future<List<Map<String, dynamic>>> res;
  if (uuid != null) {
    res = future.get().then((v) => v.docs
        .map((e) => e.data())
        .where((e) => e['uuid'].toLowerCase().contains(uuid!))
        .toList());
  } else {
    res = future.get().then((v) => v.docs.map((e) => e.data()).toList());
  }
  setState(() {});
  return res;
}
Now you can use _onQuery as the source for your builder (for example as the future of a FutureBuilder, or wrap it if you need a stream).
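Regarding the original question (using a field from the user's document to direct the StreamBuilder's path), here is a minimal sketch of the usual nesting: resolve the user document first with a FutureBuilder, then build the StreamBuilder from the values it contains. The field names companyName and loanuid come from the question; the exact path is an assumption.
FutureBuilder<DocumentSnapshot>(
  future: FirebaseFirestore.instance
      .collection('userCreationRequests')
      .doc(uid)
      .get(),
  builder: (context, userSnap) {
    if (!userSnap.hasData) return const Text("loading");
    final data = userSnap.data!.data() as Map<String, dynamic>;
    final String companyName = data['companyName'];
    return StreamBuilder<QuerySnapshot>(
      stream: FirebaseFirestore.instance
          .collection(companyName)            // path built from the document's fields
          .doc(data['loanuid'])               // assumed field name from the question
          .collection('Project Information')
          .snapshots(),
      builder: (context, snapshot) {
        if (!snapshot.hasData) return const Center(child: Text("Loading..."));
        // ...build the ListView from snapshot.data!.docs exactly as before
        return const SizedBox.shrink();
      },
    );
  },
)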

unnecessary container but I need this container? flutter/Dart

I am working on an app where I get data from the ESP32 and display it on a simple page. Here is my code for my sensorpage:
import 'dart:async';
import 'dart:convert' show utf8;
import 'package:flutter/material.dart';
import 'package:flutter_blue/flutter_blue.dart';
class sensorpage extends StatefulWidget {
const sensorpage({Key? key, required this.device}) : super(key: key);
final BluetoothDevice device;
@override
_sensorpageState createState() => _sensorpageState();
}
class _sensorpageState extends State<sensorpage> {
final String SERVICE_UUID = "edb91e04-3e19-11ec-9bbc-0242ac130002";
final String CHARACTERISTIC_UUID = "edb920c0-3e19-11ec-9bbc-0242ac130002";
late bool isReady;
//String val1 = "";
//int pot1 = 0;
FlutterBlue flutterBlue = FlutterBlue.instance;
//late StreamSubscription<ScanResult> scanSubScription;
//late BluetoothDevice targetDevice;
late Stream<List<int>> stream;
@override
void initState() {
super.initState();
isReady = false;
connectToDevice();
}
connectToDevice() async {
if (widget.device == null) {
_Pop();
return;
}
Timer(const Duration(seconds: 15), () {
if (!isReady) {
disconnectFromDevice();
_Pop();
}
});
await widget.device.connect();
discoverServices();
}
disconnectFromDevice() {
if (widget.device == null) {
_Pop();
return;
}
widget.device.disconnect();
}
discoverServices() async {
if (widget.device == null) {
_Pop();
return;
}
List<BluetoothService> services = await widget.device.discoverServices();
services.forEach((service) {
if (service.uuid.toString() == SERVICE_UUID) {
service.characteristics.forEach((characteristic) {
if (characteristic.uuid.toString() == CHARACTERISTIC_UUID) {
characteristic.setNotifyValue(!characteristic.isNotifying);
stream = characteristic.value;
setState(() {
isReady = true;
});
}
});
}
});
if (!isReady) {
_Pop();
}
}
Future<bool> _onWillPop() async {
bool shouldPop = false;
await showDialog(
context: context,
builder: (context) =>
AlertDialog(
title: const Text('Are you sure?'),
content: const Text('Do you want to disconnect device and go back?'),
actions: <Widget>[
ElevatedButton(
onPressed: () {
// shouldPop is already false
},
child: const Text('No')),
ElevatedButton(
onPressed: () async {
await disconnectFromDevice();
Navigator.of(context).pop();
shouldPop = true;
},
child: const Text('Yes')),
],
));
return shouldPop;
}
_Pop() {
Navigator.of(context).pop(true);
}
String _dataParser( List<int> dataFromDevice) {
return utf8.decode(dataFromDevice);
}
@override
Widget build(BuildContext context) {
return WillPopScope(
onWillPop: _onWillPop,
child: Scaffold(
appBar: AppBar(
title: const Text('Test'),
),
body: Container(
child: !isReady
? const Center(
child: Text(
"Waiting...",
style: TextStyle(fontSize: 24, color: Colors.red),
),
)
: Container(
child: StreamBuilder<List<int>>(
stream: stream,
builder: (BuildContext context,
AsyncSnapshot<List<int>> snapshot) {
if (snapshot.hasError){
return Text('Error: ${snapshot.error}');
}
if (snapshot.connectionState == ConnectionState.active) {
var currentValue = _dataParser (snapshot.data!);
//val1 = currentValue.split(',')[0];
//pot1 = int.parse(val1);
return Center(
child: Column(
mainAxisAlignment: MainAxisAlignment.center,
children: <Widget>[
const Text('Current value',
style: TextStyle(fontSize: 14)),
Text('${currentValue} jawool',
style: const TextStyle(
fontWeight: FontWeight.bold,
fontSize: 24))
])
);
} else {
return const Text('Check the stream');
}
},
)),
)
));
}
}
My problem is that the second Container, where I display my data, is flagged as unnecessary, but I don't know why.
I assume you mean this piece of code?
body: Container(
child: !isReady
? const Center(
child: Text(
"Waiting...",
style: TextStyle(fontSize: 24, color: Colors.red),
),
)
: Container(
child: StreamBuilder<List<int>>(
If isReady is true, you return Container(child: Container(child: StreamBuilder));
You should change it to this and it should be fine:
body: Container(
child: !isReady
? const Center(
child: Text(
"Waiting...",
style: TextStyle(fontSize: 24, color: Colors.red),
),
)
: StreamBuilder<List<int>>(
The first Container is just wrapping the content based on the boolean and you don't need it. According to the flutter team:
Wrapping a widget in Container with no other parameters set has no effect and makes code needlessly more complex
So instead you can directly do:
body: !isReady ? buildSomething() : Container(....more code);
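Putting it together, the body could look like this: a sketch based on your posted code with only the redundant Container wrappers removed.
body: !isReady
    ? const Center(
        child: Text(
          "Waiting...",
          style: TextStyle(fontSize: 24, color: Colors.red),
        ),
      )
    : StreamBuilder<List<int>>(
        stream: stream,
        builder: (BuildContext context, AsyncSnapshot<List<int>> snapshot) {
          if (snapshot.hasError) {
            return Text('Error: ${snapshot.error}');
          }
          if (snapshot.connectionState != ConnectionState.active) {
            return const Text('Check the stream');
          }
          // Decode the raw bytes coming from the ESP32 characteristic.
          final currentValue = _dataParser(snapshot.data!);
          return Center(
            child: Text(currentValue, style: const TextStyle(fontSize: 24)),
          );
        },
      ),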

How to get the audio file/URL total duration using audio_service package?

I am using the just_audio and audio_service packages to implement a playlist music player. I am facing an issue getting the total duration of each audio file/URL: I don't pass a duration to the MediaItem instance, so how can I get the duration without passing it to MediaItem?
I either set duration to null or don't initialize it at all:
MediaItem(
id: 'https://s3.amazonaws.com/scifri-episodes/scifri20181123-episode.mp3',
album: "Science Friday",
title: "A Salute To Head-Scratching Science",
artist: "Science Friday and WNYC Studios",
// duration: const Duration(milliseconds: 5739820),
// or
duration: null
),
),
Example:
import 'dart:async';
import 'package:audio_service/audio_service.dart';
import 'package:audio_service_example/common.dart';
import 'package:audio_session/audio_session.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:just_audio/just_audio.dart';
import 'package:rxdart/rxdart.dart';
// You might want to provide this using dependency injection rather than a
// global variable.
late AudioPlayerHandler _audioHandler;
Future<void> main() async {
_audioHandler = await AudioService.init(
builder: () => AudioPlayerHandlerImpl(),
config: const AudioServiceConfig(
androidNotificationChannelId: 'com.ryanheise.myapp.channel.audio',
androidNotificationChannelName: 'Audio playback',
androidNotificationOngoing: true,
),
);
runApp(MyApp());
}
/// The app widget
class MyApp extends StatelessWidget {
@override
Widget build(BuildContext context) {
return MaterialApp(
debugShowCheckedModeBanner: false,
title: 'Audio Service Demo',
theme: ThemeData(primarySwatch: Colors.blue),
home: MainScreen3(),
);
}
}
/// The main screen.
class MainScreen3 extends StatelessWidget {
Stream<Duration> get _bufferedPositionStream => _audioHandler.playbackState
.map((state) => state.bufferedPosition)
.distinct();
Stream<Duration?> get _durationStream =>
_audioHandler.mediaItem.map((item) => item?.duration).distinct();
Stream<PositionData> get _positionDataStream =>
Rx.combineLatest3<Duration, Duration, Duration?, PositionData>(
AudioService.position,
_bufferedPositionStream,
_durationStream,
(position, bufferedPosition, duration) => PositionData(
position, bufferedPosition, duration ?? Duration.zero));
@override
Widget build(BuildContext context) {
return Scaffold(
body: SafeArea(
child: Column(
crossAxisAlignment: CrossAxisAlignment.center,
mainAxisAlignment: MainAxisAlignment.center,
children: [
// MediaItem display
Expanded(
child: StreamBuilder<MediaItem?>(
stream: _audioHandler.mediaItem,
builder: (context, snapshot) {
final mediaItem = snapshot.data;
if (mediaItem == null) return const SizedBox();
return Column(
crossAxisAlignment: CrossAxisAlignment.center,
children: [
if (mediaItem.artUri != null)
Expanded(
child: Padding(
padding: const EdgeInsets.all(8.0),
child: Center(
child: Image.network('${mediaItem.artUri!}'),
),
),
),
Text(mediaItem.album ?? '',
style: Theme.of(context).textTheme.headline6),
Text(mediaItem.title),
],
);
},
),
),
// Playback controls
ControlButtons(_audioHandler),
// A seek bar.
StreamBuilder<PositionData>(
stream: _positionDataStream,
builder: (context, snapshot) {
final positionData = snapshot.data ??
PositionData(Duration.zero, Duration.zero, Duration.zero);
return SeekBar(
duration: positionData.duration,
position: positionData.position,
onChangeEnd: (newPosition) {
_audioHandler.seek(newPosition);
},
);
},
),
const SizedBox(height: 8.0),
// Repeat/shuffle controls
Row(
children: [
StreamBuilder<AudioServiceRepeatMode>(
stream: _audioHandler.playbackState
.map((state) => state.repeatMode)
.distinct(),
builder: (context, snapshot) {
final repeatMode =
snapshot.data ?? AudioServiceRepeatMode.none;
const icons = [
Icon(Icons.repeat, color: Colors.grey),
Icon(Icons.repeat, color: Colors.orange),
Icon(Icons.repeat_one, color: Colors.orange),
];
const cycleModes = [
AudioServiceRepeatMode.none,
AudioServiceRepeatMode.all,
AudioServiceRepeatMode.one,
];
final index = cycleModes.indexOf(repeatMode);
return IconButton(
icon: icons[index],
onPressed: () {
_audioHandler.setRepeatMode(cycleModes[
(cycleModes.indexOf(repeatMode) + 1) %
cycleModes.length]);
},
);
},
),
Expanded(
child: Text(
"Playlist",
style: Theme.of(context).textTheme.headline6,
textAlign: TextAlign.center,
),
),
StreamBuilder<bool>(
stream: _audioHandler.playbackState
.map((state) =>
state.shuffleMode == AudioServiceShuffleMode.all)
.distinct(),
builder: (context, snapshot) {
final shuffleModeEnabled = snapshot.data ?? false;
return IconButton(
icon: shuffleModeEnabled
? const Icon(Icons.shuffle, color: Colors.orange)
: const Icon(Icons.shuffle, color: Colors.grey),
onPressed: () async {
final enable = !shuffleModeEnabled;
await _audioHandler.setShuffleMode(enable
? AudioServiceShuffleMode.all
: AudioServiceShuffleMode.none);
},
);
},
),
],
),
// Playlist
Container(
height: 240.0,
child: StreamBuilder<QueueState>(
stream: _audioHandler.queueState,
builder: (context, snapshot) {
final queueState = snapshot.data ?? QueueState.empty;
final queue = queueState.queue;
return ReorderableListView(
onReorder: (int oldIndex, int newIndex) {
if (oldIndex < newIndex) newIndex--;
_audioHandler.moveQueueItem(oldIndex, newIndex);
},
children: [
for (var i = 0; i < queue.length; i++)
Dismissible(
key: ValueKey(queue[i].id),
background: Container(
color: Colors.redAccent,
alignment: Alignment.centerRight,
child: const Padding(
padding: EdgeInsets.only(right: 8.0),
child: Icon(Icons.delete, color: Colors.white),
),
),
onDismissed: (dismissDirection) {
_audioHandler.removeQueueItemAt(i);
},
child: Material(
color: i == queueState.queueIndex
? Colors.grey.shade300
: null,
child: ListTile(
title: Text(queue[i].title),
onTap: () => _audioHandler.skipToQueueItem(i),
),
),
),
],
);
},
),
),
],
),
),
);
}
}
class ControlButtons extends StatelessWidget {
final AudioPlayerHandler audioHandler;
ControlButtons(this.audioHandler);
@override
Widget build(BuildContext context) {
return Row(
mainAxisSize: MainAxisSize.min,
children: [
IconButton(
icon: const Icon(Icons.volume_up),
onPressed: () {
showSliderDialog(
context: context,
title: "Adjust volume",
divisions: 10,
min: 0.0,
max: 1.0,
value: audioHandler.volume.value,
stream: audioHandler.volume,
onChanged: audioHandler.setVolume,
);
},
),
StreamBuilder<QueueState>(
stream: audioHandler.queueState,
builder: (context, snapshot) {
final queueState = snapshot.data ?? QueueState.empty;
return IconButton(
icon: const Icon(Icons.skip_previous),
onPressed:
queueState.hasPrevious ? audioHandler.skipToPrevious : null,
);
},
),
StreamBuilder<PlaybackState>(
stream: audioHandler.playbackState,
builder: (context, snapshot) {
final playbackState = snapshot.data;
final processingState = playbackState?.processingState;
final playing = playbackState?.playing;
if (processingState == AudioProcessingState.loading ||
processingState == AudioProcessingState.buffering) {
return Container(
margin: const EdgeInsets.all(8.0),
width: 64.0,
height: 64.0,
child: const CircularProgressIndicator(),
);
} else if (playing != true) {
return IconButton(
icon: const Icon(Icons.play_arrow),
iconSize: 64.0,
onPressed: audioHandler.play,
);
} else {
return IconButton(
icon: const Icon(Icons.pause),
iconSize: 64.0,
onPressed: audioHandler.pause,
);
}
},
),
StreamBuilder<QueueState>(
stream: audioHandler.queueState,
builder: (context, snapshot) {
final queueState = snapshot.data ?? QueueState.empty;
return IconButton(
icon: const Icon(Icons.skip_next),
onPressed: queueState.hasNext ? audioHandler.skipToNext : null,
);
},
),
StreamBuilder<double>(
stream: audioHandler.speed,
builder: (context, snapshot) => IconButton(
icon: Text("${snapshot.data?.toStringAsFixed(1)}x",
style: const TextStyle(fontWeight: FontWeight.bold)),
onPressed: () {
showSliderDialog(
context: context,
title: "Adjust speed",
divisions: 10,
min: 0.5,
max: 1.5,
value: audioHandler.speed.value,
stream: audioHandler.speed,
onChanged: audioHandler.setSpeed,
);
},
),
),
],
);
}
}
class QueueState {
static final QueueState empty =
const QueueState([], 0, [], AudioServiceRepeatMode.none);
final List<MediaItem> queue;
final int? queueIndex;
final List<int>? shuffleIndices;
final AudioServiceRepeatMode repeatMode;
const QueueState(
this.queue, this.queueIndex, this.shuffleIndices, this.repeatMode);
bool get hasPrevious =>
repeatMode != AudioServiceRepeatMode.none || (queueIndex ?? 0) > 0;
bool get hasNext =>
repeatMode != AudioServiceRepeatMode.none ||
(queueIndex ?? 0) + 1 < queue.length;
List<int> get indices =>
shuffleIndices ?? List.generate(queue.length, (i) => i);
}
/// An [AudioHandler] for playing a list of podcast episodes.
///
/// This class exposes the interface and not the implementation.
abstract class AudioPlayerHandler implements AudioHandler {
Stream<QueueState> get queueState;
Future<void> moveQueueItem(int currentIndex, int newIndex);
ValueStream<double> get volume;
Future<void> setVolume(double volume);
ValueStream<double> get speed;
}
/// The implementation of [AudioPlayerHandler].
///
/// This handler is backed by a just_audio player. The player's effective
/// sequence is mapped onto the handler's queue, and the player's state is
/// mapped onto the handler's state.
class AudioPlayerHandlerImpl extends BaseAudioHandler
with SeekHandler
implements AudioPlayerHandler {
// ignore: close_sinks
final BehaviorSubject<List<MediaItem>> _recentSubject =
BehaviorSubject.seeded(<MediaItem>[]);
final _mediaLibrary = MediaLibrary();
final _player = AudioPlayer();
final _playlist = ConcatenatingAudioSource(children: []);
@override
final BehaviorSubject<double> volume = BehaviorSubject.seeded(1.0);
@override
final BehaviorSubject<double> speed = BehaviorSubject.seeded(1.0);
final _mediaItemExpando = Expando<MediaItem>();
/// A stream of the current effective sequence from just_audio.
Stream<List<IndexedAudioSource>> get _effectiveSequence => Rx.combineLatest3<
List<IndexedAudioSource>?,
List<int>?,
bool,
List<IndexedAudioSource>?>(_player.sequenceStream,
_player.shuffleIndicesStream, _player.shuffleModeEnabledStream,
(sequence, shuffleIndices, shuffleModeEnabled) {
if (sequence == null) return [];
if (!shuffleModeEnabled) return sequence;
if (shuffleIndices == null) return null;
if (shuffleIndices.length != sequence.length) return null;
return shuffleIndices.map((i) => sequence[i]).toList();
}).whereType<List<IndexedAudioSource>>();
/// Computes the effective queue index taking shuffle mode into account.
int? getQueueIndex(
int? currentIndex, bool shuffleModeEnabled, List<int>? shuffleIndices) {
final effectiveIndices = _player.effectiveIndices ?? [];
final shuffleIndicesInv = List.filled(effectiveIndices.length, 0);
for (var i = 0; i < effectiveIndices.length; i++) {
shuffleIndicesInv[effectiveIndices[i]] = i;
}
return (shuffleModeEnabled &&
((currentIndex ?? 0) < shuffleIndicesInv.length))
? shuffleIndicesInv[currentIndex ?? 0]
: currentIndex;
}
/// A stream reporting the combined state of the current queue and the current
/// media item within that queue.
@override
Stream<QueueState> get queueState =>
Rx.combineLatest3<List<MediaItem>, PlaybackState, List<int>, QueueState>(
queue,
playbackState,
_player.shuffleIndicesStream.whereType<List<int>>(),
(queue, playbackState, shuffleIndices) => QueueState(
queue,
playbackState.queueIndex,
playbackState.shuffleMode == AudioServiceShuffleMode.all
? shuffleIndices
: null,
playbackState.repeatMode,
)).where((state) =>
state.shuffleIndices == null ||
state.queue.length == state.shuffleIndices!.length);
@override
Future<void> setShuffleMode(AudioServiceShuffleMode mode) async {
final enabled = mode == AudioServiceShuffleMode.all;
if (enabled) {
await _player.shuffle();
}
playbackState.add(playbackState.value.copyWith(shuffleMode: mode));
await _player.setShuffleModeEnabled(enabled);
}
@override
Future<void> setRepeatMode(AudioServiceRepeatMode repeatMode) async {
playbackState.add(playbackState.value.copyWith(repeatMode: repeatMode));
await _player.setLoopMode(LoopMode.values[repeatMode.index]);
}
@override
Future<void> setSpeed(double speed) async {
this.speed.add(speed);
await _player.setSpeed(speed);
}
@override
Future<void> setVolume(double volume) async {
this.volume.add(volume);
await _player.setVolume(volume);
}
AudioPlayerHandlerImpl() {
_init();
}
Future<void> _init() async {
final session = await AudioSession.instance;
await session.configure(const AudioSessionConfiguration.speech());
// Broadcast speed changes. Debounce so that we don't flood the notification
// with updates.
speed.debounceTime(const Duration(milliseconds: 250)).listen((speed) {
playbackState.add(playbackState.value.copyWith(speed: speed));
});
// Load and broadcast the initial queue
await updateQueue(_mediaLibrary.items[MediaLibrary.albumsRootId]!);
// For Android 11, record the most recent item so it can be resumed.
mediaItem
.whereType<MediaItem>()
.listen((item) => _recentSubject.add([item]));
// Broadcast media item changes.
Rx.combineLatest4<int?, List<MediaItem>, bool, List<int>?, MediaItem?>(
_player.currentIndexStream,
queue,
_player.shuffleModeEnabledStream,
_player.shuffleIndicesStream,
(index, queue, shuffleModeEnabled, shuffleIndices) {
final queueIndex =
getQueueIndex(index, shuffleModeEnabled, shuffleIndices);
return (queueIndex != null && queueIndex < queue.length)
? queue[queueIndex]
: null;
}).whereType<MediaItem>().distinct().listen(mediaItem.add);
// Propagate all events from the audio player to AudioService clients.
_player.playbackEventStream.listen(_broadcastState);
_player.shuffleModeEnabledStream
.listen((enabled) => _broadcastState(_player.playbackEvent));
// In this example, the service stops when reaching the end.
_player.processingStateStream.listen((state) {
if (state == ProcessingState.completed) {
stop();
_player.seek(Duration.zero, index: 0);
}
});
// Broadcast the current queue.
_effectiveSequence
.map((sequence) =>
sequence.map((source) => _mediaItemExpando[source]!).toList())
.pipe(queue);
// Load the playlist.
_playlist.addAll(queue.value.map(_itemToSource).toList());
await _player.setAudioSource(_playlist);
}
AudioSource _itemToSource(MediaItem mediaItem) {
final audioSource = AudioSource.uri(Uri.parse(mediaItem.id));
_mediaItemExpando[audioSource] = mediaItem;
return audioSource;
}
List<AudioSource> _itemsToSources(List<MediaItem> mediaItems) =>
mediaItems.map(_itemToSource).toList();
@override
Future<List<MediaItem>> getChildren(String parentMediaId,
[Map<String, dynamic>? options]) async {
switch (parentMediaId) {
case AudioService.recentRootId:
// When the user resumes a media session, tell the system what the most
// recently played item was.
return _recentSubject.value;
default:
// Allow client to browse the media library.
return _mediaLibrary.items[parentMediaId]!;
}
}
@override
ValueStream<Map<String, dynamic>> subscribeToChildren(String parentMediaId) {
switch (parentMediaId) {
case AudioService.recentRootId:
final stream = _recentSubject.map((_) => <String, dynamic>{});
return _recentSubject.hasValue
? stream.shareValueSeeded(<String, dynamic>{})
: stream.shareValue();
default:
return Stream.value(_mediaLibrary.items[parentMediaId])
.map((_) => <String, dynamic>{})
.shareValue();
}
}
@override
Future<void> addQueueItem(MediaItem mediaItem) async {
await _playlist.add(_itemToSource(mediaItem));
}
@override
Future<void> addQueueItems(List<MediaItem> mediaItems) async {
await _playlist.addAll(_itemsToSources(mediaItems));
}
@override
Future<void> insertQueueItem(int index, MediaItem mediaItem) async {
await _playlist.insert(index, _itemToSource(mediaItem));
}
@override
Future<void> updateQueue(List<MediaItem> newQueue) async {
await _playlist.clear();
await _playlist.addAll(_itemsToSources(newQueue));
}
@override
Future<void> updateMediaItem(MediaItem mediaItem) async {
final index = queue.value.indexWhere((item) => item.id == mediaItem.id);
_mediaItemExpando[_player.sequence![index]] = mediaItem;
}
@override
Future<void> removeQueueItem(MediaItem mediaItem) async {
final index = queue.value.indexOf(mediaItem);
await _playlist.removeAt(index);
}
@override
Future<void> moveQueueItem(int currentIndex, int newIndex) async {
await _playlist.move(currentIndex, newIndex);
}
@override
Future<void> skipToNext() => _player.seekToNext();
@override
Future<void> skipToPrevious() => _player.seekToPrevious();
@override
Future<void> skipToQueueItem(int index) async {
if (index < 0 || index >= _playlist.children.length) return;
// This jumps to the beginning of the queue item at [index].
_player.seek(Duration.zero,
index: _player.shuffleModeEnabled
? _player.shuffleIndices![index]
: index);
}
@override
Future<void> play() => _player.play();
@override
Future<void> pause() => _player.pause();
@override
Future<void> seek(Duration position) => _player.seek(position);
@override
Future<void> stop() async {
await _player.stop();
await playbackState.firstWhere(
(state) => state.processingState == AudioProcessingState.idle);
}
/// Broadcasts the current state to all clients.
void _broadcastState(PlaybackEvent event) {
final playing = _player.playing;
final queueIndex = getQueueIndex(
event.currentIndex, _player.shuffleModeEnabled, _player.shuffleIndices);
playbackState.add(playbackState.value.copyWith(
controls: [
MediaControl.skipToPrevious,
if (playing) MediaControl.pause else MediaControl.play,
MediaControl.stop,
MediaControl.skipToNext,
],
systemActions: const {
MediaAction.seek,
MediaAction.seekForward,
MediaAction.seekBackward,
},
androidCompactActionIndices: const [0, 1, 3],
processingState: const {
ProcessingState.idle: AudioProcessingState.idle,
ProcessingState.loading: AudioProcessingState.loading,
ProcessingState.buffering: AudioProcessingState.buffering,
ProcessingState.ready: AudioProcessingState.ready,
ProcessingState.completed: AudioProcessingState.completed,
}[_player.processingState]!,
playing: playing,
updatePosition: _player.position,
bufferedPosition: _player.bufferedPosition,
speed: _player.speed,
queueIndex: queueIndex,
));
}
}
/// Provides access to a library of media items. In your app, this could come
/// from a database or web service.
class MediaLibrary {
static const albumsRootId = 'albums';
final items = <String, List<MediaItem>>{
AudioService.browsableRootId: const [
MediaItem(
id: albumsRootId,
title: "Albums",
playable: false,
),
],
albumsRootId: [
MediaItem(
id: 'https://s3.amazonaws.com/scifri-episodes/scifri20181123-episode.mp3',
album: "Science Friday",
title: "A Salute To Head-Scratching Science",
artist: "Science Friday and WNYC Studios",
duration: const Duration(milliseconds: 5739820),
artUri: Uri.parse(
'https://media.wnyc.org/i/1400/1400/l/80/1/ScienceFriday_WNYCStudios_1400.jpg'),
),
MediaItem(
id: 'https://s3.amazonaws.com/scifri-segments/scifri201711241.mp3',
album: "Science Friday",
title: "From Cat Rheology To Operatic Incompetence",
artist: "Science Friday and WNYC Studios",
duration: const Duration(milliseconds: 2856950),
artUri: Uri.parse(
'https://media.wnyc.org/i/1400/1400/l/80/1/ScienceFriday_WNYCStudios_1400.jpg'),
),
MediaItem(
id: 'https://s3.amazonaws.com/scifri-segments/scifri202011274.mp3',
album: "Science Friday",
title: "Laugh Along At Home With The Ig Nobel Awards",
artist: "Science Friday and WNYC Studios",
duration: const Duration(milliseconds: 1791883),
artUri: Uri.parse(
'https://media.wnyc.org/i/1400/1400/l/80/1/ScienceFriday_WNYCStudios_1400.jpg'),
),
],
};
}
When an AudioSource is supplied to AudioPlayer, the player reports that source's total duration through its durationStream.
You can simply listen to this durationStream and copy the mediaItem with the duration received from it before you sink it into the mediaItem stream of your AudioHandler.
In the example, you can change the part of the _init method where the MediaItem is sunk into the mediaItem stream, like below.
Before:
// Broadcast media item changes.
Rx.combineLatest4<int?, List<MediaItem>, bool, List<int>?, MediaItem?>(
_player.currentIndexStream,
queue,
_player.shuffleModeEnabledStream,
_player.shuffleIndicesStream,
(index, queue, shuffleModeEnabled, shuffleIndices) {
final queueIndex =
getQueueIndex(index, shuffleModeEnabled, shuffleIndices);
return (queueIndex != null && queueIndex < queue.length)
? queue[queueIndex]
: null;
}).whereType<MediaItem>().distinct().listen(mediaItem.add);
After:
// Broadcast media item changes.
Rx.combineLatest5<int?, List<MediaItem>, bool, List<int>?, Duration?,
MediaItem?>(
_player.currentIndexStream,
queue,
_player.shuffleModeEnabledStream,
_player.shuffleIndicesStream,
_player.durationStream, // <- add listening to durationStream here
(index, queue, shuffleModeEnabled, shuffleIndices, duration) {
final queueIndex =
getQueueIndex(index, shuffleModeEnabled, shuffleIndices);
return (queueIndex != null && queueIndex < queue.length)
? queue[queueIndex].copyWith(duration: duration) // <- sink mediaItem provided with duration
: null;
}).whereType<MediaItem>().distinct().listen(mediaItem.add);
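As an alternative sketch (not from the original answer), you can also listen to durationStream on its own and patch the currently broadcast item once a real duration arrives; mediaItem here is the BehaviorSubject exposed by BaseAudioHandler, and valueOrNull assumes rxdart 0.27+.
// Update the current MediaItem once just_audio knows the source's duration.
_player.durationStream.listen((duration) {
  final current = mediaItem.valueOrNull;
  if (current != null && duration != null) {
    mediaItem.add(current.copyWith(duration: duration));
  }
});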

Accessing a stream inside a Nested StreamBuilder in Flutter

I am having an issue calling a StreamBuilder inside another StreamBuilder which is located in the onPressed field of a RaisedButton widget. Basically, I am trying to access a stream (after adding data to it) inside the inner StreamBuilder, but execution never reaches this part of the code:
Widget submitButton(BuildContext ctx, AccountBloc bloc){
return StreamBuilder(
stream: bloc.submitValid,
builder: (context, snapshot){
return SizedBox(
width: double.infinity,
height: 60,
child: RaisedButton(
color: HexColor("0072b1"),
child: Text("Sign in", style: TextStyle(
fontWeight: FontWeight.bold,
fontSize: 20,
color: Colors.white
),
),
onPressed:() {
if(snapshot.hasData){
bloc.loginUser();
// DEBUGGING
print(bloc.isAuthenticated.listen((val) { print("debug isAuthenticated: $val"); }));
StreamBuilder(
stream: bloc.isAuthenticated,
builder: (ctx, snapshotA){
print("here");
if(!snapshotA.hasData){
return Text("loading....");
}
print("***${snapshotA.data}****");
if(snapshotA.data == false){
return navSignUpScreen(ctx);
}else if (snapshotA.data == false){
print("here");
return navHomeScreen(ctx);
}
return navSignUpScreen(ctx);
}
);
}
},
),
);
}
);
}
The BLOC part is as follows:
final _isAuthenticated = BehaviorSubject<bool>();
Stream<bool> get isAuthenticated => _isAuthenticated.stream;
void loginUser() async {
var inMemory = InMemoryProvider();
inMemory.newInMemoryProvider();
UserModel userResult = await _repository.loginUser(_email.value, _password.value);
if(userResult.status == 200){
// save TOKEN
UserModel userModel = UserModel.fromJsonToModel(userResult.data);
bool res = await inMemory.store(userModel.id, userModel.token);
_isAuthenticated.sink.add(res);
}
_userLoginResponse.sink.add(userResult);
}
The navHomeScreen definition is as simple as this:
class HomeScreen extends StatefulWidget {
createState() {
return HomeState();
}
}
class HomeState extends State<HomeScreen> {
int _currentIndex = 0;
final List<Widget> _children = [
AllReportsScreen(), UserProfileScreen()
];
Widget build(context) {
return Scaffold(
body: _children[_currentIndex],
bottomNavigationBar: mainBottomNavigatorBar(),
);
}
Widget mainBottomNavigatorBar() {
return BottomNavigationBar(
type: BottomNavigationBarType.fixed,
onTap: onTabTapped,
currentIndex: _currentIndex,
items: [
BottomNavigationBarItem(
backgroundColor: Colors.black45,
icon: new Icon(Icons.note),
title: new Text('Reports', style: TextStyle(fontSize: 15.0)),
),
BottomNavigationBarItem(
backgroundColor: Colors.black45,
icon: new Icon(Icons.label_important),
title: new Text('Attention', style: TextStyle(fontSize: 15.0)),
),
BottomNavigationBarItem(
backgroundColor: Colors.black45,
icon: new Icon(Icons.person_pin),
title: new Text('Profile', style: TextStyle(fontSize: 15.0)),
),
],
);
}
void onTabTapped(int index) {
setState(() {
_currentIndex = index;
});
}
}
A StreamBuilder constructed inside onPressed is never inserted into the widget tree, so its builder never runs. Instead, use combineLatest from rxdart to combine the two streams, isAuthenticated and submitValid, into one stream and build from that:
class LoginState {
  final bool isAuthenticated;
  final bool submitValid;
  LoginState(this.isAuthenticated, this.submitValid);
}
final state$ = Rx.combineLatest2<bool, bool, LoginState>(
  bloc.isAuthenticated,
  bloc.submitValid,
  (isAuth, valid) => LoginState(isAuth, valid),
);
StreamBuilder<LoginState>(
stream: state$,
builder: (context, snapshot) {
final state = snapshot.data;
if (state == null) return ...;
if (!state.submitValid) return ...;
if (state.isAuthenticated) return ...;
}
)
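For the navigation itself, a widget returned from onPressed is never shown; a sketch of one common alternative (an assumption, not part of the answer; bloc and HomeScreen come from the question) is to listen to the stream and push a route when authentication succeeds:
// In the State of the login screen (requires dart:async for StreamSubscription).
late final StreamSubscription<bool> _authSub;

@override
void initState() {
  super.initState();
  _authSub = bloc.isAuthenticated.listen((isAuth) {
    if (isAuth) {
      Navigator.of(context).pushReplacement(
        MaterialPageRoute(builder: (_) => HomeScreen()),
      );
    }
  });
}

@override
void dispose() {
  _authSub.cancel();
  super.dispose();
}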