Audio seek not working with package Audioplayers - flutter

I need to make an audio-listening application and, as part of it, implement a seek bar. I can play audio and pause it, but when I seek, the audio starts again from the beginning.
I used Audioplayers(https://pub.dev/packages/audioplayers) package version : ^0.20.1 for that.
import 'package:audioplayers/audioplayers.dart';
import 'package:flutter/material.dart';
/// Screen that plays a single remote audio track with a play/pause
/// button and a seek slider.
class PlayAudio extends StatefulWidget {
  const PlayAudio({Key? key}) : super(key: key);

  // Fixed: the annotation is '@override', not '#override'.
  @override
  State<PlayAudio> createState() => _PlayAudioState();
}
class _PlayAudioState extends State<PlayAudio> {
  /// Whether the player is currently playing (mirrors onPlayerStateChanged).
  bool isPlaying = false;

  /// Total track length; zero until onDurationChanged fires.
  Duration duration = const Duration();

  /// Current playback position.
  Duration position = const Duration();

  final AudioPlayer audioPlayer = AudioPlayer();

  @override
  void initState() {
    super.initState();
    // Mirror the native player state into widget state so the UI reacts.
    audioPlayer.onPlayerStateChanged.listen((event) {
      setState(() {
        isPlaying = event == PlayerState.PLAYING;
      });
    });
    // The total duration is only known after the source is loaded.
    audioPlayer.onDurationChanged.listen((event) {
      setState(() {
        duration = event;
      });
    });
    audioPlayer.onAudioPositionChanged.listen((event) {
      setState(() {
        position = event;
      });
    });
  }

  @override
  void dispose() {
    // Release the native player; without this it keeps playing after the
    // widget is removed.
    audioPlayer.dispose();
    super.dispose();
  }

  @override
  Widget build(BuildContext context) {
    // Slider upper bound: the real duration once known, with a 1-second
    // floor so Slider's `value <= max` assertion cannot fire at startup.
    final maxSeconds =
        duration.inSeconds > 0 ? duration.inSeconds.toDouble() : 1.0;
    return Scaffold(
      body: Center(
        child: Column(
          mainAxisAlignment: MainAxisAlignment.center,
          children: [
            InkWell(
              onTap: () async {
                if (isPlaying) {
                  await audioPlayer.pause();
                } else if (position > Duration.zero) {
                  // A source is already loaded and merely paused; resume()
                  // continues from the current position instead of
                  // restarting the track (the reported "starts again" bug).
                  await audioPlayer.resume();
                } else {
                  const url =
                      "https://www.learningcontainer.com/wp-content/uploads/2020/02/Kalimba.mp3";
                  await audioPlayer.play(url);
                }
              },
              child: Container(
                color: Colors.red,
                child: const Padding(
                  padding: EdgeInsets.all(8.0),
                  child: Text("play Audio"),
                ),
              ),
            ),
            Slider.adaptive(
              min: 0.0,
              // Was hard-coded to 120, which both clipped longer tracks and
              // triggered the `value <= max` assertion once the position
              // passed two minutes. Use the real track duration instead.
              max: maxSeconds,
              value: position.inSeconds.toDouble().clamp(0.0, maxSeconds),
              onChanged: (value1) async {
                await audioPlayer.seek(Duration(seconds: value1.toInt()));
              },
            ),
            Text(formatTime(position)),
            Text(formatTime(duration - position)),
          ],
        ),
      ),
    );
  }

  /// Formats [duration] as "mm:ss", or "hh:mm:ss" once it spans an hour.
  String formatTime(Duration duration) {
    String twoDigits(int n) => n.toString().padLeft(2, "0");
    final hours = twoDigits(duration.inHours);
    final minutes = twoDigits(duration.inMinutes.remainder(60));
    final seconds = twoDigits(duration.inSeconds.remainder(60));
    return [if (duration.inHours > 0) hours, minutes, seconds].join(":");
  }
}
When I checked it on the iOS simulator it worked fine, but on a real Android device it shows the problem.

Related

Jerky video playing in my Flutter app for Windows

In my Flutter project for Windows, I want to be able to read locally at least 4 videos simultaneously (up to 1920 x 1080, up to 60 fps).
The videos can be of different formats (mp4, wmv, ...), different sizes and different framerates.
Using dart_vlc, I created a test project with 4 video players:
void main() {
  // dart_vlc must be initialised before any Player is constructed.
  DartVLC.initialize();
  runApp(const DartVLCExample());
}
/// Root widget of the dart_vlc demo: a MaterialApp hosting [PrimaryScreen].
class DartVLCExample extends StatelessWidget {
  const DartVLCExample({Key? key}) : super(key: key);

  // Fixed: '@override', not '#override'.
  @override
  Widget build(BuildContext context) {
    return MaterialApp(
      home: Scaffold(
        appBar: AppBar(
          title: const Text('package:dart_vlc'),
          centerTitle: true,
        ),
        body: const PrimaryScreen(),
      ),
    );
  }
}
/// Lays out four independent video players side by side.
class PrimaryScreen extends StatelessWidget {
  const PrimaryScreen({Key? key}) : super(key: key);

  // Fixed: '@override', not '#override'.
  @override
  Widget build(BuildContext context) {
    return Row(
      children: const [
        Expanded(child: VideoPlayer(index: 1)),
        Expanded(child: VideoPlayer(index: 2)),
        Expanded(child: VideoPlayer(index: 3)),
        Expanded(child: VideoPlayer(index: 4)),
      ],
    );
  }
}
/// One of the side-by-side players; [index] doubles as the dart_vlc
/// Player id and selects the test file to load.
class VideoPlayer extends StatefulWidget {
  final int index;

  const VideoPlayer({
    super.key,
    required this.index,
  });

  // Fixed: '@override', not '#override'.
  @override
  VideoPlayerState createState() => VideoPlayerState();
}
class VideoPlayerState extends State<VideoPlayer> {
  late Player player;
  MediaType mediaType = MediaType.file;
  CurrentState current = CurrentState();
  PositionState position = PositionState();
  PlaybackState playback = PlaybackState();
  GeneralState general = GeneralState();
  VideoDimensions videoDimensions = const VideoDimensions(0, 0);
  List<Media> medias = <Media>[];
  List<Device> devices = <Device>[];
  TextEditingController controller = TextEditingController();
  TextEditingController metasController = TextEditingController();
  double bufferingProgress = 0.0;
  Media? metadataCurrentMedia;

  @override
  void initState() {
    super.initState();
    // (The original wrapped this body in `if (mounted)`, but mounted is
    // always true inside initState, so the guard was a no-op.)
    player = Player(
      id: widget.index,
      videoDimensions: const VideoDimensions(640, 360),
    );
    // Mirror every player stream into widget state so the UI stays live.
    player.currentStream.listen((value) {
      setState(() => current = value);
    });
    player.positionStream.listen((value) {
      setState(() => position = value);
    });
    player.playbackStream.listen((value) {
      setState(() => playback = value);
    });
    player.generalStream.listen((value) {
      setState(() => general = value);
    });
    player.videoDimensionsStream.listen((value) {
      setState(() => videoDimensions = value);
    });
    player.bufferingProgressStream.listen((value) {
      setState(() => bufferingProgress = value);
    });
    player.errorStream.listen((event) {
      debugPrint('libVLC error.');
    });
    devices = Devices.all;
    // Demo equalizer: live preset with a boosted low band.
    Equalizer equalizer = Equalizer.createMode(EqualizerMode.live);
    equalizer.setPreAmp(10.0);
    equalizer.setBandAmp(31.25, 10.0);
    player.setEqualizer(equalizer);
    medias.add(
      Media.file(File("C:\\test${widget.index}.mp4")),
    );
  }

  @override
  void dispose() {
    // Release the native VLC player and the text controllers; the
    // original leaked all three.
    player.dispose();
    controller.dispose();
    metasController.dispose();
    super.dispose();
  }

  @override
  Widget build(BuildContext context) {
    return ListView(
      children: [
        ClipRRect(
          child: Video(
            player: player,
            width: 640,
            height: 650,
            volumeThumbColor: Colors.blue,
            volumeActiveColor: Colors.blue,
            showControls: true,
          ),
        ),
        ElevatedButton(
          onPressed: () => setState(() {
            player.open(Playlist(medias: medias));
          }),
          child: const Text('PLAY', style: TextStyle(fontSize: 16)),
        ),
      ],
    );
  }
}
But unfortunately, if I play 4 videos (1920 x 1080, 60 fps) simultaneously, the videos are jerky.
I tried to read the same videos with the official VLC player, and it works fine, so it's apparently not a problem with my machine, but rather an optimization issue. And since I'm a beginner when it comes to video players, I don't really know where to start.
So how can I optimize that, using dart_vlc or something else?
Thanks.

How to play one audio file at a time in a series of audio files in Flutter?

I have several audio files to be played, so I used ListView to represent every audio file as an item of ListView, each one with its own controllers (play/pause button and duration slider). The code is as follows (I have used one audio file for all of the items for simplicity sake):
import 'package:audioplayers/audioplayers.dart';
/// Shows ten copies of the same audio item, each owning its own player.
class AudioTestScreen extends StatelessWidget {
  const AudioTestScreen({Key? key}) : super(key: key);

  // Fixed: '@override', not '#override'.
  @override
  Widget build(BuildContext context) {
    return Scaffold(
      appBar: AppBar(title: const Text("Songs")),
      body: ListView.builder(
        itemCount: 10,
        itemBuilder: (ctx, index) => const AudioItem(),
      ),
    );
  }
}
/// One row of the list: an audio file with its own slider and
/// play/pause button.
class AudioItem extends StatefulWidget {
  const AudioItem({Key? key}) : super(key: key);

  // Fixed: '@override', not '#override'.
  @override
  State<AudioItem> createState() => _AudioItemState();
}
class _AudioItemState extends State<AudioItem> {
  final audioPlayer = AudioPlayer();
  bool isPlaying = false;
  Duration duration = Duration.zero; // Total track duration.
  Duration position = Duration.zero; // Current playback position.

  @override
  void initState() {
    super.initState();
    setAudioPlayer();
    audioPlayer.onDurationChanged.listen((newDuration) {
      // Guard with `mounted` like the other listeners: stream events can
      // arrive after this list item has been scrolled out and disposed.
      if (mounted) {
        setState(() {
          duration = newDuration;
        });
      }
    });
    audioPlayer.onPositionChanged.listen((newPosition) {
      if (mounted) {
        setState(() {
          position = newPosition;
        });
      }
    });
    audioPlayer.onPlayerStateChanged.listen((state) {
      if (mounted) {
        setState(() {
          isPlaying = state == PlayerState.playing;
        });
      }
    });
  }

  @override
  void dispose() {
    // Each list item owns its player; release it with the widget so
    // scrolled-away items do not leak native players.
    audioPlayer.dispose();
    super.dispose();
  }

  /// Loads the bundled asset into the player without starting playback.
  Future<void> setAudioPlayer() async {
    final player = AudioCache(prefix: "assets/audios/");
    final url = await player.load("song.mp3");
    // Await both calls (they return futures) so load errors surface here
    // instead of being silently dropped.
    await audioPlayer.setSourceUrl(url.path);
    await audioPlayer.setReleaseMode(ReleaseMode.stop);
  }

  @override
  Widget build(BuildContext context) {
    return Container(
      padding: const EdgeInsets.symmetric(vertical: 8, horizontal: 16),
      margin: const EdgeInsets.symmetric(vertical: 8, horizontal: 16),
      decoration: BoxDecoration(
        color: const Color(0xFFF4F2FF),
        borderRadius: BorderRadius.circular(12),
        border: Border.all(width: 1, color: Colors.grey),
      ),
      child: Column(
        children: [
          Slider(
            value: position.inMilliseconds.toDouble(),
            max: duration.inMilliseconds.toDouble(),
            onChanged: (value) {
              // Update local state immediately so the thumb tracks the
              // drag, then ask the player to seek.
              setState(() {
                position = Duration(milliseconds: value.toInt());
              });
              audioPlayer.seek(position);
            },
          ),
          GestureDetector(
            onTap: () async {
              isPlaying
                  ? await audioPlayer.pause()
                  : await audioPlayer.resume();
            },
            child: CircleAvatar(
              child: Icon(isPlaying ? Icons.pause : Icons.play_arrow),
            ),
          ),
        ],
      ),
    );
  }
}
And here is how it looks like:
Now when I play a music file, and later tap on another item to play it, both of them plays at the same time, but I want the previous one to pause and only the current one to play.
How can I achieve this behavior? Thanks in advance.
create the audio player in the parent class and pass it to the children. Then before you play stop the player and then play it with new url
widget.player.stop()
Use this to stop the player
EDIT
class AudioItem extends StatefulWidget {
final AudioPlayer audioPlayer;
final int currentIndex;
final int index;
final VoidCallback setIndex;
const AudioItem({Key? key, required this.audioPlayer, required required this.currentIndex, required this.index, required this.setIndex}) : super(key: key);
Add these 3 variables to the Audio item. When you add these Widgets in the tree pass the values
#override
Widget build(BuildContext context) {
return Scaffold(
appBar: AppBar(title: const Text("Songs")),
body: ListView.builder(
itemCount: 10,
itemBuilder: (ctx, index) => const AudioItem(
audioPlayer: audioPlayer,
currentIndex: currentIndex, // <-- the variable telling us which item is currently playing.
index: index,
setIndex: (){
currentIndex = index;
setState((){});
}
),
),
);
}
Now when the play button is clicked call this setIndex method that will update the parent.

Using video_player package with Flutter Hooks to play a background fullscreen video

I have a Home Screen Widget, that plays a fullscreen background video using the video_player package.
This code works fine for me:
/// Home screen that plays a fullscreen looping background video.
/// (Pre-null-safety snippet: `Key key` is non-nullable-era syntax.)
class HomeScreen extends StatefulWidget {
  HomeScreen({Key key}) : super(key: key);

  // Fixed: '@override', not '#override'.
  @override
  _HomeScreenState createState() => _HomeScreenState();
}
class _HomeScreenState extends State<HomeScreen> {
  VideoPlayerController _controller;

  // Fixed: initState had no @override annotation at all.
  @override
  void initState() {
    super.initState();
    // Point the video controller at the local asset.
    _controller = VideoPlayerController.asset("assets/waterfall.mp4");
    _controller.initialize().then((_) {
      // Once loaded, start playback and loop forever.
      _controller.play();
      _controller.setLooping(true);
      // Rebuild so the first frame is shown after initialization.
      setState(() {});
    });
  }

  @override
  void dispose() {
    // Dispose owned resources first, then call super.dispose() last, per
    // the State.dispose contract (the original called super first).
    _controller.dispose();
    super.dispose();
  }

  @override
  Widget build(BuildContext context) {
    return SafeArea(
      child: Scaffold(
        body: Stack(
          children: <Widget>[
            SizedBox.expand(
              child: FittedBox(
                // If the background video doesn't look right, try another
                // BoxFit; fill created the intended look here.
                fit: BoxFit.fill,
                child: SizedBox(
                  width: _controller.value.size?.width ?? 0,
                  height: _controller.value.size?.height ?? 0,
                  child: VideoPlayer(_controller),
                ),
              ),
            ),
            Container(
              child: Center(
                child: Text('Hello!'),
              ),
            ),
          ],
        ),
      ),
    );
  }
}
The question is, how can I implement this using flutter Hooks? I understand that I have to use useEffect() to implement the functionality of initState() and dispose(), useFuture() and maybe useMemoized() to handle asynchronous _controller.initialize() call and what possibly else? But, I cannot glue them to get the desired result. Can anyone indicate to me the "using Hooks" implementation of the above code?
I was looking for the answer to how to convert a VideoPlayer demo from StatefulWidget to HookWidget when I came across this question. I've come up with something that works so I'll post it here since there is nothing elsewhere that I could find and some others are hitting this page looking for an answer.
I used a viewmodel. The video controller is a property of the viewmodel. This code will not compile since some of the controls are not included. But it will demonstrate the structure and incorporation of the viewmodel.
Here's the widget file:
import 'package:flutter/foundation.dart';
import 'package:flutter_hooks/flutter_hooks.dart';
import 'package:flutter/material.dart';
import 'package:flutter_riverpod/flutter_riverpod.dart';
import 'package:video_player/video_player.dart';
import 'intro_viewmodel.dart';
/// Intro/onboarding page driven by [IntroViewModel]; hosts a video player
/// whose controller lives on the viewmodel.
class IntroPage extends HookWidget {
  /// Marks the intro as complete in shared preferences.
  Future<void> saveAndGetStarted(BuildContext context) async {
    final IntroViewModel introViewModel = context.read(introViewModelProvider);
    await introViewModel.completeIntro();
  }

  /// Advances to the next slide.
  Future<void> onNext(BuildContext context) async {
    final IntroViewModel introViewModel = context.read(introViewModelProvider);
    await introViewModel.incrementIntro();
  }

  // NOTE(review): Colors.lightGray is not a Flutter Material color
  // (the author states this snippet does not compile) — confirm the
  // intended color, e.g. Colors.grey.
  final List<SliderModel> slides = [
    SliderModel(
        description: 'A word with you before you get started.\n',
        title: 'Why This App?',
        localImageSrc: 'media/Screen1-Movingforward-pana.svg',
        backgroundColor: Colors.lightGray),
    SliderModel(
        description: 'This information will help the app be more accurate\n',
        title: 'Personal Profile',
        localImageSrc: 'media/Screen2-Teaching-cuate.svg',
        backgroundColor: Colors.lightGray)
  ];

  // Fixed: '@override', not '#override'.
  @override
  Widget build(BuildContext context) {
    final IntroViewModel introViewModel = context.read(introViewModelProvider);
    return Scaffold(
        body: Padding(
      padding: const EdgeInsets.all(16.0),
      child: Center(
        child: Column(
          children: [
            Text(
              slides[introViewModel.index].description,
              style: Theme.of(context).textTheme.headline5,
              textAlign: TextAlign.center,
            ),
            Expanded(
                child: FractionallySizedBox(
              widthFactor: .98,
              heightFactor: .5,
              child: VideoPlayer(introViewModel.videoController),
            )),
            Align(
              alignment: Alignment.bottomCenter,
              child: CustomRaisedButton(
                onPressed: () {
                  // Last slide saves and exits; otherwise advance.
                  if (introViewModel.index == slides.length - 1) {
                    saveAndGetStarted(context);
                  } else {
                    onNext(context);
                  }
                },
                color: Theme.of(context).accentColor,
                borderRadius: 15,
                height: 50,
                child: Text(
                  introViewModel.index == 0
                      ? 'Continue'
                      : 'Save and Get Started',
                  style: Theme.of(context)
                      .textTheme
                      .headline5
                      .copyWith(color: Colors.white),
                ),
              ),
            ),
          ],
        ),
      ),
    ));
  }

  @override
  void debugFillProperties(DiagnosticPropertiesBuilder properties) {
    super.debugFillProperties(properties);
    properties.add(IterableProperty<SliderModel>('slides', slides));
  }
}
And here is the viewmodel code
import 'package:flutter/foundation.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:video_player/video_player.dart';
import '../top_level_providers.dart';
/// Riverpod provider exposing a single [IntroViewModel] built on top of the
/// app-wide shared-preferences service.
final introViewModelProvider = ChangeNotifierProvider<IntroViewModel>((ref) {
//this singleton class provides global access to selected variables
final SharedPreferencesService localSharedPreferencesService =
ref.watch(sharedPreferencesService);
return IntroViewModel(localSharedPreferencesService);
});
/// View model for the intro flow: tracks slide [index] and whether the
/// intro has been completed, persisting completion via shared preferences.
class IntroViewModel extends ChangeNotifier {
IntroViewModel(this.localSharedPreferencesService) : super() {
// NOTE(review): isIntroComplete() is called through `?.` — if the service
// is null, `state` is assigned null here despite being typed `bool`;
// confirm whether this snippet targets pre-null-safety Dart.
state = localSharedPreferencesService?.isIntroComplete();
// Pointing the video controller to my local asset.
videoController = VideoPlayerController.asset('media/test_search.mp4');
videoController.initialize().then((_) {
// Once the video has been loaded we play the video and set looping to true.
// not autoplaying yet
// videoController.play();
// videoController.setLooping(true);
});
}
final SharedPreferencesService localSharedPreferencesService;
// Controller for the intro video; initialized in the constructor.
VideoPlayerController videoController;
// Whether the intro has been completed.
bool state = false;
// Index of the slide currently shown.
int index = 0;
/// Persists intro completion and notifies listeners.
Future<void> completeIntro() async {
await localSharedPreferencesService.setIntroComplete();
state = true;
notifyListeners();
}
/// Advances to the next slide and notifies listeners.
Future<void> incrementIntro() async {
++index;
notifyListeners();
}
bool get isIntroComplete => state;
}

Keep VideoPlayerController playing audio when closing the app

I am trying to develop an app that presents videos to the user. I am using VideoPlayerController for loading the videos, and ChewieController for the UI.
It works great, but when the user closes the app, the video stops. I would like the video to keep playing its audio even when closing the app/locking the device.
I couldn't find anything about it on the VideoPlayerController and in the ChewieController documentations.
Is this functionality possible in Flutter and Dart?
Thank you!
Unfortunately Flutter's video_player package doesn't support background video or audio playing. But you can use flutter_playout which wraps ExoPlayer on Android and AVPlayer framework on iOS with the ability to playback video in background or even lock screen. You can find out more about it here. Below is an example code provided by library's GitHub repo which plays a video and it keeps playing in background
import 'dart:io';
import 'package:flutter/material.dart';
import 'package:flutter_playout/multiaudio/HLSManifestLanguage.dart';
import 'package:flutter_playout/multiaudio/MultiAudioSupport.dart';
import 'package:flutter_playout/player_observer.dart';
import 'package:flutter_playout/player_state.dart';
import 'package:flutter_playout/video.dart';
import 'package:flutter_playout_example/hls/getManifestLanguages.dart';
/// Hosts a flutter_playout [Video] that can keep playing in the background.
/// (Pre-null-safety snippet.)
class VideoPlayout extends StatefulWidget {
  final PlayerState desiredState;
  final bool showPlayerControls;

  const VideoPlayout({Key key, this.desiredState, this.showPlayerControls})
      : super(key: key);

  // Fixed: '@override', not '#override'.
  @override
  _VideoPlayoutState createState() => _VideoPlayoutState();
}
class _VideoPlayoutState extends State<VideoPlayout>
    with PlayerObserver, MultiAudioSupport {
  // Placeholder in the example; set to a real HLS stream URL to play.
  final String _url = null;
  // Fixed: `List<T>()` is a deprecated constructor; the literal creates
  // the same empty growable list.
  List<HLSManifestLanguage> _hlsLanguages = <HLSManifestLanguage>[];

  @override
  void initState() {
    super.initState();
    // Duration.zero just schedules the async fetch outside initState.
    Future.delayed(Duration.zero, _getHLSManifestLanguages);
  }

  /// Fetches the audio languages listed in the HLS manifest (Android only;
  /// iOS exposes them natively).
  Future<void> _getHLSManifestLanguages() async {
    if (!Platform.isIOS && _url != null && _url.isNotEmpty) {
      _hlsLanguages = await getManifestLanguages(_url);
      setState(() {});
    }
  }

  @override
  Widget build(BuildContext context) {
    return Container(
      child: Column(
        children: <Widget>[
          /* player */
          AspectRatio(
            aspectRatio: 16 / 9,
            child: Video(
              autoPlay: true,
              showControls: widget.showPlayerControls,
              title: "MTA International",
              subtitle: "Reaching The Corners Of The Earth",
              preferredAudioLanguage: "eng",
              isLiveStream: true,
              position: 0,
              url: _url,
              onViewCreated: _onViewCreated,
              desiredState: widget.desiredState,
            ),
          ),
          /* multi language menu */
          _hlsLanguages.length < 2 && !Platform.isIOS
              ? Container()
              : Container(
                  child: Row(
                    children: _hlsLanguages
                        .map((e) => MaterialButton(
                              child: Text(
                                e.name,
                                style: Theme.of(context)
                                    .textTheme
                                    .button
                                    .copyWith(color: Colors.white),
                              ),
                              onPressed: () {
                                setPreferredAudioLanguage(e.code);
                              },
                            ))
                        .toList(),
                  ),
                ),
        ],
      ),
    );
  }

  /// Wires up the native platform view once it exists.
  void _onViewCreated(int viewId) {
    listenForVideoPlayerEvents(viewId);
    enableMultiAudioSupport(viewId);
  }

  // PlayerObserver callbacks: empty hooks in this example; override points
  // for reacting to native player events.
  @override
  void onPlay() {
    super.onPlay();
  }

  @override
  void onPause() {
    super.onPause();
  }

  @override
  void onComplete() {
    super.onComplete();
  }

  @override
  void onTime(int position) {
    super.onTime(position);
  }

  @override
  void onSeek(int position, double offset) {
    super.onSeek(position, offset);
  }

  @override
  void onDuration(int duration) {
    super.onDuration(duration);
  }

  @override
  void onError(String error) {
    super.onError(error);
  }
}
As the video_player package now has the allowBackgroundPlayback option, I created this simple example showing how to integrate video_player and audio service.
example_video_player.dart
// This example demonstrates a simple video_player integration.
import 'dart:async';
import 'package:audio_service/audio_service.dart';
import 'package:flutter/material.dart';
import 'package:video_player/video_player.dart';
// You might want to provide this using dependency injection rather than a
// global variable.
late AudioPlayerHandler _audioHandler;
/// Entry point: the audio handler must be created via AudioService.init
/// BEFORE runApp so the background service exists when the UI starts.
Future<void> main() async {
_audioHandler = await AudioService.init(
builder: () => AudioPlayerHandler(),
// Android notification channel used for the playback notification.
config: const AudioServiceConfig(
androidNotificationChannelId: 'com.ryanheise.myapp.channel.audio',
androidNotificationChannelName: 'Audio playback',
androidNotificationOngoing: true,
),
);
runApp(const MyApp());
}
/// Root widget of the audio_service + video_player demo.
class MyApp extends StatelessWidget {
  const MyApp({Key? key}) : super(key: key);

  // Fixed: '@override', not '#override'.
  @override
  Widget build(BuildContext context) {
    return MaterialApp(
      title: 'Audio Service Demo',
      theme: ThemeData(primarySwatch: Colors.blue),
      home: const MainScreen(),
    );
  }
}
/// Screen showing the video and the audio_service transport controls.
class MainScreen extends StatefulWidget {
  const MainScreen({Key? key}) : super(key: key);

  // Fixed: '@override', not '#override'.
  @override
  _MainScreenState createState() => _MainScreenState();
}
class _MainScreenState extends State<MainScreen> {
  late VideoPlayerController _controller;

  @override
  void initState() {
    super.initState();
    // allowBackgroundPlayback keeps the audio running when the app is
    // backgrounded; audio_service supplies the media notification.
    _controller = VideoPlayerController.network(
        'https://flutter.github.io/assets-for-api-docs/assets/videos/bee.mp4',
        videoPlayerOptions: VideoPlayerOptions(allowBackgroundPlayback: true))
      ..initialize().then((_) {
        // Hand the controller's transport functions to the audio handler so
        // notification/lock-screen buttons drive the video controller.
        _audioHandler.setVideoFunctions(
            _controller.play, _controller.pause, _controller.seekTo, () {
          _controller.seekTo(Duration.zero);
          _controller.pause();
        });
        // Broadcast all playback state changes via playbackState so clients
        // (the Flutter UI and the system notification) know what to display.
        _audioHandler.initializeStreamController(_controller);
        _audioHandler.playbackState
            .addStream(_audioHandler.streamController.stream);
        // Show the first frame even before play is pressed.
        setState(() {});
      });
  }

  @override
  void dispose() {
    // Stop broadcasting state, release the video controller (the original
    // leaked it), then let the framework clean up.
    _audioHandler.streamController.close();
    _controller.dispose();
    super.dispose();
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      appBar: AppBar(
        title: const Text('Audio Service Demo'),
      ),
      body: Center(
        child: Column(
          mainAxisAlignment: MainAxisAlignment.center,
          children: [
            Center(
              child: _controller.value.isInitialized
                  ? AspectRatio(
                      aspectRatio: _controller.value.aspectRatio,
                      child: VideoPlayer(_controller),
                    )
                  : Container(),
            ),
            // Play/pause/stop buttons driven by the handler's state stream.
            StreamBuilder<bool>(
              stream: _audioHandler.playbackState
                  .map((state) => state.playing)
                  .distinct(),
              builder: (context, snapshot) {
                final playing = snapshot.data ?? false;
                return Row(
                  mainAxisAlignment: MainAxisAlignment.center,
                  children: [
                    _button(Icons.fast_rewind, _audioHandler.rewind),
                    if (playing)
                      _button(Icons.pause, _audioHandler.pause)
                    else
                      _button(Icons.play_arrow, _audioHandler.play),
                    _button(Icons.stop, _audioHandler.stop),
                    _button(Icons.fast_forward, _audioHandler.fastForward),
                  ],
                );
              },
            ),
            // Display the processing state.
            StreamBuilder<AudioProcessingState>(
              stream: _audioHandler.playbackState
                  .map((state) => state.processingState)
                  .distinct(),
              builder: (context, snapshot) {
                final processingState =
                    snapshot.data ?? AudioProcessingState.idle;
                return Text("Processing state: ${(processingState)}");
              },
            ),
          ],
        ),
      ),
    );
  }

  /// Builds one large transport-control icon button.
  IconButton _button(IconData iconData, VoidCallback onPressed) => IconButton(
        icon: Icon(iconData),
        iconSize: 64.0,
        onPressed: onPressed,
      );
}
/// Immutable pairing of the current media item with a playback position.
class MediaState {
final MediaItem? mediaItem;
final Duration position;
MediaState(this.mediaItem, this.position);
}
/// An [AudioHandler] for playing a single item.
///
/// Bridges audio_service (notification / lock-screen controls) to a
/// video_player controller via the callbacks injected through
/// [setVideoFunctions].
class AudioPlayerHandler extends BaseAudioHandler with SeekHandler {
  /// Broadcasts [PlaybackState] updates derived from the video controller.
  late StreamController<PlaybackState> streamController;

  // Static metadata shown in the media notification.
  static final _item = MediaItem(
    id: 'https://s3.amazonaws.com/scifri-episodes/scifri20181123-episode.mp3',
    album: "Science Friday",
    title: "A Salute To Head-Scratching Science",
    artist: "Science Friday and WNYC Studios",
    duration: const Duration(milliseconds: 5739820),
    artUri: Uri.parse(
        'https://media.wnyc.org/i/1400/1400/l/80/1/ScienceFriday_WNYCStudios_1400.jpg'),
  );

  // Transport callbacks supplied by the video controller's owner.
  Function? _videoPlay;
  Function? _videoPause;
  Function? _videoSeek;
  Function? _videoStop;

  /// Injects the video transport functions and publishes the media item.
  void setVideoFunctions(
      Function play, Function pause, Function seek, Function stop) {
    _videoPlay = play;
    _videoPause = pause;
    _videoSeek = seek;
    _videoStop = stop;
    mediaItem.add(_item);
  }

  /// Initialise our audio handler.
  AudioPlayerHandler();

  // Only 4 actions are handled: play, pause, seek and stop. Any button
  // press from the Flutter UI, notification, lock screen or headset is
  // routed through these methods. (Fixed: '@override', not '#override'.)
  @override
  Future<void> play() async => _videoPlay!();

  @override
  Future<void> pause() async => _videoPause!();

  @override
  Future<void> seek(Duration position) async => _videoSeek!(position);

  @override
  Future<void> stop() async => _videoStop!();

  /// Builds [streamController], which mirrors the video controller's state
  /// into audio_service [PlaybackState] events while it has listeners.
  void initializeStreamController(
      VideoPlayerController? videoPlayerController) {
    bool _isPlaying() => videoPlayerController?.value.isPlaying ?? false;

    AudioProcessingState _processingState() {
      if (videoPlayerController == null) return AudioProcessingState.idle;
      if (videoPlayerController.value.isInitialized) {
        return AudioProcessingState.ready;
      }
      return AudioProcessingState.idle;
    }

    Duration _bufferedPosition() {
      // Fixed: the original used firstWhere without orElse, which throws a
      // StateError whenever no buffered range covers the current position
      // (e.g. right after a seek). Scan manually and fall back to zero.
      final value = videoPlayerController?.value;
      if (value == null) return Duration.zero;
      final position = value.position;
      for (final durationRange in value.buffered) {
        if (durationRange.start < position && durationRange.end > position) {
          return durationRange.end;
        }
      }
      return Duration.zero;
    }

    void _addVideoEvent() {
      streamController.add(PlaybackState(
        controls: [
          MediaControl.rewind,
          if (_isPlaying()) MediaControl.pause else MediaControl.play,
          MediaControl.stop,
          MediaControl.fastForward,
        ],
        systemActions: const {
          MediaAction.seek,
          MediaAction.seekForward,
          MediaAction.seekBackward,
        },
        androidCompactActionIndices: const [0, 1, 3],
        processingState: _processingState(),
        playing: _isPlaying(),
        updatePosition: videoPlayerController?.value.position ?? Duration.zero,
        bufferedPosition: _bufferedPosition(),
        speed: videoPlayerController?.value.playbackSpeed ?? 1.0,
      ));
    }

    void startStream() {
      videoPlayerController?.addListener(_addVideoEvent);
    }

    void stopStream() {
      videoPlayerController?.removeListener(_addVideoEvent);
      streamController.close();
    }

    // Only listen to the controller while someone listens to us.
    streamController = StreamController<PlaybackState>(
        onListen: startStream,
        onPause: stopStream,
        onResume: startStream,
        onCancel: stopStream);
  }
}
I've been using the better_player package. It's quite good uses video_player and chewie and also has support for player notification and PiP.
And don't forget to enable the background audio capability on your xcode.
xcode-audio-capability

Flutter: How to play and pause audio using the same button as countdown start button?

I have a play pause button for a countdown timer. When I press play, the timer starts, at the same time I was the audio to play as well. How should I implement this?
Note: the most of the counter code is in another file. Just wanted to get the play/pause for audio working first in the same onPressed for now.
Below is what I have so far:
Updated: I think I messed up somewhere. I'm not able to pause the audio now. I have tried a few of the Flutter audioplayers tutorials, but I'm not able to figure out how to implement both the countdown and the audio action with a single play/pause button.
import 'package:file_picker/file_picker.dart';
import 'package:flutter/material.dart';
import 'package:audioplayers/audio_cache.dart';
import 'package:audioplayers/audioplayers.dart';
/// Screen with a countdown timer whose play/pause button also controls
/// a 30-second audio clip. (Pre-null-safety snippet.)
class Jiddu extends StatefulWidget {
  const Jiddu({Key key}) : super(key: key);

  // Fixed: '@override', not '#override'.
  @override
  _JidduState createState() => _JidduState();
}
class _JidduState extends State<Jiddu> with TickerProviderStateMixin {
  AnimationController controller;
  AudioPlayer _audioPlayer = AudioPlayer();
  AudioCache _audioCache;
  bool isPlaying = false;
  // Whether the clip has been started at least once, so the button knows
  // to resume instead of restarting from the top.
  bool _hasStarted = false;

  @override
  void initState() {
    super.initState();
    controller = AnimationController(
      vsync: this,
      duration: Duration(seconds: 60),
    );
    // Route cache playback through _audioPlayer. The original passed a
    // second, anonymous AudioPlayer as fixedPlayer, so there was no handle
    // left to pause the audio with — that is why pause never worked.
    _audioCache = AudioCache(
      prefix: "audio/",
      fixedPlayer: _audioPlayer..setReleaseMode(ReleaseMode.STOP),
    );
    // Keep isPlaying in sync with the actual player (it was never updated
    // in the original, so the pause branch could never be taken).
    _audioPlayer.onPlayerStateChanged.listen((state) {
      if (mounted) {
        setState(() => isPlaying = state == PlayerState.PLAYING);
      }
    });
  }

  @override
  void dispose() {
    controller.dispose();
    _audioPlayer.dispose();
    super.dispose();
  }

  /// Starts the clip from the beginning via the cache's fixed player.
  playme() async {
    await _audioCache.play('30sec.mp3');
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      appBar: AppBar(title: Text('Audio')),
      body: Column(
        children: <Widget>[
          Center(
              child: RaisedButton(
                  onPressed: () {
                    Navigator.pop(context);
                  },
                  child: Text('Go Back'))),
          Container(
              margin: EdgeInsets.all(20.0),
              child: Row(
                mainAxisAlignment: MainAxisAlignment.center,
                children: <Widget>[
                  FloatingActionButton(
                    child: AnimatedBuilder(
                      animation: controller,
                      builder: (BuildContext context, Widget child) {
                        return new Icon(controller.isAnimating
                            ? Icons.pause
                            : Icons.play_arrow);
                      },
                    ),
                    onPressed: () {
                      if (controller.isAnimating && isPlaying) {
                        // Pause both the countdown and the audio together.
                        controller.stop();
                        _audioPlayer.pause();
                      } else {
                        controller.reverse(
                            from: controller.value == 0.0
                                ? 1.0
                                : controller.value);
                        if (_hasStarted) {
                          // Continue from where we paused rather than
                          // restarting the clip.
                          _audioPlayer.resume();
                        } else {
                          _hasStarted = true;
                          playme();
                        }
                      }
                    },
                  ),
                ],
              )),
        ],
      ),
    );
  }
}
Just define your function to your condition in onPressed.
It could be like:
onPressed: () {
if (controller.isAnimating)
controller.stop();
else {
playaudio(); // or what is your function for this
controller.reverse(
from: controller.value == 0.0
? 1.0
: controller.value);
}
setState(() {
_visible = !_visible;
});
},