I created a button using a play and stop image, what I want is when i press play and that the sound finishes, the stop image should go back to play, but it's not working, can anyone help here please. cheers.
- (IBAction)play {
if(clicked == 0){
clicked = 1;
NSURL *url = [NSURL fileURLWithPath:[NSString stringWithFormat:#"%#/TEST.wav", [[NSBundle mainBundle] resourcePath]]];
NSError *error;
audioPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:url error:&error];
audioPlayer.numberOfLoops = 0;
[audioPlayer play];
[start setImage:[UIImage imageNamed:#"stop.png"] forState:UIControlStateNormal];
}
else{
[audioPlayer release];
clicked = 0;
[start setImage:[UIImage imageNamed:#"pla.png"] forState:UIControlStateNormal];
}
}
//If user does not do anything by the end of the sound set the button to start
- (void) audioPlayerDidFinishPlaying: (AVAudioPlayer *) player
successfully: (BOOL) flag {
if (flag==YES) {
[start setImage:[UIImage imageNamed:#"pla.png"] forState:UIControlStateNormal];
}
}
This is my .h file
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
int clicked;
#interface _0_RabbanasViewController : UIViewController {
IBOutlet UIScrollView *scroll;
AVAudioPlayer *audioPlayer;
IBOutlet UIButton *start;
IBOutlet UIImage *iPla;
IBOutlet UIImage *iStop;
BOOL successfully;
}
- (IBAction)play;
- (IBAction)next;
#end
Make sure you set the delegate of the AVAudioPlayer instance to self:
[audioPlayer setDelegate:self];
and in the header file, subscribe to the AVAudioPlayerDelegate protocol:
<AVAudioPlayerDelegate>
Now it should work.
EDIT: Here's where to subscribe to the AVAudioPlayerDelegate protocol:
#interface _0_RabbanasViewController : UIViewController <AVAudioPlayerDelegate> {
IBOutlet UIScrollView *scroll;
AVAudioPlayer *audioPlayer;
IBOutlet UIButton *start;
...
Hope it works now. :)
EDIT2: Set audioPlayer delegate to self here:
audioPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:url error:&error];
audioPlayer.numberOfLoops = 0;
[audioPlayer setDelegate:self];
[audioPlayer play];
EDIT3: Add this:
- (void) audioPlayerDidFinishPlaying: (AVAudioPlayer *) player
successfully: (BOOL) flag {
if (flag==YES) {
clicked = 0;
[start setImage:[UIImage imageNamed:#"pla.png"] forState:UIControlStateNormal];
}
}
Related
I am developing AVAudioPlayer according to my project requirement. In audioplayer iam using storyboard to develope coding.In my audio player using json parser to retrieve the urls from server.And finally gave the connections from first responder to Audioviewcontroller.I Can run the audioplayer. Audioplayer is successfully playing. But view will not be displayed.Blank screen will be displayed. I can drag and drop of all buttons like play button, pause button,forward button and rewind button etc. Those are buttons are drag and drop on AudioviewController. And give the connections also.These are all buttons are not displayed on Audioviewcontroller to play the corresponding song. Plz help me any body whats the problem. I am new to the ios programming.Thanks in Advance...
Audioviewcontroller.m
-(void)updateCurrentTimeForPlayer:(AVAudioPlayer *)
{
currentTime.text = [NSString stringWithFormat:#"%d:%02d", (int)p.currentTime / 60, (int)p.currentTime % 60, nil];
progressBar.value = p.currentTime;
}
-(void)updateCurrentTime
{
[self updateCurrentTimeForPlayer:self.avPlayer];
}
-(void)updateViewForPlayerState:(AVAudioPlayer *)p
{
[self updateCurrentTimeForPlayer:p];
if (updateTimer)
[updateTimer invalidate];
if (p.playing)
{
[playButton setImage:(p.playing==YES)? pauseButtonBG:playButtonBG forState:UIControlStateNormal];
updateTimer=[NSTimer scheduledTimerWithTimeInterval:.01 target:self selector:#selector(updateCurrentTime) userInfo:p repeats:YES];
}
else
{
[playButton setImage:(p.playing==YES)? pauseButtonBG:playButtonBG forState:UIControlStateNormal];
updateTimer=nil;
}
}
-(void)updateViewForPlayerStateInBackground:(AVAudioPlayer *)p
{
[self updateCurrentTimeForPlayer:p];
if (p.playing)
{
[playButton setImage:(p.playing==YES)? pauseButtonBG:playButtonBG forState:UIControlStateNormal];
}
else
{
[playButton setImage:(p.playing==YES)? pauseButtonBG:playButtonBG forState:UIControlStateNormal];
}
}
-(void)updateViewForPlayerInfo:(AVAudioPlayer*)p
{
duration.text = [NSString stringWithFormat:#"%d:%02d", (int)p.duration / 60, (int)p.duration % 60, nil];
progressBar.maximumValue = p.duration;
volumeSlider.value = p.volume;
}
// rewind audio player
-(void)rewind
{
AVAudioPlayer *p=rewindTimer.userInfo;
p.currentTime-=SKIP_TIME;
[self updateCurrentTimeForPlayer:p];
}
// forward audio player
-(void)forward
{
AVAudioPlayer *p=forwardTimer.userInfo;
p.currentTime+=SKIP_INTERVAL;
[self updateCurrentTimeForPlayer:p];
}
json parser::
-(void)loadData
{
NSURLRequest *request = [NSURLRequest requestWithURL:[NSURL URLWithString:#"http://www.teluguastrology.com"]];
[[NSURLConnection alloc] initWithRequest:request delegate:self];
}
// delegate methods in json
-(void)connection:(NSURLConnection *)connectiondidReceiveResponse:(NSURLResponse *)response
{
[_data setLength:0];
NSLog(#"didReceiveResponse called");
}
//connection did receive data
-(void)connection:(NSURLConnection *)connection didReceiveData:(NSMutableData *)data
{
[_data appendData:data];
}
// connection did finish loading
-(void)connectionDidFinishLoading:(NSURLConnection *)connection
{
NSError *jsonError = nil;
id jsonObject = [NSJSONSerialization JSONObjectWithData:_data options:kNilOptions error:&jsonError];
if ([jsonObject isKindOfClass:[NSArray class]])
{
//NSArray *jsonArray = (NSArray *)jsonObject;
}
else if ([jsonObject isKindOfClass:[NSDictionary class]])
{
NSDictionary *jsonDictionary = (NSDictionary *)jsonObject;
NSArray *array=[[NSArray alloc]init];
array=[jsonDictionary objectForKey:#"audio-urls"];
dataDictionary=[array objectAtIndex:0];
/* NSLog(#"%#",dataDictionary);*/
}
[urlsArray addObject:[dataDictionary objectForKey:#"Meshamu-Aries"]];
NSLog(#"%#",urlsArray);
- (void)connection:(NSURLConnection *)connection didFailWithError:(NSError *)error
{
// [connection release];
NSLog(#"didFailWithError called");
}
// Implement viewDidLoad to do additional setup after loading the view, typically from a nib.
- (void)viewDidLoad
{
// self.view.backgroundColor=[UIColor redColor];
urlsArray=[[NSMutableArray alloc]init];
[super viewDidLoad];
playButtonBG=[[UIImage imageNamed:#"audio_play.png"]init];
pauseButtonBG=[[UIImage imageNamed:#"audio_pause.png"]init];
[playButton setImage:playButtonBG forState:UIControlStateNormal];
//[self registerForBackGroundNotification];
updateTimer=nil;
rewindTimer=nil;
forwardTimer=nil;
// to adjust the font in label to using following method
duration.adjustsFontSizeToFitWidth=YES;
currentTime.adjustsFontSizeToFitWidth=YES;
progressBar.minimumValue=0.0;
_data=[[NSMutableData alloc]init];
[self loadData];
backgroundImg=[[UIImageView alloc]init];
backgroundImg.image=[UIImage imageNamed:#"image.png"];
[self.view addSubview:backgroundImg];
}
-(void)pausePlaybackForPlayer:(AVAudioPlayer *)p
{
[p pause];
[self updateViewForPlayerState:p];
}
-(void)startPlaybackForPlayer:(AVAudioPlayer *)p
{
if([p play])
{
[self updateViewForPlayerState:p];
}
else
NSLog(#"Could not play %#\n", p.url);
}
-(IBAction)playButtonPressed:(UIButton *)sender
{
if (avPlayer.playing==YES)
[self pausePlaybackForPlayer:avPlayer];
else
[self startPlaybackForPlayer:avPlayer];
}
-(IBAction)rewindButtonPressed:(UIButton *)sender
{
if(rewindTimer)
[rewindTimer invalidate];
rewindTimer=[NSTimer scheduledTimerWithTimeInterval:SKIP_INTERVAL target:self selector:#selector(rewind) userInfo:avPlayer repeats:YES];
}
-(IBAction)rewindButtonReleased:(UIButton *)sender
{
if(rewindTimer)
[rewindTimer invalidate];
rewindTimer=nil;
}
-(IBAction)forwardButtonPressed:(UIButton *)sender
{
if(forwardTimer)
[forwardTimer invalidate];
forwardTimer=[NSTimer scheduledTimerWithTimeInterval:SKIP_TIME target:self selector:#selector(forward) userInfo:avPlayer repeats:YES];
}
-(IBAction)forwardButtonReleased:(UIButton *)sender
{
if(forwardTimer)
[forwardTimer invalidate];
forwardTimer=nil;
}
-(IBAction)volumeSliderMoved:(UISlider *)sender
{
avPlayer.volume=[sender value];
}
-(IBAction)progressBarMoved:(UISlider *)sender
{
avPlayer.currentTime=[sender value];
[self updateCurrentTimeForPlayer:avPlayer];
}
Audioviewcontroller.h
#interface AudioViewController : UIViewController<AVAudioPlayerDelegate,NSURLConnectionDelegate>
{
IBOutlet UILabel *astroName;
IBOutlet UIButton *playButton;
IBOutlet UIButton *forwardButton;
IBOutlet UIButton *rewindButton;
IBOutlet UISlider *volumeSlider;
IBOutlet UILabel *currentTime;
IBOutlet UILabel *duration;
IBOutlet UISlider *progressBar;
UIImage *playButtonBG;
UIImage *pauseButtonBG;
NSTimer *forwardTimer;
NSTimer *rewindTimer;
NSTimer *updateTimer;
UIImageView *backgroundImg;
BOOL inBackground;
NSURLConnection *connect;
NSMutableData *responseData;
NSMutableData *downloadData;
NSMutableString *responseString;
NSMutableArray *urlsArray;
NSMutableData *_data;
NSDictionary *dataDictionary;
}
- (IBAction)playButtonPressed:(UIButton*)sender;
- (IBAction)rewindButtonPressed:(UIButton*)sender;
- (IBAction)rewindButtonReleased:(UIButton*)sender;
- (IBAction)forwardButtonPressed:(UIButton*)sender;
- (IBAction)forwardButtonReleased:(UIButton*)sender;
- (IBAction)volumeSliderMoved:(UISlider*)sender;
-(IBAction)progressBarMoved:(UISlider *)sender;
-(void)registerForBackGroundNotification;
#property(nonatomic,retain) UILabel *astroName;
#property(nonatomic,retain) UIButton *playButton;
#property(nonatomic,retain) UIButton *pauseButton;
#property(nonatomic,retain) UIButton *forwardButton;
#property(nonatomic,retain) UIButton *rewindButton;
#property(nonatomic,retain) UISlider *volumeSlider;
#property(nonatomic,retain) UISlider *progressBar;
#property(nonatomic,retain) UILabel *currentTime;
#property(nonatomic,retain) UILabel *duration;
#property(nonatomic,retain) NSTimer *updateTimer;
#property(nonatomic,retain) AVAudioPlayer *avPlayer;
#property(nonatomic,assign) BOOL *inBackGround;
#property(nonatomic,retain) UIImageView *backgroundImg;
#property(retain, nonatomic) NSMutableData* responseData;
#property(nonatomic,strong) NSData *downloadData;
#property(nonatomic,strong) NSMutableString *responseString;
#property (nonatomic,assign)int selectedIndex;
#end
Just try to comment this line of code. You have added UIImageView to self.view after adding all button. Please add this by using storyboard.
//[self.view addSubview:backgroundImg];
Good Luck !!
I'm currently trying to use a custom class for the AVAudioPlayer and all works great for playing a audio file but when it comes to stoping the said file it just skips over the stop command and plays another on top of the current file,
If anyone has any ideas on why this is happening I'd really appreciate the help.
Below is my AudioPlayer1.h file:
#import <Foundation/Foundation.h>
#import "AVFoundation/AVAudioPlayer.h"
#import <AVFoundation/AVFoundation.h>
#interface AudioPlayer1 : NSObject <AVAudioPlayerDelegate> {
AVAudioPlayer *player;
BOOL playing;
NSTimeInterval duration;
}
#property (nonatomic, assign) AVAudioPlayer *player;
#property(readonly, getter=isPlaying) BOOL playing;
#property (readonly) NSTimeInterval duration;
-(void)GetSoundFileDuration:(NSString *) sound_file_name;
-(void)play;
-(void)stop;
-(void)setVolume:(float) volume;
- (void)audioPlayerDidFinishPlaying:(AVAudioPlayer *)player successfully:(BOOL)flag;
- (void)PlaySoundFile:(NSString *) sound_file_name;
- (void)notPlaying;
#end
Below is the contents of AudioPlayer1.m file:
#import "AudioPlayer1.h"
#implementation AudioPlayer1
#synthesize player;
#synthesize duration;
#synthesize playing;
- (id)init
{
self = [super init];
if (self != nil)
{
player = [[AVAudioPlayer alloc] init];
[player initWithContentsOfURL:nil error:nil];
player.delegate = self;
}
return self;
}
- (void) setVolume:(float)volume
{
[player setVolume:volume];
}
- (void)PlaySoundFile:(NSString *) sound_file_name
{
[player stop];
NSURL *sound_file = [[NSURL alloc] initFileURLWithPath: [[NSBundle mainBundle] pathForResource:sound_file_name ofType:#"mp3"]];
[player initWithContentsOfURL:sound_file error:nil];
[player prepareToPlay];
playing = YES;
[sound_file release];
}
- (void)audioPlayerDidFinishPlaying:(AVAudioPlayer *)player successfully:(BOOL)flag
{
NSLog(#"audioPlayerDidFinishPlaying");
playing = NO;
}
- (void) play
{
NSLog(#"Play Called");
[player setVolume:0.1];
[player play];
playing = YES;
}
- (void) stop
{
NSLog(#"Stop Called");
[player setVolume:0.0];
[player stop];
playing = NO;
}
-(void)GetSoundFileDuration:(NSString *) sound_file_name
{
NSLog(#"%#",duration);
}
-(void) notPlaying
{
playing = NO;
}
#end
And below is the code that would start the audio:
- (IBAction)WPNaritive01:(id)sender
{
AudioPlayer1 * player = [[AudioPlayer1 alloc] init ];
if (player.playing == YES)
{
[player stop];
}
if (player.playing == NO)
{
[player PlaySoundFile:#"1"];
[player setVolume:0.1];
[player play];
}
}
I apologize about the odd layout to the code as I'm still learning. I've only now found out about making these classes by chance from reading another question. lol
Ok, I've figured it out.
Rather than creating the object over and over like I was perhaps doing in the code above I should have been alloc and init the player in viewdidload rather than doing it in the IBAction like I did.
This is what I did incase anyone else comes across the same problem:
LockonLocation.h
#interface LockOnLocation : UIViewController {
AudioPlayer1 *player;
...
LockonLocation.m
- (void)viewDidLoad
player = [[AudioPlayer1 alloc] init ];
...
And I used the same code as above to play the audio so that was correct.
If anyone has any other advice for me or any other members I'd love to hear from you...
Hello Okay right I have a view called 'RecordViewController' this has the voice recorder function so once the user presses 'record', it records their voice, I have also created a 'back' button. But once that back button is tapped the voice recording also stops. I want it so once the user goes back, its still recording their voice. Here is the code I used:
.h
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import <CoreAudio/CoreAudioTypes.h>
#interface RecordViewController : UIViewController <AVAudioRecorderDelegate> {
IBOutlet UIButton * btnStart;
IBOutlet UIButton * btnPlay;
IBOutlet UIActivityIndicatorView * actSpinner;
BOOL toggle;
NSURL * recordedTmpFile;
AVAudioRecorder * recorder;
NSError * error;
NSTimer *theTimer;
IBOutlet UILabel *seconds;
int mainInt;
NSString *timeRemainingString;
}
#property (nonatomic,retain)IBOutlet UIActivityIndicatorView * actSpinner;
#property (nonatomic,retain)IBOutlet UIButton * btnStart;
#property (nonatomic,retain)IBOutlet UIButton * btnPlay;
- (IBAction) start_button_pressed;
- (IBAction) play_button_pressed;
- (IBAction)goBack:(id)sender;
- (void)countUp;
#end
.m
#import "RecordViewController.h"
#implementation RecordViewController
#synthesize actSpinner, btnStart, btnPlay;
- (void)countUp {
mainInt += 1;
seconds.text = [NSString stringWithFormat:#"%02d", mainInt];
}
- (IBAction)goBack:(id)sender; {
[self dismissModalViewControllerAnimated:YES];
}
- (void)viewDidLoad {
[super viewDidLoad];
//Start the toggle in true mode.
toggle = YES;
btnPlay.hidden = YES;
//Instanciate an instance of the AVAudioSession object.
AVAudioSession * audioSession = [AVAudioSession sharedInstance];
//Setup the audioSession for playback and record.
//We could just use record and then switch it to playback leter, but
//since we are going to do both lets set it up once.
[audioSession setCategory:AVAudioSessionCategoryPlayAndRecord
error: &error];
//Activate the session
[audioSession setActive:YES error: &error];
}
- (IBAction)start_button_pressed {
if(toggle) {
toggle = NO;
[actSpinner startAnimating];
[btnStart setImage:[UIImage imageNamed:#"stoprecording.png"]
forState:UIControlStateNormal];
mainInt = 0;
theTimer = [NSTimer scheduledTimerWithTimeInterval:1.0 target:self selector:#selector(countUp) userInfo:nil repeats:YES];
btnPlay.enabled = toggle;
btnPlay.hidden = !toggle;
//Begin the recording session.
//Error handling removed. Please add to your own code.
//Setup the dictionary object with all the recording settings that this
//Recording sessoin will use
//Its not clear to me which of these are required and which are the bare minimum.
//This is a good resource: http://www.totodotnet.net/tag/avaudiorecorder/
NSMutableDictionary* recordSetting = [[NSMutableDictionary alloc] init];
[recordSetting setValue :[NSNumber numberWithInt:kAudioFormatAppleIMA4] forKey:AVFormatIDKey];
[recordSetting setValue:[NSNumber numberWithFloat:44100.0] forKey:AVSampleRateKey];
[recordSetting setValue:[NSNumber numberWithInt:2] forKey:AVNumberOfChannelsKey];
//Now that we have our settings we are going to instanciate an instance of our recorder instance.
//Generate a temp file for use by the recording.
//This sample was one I found online and seems to be a good choice for making a tmp file that
//will not overwrite an existing one.
NSString *fileName = [NSString stringWithFormat:#"%.0f.caf", [NSDate timeIntervalSinceReferenceDate] * 1000.0];
NSString *path = [NSTemporaryDirectory() stringByAppendingPathComponent:fileName];
recordedTmpFile = [NSURL fileURLWithPath:path];
NSLog(#"Using File called: %#",recordedTmpFile);
//Setup the recorder to use this file and record to it.
recorder = [[AVAudioRecorder alloc] initWithURL:recordedTmpFile settings:recordSetting error:&error];
//Use the recorder to start the recording.
//Im not sure why we set the delegate to self yet.
//Found this in antother example, but Im fuzzy on this still.
[recorder setDelegate:self];
//We call this to start the recording process and initialize
//the subsstems so that when we actually say "record" it starts right away.
[recorder prepareToRecord];
//Start the actual Recording
[recorder record];
//There is an optional method for doing the recording for a limited time see
//[recorder recordForDuration:(NSTimeInterval) 10]
} else {
toggle = YES;
[actSpinner stopAnimating];
[btnStart setImage:[UIImage imageNamed:#"recordrecord.png"] forState:UIControlStateNormal];
btnPlay.enabled = toggle;
btnPlay.hidden = !toggle;
[theTimer invalidate];
NSLog(#"Using File called: %#",recordedTmpFile);
//Stop the recorder.
[recorder stop];
}
}
- (IBAction)play_button_pressed{
//The play button was pressed...
//Setup the AVAudioPlayer to play the file that we just recorded.
AVAudioPlayer * avPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:recordedTmpFile error:&error];
[avPlayer prepareToPlay];
[avPlayer play];
}
- (void)viewDidUnload {
// Release any retained subviews of the main view.
// e.g. self.myOutlet = nil;
//Clean up the temp file.
NSFileManager * fm = [NSFileManager defaultManager];
[fm removeItemAtPath:[recordedTmpFile path] error:&error];
//Call the dealloc on the remaining objects.
[recorder dealloc];
recorder = nil;
recordedTmpFile = nil;
}
#end
As you have structured your code now, the audio recording code is completely dependent on the presence of the user interface. As soon as your view controller gets deallocated, the audio recorder gets destroyed, too.
You have to decouple the audio recording code from the user interface code. Create a separate class that contains all properties and functionality that is related to recording. Then create an instance of that class outside your view controller (e.g. in your application delegate) and only pass this object to your view controller. The important thing is that the recorder object is totally unaffected by the presence or absence of the view controller.
I am using the AVAudioPlayer framework, and I have several sounds that play one at a time. When a sound is finished playing, I want the application to do something. I tried to use audioPlayerDidFinishPlaying to do the action at the end of the first sound, but I couldn't use that for the second sound because I got a redefinition error. Can I use NSTimeInterval to get the duration of a sound?
// ViewController.h
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import <AVFoundation/AVAudioPlayer.h>
#interface ViewController : UIViewController {
UIButton *begin;
UIWindow *firstTestWindow;
UIWindow *secondTestWindow;
AVAudioPlayer *player;
NSTimeInterval duration;
}
#property (nonatomic, retain) IBOutlet UIButton *begin;
#property (nonatomic, retain) IBOutlet UIWindow *firstTestWindow;
#property (nonatomic, retain) IBOutlet UIWindow *secondTestWindow;
#property (nonatomic, retain) AVAudioPlayer *player;
#property (readonly) NSTimeInterval duration;
- (IBAction)begin:(id)sender;
- (IBAction)doTapScreen:(id)sender;
#end
//ViewController.m
#import "ViewController.h"
#implementation ViewController
#synthesize begin, player, firstTestWindow, secondTestWindow;
//first sound
- (IBAction)begin:(id)sender; {
[firstTestWindow makeKeyAndVisible];
NSString *soundFilePath =
[[NSBundle mainBundle] pathForResource: #"Tone1"
ofType: #"mp3"];
NSURL *fileURL = [[NSURL alloc] initFileURLWithPath: soundFilePath];
AVAudioPlayer *newPlayer =
[[AVAudioPlayer alloc] initWithContentsOfURL: fileURL
error: nil];
[fileURL release];
self.player = newPlayer;
[newPlayer release];
[player prepareToPlay];
[self.player play];
}
//If user does not do anything by the end of the sound go to secondWindow
- (void) audioPlayerDidFinishPlaying: (AVAudioPlayer *) player
successfully: (BOOL) flag {
if (flag==YES) {
[secondWindow makeKeyAndVisible];
}
}
//second sound
- (IBAction)doTapScreen:(id)sender {
//When user touches the screen, either play the sound or go to the next window
if (self.player.playing) {
[self.player stop];
[thirdWindow makeKeyAndVisible];
}
else {
NSString *soundFilePath =
[[NSBundle mainBundle] pathForResource: #"Tone2"
ofType: #"mp3"];
NSURL *fileURL = [[NSURL alloc] initFileURLWithPath: soundFilePath];
AVAudioPlayer *newPlayer =
[[AVAudioPlayer alloc] initWithContentsOfURL: fileURL
error: nil];
[fileURL release];
self.player = newPlayer;
[newPlayer release];
[player prepareToPlay];
[self.player play];
}
}
When a sound is finished playing, I want the application to do something.
Then set yourself as the delegate and respond to audioPlayerDidFinishPlaying:successfully:.
In the code snippet you showed, you've done step 2 but not step 1: You're not setting yourself as the delegate.
I tried to use applicationDidFinishLaunching to do the action at the end of the first sound …
Brain fart? That method has nothing that I can see to do with playing sounds.
…, but I couldn't use that for the second sound because I got a redefinition error.
Huh? Did you implement the method twice or something instead of just comparing the player object in the method body?
It would help if you showed the exact error message you got.
Swift 3:
func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
print("sound file finished")
}
I want to play multiple MP3 files, in sequence (one after the other) , using AVAudioPlayer. I tried it, and it stops after playing the first MP3. However, if I go into debugger, it works fine.. any ideas? I read somewhere AVAudioPlayer plays audio in the background.. how do I prevent it from doing this?
Vas
I think the AVQueuePlayer (subclass of AVPlayer) does exactly this job (play a sequence of items) since iOS 4.1 :
http://developer.apple.com/library/ios/#documentation/AVFoundation/Reference/AVQueuePlayer_Class/Reference/Reference.html
I didn't try it myself however but will definitely give a try to it.
Use one AVAudioPlayer per sound.
Well, your code example didn't work out of the box for me. Soooo, I figured I'd respond with a fixed version:
Looper.h:
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#interface Looper : NSObject <AVAudioPlayerDelegate> {
AVAudioPlayer* player;
NSArray* fileNameQueue;
int index;
}
#property (nonatomic, retain) AVAudioPlayer* player;
#property (nonatomic, retain) NSArray* fileNameQueue;
- (id)initWithFileNameQueue:(NSArray*)queue;
- (void)audioPlayerDidFinishPlaying:(AVAudioPlayer *)player successfully:(BOOL)flag;
- (void)play:(int)i;
- (void)stop;
#end
Looper.m:
#import "Looper.h"
#implementation Looper
#synthesize player, fileNameQueue;
- (id)initWithFileNameQueue:(NSArray*)queue {
if ((self = [super init])) {
self.fileNameQueue = queue;
index = 0;
[self play:index];
}
return self;
}
- (void)audioPlayerDidFinishPlaying:(AVAudioPlayer *)player successfully:(BOOL)flag {
if (index < fileNameQueue.count) {
[self play:index];
} else {
//reached end of queue
}
}
- (void)play:(int)i {
self.player = [[AVAudioPlayer alloc] initWithContentsOfURL:[[NSURL alloc] initFileURLWithPath:[[NSBundle mainBundle] pathForResource:[fileNameQueue objectAtIndex:i] ofType:nil]] error:nil];
[player release];
player.delegate = self;
[player prepareToPlay];
[player play];
index++;
}
- (void)stop {
if (self.player.playing) [player stop];
}
- (void)dealloc {
self.fileNameQueue = nil;
self.player = nil;
[super dealloc];
}
#end
And here's how I would call it:
Looper * looper = [[Looper alloc] initWithFileNameQueue:[NSArray arrayWithObjects: audioFile, audioFile2, nil ]];
I only have slightly over a year of experience with iPhone/iPad development using Objective-C so feel free to respond with additional criticism.
I've implemented a class to handle this.
To use just do something like this:
[looper playAudioFiles:[NSArray arrayWithObjects:
#"add.mp3",
[NSString stringWithFormat:#"%d.mp3", numeral1.tag],
#"and.mp3",
[NSString stringWithFormat:#"%d.mp3", numeral2.tag],
nil
]];
Looper.m
#import "Looper.h"
#implementation Looper
#synthesize player, fileNameQueue;
- (id)initWithFileNameQueue:(NSArray*)queue {
if ((self = [super init])) {
self.fileNameQueue = queue;
index = 0;
[self play:index];
}
return self;
}
- (void)audioPlayerDidFinishPlaying:(AVAudioPlayer *)player successfully:(BOOL)flag {
if (index < fileNameQueue.count) {
[self play:index];
} else {
//reached end of queue
}
}
- (void)play:(int)i {
self.player = [[AVAudioPlayer alloc] initWithContentsOfURL:[[NSURL alloc] initFileURLWithPath:[[NSBundle mainBundle] pathForResource:[fileNameQueue objectAtIndex:i] ofType:nil]] error:nil];
[player release];
player.delegate = self;
[player prepareToPlay];
[player play];
index++;
}
- (void)stop {
if (self.player.playing) [player stop];
}
- (void)dealloc {
self.fileNameQueue = nil;
self.player = nil;
[super dealloc];
}
#end
Looper.h
#import <Foundation/Foundation.h>
#interface Looper : NSObject <AVAudioPlayerDelegate> {
AVAudioPlayer* player;
NSArray* fileNameQueue;
int index;
}
#property (nonatomic, retain) AVAudioPlayer* player;
#property (nonatomic, retain) NSArray* fileNameQueue;
- (id)initWithFileNameQueue:(NSArray*)queue;
- (void)audioPlayerDidFinishPlaying:(AVAudioPlayer *)player successfully:(BOOL)flag;
- (void)play:(int)i;
- (void)stop;
#end
It is a good idea to initialize, prepare the items, and queue ahead of time, for example on the viewDidLoad method.
If you are working on Swift,
override func viewDidLoad() {
super.viewDidLoad()
let item0 = AVPlayerItem.init(URL: NSBundle.mainBundle().URLForResource("url", withExtension: "wav")!)
let item1 = AVPlayerItem.init(URL: NSBundle.mainBundle().URLForResource("dog", withExtension: "aifc")!)
let item2 = AVPlayerItem.init(URL: NSBundle.mainBundle().URLForResource("GreatJob", withExtension: "wav")!)
let itemsToPlay:[AVPlayerItem] = [item0, item1, item2]
queuePlayer = AVQueuePlayer.init(items: itemsToPlay)
}
and then when an event occurs,
queuePlayer.play()
Notice that if you use the queue, you still might have some gaps between the sounds.
You can find the Objective-C version in the question How to do something when AVQueuePlayer finishes the last playeritem
Hope it helps.