I have searched a lot trying to figure this out; the code looks fine to me, but the behaviour doesn't reflect that.
I have a view that is presented (it's an audio recorder view). You can press record and it records to a file just fine (data exists in the file). I can then play the file back via the play button (the AVAudioPlayer points to that file).
However, when I close/dismiss that view and come back to it, the file will not play when Play is tapped, even though it should be exactly the same code and the file location has not changed.
UPDATE:
It seems that [audioPlayer play] returns NO. I have also looked into the data: when the view appears again and loads it, the data doesn't load correctly (the NSData printed via NSLog is mostly zeros), even though the file exists and has data in it (I can see and hear it after transferring it to my Mac).
This leads me to suspect that either I am loading the data wrong or AVAudioPlayer won't read the data for some reason...
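(As a quick sanity check, something like the following sketch could compare what the file system reports with what NSData actually loads; filepathstring is the same path used in the code below.)

    NSError *attrError = nil;
    NSDictionary *attrs = [[NSFileManager defaultManager] attributesOfItemAtPath:filepathstring error:&attrError];
    NSLog(@"Size on disk: %@", [attrs objectForKey:NSFileSize]);    // bytes according to the file system
    NSData *check = [NSData dataWithContentsOfFile:filepathstring];
    NSLog(@"Bytes loaded into NSData: %lu", (unsigned long)[check length]);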
Please take a look at the code below:
- (NSString *) removeCharsFrom: (NSString *) remover {
remover = [remover stringByTrimmingCharactersInSet:[NSCharacterSet whitespaceAndNewlineCharacterSet]];
remover = [remover stringByReplacingOccurrencesOfString:@" " withString:@"_"];
remover = [remover stringByReplacingOccurrencesOfString:@"/" withString:@"_"];
remover = [remover stringByReplacingOccurrencesOfString:@"\\" withString:@"_"];
remover = [remover stringByReplacingOccurrencesOfString:@":" withString:@"_"];
remover = [remover stringByReplacingOccurrencesOfString:@";" withString:@"_"];
remover = [remover stringByReplacingOccurrencesOfString:@"(" withString:@"_"];
remover = [remover stringByReplacingOccurrencesOfString:@")" withString:@"_"];
remover = [remover stringByReplacingOccurrencesOfString:@"£" withString:@"_"];
remover = [remover stringByReplacingOccurrencesOfString:@"$" withString:@"_"];
remover = [remover stringByReplacingOccurrencesOfString:@"&" withString:@"_"];
remover = [remover stringByReplacingOccurrencesOfString:@"'" withString:@"_"];
remover = [remover stringByReplacingOccurrencesOfString:@"{" withString:@"_"];
remover = [remover stringByReplacingOccurrencesOfString:@"}" withString:@"_"];
remover = [remover stringByReplacingOccurrencesOfString:@"[" withString:@"_"];
remover = [remover stringByReplacingOccurrencesOfString:@"]" withString:@"_"];
remover = [remover stringByReplacingOccurrencesOfString:@"\"" withString:@"_"];
return remover;
}
- (NSString *) audioPathForResource: (NSString *) audio {
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *saveDirectory = [paths objectAtIndex:0];
NSString *newFolder = [saveDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"/%@/Audio",catName]];
if (![[NSFileManager defaultManager] fileExistsAtPath:newFolder]) {
[[NSFileManager defaultManager] createDirectoryAtPath:newFolder withIntermediateDirectories:YES attributes:nil error:nil];
}
NSString *saveFileName = [NSString stringWithFormat:@"%@.caf",audio];
NSString *newFilePath = [newFolder stringByAppendingPathComponent:saveFileName];
return [newFilePath stringByAddingPercentEscapesUsingEncoding:NSUTF8StringEncoding];
}
- (IBAction)cancelTapped:(id)sender {
[self dismissModalViewControllerAnimated:YES];
}
- (IBAction)saveTapped:(id)sender {
[self.parentViewController performSelector:@selector(changeAddAudioIcon)];
[self dismissModalViewControllerAnimated:YES];
}
- (IBAction)trashTapped:(id)sender {
UIAlertView *alert =
[[UIAlertView alloc] initWithTitle: @"Delete"
message: @"Would you like to delete the audio file? Warning: This cannot be undone."
delegate: self
cancelButtonTitle: @"Cancel"
otherButtonTitles: @"Delete", nil];
[alert show];
[alert release];
}
- (IBAction)pauseTapped:(id)sender {
pauseBtn.enabled = NO;
playBtn.enabled = YES;
recordBtn.enabled = YES;
trashBtn.enabled = YES;
if (audioRecorder.recording)
{
[audioRecorder stop];
} else if (audioPlayer.playing) {
[audioPlayer stop];
}
}
- (IBAction)recordTapped:(id)sender {
if (!audioRecorder.recording)
{
playBtn.enabled = NO;
pauseBtn.enabled = YES;
trashBtn.enabled = NO;
[audioRecorder record];
}
}
- (IBAction)playTapped:(id)sender {
pauseBtn.enabled = YES;
recordBtn.enabled = NO;
trashBtn.enabled = YES;
NSError *error;
NSLog(@"%@",filepathstring);
NSURL *soundFileURL = [NSURL fileURLWithPath:filepathstring];
audioPlayer = [[AVAudioPlayer alloc]
initWithContentsOfURL:soundFileURL
error:&error];
audioPlayer.delegate = self;
if (error)
NSLog(@"Error: %@",
[error localizedDescription]);
else
[audioPlayer play];
}
- (void)alertView: (UIAlertView *)alertView didDismissWithButtonIndex:(NSInteger)buttonIndex {
switch (buttonIndex) {
case 0:
return;
break;
case 1:
{
NSError *error = nil;
[[NSFileManager defaultManager] removeItemAtPath:filepathstring error:&error];
trashBtn.enabled = NO;
}
break;
default:
break;
}
}
-(void)audioPlayerDidFinishPlaying:
(AVAudioPlayer *)player successfully:(BOOL)flag
{
recordBtn.enabled = YES;
pauseBtn.enabled = NO;
playBtn.enabled = YES;
if(player != audioPlayer) {
[player release];
}
}
-(void)audioPlayerDecodeErrorDidOccur:
(AVAudioPlayer *)player
error:(NSError *)error
{
NSLog(@"Decode Error occurred");
}
-(void)audioRecorderDidFinishRecording:
(AVAudioRecorder *)recorder
successfully:(BOOL)flag
{
NSLog(@"Recording success:%@",flag ? @"YES" : @"NO");
trashBtn.enabled = YES;
pauseBtn.enabled = NO;
playBtn.enabled = YES;
}
-(void)audioRecorderEncodeErrorDidOccur:
(AVAudioRecorder *)recorder
error:(NSError *)error
{
NSLog(@"Encode Error occurred");
}
#pragma mark - View lifecycle
- (void)viewDidLoad
{
[super viewDidLoad];
// Do any additional setup after loading the view from its nib.
}
- (void) viewWillAppear:(BOOL)animated {
catName = [NSString stringWithFormat:@"%@",[self removeCharsFrom:catName]];
testName = [NSString stringWithFormat:@"%@",[self removeCharsFrom:testName]];
filepathstring = [[self audioPathForResource:testName] retain];
NSLog(@"At start = %@",filepathstring);
if ([[NSFileManager defaultManager] fileExistsAtPath:filepathstring]) {
playBtn.enabled = YES;
trashBtn.enabled = YES;
recordBtn.enabled = YES;
}
else
{
playBtn.enabled = NO;
trashBtn.enabled = NO;
}
[[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayAndRecord error:nil];
NSURL *soundFileURL = [NSURL fileURLWithPath:filepathstring];
NSDictionary *recordSettings = [NSDictionary
dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:AVAudioQualityMin],
AVEncoderAudioQualityKey,
[NSNumber numberWithInt:16],
AVEncoderBitRateKey,
[NSNumber numberWithInt: 2],
AVNumberOfChannelsKey,
[NSNumber numberWithFloat:44100.0],
AVSampleRateKey,
nil];
NSError *error = nil;
audioRecorder = [[AVAudioRecorder alloc]
initWithURL:soundFileURL
settings:recordSettings
error:&error];
audioRecorder.delegate = self;
if (error)
{
NSLog(@"error: %@", [error localizedDescription]);
} else {
[audioRecorder prepareToRecord];
}
}
- (void)viewDidUnload
{
[self setCancelBtn:nil];
[self setSaveBtn:nil];
[self setTimeLabel:nil];
[self setDescriptionLabel:nil];
[self setToolsBar:nil];
[self setTrashBtn:nil];
[self setPauseBtn:nil];
[self setRecordBtn:nil];
[self setPlayBtn:nil];
[super viewDidUnload];
// Release any retained subviews of the main view.
// e.g. self.myOutlet = nil;
}
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation
{
// Return YES for supported orientations
if (UI_USER_INTERFACE_IDIOM() == UIUserInterfaceIdiomPad) {
return YES;
} else {
return UIInterfaceOrientationIsPortrait(interfaceOrientation);
}
}
- (void)dealloc {
[cancelBtn release];
[saveBtn release];
[timeLabel release];
[descriptionLabel release];
[toolsBar release];
[trashBtn release];
[pauseBtn release];
[recordBtn release];
[playBtn release];
[audioPlayer release];
[audioRecorder release];
[super dealloc];
}
Here is the answer:
NSData *data = [NSData dataWithContentsOfMappedFile:[NSString stringWithFormat:@"%@",filepathstring]];
AVAudioPlayer *ap = [[AVAudioPlayer alloc]
initWithData:data error:&error];
It seems it just wouldn't work with filepathstring directly, but loading it via NSData from the NSString path worked fine. Obvious now!
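For completeness, a play action built around that fix might look roughly like this. This is only a sketch assembled from the answer and the question's own playTapped: (same ivars, same delegate wiring), not the exact code that shipped:

    - (IBAction)playTapped:(id)sender {
        pauseBtn.enabled = YES;
        recordBtn.enabled = NO;
        trashBtn.enabled = YES;
        NSError *error = nil;
        // Load the recording into NSData first (as in the answer), then hand the data to AVAudioPlayer.
        NSData *data = [NSData dataWithContentsOfMappedFile:[NSString stringWithFormat:@"%@", filepathstring]];
        if (data == nil) {
            NSLog(@"Could not read audio data at %@", filepathstring);
            return;
        }
        [audioPlayer release];
        audioPlayer = [[AVAudioPlayer alloc] initWithData:data error:&error];
        audioPlayer.delegate = self;
        if (error)
            NSLog(@"Error: %@", [error localizedDescription]);
        else
            [audioPlayer play];
    }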
Related
I have to implement functionality to repeatedly pause and resume video capture in a single session, with each new segment (the segments captured after each pause) added to the same video file, using AVFoundation. Currently, every time I press "stop" and then "record" again, it just saves a new video file to my iPhone's Documents directory and starts capturing to a new file. I need to be able to press the "record/stop" button repeatedly, capture video and audio only while recording is active, and then, when the "done" button is pressed, have a single AV file with all the segments together. And all of this needs to happen in the same capture session / preview session.
I am not using AVAssetWriterInput.
The only way I can think of to try this is when the "done" button is pressed, taking each individual output file and combining them together into a single file.
This code works on iOS 5 but not on iOS 6. On iOS 6, the first time I pause recording (stop recording) the AVCaptureFileOutputRecordingDelegate method (captureOutput: didFinishRecordingToOutputFileAtURL: fromConnections: error:) is called. After that, when I start recording again the delegate method is called again, but it is no longer called when I stop the recording.
I need a solution for that issue. Please help me.
//View LifeCycle
- (void)viewDidLoad
{
[super viewDidLoad];
self.finalRecordedVideoName = [self stringWithNewUUID];
arrVideoName = [[NSMutableArray alloc]initWithCapacity:0];
arrOutputUrl = [[NSMutableArray alloc] initWithCapacity:0];
CaptureSession = [[AVCaptureSession alloc] init];
captureDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
if ([captureDevices count] > 0)
{
NSError *error;
VideoInputDevice = [[AVCaptureDeviceInput alloc] initWithDevice:[self backFacingCamera] error:&error];
if (!error)
{
if ([CaptureSession canAddInput:VideoInputDevice])
[CaptureSession addInput:VideoInputDevice];
else
NSLog(@"Couldn't add video input");
}
else
{
NSLog(@"Couldn't create video input");
}
}
else
{
NSLog(@"Couldn't create video capture device");
}
//ADD VIDEO PREVIEW LAYER
NSLog(@"Adding video preview layer");
AVCaptureVideoPreviewLayer *layer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:CaptureSession];
[self setPreviewLayer:layer];
UIDeviceOrientation currentOrientation = [UIDevice currentDevice].orientation;
NSLog(@"%d",currentOrientation);
if (currentOrientation == UIDeviceOrientationPortrait)
{
PreviewLayer.orientation = AVCaptureVideoOrientationPortrait;
}
else if (currentOrientation == UIDeviceOrientationPortraitUpsideDown)
{
PreviewLayer.orientation = AVCaptureVideoOrientationPortraitUpsideDown;
}
else if (currentOrientation == UIDeviceOrientationLandscapeRight)
{
PreviewLayer.orientation = AVCaptureVideoOrientationLandscapeRight;
}
else if (currentOrientation == UIDeviceOrientationLandscapeLeft)
{
PreviewLayer.orientation = AVCaptureVideoOrientationLandscapeLeft;
}
[[self PreviewLayer] setVideoGravity:AVLayerVideoGravityResizeAspectFill];
//ADD MOVIE FILE OUTPUT
NSLog(@"Adding movie file output");
MovieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
VideoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
[VideoDataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
NSString* key = (NSString*)kCVPixelBufferBytesPerRowAlignmentKey;
NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
[VideoDataOutput setVideoSettings:videoSettings];
Float64 TotalSeconds = 60; //Total seconds
int32_t preferredTimeScale = 30; //Frames per second
CMTime maxDuration = CMTimeMakeWithSeconds(TotalSeconds, preferredTimeScale);//<<SET MAX DURATION
MovieFileOutput.maxRecordedDuration = maxDuration;
MovieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024; //<<SET MIN FREE SPACE IN BYTES FOR RECORDING TO CONTINUE ON A VOLUME
//SET THE CONNECTION PROPERTIES (output properties)
[self CameraSetOutputProperties]; //(We call a method as it also has to be done after changing camera)
AVCaptureConnection *videoConnection = nil;
for ( AVCaptureConnection *connection in [MovieFileOutput connections] )
{
NSLog(@"%@", connection);
for ( AVCaptureInputPort *port in [connection inputPorts] )
{
NSLog(@"%@", port);
if ( [[port mediaType] isEqual:AVMediaTypeVideo] )
{
videoConnection = connection;
}
}
}
if([videoConnection isVideoOrientationSupported]) // **Here it is, it's always false**
{
[videoConnection setVideoOrientation:[[UIDevice currentDevice] orientation]];
}
NSLog(@"Setting image quality");
[CaptureSession setSessionPreset:AVCaptureSessionPresetLow];
//----- DISPLAY THE PREVIEW LAYER -----
CGRect layerRect = CGRectMake(5, 5, 299, ([[UIScreen mainScreen] bounds].size.height == 568)?438:348);
[self.PreviewLayer setBounds:layerRect];
[self.PreviewLayer setPosition:CGPointMake(CGRectGetMidX(layerRect),CGRectGetMidY(layerRect))];
if ([CaptureSession canAddOutput:MovieFileOutput])
[CaptureSession addOutput:MovieFileOutput];
[CaptureSession addOutput:VideoDataOutput];
//We use this instead so it goes on a layer behind our UI controls (avoids us having to manually bring each control to the front):
CameraView = [[UIView alloc] init];
[videoPreviewLayer addSubview:CameraView];
[videoPreviewLayer sendSubviewToBack:CameraView];
[[CameraView layer] addSublayer:PreviewLayer];
//----- START THE CAPTURE SESSION RUNNING -----
[CaptureSession startRunning];
}
#pragma mark - IBAction Methods
-(IBAction)btnStartAndStopPressed:(id)sender
{
UIButton *StartAndStopButton = (UIButton*)sender;
if ([StartAndStopButton isSelected] == NO)
{
[StartAndStopButton setSelected:YES];
[btnPauseAndResume setEnabled:YES];
[btnBack setEnabled:NO];
[btnSwitchCameraInput setHidden:YES];
NSDate *date = [NSDate date];
NSLog(@" date %@",date);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *recordedFileName = nil;
recordedFileName = [NSString stringWithFormat:@"output%@.mov",date];
NSString *documentsDirectory = [paths objectAtIndex:0];
self.outputPath = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"%@",recordedFileName]];
NSLog(@"%@",self.outputPath);
[arrVideoName addObject:recordedFileName];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:self.outputPath];
if ([[NSFileManager defaultManager] fileExistsAtPath:self.outputPath])
{
NSError *error;
if ([[NSFileManager defaultManager] removeItemAtPath:self.outputPath error:&error] == NO)
{
//Error - handle if requried
}
}
//Start recording
[MovieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
recordingTimer = [NSTimer scheduledTimerWithTimeInterval:1.0 target:self selector:@selector(VideoRecording) userInfo:nil repeats:YES];
}
else
{
[StartAndStopButton setSelected:NO];
[btnPauseAndResume setEnabled:NO];
[btnBack setEnabled:YES];
[btnSwitchCameraInput setHidden:NO];
NSLog(@"STOP RECORDING");
WeAreRecording = NO;
[MovieFileOutput stopRecording];
[((ActOutAppDelegate *)ActOut_AppDelegate) showLoadingViewOnView:self.view withLabel:@"Please wait...."];
if ([recordingTimer isValid])
{
[recordingTimer invalidate];
recordingTimer = nil;
recordingTime = 30;
}
stopRecording = YES;
}
}
- (IBAction)btnPauseAndResumePressed:(id)sender
{
UIButton *PauseAndResumeButton = (UIButton*)sender;
if (PauseAndResumeButton.selected == NO)
{
PauseAndResumeButton.selected = YES;
NSLog(@"recording paused");
WeAreRecording = NO;
[MovieFileOutput stopRecording];
[self pauseTimer:recordingTimer];
[btnStartAndStop setEnabled:NO];
[btnBack setEnabled:YES];
[btnSwitchCameraInput setHidden:NO];
}
else
{
PauseAndResumeButton.selected = NO;
NSLog(@"recording resumed");
[btnStartAndStop setEnabled:YES];
[btnBack setEnabled:NO];
[btnSwitchCameraInput setHidden:YES];
WeAreRecording = YES;
NSDate *date = [NSDate date];
NSLog(@" date %@",date);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,NSUserDomainMask, YES);
NSString *recordedFileName = nil;
recordedFileName = [NSString stringWithFormat:@"output%@.mov",date];
NSString *documentsDirectory = [paths objectAtIndex:0];
self.outputPath = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"%@",recordedFileName]];
NSLog(@"%@",self.outputPath);
[arrVideoName addObject:recordedFileName];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:self.outputPath];
if ([[NSFileManager defaultManager] fileExistsAtPath:self.outputPath])
{
NSError *error;
if ([[NSFileManager defaultManager] removeItemAtPath:self.outputPath error:&error] == NO)
{
//Error - handle if requried
}
}
[self resumeTimer:recordingTimer];
//Start recording
[MovieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
}
}
- (void) CameraSetOutputProperties
{
//SET THE CONNECTION PROPERTIES (output properties)
AVCaptureConnection *CaptureConnection = [MovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
[CaptureConnection setVideoOrientation:AVCaptureVideoOrientationPortrait];
//Set frame rate (if requried)
CMTimeShow(CaptureConnection.videoMinFrameDuration);
CMTimeShow(CaptureConnection.videoMaxFrameDuration);
if (CaptureConnection.supportsVideoMinFrameDuration)
CaptureConnection.videoMinFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
if (CaptureConnection.supportsVideoMaxFrameDuration)
CaptureConnection.videoMaxFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
CMTimeShow(CaptureConnection.videoMinFrameDuration);
CMTimeShow(CaptureConnection.videoMaxFrameDuration);
}
- (AVCaptureDevice *) CameraWithPosition:(AVCaptureDevicePosition) Position
{
NSArray *Devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *Device in Devices)
{
if ([Device position] == Position)
{
NSLog(@"%d",Position);
return Device;
}
}
return nil;
}
#pragma mark - AVCaptureFileOutputRecordingDelegate Method
-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
if(videoWriterInput.readyForMoreMediaData && WeAreRecording) [videoWriterInput appendSampleBuffer:sampleBuffer];
for(AVCaptureConnection *captureConnection in [captureOutput connections])
{
if ([captureConnection isVideoOrientationSupported])
{
AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationLandscapeLeft;
[captureConnection setVideoOrientation:orientation];
}
}
UIDeviceOrientation curOr = [[UIDevice currentDevice] orientation];
CGAffineTransform t;
if (curOr == UIDeviceOrientationPortrait)
{
t = CGAffineTransformMakeRotation(-M_PI / 2);
}
else if (curOr == UIDeviceOrientationPortraitUpsideDown)
{
t = CGAffineTransformMakeRotation(M_PI / 2);
}
else if (curOr == UIDeviceOrientationLandscapeRight)
{
t = CGAffineTransformMakeRotation(M_PI);
}
else
{
t = CGAffineTransformMakeRotation(0);
}
}
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
NSLog(@"didFinishRecordingToOutputFileAtURL - enter");
NSLog(@"output file url : %@", [outputFileURL absoluteString]);
BOOL RecordedSuccessfully = YES;
if ([error code] != noErr)
{
// A problem occurred: Find out if the recording was successful.
id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
if (value)
{
RecordedSuccessfully = [value boolValue];
}
}
AVCaptureConnection *videoConnection=nil;
for ( AVCaptureConnection *connection in [MovieFileOutput connections] )
{
NSLog(@"%@", connection);
for ( AVCaptureInputPort *port in [connection inputPorts] )
{
NSLog(@"%@", port);
if ( [[port mediaType] isEqual:AVMediaTypeVideo] )
{
videoConnection = connection;
}
}
}
if([videoConnection isVideoOrientationSupported]) // **Here it is, it's always false**
{
[videoConnection setVideoOrientation:[[UIDevice currentDevice] orientation]];
}
NSLog(@"Setting image quality");
NSData *videoData = [NSData dataWithContentsOfURL:outputFileURL];
[videoData writeToFile:self.outputPath atomically:NO];
[arrOutputUrl addObject:outputFileURL];
if (stopRecording)
{
[self mergeMultipleVideo];
}
}
//Method to merge multiple videos
-(void)mergeMultipleVideo
{
mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
CMTime nextClipStartTime = kCMTimeZero;
NSLog(@"Array of output file url : %@", arrOutputUrl);
if (arrOutputUrl.count > 0)
{
for(int i = 0 ;i < [arrOutputUrl count];i++)
{
AVURLAsset* VideoAsset = [[AVURLAsset alloc]initWithURL:[arrOutputUrl objectAtIndex:i] options:nil];
CMTimeRange timeRangeInAsset;
timeRangeInAsset = CMTimeRangeMake(kCMTimeZero, [VideoAsset duration]);
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, VideoAsset.duration) ofTrack:[[VideoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRangeInAsset.duration);
}
}
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.mov",self.finalRecordedVideoName]];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
exportSession.outputURL=url;
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
exportSession.shouldOptimizeForNetworkUse = YES;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
[self exportDidFinish:exportSession path:myPathDocs];
});
}];
}
-(void)exportDidFinish:(AVAssetExportSession*)session path:(NSString*)outputVideoPath
{
NSLog(@"session.status : %d",session.status);
if (session.status == AVAssetExportSessionStatusCompleted)
{
NSURL *outputURL = session.outputURL;
NSData *videoData = [NSData dataWithContentsOfURL:outputURL];
[videoData writeToFile:outputVideoPath atomically:NO];
if ([arrVideoName count] > 0)
{
for (int i = 0; i < [arrVideoName count]; i++)
{
NSArray* documentPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString* fullFilePath = [[documentPaths objectAtIndex:0] stringByAppendingPathComponent: [NSString stringWithFormat:@"%@",[arrVideoName objectAtIndex:i]]];
NSLog(@"Full path of file to be deleted: %@",fullFilePath);
NSFileManager *fileManager = [NSFileManager defaultManager];
NSError *error;
if ([fileManager fileExistsAtPath:fullFilePath])
{
[fileManager removeItemAtPath:fullFilePath error:&error];
}
}
[arrVideoName removeAllObjects];
}
if (arrOutputUrl.count > 0)
{
[arrOutputUrl removeAllObjects];
}
[((ActOutAppDelegate *)ActOut_AppDelegate) removeLoadingViewfromView:self.view];
[self.view addSubview:afterRecordingPopupView];
}
}
Look at the AVCaptureConnection's enabled property. For your output connection, set enabled to NO instead of stopping the session.
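For illustration, that could look something like the sketch below, run against the MovieFileOutput ivar from the question. The pauseCapture/resumeCapture method names are made up here; only AVCaptureConnection's enabled property is doing the work:

    // Hypothetical pause/resume helpers: the session keeps running and recording
    // continues into the same file; frames are simply dropped while enabled is NO.
    - (void)pauseCapture
    {
        for (AVCaptureConnection *connection in [MovieFileOutput connections]) {
            connection.enabled = NO;
        }
    }

    - (void)resumeCapture
    {
        for (AVCaptureConnection *connection in [MovieFileOutput connections]) {
            connection.enabled = YES;
        }
    }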
//I have taken an array of 2 songs
NSMutableArray *enrique = [[NSMutableArray alloc]init];
[enrique addObject:@"add.mp3"];
[enrique addObject:@"hero.mp3"];
// this is the UITableView didSelectRowAtIndexPath: section
- (void)tableView:(UITableView *)tableView didSelectRowAtIndexPath:(NSIndexPath *)indexPath{
cell = [tableView cellForRowAtIndexPath:indexPath];
cellText = cell.textLabel.text;
lbl.text=cellText;
player=[passarray objectAtIndex:indexPath.row];
NSLog(@"mp3 %@",player);
// [passarray indexOfObject:player];
[self.player prepareToPlay];
// [player play];
}
- (void)tableView:(UITableView *)tableView didSelectRowAtIndexPath:(NSIndexPath *)indexPath
{
[audioPlayer stop];
[tableView deselectRowAtIndexPath:indexPath animated:YES];
songNumber = indexPath.row;
[self loadPlayer:songNumber];
}
-(void)loadPlayer:(int)songIndex
{
if(songIndex <= 0)
{
[previousButton setEnabled:NO];
}
else
{
[previousButton setEnabled:YES];
}
if(songIndex+1 >= self.arySongsList.count)
{
[nextButton setEnabled:NO];
}
else
{
[nextButton setEnabled:YES];
}
if(songIndex < self.arySongsList.count)
{
NSURL *audioFileLocationURL;
audioFileLocationURL = [NSURL URLWithString:[NSString stringWithFormat:@"%@",[self.arySongsList objectAtIndex:songIndex]]];
NSString *str = [[[NSString alloc]initWithFormat:@"%@",audioFileLocationURL]autorelease];
str = [str lastPathComponent];
str = [str stringByReplacingOccurrencesOfString:@"%20" withString:@" "];
str = [str stringByReplacingOccurrencesOfString:@"%5B" withString:@"["];
str = [str stringByReplacingOccurrencesOfString:@"%5D" withString:@"]"];
NSError *error;
if(songIndex == self.arySongsList.count)
{
NSURL *url = [NSURL URLWithString:@"http://sound18.mp3pk.com/pop_remix/ebodf11/ebodf11-15(www.songs.pk).mp3"];
NSData *data = [NSData dataWithContentsOfURL:url];
audioPlayer = [[AVAudioPlayer alloc] initWithData:data error:&error];
}
else
{
audioPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:audioFileLocationURL error:&error];
}
[audioPlayer setNumberOfLoops:0];
[audioPlayer setDelegate:self];
if (error)
{
NSLog(#"%#", [error localizedDescription]);
[[self volumeControl] setEnabled:NO];
[[self playPauseButton] setEnabled:NO];
[[self alertLabel] setText:@"Unable to load file"];
[[self alertLabel] setHidden:NO];
}
else
{
[[self alertLabel] setText:[NSString stringWithFormat:@"%@ has loaded", str]];
[[self alertLabel] setHidden:NO];
//Make sure the system follows our playback status
[[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback error:nil];
[[AVAudioSession sharedInstance] setActive: YES error: nil];
//Load the audio into memory
[audioPlayer prepareToPlay];
}
}
if (!self.audioPlayer.playing)
{
[self playAudio];
}
else if (self.audioPlayer.playing)
{
[self pauseAudio];
}
}
I'm new to iPhone Development.
I have integrated iCloud storage in my application. I am successful in uploading documents on iCloud.
My document's size is around 126799 bytes. During uploading on iCloud I have made sure that a proper document is uploaded on iCloud by printing its length and content on the console. But when I am fetching document from iCloud it only gives me 3/4 of the content of that document. I have also checked this on console by printing its length and content.
/////====== variables are declared in interface file
- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
NSURL *ubiq = [[NSFileManager defaultManager]
URLForUbiquityContainerIdentifier:nil];
if (ubiq)
{
NSLog(@"iCloud access at %@", ubiq);
// TODO: Load document...
[self loadDocument];
}
else
{
NSLog(@"No iCloud access");
}
}
- (void)loadDocument{
NSMetadataQuery *query = [[NSMetadataQuery alloc] init];
_query = query;
[query setSearchScopes:[NSArray arrayWithObject:NSMetadataQueryUbiquitousDocumentsScope]];
NSString *filename = @"supplimentlistdescription.txt";
NSPredicate *pred = [NSPredicate predicateWithFormat:@"%K like '%@'",filename,NSMetadataItemFSNameKey];
[query setPredicate:pred];
[[NSNotificationCenter defaultCenter]
addObserver:self
selector:@selector(queryDidFinishGathering:)
name:NSMetadataQueryDidFinishGatheringNotification
object:query];
[query startQuery];
}
- (void)queryDidFinishGathering:(NSNotification *)notification {
NSMetadataQuery *query = [notification object];
[query disableUpdates];
[query stopQuery];
[[NSNotificationCenter defaultCenter] removeObserver:self
name:NSMetadataQueryDidFinishGatheringNotification
object:query];
_query = nil;
[self loadData:query];
}
- (void)loadData:(NSMetadataQuery *)query {
if ([query resultCount] == 1)
{
NSMetadataItem *item = [query resultAtIndex:0];
NSURL *url = [item valueForAttribute:NSMetadataItemURLKey];
Note *doc = [[Note alloc] initWithFileURL:url];
self.doc = doc;
[self.doc openWithCompletionHandler:^(BOOL success)
{
if (success)
{
NSLog(@"iCloud document opened");
}
else
{
NSLog(@"failed opening document from iCloud");
}
}
];
}
else
{
NSFileManager *filemgr = [NSFileManager defaultManager];
NSString *fileurlstring = [NSString stringWithFormat:@"Documents/Federal Rules of Civil Procedure"];
NSLog(@"fileurlstring:%@",fileurlstring);
ubiquityURL = [[filemgr URLForUbiquityContainerIdentifier:nil]
URLByAppendingPathComponent:fileurlstring];
[ubiquityURL retain];
NSLog(@"ubiquityURL1:%@",ubiquityURL);
if ([filemgr fileExistsAtPath:[ubiquityURL path]] == NO)
{
[ubiquityURL retain];
[filemgr createDirectoryAtURL:ubiquityURL withIntermediateDirectories:YES attributes:nil error:nil];
[ubiquityURL retain];
}
ubiquityURL = [ubiquityURL URLByAppendingPathComponent:@"supplimentlistdescription.txt"];
[ubiquityURL retain];
NSLog(@"ubiquityURL:%@",ubiquityURL);
Note *doc = [[Note alloc] initWithFileURL:ubiquityURL];
self.doc = doc;
[doc saveToURL:[doc fileURL]
forSaveOperation:UIDocumentSaveForCreating
completionHandler:^(BOOL success)
{
if (success) {
[doc openWithCompletionHandler:^(BOOL success)
{
NSLog(@"new document opened from iCloud");
}
];
}
}
];
}
}
-
///Note.h
#import <UIKit/UIKit.h>
@interface Note : UIDocument
@property (strong) NSString * noteContent;
@end
-
#import "Note.h"
@implementation Note
@synthesize noteContent;
- (BOOL)loadFromContents:(id)contents ofType:(NSString *)typeName
error:(NSError **)outError
{
if ([contents length] > 0)
{
self.noteContent = [[NSString alloc]
initWithBytes:[contents bytes]
length:[contents length]
encoding:NSUTF8StringEncoding];
NSLog(@"loadFromContents1");
NSLog(@"noteContent:%@",noteContent);
NSLog(@"noteContent.length:%d",noteContent.length);
}
else
{
// When the note is first created, assign some default content
self.noteContent = @"Empty";
}
return YES;
}
- (id)contentsForType:(NSString *)typeName error:(NSError **)outError
{
if ([self.noteContent length] == 0)
{
//self.noteContent = @"Empty";
NSString *FolderName = @"Federal Rules of Civil Procedure";
NSString *fileName = @"supplimentlistdescription.txt";
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
[FolderName retain];
NSString *fileName1 = [NSString stringWithFormat:@"%@/%@/%@",documentsDirectory,FolderName, fileName];
NSLog(@"fileName1:%@",fileName1);
NSData *data = [[NSData alloc]initWithContentsOfFile:fileName1];
noteContent =[[NSString alloc]initWithData:data encoding:NSMacOSRomanStringEncoding];
NSLog(@"noteContent:%@",noteContent);
NSLog(@"noteContent.length:%d",noteContent.length);
}
return [NSData dataWithBytes:[self.noteContent UTF8String]
length:[self.noteContent length]];
}
@end
Can you please tell me what the problem might be? Any suggestion will be appreciated. Thanks.
I ran into the same problem before.
The likely cause is how the note is converted to NSData: [NSData dataWithBytes:[self.noteContent UTF8String] length:[self.noteContent length]] uses the string's character count as the byte count, but UTF-8 can need more than one byte per character, so the saved data gets cut short, which is why only part of the document comes back. Let NSString do the conversion in both directions.
You should use
for writing
[self.noteContent dataUsingEncoding:NSUTF8StringEncoding];
for reading
self.noteContent = [[NSString alloc] initWithData:contents encoding:NSUTF8StringEncoding];
Example :
- (BOOL)loadFromContents:(id)contents ofType:(NSString *)typeName error:(NSError **)outError
{
if ([contents length] > 0) {
self.noteContent = [[NSString alloc] initWithData:contents encoding:NSUTF8StringEncoding];
} else {
self.noteContent = @""; // When the note is created we assign some default content
}
[[NSNotificationCenter defaultCenter] postNotificationName:@"noteModified"
object:self];
return YES;
}
// Called whenever the application (auto)saves the content of a note
- (id)contentsForType:(NSString *)typeName error:(NSError **)outError
{
if ([self.noteContent length] == 0) {
self.noteContent = @"";
}
return [self.noteContent dataUsingEncoding:NSUTF8StringEncoding];
}
MGTwitterEngine.m
- (NSString *)username
{
return [[_username retain] autorelease];
}
- (NSString *)password
{
return [[_password retain] autorelease];
}
- (void)setUsername:(NSString *)newUsername password:(NSString *)newPassword
{
// Set new credentials.
[_username release];
_username = [newUsername retain];
[_password release];
_password = [newPassword retain];
if ([self clearsCookies]) {
// Remove all cookies for twitter, to ensure next connection uses new credentials.
NSString *urlString = [NSString stringWithFormat:@"%@://%@",
(_secureConnection) ? @"https" : @"http",
_APIDomain];
NSURL *url = [NSURL URLWithString:urlString];
NSHTTPCookieStorage *cookieStorage = [NSHTTPCookieStorage sharedHTTPCookieStorage];
NSEnumerator *enumerator = [[cookieStorage cookiesForURL:url] objectEnumerator];
NSHTTPCookie *cookie = nil;
while ((cookie = [enumerator nextObject])) {
[cookieStorage deleteCookie:cookie];
}
}
}
- (NSString *)sendUpdate:(NSString *)status
{
return [self sendUpdate:status inReplyTo:0];
}
- (NSString *)sendUpdate:(NSString *)status inReplyTo:(unsigned long)updateID
{
if (!status) {
return nil;
}
NSString *path = [NSString stringWithFormat:@"statuses/update.%@", API_FORMAT];
NSString *trimmedText = status;
if ([trimmedText length] > MAX_MESSAGE_LENGTH) {
trimmedText = [trimmedText substringToIndex:MAX_MESSAGE_LENGTH];
}
NSMutableDictionary *params = [NSMutableDictionary dictionaryWithCapacity:0];
[params setObject:trimmedText forKey:@"status"];
if (updateID > 0) {
[params setObject:[NSString stringWithFormat:@"%u", updateID] forKey:@"in_reply_to_status_id"];
}
NSString *body = [self _queryStringWithBase:nil parameters:params prefixed:NO];
return [self _sendRequestWithMethod:HTTP_POST_METHOD path:path
queryParameters:params body:body
requestType:MGTwitterUpdateSendRequest
responseType:MGTwitterStatus];
}
TwitterPostViewController.m
- (IBAction)submitTweet{
[tweet resignFirstResponder];
if([[tweet text] length] > 0){
NSLog(@"%@",[[NSUserDefaults standardUserDefaults] valueForKey:@"TwitterUsername"]);
NSLog(@"%@",[[NSUserDefaults standardUserDefaults] valueForKey:@"TwitterPassword"]);
[[UIApplication sharedApplication] setNetworkActivityIndicatorVisible:YES];
[engine sendUpdate:[tweet text]];
}
}
- (void)requestFailed:(NSString *)requestIdentifier withError:(NSError *)error{
NSLog(@"Fail: %@", error);
[[UIApplication sharedApplication] setNetworkActivityIndicatorVisible:NO];
UIAlertView *failAlert;
if([error code] == 401){
failAlert = [[UIAlertView alloc] initWithTitle:@"Error" message:@"Incorrect Username & Password." delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil];
[failAlert setTag:10];
[failAlert setDelegate:self];
}
else
{
failAlert = [[UIAlertView alloc] initWithTitle:@"Error" message:@"Failed sending status to Twitter." delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil];
}
[failAlert show];
[failAlert release];
}
It shows me the failure popup saying "Incorrect Username & Password."
I have checked through NSLog that the username and password being sent are correct.
What could be wrong?
It looks like the version of MGTwitterEngine you're using is trying to use basic auth. That was switched off by Twitter in favour of OAuth. Get a newer version of MGTwitterEngine (or a fork that supports OAuth).
I am working on a fishing app these days and I am running into a memory leak problem.
-(void)requestFinished:(ASIFormDataRequest *) request {
if(hud != nil){
[hud show:NO];
[hud release];
hud = nil;
}
isLoading = NO;
self.responseText = [request responseString];
[self parseXml]; //I am getting leak here
if ( [self.responseText hasPrefix:@"<result>"]) {
UIAlertView *info = [[[UIAlertView alloc] initWithTitle:@" " message:@"Limited Internet access, please find a stronger signal in the area" delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil]autorelease];
[info show];
}
if (!isRefreshButtonClicked) {
[UIAccelerometer sharedAccelerometer].delegate = self;
[NSThread detachNewThreadSelector:@selector(parseXml) toTarget:self withObject:nil];
}
}
This is my function...
-(void) parseXml
{
NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
_fishes = [[fishes parseXml:self.responseText] retain];
[self performSelectorOnMainThread:@selector(parseXmlDone) withObject:nil waitUntilDone:YES];
[pool release];
}
Here _fishes is an array that gets its value from a function that returns an array... and here is that function:
+(NSMutableArray *)parseXml:(NSString *)xmlString {
//xmlString = [xmlString stringByReplacingOccurrencesOfString:@"&" withString:@""];
const char *cString = [xmlString UTF8String];
NSMutableArray *fishes = [NSMutableArray array];
NSData *xmlData = [NSData dataWithBytes:cString length:strlen(cString)];
NSError *error;
GDataXMLDocument *doc = [[GDataXMLDocument alloc]initWithData:xmlData options:0 error:&error];
if (doc == nil) { return nil; }
//parseXml
NSArray *_fishes = [doc.rootElement elementsForName:@"fishery"];
for (GDataXMLElement *_fish in _fishes) {
NSMutableDictionary *fish = [NSMutableDictionary dictionary];
NSArray *ids = [_fish elementsForName:@"id"];
if ([ids count]>0) {
GDataXMLElement *firstId = (GDataXMLElement *)[ids objectAtIndex:0];
[fish setValue:firstId.stringValue forKey:@"id"];
} else continue;
NSArray *names = [_fish elementsForName:@"name"];
if ([names count]>0) {
GDataXMLElement *firstName = (GDataXMLElement *)[names objectAtIndex:0];
[fish setValue:firstName.stringValue forKey:@"name"];...........
........
else continue;
NSArray *distances = [_fish elementsForName:@"distance"];
if ([distances count]>0) {
GDataXMLElement *distance = (GDataXMLElement *)[distances objectAtIndex:0];
[fish setValue:distance.stringValue forKey:@"distance"];
}else continue;
[fishes addObject:fish];
}
[doc release];
return fishes;
}
@end
I hope you guys understand my problem... thanks.
In -parseXml,
_fishes = [[fishes parseXml:self.responseText] retain];
will leak any previous object _fishes was pointing to in case -parseXml is sent more than once. You could use a retain property instead of an instance variable, or a setter method that releases the previous object, or release the previous object before assigning a new (retained) object to _fishes.
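As a sketch, the last option applied to the -parseXml from the question (release the old value just before keeping the new one; everything else unchanged):

    -(void) parseXml
    {
        NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
        // Retain the new array first, then release the old one, so repeated calls don't leak.
        NSMutableArray *newFishes = [[fishes parseXml:self.responseText] retain];
        [_fishes release];
        _fishes = newFishes;
        [self performSelectorOnMainThread:@selector(parseXmlDone) withObject:nil waitUntilDone:YES];
        [pool release];
    }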