I have an iOS app that records video from the front-facing camera in the background, and it was working fine. But now I am trying to play a short MP4 at the same time, and starting playback with MPMoviePlayerController stops the capture session.
I tried AVPlayer instead, with the same result.
I also set [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayAndRecord error:nil];
Still no luck. Has anyone faced and solved the same problem?
Thanks for any suggestions.
I am using the iOS 5 SDK.
#import "ViewController.h"
@interface ViewController ()
@end
@implementation ViewController
- (void)viewDidLoad
{
[super viewDidLoad];
// Do any additional setup after loading the view, typically from a nib.
UIButton *recButton=[[UIButton alloc] initWithFrame:CGRectMake(10,200, 200,40)] ;
recButton.backgroundColor = [UIColor blackColor];
[recButton addTarget:self action:@selector(startRecording) forControlEvents:UIControlEventTouchUpInside];
[self.view addSubview:recButton];
isRecording=NO;
}
- (void)viewDidUnload
{
[super viewDidUnload];
// Release any retained subviews of the main view.
}
-(void) viewWillAppear:(BOOL)animated
{
self.navigationController.navigationBarHidden = YES;
}
-(void) viewWillDisappear:(BOOL)animated
{
self.navigationController.navigationBarHidden = NO;
}
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation
{
if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPhone) {
return (interfaceOrientation == UIInterfaceOrientationPortrait);
} else {
return YES;
}
}
#pragma mark video playing
-(void) startRecording
{
if (isRecording) {
[self stopVideoRecording];
isRecording=NO;
}
else
{
[self initCaptureSession];
[self startVideoRecording];
isRecording=YES;
}
NSURL *url = [NSURL fileURLWithPath:[[NSBundle mainBundle]
pathForResource:@"new"
ofType:@"mov"]];
[self playMovieAtURL:url];
}
-(void) playMovieAtURL: (NSURL*) theURL {
player =
[[MPMoviePlayerController alloc] initWithContentURL: theURL ];
[[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayAndRecord error:nil];
player.scalingMode = MPMovieScalingModeAspectFill;
player.controlStyle = MPMovieControlStyleNone;
[player prepareToPlay];
// Register for the playback finished notification
[[NSNotificationCenter defaultCenter]
addObserver: self
selector: @selector(myMovieFinishedCallback:)
name: MPMoviePlayerPlaybackDidFinishNotification
object: player];
[player.view setFrame: self.view.bounds];
[self.view addSubview:player.view];
// Movie playback is asynchronous, so this method returns immediately.
[player play];
}
// When the movie is done, release the controller.
-(void) myMovieFinishedCallback: (NSNotification*) aNotification
{
MPMoviePlayerController* theMovie = [aNotification object];
[[NSNotificationCenter defaultCenter]
removeObserver: self
name: MPMoviePlayerPlaybackDidFinishNotification
object: theMovie];
[player.view removeFromSuperview];
[self stopVideoRecording];
}
#pragma mark -
#pragma mark recording
-(void) initCaptureSession
{
NSLog(#"Setting up capture session");
captureSession = [[AVCaptureSession alloc] init];
//----- ADD INPUTS -----
NSLog(#"Adding video input");
//ADD VIDEO INPUT
AVCaptureDevice *VideoDevice = [self frontFacingCameraIfAvailable ];
//[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if (VideoDevice)
{
NSError *error;
videoInputDevice = [AVCaptureDeviceInput deviceInputWithDevice:VideoDevice error:&error];
if (!error)
{
if ([captureSession canAddInput:videoInputDevice])
[captureSession addInput:videoInputDevice];
else
NSLog(#"Couldn't add video input");
}
else
{
NSLog(#"Couldn't create video input");
}
}
else
{
NSLog(#"Couldn't create video capture device");
}
//ADD AUDIO INPUT
NSLog(#"Adding audio input");
AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
NSError *error = nil;
AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error];
if (audioInput)
{
[captureSession addInput:audioInput];
}
//----- ADD OUTPUTS ---
//ADD MOVIE FILE OUTPUT
NSLog(#"Adding movie file output");
movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
// Float64 TotalSeconds = 60; //Total seconds
// int32_t preferredTimeScale = 30; //Frames per second
// CMTime maxDuration = CMTimeMakeWithSeconds(TotalSeconds, preferredTimeScale); //<<SET MAX DURATION
// movieFileOutput.maxRecordedDuration = maxDuration;
movieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024; //<<SET MIN FREE SPACE IN BYTES FOR RECORDING TO CONTINUE ON A VOLUME
if ([captureSession canAddOutput:movieFileOutput])
[captureSession addOutput:movieFileOutput];
//SET THE CONNECTION PROPERTIES (output properties)
[self CameraSetOutputProperties]; //(We call a method as it also has to be done after changing camera)
//----- SET THE IMAGE QUALITY / RESOLUTION -----
//Options:
// AVCaptureSessionPresetHigh - Highest recording quality (varies per device)
// AVCaptureSessionPresetMedium - Suitable for WiFi sharing (actual values may change)
// AVCaptureSessionPresetLow - Suitable for 3G sharing (actual values may change)
// AVCaptureSessionPreset640x480 - 640x480 VGA (check its supported before setting it)
// AVCaptureSessionPreset1280x720 - 1280x720 720p HD (check its supported before setting it)
// AVCaptureSessionPresetPhoto - Full photo resolution (not supported for video output)
NSLog(#"Setting image quality");
[captureSession setSessionPreset:AVCaptureSessionPresetMedium];
if ([captureSession canSetSessionPreset:AVCaptureSessionPreset640x480]) //Check size based configs are supported before setting them
[captureSession setSessionPreset:AVCaptureSessionPreset640x480];
//----- START THE CAPTURE SESSION RUNNING -----
[captureSession startRunning];
}
//********** CAMERA SET OUTPUT PROPERTIES **********
- (void) CameraSetOutputProperties
{
AVCaptureConnection *CaptureConnection=nil;
//SET THE CONNECTION PROPERTIES (output properties)
NSComparisonResult order = [[UIDevice currentDevice].systemVersion compare: @"5.0.0" options: NSNumericSearch];
if (order == NSOrderedSame || order == NSOrderedDescending) {
// OS version >= 5.0.0
CaptureConnection = [movieFileOutput connectionWithMediaType:AVMediaTypeVideo];
// if (CaptureConnection.supportsVideoMinFrameDuration)
// CaptureConnection.videoMinFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
// if (CaptureConnection.supportsVideoMaxFrameDuration)
// CaptureConnection.videoMaxFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
// if (CaptureConnection.supportsVideoMinFrameDuration)
// {
// // CMTimeShow(CaptureConnection.videoMinFrameDuration);
// // CMTimeShow(CaptureConnection.videoMaxFrameDuration);
// }
} else {
// OS version < 5.0.0
CaptureConnection = [self connectionWithMediaType:AVMediaTypeVideo fromConnections:[movieFileOutput connections]];
}
//Set landscape (if required)
if ([CaptureConnection isVideoOrientationSupported])
{
AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait;// AVCaptureVideoOrientationLandscapeRight; //<<<<<SET VIDEO ORIENTATION IF LANDSCAPE
[CaptureConnection setVideoOrientation:orientation];
}
//Set frame rate (if requried)
//CMTimeShow(CaptureConnection.videoMinFrameDuration);
//CMTimeShow(CaptureConnection.videoMaxFrameDuration);
}
- (void) startVideoRecording
{
//Create temporary URL to record to
NSString *outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"output.mov"];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
NSFileManager *fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:outputPath])
{
NSError *error;
if ([fileManager removeItemAtPath:outputPath error:&error] == NO)
{
//Error - handle if requried
NSLog(#"file remove error");
}
}
//Start recording
[movieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
}
-(void) stopVideoRecording
{
[movieFileOutput stopRecording];
}
//********** DID FINISH RECORDING TO OUTPUT FILE AT URL **********/
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput
didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
fromConnections:(NSArray *)connections
error:(NSError *)error
{
NSLog(#"didFinishRecordingToOutputFileAtURL - enter");
BOOL RecordedSuccessfully = YES;
if ([error code] != noErr)
{
// A problem occurred: Find out if the recording was successful.
id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
if (value)
{
RecordedSuccessfully = [value boolValue];
}
}
if (RecordedSuccessfully)
{
//----- RECORDED SUCESSFULLY -----
NSLog(#"didFinishRecordingToOutputFileAtURL - success");
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputFileURL])
{
[library writeVideoAtPathToSavedPhotosAlbum:outputFileURL
completionBlock:^(NSURL *assetURL, NSError *error)
{
if (error)
{
NSLog(#"File save error");
}
else
{
recordedVideoURL=assetURL;
}
}];
}
else {
NSString *assetURL=[self copyFileToDocuments:outputFileURL];
if(assetURL!=nil)
{
recordedVideoURL=[NSURL URLWithString:assetURL];
}
}
}
}
- (NSString*) copyFileToDocuments:(NSURL *)fileURL
{
NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
[dateFormatter setDateFormat:@"yyyy-MM-dd_HH-mm-ss"];
NSString *destinationPath = [documentsDirectory stringByAppendingFormat:@"/output_%@.mov", [dateFormatter stringFromDate:[NSDate date]]];
NSError *error;
if (![[NSFileManager defaultManager] copyItemAtURL:fileURL toURL:[NSURL fileURLWithPath:destinationPath] error:&error]) {
NSLog(#"File save error %#", [error localizedDescription]);
return nil;
}
return destinationPath;
}
- (AVCaptureConnection *)connectionWithMediaType:(NSString *)mediaType fromConnections:(NSArray *)connections
{
for ( AVCaptureConnection *connection in connections ) {
for ( AVCaptureInputPort *port in [connection inputPorts] ) {
if ( [[port mediaType] isEqual:mediaType] ) {
return connection;
}
}
}
return nil;
}
- (AVCaptureDevice *)frontFacingCameraIfAvailable
{
// look at all the video devices and get the first one that's on the front
NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
AVCaptureDevice *captureDevice = nil;
for (AVCaptureDevice *device in videoDevices)
{
if (device.position == AVCaptureDevicePositionFront)
{
captureDevice = device;
break;
}
}
// couldn't find one on the front, so just get the default video device.
if ( ! captureDevice)
{
captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
}
return captureDevice;
}
#pragma mark -
@end
I have this problem too. I don't know how to fix it yet, but I know the problem is here:
[captureSession addInput:audioInput];
If you delete this line of code, it works fine, so I think it is an audio mixing issue or some other audio problem.
I am still looking for the answer.
I have found the answer here: answer. It works!
But remember to add AudioToolbox.framework; maybe that is helpful for you.
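For reference, one common cause of a capture session stopping when playback starts is the player reconfiguring the audio session. Below is a minimal sketch of an audio-session setup intended to let playback and recording coexist; the use of AVAudioSessionCategoryOptionMixWithOthers is an assumption, not something confirmed in the posts above, and that option requires iOS 6 or later (on iOS 5 the equivalent override would have to be set through the Audio Session C API).

#import <AVFoundation/AVFoundation.h>

// Hypothetical helper: configure the shared audio session so that movie
// playback does not tear down an active capture session's audio input.
// Assumes iOS 6+ for setCategory:withOptions:error:.
static void configureMixablePlayAndRecordSession(void)
{
    NSError *error = nil;
    AVAudioSession *session = [AVAudioSession sharedInstance];
    BOOL ok = [session setCategory:AVAudioSessionCategoryPlayAndRecord
                       withOptions:AVAudioSessionCategoryOptionMixWithOthers
                             error:&error];
    if (!ok) {
        NSLog(@"Could not set audio session category: %@", error);
        return;
    }
    if (![session setActive:YES error:&error]) {
        NSLog(@"Could not activate audio session: %@", error);
    }
}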
Related
I have to implement functionality to repeatedly pause and resume video capture in a single session, with AVFoundation, and have each new segment (the segments captured after each pause) added to the same video file. Currently, every time I press "stop" and then "record" again, it just saves a new video file to my iPhone's Documents directory and starts capturing to a new file. I need to be able to press the "record/stop" button repeatedly, capture video and audio only while recording is active, and then, when the "done" button is pressed, end up with a single AV file containing all the segments. All of this needs to happen in the same capture session / preview session.
I am not using AVAssetWriterInput.
The only way I can think of to try this is, when the "done" button is pressed, to take each individual output file and combine them into a single file.
This code works on iOS 5 but not on iOS 6. On iOS 6, the first time I pause recording (stop recording), the AVCaptureFileOutputRecordingDelegate method (captureOutput: didFinishRecordingToOutputFileAtURL: fromConnections: error:) is called; but after that, when I start recording, the delegate method is called again, and it is then not called when recording stops.
I need a solution for that issue. Please help me.
//View LifeCycle
- (void)viewDidLoad
{
[super viewDidLoad];
self.finalRecordedVideoName = [self stringWithNewUUID];
arrVideoName = [[NSMutableArray alloc]initWithCapacity:0];
arrOutputUrl = [[NSMutableArray alloc] initWithCapacity:0];
CaptureSession = [[AVCaptureSession alloc] init];
captureDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
if ([captureDevices count] > 0)
{
NSError *error;
VideoInputDevice = [[AVCaptureDeviceInput alloc] initWithDevice:[self backFacingCamera] error:&error];
if (!error)
{
if ([CaptureSession canAddInput:VideoInputDevice])
[CaptureSession addInput:VideoInputDevice];
else
NSLog(#"Couldn't add video input");
}
else
{
NSLog(#"Couldn't create video input");
}
}
else
{
NSLog(#"Couldn't create video capture device");
}
//ADD VIDEO PREVIEW LAYER
NSLog(#"Adding video preview layer");
AVCaptureVideoPreviewLayer *layer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:CaptureSession];
[self setPreviewLayer:layer];
UIDeviceOrientation currentOrientation = [UIDevice currentDevice].orientation;
NSLog(#"%d",currentOrientation);
if (currentOrientation == UIDeviceOrientationPortrait)
{
PreviewLayer.orientation = AVCaptureVideoOrientationPortrait;
}
else if (currentOrientation == UIDeviceOrientationPortraitUpsideDown)
{
PreviewLayer.orientation = AVCaptureVideoOrientationPortraitUpsideDown;
}
else if (currentOrientation == UIDeviceOrientationLandscapeRight)
{
PreviewLayer.orientation = AVCaptureVideoOrientationLandscapeRight;
}
else if (currentOrientation == UIDeviceOrientationLandscapeLeft)
{
PreviewLayer.orientation = AVCaptureVideoOrientationLandscapeLeft;
}
[[self PreviewLayer] setVideoGravity:AVLayerVideoGravityResizeAspectFill];
//ADD MOVIE FILE OUTPUT
NSLog(#"Adding movie file output");
MovieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
VideoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
[VideoDataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
NSString* key = (NSString*)kCVPixelBufferBytesPerRowAlignmentKey;
NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
[VideoDataOutput setVideoSettings:videoSettings];
Float64 TotalSeconds = 60; //Total seconds
int32_t preferredTimeScale = 30; //Frames per second
CMTime maxDuration = CMTimeMakeWithSeconds(TotalSeconds, preferredTimeScale);//<<SET MAX DURATION
MovieFileOutput.maxRecordedDuration = maxDuration;
MovieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024; //<<SET MIN FREE SPACE IN BYTES FOR RECORDING TO CONTINUE ON A VOLUME
//SET THE CONNECTION PROPERTIES (output properties)
[self CameraSetOutputProperties]; //(We call a method as it also has to be done after changing camera)
AVCaptureConnection *videoConnection = nil;
for ( AVCaptureConnection *connection in [MovieFileOutput connections] )
{
NSLog(#"%#", connection);
for ( AVCaptureInputPort *port in [connection inputPorts] )
{
NSLog(#"%#", port);
if ( [[port mediaType] isEqual:AVMediaTypeVideo] )
{
videoConnection = connection;
}
}
}
if([videoConnection isVideoOrientationSupported]) // **Here it is, its always false**
{
[videoConnection setVideoOrientation:[[UIDevice currentDevice] orientation]];
} NSLog(#"Setting image quality");
[CaptureSession setSessionPreset:AVCaptureSessionPresetLow];
//----- DISPLAY THE PREVIEW LAYER -----
CGRect layerRect = CGRectMake(5, 5, 299, ([[UIScreen mainScreen] bounds].size.height == 568)?438:348);
[self.PreviewLayer setBounds:layerRect];
[self.PreviewLayer setPosition:CGPointMake(CGRectGetMidX(layerRect),CGRectGetMidY(layerRect))];
if ([CaptureSession canAddOutput:MovieFileOutput])
[CaptureSession addOutput:MovieFileOutput];
[CaptureSession addOutput:VideoDataOutput];
//We use this instead so it goes on a layer behind our UI controls (avoids us having to manually bring each control to the front):
CameraView = [[UIView alloc] init];
[videoPreviewLayer addSubview:CameraView];
[videoPreviewLayer sendSubviewToBack:CameraView];
[[CameraView layer] addSublayer:PreviewLayer];
//----- START THE CAPTURE SESSION RUNNING -----
[CaptureSession startRunning];
}
#pragma mark - IBACtion Methods
-(IBAction)btnStartAndStopPressed:(id)sender
{
UIButton *StartAndStopButton = (UIButton*)sender;
if ([StartAndStopButton isSelected] == NO)
{
[StartAndStopButton setSelected:YES];
[btnPauseAndResume setEnabled:YES];
[btnBack setEnabled:NO];
[btnSwitchCameraInput setHidden:YES];
NSDate *date = [NSDate date];
NSLog(#" date %#",date);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *recordedFileName = nil;
recordedFileName = [NSString stringWithFormat:#"output%#.mov",date];
NSString *documentsDirectory = [paths objectAtIndex:0];
self.outputPath = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:#"%#",recordedFileName]];
NSLog(#"%#",self.outputPath);
[arrVideoName addObject:recordedFileName];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:self.outputPath];
if ([[NSFileManager defaultManager] fileExistsAtPath:self.outputPath])
{
NSError *error;
if ([[NSFileManager defaultManager] removeItemAtPath:self.outputPath error:&error] == NO)
{
//Error - handle if requried
}
}
//Start recording
[MovieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
recordingTimer = [NSTimer scheduledTimerWithTimeInterval:1.0 target:self selector:@selector(VideoRecording) userInfo:nil repeats:YES];
}
else
{
[StartAndStopButton setSelected:NO];
[btnPauseAndResume setEnabled:NO];
[btnBack setEnabled:YES];
[btnSwitchCameraInput setHidden:NO];
NSLog(#"STOP RECORDING");
WeAreRecording = NO;
[MovieFileOutput stopRecording];
[((ActOutAppDelegate *)ActOut_AppDelegate) showLoadingViewOnView:self.view withLabel:#"Please wait...."];
if ([recordingTimer isValid])
{
[recordingTimer invalidate];
recordingTimer = nil;
recordingTime = 30;
}
stopRecording = YES;
}
}
- (IBAction)btnPauseAndResumePressed:(id)sender
{
UIButton *PauseAndResumeButton = (UIButton*)sender;
if (PauseAndResumeButton.selected == NO)
{
PauseAndResumeButton.selected = YES;
NSLog(#"recording paused");
WeAreRecording = NO;
[MovieFileOutput stopRecording];
[self pauseTimer:recordingTimer];
[btnStartAndStop setEnabled:NO];
[btnBack setEnabled:YES];
[btnSwitchCameraInput setHidden:NO];
}
else
{
PauseAndResumeButton.selected = NO;
NSLog(#"recording resumed");
[btnStartAndStop setEnabled:YES];
[btnBack setEnabled:NO];
[btnSwitchCameraInput setHidden:YES];
WeAreRecording = YES;
NSDate *date = [NSDate date];
NSLog(#" date %#",date);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,NSUserDomainMask, YES);
NSString *recordedFileName = nil;
recordedFileName = [NSString stringWithFormat:#"output%#.mov",date];
NSString *documentsDirectory = [paths objectAtIndex:0];
self.outputPath = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:#"%#",recordedFileName]];
NSLog(#"%#",self.outputPath);
[arrVideoName addObject:recordedFileName];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:self.outputPath];
if ([[NSFileManager defaultManager] fileExistsAtPath:self.outputPath])
{
NSError *error;
if ([[NSFileManager defaultManager] removeItemAtPath:self.outputPath error:&error] == NO)
{
//Error - handle if requried
}
}
[self resumeTimer:recordingTimer];
//Start recording
[MovieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
}
}
- (void) CameraSetOutputProperties
{
//SET THE CONNECTION PROPERTIES (output properties)
AVCaptureConnection *CaptureConnection = [MovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
[CaptureConnection setVideoOrientation:AVCaptureVideoOrientationPortrait];
//Set frame rate (if requried)
CMTimeShow(CaptureConnection.videoMinFrameDuration);
CMTimeShow(CaptureConnection.videoMaxFrameDuration);
if (CaptureConnection.supportsVideoMinFrameDuration)
CaptureConnection.videoMinFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
if (CaptureConnection.supportsVideoMaxFrameDuration)
CaptureConnection.videoMaxFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
CMTimeShow(CaptureConnection.videoMinFrameDuration);
CMTimeShow(CaptureConnection.videoMaxFrameDuration);
}
- (AVCaptureDevice *) CameraWithPosition:(AVCaptureDevicePosition) Position
{
NSArray *Devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *Device in Devices)
{
if ([Device position] == Position)
{
NSLog(#"%d",Position);
return Device;
}
}
return nil;
}
#pragma mark - AVCaptureFileOutputRecordingDelegate Method
-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
if(videoWriterInput.readyForMoreMediaData && WeAreRecording) [videoWriterInput appendSampleBuffer:sampleBuffer];
for(AVCaptureConnection *captureConnection in [captureOutput connections])
{
if ([captureConnection isVideoOrientationSupported])
{
AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationLandscapeLeft;
[captureConnection setVideoOrientation:orientation];
}
}
UIDeviceOrientation curOr = [[UIDevice currentDevice] orientation];
CGAffineTransform t;
if (curOr == UIDeviceOrientationPortrait)
{
t = CGAffineTransformMakeRotation(-M_PI / 2);
}
else if (curOr == UIDeviceOrientationPortraitUpsideDown)
{
t = CGAffineTransformMakeRotation(M_PI / 2);
}
else if (curOr == UIDeviceOrientationLandscapeRight)
{
t = CGAffineTransformMakeRotation(M_PI);
}
else
{
t = CGAffineTransformMakeRotation(0);
}
}
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
NSLog(#"didFinishRecordingToOutputFileAtURL - enter");
NSLog(#"output file url : %#", [outputFileURL absoluteString]);
BOOL RecordedSuccessfully = YES;
if ([error code] != noErr)
{
// A problem occurred: Find out if the recording was successful.
id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
if (value)
{
RecordedSuccessfully = [value boolValue];
}
}
AVCaptureConnection *videoConnection=nil;
for ( AVCaptureConnection *connection in [MovieFileOutput connections] )
{
NSLog(#"%#", connection);
for ( AVCaptureInputPort *port in [connection inputPorts] )
{
NSLog(#"%#", port);
if ( [[port mediaType] isEqual:AVMediaTypeVideo] )
{
videoConnection = connection;
}
}
}
if([videoConnection isVideoOrientationSupported]) // **Here it is, its always false**
{
[videoConnection setVideoOrientation:[[UIDevice currentDevice] orientation]];
} NSLog(#"Setting image quality");
NSData *videoData = [NSData dataWithContentsOfURL:outputFileURL];
[videoData writeToFile:self.outputPath atomically:NO];
[arrOutputUrl addObject:outputFileURL];
if (stopRecording)
{
[self mergeMultipleVideo];
}
}
//Method to merge multiple audios
-(void)mergeMultipleVideo
{
mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
CMTime nextClipStartTime = kCMTimeZero;
NSLog(#"Array of output file url : %#", arrOutputUrl);
if (arrOutputUrl.count > 0)
{
for(int i = 0 ;i < [arrOutputUrl count];i++)
{
AVURLAsset* VideoAsset = [[AVURLAsset alloc]initWithURL:[arrOutputUrl objectAtIndex:i] options:nil];
CMTimeRange timeRangeInAsset;
timeRangeInAsset = CMTimeRangeMake(kCMTimeZero, [VideoAsset duration]);
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, VideoAsset.duration) ofTrack:[[VideoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRangeInAsset.duration);
}
}
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.mov",self.finalRecordedVideoName]];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
exportSession.outputURL=url;
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
exportSession.shouldOptimizeForNetworkUse = YES;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
[self exportDidFinish:exportSession path:myPathDocs];
});
}];
}
-(void)exportDidFinish:(AVAssetExportSession*)session path:(NSString*)outputVideoPath
{
NSLog(#"session.status : %d",session.status);
if (session.status == AVAssetExportSessionStatusCompleted)
{
NSURL *outputURL = session.outputURL;
NSData *videoData = [NSData dataWithContentsOfURL:outputURL];
[videoData writeToFile:outputVideoPath atomically:NO];
if ([arrVideoName count] > 0)
{
for (int i = 0; i < [arrVideoName count]; i++)
{
NSArray* documentPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString* fullFilePath = [[documentPaths objectAtIndex:0] stringByAppendingPathComponent: [NSString stringWithFormat:@"%@",[arrVideoName objectAtIndex:i]]];
NSLog(@"Full path of file to be deleted: %@",fullFilePath);
NSFileManager *fileManager = [NSFileManager defaultManager];
NSError *error;
if ([fileManager fileExistsAtPath:fullFilePath])
{
[fileManager removeItemAtPath:fullFilePath error:&error];
}
}
[arrVideoName removeAllObjects];
}
if (arrOutputUrl.count > 0)
{
[arrOutputUrl removeAllObjects];
}
[((ActOutAppDelegate *)ActOut_AppDelegate) removeLoadingViewfromView:self.view];
[self.view addSubview:afterRecordingPopupView];
}
}
Look at the AVCaptureConnection's enabled property. For your output connection, set enabled to NO instead of stopping the session.
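To illustrate that suggestion, here is a minimal sketch of pausing capture by toggling the movie file output's connections instead of stopping the session; movieFileOutput is assumed to be the same AVCaptureMovieFileOutput ivar used in the question, and whether this alone yields a single seamless output file is not something the answer above demonstrates.

// Sketch of the suggestion: keep the session running and disable the
// output's connections while "paused", instead of stopping recording and
// starting a new file for every pause.
- (void)setCaptureEnabled:(BOOL)enabled
{
    for (AVCaptureConnection *connection in [movieFileOutput connections]) {
        if (connection.enabled != enabled) {
            connection.enabled = enabled;   // NO pauses delivery, YES resumes it
        }
    }
}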
I have found some code online which captures video from the iPhone's camera and stores it in a video file, and it is working fine. But my purpose is not to save it locally; it is to send it to a server. I have found out that there is a free media server named WOWZA which allows streaming, that Apple has its HTTP Live Streaming (HLS) feature, and that these servers expect the video to be in H.264 format and the audio in MP3. By reading some of the documents about Apple's HLS I also learned that the playlist file gives a different URL for each segment of the media file, and the segments are then played in the correct order on the device through the browser. I am not sure how to get small segments of the file recorded by the phone's camera, nor how to convert them into the required format.
Following is the code for capturing video:
Implementation File
#import "THCaptureViewController.h"
#import <AVFoundation/AVFoundation.h>
#import "THPlayerViewController.h"
#define VIDEO_FILE @"test.mov"
@interface THCaptureViewController ()
@property (nonatomic, strong) AVCaptureSession *captureSession;
@property (nonatomic, strong) AVCaptureMovieFileOutput *captureOutput;
@property (nonatomic, weak) AVCaptureDeviceInput *activeVideoInput;
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *previewLayer;
@end
@implementation THCaptureViewController
- (void)viewDidLoad
{
[super viewDidLoad];
#if TARGET_IPHONE_SIMULATOR
self.simulatorView.hidden = NO;
[self.view bringSubviewToFront:self.simulatorView];
#else
self.simulatorView.hidden = YES;
[self.view sendSubviewToBack:self.simulatorView];
#endif
// Hide the toggle button if device has less than 2 cameras. Does 3GS support iOS 6?
self.toggleCameraButton.hidden = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] count] < 2;
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0),
^{
[self setUpCaptureSession];
});
}
#pragma mark - Configure Capture Session
- (void)setUpCaptureSession
{
self.captureSession = [[AVCaptureSession alloc] init];
NSError *error;
// Set up hardware devices
AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if (videoDevice) {
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
if (input) {
[self.captureSession addInput:input];
self.activeVideoInput = input;
}
}
AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
if (audioDevice) {
AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&error];
if (audioInput) {
[self.captureSession addInput:audioInput];
}
}
//Create a VideoDataOutput and add it to the session
AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
[self.captureSession addOutput:output];
// Setup the still image file output
AVCaptureStillImageOutput *stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
[stillImageOutput setOutputSettings:@{AVVideoCodecKey : AVVideoCodecJPEG}];
if ([self.captureSession canAddOutput:stillImageOutput]) {
[self.captureSession addOutput:stillImageOutput];
}
// Start running session so preview is available
[self.captureSession startRunning];
// Set up preview layer
dispatch_async(dispatch_get_main_queue(), ^{
self.previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
self.previewLayer.frame = self.previewView.bounds;
self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
[[self.previewLayer connection] setVideoOrientation:[self currentVideoOrientation]];
[self.previewView.layer addSublayer:self.previewLayer];
});
}
#pragma mark - Start Recording
- (IBAction)startRecording:(id)sender {
if ([sender isSelected]) {
[sender setSelected:NO];
[self.captureOutput stopRecording];
} else {
[sender setSelected:YES];
if (!self.captureOutput) {
self.captureOutput = [[AVCaptureMovieFileOutput alloc] init];
[self.captureSession addOutput:self.captureOutput];
}
// Delete the old movie file if it exists
//[[NSFileManager defaultManager] removeItemAtURL:[self outputURL] error:nil];
[self.captureSession startRunning];
AVCaptureConnection *videoConnection = [self connectionWithMediaType:AVMediaTypeVideo fromConnections:self.captureOutput.connections];
if ([videoConnection isVideoOrientationSupported]) {
videoConnection.videoOrientation = [self currentVideoOrientation];
}
if ([videoConnection isVideoStabilizationSupported]) {
videoConnection.enablesVideoStabilizationWhenAvailable = YES;
}
[self.captureOutput startRecordingToOutputFileURL:[self outputURL] recordingDelegate:self];
}
// Disable the toggle button if recording
self.toggleCameraButton.enabled = ![sender isSelected];
}
- (AVCaptureConnection *)connectionWithMediaType:(NSString *)mediaType fromConnections:(NSArray *)connections {
for (AVCaptureConnection *connection in connections) {
for (AVCaptureInputPort *port in [connection inputPorts]) {
if ([[port mediaType] isEqual:mediaType]) {
return connection;
}
}
}
return nil;
}
#pragma mark - AVCaptureFileOutputRecordingDelegate
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error {
if (!error) {
[self presentRecording];
} else {
NSLog(#"Error: %#", [error localizedDescription]);
}
}
#pragma mark - Show Last Recording
- (void)presentRecording
{
NSString *tracksKey = @"tracks";
AVAsset *asset = [AVURLAsset assetWithURL:[self outputURL]];
[asset loadValuesAsynchronouslyForKeys:@[tracksKey] completionHandler:^{
NSError *error;
AVKeyValueStatus status = [asset statusOfValueForKey:tracksKey error:&error];
if (status == AVKeyValueStatusLoaded) {
dispatch_async(dispatch_get_main_queue(), ^{
UIStoryboard *mainStoryboard = [UIStoryboard storyboardWithName:@"MainStoryboard" bundle:nil];
THPlayerViewController *controller = [mainStoryboard instantiateViewControllerWithIdentifier:@"THPlayerViewController"];
controller.title = @"Capture Recording";
controller.asset = asset;
[self presentViewController:controller animated:YES completion:nil];
});
}
}];
}
#pragma mark - Recoding Destination URL
- (NSURL *)outputURL
{
NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSLog(#"documents Directory: %#", documentsDirectory);
NSString *filePath = [documentsDirectory stringByAppendingPathComponent:VIDEO_FILE];
NSLog(#"output url: %#", filePath);
return [NSURL fileURLWithPath:filePath];
}
@end
I found this link which shows how to capture the video frame by frame. But I am not sure whether capturing the video in frames will help me send the video to the server in H.264 format. Can this be done, and if so, how?
There, the person who asked the question says (in the comments below the question) that he was able to do it successfully, but he has not mentioned how he captured the video.
Please tell me which data type should be used to get small segments of the captured video, and also how to convert the captured data into the required format and send it to the server.
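As a rough illustration of one possible direction (an assumption on my part, not something stated in the question or in the answer below): frames delivered by an AVCaptureVideoDataOutput can be encoded to H.264 with an AVAssetWriter, producing short MP4 segments that a segmenter or streaming server could then work with. The writer settings and helper name here are purely illustrative.

#import <AVFoundation/AVFoundation.h>

// Hypothetical helper: create an AVAssetWriter that encodes incoming video
// sample buffers to H.264 in an MPEG-4 container. The caller would append
// buffers from captureOutput:didOutputSampleBuffer:fromConnection: and
// finish writing whenever a segment should be closed and uploaded.
static AVAssetWriter *makeSegmentWriter(NSURL *segmentURL,
                                        AVAssetWriterInput **videoInputOut,
                                        NSError **error)
{
    AVAssetWriter *writer = [AVAssetWriter assetWriterWithURL:segmentURL
                                                     fileType:AVFileTypeMPEG4
                                                        error:error];
    if (!writer) {
        return nil;
    }
    NSDictionary *settings = @{ AVVideoCodecKey  : AVVideoCodecH264,
                                AVVideoWidthKey  : @(640),
                                AVVideoHeightKey : @(480) };
    AVAssetWriterInput *videoInput =
        [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                           outputSettings:settings];
    videoInput.expectsMediaDataInRealTime = YES; // feeding live capture data
    if ([writer canAddInput:videoInput]) {
        [writer addInput:videoInput];
    }
    if (videoInputOut) {
        *videoInputOut = videoInput;
    }
    return writer;
}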
You can use the Live SDK. You have to set up an nginx-powered streaming server.
Please follow this link; I have used it and it is a very efficient solution.
https://github.com/ltebean/Live
My problem
I am using AVFoundation to capture frames of the video output every 5 seconds. When the user taps a button, the input camera should switch from the front one to the back one and vice versa. The problem is that every time I switch from the back camera to the front camera, it freezes (oddly enough, it works the other way around, i.e. front camera to back camera)!
The code I use
To switch between the cameras, I use the exact code taken from Apple's AVCam sample code, with a slight change of variable names (so it matches my code):
- (BOOL)toggleCamera
{
BOOL success = NO;
if ([self cameraCount] > 1) {
NSError *error;
AVCaptureDeviceInput *newVideoInput;
AVCaptureDevicePosition position = [[self.videoInput device] position];
if (position == AVCaptureDevicePositionBack)
newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self frontFacingCamera] error:&error];
else if (position == AVCaptureDevicePositionFront)
newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self backFacingCamera] error:&error];
else
goto bail;
if (newVideoInput != nil) {
// Start configuring the session.
[[self captureSession] beginConfiguration];
// Remove the current video input device.
[[self captureSession] removeInput:[self videoInput]];
if ([[self captureSession] canAddInput:newVideoInput]) {
[[self captureSession] addInput:newVideoInput];
[self setVideoInput:newVideoInput];
}
else {
[[self captureSession] addInput:[self videoInput]];
}
[[self captureSession] commitConfiguration];
success = YES;
[newVideoInput release];
}
else if (error) {
NSLog(#"PICTURE TAKER: Failed to toggle cameras."
#"\nError: %#", error.localizedDescription);
}
}
bail:
return success;
}
And to set up the AVCaptureSession, again, I use the exact same code from the AVCam sample code, with a slight change in the added output (to match my needs):
- (BOOL) setupSession
{
BOOL success = NO;
// Set torch and flash mode to auto
if ([[self backFacingCamera] hasFlash]) {
if ([[self backFacingCamera] lockForConfiguration:nil]) {
if ([[self backFacingCamera] isFlashModeSupported:AVCaptureFlashModeAuto]) {
[[self backFacingCamera] setFlashMode:AVCaptureFlashModeAuto];
}
[[self backFacingCamera] unlockForConfiguration];
}
}
if ([[self backFacingCamera] hasTorch]) {
if ([[self backFacingCamera] lockForConfiguration:nil]) {
if ([[self backFacingCamera] isTorchModeSupported:AVCaptureTorchModeAuto]) {
[[self backFacingCamera] setTorchMode:AVCaptureTorchModeAuto];
}
[[self backFacingCamera] unlockForConfiguration];
}
}
// Init the device inputs
AVCaptureDeviceInput *newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self frontFacingCamera] error:nil];
// Create a video output & configure it.
AVCaptureVideoDataOutput *newVideoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
newVideoDataOutput.videoSettings = @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)};
// Set the output's delegate.
dispatch_queue_t queue = dispatch_queue_create("MyQueue", NULL);
[newVideoDataOutput setSampleBufferDelegate:self queue:queue];
dispatch_release(queue);
// Create session (use default AVCaptureSessionPresetHigh)
AVCaptureSession *newCaptureSession = [[AVCaptureSession alloc] init];
// Add inputs and output to the capture session
if ([newCaptureSession canAddInput:newVideoInput]) {
[newCaptureSession addInput:newVideoInput];
}
if ([newCaptureSession canAddOutput:newVideoDataOutput]) {
[newCaptureSession addOutput:newVideoDataOutput];
}
[self setVideoOutput:newVideoDataOutput];
[self setVideoInput:newVideoInput];
[self setCaptureSession:newCaptureSession];
[newVideoDataOutput release];
[newVideoInput release];
[newCaptureSession release];
// Get our view's layer.
CALayer *viewLayer = self.view.layer;
// Add the the session's preview layer.
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession];
captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
captureVideoPreviewLayer.frame = self.view.bounds;
[viewLayer insertSublayer:captureVideoPreviewLayer below:self.imageViewOverlay.layer];
[captureVideoPreviewLayer release];
success = YES;
return success;
}
*Important note: the camera freezes right after the call to the output's delegate.
Can anyone please help me solve this? It seems as if I have tried almost everything!
Update #1
As requested: self refers to a view controller that presents the session's preview layer and manages everything related to the camera. In addition, I'm posting the code that handles the frontFacingCamera part, which (just like all of the code I have posted so far) is taken from Apple's AVCam sample code:
// Find a camera with the specificed AVCaptureDevicePosition, returning nil if one is not found
- (AVCaptureDevice *) cameraWithPosition:(AVCaptureDevicePosition) position
{
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *device in devices) {
if ([device position] == position) {
return device;
}
}
return nil;
}
// Find a front facing camera, returning nil if one is not found
- (AVCaptureDevice *) frontFacingCamera
{
return [self cameraWithPosition:AVCaptureDevicePositionFront];
}
BOOL isUsingFrontFacingCamera;
- (BOOL) swapCameras
{
if ([self cameraCount] > 1) {
AVCaptureDevicePosition desiredPosition;
if (isUsingFrontFacingCamera) {
desiredPosition = AVCaptureDevicePositionBack;
} else {
desiredPosition = AVCaptureDevicePositionFront;
}
for (AVCaptureDevice *d in [AVCaptureDevice devicesWithMediaType: AVMediaTypeVideo]) {
if ([d position] == desiredPosition) {
[[self session] beginConfiguration];
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:d error:nil];
for (AVCaptureInput *oldInput in [[self session] inputs]) {
[[self session] removeInput:oldInput];
}
[[self session] addInput:input];
[[self session] commitConfiguration];
break;
}
}
isUsingFrontFacingCamera = !isUsingFrontFacingCamera;
return YES;
}
return NO;
}
Answer from here.
EDITED: This is from Apple's SquareCam sample code. It works fine; I have tested it as well. http://developer.apple.com/library/ios/#samplecode/SquareCam/Listings/SquareCam_SqareCamViewController_m.html
// use front/back camera
- (IBAction)switchCameras:(id)sender
{
AVCaptureDevicePosition desiredPosition;
if (isUsingFrontFacingCamera)
desiredPosition = AVCaptureDevicePositionBack;
else
desiredPosition = AVCaptureDevicePositionFront;
for (AVCaptureDevice *d in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
if ([d position] == desiredPosition) {
[[previewLayer session] beginConfiguration];
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:d error:nil];
for (AVCaptureInput *oldInput in [[previewLayer session] inputs]) {
[[previewLayer session] removeInput:oldInput];
}
[[previewLayer session] addInput:input];
[[previewLayer session] commitConfiguration];
break;
}
}
isUsingFrontFacingCamera = !isUsingFrontFacingCamera;
}
In my application, which records and plays audio using AVAudioRecorder and AVAudioPlayer, I came across a scenario involving an incoming phone call. If a phone call arrives while recording is in progress, only the audio recorded after the phone call ends up in the file. I want the audio recorded after the phone call to be a continuation of the audio recorded before it.
I track the interruption occurring in the audio recorder using the AVAudioRecorderDelegate methods
(void)audioRecorderBeginInterruption:(AVAudioRecorder *)avRecorder
and
(void)audioRecorderEndInterruption:(AVAudioRecorder *)avRecorder.
In my end-interruption method I activate the audio session.
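For context, here is a minimal sketch of the two delegate callbacks mentioned above; the idea of reactivating the session and calling record again in the end-interruption callback is an assumption about what my implementation does, and, as described below, on its own it still produces a file containing only the audio captured after the interruption.

#pragma mark - AVAudioRecorderDelegate (interruption handling sketch)
- (void)audioRecorderBeginInterruption:(AVAudioRecorder *)avRecorder
{
    // Recording has already been stopped by the system at this point;
    // remember any state (e.g. elapsed time) needed to resume later.
}
- (void)audioRecorderEndInterruption:(AVAudioRecorder *)avRecorder
{
    // Reactivate the audio session and try to resume recording.
    NSError *error = nil;
    [[AVAudioSession sharedInstance] setActive:YES error:&error];
    if (error) {
        NSLog(@"Could not reactivate audio session: %@", error);
        return;
    }
    [avRecorder record]; // attempt to resume recording
}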
Here is the recording code that I use
- (void)startRecordingProcess
{
AVAudioSession *audioSession = [AVAudioSession sharedInstance];
NSError *err = nil;
[audioSession setCategory:AVAudioSessionCategoryPlayAndRecord error:&err];
if(err)
{
DEBUG_LOG(#"audioSession: %# %d %#", [err domain], [err code], [[err userInfo] description]);
return;
}
[audioSession setActive:YES error:&err];
err = nil;
if(err)
{
DEBUG_LOG(#"audioSession: %# %d %#", [err domain], [err code], [[err userInfo] description]);
return;
}
// Record settings for recording the audio
recordSetting = [[NSDictionary alloc] initWithObjectsAndKeys:
[NSNumber numberWithInt:kAudioFormatMPEG4AAC],AVFormatIDKey,
[NSNumber numberWithInt:44100],AVSampleRateKey,
[NSNumber numberWithInt: 2],AVNumberOfChannelsKey,
[NSNumber numberWithInt:16],AVLinearPCMBitDepthKey,
[NSNumber numberWithBool:NO],AVLinearPCMIsBigEndianKey,
[NSNumber numberWithBool:NO],AVLinearPCMIsFloatKey,
nil];
BOOL fileExists = [[NSFileManager defaultManager] fileExistsAtPath:recorderFilePath];
if (fileExists)
{
BOOL appendingFileExists =
[[NSFileManager defaultManager] fileExistsAtPath:appendingFilePath];
if (appendingFileExists)
{
[[NSFileManager defaultManager]removeItemAtPath:appendingFilePath error:nil];
}
if (appendingFilePath)
{
[appendingFilePath release];
appendingFilePath = nil;
}
appendingFilePath = [[NSString alloc]initWithFormat:@"%@/AppendedAudio.m4a", DOCUMENTS_FOLDER];
fileUrl = [NSURL fileURLWithPath:appendingFilePath];
}
else
{
isFirstTime = YES;
if (recorderFilePath)
{
DEBUG_LOG(#"Testing 2");
[recorderFilePath release];
recorderFilePath = nil;
}
DEBUG_LOG(#"Testing 3");
recorderFilePath = [[NSString alloc]initWithFormat:#"%#/RecordedAudio.m4a", DOCUMENTS_FOLDER];
fileUrl = [NSURL fileURLWithPath:recorderFilePath];
}
err = nil;
recorder = [[AVAudioRecorder alloc] initWithURL:fileUrl settings:recordSetting error:&err];
if(!recorder)
{
DEBUG_LOG(#"recorder: %# %d %#", [err domain], [err code], [[err userInfo] description]);
[[AlertFunctions sharedInstance] showMessageWithTitle:kAppName
message:[err localizedDescription]
delegate:nil
cancelButtonTitle:#"Ok"];
return;
}
//prepare to record
[recorder setDelegate:self];
[recorder prepareToRecord];
recorder.meteringEnabled = YES;
[recorder record];
}
While searching for a solution to this issue I came across other links,
how to resume recording after interruption occured in iphone? and http://www.iphonedevsdk.com/forum/iphone-sdk-development/31268-avaudiorecorderdelegate-interruption.html, which discuss the same issue.
I tried the suggestions given in those links, but they were not successful.
I hope to make it work with AVAudioRecorder itself.
Is there any way I could find a solution to this issue?
All valuable suggestions are appreciated.
After further research, I was notified by Apple that this is an issue with the current API. So I managed to find a workaround by saving the previous audio file just after the interruption and joining it with the resumed audio file. I hope this helps someone out there who faces the same issue.
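A compressed sketch of that joining step is shown here, assuming two recorded .m4a files (the pre-interruption file and the resumed file) and that AVFoundation is imported; a fuller, multi-file version of the same idea appears in the answer below.

// Sketch: join two audio files by inserting both into one composition track,
// then exporting the composition as a single .m4a file.
- (void)joinAudioAtURL:(NSURL *)firstURL withAudioAtURL:(NSURL *)secondURL toURL:(NSURL *)outputURL
{
    AVMutableComposition *composition = [AVMutableComposition composition];
    AVMutableCompositionTrack *track =
        [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                 preferredTrackID:kCMPersistentTrackID_Invalid];
    AVURLAsset *first = [AVURLAsset URLAssetWithURL:firstURL options:nil];
    AVURLAsset *second = [AVURLAsset URLAssetWithURL:secondURL options:nil];
    NSError *error = nil;
    [track insertTimeRange:CMTimeRangeMake(kCMTimeZero, first.duration)
                   ofTrack:[[first tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                    atTime:kCMTimeZero
                     error:&error];
    [track insertTimeRange:CMTimeRangeMake(kCMTimeZero, second.duration)
                   ofTrack:[[second tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                    atTime:first.duration
                     error:&error];
    AVAssetExportSession *export =
        [AVAssetExportSession exportSessionWithAsset:composition
                                          presetName:AVAssetExportPresetAppleM4A];
    export.outputURL = outputURL;
    export.outputFileType = AVFileTypeAppleM4A;
    [export exportAsynchronouslyWithCompletionHandler:^{
        NSLog(@"Join finished with status %d", (int)export.status);
    }];
}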
I was also facing a similar issue where AVAudioRecorder was recording only after the interruption.
So I fixed this issue by maintaining an array of recordings, keeping them in NSTemporaryDirectory, and finally merging them at the end.
Below are the key steps:
Make your class listen to the AVAudioSessionInterruptionNotification.
On interruption begin (AVAudioSessionInterruptionTypeBegan), save your recording.
On interruption end (AVAudioSessionInterruptionTypeEnded), start a new recording if the interruption option is AVAudioSessionInterruptionOptionShouldResume.
Append all recordings when the Save button is tapped.
The code snippets for the above-mentioned steps are:
// 1. Make this class listen to the AVAudioSessionInterruptionNotification in viewDidLoad
- (void)viewDidLoad
{
[super viewDidLoad];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(handleAudioSessionInterruption:)
name:AVAudioSessionInterruptionNotification
object:[AVAudioSession sharedInstance]];
// other coding stuff
}
// observe the interruption begin / end
- (void)handleAudioSessionInterruption:(NSNotification*)notification
{
AVAudioSessionInterruptionType interruptionType = [notification.userInfo[AVAudioSessionInterruptionTypeKey] unsignedIntegerValue];
AVAudioSessionInterruptionOptions interruptionOption = [notification.userInfo[AVAudioSessionInterruptionOptionKey] unsignedIntegerValue];
switch (interruptionType) {
// 2. save recording on interruption begin
case AVAudioSessionInterruptionTypeBegan:{
// stop recording
// Update the UI accordingly
break;
}
case AVAudioSessionInterruptionTypeEnded:{
if (interruptionOption == AVAudioSessionInterruptionOptionShouldResume) {
// create a new recording
// Update the UI accordingly
}
break;
}
default:
break;
}
}
// 4. append all recordings
- (void) audioRecorderDidFinishRecording:(AVAudioRecorder *)avrecorder successfully:(BOOL)flag
{
// append all recordings one after other
}
Here is a working example:
//
// XDRecordViewController.m
//
// Created by S1LENT WARRIOR
//
#import "XDRecordViewController.h"
@interface XDRecordViewController ()
{
AVAudioRecorder *recorder;
__weak IBOutlet UIButton* btnRecord;
__weak IBOutlet UIButton* btnSave;
__weak IBOutlet UIButton* btnDiscard;
__weak IBOutlet UILabel* lblTimer; // a UILabel to display the recording time
// some variables to display the timer on a lblTimer
NSTimer* timer;
NSTimeInterval intervalTimeElapsed;
NSDate* pauseStart;
NSDate* previousFireDate;
NSDate* recordingStartDate;
// interruption handling variables
BOOL isInterrupted;
NSInteger preInterruptionDuration;
NSMutableArray* recordings; // an array of recordings to be merged in the end
}
@end
@implementation XDRecordViewController
- (void)viewDidLoad
{
[super viewDidLoad];
// Make this class listen to the AVAudioSessionInterruptionNotification
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(handleAudioSessionInterruption:)
name:AVAudioSessionInterruptionNotification
object:[AVAudioSession sharedInstance]];
[self clearContentsOfDirectory:NSTemporaryDirectory()]; // clear contents of NSTemporaryDirectory()
recordings = [NSMutableArray new]; // initialize recordings
[self setupAudioSession]; // setup the audio session. you may customize it according to your requirements
}
- (void)viewDidAppear:(BOOL)animated
{
[super viewDidAppear:animated];
[self initRecording]; // start recording as soon as the view appears
}
- (void)dealloc
{
[self clearContentsOfDirectory:NSTemporaryDirectory()]; // remove all files files from NSTemporaryDirectory
[[NSNotificationCenter defaultCenter] removeObserver:self]; // remove this class from NSNotificationCenter
}
#pragma mark - Event Listeners
// called when recording button is tapped
- (IBAction) btnRecordingTapped:(UIButton*)sender
{
sender.selected = !sender.selected; // toggle the button
if (sender.selected) { // resume recording
[recorder record];
[self resumeTimer];
} else { // pause recording
[recorder pause];
[self pauseTimer];
}
}
// called when save button is tapped
- (IBAction) btnSaveTapped:(UIButton*)sender
{
[self pauseTimer]; // pause the timer
// disable the UI while the recording is saving so that user may not press the save, record or discard button again
btnSave.enabled = NO;
btnRecord.enabled = NO;
btnDiscard.enabled = NO;
[recorder stop]; // stop the AVAudioRecorder so that the audioRecorderDidFinishRecording delegate function may get called
// Deactivate the AVAudioSession
NSError* error;
[[AVAudioSession sharedInstance] setActive:NO error:&error];
if (error) {
NSLog(#"%#", error);
}
}
// called when discard button is tapped
- (IBAction) btnDiscardTapped:(id)sender
{
[self stopTimer]; // stop the timer
recorder.delegate = Nil; // set delegate to Nil so that audioRecorderDidFinishRecording delegate function may not get called
[recorder stop]; // stop the recorder
// Deactivate the AVAudioSession
NSError* error;
[[AVAudioSession sharedInstance] setActive:NO error:&error];
if (error) {
NSLog(#"%#", error);
}
[self.navigationController popViewControllerAnimated:YES];
}
#pragma mark - Notification Listeners
// called when an AVAudioSessionInterruption occurs
- (void)handleAudioSessionInterruption:(NSNotification*)notification
{
AVAudioSessionInterruptionType interruptionType = [notification.userInfo[AVAudioSessionInterruptionTypeKey] unsignedIntegerValue];
AVAudioSessionInterruptionOptions interruptionOption = [notification.userInfo[AVAudioSessionInterruptionOptionKey] unsignedIntegerValue];
switch (interruptionType) {
case AVAudioSessionInterruptionTypeBegan:{
// • Recording has stopped, already inactive
// • Change state of UI, etc., to reflect non-recording state
preInterruptionDuration += recorder.currentTime; // time elapsed
if(btnRecord.selected) { // timer is already running
[self btnRecordingTapped:btnRecord]; // pause the recording and pause the timer
}
recorder.delegate = Nil; // Set delegate to nil so that audioRecorderDidFinishRecording may not get called
[recorder stop]; // stop recording
isInterrupted = YES;
break;
}
case AVAudioSessionInterruptionTypeEnded:{
// • Make session active
// • Update user interface
// • AVAudioSessionInterruptionOptionShouldResume option
if (interruptionOption == AVAudioSessionInterruptionOptionShouldResume) {
// Here you should create a new recording
[self initRecording]; // create a new recording
[self btnRecordingTapped:btnRecord];
}
break;
}
default:
break;
}
}
#pragma mark - AVAudioRecorderDelegate
- (void) audioRecorderDidFinishRecording:(AVAudioRecorder *)avrecorder successfully:(BOOL)flag
{
[self appendAudiosAtURLs:recordings completion:^(BOOL success, NSURL *outputUrl) {
// do whatever you want with the new audio file :)
}];
}
#pragma mark - Timer
- (void)timerFired:(NSTimer*)timer
{
intervalTimeElapsed++;
[self updateDisplay];
}
// function to time string
- (NSString*) timerStringSinceTimeInterval:(NSTimeInterval)timeInterval
{
NSDate *timerDate = [NSDate dateWithTimeIntervalSince1970:timeInterval];
NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
[dateFormatter setDateFormat:@"mm:ss"];
[dateFormatter setTimeZone:[NSTimeZone timeZoneForSecondsFromGMT:0.0]];
return [dateFormatter stringFromDate:timerDate];
}
// called when recording pauses
- (void) pauseTimer
{
pauseStart = [NSDate dateWithTimeIntervalSinceNow:0];
previousFireDate = [timer fireDate];
[timer setFireDate:[NSDate distantFuture]];
}
- (void) resumeTimer
{
if (!timer) {
timer = [NSTimer scheduledTimerWithTimeInterval:1.0
target:self
selector:@selector(timerFired:)
userInfo:Nil
repeats:YES];
return;
}
float pauseTime = - 1 * [pauseStart timeIntervalSinceNow];
[timer setFireDate:[previousFireDate initWithTimeInterval:pauseTime sinceDate:previousFireDate]];
}
- (void)stopTimer
{
[self updateDisplay];
[timer invalidate];
timer = nil;
}
- (void)updateDisplay
{
lblTimer.text = [self timerStringSinceTimeInterval:intervalTimeElapsed];
}
#pragma mark - Helper Functions
- (void) initRecording
{
// Set the audio file
NSString* name = [NSString stringWithFormat:@"recording_%@.m4a", @(recordings.count)]; // creating a unique name for each audio file
NSURL *outputFileURL = [NSURL fileURLWithPathComponents:@[NSTemporaryDirectory(), name]];
[recordings addObject:outputFileURL];
// Define the recorder settings
NSMutableDictionary *recordSetting = [[NSMutableDictionary alloc] init];
[recordSetting setValue:@(kAudioFormatMPEG4AAC) forKey:AVFormatIDKey];
[recordSetting setValue:@(44100.0) forKey:AVSampleRateKey];
[recordSetting setValue:@(1) forKey:AVNumberOfChannelsKey];
NSError* error;
// Initiate and prepare the recorder
recorder = [[AVAudioRecorder alloc] initWithURL:outputFileURL settings:recordSetting error:&error];
recorder.delegate = self;
recorder.meteringEnabled = YES;
[recorder prepareToRecord];
if (![AVAudioSession sharedInstance].inputAvailable) { // can not record audio if mic is unavailable
NSLog(#"Error: Audio input device not available!");
return;
}
intervalTimeElapsed = 0;
recordingStartDate = [NSDate date];
if (isInterrupted) {
intervalTimeElapsed = preInterruptionDuration;
isInterrupted = NO;
}
// Activate the AVAudioSession
[[AVAudioSession sharedInstance] setActive:YES error:&error];
if (error) {
NSLog(#"%#", error);
}
recordingStartDate = [NSDate date]; // Set the recording start date
[self btnRecordingTapped:btnRecord];
}
- (void)setupAudioSession
{
static BOOL audioSessionSetup = NO;
if (audioSessionSetup) {
return;
}
AVAudioSession* session = [AVAudioSession sharedInstance];
[session setCategory:AVAudioSessionCategoryPlayAndRecord
withOptions:AVAudioSessionCategoryOptionDefaultToSpeaker
error:Nil];
[session setMode:AVAudioSessionModeSpokenAudio error:nil];
audioSessionSetup = YES;
}
// gets an array of audios and append them to one another
// the basic logic was derived from here: http://stackoverflow.com/a/16040992/634958
// i modified this logic to append multiple files
- (void) appendAudiosAtURLs:(NSMutableArray*)urls completion:(void(^)(BOOL success, NSURL* outputUrl))handler
{
// Create a new audio track we can append to
AVMutableComposition* composition = [AVMutableComposition composition];
AVMutableCompositionTrack* appendedAudioTrack =
[composition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID:kCMPersistentTrackID_Invalid];
// Grab the first audio track that need to be appended
AVURLAsset* originalAsset = [[AVURLAsset alloc]
initWithURL:urls.firstObject options:nil];
[urls removeObjectAtIndex:0];
NSError* error = nil;
// Grab the first audio track and insert it into our appendedAudioTrack
AVAssetTrack *originalTrack = [[originalAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
CMTimeRange timeRange = CMTimeRangeMake(kCMTimeZero, originalAsset.duration);
[appendedAudioTrack insertTimeRange:timeRange
ofTrack:originalTrack
atTime:kCMTimeZero
error:&error];
CMTime duration = originalAsset.duration;
if (error) {
if (handler) {
dispatch_async(dispatch_get_main_queue(), ^{
handler(NO, Nil);
});
}
}
for (NSURL* audioUrl in urls) {
AVURLAsset* newAsset = [[AVURLAsset alloc]
initWithURL:audioUrl options:nil];
// Grab the rest of the audio tracks and insert them at the end of each other
AVAssetTrack *newTrack = [[newAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
timeRange = CMTimeRangeMake(kCMTimeZero, newAsset.duration);
[appendedAudioTrack insertTimeRange:timeRange
ofTrack:newTrack
atTime:duration
error:&error];
duration = appendedAudioTrack.timeRange.duration;
if (error) {
if (handler) {
dispatch_async(dispatch_get_main_queue(), ^{
handler(NO, Nil);
});
}
}
}
// Create a new audio file using the appendedAudioTrack
AVAssetExportSession* exportSession = [AVAssetExportSession
exportSessionWithAsset:composition
presetName:AVAssetExportPresetAppleM4A];
if (!exportSession) {
if (handler) {
dispatch_async(dispatch_get_main_queue(), ^{
handler(NO, Nil);
});
}
}
NSArray* appendedAudioPath = @[NSTemporaryDirectory(), @"temp.m4a"]; // name of the final audio file
exportSession.outputURL = [NSURL fileURLWithPathComponents:appendedAudioPath];
exportSession.outputFileType = AVFileTypeAppleM4A;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
BOOL success = NO;
// exported successfully?
switch (exportSession.status) {
case AVAssetExportSessionStatusFailed:
break;
case AVAssetExportSessionStatusCompleted: {
success = YES;
break;
}
case AVAssetExportSessionStatusWaiting:
break;
default:
break;
}
if (handler) {
dispatch_async(dispatch_get_main_queue(), ^{
handler(success, exportSession.outputURL);
});
}
}];
}
- (void) clearContentsOfDirectory:(NSString*)directory
{
NSFileManager *fm = [NSFileManager defaultManager];
NSError *error = nil;
for (NSString *file in [fm contentsOfDirectoryAtPath:directory error:&error]) {
[fm removeItemAtURL:[NSURL fileURLWithPathComponents:#[directory, file]] error:&error];
}
}
@end
I know it's too late to answer this question, but I hope it helps someone else!
I am trying to record a video from the front camera of an iPhone 4 using the AVFoundation framework, with the help of the WWDC samples I got from the iPhone Developer Program. But I still can't get it to work; the video does not get recorded, or maybe it is not saved to my iPhone library. Here is the code I am trying to use. It would be really helpful if someone could help me with the problem I am having.
-(void)recordVideo
{
AVCaptureDeviceInput *videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self backFacingCamera] error:nil];
AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
AVCaptureSession *session = [[AVCaptureSession alloc] init];
[session addInput:videoInput];
[session addOutput:movieFileOutput];
[movieFileOutput release];
if (![session isRunning])
{
[self performSelector:@selector(startRecording) withObject:nil afterDelay:1.0];
[session startRunning];
}
}
- (void) startRecording
{
NSLog(#"start recording");
AVCaptureConnection *videoConnection = [playVideo connectionWithMediaType:AVMediaTypeVideo fromConnections:[[self movieFileOutput] connections]];
if ([videoConnection isVideoOrientationSupported]) {
[videoConnection setVideoOrientation:[self orientation]];
}
[[self movieFileOutput] startRecordingToOutputFileURL:[self tempFileURL]
recordingDelegate:self];
}
- (void) stopRecording
{
NSLog(#"stop recording");
[[self movieFileOutput] stopRecording];
}
- (NSURL *) tempFileURL
{
NSLog(#"temp file url");
NSString *outputPath = [[NSString alloc] initWithFormat:#"%#%#", NSTemporaryDirectory(), #"output.mov"];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
NSFileManager *fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:outputPath]) {
NSLog(#"exists");
}
[outputPath release];
return [outputURL autorelease];
}
- (void) setConnectionWithMediaType:(NSString *)mediaType enabled:(BOOL)enabled;
{
[[playVideo connectionWithMediaType:AVMediaTypeVideo fromConnections:[[self movieFileOutput] connections]] setEnabled:enabled];
}
+ (AVCaptureConnection *)connectionWithMediaType:(NSString *)mediaType fromConnections:(NSArray *)connections;
{
NSLog(#"connection with media type");
for ( AVCaptureConnection *connection in connections ) {
for ( AVCaptureInputPort *port in [connection inputPorts] ) {
if ( [[port mediaType] isEqual:mediaType] ) {
return [[connection retain] autorelease];
}
}
}
return nil;
}
@implementation recordVideo (Internal)
- (AVCaptureDevice *) cameraWithPosition:(AVCaptureDevicePosition) position
{
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *device in devices) {
if ([device position] == position) {
return device;
}
}
return nil;
}
- (AVCaptureDevice *) backFacingCamera
{
NSLog(#"back");
return [self cameraWithPosition:AVCaptureDevicePositionBack];
}
- (AVCaptureDevice *) frontFacingCamera
{
NSLog(#"front ");
return [self cameraWithPosition:AVCaptureDevicePositionFront];
}
@end
@implementation recordVideo (AVCaptureFileOutputRecordingDelegate)
- (void) captureOutput:(AVCaptureFileOutput *)captureOutput
didStartRecordingToOutputFileAtURL:(NSURL *)fileURL
fromConnections:(NSArray *)connections
{
NSLog(#"did start recording");
}
- (void) captureOutput:(AVCaptureFileOutput *)captureOutput
didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
fromConnections:(NSArray *)connections
error:(NSError *)error
{
NSLog(#"did finish recording output file");
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputFileURL])
{
[library writeVideoAtPathToSavedPhotosAlbum:outputFileURL
completionBlock:^(NSURL *assetURL, NSError *error){
if (error && [delegate respondsToSelector:#selector(assetLibraryError:forURL:)]) {
[delegate assetLibraryError:error forURL:assetURL];
}
}];
}
else {
}
[library release];
}
@end
You are throwing away the errors; how can you debug if you do that?
You need to be more specific about what is and is not happening. Which methods are called? What errors are reported by the SDK (if any)? And so on.
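To make that concrete, here is a minimal sketch of creating the device input with an error pointer and logging whatever the SDK reports, instead of passing nil; the variable names mirror the question's recordVideo method.

// Capture and log the error instead of passing nil, so failures are visible.
NSError *error = nil;
AVCaptureDeviceInput *videoInput =
    [AVCaptureDeviceInput deviceInputWithDevice:[self backFacingCamera]
                                          error:&error];
if (!videoInput) {
    NSLog(@"Could not create video input: %@", error);
    return;
}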
You have released the reference to movieFileOutput in -(void)recordVideo, yet you are still using the released movieFileOutput in -(void)startRecording as well as in other methods. You should not release an object that you are still using.
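As a sketch of that point: under manual reference counting, the session and the movie file output would typically be stored in retained instance variables and released only when recording is completely finished, rather than released immediately after being added to the session. The ivar names here are illustrative, not the poster's actual ones.

// Keep strong (retained) references for as long as recording can occur.
// Under MRC, release these in dealloc (or after recording finishes),
// not right after adding them to the session.
- (void)recordVideo
{
    session = [[AVCaptureSession alloc] init];                 // retained ivar
    movieFileOutput = [[AVCaptureMovieFileOutput alloc] init]; // retained ivar

    AVCaptureDeviceInput *videoInput =
        [AVCaptureDeviceInput deviceInputWithDevice:[self backFacingCamera] error:nil];
    if (videoInput && [session canAddInput:videoInput]) {
        [session addInput:videoInput];
    }
    if ([session canAddOutput:movieFileOutput]) {
        [session addOutput:movieFileOutput];
    }
    [session startRunning];
    [self performSelector:@selector(startRecording) withObject:nil afterDelay:1.0];
}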