I have to implement functionality to repeatedly pause and resume video capture in a single session, with each new segment (the segments captured after each pause) appended to the same video file, using AVFoundation. Currently, every time I press "stop" and then "record" again, it just saves a new video file to my iPhone's Documents directory and starts capturing to a new file. I need to be able to press the "record/stop" button over and over, capture video & audio only while record is active, and then, when the "done" button is pressed, have a single AV file with all the segments together. And all of this needs to happen in the same capture session / preview session.
I am not using AVAssetWriterInput.
The only way I can think of to do this is, when the "done" button is pressed, to take each individual output file and combine them into a single file.
This code works on iOS 5 but not on iOS 6. On iOS 6, the first time I pause recording (stop recording), the AVCaptureFileOutputRecordingDelegate method (captureOutput: didFinishRecordingToOutputFileAtURL: fromConnections: error:) is called. But when I start recording again, the delegate method is called once more right away, and after that it is no longer called when I stop recording.
I need a solution for that issue. Please help me.
//View LifeCycle
- (void)viewDidLoad
{
    [super viewDidLoad];
    self.finalRecordedVideoName = [self stringWithNewUUID];
    arrVideoName = [[NSMutableArray alloc] initWithCapacity:0];
    arrOutputUrl = [[NSMutableArray alloc] initWithCapacity:0];
    CaptureSession = [[AVCaptureSession alloc] init];
    captureDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    if ([captureDevices count] > 0)
    {
        NSError *error;
        VideoInputDevice = [[AVCaptureDeviceInput alloc] initWithDevice:[self backFacingCamera] error:&error];
        if (!error)
        {
            if ([CaptureSession canAddInput:VideoInputDevice])
                [CaptureSession addInput:VideoInputDevice];
            else
                NSLog(@"Couldn't add video input");
        }
        else
        {
            NSLog(@"Couldn't create video input");
        }
    }
    else
    {
        NSLog(@"Couldn't create video capture device");
    }

    //ADD VIDEO PREVIEW LAYER
    NSLog(@"Adding video preview layer");
    AVCaptureVideoPreviewLayer *layer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:CaptureSession];
    [self setPreviewLayer:layer];

    UIDeviceOrientation currentOrientation = [UIDevice currentDevice].orientation;
    NSLog(@"%d", currentOrientation);
    if (currentOrientation == UIDeviceOrientationPortrait)
    {
        PreviewLayer.orientation = AVCaptureVideoOrientationPortrait;
    }
    else if (currentOrientation == UIDeviceOrientationPortraitUpsideDown)
    {
        PreviewLayer.orientation = AVCaptureVideoOrientationPortraitUpsideDown;
    }
    else if (currentOrientation == UIDeviceOrientationLandscapeRight)
    {
        PreviewLayer.orientation = AVCaptureVideoOrientationLandscapeRight;
    }
    else if (currentOrientation == UIDeviceOrientationLandscapeLeft)
    {
        PreviewLayer.orientation = AVCaptureVideoOrientationLandscapeLeft;
    }
    [[self PreviewLayer] setVideoGravity:AVLayerVideoGravityResizeAspectFill];

    //ADD MOVIE FILE OUTPUT
    NSLog(@"Adding movie file output");
    MovieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
    VideoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
    [VideoDataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];

    NSString *key = (NSString *)kCVPixelBufferBytesPerRowAlignmentKey;
    NSNumber *value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
    [VideoDataOutput setVideoSettings:videoSettings];

    Float64 TotalSeconds = 60;       //Total seconds
    int32_t preferredTimeScale = 30; //Frames per second
    CMTime maxDuration = CMTimeMakeWithSeconds(TotalSeconds, preferredTimeScale); //<<SET MAX DURATION
    MovieFileOutput.maxRecordedDuration = maxDuration;
    MovieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024; //<<SET MIN FREE SPACE IN BYTES FOR RECORDING TO CONTINUE ON A VOLUME

    //SET THE CONNECTION PROPERTIES (output properties)
    [self CameraSetOutputProperties]; //(We call a method as it also has to be done after changing camera)

    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in [MovieFileOutput connections])
    {
        NSLog(@"%@", connection);
        for (AVCaptureInputPort *port in [connection inputPorts])
        {
            NSLog(@"%@", port);
            if ([[port mediaType] isEqual:AVMediaTypeVideo])
            {
                videoConnection = connection;
            }
        }
    }
    if ([videoConnection isVideoOrientationSupported]) // **Here it is, it's always false**
    {
        [videoConnection setVideoOrientation:[[UIDevice currentDevice] orientation]];
    }
    NSLog(@"Setting image quality");
    [CaptureSession setSessionPreset:AVCaptureSessionPresetLow];

    //----- DISPLAY THE PREVIEW LAYER -----
    CGRect layerRect = CGRectMake(5, 5, 299, ([[UIScreen mainScreen] bounds].size.height == 568) ? 438 : 348);
    [self.PreviewLayer setBounds:layerRect];
    [self.PreviewLayer setPosition:CGPointMake(CGRectGetMidX(layerRect), CGRectGetMidY(layerRect))];
    if ([CaptureSession canAddOutput:MovieFileOutput])
        [CaptureSession addOutput:MovieFileOutput];
    [CaptureSession addOutput:VideoDataOutput];

    //We use this instead so it goes on a layer behind our UI controls (avoids us having to manually bring each control to the front):
    CameraView = [[UIView alloc] init];
    [videoPreviewLayer addSubview:CameraView];
    [videoPreviewLayer sendSubviewToBack:CameraView];
    [[CameraView layer] addSublayer:PreviewLayer];

    //----- START THE CAPTURE SESSION RUNNING -----
    [CaptureSession startRunning];
}
#pragma mark - IBAction Methods
- (IBAction)btnStartAndStopPressed:(id)sender
{
    UIButton *StartAndStopButton = (UIButton *)sender;
    if ([StartAndStopButton isSelected] == NO)
    {
        [StartAndStopButton setSelected:YES];
        [btnPauseAndResume setEnabled:YES];
        [btnBack setEnabled:NO];
        [btnSwitchCameraInput setHidden:YES];

        NSDate *date = [NSDate date];
        NSLog(@" date %@", date);
        NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
        NSString *recordedFileName = nil;
        recordedFileName = [NSString stringWithFormat:@"output%@.mov", date];
        NSString *documentsDirectory = [paths objectAtIndex:0];
        self.outputPath = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"%@", recordedFileName]];
        NSLog(@"%@", self.outputPath);
        [arrVideoName addObject:recordedFileName];

        NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:self.outputPath];
        if ([[NSFileManager defaultManager] fileExistsAtPath:self.outputPath])
        {
            NSError *error;
            if ([[NSFileManager defaultManager] removeItemAtPath:self.outputPath error:&error] == NO)
            {
                //Error - handle if required
            }
        }
        //Start recording
        [MovieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
        recordingTimer = [NSTimer scheduledTimerWithTimeInterval:1.0 target:self selector:@selector(VideoRecording) userInfo:nil repeats:YES];
    }
    else
    {
        [StartAndStopButton setSelected:NO];
        [btnPauseAndResume setEnabled:NO];
        [btnBack setEnabled:YES];
        [btnSwitchCameraInput setHidden:NO];
        NSLog(@"STOP RECORDING");
        WeAreRecording = NO;
        [MovieFileOutput stopRecording];
        [((ActOutAppDelegate *)ActOut_AppDelegate) showLoadingViewOnView:self.view withLabel:@"Please wait...."];
        if ([recordingTimer isValid])
        {
            [recordingTimer invalidate];
            recordingTimer = nil;
            recordingTime = 30;
        }
        stopRecording = YES;
    }
}
- (IBAction)btnPauseAndResumePressed:(id)sender
{
    UIButton *PauseAndResumeButton = (UIButton *)sender;
    if (PauseAndResumeButton.selected == NO)
    {
        PauseAndResumeButton.selected = YES;
        NSLog(@"recording paused");
        WeAreRecording = NO;
        [MovieFileOutput stopRecording];
        [self pauseTimer:recordingTimer];
        [btnStartAndStop setEnabled:NO];
        [btnBack setEnabled:YES];
        [btnSwitchCameraInput setHidden:NO];
    }
    else
    {
        PauseAndResumeButton.selected = NO;
        NSLog(@"recording resumed");
        [btnStartAndStop setEnabled:YES];
        [btnBack setEnabled:NO];
        [btnSwitchCameraInput setHidden:YES];
        WeAreRecording = YES;

        NSDate *date = [NSDate date];
        NSLog(@" date %@", date);
        NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
        NSString *recordedFileName = nil;
        recordedFileName = [NSString stringWithFormat:@"output%@.mov", date];
        NSString *documentsDirectory = [paths objectAtIndex:0];
        self.outputPath = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"%@", recordedFileName]];
        NSLog(@"%@", self.outputPath);
        [arrVideoName addObject:recordedFileName];

        NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:self.outputPath];
        if ([[NSFileManager defaultManager] fileExistsAtPath:self.outputPath])
        {
            NSError *error;
            if ([[NSFileManager defaultManager] removeItemAtPath:self.outputPath error:&error] == NO)
            {
                //Error - handle if required
            }
        }
        [self resumeTimer:recordingTimer];
        //Start recording
        [MovieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
    }
}
- (void)CameraSetOutputProperties
{
    //SET THE CONNECTION PROPERTIES (output properties)
    AVCaptureConnection *CaptureConnection = [MovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
    [CaptureConnection setVideoOrientation:AVCaptureVideoOrientationPortrait];

    //Set frame rate (if required)
    CMTimeShow(CaptureConnection.videoMinFrameDuration);
    CMTimeShow(CaptureConnection.videoMaxFrameDuration);
    if (CaptureConnection.supportsVideoMinFrameDuration)
        CaptureConnection.videoMinFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
    if (CaptureConnection.supportsVideoMaxFrameDuration)
        CaptureConnection.videoMaxFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
    CMTimeShow(CaptureConnection.videoMinFrameDuration);
    CMTimeShow(CaptureConnection.videoMaxFrameDuration);
}
- (AVCaptureDevice *)CameraWithPosition:(AVCaptureDevicePosition)Position
{
    NSArray *Devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *Device in Devices)
    {
        if ([Device position] == Position)
        {
            NSLog(@"%d", Position);
            return Device;
        }
    }
    return nil;
}
#pragma mark - AVCapture Delegate Methods
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    if (videoWriterInput.readyForMoreMediaData && WeAreRecording)
        [videoWriterInput appendSampleBuffer:sampleBuffer];
    for (AVCaptureConnection *captureConnection in [captureOutput connections])
    {
        if ([captureConnection isVideoOrientationSupported])
        {
            AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationLandscapeLeft;
            [captureConnection setVideoOrientation:orientation];
        }
    }

    UIDeviceOrientation curOr = [[UIDevice currentDevice] orientation];
    CGAffineTransform t;
    if (curOr == UIDeviceOrientationPortrait)
    {
        t = CGAffineTransformMakeRotation(-M_PI / 2);
    }
    else if (curOr == UIDeviceOrientationPortraitUpsideDown)
    {
        t = CGAffineTransformMakeRotation(M_PI / 2);
    }
    else if (curOr == UIDeviceOrientationLandscapeRight)
    {
        t = CGAffineTransformMakeRotation(M_PI);
    }
    else
    {
        t = CGAffineTransformMakeRotation(0);
    }
}
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
    NSLog(@"didFinishRecordingToOutputFileAtURL - enter");
    NSLog(@"output file url : %@", [outputFileURL absoluteString]);
    BOOL RecordedSuccessfully = YES;
    if ([error code] != noErr)
    {
        // A problem occurred: Find out if the recording was successful.
        id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
        if (value)
        {
            RecordedSuccessfully = [value boolValue];
        }
    }

    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in [MovieFileOutput connections])
    {
        NSLog(@"%@", connection);
        for (AVCaptureInputPort *port in [connection inputPorts])
        {
            NSLog(@"%@", port);
            if ([[port mediaType] isEqual:AVMediaTypeVideo])
            {
                videoConnection = connection;
            }
        }
    }
    if ([videoConnection isVideoOrientationSupported]) // **Here it is, it's always false**
    {
        [videoConnection setVideoOrientation:[[UIDevice currentDevice] orientation]];
    }
    NSLog(@"Setting image quality");

    NSData *videoData = [NSData dataWithContentsOfURL:outputFileURL];
    [videoData writeToFile:self.outputPath atomically:NO];
    [arrOutputUrl addObject:outputFileURL];
    if (stopRecording)
    {
        [self mergeMultipleVideo];
    }
}
//Method to merge multiple videos
- (void)mergeMultipleVideo
{
    mixComposition = [AVMutableComposition composition];
    AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    CMTime nextClipStartTime = kCMTimeZero;
    NSLog(@"Array of output file url : %@", arrOutputUrl);
    if (arrOutputUrl.count > 0)
    {
        for (int i = 0; i < [arrOutputUrl count]; i++)
        {
            AVURLAsset *VideoAsset = [[AVURLAsset alloc] initWithURL:[arrOutputUrl objectAtIndex:i] options:nil];
            CMTimeRange timeRangeInAsset;
            timeRangeInAsset = CMTimeRangeMake(kCMTimeZero, [VideoAsset duration]);
            [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, VideoAsset.duration) ofTrack:[[VideoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
            nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRangeInAsset.duration);
        }
    }
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];
    NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.mov", self.finalRecordedVideoName]];
    NSURL *url = [NSURL fileURLWithPath:myPathDocs];
    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    exportSession.outputURL = url;
    exportSession.outputFileType = AVFileTypeQuickTimeMovie;
    exportSession.shouldOptimizeForNetworkUse = YES;
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            [self exportDidFinish:exportSession path:myPathDocs];
        });
    }];
}
- (void)exportDidFinish:(AVAssetExportSession *)session path:(NSString *)outputVideoPath
{
    NSLog(@"session.status : %d", session.status);
    if (session.status == AVAssetExportSessionStatusCompleted)
    {
        NSURL *outputURL = session.outputURL;
        NSData *videoData = [NSData dataWithContentsOfURL:outputURL];
        [videoData writeToFile:outputVideoPath atomically:NO];
        if ([arrVideoName count] > 0)
        {
            for (int i = 0; i < [arrVideoName count]; i++)
            {
                NSArray *documentPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
                NSString *fullFilePath = [[documentPaths objectAtIndex:0] stringByAppendingPathComponent:[NSString stringWithFormat:@"%@", [arrVideoName objectAtIndex:i]]];
                NSLog(@"Full path of file to be deleted: %@", fullFilePath);
                NSFileManager *fileManager = [NSFileManager defaultManager];
                NSError *error;
                if ([fileManager fileExistsAtPath:fullFilePath])
                {
                    [fileManager removeItemAtPath:fullFilePath error:&error];
                }
            }
            [arrVideoName removeAllObjects];
        }
        if (arrOutputUrl.count > 0)
        {
            [arrOutputUrl removeAllObjects];
        }
        [((ActOutAppDelegate *)ActOut_AppDelegate) removeLoadingViewfromView:self.view];
        [self.view addSubview:afterRecordingPopupView];
    }
}
Look at the AVCaptureConnection's enabled property. For your output connection, set enabled to NO instead of stopping the session.
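A minimal sketch of that suggestion, using the MovieFileOutput from the question (whether this keeps everything in one file on iOS 6 is something to verify; treat it as a starting point, not a confirmed fix):

    // Pause: stop feeding frames to the movie file output without stopping the session
    AVCaptureConnection *connection = [MovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
    connection.enabled = NO;

    // Resume: re-enable the connection; the capture session keeps running throughout
    connection.enabled = YES;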
I'm using the CoreMotion API, and I would like to save the accelerometer's values every 10 ms (100 Hz). The update intervals I have obtained so far aren't accurate.
Here's an example of the update intervals I got (in seconds):
0.010414999997
0.0105919999769
0.0117060000193
0.0198359999922
0.00989700001082
0.0100809999858
0.0100519999978
0.0106810000143
0.010420000006
0.0107459999854
0.0105899999908
0.0105130000156
0.0104829999909
0.0107439999992
0.010391000018
0.0105859999894
0.0102320000005
0.010134000011
0.0101929999946
0.010666999995
0.00996399999713
0.0123709999898
0.0181950000115
0.0107940000016
0.00988500000676
0.0101469999936
0.0103529999906
As you can see, some values are higher than 10 ms.
Some more information:
- Xcode 3.2.6 / iOS 4.3 / armv7
- Tested on iPhone 4 (iOS 5.1) and iPod touch 4 (iOS 4.3.3)
- The source code:
- (void)viewDidLoad {
    started = NO;
    motionManager = [[CMMotionManager alloc] init];
    if ([motionManager isAccelerometerAvailable]) {
        motionManager.accelerometerUpdateInterval = 0.01; //100Hz
        motionQueue = [[NSOperationQueue alloc] init];
        [motionQueue setMaxConcurrentOperationCount:1]; // Serial operation queue
    } else {
        NSLog(@"Accelerometer is not available!\n");
    }
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSMutableString *documentsDirectory = [[NSMutableString alloc] initWithString:[paths objectAtIndex:0]];
    path = [[NSMutableString alloc] init];
    [path setString:[NSString stringWithFormat:@"%@/timestamp.cap", documentsDirectory]];
    NSFileManager *fileManager = [NSFileManager defaultManager];
    [fileManager removeItemAtPath:path error:NULL];
    [fileManager createFileAtPath:path contents:nil attributes:nil];
}
- (IBAction)startStopButton:(id)sender {
    if (!started) {
        started = YES;
        [sender setTitle:@"Stop" forState:UIControlStateNormal];
        CMAccelerometerHandler dataHandler = ^(CMAccelerometerData *accelerometerData, NSError *error) {
            NSString *content = [NSString stringWithFormat:@"%f\n", accelerometerData.timestamp];
            NSFileHandle *fh = [NSFileHandle fileHandleForWritingAtPath:path];
            [fh seekToEndOfFile];
            [fh writeData:[content dataUsingEncoding:NSUTF8StringEncoding]];
            [fh closeFile];
        };
        [motionManager startAccelerometerUpdatesToQueue:motionQueue withHandler:dataHandler];
    } else {
        started = NO;
        [motionManager stopAccelerometerUpdates];
        [motionQueue waitUntilAllOperationsAreFinished];
        [sender setTitle:@"Start" forState:UIControlStateNormal];
    }
}
Thank you in advance for your answers.
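One observation on the code above (not a confirmed fix): accelerometerData.timestamp is stamped when the sample is taken, so the jitter in the logged values reflects the sensor's actual sampling, not the file I/O in the handler. Even so, opening, seeking, and closing an NSFileHandle for every sample is expensive at 100 Hz; a sketch that keeps a single handle open for the whole run (fh becomes an ivar here, which is an assumption):

    // Open once, when recording starts
    fh = [NSFileHandle fileHandleForWritingAtPath:path];
    [fh seekToEndOfFile];
    CMAccelerometerHandler dataHandler = ^(CMAccelerometerData *accelerometerData, NSError *error) {
        NSString *content = [NSString stringWithFormat:@"%f\n", accelerometerData.timestamp];
        [fh writeData:[content dataUsingEncoding:NSUTF8StringEncoding]]; // no per-sample open/close
    };
    // ...and close once, in the stop branch:
    [fh closeFile];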
I have an iOS app that records video from the front-facing camera in the background, and it was working fine. But now I am trying to play a short mp4 at the same time, and playback using MPMoviePlayerController stops the capture session.
I tried AVPlayer instead, with the same result.
I also set [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayAndRecord error:nil];
Still no luck. Has anyone faced and solved the same problem?
Thanks for any suggestion.
Using the iOS 5 SDK.
#import "ViewController.h"
@interface ViewController ()
@end

@implementation ViewController
- (void)viewDidLoad
{
    [super viewDidLoad];
    // Do any additional setup after loading the view, typically from a nib.
    UIButton *recButton = [[UIButton alloc] initWithFrame:CGRectMake(10, 200, 200, 40)];
    recButton.backgroundColor = [UIColor blackColor];
    [recButton addTarget:self action:@selector(startRecording) forControlEvents:UIControlEventTouchUpInside];
    [self.view addSubview:recButton];
    isRecording = NO;
}

- (void)viewDidUnload
{
    [super viewDidUnload];
    // Release any retained subviews of the main view.
}

- (void)viewWillAppear:(BOOL)animated
{
    self.navigationController.navigationBarHidden = YES;
}

- (void)viewWillDisappear:(BOOL)animated
{
    self.navigationController.navigationBarHidden = NO;
}

- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation
{
    if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPhone) {
        return (interfaceOrientation == UIInterfaceOrientationPortrait);
    } else {
        return YES;
    }
}
#pragma mark video playing
- (void)startRecording
{
    if (isRecording) {
        [self stopVideoRecording];
        isRecording = NO;
    }
    else
    {
        [self initCaptureSession];
        [self startVideoRecording];
        isRecording = YES;
    }
    NSURL *url = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"new"
                                                                        ofType:@"mov"]];
    [self playMovieAtURL:url];
}
- (void)playMovieAtURL:(NSURL *)theURL {
    player = [[MPMoviePlayerController alloc] initWithContentURL:theURL];
    [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayAndRecord error:nil];
    player.scalingMode = MPMovieScalingModeAspectFill;
    player.controlStyle = MPMovieControlStyleNone;
    [player prepareToPlay];
    // Register for the playback finished notification
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(myMovieFinishedCallback:)
                                                 name:MPMoviePlayerPlaybackDidFinishNotification
                                               object:player];
    [player.view setFrame:self.view.bounds];
    [self.view addSubview:player.view];
    // Movie playback is asynchronous, so this method returns immediately.
    [player play];
}

// When the movie is done, release the controller.
- (void)myMovieFinishedCallback:(NSNotification *)aNotification
{
    MPMoviePlayerController *theMovie = [aNotification object];
    [[NSNotificationCenter defaultCenter] removeObserver:self
                                                    name:MPMoviePlayerPlaybackDidFinishNotification
                                                  object:theMovie];
    [player.view removeFromSuperview];
    [self stopVideoRecording];
}
#pragma mark -
#pragma mark recording
- (void)initCaptureSession
{
    NSLog(@"Setting up capture session");
    captureSession = [[AVCaptureSession alloc] init];

    //----- ADD INPUTS -----
    NSLog(@"Adding video input");
    //ADD VIDEO INPUT
    AVCaptureDevice *VideoDevice = [self frontFacingCameraIfAvailable];
    //[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if (VideoDevice)
    {
        NSError *error;
        videoInputDevice = [AVCaptureDeviceInput deviceInputWithDevice:VideoDevice error:&error];
        if (!error)
        {
            if ([captureSession canAddInput:videoInputDevice])
                [captureSession addInput:videoInputDevice];
            else
                NSLog(@"Couldn't add video input");
        }
        else
        {
            NSLog(@"Couldn't create video input");
        }
    }
    else
    {
        NSLog(@"Couldn't create video capture device");
    }

    //ADD AUDIO INPUT
    NSLog(@"Adding audio input");
    AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    NSError *error = nil;
    AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error];
    if (audioInput)
    {
        [captureSession addInput:audioInput];
    }

    //----- ADD OUTPUTS -----
    //ADD MOVIE FILE OUTPUT
    NSLog(@"Adding movie file output");
    movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
    // Float64 TotalSeconds = 60;       //Total seconds
    // int32_t preferredTimeScale = 30; //Frames per second
    // CMTime maxDuration = CMTimeMakeWithSeconds(TotalSeconds, preferredTimeScale); //<<SET MAX DURATION
    // movieFileOutput.maxRecordedDuration = maxDuration;
    movieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024; //<<SET MIN FREE SPACE IN BYTES FOR RECORDING TO CONTINUE ON A VOLUME
    if ([captureSession canAddOutput:movieFileOutput])
        [captureSession addOutput:movieFileOutput];

    //SET THE CONNECTION PROPERTIES (output properties)
    [self CameraSetOutputProperties]; //(We call a method as it also has to be done after changing camera)

    //----- SET THE IMAGE QUALITY / RESOLUTION -----
    //Options:
    //  AVCaptureSessionPresetHigh - Highest recording quality (varies per device)
    //  AVCaptureSessionPresetMedium - Suitable for WiFi sharing (actual values may change)
    //  AVCaptureSessionPresetLow - Suitable for 3G sharing (actual values may change)
    //  AVCaptureSessionPreset640x480 - 640x480 VGA (check it's supported before setting it)
    //  AVCaptureSessionPreset1280x720 - 1280x720 720p HD (check it's supported before setting it)
    //  AVCaptureSessionPresetPhoto - Full photo resolution (not supported for video output)
    NSLog(@"Setting image quality");
    [captureSession setSessionPreset:AVCaptureSessionPresetMedium];
    if ([captureSession canSetSessionPreset:AVCaptureSessionPreset640x480]) //Check size-based configs are supported before setting them
        [captureSession setSessionPreset:AVCaptureSessionPreset640x480];

    //----- START THE CAPTURE SESSION RUNNING -----
    [captureSession startRunning];
}
//********** CAMERA SET OUTPUT PROPERTIES **********
- (void)CameraSetOutputProperties
{
    AVCaptureConnection *CaptureConnection = nil;
    //SET THE CONNECTION PROPERTIES (output properties)
    NSComparisonResult order = [[UIDevice currentDevice].systemVersion compare:@"5.0.0" options:NSNumericSearch];
    if (order == NSOrderedSame || order == NSOrderedDescending) {
        // OS version >= 5.0.0
        CaptureConnection = [movieFileOutput connectionWithMediaType:AVMediaTypeVideo];
        // if (CaptureConnection.supportsVideoMinFrameDuration)
        //     CaptureConnection.videoMinFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
        // if (CaptureConnection.supportsVideoMaxFrameDuration)
        //     CaptureConnection.videoMaxFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
        // if (CaptureConnection.supportsVideoMinFrameDuration)
        // {
        //     // CMTimeShow(CaptureConnection.videoMinFrameDuration);
        //     // CMTimeShow(CaptureConnection.videoMaxFrameDuration);
        // }
    } else {
        // OS version < 5.0.0
        CaptureConnection = [self connectionWithMediaType:AVMediaTypeVideo fromConnections:[movieFileOutput connections]];
    }

    //Set landscape (if required)
    if ([CaptureConnection isVideoOrientationSupported])
    {
        AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait; // AVCaptureVideoOrientationLandscapeRight; //<<<<<SET VIDEO ORIENTATION IF LANDSCAPE
        [CaptureConnection setVideoOrientation:orientation];
    }

    //Set frame rate (if required)
    //CMTimeShow(CaptureConnection.videoMinFrameDuration);
    //CMTimeShow(CaptureConnection.videoMaxFrameDuration);
}
- (void)startVideoRecording
{
    //Create temporary URL to record to
    NSString *outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"output.mov"];
    NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
    NSFileManager *fileManager = [NSFileManager defaultManager];
    if ([fileManager fileExistsAtPath:outputPath])
    {
        NSError *error;
        if ([fileManager removeItemAtPath:outputPath error:&error] == NO)
        {
            //Error - handle if required
            NSLog(@"file remove error");
        }
    }
    //Start recording
    [movieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
}

- (void)stopVideoRecording
{
    [movieFileOutput stopRecording];
}
//********** DID FINISH RECORDING TO OUTPUT FILE AT URL **********/
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput
didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
      fromConnections:(NSArray *)connections
                error:(NSError *)error
{
    NSLog(@"didFinishRecordingToOutputFileAtURL - enter");
    BOOL RecordedSuccessfully = YES;
    if ([error code] != noErr)
    {
        // A problem occurred: Find out if the recording was successful.
        id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
        if (value)
        {
            RecordedSuccessfully = [value boolValue];
        }
    }
    if (RecordedSuccessfully)
    {
        //----- RECORDED SUCCESSFULLY -----
        NSLog(@"didFinishRecordingToOutputFileAtURL - success");
        ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
        if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputFileURL])
        {
            [library writeVideoAtPathToSavedPhotosAlbum:outputFileURL
                                        completionBlock:^(NSURL *assetURL, NSError *error)
            {
                if (error)
                {
                    NSLog(@"File save error");
                }
                else
                {
                    recordedVideoURL = assetURL;
                }
            }];
        }
        else {
            NSString *assetURL = [self copyFileToDocuments:outputFileURL];
            if (assetURL != nil)
            {
                recordedVideoURL = [NSURL URLWithString:assetURL];
            }
        }
    }
}
- (NSString *)copyFileToDocuments:(NSURL *)fileURL
{
    NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
    NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
    [dateFormatter setDateFormat:@"yyyy-MM-dd_HH-mm-ss"];
    NSString *destinationPath = [documentsDirectory stringByAppendingFormat:@"/output_%@.mov", [dateFormatter stringFromDate:[NSDate date]]];
    NSError *error;
    if (![[NSFileManager defaultManager] copyItemAtURL:fileURL toURL:[NSURL fileURLWithPath:destinationPath] error:&error]) {
        NSLog(@"File save error %@", [error localizedDescription]);
        return nil;
    }
    return destinationPath;
}

- (AVCaptureConnection *)connectionWithMediaType:(NSString *)mediaType fromConnections:(NSArray *)connections
{
    for (AVCaptureConnection *connection in connections) {
        for (AVCaptureInputPort *port in [connection inputPorts]) {
            if ([[port mediaType] isEqual:mediaType]) {
                return connection;
            }
        }
    }
    return nil;
}
- (AVCaptureDevice *)frontFacingCameraIfAvailable
{
    // look at all the video devices and get the first one that's on the front
    NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    AVCaptureDevice *captureDevice = nil;
    for (AVCaptureDevice *device in videoDevices)
    {
        if (device.position == AVCaptureDevicePositionFront)
        {
            captureDevice = device;
            break;
        }
    }
    // couldn't find one on the front, so just get the default video device.
    if (!captureDevice)
    {
        captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    }
    return captureDevice;
}

#pragma mark -
@end
I have this problem too. I don't know how to fix it, but I know the problem is here:
[captureSession addInput:audioInput];
If you delete this line of code, it will work fine, so I think it's an audio mix or some other audio problem.
I am still looking for the answer.
I have found the answer here: answer, and it works!
But remember to add AudioToolbox.framework; maybe that's helpful for you.
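For context, the usual AudioToolbox-based fix for playback interrupting capture (a sketch, and an assumption about what the linked answer does; it matches the AudioToolbox.framework hint above) is to let the PlayAndRecord category mix with other audio:

    #include <AudioToolbox/AudioToolbox.h>

    // After setting AVAudioSessionCategoryPlayAndRecord, allow mixing so playback
    // does not tear down the capture session's audio
    UInt32 mixWithOthers = 1;
    AudioSessionSetProperty(kAudioSessionProperty_OverrideCategoryMixWithOthers,
                            sizeof(mixWithOthers), &mixWithOthers);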
I have searched a lot trying to figure this out; the code seems OK to me, but the functionality doesn't reflect that.
I have a view that is presented (it's an audio recorder view). You can press record and it records to a file just fine (data exists in the file). I can then play the file back via the play button (the AVAudioPlayer is pointing to that file).
However, when I close/dismiss that view and come back to it, the file will not play when Play is tapped, even though it should be exactly the same code, as the file location has not changed.
UPDATE:
It seems that [audioPlayer play] returns NO. I have also looked into the data. It seems that when the view appears again and loads that data, it doesn't load it correctly (the NSData shown by NSLog is mainly zeros), even though the file exists and has data in it (I can see and hear it after transferring it to my Mac).
This leads me to suspect that either I am loading the data wrong, or AVAudioPlayer won't read the data for some reason...
Please take a look at the code below:
- (NSString *)removeCharsFrom:(NSString *)remover {
    remover = [remover stringByTrimmingCharactersInSet:[NSCharacterSet whitespaceAndNewlineCharacterSet]];
    remover = [remover stringByReplacingOccurrencesOfString:@" " withString:@"_"];
    remover = [remover stringByReplacingOccurrencesOfString:@"/" withString:@"_"];
    remover = [remover stringByReplacingOccurrencesOfString:@"\\" withString:@"_"];
    remover = [remover stringByReplacingOccurrencesOfString:@":" withString:@"_"];
    remover = [remover stringByReplacingOccurrencesOfString:@";" withString:@"_"];
    remover = [remover stringByReplacingOccurrencesOfString:@"(" withString:@"_"];
    remover = [remover stringByReplacingOccurrencesOfString:@")" withString:@"_"];
    remover = [remover stringByReplacingOccurrencesOfString:@"£" withString:@"_"];
    remover = [remover stringByReplacingOccurrencesOfString:@"$" withString:@"_"];
    remover = [remover stringByReplacingOccurrencesOfString:@"&" withString:@"_"];
    remover = [remover stringByReplacingOccurrencesOfString:@"'" withString:@"_"];
    remover = [remover stringByReplacingOccurrencesOfString:@"{" withString:@"_"];
    remover = [remover stringByReplacingOccurrencesOfString:@"}" withString:@"_"];
    remover = [remover stringByReplacingOccurrencesOfString:@"[" withString:@"_"];
    remover = [remover stringByReplacingOccurrencesOfString:@"]" withString:@"_"];
    remover = [remover stringByReplacingOccurrencesOfString:@"\"" withString:@"_"];
    return remover;
}
- (NSString *)audioPathForResource:(NSString *)audio {
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *saveDirectory = [paths objectAtIndex:0];
    NSString *newFolder = [saveDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"/%@/Audio", catName]];
    if (![[NSFileManager defaultManager] fileExistsAtPath:newFolder]) {
        [[NSFileManager defaultManager] createDirectoryAtPath:newFolder withIntermediateDirectories:YES attributes:nil error:nil];
    }
    NSString *saveFileName = [NSString stringWithFormat:@"%@.caf", audio];
    NSString *newFilePath = [newFolder stringByAppendingPathComponent:saveFileName];
    return [newFilePath stringByAddingPercentEscapesUsingEncoding:NSUTF8StringEncoding];
}
- (IBAction)cancelTapped:(id)sender {
    [self dismissModalViewControllerAnimated:YES];
}

- (IBAction)saveTapped:(id)sender {
    [self.parentViewController performSelector:@selector(changeAddAudioIcon)];
    [self dismissModalViewControllerAnimated:YES];
}

- (IBAction)trashTapped:(id)sender {
    UIAlertView *alert =
        [[UIAlertView alloc] initWithTitle:@"Delete"
                                   message:@"Would you like to delete the audio file? Warning: This cannot be undone."
                                  delegate:self
                         cancelButtonTitle:@"Cancel"
                         otherButtonTitles:@"Delete", nil];
    [alert show];
    [alert release];
}
- (IBAction)pauseTapped:(id)sender {
    pauseBtn.enabled = NO;
    playBtn.enabled = YES;
    recordBtn.enabled = YES;
    trashBtn.enabled = YES;
    if (audioRecorder.recording)
    {
        [audioRecorder stop];
    } else if (audioPlayer.playing) {
        [audioPlayer stop];
    }
}

- (IBAction)recordTapped:(id)sender {
    if (!audioRecorder.recording)
    {
        playBtn.enabled = NO;
        pauseBtn.enabled = YES;
        trashBtn.enabled = NO;
        [audioRecorder record];
    }
}
- (IBAction)playTapped:(id)sender {
    pauseBtn.enabled = YES;
    recordBtn.enabled = NO;
    trashBtn.enabled = YES;
    NSError *error;
    NSLog(@"%@", filepathstring);
    NSURL *soundFileURL = [NSURL fileURLWithPath:filepathstring];
    audioPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:soundFileURL error:&error];
    audioPlayer.delegate = self;
    if (error)
        NSLog(@"Error: %@", [error localizedDescription]);
    else
        [audioPlayer play];
}
- (void)alertView:(UIAlertView *)alertView didDismissWithButtonIndex:(NSInteger)buttonIndex {
    switch (buttonIndex) {
        case 0:
            return;
            break;
        case 1:
        {
            NSError *error = nil;
            [[NSFileManager defaultManager] removeItemAtPath:filepathstring error:&error];
            trashBtn.enabled = NO;
        }
            break;
        default:
            break;
    }
}
- (void)audioPlayerDidFinishPlaying:(AVAudioPlayer *)player successfully:(BOOL)flag
{
    recordBtn.enabled = YES;
    pauseBtn.enabled = NO;
    playBtn.enabled = YES;
    if (player != audioPlayer) {
        [player release];
    }
}

- (void)audioPlayerDecodeErrorDidOccur:(AVAudioPlayer *)player error:(NSError *)error
{
    NSLog(@"Decode Error occurred");
}

- (void)audioRecorderDidFinishRecording:(AVAudioRecorder *)recorder successfully:(BOOL)flag
{
    NSLog(@"Recording success: %@", flag ? @"YES" : @"NO");
    trashBtn.enabled = YES;
    pauseBtn.enabled = NO;
    playBtn.enabled = YES;
}

- (void)audioRecorderEncodeErrorDidOccur:(AVAudioRecorder *)recorder error:(NSError *)error
{
    NSLog(@"Encode Error occurred");
}
#pragma mark - View lifecycle
- (void)viewDidLoad
{
    [super viewDidLoad];
    // Do any additional setup after loading the view from its nib.
}

- (void)viewWillAppear:(BOOL)animated {
    catName = [NSString stringWithFormat:@"%@", [self removeCharsFrom:catName]];
    testName = [NSString stringWithFormat:@"%@", [self removeCharsFrom:testName]];
    filepathstring = [[self audioPathForResource:testName] retain];
    NSLog(@"At start = %@", filepathstring);
    if ([[NSFileManager defaultManager] fileExistsAtPath:filepathstring]) {
        playBtn.enabled = YES;
        trashBtn.enabled = YES;
        recordBtn.enabled = YES;
    }
    else
    {
        playBtn.enabled = NO;
        trashBtn.enabled = NO;
    }
    [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayAndRecord error:nil];
    NSURL *soundFileURL = [NSURL fileURLWithPath:filepathstring];
    NSDictionary *recordSettings = [NSDictionary
                                    dictionaryWithObjectsAndKeys:
                                    [NSNumber numberWithInt:AVAudioQualityMin], AVEncoderAudioQualityKey,
                                    [NSNumber numberWithInt:16], AVEncoderBitRateKey,
                                    [NSNumber numberWithInt:2], AVNumberOfChannelsKey,
                                    [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                                    nil];
    NSError *error = nil;
    audioRecorder = [[AVAudioRecorder alloc] initWithURL:soundFileURL
                                                settings:recordSettings
                                                   error:&error];
    audioRecorder.delegate = self;
    if (error)
    {
        NSLog(@"error: %@", [error localizedDescription]);
    } else {
        [audioRecorder prepareToRecord];
    }
}
- (void)viewDidUnload
{
    [self setCancelBtn:nil];
    [self setSaveBtn:nil];
    [self setTimeLabel:nil];
    [self setDescriptionLabel:nil];
    [self setToolsBar:nil];
    [self setTrashBtn:nil];
    [self setPauseBtn:nil];
    [self setRecordBtn:nil];
    [self setPlayBtn:nil];
    [super viewDidUnload];
    // Release any retained subviews of the main view.
    // e.g. self.myOutlet = nil;
}

- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation
{
    // Return YES for supported orientations
    if (UI_USER_INTERFACE_IDIOM() == UIUserInterfaceIdiomPad) {
        return YES;
    } else {
        return UIInterfaceOrientationIsPortrait(interfaceOrientation);
    }
}
- (void)dealloc {
    [cancelBtn release];
    [saveBtn release];
    [timeLabel release];
    [descriptionLabel release];
    [toolsBar release];
    [trashBtn release];
    [pauseBtn release];
    [recordBtn release];
    [playBtn release];
    [audioPlayer release];
    [audioRecorder release];
    [super dealloc];
}
Here is the answer:
NSData *data = [NSData dataWithContentsOfMappedFile:[NSString stringWithFormat:@"%@", filepathstring]];
AVAudioPlayer *ap = [[AVAudioPlayer alloc] initWithData:data error:&error];
It seems that it just wouldn't work with filepathstring directly, but wrapped in a new NSString it worked fine. Obvious now!
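A possible explanation, judging from the code above (an assumption on my part, not something the answer states): audioPathForResource runs the path through stringByAddingPercentEscapesUsingEncoding:, so filepathstring is a percent-escaped string rather than a plain filesystem path, and fileURLWithPath: may then point at a file that doesn't exist. Building the URL from the unescaped path might work just as well:

    // Hypothetical alternative: keep the raw filesystem path, without percent escapes
    NSString *rawPath = [newFolder stringByAppendingPathComponent:saveFileName];
    NSURL *soundFileURL = [NSURL fileURLWithPath:rawPath];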
I need to create a video from the selected image.
I have the code and it should work, but it is giving an error while appending the buffer.
This is how both types of images have been saved:
- (void)imagePickerController:(UIImagePickerController *)picker didFinishPickingImage:(UIImage *)img editingInfo:(NSDictionary *)editInfo
{
    // NSLog(@"Came From Here");
    imgv.image = img;
    AppDelegate *app = (AppDelegate *)[[UIApplication sharedApplication] delegate];
    app.imgmain = img;
    [self dismissModalViewControllerAnimated:YES];
    RecordVoice *rec = [[RecordVoice alloc] initWithNibName:@"RecordVoice" bundle:nil];
    rec.hidesBottomBarWhenPushed = YES;
    // rec.img.image = img;
    [self.navigationController pushViewController:rec animated:YES];
    //[self presentModalViewController:rec animated:YES];
    [rec release];
    // flag = @"yes";
    // need to show the upload image button now
    // [username, ititle resignFirstResponder];
}
On the other view controller I am showing this image in a UIImageView.
On a button click I am converting that image to video with this code:
- (void)createVideoFile
{
    NSString *documentsDirectoryPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
    NSArray *dirContents = [[NSFileManager defaultManager] contentsOfDirectoryAtPath:documentsDirectoryPath error:nil];
    for (NSString *tString in dirContents) {
        if ([tString isEqualToString:@"test.mp4"])
        {
            [[NSFileManager defaultManager] removeItemAtPath:[NSString stringWithFormat:@"%@/%@", documentsDirectoryPath, tString] error:nil];
        }
    }
    NSString *nfile = [documentsDirectoryPath stringByAppendingPathComponent:@"test.mp4"];
    AVURLAsset *urlAsset = [AVURLAsset URLAssetWithURL:recordedTmpFile options:nil];
    NSLog(@"Write Started");
    NSError *error = nil;
    CGSize size = img.image.size; //CGSizeMake(320, 480);
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:nfile]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                                   nil];
    AVAssetWriterInput *videoWriterInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                               outputSettings:videoSettings] retain];
    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                     sourcePixelBufferAttributes:nil];
    NSParameterAssert(videoWriterInput);
    NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
    videoWriterInput.expectsMediaDataInRealTime = YES;
    [videoWriter addInput:videoWriterInput];

    //Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    CVPixelBufferRef buffer = NULL;
    //convert uiimage to CGImage.
    int frameCount = 0;
    buffer = [self pixelBufferFromCGImage:[img.image CGImage] andSize:size];
    BOOL append_ok = NO;
    int j = 0;
    while (!append_ok && j < 30)
    {
        if (adaptor.assetWriterInput.readyForMoreMediaData)
        {
            printf("appending %d attempt %d\n", frameCount, j);
            CMTime frameTime = urlAsset.duration; //CMTimeMake(frameCount, (int32_t)10);
            append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
            //if (buffer)
            //    CVBufferRelease(buffer);
            [NSThread sleepForTimeInterval:0.05];
        }
        else
        {
            printf("adaptor not ready %d, %d\n", frameCount, j);
            [NSThread sleepForTimeInterval:0.1];
        }
        j++;
    }
    if (!append_ok) {
        printf("error appending image %d times %d\n", frameCount, j);
    }
    frameCount++;
    [videoWriterInput markAsFinished];
    [videoWriter finishWriting];
    [self CompileFilesToMakeMovie];
    [altv dismissWithClickedButtonIndex:0 animated:YES];
    [altv release];
    NSLog(@"Write Ended");
}
But this is not working...
I am stuck. Can anyone please help me with this? :(
I have figured out the problem.
If we use an image with a big size it won't work; for example, the pictures taken from the camera app have a big size.
So I am compressing it to a lower size first, and then it works.
I haven't yet figured out why it works like this, but I have a solution.
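In case it helps, a minimal sketch of that kind of downscaling (the target size here is an assumption; match it to whatever your writer's videoSettings expect):

    // Scale the picked image down before handing it to the pixel buffer code
    CGSize targetSize = CGSizeMake(320, 480); // assumed; should match AVVideoWidthKey/AVVideoHeightKey
    UIGraphicsBeginImageContextWithOptions(targetSize, YES, 1.0);
    [img.image drawInRect:CGRectMake(0, 0, targetSize.width, targetSize.height)];
    UIImage *smallImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();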
I am trying to take the camera input and write the data to disk using AVAssetWriter. From the delegate, it simply appears that the AVAssetWriterInputPixelBufferAdaptor is failing to append data. I'm not sure why.
- (NSURL *)assetURL {
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);
    NSString *basePath = ([paths count] > 0) ? [paths objectAtIndex:0] : nil;
    NSString *filePath = [basePath stringByAppendingPathComponent:@"videoOutput"];
    return [NSURL fileURLWithPath:filePath];
}
- (id)init {
    if (![super init]) return nil;
    self.captureSession = [[AVCaptureSession alloc] init];
    self.captureSession.sessionPreset = AVCaptureSessionPresetHigh;
    // HIGH:   640 x 480
    // MEDIUM: 360 x 480
    // LOW:    192 x 144
    [self loadVideoInput];
    [self loadVideoOutput];
    [self loadPreviewLayer];
    [self loadWriter];
    return self;
}
- (void)loadVideoInput {
    AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in devices) {
        if (device.position == AVCaptureDevicePositionFront) {
            videoDevice = device;
            break;
        }
    }
    if (videoDevice) {
        NSError *error;
        AVCaptureDeviceInput *videoIn = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
        if (!error) {
            if ([self.captureSession canAddInput:videoIn])
                [self.captureSession addInput:videoIn];
            else NSLog(@"Couldn't add video input");
        } else NSLog(@"Couldn't create video input");
    } else NSLog(@"Couldn't create video capture device");
}
- (void)loadVideoOutput {
    AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
    output.alwaysDiscardsLateVideoFrames = YES;
    [output setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
    //dispatch_queue_t queue = dispatch_queue_create("myQueue", NULL);
    //[output setSampleBufferDelegate:self queue:queue];
    //dispatch_release(queue);
    //output.minFrameDuration = CMTimeMake(15, 1); // If you wish to cap the frame rate to a known value, such as 15 fps
    [output setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]]; // BGRA is necessary for manual preview
    if ([self.captureSession canAddOutput:output])
        [self.captureSession addOutput:output];
    else
        NSLog(@"Couldn't add video output");
    [output release];
}
- (void)loadPreviewLayer {
    previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession];
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
}
- (void)loadWriter {
    NSError *error = nil;
    videoWriter = [[AVAssetWriter alloc] initWithURL:[self assetURL] fileType:AVFileTypeQuickTimeMovie error:&error];
    NSParameterAssert(videoWriter);
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:640], AVVideoWidthKey,
                                   [NSNumber numberWithInt:480], AVVideoHeightKey,
                                   nil];
    writerInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings] retain];
    writerInput.expectsMediaDataInRealTime = YES;
    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);
    [videoWriter addInput:writerInput];
    currentTime = kCMTimeZero;
    adaptor = [[AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput sourcePixelBufferAttributes:nil] retain];
    NSLog(@"Error? %@", error);
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    if (recording) {
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        currentTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
        // STUFF ISN'T WORKING HERE
        BOOL success = [adaptor appendPixelBuffer:imageBuffer withPresentationTime:currentTime];
        NSLog(@"%@", success ? @"YES" : @"NO");
    }
}
- (void)startRecording {
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:currentTime];
    recording = YES;
}

- (void)stopRecording {
    recording = NO;
    [writerInput markAsFinished];
    [videoWriter endSessionAtSourceTime:currentTime];
    [videoWriter finishWriting];
}
You need to remove the existing file ("filePath" in your case) each time before trying to record the video.
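Something along these lines, run before the AVAssetWriter is created in loadWriter (a sketch of the suggestion above, using the assetURL method from the question):

    // Delete any leftover movie at the writer's URL before creating the AVAssetWriter
    NSError *removeError = nil;
    [[NSFileManager defaultManager] removeItemAtPath:[[self assetURL] path] error:&removeError];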
[adaptor appendPixelBuffer:imageBuffer withPresentationTime:currentTime];
Maybe you need to manage the video time yourself: the currentTime you get from the sample buffer is not exactly the video's time. Try accumulating the capture time per frame.
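A minimal sketch of that idea, assuming a firstTimestamp ivar initialized to kCMTimeInvalid (everything else is from the question's code); it rebases each buffer's timestamp so the written movie starts at zero:

    // In captureOutput:didOutputSampleBuffer:fromConnection:, while recording:
    CMTime sampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    if (CMTIME_IS_INVALID(firstTimestamp)) {
        firstTimestamp = sampleTime;                         // remember the first frame's clock time
        [videoWriter startSessionAtSourceTime:kCMTimeZero];  // and start the writer session at zero
    }
    CMTime presentationTime = CMTimeSubtract(sampleTime, firstTimestamp);
    BOOL success = [adaptor appendPixelBuffer:imageBuffer withPresentationTime:presentationTime];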