I am trying to take the camera input and write the data to disk using AVAssetWriter. From the delegate, it appears that the AVAssetWriterInputPixelBufferAdaptor is simply failing to append data, and I'm not sure why.
- (NSURL*) assetURL{
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);
NSString *basePath = ([paths count] > 0) ? [paths objectAtIndex:0] : nil;
NSString *filePath = [basePath stringByAppendingPathComponent:@"videoOutput"];
return [NSURL fileURLWithPath:filePath];
}
- (id) init {
if (!(self = [super init])) return nil;
self.captureSession = [[AVCaptureSession alloc] init];
self.captureSession.sessionPreset = AVCaptureSessionPresetHigh;
// HIGH: 640 x 480
// MEDIUM: 360 x 480
// LOW: 192 x 144
[self loadVideoInput];
[self loadVideoOutput];
[self loadPreviewLayer];
[self loadWriter];
return self;
}
- (void) loadVideoInput{
AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *device in devices) {
if (device.position == AVCaptureDevicePositionFront) {
videoDevice = device;
break;
}
}
if ( videoDevice ) {
NSError *error;
AVCaptureDeviceInput *videoIn = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
if ( !error ) {
if ([self.captureSession canAddInput:videoIn])
[self.captureSession addInput:videoIn];
else NSLog(#"Couldn't add video input");
} else NSLog(#"Couldn't create video input");
} else NSLog(#"Couldn't create video capture device");
}
- (void) loadVideoOutput{
AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
output.alwaysDiscardsLateVideoFrames = YES;
[output setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
//dispatch_queue_t queue = dispatch_queue_create("myQueue", NULL);
//[output setSampleBufferDelegate:self queue:queue];
//dispatch_release(queue);
//output.minFrameDuration = CMTimeMake(1, 15); // If you wish to cap the frame rate to a known value, such as 15 fps, set minFrameDuration
[output setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]]; // BGRA is necessary for manual preview
if ([self.captureSession canAddOutput:output])
[self.captureSession addOutput:output];
else
NSLog(@"Couldn't add video output");
//[self.captureSession addOutput:output];
[output release];
}
- (void) loadPreviewLayer{
previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession];
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
}
- (void) loadWriter{
NSError *error = nil;
videoWriter = [[AVAssetWriter alloc] initWithURL:[self assetURL] fileType:AVFileTypeQuickTimeMovie error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:640], AVVideoWidthKey,
[NSNumber numberWithInt:480], AVVideoHeightKey,
nil];
writerInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings] retain];
writerInput.expectsMediaDataInRealTime = YES;
NSParameterAssert(writerInput);
NSParameterAssert([videoWriter canAddInput:writerInput]);
[videoWriter addInput:writerInput];
currentTime = kCMTimeZero;
adaptor = [[AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput sourcePixelBufferAttributes:nil] retain];
NSLog(#"Error? %#",error);
}
- (void) captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
if(recording){
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
currentTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
// STUFF ISN'T WORKING HERE
BOOL success = [adaptor appendPixelBuffer:imageBuffer withPresentationTime:currentTime];
NSLog(#"%#",success ? #"YES" : #"NO");
}
}
- (void) startRecording{
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:currentTime];
recording = YES;
}
- (void) stopRecording{
recording = NO;
[writerInput markAsFinished];
[videoWriter endSessionAtSourceTime:currentTime];
[videoWriter finishWriting];
}
You need to remove the existing file (at "filePath" in your case) each time before you start recording the video.
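For example, a minimal sketch (assuming "filePath" is the same path your assetURL method returns, and that you do this before creating the AVAssetWriter):
NSError *removeError = nil;
NSFileManager *fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:filePath])
{
if (![fileManager removeItemAtPath:filePath error:&removeError])
NSLog(@"Couldn't remove old recording: %@", removeError);
}
// now it is safe to create the writer with [self assetURL]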
[adaptor appendPixelBuffer:imageBuffer withPresentationTime:currentTime];
Maybe you need to manage the video time yourself. The currentTime you get from the sample buffer is not exactly the video time; try accumulating the capture time per frame.
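One way to sketch this (an assumption on my part, not necessarily your exact bug: the sessionStarted flag below is a hypothetical BOOL ivar, and startSessionAtSourceTime: would be removed from startRecording) is to start the writer session lazily from the first captured buffer, so every appended presentation time is relative to it:
- (void) captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
if (!recording) return;
CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
if (!sessionStarted) {
// first frame defines time zero for the writer session
[videoWriter startSessionAtSourceTime:pts];
sessionStarted = YES;
}
CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
if (adaptor.assetWriterInput.readyForMoreMediaData) {
BOOL success = [adaptor appendPixelBuffer:pixelBuffer withPresentationTime:pts];
if (!success) NSLog(@"append failed: %@", videoWriter.error);
}
}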
Related
I am using the following code to embed the camera into my application's view.
Here is my code
- (void)viewDidLoad
{
[super viewDidLoad];
AVCaptureSession *session = [[AVCaptureSession alloc] init];
session.sessionPreset = AVCaptureSessionPresetMedium;
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
captureVideoPreviewLayer.frame = self.cameraView.bounds;
[self.cameraView.layer addSublayer:captureVideoPreviewLayer];
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input)
{
[Utilities alertDisplay:@"Error" message:@"Camera not found. Please use Photo Gallery instead."];
}
[session addInput:input];
stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
[stillImageOutput setOutputSettings:outputSettings];
[session addOutput:stillImageOutput];
[session startRunning];
}
-(AVCaptureDevice *)backFacingCameraIfAvailable{
NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
AVCaptureDevice *captureDevice = nil;
for (AVCaptureDevice *device in videoDevices){
if (device.position == AVCaptureDevicePositionBack){
captureDevice = device;
break;
}
}
// couldn't find one on the back, so just get the default video device.
if (!captureDevice){
captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
}
return captureDevice;
}
- (IBAction)scanButtonPressed:(id)sender
{
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in stillImageOutput.connections)
{
for (AVCaptureInputPort *port in [connection inputPorts])
{
if ([[port mediaType] isEqual:AVMediaTypeVideo] )
{
videoConnection = connection;
break;
}
}
if (videoConnection) { break; }
}
NSLog(#"about to request a capture from: %#", stillImageOutput);
[stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error)
{
CFDictionaryRef exifAttachments = CMGetAttachment( imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
if (exifAttachments)
{
// Do something with the attachments.
NSLog(#"attachements: %#", exifAttachments);
}
else
NSLog(#"no attachments");
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
UIImage *image = [[UIImage alloc] initWithData:imageData];
//self.vImage.image = image;
}];
}
The problem I am facing is that no camera preview ever appears in my cameraView, and when scanButtonPressed fires,
stillImageOutput.connections contains 0 objects.
What is wrong?
Well, I just copy-pasted your code into a blank project and it worked fine once I changed self.cameraView.layer to self.view.layer. However, when I declared self.cameraView and never initialized it, I saw symptoms similar to the ones you describe.
Overall, I would check to make sure that self.cameraView isn't nil. If it's created programmatically, make sure you're calling alloc/init and setting a frame, and if it's an IBOutlet, make sure it's properly linked.
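As a quick sanity check, something like this sketch (the fallback frame is just an example) makes the failure obvious at runtime:
if (self.cameraView == nil)
{
// hypothetical fallback: create the view so the preview layer has real bounds
self.cameraView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, 320, 240)];
[self.view addSubview:self.cameraView];
}
NSLog(@"cameraView = %@, bounds = %@", self.cameraView, NSStringFromCGRect(self.cameraView.bounds));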
I have to implement functionality to repeatedly pause and resume video capture in a single session, but have each new segment (the segments captured after each pause) appended to the same video file, using AVFoundation. Currently, every time I press "stop" and then "record" again, it just saves a new video file to my iPhone's Documents directory and starts capturing to a new file. I need to be able to press the "record/stop" button repeatedly, capture video and audio only while recording is active, and then, when the "done" button is pressed, end up with a single AV file containing all the segments. And all of this needs to happen in the same capture session / preview session.
I am not using AVAssetWriterInput.
The only way I can think of to do this is, when the "done" button is pressed, to take each individual output file and combine them into a single file.
This code works on iOS 5 but not on iOS 6. On iOS 6, the first time I pause recording (stop recording), the AVCaptureFileOutputRecordingDelegate method (captureOutput: didFinishRecordingToOutputFileAtURL: fromConnections: error:) is called; but when I start recording again, the delegate method is called again immediately, and it is then not called when I stop recording.
I need a solution for that issue. Please help me.
//View LifeCycle
- (void)viewDidLoad
{
[super viewDidLoad];
self.finalRecordedVideoName = [self stringWithNewUUID];
arrVideoName = [[NSMutableArray alloc]initWithCapacity:0];
arrOutputUrl = [[NSMutableArray alloc] initWithCapacity:0];
CaptureSession = [[AVCaptureSession alloc] init];
captureDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
if ([captureDevices count] > 0)
{
NSError *error;
VideoInputDevice = [[AVCaptureDeviceInput alloc] initWithDevice:[self backFacingCamera] error:&error];
if (!error)
{
if ([CaptureSession canAddInput:VideoInputDevice])
[CaptureSession addInput:VideoInputDevice];
else
NSLog(#"Couldn't add video input");
}
else
{
NSLog(#"Couldn't create video input");
}
}
else
{
NSLog(#"Couldn't create video capture device");
}
//ADD VIDEO PREVIEW LAYER
NSLog(#"Adding video preview layer");
AVCaptureVideoPreviewLayer *layer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:CaptureSession];
[self setPreviewLayer:layer];
UIDeviceOrientation currentOrientation = [UIDevice currentDevice].orientation;
NSLog(#"%d",currentOrientation);
if (currentOrientation == UIDeviceOrientationPortrait)
{
PreviewLayer.orientation = AVCaptureVideoOrientationPortrait;
}
else if (currentOrientation == UIDeviceOrientationPortraitUpsideDown)
{
PreviewLayer.orientation = AVCaptureVideoOrientationPortraitUpsideDown;
}
else if (currentOrientation == UIDeviceOrientationLandscapeRight)
{
PreviewLayer.orientation = AVCaptureVideoOrientationLandscapeRight;
}
else if (currentOrientation == UIDeviceOrientationLandscapeLeft)
{
PreviewLayer.orientation = AVCaptureVideoOrientationLandscapeLeft;
}
[[self PreviewLayer] setVideoGravity:AVLayerVideoGravityResizeAspectFill];
//ADD MOVIE FILE OUTPUT
NSLog(#"Adding movie file output");
MovieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
VideoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
[VideoDataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
NSString* key = (NSString*)kCVPixelBufferBytesPerRowAlignmentKey;
NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
[VideoDataOutput setVideoSettings:videoSettings];
Float64 TotalSeconds = 60; //Total seconds
int32_t preferredTimeScale = 30; //Frames per second
CMTime maxDuration = CMTimeMakeWithSeconds(TotalSeconds, preferredTimeScale);//<<SET MAX DURATION
MovieFileOutput.maxRecordedDuration = maxDuration;
MovieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024; //<<SET MIN FREE SPACE IN BYTES FOR RECORDING TO CONTINUE ON A VOLUME
//SET THE CONNECTION PROPERTIES (output properties)
[self CameraSetOutputProperties]; //(We call a method as it also has to be done after changing camera)
AVCaptureConnection *videoConnection = nil;
for ( AVCaptureConnection *connection in [MovieFileOutput connections] )
{
NSLog(#"%#", connection);
for ( AVCaptureInputPort *port in [connection inputPorts] )
{
NSLog(#"%#", port);
if ( [[port mediaType] isEqual:AVMediaTypeVideo] )
{
videoConnection = connection;
}
}
}
if([videoConnection isVideoOrientationSupported]) // **Here it is, it's always false**
{
[videoConnection setVideoOrientation:[[UIDevice currentDevice] orientation]];
} NSLog(#"Setting image quality");
[CaptureSession setSessionPreset:AVCaptureSessionPresetLow];
//----- DISPLAY THE PREVIEW LAYER -----
CGRect layerRect = CGRectMake(5, 5, 299, ([[UIScreen mainScreen] bounds].size.height == 568)?438:348);
[self.PreviewLayer setBounds:layerRect];
[self.PreviewLayer setPosition:CGPointMake(CGRectGetMidX(layerRect),CGRectGetMidY(layerRect))];
if ([CaptureSession canAddOutput:MovieFileOutput])
[CaptureSession addOutput:MovieFileOutput];
[CaptureSession addOutput:VideoDataOutput];
//We use this instead so it goes on a layer behind our UI controls (avoids us having to manually bring each control to the front):
CameraView = [[UIView alloc] init];
[videoPreviewLayer addSubview:CameraView];
[videoPreviewLayer sendSubviewToBack:CameraView];
[[CameraView layer] addSublayer:PreviewLayer];
//----- START THE CAPTURE SESSION RUNNING -----
[CaptureSession startRunning];
}
#pragma mark - IBAction Methods
-(IBAction)btnStartAndStopPressed:(id)sender
{
UIButton *StartAndStopButton = (UIButton*)sender;
if ([StartAndStopButton isSelected] == NO)
{
[StartAndStopButton setSelected:YES];
[btnPauseAndResume setEnabled:YES];
[btnBack setEnabled:NO];
[btnSwitchCameraInput setHidden:YES];
NSDate *date = [NSDate date];
NSLog(#" date %#",date);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *recordedFileName = nil;
recordedFileName = [NSString stringWithFormat:#"output%#.mov",date];
NSString *documentsDirectory = [paths objectAtIndex:0];
self.outputPath = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:#"%#",recordedFileName]];
NSLog(#"%#",self.outputPath);
[arrVideoName addObject:recordedFileName];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:self.outputPath];
if ([[NSFileManager defaultManager] fileExistsAtPath:self.outputPath])
{
NSError *error;
if ([[NSFileManager defaultManager] removeItemAtPath:self.outputPath error:&error] == NO)
{
//Error - handle if requried
}
}
//Start recording
[MovieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
recordingTimer = [NSTimer scheduledTimerWithTimeInterval:1.0 target:self selector:@selector(VideoRecording) userInfo:nil repeats:YES];
}
else
{
[StartAndStopButton setSelected:NO];
[btnPauseAndResume setEnabled:NO];
[btnBack setEnabled:YES];
[btnSwitchCameraInput setHidden:NO];
NSLog(#"STOP RECORDING");
WeAreRecording = NO;
[MovieFileOutput stopRecording];
[((ActOutAppDelegate *)ActOut_AppDelegate) showLoadingViewOnView:self.view withLabel:@"Please wait...."];
if ([recordingTimer isValid])
{
[recordingTimer invalidate];
recordingTimer = nil;
recordingTime = 30;
}
stopRecording = YES;
}
}
- (IBAction)btnPauseAndResumePressed:(id)sender
{
UIButton *PauseAndResumeButton = (UIButton*)sender;
if (PauseAndResumeButton.selected == NO)
{
PauseAndResumeButton.selected = YES;
NSLog(#"recording paused");
WeAreRecording = NO;
[MovieFileOutput stopRecording];
[self pauseTimer:recordingTimer];
[btnStartAndStop setEnabled:NO];
[btnBack setEnabled:YES];
[btnSwitchCameraInput setHidden:NO];
}
else
{
PauseAndResumeButton.selected = NO;
NSLog(#"recording resumed");
[btnStartAndStop setEnabled:YES];
[btnBack setEnabled:NO];
[btnSwitchCameraInput setHidden:YES];
WeAreRecording = YES;
NSDate *date = [NSDate date];
NSLog(#" date %#",date);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,NSUserDomainMask, YES);
NSString *recordedFileName = nil;
recordedFileName = [NSString stringWithFormat:#"output%#.mov",date];
NSString *documentsDirectory = [paths objectAtIndex:0];
self.outputPath = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:#"%#",recordedFileName]];
NSLog(#"%#",self.outputPath);
[arrVideoName addObject:recordedFileName];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:self.outputPath];
if ([[NSFileManager defaultManager] fileExistsAtPath:self.outputPath])
{
NSError *error;
if ([[NSFileManager defaultManager] removeItemAtPath:self.outputPath error:&error] == NO)
{
//Error - handle if requried
}
}
[self resumeTimer:recordingTimer];
//Start recording
[MovieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
}
}
- (void) CameraSetOutputProperties
{
//SET THE CONNECTION PROPERTIES (output properties)
AVCaptureConnection *CaptureConnection = [MovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
[CaptureConnection setVideoOrientation:AVCaptureVideoOrientationPortrait];
//Set frame rate (if requried)
CMTimeShow(CaptureConnection.videoMinFrameDuration);
CMTimeShow(CaptureConnection.videoMaxFrameDuration);
if (CaptureConnection.supportsVideoMinFrameDuration)
CaptureConnection.videoMinFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
if (CaptureConnection.supportsVideoMaxFrameDuration)
CaptureConnection.videoMaxFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
CMTimeShow(CaptureConnection.videoMinFrameDuration);
CMTimeShow(CaptureConnection.videoMaxFrameDuration);
}
- (AVCaptureDevice *) CameraWithPosition:(AVCaptureDevicePosition) Position
{
NSArray *Devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *Device in Devices)
{
if ([Device position] == Position)
{
NSLog(#"%d",Position);
return Device;
}
}
return nil;
}
#pragma mark - AVCaptureFileOutputRecordingDelegate Method
-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
if(videoWriterInput.readyForMoreMediaData && WeAreRecording) [videoWriterInput appendSampleBuffer:sampleBuffer];
for(AVCaptureConnection *captureConnection in [captureOutput connections])
{
if ([captureConnection isVideoOrientationSupported])
{
AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationLandscapeLeft;
[captureConnection setVideoOrientation:orientation];
}
}
UIDeviceOrientation curOr = [[UIDevice currentDevice] orientation];
CGAffineTransform t;
if (curOr == UIDeviceOrientationPortrait)
{
t = CGAffineTransformMakeRotation(-M_PI / 2);
}
else if (curOr == UIDeviceOrientationPortraitUpsideDown)
{
t = CGAffineTransformMakeRotation(M_PI / 2);
}
else if (curOr == UIDeviceOrientationLandscapeRight)
{
t = CGAffineTransformMakeRotation(M_PI);
}
else
{
t = CGAffineTransformMakeRotation(0);
}
}
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
NSLog(#"didFinishRecordingToOutputFileAtURL - enter");
NSLog(#"output file url : %#", [outputFileURL absoluteString]);
BOOL RecordedSuccessfully = YES;
if ([error code] != noErr)
{
// A problem occurred: Find out if the recording was successful.
id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
if (value)
{
RecordedSuccessfully = [value boolValue];
}
}
AVCaptureConnection *videoConnection=nil;
for ( AVCaptureConnection *connection in [MovieFileOutput connections] )
{
NSLog(#"%#", connection);
for ( AVCaptureInputPort *port in [connection inputPorts] )
{
NSLog(#"%#", port);
if ( [[port mediaType] isEqual:AVMediaTypeVideo] )
{
videoConnection = connection;
}
}
}
if([videoConnection isVideoOrientationSupported]) // **Here it is, it's always false**
{
[videoConnection setVideoOrientation:[[UIDevice currentDevice] orientation]];
} NSLog(#"Setting image quality");
NSData *videoData = [NSData dataWithContentsOfURL:outputFileURL];
[videoData writeToFile:self.outputPath atomically:NO];
[arrOutputUrl addObject:outputFileURL];
if (stopRecording)
{
[self mergeMultipleVideo];
}
}
//Method to merge multiple videos
-(void)mergeMultipleVideo
{
mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
CMTime nextClipStartTime = kCMTimeZero;
NSLog(#"Array of output file url : %#", arrOutputUrl);
if (arrOutputUrl.count > 0)
{
for(int i = 0 ;i < [arrOutputUrl count];i++)
{
AVURLAsset* VideoAsset = [[AVURLAsset alloc]initWithURL:[arrOutputUrl objectAtIndex:i] options:nil];
CMTimeRange timeRangeInAsset;
timeRangeInAsset = CMTimeRangeMake(kCMTimeZero, [VideoAsset duration]);
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, VideoAsset.duration) ofTrack:[[VideoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRangeInAsset.duration);
}
}
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.mov",self.finalRecordedVideoName]];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
exportSession.outputURL=url;
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
exportSession.shouldOptimizeForNetworkUse = YES;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
[self exportDidFinish:exportSession path:myPathDocs];
});
}];
}
-(void)exportDidFinish:(AVAssetExportSession*)session path:(NSString*)outputVideoPath
{
NSLog(#"session.status : %d",session.status);
if (session.status == AVAssetExportSessionStatusCompleted)
{
NSURL *outputURL = session.outputURL;
NSData *videoData = [NSData dataWithContentsOfURL:outputURL];
[videoData writeToFile:outputVideoPath atomically:NO];
if ([arrVideoName count] > 0)
{
for (int i = 0; i < [arrVideoName count]; i++)
{
NSArray* documentPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString* fullFilePath = [[documentPaths objectAtIndex:0] stringByAppendingPathComponent: [NSString stringWithFormat:@"%@",[arrVideoName objectAtIndex:i]]];
NSLog(@"Full path of file to be deleted: %@",fullFilePath);
NSFileManager *fileManager = [NSFileManager defaultManager];
NSError *error;
if ([fileManager fileExistsAtPath:fullFilePath])
{
[fileManager removeItemAtPath:fullFilePath error:&error];
}
}
[arrVideoName removeAllObjects];
}
if (arrOutputUrl.count > 0)
{
[arrOutputUrl removeAllObjects];
}
[((ActOutAppDelegate *)ActOut_AppDelegate) removeLoadingViewfromView:self.view];
[self.view addSubview:afterRecordingPopupView];
}
}
Look at the AVCaptureConnection's enabled property. For your output connection, set enabled to NO instead of stopping the session.
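A minimal sketch of that idea (assuming MovieFileOutput keeps writing to the same file while the connection is toggled):
AVCaptureConnection *videoConnection = [MovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
videoConnection.enabled = NO; // pause: frames stop reaching the movie file output
// ... later, to resume into the same recording ...
videoConnection.enabled = YES;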
I need to capture images & video without opening a UIImagePickerController.
You can capture video and photos using an AVCaptureSession;
refer to iPhone SDK 4 AVFoundation - How to use captureStillImageAsynchronouslyFromConnection correctly?
-(void) viewDidAppear:(BOOL)animated
{
AVCaptureSession *session = [[AVCaptureSession alloc] init];
session.sessionPreset = AVCaptureSessionPresetMedium;
CALayer *viewLayer = self.vImagePreview.layer;
NSLog(#"viewLayer = %#", viewLayer);
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
captureVideoPreviewLayer.frame = self.vImagePreview.bounds;
[self.vImagePreview.layer addSublayer:captureVideoPreviewLayer];
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input) {
// Handle the error appropriately.
NSLog(#"ERROR: trying to open camera: %#", error);
}
[session addInput:input];
stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
[stillImageOutput setOutputSettings:outputSettings];
[session addOutput:stillImageOutput];
[session startRunning];
}
-(IBAction) captureNow
{
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in stillImageOutput.connections)
{
for (AVCaptureInputPort *port in [connection inputPorts])
{
if ([[port mediaType] isEqual:AVMediaTypeVideo] )
{
videoConnection = connection;
break;
}
}
if (videoConnection) { break; }
}
NSLog(#"about to request a capture from: %#", stillImageOutput);
[stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error)
{
CFDictionaryRef exifAttachments = CMGetAttachment( imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
if (exifAttachments)
{
// Do something with the attachments.
NSLog(#"attachements: %#", exifAttachments);
}
else
NSLog(#"no attachments");
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
UIImage *image = [[UIImage alloc] initWithData:imageData];
self.vImage.image = image;
}];
}
I need to create a video from the selected image.
I have code that should work, but it gives an error while appending the buffer.
This is how both types of images have been saved:
-(void)imagePickerController:(UIImagePickerController *)picker didFinishPickingImage:(UIImage *)img editingInfo:(NSDictionary *)editInfo
{
// NSLog(#"Came From Here");
imgv.image = img;
AppDelegate *app = (AppDelegate *)[[UIApplication sharedApplication] delegate];
app.imgmain = img;
[self dismissModalViewControllerAnimated:YES];
RecordVoice *rec = [[RecordVoice alloc] initWithNibName:@"RecordVoice" bundle:nil];
rec.hidesBottomBarWhenPushed = YES;
// rec.img.image = img;
[self.navigationController pushViewController:rec animated:YES];
//[self presentModalViewController:rec animated:YES];
[rec release];
// flag =#"yes";
// need to show the upload image button now
// [username, ititle resignFirstResponder];
}
On the other view controller I am showing this image in a UIImageView,
and on a button click I am converting that image to a video with this code.
-(void)createVideoFile
{
NSString *documentsDirectoryPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSArray *dirContents = [[NSFileManager defaultManager] contentsOfDirectoryAtPath:documentsDirectoryPath error:nil];
for (NSString *tString in dirContents) {
if ([tString isEqualToString:@"test.mp4"])
{
[[NSFileManager defaultManager]removeItemAtPath:[NSString stringWithFormat:@"%@/%@",documentsDirectoryPath,tString] error:nil];
}
}
NSString *nfile = [documentsDirectoryPath stringByAppendingPathComponent:@"test.mp4"];
AVURLAsset * urlAsset = [AVURLAsset URLAssetWithURL:recordedTmpFile options:nil];
NSLog(#"Write Started");
NSError *error = nil;
CGSize size = img.image.size; //CGSizeMake(320, 480);
NSLog(#"Write Started");
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:nfile] fileType:AVFileTypeQuickTimeMovie
error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:size.width], AVVideoWidthKey,
[NSNumber numberWithInt:size.height], AVVideoHeightKey,
nil];
AVAssetWriterInput* videoWriterInput = [[AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings] retain];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
sourcePixelBufferAttributes:nil];
NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
videoWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:videoWriterInput];
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer = NULL;
//convert uiimage to CGImage.
int frameCount = 0;
buffer = [self pixelBufferFromCGImage:[img.image CGImage] andSize:size];
BOOL append_ok = NO;
int j = 0;
while (!append_ok && j < 30)
{
if (adaptor.assetWriterInput.readyForMoreMediaData)
{
printf("appending %d attemp %d\n", frameCount, j);
CMTime frameTime = urlAsset.duration;//CMTimeMake(frameCount,(int32_t) 10);
append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
//if(buffer)
// CVBufferRelease(buffer);
[NSThread sleepForTimeInterval:0.05];
}
else
{
printf("adaptor not ready %d, %d\n", frameCount, j);
[NSThread sleepForTimeInterval:0.1];
}
j++;
}
if (!append_ok) {
printf("error appending image %d times %d\n", frameCount, j);
}
frameCount++;
[videoWriterInput markAsFinished];
[videoWriter finishWriting];
[self CompileFilesToMakeMovie];
[altv dismissWithClickedButtonIndex:0 animated:YES];
[altv release];
NSLog(#"Write Ended");
}
But this is not working...
I am stuck; can anyone please help me with this?
I have figured out the problem.
If we use a large image it won't work; pictures taken with the Camera app, for instance, are very large.
So I am scaling the image down to a smaller size first, and then it works.
I haven't yet figured out why it behaves this way, but I have a working solution.
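A sketch of the scaling step (the 320x480 target size is just an example, and the image is assumed to be in img.image as in the code above):
CGSize targetSize = CGSizeMake(320, 480); // example size; match whatever your writer settings use
UIGraphicsBeginImageContext(targetSize);
[img.image drawInRect:CGRectMake(0, 0, targetSize.width, targetSize.height)];
UIImage *smallImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
// build the pixel buffer from the scaled image instead of the original
buffer = [self pixelBufferFromCGImage:[smallImage CGImage] andSize:targetSize];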
I'm trying to get audio to work with the video for an iOS application. The video records fine, but no audio is recorded to the file (my iPhone speaker works).
Here's the init setup:
session = [[AVCaptureSession alloc] init];
menu->session = session;
menu_open = NO;
session.sessionPreset = AVCaptureSessionPresetMedium;
camera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
microphone = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
menu->camera = camera;
[session beginConfiguration];
[camera lockForConfiguration:nil];
if([camera isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]){
camera.exposureMode = AVCaptureExposureModeContinuousAutoExposure;
}
if([camera isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]){
camera.focusMode = AVCaptureFocusModeContinuousAutoFocus;
}
if([camera isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance]){
camera.whiteBalanceMode = AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance;
}
if ([camera hasTorch]) {
if([camera isTorchModeSupported:AVCaptureTorchModeOn]){
[camera setTorchMode:AVCaptureTorchModeOn];
}
}
[camera unlockForConfiguration];
[session commitConfiguration];
AVCaptureDeviceInput * camera_input = [AVCaptureDeviceInput deviceInputWithDevice:camera error:nil];
[session addInput:camera_input];
microphone_input = [[AVCaptureDeviceInput deviceInputWithDevice:microphone error:nil] retain];
AVCaptureVideoDataOutput * output = [[[AVCaptureVideoDataOutput alloc] init] autorelease];
output.videoSettings = [NSDictionary dictionaryWithObject: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
[session addOutput:output];
output.minFrameDuration = CMTimeMake(1,30);
dispatch_queue_t queue = dispatch_queue_create("MY QUEUE", NULL);
[output setSampleBufferDelegate:self queue:queue];
dispatch_release(queue);
audio_output = [[[AVCaptureAudioDataOutput alloc] init] retain];
queue = dispatch_queue_create("MY QUEUE", NULL);
AudioOutputBufferDelegate * special_delegate = [[[AudioOutputBufferDelegate alloc] init] autorelease];
special_delegate->normal_delegate = self;
[special_delegate retain];
[audio_output setSampleBufferDelegate:special_delegate queue:queue];
dispatch_release(queue);
[session startRunning];
Here is the beginning and end of recording:
if (recording) { //Hence stop recording
[video_button setTitle:#"Video" forState: UIControlStateNormal];
recording = NO;
[writer_input markAsFinished];
[audio_writer_input markAsFinished];
[video_writer endSessionAtSourceTime: CMTimeMakeWithSeconds([[NSDate date] timeIntervalSinceDate: start_time],30)];
[video_writer finishWriting];
UISaveVideoAtPathToSavedPhotosAlbum(temp_url,self,@selector(video:didFinishSavingWithError:contextInfo:),nil);
[start_time release];
[temp_url release];
[av_adaptor release];
[microphone lockForConfiguration:nil];
[session beginConfiguration];
[session removeInput:microphone_input];
[session removeOutput:audio_output];
[session commitConfiguration];
[microphone unlockForConfiguration];
[menu restateConfigiration];
[vid_off play];
}else{ //Start recording
[vid_on play];
[microphone lockForConfiguration:nil];
[session beginConfiguration];
[session addInput:microphone_input];
[session addOutput:audio_output];
[session commitConfiguration];
[microphone unlockForConfiguration];
[menu restateConfigiration];
[video_button setTitle:#"Stop" forState: UIControlStateNormal];
recording = YES;
NSError *error = nil;
NSFileManager * file_manager = [[NSFileManager alloc] init];
temp_url = [[NSString alloc] initWithFormat:@"%@/%@", NSTemporaryDirectory(), @"temp.mp4"];
[file_manager removeItemAtPath: temp_url error:NULL];
[file_manager release];
video_writer = [[AVAssetWriter alloc] initWithURL: [NSURL fileURLWithPath:temp_url] fileType: AVFileTypeMPEG4 error: &error];
NSDictionary *video_settings = [NSDictionary dictionaryWithObjectsAndKeys: AVVideoCodecH264, AVVideoCodecKey,[NSNumber numberWithInt:360], AVVideoWidthKey,[NSNumber numberWithInt:480], AVVideoHeightKey,nil];
writer_input = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:video_settings] retain];
AudioChannelLayout acl;
bzero( &acl, sizeof(acl));
acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
audio_writer_input = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings: [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt: kAudioFormatMPEG4AAC], AVFormatIDKey,[NSNumber numberWithInt: 1], AVNumberOfChannelsKey,[NSNumber numberWithFloat: 44100.0], AVSampleRateKey,[NSNumber numberWithInt: 64000], AVEncoderBitRateKey,[NSData dataWithBytes: &acl length: sizeof(acl) ], AVChannelLayoutKey,nil]] retain];
audio_writer_input.expectsMediaDataInRealTime = YES;
av_adaptor = [[AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput: writer_input sourcePixelBufferAttributes:NULL] retain];
[video_writer addInput:writer_input];
[video_writer addInput: audio_writer_input];
[video_writer startWriting];
[video_writer startSessionAtSourceTime: CMTimeMake(0,1)];
start_time = [[NSDate alloc] init];
}
Here is the delegate for the audio:
@implementation AudioOutputBufferDelegate
-(void)captureOutput: (AVCaptureOutput *) captureOutput didOutputSampleBuffer: (CMSampleBufferRef) sampleBuffer fromConnection: (AVCaptureConnection *) connection{
if (normal_delegate->recording) {
CMSampleBufferSetOutputPresentationTimeStamp(sampleBuffer,CMTimeMakeWithSeconds([[NSDate date] timeIntervalSinceDate: normal_delegate->start_time],30));
[normal_delegate->audio_writer_input appendSampleBuffer: sampleBuffer];
}
}
@end
The video method doesn't matter because it works. "restateConfigiration" just sorts out the session configuration, otherwise the torch goes off, etc.:
[session beginConfiguration];
switch (quality) {
case Low:
session.sessionPreset = AVCaptureSessionPresetLow;
break;
case Medium:
session.sessionPreset = AVCaptureSessionPreset640x480;
break;
}
[session commitConfiguration];
[camera lockForConfiguration:nil];
if([camera isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]){
camera.exposureMode = AVCaptureExposureModeContinuousAutoExposure;
}
if([camera isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]){
camera.focusMode = AVCaptureFocusModeContinuousAutoFocus;
}
if([camera isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance]){
camera.whiteBalanceMode = AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance;
}
if ([camera hasTorch]) {
if (torch) {
if([camera isTorchModeSupported:AVCaptureTorchModeOn]){
[camera setTorchMode:AVCaptureTorchModeOn];
}
}else{
if([camera isTorchModeSupported:AVCaptureTorchModeOff]){
[camera setTorchMode:AVCaptureTorchModeOff];
}
}
}
[camera unlockForConfiguration];
Thank you for any help.
AVAssetWriter and Audio
This may be the same issue as mentioned in the linked post. Try commenting out these lines
[writer_input markAsFinished];
[audio_writer_input markAsFinished];
[video_writer endSessionAtSourceTime: CMTimeMakeWithSeconds([[NSDate date] timeIntervalSinceDate: start_time],30)];
Edit
I don't know if the way you are setting the presentation time stamp is necessarily wrong. The way I handle this is with a local variable that is set to 0 on start. Then when my delegate receives the first packet I do:
if (_startTime.value == 0) {
_startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
}
and then
[bufferWriter->writer startWriting];
[bufferWriter->writer startSessionAtSourceTime:_startTime];
Your code looks valid, as you are calculating the time difference for each received packet. However, AVFoundation calculates this for you, and also optimizes the timestamps for placement in the interleaved container. Another thing I am unsure of: each CMSampleBufferRef for audio contains more than one data buffer, where each data buffer has its own PTS. I am not sure whether setting the PTS automatically adjusts all the other data buffers.
Where my code differs from yours is that I use a single dispatch queue for both audio and video. In the callback I use the following (some code removed).
switch (bufferWriter->writer.status) {
case AVAssetWriterStatusUnknown:
if (_startTime.value == 0) {
_startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
}
[bufferWriter->writer startWriting];
[bufferWriter->writer startSessionAtSourceTime:_startTime];
//Break if not ready, otherwise fall through.
if (bufferWriter->writer.status != AVAssetWriterStatusWriting) {
break ;
}
case AVAssetWriterStatusWriting:
if( captureOutput == self.captureManager.audioOutput) {
if( !bufferWriter->audioIn.readyForMoreMediaData) {
break;
}
@try {
if( ![bufferWriter->audioIn appendSampleBuffer:sampleBuffer] ) {
[self delegateMessage:@"Audio Writing Error" withType:ERROR];
}
}
@catch (NSException *e) {
NSLog(@"Audio Exception: %@", [e reason]);
}
}
else if( captureOutput == self.captureManager.videoOutput ) {
if( !bufferWriter->videoIn.readyForMoreMediaData) {
break;
}
@try {
if (!frontCamera) {
if( ![bufferWriter->videoIn appendSampleBuffer:sampleBuffer] ) {
[self delegateMessage:@"Video Writing Error" withType:ERROR];
}
}
else {
CMTime pt = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
flipBuffer(sampleBuffer, pixelBuffer);
if( ![bufferWriter->adaptor appendPixelBuffer:pixelBuffer withPresentationTime:pt] ) {
[self delegateMessage:#"Video Writing Error" withType:ERROR];
}
}
}
@catch (NSException *e) {
NSLog(@"Video Exception: %@", [e reason]);
}
}
break;
case AVAssetWriterStatusCompleted:
return;
case AVAssetWriterStatusFailed:
[self delegateMessage:#"Critical Error Writing Queues" withType:ERROR];
bufferWriter->writer_failed = YES ;
_broadcastError = YES;
[self stopCapture] ;
return;
case AVAssetWriterStatusCancelled:
break;
default:
break;
}