If I just have the video added as an input, the preview layer works fine and the video records fine, but as soon as I add audio as an input, the preview layer freezes and the video file comes out corrupted. What could be causing this?
-(void) record {
AVCaptureSession *session = [[AVCaptureSession alloc] init];
[session beginConfiguration];
session.sessionPreset = AVCaptureSessionPresetMedium;
CALayer *viewLayer = self.vImagePreview.layer;
NSLog(#"viewLayer = %#", viewLayer);
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
captureVideoPreviewLayer.frame = self.vImagePreview.bounds;
[self.vImagePreview.layer addSublayer:captureVideoPreviewLayer];
AVCaptureDevice *device = [self frontFacingCameraIfAvailable];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input) {
// Handle the error appropriately.
NSLog(#"ERROR: trying to open camera: %#", error);
}
AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:nil];
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectoryPath = [paths objectAtIndex:0];
AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
NSString *archives = [documentsDirectoryPath stringByAppendingPathComponent:@"archives"];
NSString *outputpathofmovie = [[archives stringByAppendingPathComponent:@"Test"] stringByAppendingString:@".mp4"];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputpathofmovie];
[session addInput:input];
[session addInput:audioInput];
[session addOutput:movieFileOutput];
[session commitConfiguration];
[session startRunning];
[movieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
}
-(AVCaptureDevice *)frontFacingCameraIfAvailable
{
NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
AVCaptureDevice *captureDevice = nil;
for (AVCaptureDevice *device in videoDevices)
{
if (device.position == AVCaptureDevicePositionFront)
{
captureDevice = device;
break;
}
}
// couldn't find one on the front, so just get the default video device.
if ( ! captureDevice)
{
captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
}
return captureDevice;
}
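For reference, a minimal defensive sketch (same variable names as above; the missing-directory point is an assumption, not a confirmed diagnosis). startRecordingToOutputFileURL: fails silently when the target directory doesn't exist, and addInput:/addOutput: are safer wrapped in canAddInput:/canAddOutput::
// Guard each add -- an input the session can't accept would otherwise
// raise or leave the session misconfigured.
if ([session canAddInput:input])            [session addInput:input];
if ([session canAddInput:audioInput])       [session addInput:audioInput];
if ([session canAddOutput:movieFileOutput]) [session addOutput:movieFileOutput];
// Create the "archives" directory before recording into it.
[[NSFileManager defaultManager] createDirectoryAtPath:archives
                          withIntermediateDirectories:YES
                                           attributes:nil
                                                error:NULL];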
I am using the following code to embed the camera into my application's view. Here is my code:
- (void)viewDidLoad
{
[super viewDidLoad];
AVCaptureSession *session = [[AVCaptureSession alloc] init];
session.sessionPreset = AVCaptureSessionPresetMedium;
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
captureVideoPreviewLayer.frame = self.cameraView.bounds;
[self.cameraView.layer addSublayer:captureVideoPreviewLayer];
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input)
{
[Utilities alertDisplay:@"Error" message:@"Camera not found. Please use Photo Gallery instead."];
}
[session addInput:input];
stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
[stillImageOutput setOutputSettings:outputSettings];
[session addOutput:stillImageOutput];
[session startRunning];
}
-(AVCaptureDevice *)backFacingCameraIfAvailable{
NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
AVCaptureDevice *captureDevice = nil;
for (AVCaptureDevice *device in videoDevices){
if (device.position == AVCaptureDevicePositionBack){
captureDevice = device;
break;
}
}
// couldn't find one on the back, so just get the default video device.
if (!captureDevice){
captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
}
return captureDevice;
}
- (IBAction)scanButtonPressed:(id)sender
{
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in stillImageOutput.connections)
{
for (AVCaptureInputPort *port in [connection inputPorts])
{
if ([[port mediaType] isEqual:AVMediaTypeVideo] )
{
videoConnection = connection;
break;
}
}
if (videoConnection) { break; }
}
NSLog(#"about to request a capture from: %#", stillImageOutput);
[stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error)
{
CFDictionaryRef exifAttachments = CMGetAttachment( imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
if (exifAttachments)
{
// Do something with the attachments.
NSLog(#"attachements: %#", exifAttachments);
}
else
NSLog(#"no attachments");
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
UIImage *image = [[UIImage alloc] initWithData:imageData];
//self.vImage.image = image;
}];
}
The problem I am facing is that no camera preview ever appears in my cameraView, and when scanButtonPressed: fires,
stillImageOutput.connections contains 0 objects.
What is wrong?
Well, I just copy-pasted your code into a blank project and it worked fine once I changed self.cameraView.layer to self.view.layer. However, when I created self.cameraView and never initialized it, I saw symptoms similar to those you describe.
Overall, I would check that self.cameraView isn't nil. If it's created programmatically, make sure you're calling alloc/init and setting a frame; if it's an IBOutlet, make sure it's properly linked.
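A quick way to check, as a sketch (using the same property name as above):
NSLog(@"cameraView = %@", self.cameraView); // prints "(null)" if the outlet was never set
if (!self.cameraView) {
    // Fall back to creating it programmatically -- the frame here is illustrative.
    self.cameraView = [[UIView alloc] initWithFrame:self.view.bounds];
    [self.view addSubview:self.cameraView];
}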
I need to capture images and video without opening a UIImagePickerController.
You can capture video and photos using AVCaptureSession.
Refer to iPhone SDK 4 AVFoundation - How to use captureStillImageAsynchronouslyFromConnection correctly?
-(void) viewDidAppear:(BOOL)animated
{
AVCaptureSession *session = [[AVCaptureSession alloc] init];
session.sessionPreset = AVCaptureSessionPresetMedium;
CALayer *viewLayer = self.vImagePreview.layer;
NSLog(#"viewLayer = %#", viewLayer);
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
captureVideoPreviewLayer.frame = self.vImagePreview.bounds;
[self.vImagePreview.layer addSublayer:captureVideoPreviewLayer];
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input) {
// Handle the error appropriately.
NSLog(#"ERROR: trying to open camera: %#", error);
}
[session addInput:input];
stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
[stillImageOutput setOutputSettings:outputSettings];
[session addOutput:stillImageOutput];
[session startRunning];
}
-(IBAction) captureNow
{
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in stillImageOutput.connections)
{
for (AVCaptureInputPort *port in [connection inputPorts])
{
if ([[port mediaType] isEqual:AVMediaTypeVideo] )
{
videoConnection = connection;
break;
}
}
if (videoConnection) { break; }
}
NSLog(#"about to request a capture from: %#", stillImageOutput);
[stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error)
{
CFDictionaryRef exifAttachments = CMGetAttachment( imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
if (exifAttachments)
{
// Do something with the attachments.
NSLog(#"attachements: %#", exifAttachments);
}
else
NSLog(#"no attachments");
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
UIImage *image = [[UIImage alloc] initWithData:imageData];
self.vImage.image = image;
}];
}
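If you also need to persist the captured still, a minimal sketch to drop inside the completion handler after jpegStillImageNSDataRepresentation: (the file name is illustrative):
NSString *photoPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"still.jpg"];
[imageData writeToFile:photoPath atomically:YES];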
I have an app that plays background music. When a certain button is pushed, it pushes a new view using the navigation controller; that new view has an MPMoviePlayerController and an AVCaptureSession with a shared audio session. After that view is dismissed, the background music is much softer than it was. What is causing the volume to be so subdued after playback?
NSError* error4 = nil;
AVAudioSession* audioSession = [AVAudioSession sharedInstance];
if (![audioSession setCategory:AVAudioSessionCategoryPlayAndRecord error:&error4]) {
NSLog(#"AVAudioSession setCategory failed: %#", [error4 localizedDescription]);
}
// Set audio session property "allow mixing" to true so audio can be recorded while it is playing
UInt32 allowMixing = true;
OSStatus status = AudioSessionSetProperty(kAudioSessionProperty_OverrideCategoryMixWithOthers, sizeof(allowMixing), &allowMixing);
if (status != kAudioSessionNoError) {
NSLog(#"AudioSessionSetProperty(kAudioSessionProperty_OverrideCategoryMixWithOthers) failed: %ld", status);
}
// Activate the audio session
error4 = nil;
if (![audioSession setActive:YES error:&error4]) {
NSLog(#"AVAudioSession setActive:YES failed: %#", [error4 localizedDescription]);
}
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectoryPath = [paths objectAtIndex:0];
NSString *proud = [[documentsDirectoryPath stringByAppendingPathComponent:@"imissyou"] stringByAppendingPathComponent:selectedCountry];
NSURL *movieURL = [[NSURL fileURLWithPath:proud] retain];
player =
[[MPMoviePlayerController alloc] initWithContentURL: movieURL];
player.useApplicationAudioSession=YES;
[player prepareToPlay];
player.controlStyle = MPMovieControlStyleNone;
player.allowsAirPlay = NO;
player.scalingMode = MPMovieScalingModeFill;
player.view.frame = self.view.frame;
[self.view insertSubview:player.view belowSubview:vImagePreview];
[player setFullscreen:YES animated:YES];
// ...
[[NSNotificationCenter defaultCenter]
addObserver:self
selector:@selector(movieFinishedCallback:)
name:MPMoviePlayerPlaybackDidFinishNotification
object:player];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(exitedFullscreen:) name:MPMoviePlayerDidExitFullscreenNotification object:player];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(moviePlayerWillExitFullscreen:)
name:MPMoviePlayerWillExitFullscreenNotification
object:player];
[player play];
session = [[AVCaptureSession alloc] init];
[session beginConfiguration];
session.sessionPreset = AVCaptureSessionPresetMedium;
CALayer *viewLayer = self.vImagePreview.layer;
NSLog(#"viewLayer = %#", viewLayer);
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
captureVideoPreviewLayer.frame = self.vImagePreview.bounds;
[captureVideoPreviewLayer setCornerRadius:14];
[captureVideoPreviewLayer setBorderWidth:3.0];
[captureVideoPreviewLayer setBorderColor:[[UIColor whiteColor] CGColor]];
[[vImagePreview layer] setCornerRadius:14];
[[vImagePreview layer] setBorderWidth:3.0];
[[vImagePreview layer] setBorderColor:[[UIColor whiteColor] CGColor]];
[self.vImagePreview.layer addSublayer:captureVideoPreviewLayer];
[captureVideoPreviewLayer release];
AVCaptureDevice *device = [self frontFacingCameraIfAvailable];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input) {
// Handle the error appropriately.
NSLog(#"ERROR: trying to open camera: %#", error);
}
AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
NSError *error2 = nil;
AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error2];
AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
NSString *archives = [documentsDirectoryPath stringByAppendingPathComponent:@"archives"];
NSString *editedfilename = [[selectedCountry lastPathComponent] stringByDeletingPathExtension];
NSString *datestring = [[editedfilename stringByAppendingString:@" "] stringByAppendingString:currentTime];
NSLog(@"%@", datestring);
NSString *outputpathofmovie = [[archives stringByAppendingPathComponent:datestring] stringByAppendingString:@".mp4"];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputpathofmovie];
[session addInput:input];
[session addInput:audioInput];
[session addOutput:movieFileOutput];
[session commitConfiguration];
[session startRunning];
[movieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
[movieURL release];
[outputURL release];
The audio session may be "ducking" the background music. You might want to set the property "kAudioSessionProperty_OtherMixableAudioShouldDuck" to false to disable that.
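With the same C audio-session API the code above already uses, that would look roughly like this (a sketch, not verified against this exact setup):
UInt32 shouldDuck = 0; // false: don't duck other audio while this session is active
AudioSessionSetProperty(kAudioSessionProperty_OtherMixableAudioShouldDuck,
                        sizeof(shouldDuck), &shouldDuck);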
I got it figured out now. I had the category set to PlayAndRecord. I changed it to Ambient (which also allows recording audio while playing audio) and removed the OverrideCategoryMixWithOthers property, leaving just:
OSStatus propertySetError = 0;
UInt32 allowMixing = true;
propertySetError |= AudioSessionSetProperty(kAudioSessionProperty_OtherMixableAudioShouldDuck, sizeof(allowMixing), &allowMixing);
I want to make a twin screen using the built-in camera on iOS.
I tried the following code, but it shows just one view.
That's the natural result, I know.
Here's the code I used:
- (void)prepareCameraView:(UIView *)window
{
AVCaptureSession *session = [[AVCaptureSession alloc] init];
session.sessionPreset = AVCaptureSessionPresetMedium;
CALayer *viewLayer = window.layer;
NSLog(#"viewLayer = %#", viewLayer);
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc]
initWithSession:session];
captureVideoPreviewLayer.frame = window.bounds;
[window.layer addSublayer:captureVideoPreviewLayer];
AVCaptureDevice *captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
if (!input)
{
NSLog(#"ERROR : trying to open camera : %#", error);
}
[session addInput:input];
[session startRunning];
}
How can I get a double screen on iOS?
// Use this code
AVCaptureSession *session = [AVCaptureSession new];
AVCaptureDevice *inputDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error;
AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice error:&error];
if ( [session canAddInput:deviceInput])
{
[session addInput:deviceInput];
}
AVCaptureVideoPreviewLayer *previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
[previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
[previewLayer setFrame:CGRectMake(0.0, 0.0, self.view.bounds.size.width, self.view.bounds.size.height)];
NSUInteger replicatorInstances = 2;
CGFloat replicatorViewHeight = (self.view.bounds.size.height - 64)/replicatorInstances;
CAReplicatorLayer *replicatorLayer = [CAReplicatorLayer layer];
replicatorLayer.frame = CGRectMake(0, 0.0, self.view.bounds.size.width, replicatorViewHeight);
replicatorLayer.instanceCount = replicatorInstances;
replicatorLayer.instanceTransform = CATransform3DMakeTranslation(0.0, replicatorViewHeight, 0.0);
[replicatorLayer addSublayer:previewLayer];
[self.view.layer addSublayer:replicatorLayer];
[session startRunning];
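Note that CAReplicatorLayer clones its sublayers, so both tiles show the same camera feed, just offset by the instanceTransform; it does not give you two different cameras.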
Try this:
- (void)prepareCameraView:(UIView *)window
{
NSArray *captureDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
{
AVCaptureSession *session = [[AVCaptureSession alloc] init];
session.sessionPreset = AVCaptureSessionPresetMedium;
CALayer *viewLayer = window.layer;
NSLog(#"viewLayer = %#", viewLayer);
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
captureVideoPreviewLayer.frame = CGRectMake(0.0f, 0.0f, window.bounds.size.width/2.0f, window.bounds.size.height);
[window.layer addSublayer:captureVideoPreviewLayer];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:[captureDevices objectAtIndex:0] error:&error];
if (!input)
{
NSLog(#"ERROR : trying to open camera : %#", error);
}
[session addInput:input];
[session startRunning];
}
{
AVCaptureSession *session = [[AVCaptureSession alloc] init];
session.sessionPreset = AVCaptureSessionPresetMedium;
CALayer *viewLayer = window.layer;
NSLog(#"viewLayer = %#", viewLayer);
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
captureVideoPreviewLayer.frame = CGRectMake(window.bounds.size.width/2.0f, 0.0f, window.bounds.size.width/2.0f, window.bounds.size.height);
[window.layer addSublayer:captureVideoPreviewLayer];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:[captureDevices objectAtIndex:1] error:&error];
if (!input)
{
NSLog(#"ERROR : trying to open camera : %#", error);
}
[session addInput:input];
[session startRunning];
}
}
Note that this makes absolutely no checks that there are actually two cameras, and it splits the view vertically, so it's probably best viewed in landscape. You'll want to add some checks to that code (see the sketch below) and work out exactly how you want to lay out the layers of each camera before using it.
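As a starting point for those checks, something like this before building the second session (a sketch; also note that iOS hardware of this era generally cannot stream from two cameras at once, so the second startRunning may simply stall the first session):
NSArray *captureDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
if ([captureDevices count] < 2) {
    NSLog(@"Only %lu camera(s) available -- can't build a twin view",
          (unsigned long)[captureDevices count]);
    return;
}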
I am trying to take the camera input and write the data to disk using AVAssetWriter. From the delegate, it simply appears that the AVAssetWriterInputPixelBufferAdaptor is failing to append data. I'm not sure why.
- (NSURL*) assetURL{
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);
NSString *basePath = ([paths count] > 0) ? [paths objectAtIndex:0] : nil;
NSString *filePath = [basePath stringByAppendingPathComponent:@"videoOutput"];
return [NSURL fileURLWithPath:filePath];
}
- (id) init {
if (!(self = [super init])) return nil;
self.captureSession = [[AVCaptureSession alloc] init];
self.captureSession.sessionPreset = AVCaptureSessionPresetHigh;
// HIGH: 640 x 480
// MEDIUM: 360 x 480
// LOW: 192 x 144
[self loadVideoInput];
[self loadVideoOutput];
[self loadPreviewLayer];
[self loadWriter];
return self;
}
- (void) loadVideoInput{
AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *device in devices) {
if (device.position == AVCaptureDevicePositionFront) {
videoDevice = device;
break;
}
}
if ( videoDevice ) {
NSError *error;
AVCaptureDeviceInput *videoIn = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
if ( !error ) {
if ([self.captureSession canAddInput:videoIn])
[self.captureSession addInput:videoIn];
else NSLog(#"Couldn't add video input");
} else NSLog(#"Couldn't create video input");
} else NSLog(#"Couldn't create video capture device");
}
- (void) loadVideoOutput{
AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
output.alwaysDiscardsLateVideoFrames = YES;
[output setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
//dispatch_queue_t queue = dispatch_queue_create("myQueue", NULL);
//[output setSampleBufferDelegate:self queue:queue];
//dispatch_release(queue);
//output.minFrameDuration = CMTimeMake(15, 1); // If you wish to cap the frame rate to a known value, such as 15 fps, set
[output setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]]; // BGRA is necessary for manual preview
if ([self.captureSession canAddOutput:output])
[self.captureSession addOutput:output];
else
NSLog(#"Couldn't add video output");
//[self.captureSession addOutput:output];
[output release];
}
- (void) loadPreviewLayer{
previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession];
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
}
- (void) loadWriter{
NSError *error = nil;
videoWriter = [[AVAssetWriter alloc] initWithURL:[self assetURL] fileType:AVFileTypeQuickTimeMovie error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:640], AVVideoWidthKey,
[NSNumber numberWithInt:480], AVVideoHeightKey,
nil];
writerInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings] retain];
writerInput.expectsMediaDataInRealTime = YES;
NSParameterAssert(writerInput);
NSParameterAssert([videoWriter canAddInput:writerInput]);
[videoWriter addInput:writerInput];
currentTime = kCMTimeZero;
adaptor = [[AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput sourcePixelBufferAttributes:nil] retain];
NSLog(#"Error? %#",error);
}
- (void) captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
if(recording){
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
currentTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
// STUFF ISN'T WORKING HERE
BOOL success = [adaptor appendPixelBuffer:imageBuffer withPresentationTime:currentTime];
NSLog(#"%#",success ? #"YES" : #"NO");
}
}
- (void) startRecording{
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:currentTime];
recording = YES;
}
- (void) stopRecording{
recording = NO;
[writerInput markAsFinished];
[videoWriter endSessionAtSourceTime:currentTime];
[videoWriter finishWriting];
}
You need to remove the existing file ("filePath" in your case) each time before you record: AVAssetWriter fails to start writing if a file already exists at its output URL.
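Something along these lines before calling startWriting, reusing the assetURL helper above (a sketch):
NSString *filePath = [[self assetURL] path];
if ([[NSFileManager defaultManager] fileExistsAtPath:filePath]) {
    [[NSFileManager defaultManager] removeItemAtPath:filePath error:nil];
}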
[adaptor appendPixelBuffer:imageBuffer withPresentationTime:currentTime];
Also, you may need to manage the video time yourself: the currentTime you get from the sample buffer is not exactly the video time. Try accumulating the capture time per frame, or anchor the session to the first buffer's timestamp, as sketched below.
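A common pattern (a hedged sketch, not a confirmed fix for this code; sessionStarted is a hypothetical BOOL ivar) is to start the writer session with the first captured buffer's own timestamp rather than kCMTimeZero, since capture timestamps come from the host clock and don't begin at zero:
- (void) captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    if (!recording) return;
    CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    if (!sessionStarted) {
        // Anchor the writer's timeline to the first real capture timestamp.
        [videoWriter startSessionAtSourceTime:pts];
        sessionStarted = YES;
    }
    [adaptor appendPixelBuffer:CMSampleBufferGetImageBuffer(sampleBuffer)
          withPresentationTime:pts];
}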