Play audio from AVCaptureAudioDataOutputSampleBufferDelegate - iPhone

I'm capturing audio using AVCaptureAudioDataOutputSampleBufferDelegate:
_captureSession = [[AVCaptureSession alloc] init];
[self.captureSession setSessionPreset:AVCaptureSessionPresetLow];
// Setup Audio input
AVCaptureDevice *audioDevice = [AVCaptureDevice
defaultDeviceWithMediaType:AVMediaTypeAudio];
AVCaptureDeviceInput *captureAudioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&error];
if(error){
NSLog(#"Error Start capture Audio=%#", error);
}else{
if ([self.captureSession canAddInput:captureAudioInput]){
[self.captureSession addInput:captureAudioInput];
}
}
// Setup Audio output
AVCaptureAudioDataOutput *audioCaptureOutput = [[AVCaptureAudioDataOutput alloc] init];
if ([self.captureSession canAddOutput:audioCaptureOutput]){
[self.captureSession addOutput:audioCaptureOutput];
}
[audioCaptureOutput release];
//We create a serial queue
dispatch_queue_t audioQueue= dispatch_queue_create("audioQueue", NULL);
[audioCaptureOutput setSampleBufferDelegate:self queue:audioQueue];
dispatch_release(audioQueue);
/*We start the capture*/
[self.captureSession startRunning];
Delegate:
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
// do something with sampleBuffer
}
The question is: how can I play audio from sampleBuffer?

You can create NSData from the CMSampleBufferRef using the following code and then play it with AVAudioPlayer.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
AudioBufferList audioBufferList;
NSMutableData *data= [NSMutableData data];
CMBlockBufferRef blockBuffer;
CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(sampleBuffer, NULL, &audioBufferList, sizeof(audioBufferList), NULL, NULL, 0, &blockBuffer);
for( int y=0; y< audioBufferList.mNumberBuffers; y++ ){
AudioBuffer audioBuffer = audioBufferList.mBuffers[y];
Float32 *frame = (Float32*)audioBuffer.mData;
[data appendBytes:frame length:audioBuffer.mDataByteSize];
}
CFRelease(blockBuffer);
AVAudioPlayer *player = [[AVAudioPlayer alloc] initWithData:data error:nil];
[player play];
}
I'm worried about how this will perform, though. There is probably a better way to do what you are trying to accomplish.
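If you want to see exactly what you are getting before deciding how to play it, here is a minimal sketch (it uses the same sampleBuffer from the delegate method above) that reads the stream description out of the buffer; the raw bytes carry no header, so this tells you the sample rate, channel count and bit depth you would need to interpret them:
CMAudioFormatDescriptionRef format = CMSampleBufferGetFormatDescription(sampleBuffer);
const AudioStreamBasicDescription *asbd = CMAudioFormatDescriptionGetStreamBasicDescription(format);
if (asbd) {
    // Log the format of the captured PCM so you know how to interpret the bytes.
    NSLog(@"sample rate %f, channels %u, bits per channel %u",
          asbd->mSampleRate,
          (unsigned)asbd->mChannelsPerFrame,
          (unsigned)asbd->mBitsPerChannel);
}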

Related

How to get FNumber and ISOSpeedRatings from iPhone cam in realtime

I need to get camera parameters (EXIF data) such as FNumber and ISOSpeedRatings in real time, without taking actual photos. Is there any way to do that?
Here is the complete solution. Don't forget to import the appropriate frameworks and headers.
#import <AVFoundation/AVFoundation.h>
#import <ImageIO/CGImageProperties.h>
AVCaptureStillImageOutput *stillImageOutput;
AVCaptureSession *session;
- (void)viewDidLoad
{
[super viewDidLoad];
[self setupCaptureSession];
// Do any additional setup after loading the view, typically from a nib.
}
-(void)captureNow{
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in stillImageOutput.connections)
{
for (AVCaptureInputPort *port in [connection inputPorts])
{
if ([[port mediaType] isEqual:AVMediaTypeVideo] )
{
videoConnection = connection;
break;
}
}
if (videoConnection) { break; }
}
[stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection
completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *__strong error) {
CFDictionaryRef exifAttachments = CMGetAttachment( imageDataSampleBuffer, kCGImagePropertyExifDictionary, NULL);
if (exifAttachments)
{
// Do something with the attachments.
NSLog(#"attachements: %#", exifAttachments);
}
else
NSLog(#"no attachments");
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
UIImage *image = [[UIImage alloc] initWithData:imageData];
}];
}
// Create and configure a capture session and start it running
- (void)setupCaptureSession
{
NSError *error = nil;
// Create the session
session = [[AVCaptureSession alloc] init];
// Configure the session to produce lower resolution video frames, if your
// processing algorithm can cope. We'll specify medium quality for the
// chosen device.
session.sessionPreset = AVCaptureSessionPreset352x288;
// Find a suitable AVCaptureDevice
AVCaptureDevice *device = [AVCaptureDevice
defaultDeviceWithMediaType:AVMediaTypeVideo];
[device lockForConfiguration:nil];
device.whiteBalanceMode = AVCaptureWhiteBalanceModeLocked;
device.focusMode = AVCaptureFocusModeLocked;
[device unlockForConfiguration];
// Create a device input with the device and add it to the session.
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device
error:&error];
if (!input) {
// Handling the error appropriately.
}
[session addInput:input];
stillImageOutput = [AVCaptureStillImageOutput new];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
[stillImageOutput setOutputSettings:outputSettings];
if ([session canAddOutput:stillImageOutput])
[session addOutput:stillImageOutput];
// Start the session running to start the flow of data
[session startRunning];
[self captureNow];
}
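Once you have the exifAttachments dictionary from the completion handler above, you can read individual values with the standard ImageIO keys. A rough sketch (the key names are from CGImageProperties.h; the variable names are mine):
NSDictionary *exif = (NSDictionary *)exifAttachments;
NSNumber *fNumber = [exif objectForKey:(NSString *)kCGImagePropertyExifFNumber];
NSArray *isoRatings = [exif objectForKey:(NSString *)kCGImagePropertyExifISOSpeedRatings];
// Log the aperture and the array of ISO speed ratings pulled from the EXIF data.
NSLog(@"FNumber %@, ISOSpeedRatings %@", fNumber, isoRatings);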

UILabel is not getting updated inside AVCaptureSession delegate

I am learning Objective-C and writing a sample app to fetch the video feed from the iPhone camera. I was able to get the feed from the camera and display it on screen. I am also trying to update a UILabel on screen for each video frame inside the delegate method, but the label value is not always getting updated. Here is the code I am using.
This section initializes the capture:
- (void)initCapture
{
NSError *error = nil;
device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if ([device isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus] && [device lockForConfiguration:&error]) {
[device setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
[device unlockForConfiguration];
}
AVCaptureDeviceInput *captureInput = [AVCaptureDeviceInput deviceInputWithDevice:device error:nil];
//AVCaptureStillImageOutput *imageCaptureOutput = [[AVCaptureStillImageOutput alloc] init];
AVCaptureVideoDataOutput *captureOutput =[[AVCaptureVideoDataOutput alloc] init];
captureOutput.alwaysDiscardsLateVideoFrames = YES;
//captureOutput.minFrameDuration = CMTimeMake(1, 1);
captureOutput.alwaysDiscardsLateVideoFrames = YES;
dispatch_queue_t queue;
queue = dispatch_queue_create("cameraQueue", NULL);
[captureOutput setSampleBufferDelegate:self queue:queue];
dispatch_release(queue);
// Set the video output to store frame in BGRA (It is supposed to be faster)
NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
[captureOutput setVideoSettings:videoSettings];
self.captureSession = [[AVCaptureSession alloc] init];
[self.captureSession addInput:captureInput];
[self.captureSession addOutput:captureOutput];
self.prevLayer = [AVCaptureVideoPreviewLayer layerWithSession: self.captureSession];
self.prevLayer.frame = CGRectMake(0, 0, 320, 320);
self.prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
[self.videoPreview.layer addSublayer: self.prevLayer];
[self.captureSession startRunning];
}
This method is called for each video frame.
#pragma mark AVCaptureSession delegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection
{
i++;
self.lblStatus.text = [NSString stringWithFormat:@"%d", i];
}
I am trying to update the UILabel inside this method, but it is not always updated. There is a big delay before the label text changes.
Could someone help please?
Thanks.
Your sampleBufferDelegate's captureOutput is being called on a non-main thread, and updating UI objects from there does no good. Try using performSelectorOnMainThread instead.
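A minimal sketch of that suggestion, reusing the lblStatus and i names from the question:
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection
{
    i++;
    // UIKit must only be touched from the main thread, so hop over to it.
    [self.lblStatus performSelectorOnMainThread:@selector(setText:)
                                     withObject:[NSString stringWithFormat:@"%d", i]
                                  waitUntilDone:NO];
}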

iPhone - processing frames that are being recorded by the camera

I have this app that records video, and I need to fire a method every time a frame is grabbed. After banging my head against the wall, I decided to try the following: create a dispatch queue, as I would to grab video from the output, just to have a method called whenever a frame is recorded by the camera.
I am trying to understand a section of code created by Apple to record videos, to figure out how I should add the dispatch queue. This is the Apple code, and the section marked between asterisks is what I have added in order to create the queue. It compiles without errors, but captureOutput:didOutputSampleBuffer:fromConnection: is never called.
- (BOOL) setupSessionWithPreset:(NSString *)sessionPreset error:(NSError **)error
{
BOOL success = NO;
// Init the device inputs
AVCaptureDeviceInput *videoInput = [[[AVCaptureDeviceInput alloc] initWithDevice:[self backFacingCamera] error:error] autorelease];
[self setVideoInput:videoInput]; // stash this for later use if we need to switch cameras
AVCaptureDeviceInput *audioInput = [[[AVCaptureDeviceInput alloc] initWithDevice:[self audioDevice] error:error] autorelease];
[self setAudioInput:audioInput];
AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
[self setMovieFileOutput:movieFileOutput];
[movieFileOutput release];
// Setup and start the capture session
AVCaptureSession *session = [[AVCaptureSession alloc] init];
if ([session canAddInput:videoInput]) {
[session addInput:videoInput];
}
if ([session canAddInput:audioInput]) {
[session addInput:audioInput];
}
if ([session canAddOutput:movieFileOutput]) {
[session addOutput:movieFileOutput];
}
[session setSessionPreset:sessionPreset];
// I added this *****************
dispatch_queue_t queue = dispatch_queue_create("myqueue", NULL);
[[self videoDataOutput] setSampleBufferDelegate:self queue:queue];
dispatch_release(queue);
// ******************** end of my code
[session startRunning];
[self setSession:session];
[session release];
success = YES;
return success;
}
What I need is just a method where I can process every frame that is being recorded.
thanks
Having set yourself as the delegate, you'll receive a call to:
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection
every time a new frame is captured. You can put whatever code you want in there; just be careful because you won't be on the main thread. It's probably safest to do a quick [target performSelectorOnMainThread:@selector(methodYouActuallyWant) withObject:nil waitUntilDone:NO] in -captureOutput:didOutputSampleBuffer:fromConnection:.
Addition: I use the following as setup in my code, and that successfully leads to the delegate method being called. I'm unable to see any substantial difference between it and what you're using.
- (id)initWithSessionPreset:(NSString *)sessionPreset delegate:(id <AAVideoSourceDelegate>)aDelegate
{
#ifndef TARGET_OS_EMBEDDED
return nil;
#else
if(self = [super init])
{
delegate = aDelegate;
NSError *error = nil;
// create a low-quality capture session
session = [[AVCaptureSession alloc] init];
session.sessionPreset = sessionPreset;
// grab a suitable device...
device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
// ...and a device input
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if(!input || error)
{
[self release];
return nil;
}
[session addInput:input];
// create a VideoDataOutput to route output to us
AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
[session addOutput:[output autorelease]];
// create a suitable dispatch queue, GCD style, and hook self up as the delegate
dispatch_queue_t queue = dispatch_queue_create("aQueue", NULL);
[output setSampleBufferDelegate:self queue:queue];
dispatch_release(queue);
// set 32bpp BGRA pixel format, since I'll want to make sense of the frame
output.videoSettings =
[NSDictionary
dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
forKey:(id)kCVPixelBufferPixelFormatTypeKey];
}
return self;
#endif
}
- (void)start
{
[session startRunning];
}
- (void)stop
{
[session stopRunning];
}
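For completeness, a hypothetical usage of that class; AAVideoSource is my guess at the class name based on the delegate protocol, the answer does not state it:
// Assumed usage; substitute the real class name and whatever preset you need.
AAVideoSource *source = [[AAVideoSource alloc] initWithSessionPreset:AVCaptureSessionPresetMedium delegate:self];
[source start];
// ... later, when you are done with the frames ...
[source stop];
[source release];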
// create a suitable dispatch queue, GCD style, and hook self up as the delegate
dispatch_queue_t queue = dispatch_queue_create("aQueue", NULL);
[output setSampleBufferDelegate:self queue:queue];
dispatch_release(queue);
Also, very importantly, inside
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection
be sure to put an NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init]; at the beginning and a [pool drain]; at the end, otherwise it will crash after processing too many frames.
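In other words, something along these lines (a sketch under manual reference counting, which is what this code uses):
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection
{
    NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
    // ... process the frame here ...
    [pool drain];
}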

AVAssetWriterInput

I am trying to take the camera input and write the data to disk using AVAssetWriter. From the delegate, it simply appears that the AVAssetWriterInputPixelBufferAdaptor is failing to append data. I'm not sure why.
- (NSURL*) assetURL{
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);
NSString *basePath = ([paths count] > 0) ? [paths objectAtIndex:0] : nil;
NSString *filePath = [basePath stringByAppendingPathComponent:@"videoOutput"];
return [NSURL fileURLWithPath:filePath];
}
- (id) init {
if (!(self = [super init])) return nil;
self.captureSession = [[AVCaptureSession alloc] init];
self.captureSession.sessionPreset = AVCaptureSessionPresetHigh;
// HIGH: 640 x 480
// MEDIUM: 360 x 480
// LOW: 192 x 144
[self loadVideoInput];
[self loadVideoOutput];
[self loadPreviewLayer];
[self loadWriter];
return self;
}
- (void) loadVideoInput{
AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *device in devices) {
if (device.position == AVCaptureDevicePositionFront) {
videoDevice = device;
break;
}
}
if ( videoDevice ) {
NSError *error;
AVCaptureDeviceInput *videoIn = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
if ( !error ) {
if ([self.captureSession canAddInput:videoIn])
[self.captureSession addInput:videoIn];
else NSLog(#"Couldn't add video input");
} else NSLog(#"Couldn't create video input");
} else NSLog(#"Couldn't create video capture device");
}
- (void) loadVideoOutput{
AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
output.alwaysDiscardsLateVideoFrames = YES;
[output setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
//dispatch_queue_t queue = dispatch_queue_create("myQueue", NULL);
//[output setSampleBufferDelegate:self queue:queue];
//dispatch_release(queue);
//output.minFrameDuration = CMTimeMake(15, 1); // If you wish to cap the frame rate to a known value, such as 15 fps, set
[output setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]]; // BGRA is necessary for manual preview
if ([self.captureSession canAddOutput:output])
[self.captureSession addOutput:output];
else
NSLog(@"Couldn't add video output");
//[self.captureSession addOutput:output];
[output release];
}
- (void) loadPreviewLayer{
previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession];
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
}
- (void) loadWriter{
NSError *error = nil;
videoWriter = [[AVAssetWriter alloc] initWithURL:[self assetURL] fileType:AVFileTypeQuickTimeMovie error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:640], AVVideoWidthKey,
[NSNumber numberWithInt:480], AVVideoHeightKey,
nil];
writerInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings] retain];
writerInput.expectsMediaDataInRealTime = YES;
NSParameterAssert(writerInput);
NSParameterAssert([videoWriter canAddInput:writerInput]);
[videoWriter addInput:writerInput];
currentTime = kCMTimeZero;
adaptor = [[AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput sourcePixelBufferAttributes:nil] retain];
NSLog(#"Error? %#",error);
}
- (void) captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
if(recording){
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
currentTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
// STUFF ISN'T WORKING HERE
BOOL success = [adaptor appendPixelBuffer:imageBuffer withPresentationTime:currentTime];
NSLog(#"%#",success ? #"YES" : #"NO");
}
}
- (void) startRecording{
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:currentTime];
recording = YES;
}
- (void) stopRecording{
recording = NO;
[writerInput markAsFinished];
[videoWriter endSessionAtSourceTime:currentTime];
[videoWriter finishWriting];
}
You need to remove the existing file ("filePath" in your case) each time before trying to record the video.
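For example, something like this before creating the AVAssetWriter (a sketch reusing the assetURL method from the question):
// Delete any leftover file first, otherwise -startWriting will fail.
NSString *existingPath = [[self assetURL] path];
NSFileManager *fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:existingPath]) {
    [fileManager removeItemAtPath:existingPath error:nil];
}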
[adaptor appendPixelBuffer:imageBuffer withPresentationTime:currentTime];
Maybe you need to manage the video time yourself. The currentTime you get from the sample buffer is not exactly the video time; try accumulating the capture time per frame.
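One hedged way to do that is to anchor the writer session to the first captured frame's timestamp instead of kCMTimeZero, and append every buffer with its own presentation time. In this sketch, sessionStarted is an assumed BOOL ivar that the question's code does not have, and -startWriting would still be called from startRecording:
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    if (!recording) return;
    CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    if (!sessionStarted) {
        // Anchor the writer's timeline to the first frame we actually receive.
        [videoWriter startSessionAtSourceTime:pts];
        sessionStarted = YES;
    }
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (writerInput.readyForMoreMediaData) {
        [adaptor appendPixelBuffer:imageBuffer withPresentationTime:pts];
    }
}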

Video Recording using AVFoundation Framework iPhone?

I'm developing an application with the help of sample code from the WWDC 2010 AVCamDemo example. In the app I need to record video from the front camera of the iPhone, but since the new iPhone 4 is not available where I am, I am not able to test the code properly.
I would be really thankful if someone could give me a heads-up on whether I'm going in the right direction or not. The limited code I could test on my iPhone 3G (upgraded to iOS 4.1) crashes when I set the AVCaptureSession, as shown in the code below:
- (void)recordVideo
{
NSLog(#"video recording on");
AVCaptureDevice *videoCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDeviceInput *videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:videoCaptureDevice error:nil];
AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
[movieFileOutput release];
AVCaptureSession *session = [[AVCaptureSession alloc] init];
[session addInput:videoInput];
[session addOutput:movieFileOutput];
[self setSession:session]; // crashes
if (![session isRunning])
{
[self performSelector:@selector(startRecording) withObject:nil afterDelay:1.0];
[session startRunning];
}
}
- (void)startRecording
{
AVCaptureConnection *videoConnection = [[self class] connectionWithMediaType:AVMediaTypeVideo fromConnections:[[self movieFileOutput] connections]];
if ([videoConnection isVideoOrientationSupported]) {
[videoConnection setVideoOrientation:[self orientation]];
}
[[self movieFileOutput] startRecordingToOutputFileURL:[self tempFileURL]
recordingDelegate:self];
}
- (void) stopRecording
{
NSLog(#"stop recording");
[[self movieFileOutput] stopRecording];
}
- (NSURL *) tempFileURL
{
NSString *outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"output.mov"];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
NSFileManager *fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:outputPath]) {
NSLog(#"file saved");
}
[outputPath release];
return [outputURL autorelease];
}
+ (AVCaptureConnection *)connectionWithMediaType:(NSString *)mediaType fromConnections:(NSArray *)connections
{
for ( AVCaptureConnection *connection in connections ) {
for ( AVCaptureInputPort *port in [connection inputPorts] ) {
if ( [[port mediaType] isEqual:mediaType] ) {
return [[connection retain] autorelease];
}
}
}
return nil;
}
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput
didStartRecordingToOutputFileAtURL:(NSURL *)fileURL
fromConnections:(NSArray *)connections
{
}
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput
didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
fromConnections:(NSArray *)connections
error:(NSError *)error
{
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputFileURL])
{
[library writeVideoAtPathToSavedPhotosAlbum:outputFileURL
completionBlock:^(NSURL *assetURL, NSError *error) { }];
}
[library release];
}
movieFileOutput is released immediately after it has been allocated (the [movieFileOutput release]; line near the top of recordVideo), before it is ever added to the session, so you end up adding and recording with a deallocated object.
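A minimal sketch of the fix, assuming a retained movieFileOutput property (startRecording already reads one via [self movieFileOutput]): hand ownership to the property before releasing the local reference.
AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
[self setMovieFileOutput:movieFileOutput]; // retained property keeps it alive; startRecording needs it later
[movieFileOutput release];                 // safe now that the property owns it
// ... later: [session addOutput:[self movieFileOutput]]; as before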