I record audio in .caf format and later need to convert it to .wav in order to upload the file to Dropbox. How can I convert the file to WAV format on iOS?
I don't want to record the audio directly in .wav format.
Also, since the conversion takes quite a long time, I need to show a conversion progress bar.
Try this:
-(void) convertToWav
{
// set up an AVAssetReader to read from the iPod Library
NSString *cafFilePath = [[NSBundle mainBundle] pathForResource:@"test" ofType:@"caf"];
NSURL *assetURL = [NSURL fileURLWithPath:cafFilePath];
AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:assetURL options:nil];
NSError *assetError = nil;
AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:songAsset
error:&assetError];
if (assetError) {
NSLog (@"error: %@", assetError);
return;
}
AVAssetReaderOutput *assetReaderOutput = [AVAssetReaderAudioMixOutput
assetReaderAudioMixOutputWithAudioTracks:songAsset.tracks
audioSettings: nil];
if (! [assetReader canAddOutput: assetReaderOutput]) {
NSLog (@"can't add reader output... die!");
return;
}
[assetReader addOutput: assetReaderOutput];
NSString *title = @"MyRec";
NSArray *docDirs = NSSearchPathForDirectoriesInDomains (NSDocumentDirectory, NSUserDomainMask, YES);
NSString *docDir = [docDirs objectAtIndex: 0];
NSString *wavFilePath = [[docDir stringByAppendingPathComponent:title]
stringByAppendingPathExtension:@"wav"];
if ([[NSFileManager defaultManager] fileExistsAtPath:wavFilePath])
{
[[NSFileManager defaultManager] removeItemAtPath:wavFilePath error:nil];
}
NSURL *exportURL = [NSURL fileURLWithPath:wavFilePath];
AVAssetWriter *assetWriter = [AVAssetWriter assetWriterWithURL:exportURL
fileType:AVFileTypeWAVE
error:&assetError];
if (assetError)
{
NSLog (@"error: %@", assetError);
return;
}
AudioChannelLayout channelLayout;
memset(&channelLayout, 0, sizeof(AudioChannelLayout));
channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
NSDictionary *outputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kAudioFormatLinearPCM], AVFormatIDKey,
[NSNumber numberWithFloat:44100.0], AVSampleRateKey,
[NSNumber numberWithInt:2], AVNumberOfChannelsKey,
[NSData dataWithBytes:&channelLayout length:sizeof(AudioChannelLayout)], AVChannelLayoutKey,
[NSNumber numberWithInt:16], AVLinearPCMBitDepthKey,
[NSNumber numberWithBool:NO], AVLinearPCMIsNonInterleaved,
[NSNumber numberWithBool:NO],AVLinearPCMIsFloatKey,
[NSNumber numberWithBool:NO], AVLinearPCMIsBigEndianKey,
nil];
AVAssetWriterInput *assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
outputSettings:outputSettings];
if ([assetWriter canAddInput:assetWriterInput])
{
[assetWriter addInput:assetWriterInput];
}
else
{
NSLog (@"can't add asset writer input... die!");
return;
}
assetWriterInput.expectsMediaDataInRealTime = NO;
[assetWriter startWriting];
[assetReader startReading];
AVAssetTrack *soundTrack = [songAsset.tracks objectAtIndex:0];
CMTime startTime = CMTimeMake (0, soundTrack.naturalTimeScale);
[assetWriter startSessionAtSourceTime: startTime];
__block UInt64 convertedByteCount = 0;
dispatch_queue_t mediaInputQueue = dispatch_queue_create("mediaInputQueue", NULL);
[assetWriterInput requestMediaDataWhenReadyOnQueue:mediaInputQueue
usingBlock: ^
{
while (assetWriterInput.readyForMoreMediaData)
{
CMSampleBufferRef nextBuffer = [assetReaderOutput copyNextSampleBuffer];
if (nextBuffer)
{
// append buffer
[assetWriterInput appendSampleBuffer: nextBuffer];
convertedByteCount += CMSampleBufferGetTotalSampleSize (nextBuffer);
CMTime progressTime = CMSampleBufferGetPresentationTimeStamp(nextBuffer);
CMTime sampleDuration = CMSampleBufferGetDuration(nextBuffer);
if (CMTIME_IS_NUMERIC(sampleDuration))
progressTime = CMTimeAdd(progressTime, sampleDuration);
float dProgress = CMTimeGetSeconds(progressTime) / CMTimeGetSeconds(songAsset.duration);
NSLog(@"%f", dProgress);
// copyNextSampleBuffer follows the Create Rule, so release the buffer
// or memory will grow with the size of the file
CFRelease(nextBuffer);
}
else
{
[assetWriterInput markAsFinished];
// finalize the file so the WAV header is written out
[assetWriter finishWriting];
[assetReader cancelReading];
break;
}
}
}];
}
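The NSLog above only prints the progress fraction. To drive an actual progress bar, the value has to reach UIKit on the main thread, because requestMediaDataWhenReadyOnQueue: runs the block on the background queue. A minimal sketch, assuming a hypothetical UIProgressView outlet called self.progressView:
// inside the if (nextBuffer) branch, right after dProgress is computed
dispatch_async(dispatch_get_main_queue(), ^{
    // UIKit must only be touched on the main thread
    self.progressView.progress = dProgress;
});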
Related
I have a 300 MB audio file, sampleaudio.caf, which I convert from .caf to .wav format. On iPhone I get the warnings "Received memory warning. Level=1" and "Received memory warning. Level=2", after which the app crashes; on iPad it works. How can I read small amounts of data through buffers while the conversion is happening?
This is my Code:
NSString *soundFilePath = [[NSBundle mainBundle] pathForResource:@"sampleaudio" ofType:@"caf"];
NSURL *assetURL = [NSURL fileURLWithPath:soundFilePath];
AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:assetURL options:nil];
NSError *assetError = nil;
AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:songAsset
error:&assetError];
if (assetError) {
NSLog (@"error: %@", assetError);
return;
}
AVAssetReaderOutput *assetReaderOutput = [AVAssetReaderAudioMixOutput
assetReaderAudioMixOutputWithAudioTracks:songAsset.tracks
audioSettings: nil];
if (! [assetReader canAddOutput: assetReaderOutput]) {
NSLog (@"can't add reader output... die!");
return;
}
[assetReader addOutput: assetReaderOutput];
NSString *strWavFileName = [NSString stringWithFormat:@"%@.wav", [[soundFilePath lastPathComponent] stringByDeletingPathExtension]];
NSString *wavFilePath = [delegate.strCassettePathSide stringByAppendingPathComponent:strWavFileName];
if ([[NSFileManager defaultManager] fileExistsAtPath:wavFilePath])
{
[[NSFileManager defaultManager] removeItemAtPath:wavFilePath error:nil];
}
NSURL *exportURL = [NSURL fileURLWithPath:wavFilePath];
AVAssetWriter *assetWriter = [AVAssetWriter assetWriterWithURL:exportURL
fileType:AVFileTypeWAVE
error:&assetError];
if (assetError)
{
NSLog (@"error: %@", assetError);
return;
}
AppDelegate *appDelegate =[[UIApplication sharedApplication]delegate];
int nSampleRate = [[appDelegate.dictWAVQuality valueForKey:@"samplerate"] integerValue];
AudioChannelLayout channelLayout;
memset(&channelLayout, 0, sizeof(AudioChannelLayout));
channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
NSDictionary *outputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kAudioFormatLinearPCM], AVFormatIDKey,
[NSNumber numberWithFloat:nSampleRate], AVSampleRateKey,
[NSNumber numberWithInt:2], AVNumberOfChannelsKey,
[NSData dataWithBytes:&channelLayout length:sizeof(AudioChannelLayout)], AVChannelLayoutKey,
[NSNumber numberWithInt:16], AVLinearPCMBitDepthKey,
[NSNumber numberWithBool:NO], AVLinearPCMIsNonInterleaved,
[NSNumber numberWithBool:NO],AVLinearPCMIsFloatKey,
[NSNumber numberWithBool:NO], AVLinearPCMIsBigEndianKey,
nil];
AVAssetWriterInput *assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
outputSettings:outputSettings];
if ([assetWriter canAddInput:assetWriterInput])
{
[assetWriter addInput:assetWriterInput];
}
else
{
NSLog(@"can't add asset writer input... die!");
return;
}
assetWriterInput.expectsMediaDataInRealTime = NO;
[assetWriter startWriting];
[assetReader startReading];
AVAssetTrack *soundTrack = [songAsset.tracks objectAtIndex:0];
CMTime startTime = CMTimeMake (0, soundTrack.naturalTimeScale);
[assetWriter startSessionAtSourceTime: startTime];
__block UInt64 convertedByteCount = 0;
dispatch_queue_t mediaInputQueue = dispatch_queue_create("mediaInputQueue", NULL);
[assetWriterInput requestMediaDataWhenReadyOnQueue:mediaInputQueue
usingBlock: ^
{
while (assetWriterInput.readyForMoreMediaData)
{
CMSampleBufferRef nextBuffer = [assetReaderOutput copyNextSampleBuffer];
if (nextBuffer)
{
// append buffer
[assetWriterInput appendSampleBuffer: nextBuffer];
convertedByteCount += CMSampleBufferGetTotalSampleSize (nextBuffer);
}
else
{
[assetWriterInput markAsFinished];
// [assetWriter finishWriting];
[assetReader cancelReading];
break;
}
}
}];
}
How can I fix this memory issue?
CMSampleBufferRef nextBuffer = [assetReaderOutput copyNextSampleBuffer];
if (nextBuffer)
{
// append buffer
[assetWriterInput appendSampleBuffer: nextBuffer];
convertedByteCount += CMSampleBufferGetTotalSampleSize (nextBuffer);
// you need to add the following lines:
CMSampleBufferInvalidate(nextBuffer);
CFRelease(nextBuffer);
nextBuffer = NULL;
}
https://developer.apple.com/library/mac/documentation/AVFoundation/Reference/AVAssetReaderOutput_Class/Reference/Reference.html#//apple_ref/occ/instm/AVAssetReaderOutput/copyNextSampleBuffer
Ownership follows "The Create Rule" in the Memory Management Programming Guide for Core Foundation.
You have to release the CMSampleBufferRef obtained from -(CMSampleBufferRef)copyNextSampleBuffer, or you will get a memory leak.
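Releasing each buffer is the main fix. On very large files it can also help to drain autoreleased temporaries once per iteration; here is a minimal sketch of the copy loop with both, reusing the assetReader, assetReaderOutput and assetWriterInput names from the code above (this assumes nothing else holds on to the buffers):
while (assetWriterInput.readyForMoreMediaData) {
    @autoreleasepool {
        CMSampleBufferRef nextBuffer = [assetReaderOutput copyNextSampleBuffer];
        if (!nextBuffer) {
            [assetWriterInput markAsFinished];
            [assetReader cancelReading];
            break;
        }
        [assetWriterInput appendSampleBuffer:nextBuffer];
        // the "copy" prefix means we own the buffer (Create Rule), so release it
        CFRelease(nextBuffer);
    }
}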
I am looking for a good audio format to save voice recordings locally and for transport over the network. Requirements are:
Decent quality; these clips, once received, will be listened to many times
Workflow should support trimming and fading before transport
Decent file size
This is my current approach for recording:
// SEE IMA4 vs M4A http://stackoverflow.com/questions/3509921/recorder-works-on-iphone-3gs-but-not-on-iphone-3g
NSDictionary *recordSettings =
[[NSDictionary alloc] initWithObjectsAndKeys:
[NSNumber numberWithFloat: 11025], AVSampleRateKey,
[NSNumber numberWithInt: kAudioFormatLinearPCM], AVFormatIDKey,
[NSNumber numberWithInt: 1], AVNumberOfChannelsKey,
[NSNumber numberWithBool:NO], AVLinearPCMIsFloatKey,
[NSNumber numberWithInt: AVAudioQualityMax], AVEncoderAudioQualityKey,
nil];
NSError *error = nil;
self.audioRecorder = [[ AVAudioRecorder alloc] initWithURL:self.recordingFile settings:recordSettings error:&error];
And approach for encoding:
NSString *file = [NSString stringWithFormat:@"recordingConverted%x.caf", arc4random()];
self.filePath = [NSTemporaryDirectory() stringByAppendingPathComponent: file];
NSFileManager *fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:self.filePath]) {
NSError *error;
if ([fileManager removeItemAtPath:self.filePath error:&error] == NO) {
NSLog(#"removeItemAtPath %# error:%#", self.filePath, error);
}
}
NSLog(#"IN: %#", self.recordingFile);
NSLog(#"OUT: %#", self.filePath);
AVAsset *avAsset = [AVAsset assetWithURL:self.recordingFile];
// get the first audio track
NSArray *tracks = [avAsset tracksWithMediaType:AVMediaTypeAudio];
if ([tracks count] == 0) return nil;
AVAssetTrack *track = [tracks objectAtIndex:0];
// create the export session
// no need for a retain here, the session will be retained by the
// completion handler since it is referenced there
AVAssetExportSession *exportSession = [AVAssetExportSession
exportSessionWithAsset:avAsset
presetName:AVAssetExportPresetAppleM4A];
if (nil == exportSession) return nil;
// create trim time range
CMTime startTime = CMTimeMake(self.speakingBeginTime*44100, 44100);
CMTime stopTime = CMTimeMake((self.speakingBeginTime+[self.duration doubleValue])*44100, 44100);
CMTimeRange exportTimeRange = CMTimeRangeFromTimeToTime(startTime, stopTime);
// create fade in time range
CMTime startFadeInTime = startTime;
CMTime endFadeInTime = CMTimeMake((self.speakingBeginTime+RECORDING_INTERVAL)*1.5*44100, 44100);
CMTimeRange fadeInTimeRange = CMTimeRangeFromTimeToTime(startFadeInTime,
endFadeInTime);
// setup audio mix
AVMutableAudioMix *exportAudioMix = [AVMutableAudioMix audioMix];
AVMutableAudioMixInputParameters *exportAudioMixInputParameters =
[AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:track];
[exportAudioMixInputParameters setVolumeRampFromStartVolume:0.0 toEndVolume:1.0
timeRange:fadeInTimeRange];
exportAudioMix.inputParameters = [NSArray
arrayWithObject:exportAudioMixInputParameters];
// configure export session output with all our parameters
exportSession.outputURL = [NSURL fileURLWithPath:self.filePath]; // output path
exportSession.outputFileType = AVFileTypeAppleM4A; // output file type
exportSession.timeRange = exportTimeRange; // trim time range
exportSession.audioMix = exportAudioMix; // fade in audio mix
// MAKE THE EXPORT SYNCHRONOUS
dispatch_semaphore_t semaphore = dispatch_semaphore_create(0);
[exportSession exportAsynchronouslyWithCompletionHandler:^{
dispatch_semaphore_signal(semaphore);
}];
dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER);
dispatch_release(semaphore);
if (AVAssetExportSessionStatusCompleted == exportSession.status) {
return self.filePath;
//NSLog(@"AVAssetExportSessionStatusCompleted");
} else if (AVAssetExportSessionStatusFailed == exportSession.status) {
// a failure may happen because of an event out of your control
// for example, an interruption like a phone call coming in
// make sure to handle this case appropriately
NSLog(@"AVAssetExportSessionStatusFailed %@", exportSession.error.localizedDescription);
} else {
NSLog(#"Export Session Status: %d", exportSession.status);
}
Currently, a 3-second audio clip comes to 62,228 bytes as PCM and 36,654 bytes encoded. It seems like I could do better.
I have found the guide here:
http://gamua.com/blog/2010/06/sound-on-ios-best-practices/
to be helpful in picking sound formats (especially the comments).
There are also some good examples here:
How do I record audio on iPhone with AVAudioRecorder?
especially the example with various formats for export, and this answer:
https://stackoverflow.com/a/3870385/2214106
which reduced file size significantly.
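For comparison, here is a hedged sketch of recording straight to AAC with AVAudioRecorder, which is typically where the big size win comes from; the sample rate and bit rate below are illustrative values for voice, not settings taken from the question:
NSDictionary *aacSettings = [NSDictionary dictionaryWithObjectsAndKeys:
    [NSNumber numberWithInt: kAudioFormatMPEG4AAC], AVFormatIDKey,  // hardware-assisted AAC
    [NSNumber numberWithFloat: 22050.0], AVSampleRateKey,           // plenty for speech
    [NSNumber numberWithInt: 1], AVNumberOfChannelsKey,             // mono halves the size
    [NSNumber numberWithInt: 32000], AVEncoderBitRateKey,           // roughly 4 KB per second
    nil];
NSError *error = nil;
// the destination URL should end in .m4a or .caf for this format
AVAudioRecorder *recorder = [[AVAudioRecorder alloc] initWithURL:self.recordingFile
                                                        settings:aacSettings
                                                           error:&error];
If you still need the trim and fade pass, the AVAssetExportSession code above works on an AAC source as well.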
I have created a movie using AVAssetWriter and AVMutableComposition from an array of images plus an audio track, and I am saving it to the Photo Library. Now I want to add effects to the images while creating the movie.
Is it possible to add transition effects (like CurlUp, CurlDown, etc.) to the images while creating the movie? If so, can you please show me how?
-(void)writeImageAsMovie:(NSArray *)array toPath:(NSString*)path size:(CGSize)size duration:(int)duration
{
CFDataRef imgData;
CGDataProviderRef imgDataProvider;
CGImageRef image1;
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);
NSString *documentsDirectoryPath = ([paths count] > 0) ? [paths objectAtIndex:0] : nil;
NSString *savedVideoPath1 = [documentsDirectoryPath stringByAppendingPathComponent:@"videoOutput1234videoOutput1234.mp4"];
if ([[NSFileManager defaultManager] fileExistsAtPath:savedVideoPath1])
[[NSFileManager defaultManager] removeItemAtPath:savedVideoPath1 error:nil];
NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path] fileType:AVFileTypeMPEG4 error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:size.width], AVVideoWidthKey,
[NSNumber numberWithInt:size.height], AVVideoHeightKey,
nil];
AVAssetWriterInput* writerInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings] retain];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput sourcePixelBufferAttributes:nil];
NSParameterAssert(writerInput);
NSParameterAssert([videoWriter canAddInput:writerInput]);
writerInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:writerInput];
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer = NULL;
int frameCount = 0;
for(int i = 0; i<[array count]; i++)
{
imgData = (CFDataRef)[array objectAtIndex:i];
imgDataProvider = CGDataProviderCreateWithCFData (imgData);
image1 = CGImageCreateWithPNGDataProvider(imgDataProvider, NULL, true, kCGRenderingIntentDefault);
buffer = [self pixelBufferFromCGImage:image1];
// release the Quartz objects created above, otherwise every frame leaks
CGImageRelease(image1);
CGDataProviderRelease(imgDataProvider);
BOOL append_ok = NO;
int j = 0;
while (!append_ok && j < 30)
{
if (adaptor.assetWriterInput.readyForMoreMediaData)
{
printf("appending %d attemp %d\n", frameCount, j);
CMTime frameTime;
int64_t value = i;
int32_t preferredTimeScale = 1;
frameTime = CMTimeMake(value, preferredTimeScale);
append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
}
else
{
printf("adaptor not ready %d, %d\n", frameCount, j);
[NSThread sleepForTimeInterval:0.1];
}
j++;
}
if (!append_ok) {
printf("error appending image %d times %d\n", frameCount, j);
}
frameCount++;
CVBufferRelease(buffer);
}
// note: the original checked "if (AVAssetWriterStatusCompleted)", which is a
// nonzero constant and therefore always true; compare the writer's status instead
if (videoWriter.status != AVAssetWriterStatusFailed) {
[writerInput markAsFinished];
[videoWriter finishWriting];
}
}
I'm making a video by merging a movie file built from a single image (only one frame) and an audio track of several seconds, using this tutorial.
On iPhone devices the video duration matches the audio duration, and I see the image throughout the whole video.
But when I share it to Android devices (through WhatsApp) and press play, the playback duration is the duration of the one-frame image movie. As a test, if I create a movie file from one image repeated a hundred times (at 10 fps, so ten seconds), the playback duration on Android devices is ten seconds.
I think Android devices play only the shortest track in the video, but if I modify the time range in addMutableTrackWithMediaType for the video to the audio duration, nothing happens.
Any advice?
Thank you for your support.
Here is all the code:
-(void) writeImagesToMovieAtPath:(NSString *)path withSize:(CGSize) size {
NSMutableArray *m_PictArray = [NSMutableArray arrayWithCapacity:1];
[m_PictArray addObject:[UIImage imageNamed:@"prueba.jpg"]];
NSString *documentsDirectoryPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSArray *dirContents = [[NSFileManager defaultManager] contentsOfDirectoryAtPath:documentsDirectoryPath error:nil];
for (NSString *tString in dirContents) {
if ([tString isEqualToString:@"essai.mp4"])
{
[[NSFileManager defaultManager] removeItemAtPath:[NSString stringWithFormat:@"%@/%@",documentsDirectoryPath,tString] error:nil];
}
}
NSLog(#"Write Started");
NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:path] fileType:AVFileTypeMPEG4
error:&error];
NSParameterAssert(videoWriter);
NSDictionary *codecSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:128000], AVVideoAverageBitRateKey,
[NSNumber numberWithInt:15],AVVideoMaxKeyFrameIntervalKey,
AVVideoProfileLevelH264Main30, AVVideoProfileLevelKey,
nil];
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
codecSettings,AVVideoCompressionPropertiesKey,
[NSNumber numberWithInt:size.width], AVVideoWidthKey,
[NSNumber numberWithInt:size.height], AVVideoHeightKey,
nil];
AVAssetWriterInput* videoWriterInput = [[AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings] retain];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
sourcePixelBufferAttributes:nil];
NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
videoWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:videoWriterInput];
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
//Video encoding
CVPixelBufferRef buffer = NULL;
//convert uiimage to CGImage.
int frameCount = 0;
for(int i = 0; i<[m_PictArray count]; i++)
{
buffer = [self newPixelBufferFromCGImage:[[m_PictArray objectAtIndex:i] CGImage] andSize:size];
BOOL append_ok = NO;
int j = 0;
while (!append_ok && j < 30)
{
if (adaptor.assetWriterInput.readyForMoreMediaData)
{
printf("appending %d attemp %d\n", frameCount, j);
CMTime frameTime = CMTimeMake(frameCount,(int32_t) 10);
/*
Float64 seconds = 1;
int32_t preferredTimeScale = 10;
CMTime frameTime = CMTimeMakeWithSeconds(seconds, preferredTimeScale);
*/
append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
CVPixelBufferPoolRef bufferPool = adaptor.pixelBufferPool;
NSParameterAssert(bufferPool != NULL);
[NSThread sleepForTimeInterval:0.05];
}
else
{
printf("adaptor not ready %d, %d\n", frameCount, j);
[NSThread sleepForTimeInterval:0.1];
}
j++;
}
if (!append_ok) {
printf("error appending image %d times %d\n", frameCount, j);
}
frameCount++;
CVBufferRelease(buffer);
}
[videoWriterInput markAsFinished];
[videoWriter finishWriting];
[videoWriterInput release];
[videoWriter release];
[m_PictArray removeAllObjects];
NSLog(#"Write Ended");
[self saveVideoToAlbum:path];
}
-(void)CompileFilesToMakeMovie {
NSLog(#"CompileFilesToMakeMovie");
AVMutableComposition* mixComposition = [AVMutableComposition composition];
NSString *documentsDirectoryPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
//Audio file in AAC
NSString* audio_inputFileName = @"zApY4o8QY.m4a";
NSString* audio_inputFilePath = [NSString stringWithFormat:@"%@/%@",[[NSBundle mainBundle] resourcePath],audio_inputFileName];
NSURL* audio_inputFileUrl = [NSURL fileURLWithPath:audio_inputFilePath];
NSString* video_inputFileName = @"essai.mp4";
NSString* video_inputFilePath = [NSString stringWithFormat:@"%@/%@",documentsDirectoryPath,video_inputFileName];
NSURL* video_inputFileUrl = [NSURL fileURLWithPath:video_inputFilePath];
NSString* outputFileName = @"outputFile.mov";
NSString* outputFilePath = [NSString stringWithFormat:@"%@/%@",documentsDirectoryPath,outputFileName];
NSURL* outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
CMTime nextClipStartTime = kCMTimeZero;
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];
//CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetLowQuality];
_assetExport.shouldOptimizeForNetworkUse = YES;
_assetExport.outputFileType = @"com.apple.quicktime-movie";
_assetExport.outputURL = outputFileUrl;
_assetExport.timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
[self saveVideoToAlbum:outputFilePath];
}
];
NSLog(#"CompileFilesToMakeMovie Finish");
}
- (void) saveVideoToAlbum:(NSString*)path {
NSLog(#"saveVideoToAlbum");
if(UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(path)){
UISaveVideoAtPathToSavedPhotosAlbum (path, self, #selector(video:didFinishSavingWithError: contextInfo:), nil);
}
}
-(void) video:(NSString *)videoPath didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo {
if(error)
NSLog(#"Exportado con error: %#", error);
else
NSLog(#"Exportado OK");
}
- (CVPixelBufferRef) newPixelBufferFromCGImage: (CGImageRef)image andSize:(CGSize)frameSize {
CGAffineTransform frameTransform = CGAffineTransformMake(0, 0, 0, 0, 0, 0);
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
nil];
CVPixelBufferRef pxbuffer = NULL;
CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, frameSize.width,
frameSize.height, kCVPixelFormatType_32ARGB, (CFDictionaryRef) options,
&pxbuffer);
NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
NSParameterAssert(pxdata != NULL);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata, frameSize.width,
frameSize.height, 8, 4*frameSize.width, rgbColorSpace,
kCGImageAlphaNoneSkipFirst);
NSParameterAssert(context);
//CGContextConcatCTM(context, frameTransform);
CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
CGImageGetHeight(image)), image);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
return (CVPixelBufferRef)pxbuffer;
}
Just fixed it!
I create the movie file by repeating the image X times, and then in the composition step I scale the video track to audioAsset.duration:
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
[a_compositionVideoTrack scaleTimeRange:video_timeRange toDuration:audioAsset.duration];
You need to repeat the image at least once so the track can be scaled, but if the movie has only 2 frames, Android plays only eight seconds; so I made a video with the image repeated 10 times, which allowed me to exceed the 45-second limit on video sharing in WhatsApp.
Inside the CompileFilesToMakeMovie method, use video_timeRange instead of audio_timeRange wherever it's needed...
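In other words, in writeImagesToMovieAtPath the single pixel buffer gets appended several times with increasing presentation times before being released. A minimal sketch of that change, where img stands for the one UIImage (a hypothetical local, not a name from the code above):
// append the same image ten times at 10 fps so the video track is long
// enough for scaleTimeRange:toDuration: to stretch it to the audio length
CVPixelBufferRef buffer = [self newPixelBufferFromCGImage:[img CGImage] andSize:size];
for (int i = 0; i < 10; i++) {
    while (!adaptor.assetWriterInput.readyForMoreMediaData)
        [NSThread sleepForTimeInterval:0.05];
    [adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(i, 10)];
}
CVBufferRelease(buffer);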
I'm trying to make a movie file from a picture array and an audio file. To make the movie from the picture array I used the great post by zoul here. Everything is perfect; I have my movie with my pictures. However, when I try to add an audio track I get lots of problems. To explain, here is my code:
When I call this method, the picture array and the song file are ready:
-(void) writeImagesToMovieAtPath:(NSString *) path withSize:(CGSize) size
{
NSString *documentsDirectoryPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSArray *dirContents = [[NSFileManager defaultManager] directoryContentsAtPath:documentsDirectoryPath];
for (NSString *tString in dirContents) {
if ([tString isEqualToString:@"essai.mp4"])
{
[[NSFileManager defaultManager] removeItemAtPath:[NSString stringWithFormat:@"%@/%@",documentsDirectoryPath,tString] error:nil];
}
}
NSLog(#"Write Started");
NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:path] fileType:AVFileTypeMPEG4
error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:size.width], AVVideoWidthKey,
[NSNumber numberWithInt:size.height], AVVideoHeightKey,
nil];
AudioChannelLayout channelLayout;
memset(&channelLayout, 0, sizeof(AudioChannelLayout));
channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
NSDictionary *audioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
[NSNumber numberWithFloat:44100.0] ,AVSampleRateKey,
[NSNumber numberWithInt: 1] ,AVNumberOfChannelsKey,
[NSNumber numberWithInt:192000],AVEncoderBitRateKey,
[NSData dataWithBytes:&channelLayout length:sizeof(AudioChannelLayout)],AVChannelLayoutKey,
nil];
AVAssetWriterInput* videoWriterInput = [[AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings] retain];
AVAssetWriterInput* audioWriterInput = [[AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeAudio
outputSettings:audioSettings] retain];
NSURL* fileURL = [[NSBundle mainBundle] URLForResource:@"Big_Voice_1" withExtension:@"caf"];
NSLog(@"%@",fileURL);
AVAsset *asset = [[AVURLAsset URLAssetWithURL:fileURL
options:nil] retain];
AVAssetReader *audioReader = [[AVAssetReader assetReaderWithAsset:asset error:&error] retain];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
sourcePixelBufferAttributes:nil];
AVAssetTrack* audioTrack = [asset.tracks objectAtIndex:0];
AVAssetReaderOutput *readerOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];
[audioReader addOutput:readerOutput];
NSParameterAssert(videoWriterInput);
NSParameterAssert(audioWriterInput);
NSParameterAssert([videoWriter canAddInput:audioWriterInput]);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
audioWriterInput.expectsMediaDataInRealTime = NO;
videoWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:audioWriterInput];
[videoWriter addInput:videoWriterInput];
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
//Video encoding
CVPixelBufferRef buffer = NULL;
//convert uiimage to CGImage.
int frameCount = 0;
for(int i = 0; i<20; i++)
{
buffer = [self pixelBufferFromCGImage:[[m_PictArray objectAtIndex:i] CGImage] andSize:size];
BOOL append_ok = NO;
int j = 0;
while (!append_ok && j < 30)
{
if (adaptor.assetWriterInput.readyForMoreMediaData)
{
printf("appending %d attemp %d\n", frameCount, j);
CMTime frameTime = CMTimeMake(frameCount,(int32_t) 10);
//CVPixelBufferPoolCreatePixelBuffer (kCFAllocatorDefault, adaptor.pixelBufferPool, &buffer);
append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
CVPixelBufferPoolRef bufferPool = adaptor.pixelBufferPool;
NSParameterAssert(bufferPool != NULL);
[NSThread sleepForTimeInterval:0.05];
}
else
{
printf("adaptor not ready %d, %d\n", frameCount, j);
[NSThread sleepForTimeInterval:0.1];
}
j++;
}
if (!append_ok) {
printf("error appending image %d times %d\n", frameCount, j);
}
frameCount++;
}
//Finish the session:
[videoWriterInput markAsFinished];
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer = NULL;
//Write all picture array in movie file.
int frameCount = 0;
for(int i = 0; i<[m_PictArray count]; i++)
{
buffer = [self pixelBufferFromCGImage:[[m_PictArray objectAtIndex:i] CGImage] andSize:size];
BOOL append_ok = NO;
int j = 0;
while (!append_ok && j < 30)
{
if (adaptor.assetWriterInput.readyForMoreMediaData)
{
printf("appending %d attemp %d\n", frameCount, j);
CMTime frameTime = CMTimeMake(frameCount,(int32_t) 10);
append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
CVPixelBufferPoolRef bufferPool = adaptor.pixelBufferPool;
NSParameterAssert(bufferPool != NULL);
[NSThread sleepForTimeInterval:0.05];
}
else
{
printf("adaptor not ready %d, %d\n", frameCount, j);
[NSThread sleepForTimeInterval:0.1];
}
j++;
}
if (!append_ok) {
printf("error appending image %d times %d\n", frameCount, j);
}
frameCount++;
}
//Finish writing picture:
[videoWriterInput markAsFinished];
After I finish writing the pictures to the movie file, I want to copy the audio into the file, so I do this:
[audioReader startReading];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
dispatch_queue_t mediaInputQueue = dispatch_queue_create("mediaInputQueue", NULL);
[audioWriterInput requestMediaDataWhenReadyOnQueue:mediaInputQueue usingBlock:^
{
NSLog(#"Request");
NSLog(#"Asset Writer ready :%d",audioWriterInput.readyForMoreMediaData);
while (audioWriterInput.readyForMoreMediaData) {
NSLog(#"Ready");
CMSampleBufferRef nextBuffer = [readerOutput copyNextSampleBuffer];
if (nextBuffer) {
NSLog(#"NextBuffer");
[audioWriterInput appendSampleBuffer:nextBuffer];
}
}
}
];
[audioWriterInput markAsFinished];
[videoWriter finishWriting];
However, readyForMoreMediaData on the audio AVAssetWriterInput is always NO.
My question: how do I add audio to a video file using AVFoundation?
Please, can someone help me by telling me whether I forgot something or did something wrong?
Thank you very much.
I finally found out how to make a movie from a picture array and an audio file. If you want to do the same thing, here is my code (be careful about memory):
First, make a movie file from your picture array using zoul's post here:
-(void) writeImagesToMovieAtPath:(NSString *) path withSize:(CGSize) size
{
NSString *documentsDirectoryPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSArray *dirContents = [[NSFileManager defaultManager] contentsOfDirectoryAtPath:documentsDirectoryPath error:nil];
for (NSString *tString in dirContents)
{
if ([tString isEqualToString:@"essai.mp4"])
{
[[NSFileManager defaultManager] removeItemAtPath:[NSString stringWithFormat:@"%@/%@",documentsDirectoryPath,tString] error:nil];
}
}
NSLog(#"Write Started");
NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:path] fileType:AVFileTypeMPEG4
error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:size.width], AVVideoWidthKey,
[NSNumber numberWithInt:size.height], AVVideoHeightKey,
nil];
AVAssetWriterInput* videoWriterInput = [[AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings] retain];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
sourcePixelBufferAttributes:nil];
NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
videoWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:videoWriterInput];
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
//Video encoding
CVPixelBufferRef buffer = NULL;
//convert uiimage to CGImage.
int frameCount = 0;
for(int i = 0; i<[m_PictArray count]; i++)
{
buffer = [self pixelBufferFromCGImage:[[m_PictArray objectAtIndex:i] CGImage] andSize:size];
BOOL append_ok = NO;
int j = 0;
while (!append_ok && j < 30)
{
if (adaptor.assetWriterInput.readyForMoreMediaData)
{
printf("appending %d attemp %d\n", frameCount, j);
CMTime frameTime = CMTimeMake(frameCount,(int32_t) 10);
append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
CVPixelBufferPoolRef bufferPool = adaptor.pixelBufferPool;
NSParameterAssert(bufferPool != NULL);
[NSThread sleepForTimeInterval:0.05];
}
else
{
printf("adaptor not ready %d, %d\n", frameCount, j);
[NSThread sleepForTimeInterval:0.1];
}
j++;
}
if (!append_ok)
{
printf("error appending image %d times %d\n", frameCount, j);
}
frameCount++;
CVBufferRelease(buffer);
}
[videoWriterInput markAsFinished];
[videoWriter finishWriting];
[videoWriterInput release];
[videoWriter release];
[m_PictArray removeAllObjects];
NSLog(#"Write Ended");
}
After that, you must combine the movie file and the audio file. To do this, follow my code:
-(void)CompileFilesToMakeMovie
{
AVMutableComposition* mixComposition = [AVMutableComposition composition];
NSString* audio_inputFileName = @"deformed.caf";
NSString* audio_inputFilePath = [Utilities documentsPath:audio_inputFileName];
NSURL* audio_inputFileUrl = [NSURL fileURLWithPath:audio_inputFilePath];
NSString* video_inputFileName = @"essai.mp4";
NSString* video_inputFilePath = [Utilities documentsPath:video_inputFileName];
NSURL* video_inputFileUrl = [NSURL fileURLWithPath:video_inputFilePath];
NSString* outputFileName = @"outputFile.mov";
NSString* outputFilePath = [Utilities documentsPath:outputFileName];
NSURL* outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
CMTime nextClipStartTime = kCMTimeZero;
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
//nextClipStartTime = CMTimeAdd(nextClipStartTime, a_timeRange.duration);
AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];
CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
_assetExport.outputFileType = @"com.apple.quicktime-movie";
_assetExport.outputURL = outputFileUrl;
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
[self saveVideoToAlbum:outputFilePath];
}
];
}
Sorry if there are some leaks; I'm still optimizing the memory handling.
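One way to plug the leaks mentioned above: since this is pre-ARC code, the objects created with alloc/init can be released once the export finishes, for example inside the completion handler (a sketch, not a tested fix):
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
    [self saveVideoToAlbum:outputFilePath];
    // balance the alloc/inits above once the export is done
    [videoAsset release];
    [audioAsset release];
    [_assetExport release];
}];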
The AVChannelLayoutKey should point to an NSData instance containing an AudioChannelLayout.
Yours points to an NSNumber.
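In other words, wrap the AudioChannelLayout struct in NSData before putting it in the settings dictionary. A sketch of the corrected entry (mono tag shown to match the AVNumberOfChannelsKey of 1; note that the code above pairs a stereo layout tag with one channel, which is worth fixing at the same time):
AudioChannelLayout channelLayout;
memset(&channelLayout, 0, sizeof(AudioChannelLayout));
channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Mono; // match the single channel
NSDictionary *audioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
    [NSNumber numberWithInt: kAudioFormatMPEG4AAC], AVFormatIDKey,
    [NSNumber numberWithFloat: 44100.0], AVSampleRateKey,
    [NSNumber numberWithInt: 1], AVNumberOfChannelsKey,
    [NSNumber numberWithInt: 192000], AVEncoderBitRateKey,
    // an NSData wrapping the struct, not an NSNumber
    [NSData dataWithBytes:&channelLayout length:sizeof(AudioChannelLayout)], AVChannelLayoutKey,
    nil];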