Merge audio and video when the lengths are different - iPhone

I'm making a video by merging a movie file built from a single image (only one frame) with an audio track several seconds long, using this tutorial.
On iPhone devices the video duration matches the audio duration, and I see the image for the whole video.
But when I share it to Android devices (through WhatsApp) and press play, the playback duration is the duration of the image-only movie (one frame). As a test, if I create a movie file from one image repeated a hundred times (at 10 fps, ten seconds), the playback duration on Android devices is ten seconds.
I think Android players only play up to the end of the shortest track in the video, but if I modify the time range of the video track added with addMutableTrackWithMediaType to match the audio duration, nothing happens.
Any advice?
Thanks for your support.
I put all the code here:
-(void)writeImagesToMovieAtPath:(NSString *)path withSize:(CGSize)size {
    NSMutableArray *m_PictArray = [NSMutableArray arrayWithCapacity:1];
    [m_PictArray addObject:[UIImage imageNamed:@"prueba.jpg"]];

    NSString *documentsDirectoryPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
    NSArray *dirContents = [[NSFileManager defaultManager] contentsOfDirectoryAtPath:documentsDirectoryPath error:nil];
    for (NSString *tString in dirContents) {
        if ([tString isEqualToString:@"essai.mp4"]) {
            [[NSFileManager defaultManager] removeItemAtPath:[NSString stringWithFormat:@"%@/%@", documentsDirectoryPath, tString] error:nil];
        }
    }

    NSLog(@"Write Started");
    NSError *error = nil;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
                                                           fileType:AVFileTypeMPEG4
                                                              error:&error];
    NSParameterAssert(videoWriter);

    NSDictionary *codecSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   [NSNumber numberWithInt:128000], AVVideoAverageBitRateKey,
                                   [NSNumber numberWithInt:15], AVVideoMaxKeyFrameIntervalKey,
                                   AVVideoProfileLevelH264Main30, AVVideoProfileLevelKey,
                                   nil];
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   codecSettings, AVVideoCompressionPropertiesKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                                   nil];
    AVAssetWriterInput *videoWriterInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                               outputSettings:videoSettings] retain];
    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                     sourcePixelBufferAttributes:nil];
    NSParameterAssert(videoWriterInput);
    NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
    videoWriterInput.expectsMediaDataInRealTime = YES;
    [videoWriter addInput:videoWriterInput];

    //Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    //Video encoding: convert each UIImage to a CGImage and append it.
    CVPixelBufferRef buffer = NULL;
    int frameCount = 0;
    for (int i = 0; i < [m_PictArray count]; i++) {
        buffer = [self newPixelBufferFromCGImage:[[m_PictArray objectAtIndex:i] CGImage] andSize:size];
        BOOL append_ok = NO;
        int j = 0;
        while (!append_ok && j < 30) {
            if (adaptor.assetWriterInput.readyForMoreMediaData) {
                printf("appending %d attempt %d\n", frameCount, j);
                CMTime frameTime = CMTimeMake(frameCount, (int32_t)10);
                /*
                Float64 seconds = 1;
                int32_t preferredTimeScale = 10;
                CMTime frameTime = CMTimeMakeWithSeconds(seconds, preferredTimeScale);
                */
                append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
                CVPixelBufferPoolRef bufferPool = adaptor.pixelBufferPool;
                NSParameterAssert(bufferPool != NULL);
                [NSThread sleepForTimeInterval:0.05];
            } else {
                printf("adaptor not ready %d, %d\n", frameCount, j);
                [NSThread sleepForTimeInterval:0.1];
            }
            j++;
        }
        if (!append_ok) {
            printf("error appending image %d times %d\n", frameCount, j);
        }
        frameCount++;
        CVBufferRelease(buffer);
    }

    [videoWriterInput markAsFinished];
    [videoWriter finishWriting];
    [videoWriterInput release];
    [videoWriter release];
    [m_PictArray removeAllObjects];
    NSLog(@"Write Ended");
    [self saveVideoToAlbum:path];
}
-(void)CompileFilesToMakeMovie {
    NSLog(@"CompileFilesToMakeMovie");
    AVMutableComposition *mixComposition = [AVMutableComposition composition];
    NSString *documentsDirectoryPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];

    //Audio file in AAC
    NSString *audio_inputFileName = @"zApY4o8QY.m4a";
    NSString *audio_inputFilePath = [NSString stringWithFormat:@"%@/%@", [[NSBundle mainBundle] resourcePath], audio_inputFileName];
    NSURL *audio_inputFileUrl = [NSURL fileURLWithPath:audio_inputFilePath];

    NSString *video_inputFileName = @"essai.mp4";
    NSString *video_inputFilePath = [NSString stringWithFormat:@"%@/%@", documentsDirectoryPath, video_inputFileName];
    NSURL *video_inputFileUrl = [NSURL fileURLWithPath:video_inputFilePath];

    NSString *outputFileName = @"outputFile.mov";
    NSString *outputFilePath = [NSString stringWithFormat:@"%@/%@", documentsDirectoryPath, outputFileName];
    NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
        [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];

    CMTime nextClipStartTime = kCMTimeZero;
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:video_inputFileUrl options:nil];
    AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audio_inputFileUrl options:nil];

    CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
    AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];

    CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
    AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];

    AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetLowQuality];
    _assetExport.shouldOptimizeForNetworkUse = YES;
    _assetExport.outputFileType = @"com.apple.quicktime-movie";
    _assetExport.outputURL = outputFileUrl;
    _assetExport.timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
    [_assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
        [self saveVideoToAlbum:outputFilePath];
    }];
    NSLog(@"CompileFilesToMakeMovie Finish");
}
-(void)saveVideoToAlbum:(NSString *)path {
    NSLog(@"saveVideoToAlbum");
    if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(path)) {
        UISaveVideoAtPathToSavedPhotosAlbum(path, self, @selector(video:didFinishSavingWithError:contextInfo:), nil);
    }
}

-(void)video:(NSString *)videoPath didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo {
    if (error)
        NSLog(@"Exported with error: %@", error);
    else
        NSLog(@"Exported OK");
}
- (CVPixelBufferRef)newPixelBufferFromCGImage:(CGImageRef)image andSize:(CGSize)frameSize {
    // Unused: the CGContextConcatCTM call below is commented out.
    CGAffineTransform frameTransform = CGAffineTransformMake(0, 0, 0, 0, 0, 0);
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, frameSize.width,
                                          frameSize.height, kCVPixelFormatType_32ARGB, (CFDictionaryRef)options,
                                          &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, frameSize.width,
                                                 frameSize.height, 8, 4 * frameSize.width, rgbColorSpace,
                                                 kCGImageAlphaNoneSkipFirst);
    NSParameterAssert(context);
    //CGContextConcatCTM(context, frameTransform);
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
                                           CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}

Just fixed!
I create the movie file repeating the image X times, and then in the composition step I scale the video track to the duration of the audio asset:

CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
[a_compositionVideoTrack scaleTimeRange:video_timeRange toDuration:audioAsset.duration];

You need to repeat the image at least once for the track to be scalable, but a movie with only 2 frames plays for just eight seconds on Android, so I made a video with the image repeated 10 times; that also lets me get past the 45-second limit on video sharing in WhatsApp. A sketch of that repeated-frame writer loop follows.
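A minimal sketch of the repeated-frame loop, reusing newPixelBufferFromCGImage and the adaptor from writeImagesToMovieAtPath above (image, adaptor, and size are assumed to be in scope; the repeat count of 10 matches the workaround just described):

// Append one image N times so the resulting video track can be scaled later.
// kRepeatCount = 10 matches the "image repeated 10 times" workaround above.
int kRepeatCount = 10;
CVPixelBufferRef buffer = [self newPixelBufferFromCGImage:[image CGImage] andSize:size];
for (int frameCount = 0; frameCount < kRepeatCount; frameCount++) {
    CMTime frameTime = CMTimeMake(frameCount, 10); // 10 fps timescale, as in the question
    while (!adaptor.assetWriterInput.readyForMoreMediaData)
        [NSThread sleepForTimeInterval:0.05];
    [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
}
CVBufferRelease(buffer);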

Inside the CompileFilesToMakeMovie method, use video_timeRange instead of audio_timeRange wherever it's needed...

Related

Convert .caf file to .wav file with progress bar in iOS

I record audio in .caf format and later need to convert it to .wav in order to upload the file to Dropbox. How can I convert a .caf file to .wav format in iOS? (I don't want to record the audio directly in .wav format.)
The conversion takes quite a long time, so I also need to show a conversion progress bar.
Try this:
-(void)convertToWav
{
    // Set up an AVAssetReader to read the source .caf file (here loaded from the app bundle).
    NSString *cafFilePath = [[NSBundle mainBundle] pathForResource:@"test" ofType:@"caf"];
    NSURL *assetURL = [NSURL fileURLWithPath:cafFilePath];
    AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:assetURL options:nil];

    NSError *assetError = nil;
    AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:songAsset error:&assetError];
    if (assetError) {
        NSLog(@"error: %@", assetError);
        return;
    }

    AVAssetReaderOutput *assetReaderOutput = [AVAssetReaderAudioMixOutput
                                              assetReaderAudioMixOutputWithAudioTracks:songAsset.tracks
                                              audioSettings:nil];
    if (![assetReader canAddOutput:assetReaderOutput]) {
        NSLog(@"can't add reader output... die!");
        return;
    }
    [assetReader addOutput:assetReaderOutput];

    NSString *title = @"MyRec";
    NSArray *docDirs = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *docDir = [docDirs objectAtIndex:0];
    NSString *wavFilePath = [[docDir stringByAppendingPathComponent:title]
                             stringByAppendingPathExtension:@"wav"];
    if ([[NSFileManager defaultManager] fileExistsAtPath:wavFilePath]) {
        [[NSFileManager defaultManager] removeItemAtPath:wavFilePath error:nil];
    }
    NSURL *exportURL = [NSURL fileURLWithPath:wavFilePath];
    AVAssetWriter *assetWriter = [AVAssetWriter assetWriterWithURL:exportURL
                                                          fileType:AVFileTypeWAVE
                                                             error:&assetError];
    if (assetError) {
        NSLog(@"error: %@", assetError);
        return;
    }

    AudioChannelLayout channelLayout;
    memset(&channelLayout, 0, sizeof(AudioChannelLayout));
    channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
    NSDictionary *outputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                    [NSNumber numberWithInt:kAudioFormatLinearPCM], AVFormatIDKey,
                                    [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                                    [NSNumber numberWithInt:2], AVNumberOfChannelsKey,
                                    [NSData dataWithBytes:&channelLayout length:sizeof(AudioChannelLayout)], AVChannelLayoutKey,
                                    [NSNumber numberWithInt:16], AVLinearPCMBitDepthKey,
                                    [NSNumber numberWithBool:NO], AVLinearPCMIsNonInterleaved,
                                    [NSNumber numberWithBool:NO], AVLinearPCMIsFloatKey,
                                    [NSNumber numberWithBool:NO], AVLinearPCMIsBigEndianKey,
                                    nil];
    AVAssetWriterInput *assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                                                              outputSettings:outputSettings];
    if ([assetWriter canAddInput:assetWriterInput]) {
        [assetWriter addInput:assetWriterInput];
    } else {
        NSLog(@"can't add asset writer input... die!");
        return;
    }
    assetWriterInput.expectsMediaDataInRealTime = NO;

    [assetWriter startWriting];
    [assetReader startReading];
    AVAssetTrack *soundTrack = [songAsset.tracks objectAtIndex:0];
    CMTime startTime = CMTimeMake(0, soundTrack.naturalTimeScale);
    [assetWriter startSessionAtSourceTime:startTime];

    __block UInt64 convertedByteCount = 0;
    dispatch_queue_t mediaInputQueue = dispatch_queue_create("mediaInputQueue", NULL);
    [assetWriterInput requestMediaDataWhenReadyOnQueue:mediaInputQueue usingBlock:^
    {
        while (assetWriterInput.readyForMoreMediaData) {
            CMSampleBufferRef nextBuffer = [assetReaderOutput copyNextSampleBuffer];
            if (nextBuffer) {
                // Append the buffer and derive a progress value from its timestamp.
                [assetWriterInput appendSampleBuffer:nextBuffer];
                convertedByteCount += CMSampleBufferGetTotalSampleSize(nextBuffer);
                CMTime progressTime = CMSampleBufferGetPresentationTimeStamp(nextBuffer);
                CMTime sampleDuration = CMSampleBufferGetDuration(nextBuffer);
                if (CMTIME_IS_NUMERIC(sampleDuration))
                    progressTime = CMTimeAdd(progressTime, sampleDuration);
                float dProgress = CMTimeGetSeconds(progressTime) / CMTimeGetSeconds(songAsset.duration);
                NSLog(@"%f", dProgress);
                CFRelease(nextBuffer); // copyNextSampleBuffer returns a retained buffer
            } else {
                // No more samples: finish the writer so the output file is finalized.
                [assetWriterInput markAsFinished];
                [assetWriter finishWriting];
                [assetReader cancelReading];
                break;
            }
        }
    }];
}
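To drive an actual progress bar instead of the NSLog above, push dProgress to the UI on the main thread, since UIKit must only be touched there. A small sketch, where progressView is an assumed UIProgressView outlet that is not part of the original answer:

// Replace the NSLog(@"%f", dProgress) line above with:
dispatch_async(dispatch_get_main_queue(), ^{
    self.progressView.progress = dProgress; // progressView: hypothetical UIProgressView outlet
});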

How to add effects (like CurlUp, CurlDown, etc.) to a movie created using Objective-C on iPhone

I have created a movie using AVAssetWriter and AVMutableComposition with an image array and an audio song, and I am saving it to the Photo Library... Now I want to add effects to those images while creating the movie...
Is it possible to add effects (like CurlUp, CurlDown, etc.) to the images while creating the movie? If so, can you please provide an answer.
-(void)writeImageAsMovie:(NSArray *)array toPath:(NSString *)path size:(CGSize)size duration:(int)duration
{
    CFDataRef imgData;
    CGDataProviderRef imgDataProvider;
    CGImageRef image1;

    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectoryPath = ([paths count] > 0) ? [paths objectAtIndex:0] : nil;
    NSString *savedVideoPath1 = [documentsDirectoryPath stringByAppendingPathComponent:@"videoOutput1234videoOutput1234.mp4"];
    if ([[NSFileManager defaultManager] fileExistsAtPath:savedVideoPath1])
        [[NSFileManager defaultManager] removeItemAtPath:savedVideoPath1 error:nil];

    NSError *error = nil;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path] fileType:AVFileTypeMPEG4 error:&error];
    NSParameterAssert(videoWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                                   nil];
    AVAssetWriterInput *writerInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings] retain];
    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput sourcePixelBufferAttributes:nil];
    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);
    writerInput.expectsMediaDataInRealTime = YES;
    [videoWriter addInput:writerInput];

    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    CVPixelBufferRef buffer = NULL;
    int frameCount = 0;
    for (int i = 0; i < [array count]; i++) {
        imgData = (CFDataRef)[array objectAtIndex:i];
        imgDataProvider = CGDataProviderCreateWithCFData(imgData);
        image1 = CGImageCreateWithPNGDataProvider(imgDataProvider, NULL, true, kCGRenderingIntentDefault);
        buffer = [self pixelBufferFromCGImage:image1];
        BOOL append_ok = NO;
        int j = 0;
        while (!append_ok && j < 30) {
            if (adaptor.assetWriterInput.readyForMoreMediaData) {
                printf("appending %d attempt %d\n", frameCount, j);
                CMTime frameTime;
                int64_t value = i;
                int32_t preferredTimeScale = 1;
                frameTime = CMTimeMake(value, preferredTimeScale);
                append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
            } else {
                printf("adaptor not ready %d, %d\n", frameCount, j);
                [NSThread sleepForTimeInterval:0.1];
            }
            j++;
        }
        if (!append_ok) {
            printf("error appending image %d times %d\n", frameCount, j);
        }
        frameCount++;
        CVBufferRelease(buffer);
        // Release the per-frame CGImage and data provider created above to avoid leaking.
        CGImageRelease(image1);
        CGDataProviderRelease(imgDataProvider);
    }

    // `if (AVAssetWriterStatusCompleted)` tested an enum constant (always true);
    // just finish the writer unconditionally.
    [writerInput markAsFinished];
    [videoWriter finishWriting];
}
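Note that CurlUp/CurlDown are UIView transition animations, which AVAssetWriter cannot capture; to bake an effect into the movie, the intermediate frames have to be rendered by hand. As an illustration only (a cross-dissolve rather than a curl, and a sketch rather than part of the original post), blended in-between frames can be generated with Core Graphics and then appended like any other image:

// Sketch: produce `steps` cross-dissolve frames between two images by drawing
// the second over the first with increasing alpha. Each returned UIImage can
// be converted to a pixel buffer and appended as in the loop above.
- (NSArray *)dissolveFramesFrom:(UIImage *)a to:(UIImage *)b size:(CGSize)size steps:(int)steps
{
    NSMutableArray *frames = [NSMutableArray arrayWithCapacity:steps];
    for (int s = 0; s < steps; s++) {
        CGFloat alpha = (steps > 1) ? (CGFloat)s / (CGFloat)(steps - 1) : 1.0f;
        UIGraphicsBeginImageContext(size);
        [a drawInRect:CGRectMake(0, 0, size.width, size.height)];
        [b drawInRect:CGRectMake(0, 0, size.width, size.height) blendMode:kCGBlendModeNormal alpha:alpha];
        [frames addObject:UIGraphicsGetImageFromCurrentImageContext()];
        UIGraphicsEndImageContext();
    }
    return frames;
}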

Make movie file with picture array and song file, using AVAsset

I'm trying to make a movie file using a picture array and an audio file. To make the movie from the picture array I used the big post by zoul here. Everything is perfect; I have my movie with my pictures. However, when I try to add some audio tracks I have lots of problems. To explain, here is my code.
When I call this method, the picture array and song file are ready:
-(void)writeImagesToMovieAtPath:(NSString *)path withSize:(CGSize)size
{
    NSString *documentsDirectoryPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
    NSArray *dirContents = [[NSFileManager defaultManager] directoryContentsAtPath:documentsDirectoryPath];
    for (NSString *tString in dirContents) {
        if ([tString isEqualToString:@"essai.mp4"]) {
            [[NSFileManager defaultManager] removeItemAtPath:[NSString stringWithFormat:@"%@/%@", documentsDirectoryPath, tString] error:nil];
        }
    }

    NSLog(@"Write Started");
    NSError *error = nil;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
                                                           fileType:AVFileTypeMPEG4
                                                              error:&error];
    NSParameterAssert(videoWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                                   nil];

    AudioChannelLayout channelLayout;
    memset(&channelLayout, 0, sizeof(AudioChannelLayout));
    channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
    NSDictionary *audioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                                   [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                                   [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                                   [NSNumber numberWithInt:192000], AVEncoderBitRateKey,
                                   [NSData dataWithBytes:&channelLayout length:sizeof(AudioChannelLayout)], AVChannelLayoutKey,
                                   nil];

    AVAssetWriterInput *videoWriterInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                               outputSettings:videoSettings] retain];
    AVAssetWriterInput *audioWriterInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                                                               outputSettings:audioSettings] retain];

    NSURL *fileURL = [[NSBundle mainBundle] URLForResource:@"Big_Voice_1" withExtension:@"caf"];
    NSLog(@"%@", fileURL);
    AVAsset *asset = [[AVURLAsset URLAssetWithURL:fileURL options:nil] retain];
    AVAssetReader *audioReader = [[AVAssetReader assetReaderWithAsset:asset error:&error] retain];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                     sourcePixelBufferAttributes:nil];
    AVAssetTrack *audioTrack = [asset.tracks objectAtIndex:0];
    AVAssetReaderOutput *readerOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];
    [audioReader addOutput:readerOutput];

    NSParameterAssert(videoWriterInput);
    NSParameterAssert(audioWriterInput);
    NSParameterAssert([videoWriter canAddInput:audioWriterInput]);
    NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
    audioWriterInput.expectsMediaDataInRealTime = NO;
    videoWriterInput.expectsMediaDataInRealTime = YES;
    [videoWriter addInput:audioWriterInput];
    [videoWriter addInput:videoWriterInput];

    //Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    //Video encoding
    CVPixelBufferRef buffer = NULL;
    //convert uiimage to CGImage.
    int frameCount = 0;
    for (int i = 0; i < 20; i++) {
        buffer = [self pixelBufferFromCGImage:[[m_PictArray objectAtIndex:i] CGImage] andSize:size];
        BOOL append_ok = NO;
        int j = 0;
        while (!append_ok && j < 30) {
            if (adaptor.assetWriterInput.readyForMoreMediaData) {
                printf("appending %d attempt %d\n", frameCount, j);
                CMTime frameTime = CMTimeMake(frameCount, (int32_t)10);
                //CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, adaptor.pixelBufferPool, &buffer);
                append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
                CVPixelBufferPoolRef bufferPool = adaptor.pixelBufferPool;
                NSParameterAssert(bufferPool != NULL);
                [NSThread sleepForTimeInterval:0.05];
            } else {
                printf("adaptor not ready %d, %d\n", frameCount, j);
                [NSThread sleepForTimeInterval:0.1];
            }
            j++;
        }
        if (!append_ok) {
            printf("error appending image %d times %d\n", frameCount, j);
        }
        frameCount++;
    }

    //Finish the session:
    [videoWriterInput markAsFinished];

    //Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];
    CVPixelBufferRef buffer = NULL;

    //Write all picture array in movie file.
    int frameCount = 0;
    for (int i = 0; i < [m_PictArray count]; i++) {
        buffer = [self pixelBufferFromCGImage:[[m_PictArray objectAtIndex:i] CGImage] andSize:size];
        BOOL append_ok = NO;
        int j = 0;
        while (!append_ok && j < 30) {
            if (adaptor.assetWriterInput.readyForMoreMediaData) {
                printf("appending %d attempt %d\n", frameCount, j);
                CMTime frameTime = CMTimeMake(frameCount, (int32_t)10);
                append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
                CVPixelBufferPoolRef bufferPool = adaptor.pixelBufferPool;
                NSParameterAssert(bufferPool != NULL);
                [NSThread sleepForTimeInterval:0.05];
            } else {
                printf("adaptor not ready %d, %d\n", frameCount, j);
                [NSThread sleepForTimeInterval:0.1];
            }
            j++;
        }
        if (!append_ok) {
            printf("error appending image %d times %d\n", frameCount, j);
        }
        frameCount++;
    }

    //Finish writing picture:
    [videoWriterInput markAsFinished];
I finish writing the pictures into the movie file, and then I want to copy the audio into the file, so I do this:
    [audioReader startReading];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];
    dispatch_queue_t mediaInputQueue = dispatch_queue_create("mediaInputQueue", NULL);
    [audioWriterInput requestMediaDataWhenReadyOnQueue:mediaInputQueue usingBlock:^
    {
        NSLog(@"Request");
        NSLog(@"Asset Writer ready :%d", audioWriterInput.readyForMoreMediaData);
        while (audioWriterInput.readyForMoreMediaData) {
            NSLog(@"Ready");
            CMSampleBufferRef nextBuffer = [readerOutput copyNextSampleBuffer];
            if (nextBuffer) {
                NSLog(@"NextBuffer");
                [audioWriterInput appendSampleBuffer:nextBuffer];
            }
        }
    }];
    [audioWriterInput markAsFinished];
    [videoWriter finishWriting];
However, the readyForMoreMediaData status of the audio AVAssetWriterInput is always NO.
My question: how do I add audio to a video file using AVFoundation?
So please, can someone help me by telling me if I forgot something or if something is wrong?
Thank you very much
I finally found how to make a movie with a picture array and an audio file. So if you want to do the same thing, I put my code here (be careful about memory):
First make a movie file with your picture array, using zoul's post here:
-(void)writeImagesToMovieAtPath:(NSString *)path withSize:(CGSize)size
{
    NSString *documentsDirectoryPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
    NSArray *dirContents = [[NSFileManager defaultManager] contentsOfDirectoryAtPath:documentsDirectoryPath error:nil];
    for (NSString *tString in dirContents) {
        if ([tString isEqualToString:@"essai.mp4"]) {
            [[NSFileManager defaultManager] removeItemAtPath:[NSString stringWithFormat:@"%@/%@", documentsDirectoryPath, tString] error:nil];
        }
    }

    NSLog(@"Write Started");
    NSError *error = nil;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
                                                           fileType:AVFileTypeMPEG4
                                                              error:&error];
    NSParameterAssert(videoWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                                   nil];
    AVAssetWriterInput *videoWriterInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                               outputSettings:videoSettings] retain];
    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                     sourcePixelBufferAttributes:nil];
    NSParameterAssert(videoWriterInput);
    NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
    videoWriterInput.expectsMediaDataInRealTime = YES;
    [videoWriter addInput:videoWriterInput];

    //Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    //Video encoding
    CVPixelBufferRef buffer = NULL;
    //convert uiimage to CGImage.
    int frameCount = 0;
    for (int i = 0; i < [m_PictArray count]; i++) {
        buffer = [self pixelBufferFromCGImage:[[m_PictArray objectAtIndex:i] CGImage] andSize:size];
        BOOL append_ok = NO;
        int j = 0;
        while (!append_ok && j < 30) {
            if (adaptor.assetWriterInput.readyForMoreMediaData) {
                printf("appending %d attempt %d\n", frameCount, j);
                CMTime frameTime = CMTimeMake(frameCount, (int32_t)10);
                append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
                CVPixelBufferPoolRef bufferPool = adaptor.pixelBufferPool;
                NSParameterAssert(bufferPool != NULL);
                [NSThread sleepForTimeInterval:0.05];
            } else {
                printf("adaptor not ready %d, %d\n", frameCount, j);
                [NSThread sleepForTimeInterval:0.1];
            }
            j++;
        }
        if (!append_ok) {
            printf("error appending image %d times %d\n", frameCount, j);
        }
        frameCount++;
        CVBufferRelease(buffer);
    }

    [videoWriterInput markAsFinished];
    [videoWriter finishWriting];
    [videoWriterInput release];
    [videoWriter release];
    [m_PictArray removeAllObjects];
    NSLog(@"Write Ended");
}
After that, you must put the movie file and the audio file together. To do this, follow my code:
-(void)CompileFilesToMakeMovie
{
    AVMutableComposition *mixComposition = [AVMutableComposition composition];

    NSString *audio_inputFileName = @"deformed.caf";
    NSString *audio_inputFilePath = [Utilities documentsPath:audio_inputFileName];
    NSURL *audio_inputFileUrl = [NSURL fileURLWithPath:audio_inputFilePath];

    NSString *video_inputFileName = @"essai.mp4";
    NSString *video_inputFilePath = [Utilities documentsPath:video_inputFileName];
    NSURL *video_inputFileUrl = [NSURL fileURLWithPath:video_inputFilePath];

    NSString *outputFileName = @"outputFile.mov";
    NSString *outputFilePath = [Utilities documentsPath:outputFileName];
    NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
        [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];

    CMTime nextClipStartTime = kCMTimeZero;
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:video_inputFileUrl options:nil];
    CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
    AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
    //nextClipStartTime = CMTimeAdd(nextClipStartTime, a_timeRange.duration);

    AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audio_inputFileUrl options:nil];
    CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
    AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];

    AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    _assetExport.outputFileType = @"com.apple.quicktime-movie";
    _assetExport.outputURL = outputFileUrl;
    [_assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
        [self saveVideoToAlbum:outputFilePath];
    }];
}
Sorry if there are some leaks; I'm still optimizing the memory management.
The AVChannelLayoutKey should point to an NSData instance containing an AudioChannelLayout.
Yours points to an NSNumber.
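For reference, a correct value for that key wraps an AudioChannelLayout struct in NSData, as the question's own audioSettings dictionary already does:

// AVChannelLayoutKey expects NSData wrapping an AudioChannelLayout, not an NSNumber.
AudioChannelLayout channelLayout;
memset(&channelLayout, 0, sizeof(AudioChannelLayout));
channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
NSData *layoutData = [NSData dataWithBytes:&channelLayout length:sizeof(AudioChannelLayout)];
// ...then pass layoutData as the value for AVChannelLayoutKey in the audio settings dictionary.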

AVFoundation + AssetWriter: Generate Movie With Images and Audio

I have to export a movie from my iPhone application which contains UIImages from an NSArray, and add some audio files in .caf format that have to start at pre-specified times.
Now I have been able to use AVAssetWriter (after going through many questions and answers on this and other sites) to export the video portion containing the images, but I can't seem to find a way to add the audio files to complete the movie.
Here is what I have got so far:
-(void)writeImagesToMovieAtPath:(NSString *)path withSize:(CGSize)size
{
    NSLog(@"Write Started");
    NSError *error = nil;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                                   nil];
    AVAssetWriterInput *videoWriterInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                               outputSettings:videoSettings] retain];
    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                     sourcePixelBufferAttributes:nil];
    NSParameterAssert(videoWriterInput);
    NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
    videoWriterInput.expectsMediaDataInRealTime = YES;
    [videoWriter addInput:videoWriterInput];

    //Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    //Convert each UIImage to a CGImage and append it.
    CVPixelBufferRef buffer = NULL;
    int frameCount = 0;
    for (UIImage *img in imageArray) {
        buffer = [self pixelBufferFromCGImage:[img CGImage] andSize:size];
        BOOL append_ok = NO;
        int j = 0;
        while (!append_ok && j < 30) {
            if (adaptor.assetWriterInput.readyForMoreMediaData) {
                printf("appending %d attempt %d\n", frameCount, j);
                CMTime frameTime = CMTimeMake(frameCount, (int32_t)kRecordingFPS);
                append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
                if (buffer)
                    CVBufferRelease(buffer);
                [NSThread sleepForTimeInterval:0.05];
            } else {
                printf("adaptor not ready %d, %d\n", frameCount, j);
                [NSThread sleepForTimeInterval:0.1];
            }
            j++;
        }
        if (!append_ok) {
            printf("error appending image %d times %d\n", frameCount, j);
        }
        frameCount++;
    }

    //Finish the session:
    [videoWriterInput markAsFinished];
    [videoWriter finishWriting];
    NSLog(@"Write Ended");
}
And now the code for pixelBufferFromCGImage
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image andSize:(CGSize)size
{
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width,
                                          size.height, kCVPixelFormatType_32ARGB, (CFDictionaryRef)options,
                                          &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width,
                                                 size.height, 8, 4 * size.width, rgbColorSpace,
                                                 kCGImageAlphaNoneSkipFirst);
    NSParameterAssert(context);
    CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
                                           CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}
So can you help me out with how to add the audio files, how to make the buffers for them, the adaptor, the input settings, etc.?
If this approach might cause a problem, guide me on how to use an AVMutableComposition to export the image array as video.

I ended up exporting the video separately using the above code, and added the audio files separately using AVMutableComposition & AVAssetExportSession.
Here is the code:
-(void)addAudioToFileAtPath:(NSString *)filePath toPath:(NSString *)outFilePath
{
    // audioInfoArray, audioFilePath, audioDuration, kRecordingFPS and
    // mutableVideoComposition are defined elsewhere in the project.
    NSError *error = nil;
    AVMutableComposition *composition = [AVMutableComposition composition];

    AVURLAsset *videoAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:filePath] options:nil];
    AVAssetTrack *videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                                 preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                   ofTrack:videoAssetTrack
                                    atTime:kCMTimeZero
                                     error:&error];

    CMTime audioStartTime = kCMTimeZero;
    for (NSDictionary *audioInfo in audioInfoArray) {
        NSString *pathString = [audioInfo objectForKey:audioFilePath];
        AVURLAsset *urlAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:pathString] options:nil];
        AVAssetTrack *audioAssetTrack = [[urlAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
        AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                                     preferredTrackID:kCMPersistentTrackID_Invalid];
        [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, urlAsset.duration)
                                       ofTrack:audioAssetTrack
                                        atTime:audioStartTime
                                         error:&error];
        audioStartTime = CMTimeAdd(audioStartTime, CMTimeMake((int)(([[audioInfo objectForKey:audioDuration] floatValue] * kRecordingFPS) + 0.5), kRecordingFPS));
    }

    AVAssetExportSession *assetExport = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetMediumQuality];
    assetExport.videoComposition = mutableVideoComposition;
    assetExport.outputFileType = AVFileTypeQuickTimeMovie; // @"com.apple.quicktime-movie"
    assetExport.outputURL = [NSURL fileURLWithPath:outFilePath];
    [assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
        switch (assetExport.status) {
            case AVAssetExportSessionStatusCompleted:
                // export complete
                NSLog(@"Export Complete");
                break;
            case AVAssetExportSessionStatusFailed:
                NSLog(@"Export Failed");
                NSLog(@"ExportSessionError: %@", [assetExport.error localizedDescription]);
                // export error (see assetExport.error)
                break;
            case AVAssetExportSessionStatusCancelled:
                NSLog(@"Export Cancelled");
                NSLog(@"ExportSessionError: %@", [assetExport.error localizedDescription]);
                // export cancelled
                break;
        }
    }];
}
Can you please replace the "for" loop with a single "audioInfo" dictionary which has all the values that need to be set, so that it becomes more copy-paste friendly? :)
If you just want to add a single audio file, the following code should replace the for loop:
NSString *pathString = [self getAudioFilePath];
AVURLAsset *urlAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:pathString] options:nil];
AVAssetTrack *audioAssetTrack = [[urlAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                             preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, urlAsset.duration) ofTrack:audioAssetTrack atTime:kCMTimeZero error:&error];

How to convert image sequences (PNG) into Animation File (gif/mov/others) (iPhone)?

I have a sequence of images (PNG format) in my iPhone app, saved in the app's sandboxed Documents folder. How do I create an animation file from these images?
I would prefer an animated GIF, but other types like video (mov/avi/etc.) are OK too. I don't need audio, just the images. Thanks.
This works for me
-(void)viewDidLoad {
    [super viewDidLoad];
    // Here you can replace with your images.
    // NOTE: writeImageAsMovie calls -CGImage on the array elements, so the array
    // should contain UIImage objects (e.g. [UIImage imageNamed:@"..."]), not file-name strings.
    imagesArray = [[NSMutableArray alloc] initWithObjects:@"BALL_EASY.png", @"Rat.png", @"HARD_CARD.png", @"EASY_CARD.png", @"MEDIUM_CARD.png", @"BattleMapSplashScreen.png", @"MENU_Bee.png", nil];
    NSString *fileName = @"myMovie.txt";
    NSArray *documentPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDir = [documentPaths objectAtIndex:0];
    NSString *filePath = [documentsDir stringByAppendingPathComponent:fileName];
    [self writeImageAsMovie:imagesArray toPath:filePath size:CGSizeMake(320.0, 440.0)];
}
-(CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image
{
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, self.view.frame.size.width,
                                          self.view.frame.size.height, kCVPixelFormatType_32ARGB, (CFDictionaryRef)options,
                                          &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, self.view.frame.size.width,
                                                 self.view.frame.size.height, 8, 4 * self.view.frame.size.width, rgbColorSpace,
                                                 kCGImageAlphaNoneSkipFirst);
    NSParameterAssert(context);
    CGContextConcatCTM(context, self.view.transform);
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
                                           CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}
-(void)writeImageAsMovie:(NSArray *)array toPath:(NSString *)path size:(CGSize)size
{
    NSMutableDictionary *attributes = [[NSMutableDictionary alloc] init];
    [attributes setObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB] forKey:(NSString *)kCVPixelBufferPixelFormatTypeKey];
    [attributes setObject:[NSNumber numberWithUnsignedInt:320] forKey:(NSString *)kCVPixelBufferWidthKey];
    [attributes setObject:[NSNumber numberWithUnsignedInt:416] forKey:(NSString *)kCVPixelBufferHeightKey];

    NSError *error = nil;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                                   nil];
    AVAssetWriterInput *writerInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                          outputSettings:videoSettings] retain];
    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                     sourcePixelBufferAttributes:attributes];
    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);
    [videoWriter addInput:writerInput];

    //Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    CVPixelBufferRef buffer = NULL;
    buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:0] CGImage]];
    [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];

    //Write samples:
    for (int i = 0; i < [array count]; i++) {
        if ([writerInput isReadyForMoreMediaData]) {
            NSLog(@"inside for loop %d", i);
            CMTime frameTime = CMTimeMake(1, 20);
            CMTime lastTime = CMTimeMake(i, 20); // i is from 0 to 19 of the loop above
            CMTime presentTime = CMTimeAdd(lastTime, frameTime);
            buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:i] CGImage]];
            [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
            if (buffer)
                CVBufferRelease(buffer);
        } else {
            NSLog(@"error");
            i--;
        }
    }

    //Finish the session:
    [writerInput markAsFinished];
    [videoWriter finishWriting];

    NSURL *pathURL = [NSURL fileURLWithPath:path];
    AVURLAsset *url = [[AVURLAsset alloc] initWithURL:pathURL options:nil];
    [url release];
    CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
    [videoWriter release];
    [writerInput release];
}
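For the animated GIF option mentioned in the question, the ImageIO framework can write one directly instead of going through AVAssetWriter. A minimal sketch, assuming imagesArray holds UIImage objects (not file names) and documentsDir is the Documents path from viewDidLoad above; the 0.1 s frame delay is an arbitrary choice:

#import <ImageIO/ImageIO.h>
#import <MobileCoreServices/MobileCoreServices.h> // for kUTTypeGIF

NSURL *gifURL = [NSURL fileURLWithPath:[documentsDir stringByAppendingPathComponent:@"myMovie.gif"]];
CGImageDestinationRef dest = CGImageDestinationCreateWithURL((CFURLRef)gifURL, kUTTypeGIF, [imagesArray count], NULL);

// File-level properties: a loop count of 0 means loop forever.
NSDictionary *gifProps = [NSDictionary dictionaryWithObject:
    [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:0] forKey:(NSString *)kCGImagePropertyGIFLoopCount]
    forKey:(NSString *)kCGImagePropertyGIFDictionary];
CGImageDestinationSetProperties(dest, (CFDictionaryRef)gifProps);

// Per-frame properties: 0.1 seconds per frame.
NSDictionary *frameProps = [NSDictionary dictionaryWithObject:
    [NSDictionary dictionaryWithObject:[NSNumber numberWithFloat:0.1f] forKey:(NSString *)kCGImagePropertyGIFDelayTime]
    forKey:(NSString *)kCGImagePropertyGIFDictionary];
for (UIImage *img in imagesArray) {
    CGImageDestinationAddImage(dest, [img CGImage], (CFDictionaryRef)frameProps);
}
CGImageDestinationFinalize(dest);
CFRelease(dest);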