iOS: lowering the bitrate of an MPMediaItem containing iPod music - iphone

I'm making an app that adds theme music to a video.
Some users complain that if their music is in Apple Lossless format, the resulting video is too large.
I found that this is because the AVMutableComposition I use simply copies the original music format into the video I generate.
So is there any way to lower the bitrate of the music in an MPMediaItem, or to change the format it is encoded in?
Here is a snippet of the code I use to add the music to the video:
AVMutableComposition* mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
atTime:kCMTimeZero error:nil];
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
atTime:kCMTimeZero error:nil];
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPresetPassthrough];
NSURL *exportUrl = [NSURL fileURLWithPath:_videoOutputPath];
if ([[NSFileManager defaultManager] fileExistsAtPath:_videoOutputPath]){
[[NSFileManager defaultManager] removeItemAtPath:_videoOutputPath error:nil];
}
_assetExport.outputFileType = @"com.apple.quicktime-movie";
_assetExport.outputURL = exportUrl;
_assetExport.shouldOptimizeForNetworkUse = YES;
[_assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
// handle the export result here
}];

I finally got it; this is the code I use:
#import <AVFoundation/AVFoundation.h>
static NSString * const kWriterInputIsReadyForMoreData = @"readyForMoreMediaData";
@implementation AudioUtil
{
AVAssetReader *_assetReader;
AVAssetWriter *_assetWriter;
AVAssetWriterInput *_assetWriterInput;
AVAssetReaderTrackOutput *_readerOutput;
void (^_callback)(BOOL);
CMSampleBufferRef _sampleBufferToAppend;
}
-(void)downSamplingAudioWithSourceURL:(NSURL *)sourceURL destinationURL:(NSURL *)destURL timeRange:(CMTimeRange)timeRange callBack:(void (^)(BOOL))callback
{
NSError *error = nil;
_callback = callback;
[[NSFileManager defaultManager] removeItemAtURL:destURL error:nil];
//initialize reader
AVURLAsset *inputAsset = [AVURLAsset assetWithURL:sourceURL];
_assetReader = [[AVAssetReader alloc] initWithAsset:inputAsset error:&error];
_assetReader.timeRange = timeRange;
AVAssetTrack* track = [[inputAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
NSMutableDictionary* audioReadSettings = [NSMutableDictionary dictionary];
audioReadSettings[AVFormatIDKey] = @(kAudioFormatLinearPCM);
audioReadSettings[AVNumberOfChannelsKey] = @([QLVideoFormatProvider audioChannelCount]);
_readerOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:track outputSettings:audioReadSettings];
NSAssert([_assetReader canAddOutput:_readerOutput], @"reader can't add output");
[_assetReader addOutput:_readerOutput];
//initialize writer
_assetWriter = [[AVAssetWriter alloc] initWithURL:destURL fileType:[QLVideoFormatProvider audioFileType] error:nil];
NSMutableDictionary *audioOutputSettings = [NSMutableDictionary dictionary];
audioOutputSettings[AVFormatIDKey] = [QLVideoFormatProvider audioFormatKeyForEncoder];
audioOutputSettings[AVNumberOfChannelsKey] = @([QLVideoFormatProvider audioChannelCount]);
audioOutputSettings[AVSampleRateKey] = @([QLVideoFormatProvider audioSampleRate]);
audioOutputSettings[AVEncoderBitRateKey] = @([QLVideoFormatProvider audioBitrate]);
_assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSettings];
[_assetWriter addInput:_assetWriterInput];
//start
[_assetWriter startWriting];
[_assetWriter startSessionAtSourceTime:kCMTimeZero];
BOOL canStartReading = [_assetReader startReading];
NSLog(#"can start reading %d",canStartReading);
if (!canStartReading) {
callback(NO);
return;
}
[_assetWriterInput addObserver:self forKeyPath:kWriterInputIsReadyForMoreData options:NSKeyValueObservingOptionOld|NSKeyValueObservingOptionNew context:NULL];
_sampleBufferToAppend = [_readerOutput copyNextSampleBuffer];
[self appendBufferToAppend];
}
-(void)appendBufferToAppend
{
if ([_assetWriterInput isReadyForMoreMediaData]) {
if (_sampleBufferToAppend) {
[_assetWriterInput appendSampleBuffer:_sampleBufferToAppend];
CFRelease(_sampleBufferToAppend);
}
_sampleBufferToAppend = [_readerOutput copyNextSampleBuffer];
if (_sampleBufferToAppend) {
[self appendBufferToAppend];
}
else {
[_assetWriter finishWritingWithCompletionHandler:^(){
if (_callback) {
_callback(_assetWriter.status == AVAssetWriterStatusCompleted);
};
}];
}
}
else {
}
}
-(void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
if ([keyPath isEqualToString:kWriterInputIsReadyForMoreData]) {
if ([change[NSKeyValueChangeNewKey] boolValue] == YES) {
[self appendBufferToAppend];
}
}
}
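For context, QLVideoFormatProvider is a small helper class of my own that just returns the encoder settings; it is not part of AVFoundation. Below is a minimal sketch of equivalent values plus a hypothetical call site (the paths are placeholders, and videoAsset is the asset from the question), assuming you then build the composition from the re-encoded file:
// Example values equivalent to what QLVideoFormatProvider returns
// (these particular numbers are assumptions, not requirements):
//   audioFormatKeyForEncoder -> @(kAudioFormatMPEG4AAC)
//   audioFileType            -> AVFileTypeAppleM4A
//   audioChannelCount        -> 2
//   audioSampleRate          -> 44100
//   audioBitrate             -> 128000
AudioUtil *audioUtil = [[AudioUtil alloc] init];
[audioUtil downSamplingAudioWithSourceURL:[NSURL fileURLWithPath:losslessSourcePath]   // placeholder path
                           destinationURL:[NSURL fileURLWithPath:compressedAudioPath]  // placeholder path
                                timeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                 callBack:^(BOOL success) {
    // On success, insert the track from compressedAudioPath into the
    // AVMutableComposition instead of the original lossless track.
    NSLog(@"down-sampling finished: %d", success);
}];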

Related

How to merge Audio and video?

I'm merging video with video, and audio with video. The video-with-video case works fine, but when I merge an audio file the result is a black screen. I don't know what I'm doing wrong in this code:
-(void)mergeAllMediaAtTime:(NSMutableArray*)startTimeArray {
NSURL *firstURL = [NSURL fileURLWithPath:[urlArray objectAtIndex:counter]];
firstAsset = [AVAsset assetWithURL:firstURL];
NSString* videoDirPath = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/Crop Videos"];
NSString* fileName = [VideoAndAudioNameArray objectAtIndex:counter];
NSString *pSecondVideoPath = [videoDirPath stringByAppendingPathComponent:fileName];
NSURL *secondURL = [NSURL fileURLWithPath:pSecondVideoPath];
secondAsset = [AVAsset assetWithURL:secondURL];
if(firstAsset !=nil && secondAsset!=nil)
{
AVVideoComposition *origionalComposition = [AVVideoComposition videoCompositionWithPropertiesOfAsset:firstAsset];
//Create AVMutableComposition Object.This object will hold our multiple AVMutableCompositionTrack.
AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init];
//VIDEO TRACK
AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack* track = [[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:track atTime:kCMTimeZero error:nil];
int time = [[startTimeArray objectAtIndex:counter] intValue];
CMTime pTime = CMTimeMake(time, 1);
///////////////////////
AVMutableCompositionTrack *secondTrack;
if ([[fileName pathExtension] isEqualToString:@"mov"])
{
secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, secondAsset.duration) ofTrack:[[secondAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:pTime error:nil];
}
// If Audio file
else
{
secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, secondAsset.duration) ofTrack:[[secondAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:pTime error:nil];
NSLog(#"Audio file's Merging");
}
/****** First Video *********/
AVMutableVideoCompositionInstruction * MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, firstAsset.duration);
MainInstruction.backgroundColor = [[UIColor clearColor] CGColor];
//FIXING ORIENTATION//
AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
AVAssetTrack *FirstAssetTrack = [[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
[FirstlayerInstruction setTransform:FirstAssetTrack.preferredTransform atTime:kCMTimeZero];
[FirstlayerInstruction setOpacity:0.0 atTime:firstAsset.duration];
if ([[fileName pathExtension] isEqualToString:@"mov"])
{
/****** Second Video *********/
AVMutableVideoCompositionLayerInstruction *SecondlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:secondTrack];
AVAssetTrack *SecondAssetTrack = [[secondAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
[SecondlayerInstruction setOpacity:1.0 atTime:pTime];
UIImageOrientation SecondAssetOrientation_ = UIImageOrientationUp;
BOOL isSecondAssetPortrait_ = NO;
CGAffineTransform secondTransform = SecondAssetTrack.preferredTransform;
if(secondTransform.a == 0 && secondTransform.b == 1.0 && secondTransform.c == -1.0 && secondTransform.d == 0) {SecondAssetOrientation_= UIImageOrientationRight; isSecondAssetPortrait_ = YES;}
if(secondTransform.a == 0 && secondTransform.b == -1.0 && secondTransform.c == 1.0 && secondTransform.d == 0) {SecondAssetOrientation_ = UIImageOrientationLeft; isSecondAssetPortrait_ = YES;}
if(secondTransform.a == 1.0 && secondTransform.b == 0 && secondTransform.c == 0 && secondTransform.d == 1.0) {SecondAssetOrientation_ = UIImageOrientationUp;}
if(secondTransform.a == -1.0 && secondTransform.b == 0 && secondTransform.c == 0 && secondTransform.d == -1.0) {SecondAssetOrientation_ = UIImageOrientationDown;}
CGFloat SecondAssetScaleToFitRatioOfWidth = nRenderWidth/SecondAssetTrack.naturalSize.width;
if(isSecondAssetPortrait_)
{
CGFloat SecondAssetScaleToFitRatioOfHeight = nRenderWidth/SecondAssetTrack.naturalSize.height;
CGAffineTransform SecondAssetScaleFactor = CGAffineTransformMakeScale(SecondAssetScaleToFitRatioOfWidth,SecondAssetScaleToFitRatioOfHeight);
[SecondlayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformMakeScale(1.0f,1.0f), SecondAssetScaleFactor) atTime:kCMTimeZero];
//CGAffineTransformConcat(CGAffineTransformMakeScale(1.0f,1.0f), SecondAssetScaleFactor)
}
else
{
CGFloat SecondAssetScaleToFitRatioOfWidth = nRenderWidth/SecondAssetTrack.naturalSize.width;
CGFloat SecondAssetScaleToFitRatioOfHeight = nRenderWidth/SecondAssetTrack.naturalSize.height;
CGAffineTransform SecondAssetScaleFactor = CGAffineTransformMakeScale(SecondAssetScaleToFitRatioOfWidth,SecondAssetScaleToFitRatioOfHeight);
//[SecondlayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(SecondAssetTrack.preferredTransform, SecondAssetScaleFactor),CGAffineTransformMakeTranslation(0, 160)) atTime:firstAsset.duration];
[SecondlayerInstruction setTransform:CGAffineTransformConcat(SecondAssetScaleFactor ,CGAffineTransformMakeTranslation(1, 1)) atTime:kCMTimeZero];
//CGAffineTransformConcat(CGAffineTransformMakeScale(1.0f,1.0f),CGAffineTransformMakeTranslation(0, 100))
}
[SecondlayerInstruction setOpacity:0.0 atTime:CMTimeAdd(pTime, secondAsset.duration)];
MainInstruction.layerInstructions = [NSArray arrayWithObjects:SecondlayerInstruction, FirstlayerInstruction,nil];
}
AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
MainCompositionInst.frameDuration = origionalComposition.frameDuration;
MainCompositionInst.renderScale = 1.0;
MainCompositionInst.renderSize = CGSizeMake(nRenderWidth, nRenderHeight);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"mergeVideo_%d.mov",arc4random() % 1000]];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
[urlArray addObject:myPathDocs];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.videoComposition = MainCompositionInst;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
[self exportDidFinish:exporter];
});
}];
}
}
In that case, your audio only starts after the end of the video frames. You can use atTime:kCMTimeZero instead, as shown in the code below:
else
{
secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, secondAsset.duration) ofTrack:[[secondAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];
NSLog(#"Audio file's Merging");
}

Concatenating sound files on the iPhone (m4a) fails

I am trying to make one long sound file by concatenating several smaller sound files - all are in the m4a format, and the result should also be in the m4a format.
Here is the code (the audFiles array holds the names of the sound files to join).
Note I only print the file sizes for peace of mind...
CMTime nextClipStartTime = kCMTimeZero;
AVMutableComposition *combinedSounds = [AVMutableComposition composition];
NSString *tempDir = NSTemporaryDirectory();
NSArray *audFiles;
for (int i = 0; i < [audFiles count]; i++)
{
NSString *addSound = [tempDir stringByAppendingString:audFiles[i]];
if ([[NSFileManager defaultManager] fileExistsAtPath:addSound] == YES)
{
NSDictionary *fileAttributes = [[NSFileManager defaultManager] attributesOfItemAtPath:addSound error:nil];
if (fileAttributes != nil)
{
NSString *fileSize = [fileAttributes objectForKey:NSFileSize];
NSLog(#"file %# %#", addSound, fileSize);
NSURL *assetURL = [[NSURL alloc] initFileURLWithPath:addSound];
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:assetURL options:nil];
if (asset != nil)
{
CMTimeRange timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
NSLog(#"asset %d length %lld", i, asset.duration.value);
if (asset.duration.value > 0)
{
AVAssetTrack *audTrack = [asset tracksWithMediaType:AVMediaTypeAudio][0];
AVMutableCompositionTrack *audioTrack = [combinedSounds addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
if ([audioTrack insertTimeRange:timeRange ofTrack:audTrack atTime:nextClipStartTime error:nil] == NO)
{
NSLog(#"insertTimeRange %d FAILED", i);
}
nextClipStartTime = CMTimeAdd(nextClipStartTime, asset.duration);
nextClipStartTime = CMTimeAdd(nextClipStartTime, CMTimeMake(0.1, 1));
NSLog(#"nextClipStartTime %lld", nextClipStartTime.value);
}
}
}
}
}
NSString *finalSound = [tempDir stringByAppendingString:@"result.m4a"];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:combinedSounds presetName:AVAssetExportPresetPassthrough];
NSString *exported = [tempDir stringByAppendingString:finalSound];
[[NSFileManager defaultManager] removeItemAtPath:exported error:nil];
NSURL *exportedURL = [[NSURL alloc] initFileURLWithPath:exported];
exportSession.outputURL = exportedURL;
exportSession.shouldOptimizeForNetworkUse = YES;
exportSession.outputFileType = AVFileTypeAppleM4A;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
switch (exportSession.status)
{
case AVAssetExportSessionStatusFailed:
{
NSLog(#"exportSession FAIL");
break;
}
case AVAssetExportSessionStatusCompleted:
{
NSLog(#"exportSession SUCCESS");
}
}
}];
"exportSession SUCCESS" is reported, and the exported file exists and can be played, but it only contains the first of the constituent files.
Any ideas what I am doing wrong?
Thanks.
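One thing that may be worth checking (a guess on my part, separate from the answer below): the code adds a new AVMutableCompositionTrack for every file inside the loop, so the clips end up on parallel tracks rather than back to back on the same track, and with AVAssetExportPresetPassthrough that does not necessarily produce a concatenation. A minimal sketch using a single composition track, with the variable names from the question:
AVMutableCompositionTrack *audioTrack = [combinedSounds addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
for (int i = 0; i < [audFiles count]; i++)
{
    NSString *addSound = [tempDir stringByAppendingString:audFiles[i]];
    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:addSound] options:nil];
    AVAssetTrack *audTrack = [asset tracksWithMediaType:AVMediaTypeAudio][0];
    // Append each clip to the SAME composition track, back to back.
    [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                        ofTrack:audTrack
                         atTime:nextClipStartTime
                          error:nil];
    nextClipStartTime = CMTimeAdd(nextClipStartTime, asset.duration);
}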
Step 1 : Add AVFoundation.framework and CoreMedia.framework to your project
Step 2 : #import <AVFoundation/AVFoundation.h> in your .h file
Step 3 : Write the following methods in your .m file
Step 4 : Call mergeAudioFiles from viewDidAppear to create the merged audio file.
// Get Asset Detail
- (AVAsset *)getAvAssetForFile:(NSString *)fileName andType:(NSString *)fileType
{
NSString *URLPath = [[NSBundle mainBundle] pathForResource:fileName ofType:fileType];
NSURL *assetURL = [NSURL fileURLWithPath:URLPath];
AVAsset *asset = [AVAsset assetWithURL:assetURL];
return asset;
}
- (void)mergeAudioFiles
{
// `An Array with List of Audio Clips to be Merged, they could be of different type also`
NSMutableArray *listOfAudio = [[NSMutableArray alloc] initWithCapacity:0];
// `Create AVAsset Object from your audio files and add to array`
[listOfAudio addObject:[self getAvAssetForFile:@"audioFile1" andType:@"caf"]];
[listOfAudio addObject:[self getAvAssetForFile:@"audioFile2" andType:@"caf"]];
[listOfAudio addObject:[self getAvAssetForFile:@"audioFile3" andType:@"aiff"]];
AVMutableComposition *composition = [AVMutableComposition composition];
CMTime current = kCMTimeZero;
NSError *compositionError = nil;
for(AVAsset *asset in listOfAudio) {
BOOL result = [composition insertTimeRange:CMTimeRangeMake(kCMTimeZero, [asset duration])
ofAsset:asset
atTime:current
error:&compositionError];
if(!result) {
if(compositionError) {
// manage the composition error case
NSLog(#"Error in Merging : %#",[compositionError debugDescription]);
}
} else {
current = CMTimeAdd(current, [asset duration]);
}
}
AVAssetExportSession *exporter = [[AVAssetExportSession alloc]
initWithAsset: composition
presetName: AVAssetExportPresetAppleM4A];
exporter.outputFileType = @"com.apple.m4a-audio";
// Set Output FileName
NSString *fileName = @"myAudioTrack";
NSString *exportFile = [DOC_DIR_PATH stringByAppendingFormat: @"/%@.m4a", fileName];
// set up export
NSURL *exportURL = [NSURL fileURLWithPath:exportFile];
exporter.outputURL = exportURL;
// do the export
[exporter exportAsynchronouslyWithCompletionHandler:^{
int exportStatus = exporter.status;
switch (exportStatus) {
case AVAssetExportSessionStatusFailed: NSLog (@"AVAssetExportSessionStatusFailed: %@", exporter.debugDescription); break;
case AVAssetExportSessionStatusCompleted: NSLog (@"AVAssetExportSessionStatusCompleted"); break;
case AVAssetExportSessionStatusUnknown: NSLog (@"AVAssetExportSessionStatusUnknown"); break;
case AVAssetExportSessionStatusExporting: NSLog (@"AVAssetExportSessionStatusExporting"); break;
case AVAssetExportSessionStatusCancelled: NSLog (@"AVAssetExportSessionStatusCancelled"); break;
case AVAssetExportSessionStatusWaiting: NSLog (@"AVAssetExportSessionStatusWaiting"); break;
default: NSLog (@"didn't get export status"); break;
}
}];
}
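As mentioned in Step 4, a minimal call site might look like this (assuming audioFile1.caf, audioFile2.caf and audioFile3.aiff are bundled with the app and DOC_DIR_PATH expands to your Documents directory):
- (void)viewDidAppear:(BOOL)animated
{
    [super viewDidAppear:animated];
    // Kicks off the merge; the exported file ends up at
    // <Documents>/myAudioTrack.m4a once the export session completes.
    [self mergeAudioFiles];
}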

Merge audio and video when the lengths are different

I'm making a video by merging a movie file built from one image (it has only one frame) and an audio track of several seconds, using this tutorial.
On iPhone devices the video duration equals the audio duration, and I see the image for the whole video.
But when I share it to Android devices (through WhatsApp) and press play, the playback duration is the duration of the one-frame image movie. As a test, if I create a movie file from one image repeated a hundred times (at 10 fps, ten seconds), on Android devices the playback duration is ten seconds.
I think Android devices only play the shortest track in the video, but if I change the time range of the video track (the one added with addMutableTrackWithMediaType) to the audio duration, nothing happens.
Any advice?
Thank you for the support.
I put all the code here:
-(void) writeImagesToMovieAtPath:(NSString *)path withSize:(CGSize) size {
NSMutableArray *m_PictArray = [NSMutableArray arrayWithCapacity:1];
[m_PictArray addObject:[UIImage imageNamed:@"prueba.jpg"]];
NSString *documentsDirectoryPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSArray *dirContents = [[NSFileManager defaultManager] contentsOfDirectoryAtPath:documentsDirectoryPath error:nil];
for (NSString *tString in dirContents) {
if ([tString isEqualToString:#"essai.mp4"])
{
[[NSFileManager defaultManager]removeItemAtPath:[NSString stringWithFormat:#"%#/%#",documentsDirectoryPath,tString] error:nil];
}
}
NSLog(#"Write Started");
NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:path] fileType:AVFileTypeMPEG4
error:&error];
NSParameterAssert(videoWriter);
NSDictionary *codecSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:128000], AVVideoAverageBitRateKey,
[NSNumber numberWithInt:15],AVVideoMaxKeyFrameIntervalKey,
AVVideoProfileLevelH264Main30, AVVideoProfileLevelKey,
nil];
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
codecSettings,AVVideoCompressionPropertiesKey,
[NSNumber numberWithInt:size.width], AVVideoWidthKey,
[NSNumber numberWithInt:size.height], AVVideoHeightKey,
nil];
AVAssetWriterInput* videoWriterInput = [[AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings] retain];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
sourcePixelBufferAttributes:nil];
NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
videoWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:videoWriterInput];
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
//Video encoding
CVPixelBufferRef buffer = NULL;
//convert uiimage to CGImage.
int frameCount = 0;
for(int i = 0; i<[m_PictArray count]; i++)
{
buffer = [self newPixelBufferFromCGImage:[[m_PictArray objectAtIndex:i] CGImage] andSize:size];
BOOL append_ok = NO;
int j = 0;
while (!append_ok && j < 30)
{
if (adaptor.assetWriterInput.readyForMoreMediaData)
{
printf("appending %d attemp %d\n", frameCount, j);
CMTime frameTime = CMTimeMake(frameCount,(int32_t) 10);
/*
Float64 seconds = 1;
int32_t preferredTimeScale = 10;
CMTime frameTime = CMTimeMakeWithSeconds(seconds, preferredTimeScale);
*/
append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
CVPixelBufferPoolRef bufferPool = adaptor.pixelBufferPool;
NSParameterAssert(bufferPool != NULL);
[NSThread sleepForTimeInterval:0.05];
}
else
{
printf("adaptor not ready %d, %d\n", frameCount, j);
[NSThread sleepForTimeInterval:0.1];
}
j++;
}
if (!append_ok) {
printf("error appending image %d times %d\n", frameCount, j);
}
frameCount++;
CVBufferRelease(buffer);
}
[videoWriterInput markAsFinished];
[videoWriter finishWriting];
[videoWriterInput release];
[videoWriter release];
[m_PictArray removeAllObjects];
NSLog(#"Write Ended");
[self saveVideoToAlbum:path];
}
-(void)CompileFilesToMakeMovie {
NSLog(#"CompileFilesToMakeMovie");
AVMutableComposition* mixComposition = [AVMutableComposition composition];
NSString *documentsDirectoryPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
//Audio file in AAC
NSString* audio_inputFileName = @"zApY4o8QY.m4a";
NSString* audio_inputFilePath = [NSString stringWithFormat:@"%@/%@",[[NSBundle mainBundle] resourcePath],audio_inputFileName];
NSURL* audio_inputFileUrl = [NSURL fileURLWithPath:audio_inputFilePath];
NSString* video_inputFileName = @"essai.mp4";
NSString* video_inputFilePath = [NSString stringWithFormat:@"%@/%@",documentsDirectoryPath,video_inputFileName];
NSURL* video_inputFileUrl = [NSURL fileURLWithPath:video_inputFilePath];
NSString* outputFileName = @"outputFile.mov";
NSString* outputFilePath = [NSString stringWithFormat:@"%@/%@",documentsDirectoryPath,outputFileName];
NSURL* outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
CMTime nextClipStartTime = kCMTimeZero;
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];
//CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetLowQuality];
_assetExport.shouldOptimizeForNetworkUse = YES;
_assetExport.outputFileType = @"com.apple.quicktime-movie";
_assetExport.outputURL = outputFileUrl;
_assetExport.timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
[self saveVideoToAlbum:outputFilePath];
}
];
NSLog(#"CompileFilesToMakeMovie Finish");
}
- (void) saveVideoToAlbum:(NSString*)path {
NSLog(#"saveVideoToAlbum");
if(UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(path)){
UISaveVideoAtPathToSavedPhotosAlbum (path, self, #selector(video:didFinishSavingWithError: contextInfo:), nil);
}
}
-(void) video:(NSString *)videoPath didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo {
if(error)
NSLog(@"Exported with error: %@", error);
else
NSLog(@"Exported OK");
}
- (CVPixelBufferRef) newPixelBufferFromCGImage: (CGImageRef)image andSize:(CGSize)frameSize {
CGAffineTransform frameTransform = CGAffineTransformMake(0, 0, 0, 0, 0, 0);
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
nil];
CVPixelBufferRef pxbuffer = NULL;
CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, frameSize.width,
frameSize.height, kCVPixelFormatType_32ARGB, (CFDictionaryRef) options,
&pxbuffer);
NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
NSParameterAssert(pxdata != NULL);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata, frameSize.width,
frameSize.height, 8, 4*frameSize.width, rgbColorSpace,
kCGImageAlphaNoneSkipFirst);
NSParameterAssert(context);
//CGContextConcatCTM(context, frameTransform);
CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
CGImageGetHeight(image)), image);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
return (CVPixelBufferRef)pxbuffer;
}
Just fixed!
I create the movie file by repeating the image X times, and then in the composition step I scale the video track to the length of audioAsset.duration:
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
[a_compositionVideoTrack scaleTimeRange:video_timeRange toDuration:audioAsset.duration];
You need to repeat the image at least once so the track can be scaled, but if the movie has only 2 frames, Android only plays eight seconds, so I made a video with the image repeated 10 times, which also lets me exceed the 45-second limit for video sharing on WhatsApp.
Inside the CompileFilesToMakeMovie method, use video_timeRange instead of audio_timeRange wherever it's needed...

Save audio with fade in fade out with setVolumeRampFromStartVolume not working in iOS

I am trying to cut an audio file for an iPhone project. I can cut it and save it, but any fade in / fade out that I try to apply doesn't work; the audio file is saved cut, but not faded.
I am using the following code:
//
// NO PROBLEMS TO SEE HERE, MOVE ON
//
NSArray *documentsFolders = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
int currentFileNum = 10;
NSURL *url = [NSURL fileURLWithPath: [[documentsFolders objectAtIndex:0] stringByAppendingPathComponent:[NSString stringWithFormat:@"%@%d.%@", AUDIO_SOURCE_FILE_NAME ,currentFileNum, AUDIO_SOURCE_FILE_EXTENSION ]]];
NSDictionary *options = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES]
forKey:AVURLAssetPreferPreciseDurationAndTimingKey];
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:url options:options];
AVAssetExportSession* exporter = [AVAssetExportSession exportSessionWithAsset:asset presetName:AVAssetExportPresetAppleM4A];
for (NSString* filetype in exporter.supportedFileTypes) {
if ([filetype isEqualToString:AVFileTypeAppleM4A]) {
exporter.outputFileType = AVFileTypeAppleM4A;
break;
}
}
if (exporter.outputFileType == nil) {
NSLog(#"Needed output file type not found? (%#)", AVFileTypeAppleM4A);
//return;
}
NSString* outPath = [[documentsFolders objectAtIndex:0] stringByAppendingPathComponent:[NSString stringWithFormat:@"%@%d.%@", AUDIO_CUTTED_FILE_NAME ,currentFileNum, AUDIO_SOURCE_FILE_EXTENSION ]];
NSURL* const outUrl = [NSURL fileURLWithPath:outPath];
exporter.outputURL = outUrl;
float endTrimTime = CMTimeGetSeconds(asset.duration);
float startTrimTime = fminf(AUDIO_DURATION, endTrimTime);
CMTime startTrimCMTime=CMTimeSubtract(asset.duration, CMTimeMake(startTrimTime, 1));
exporter.timeRange = CMTimeRangeMake(startTrimCMTime, asset.duration);
//
// TRYING TO APPLY FADEIN FADEOUT, NOT WORKING, NO RESULTS, "CODE IGNORED"
//
AVMutableAudioMix *exportAudioMix = [AVMutableAudioMix audioMix];
NSMutableArray* inputParameters = [NSMutableArray arrayWithCapacity:1];
CMTime startFadeInTime = startTrimCMTime;
CMTime endFadeInTime = CMTimeMake(startTrimTime+1, 1);
CMTime startFadeOutTime = CMTimeMake(endTrimTime-1, 1);
CMTime endFadeOutTime = CMTimeMake(endTrimTime, 1);
CMTimeRange fadeInTimeRange = CMTimeRangeFromTimeToTime(startFadeInTime, endFadeInTime);
CMTimeRange fadeOutTimeRange = CMTimeRangeFromTimeToTime(startFadeOutTime, endFadeOutTime);
AVMutableAudioMixInputParameters *exportAudioMixInputParameters = [AVMutableAudioMixInputParameters audioMixInputParameters];
[exportAudioMixInputParameters setVolume:0.0 atTime:CMTimeMakeWithSeconds(startTrimTime-0.01, 1)];
[exportAudioMixInputParameters setVolumeRampFromStartVolume:0.0 toEndVolume:1.0 timeRange:fadeInTimeRange];
[exportAudioMixInputParameters setVolumeRampFromStartVolume:1.0 toEndVolume:0.0 timeRange:fadeOutTimeRange];
[inputParameters insertObject:exportAudioMixInputParameters atIndex:0];
exportAudioMix.inputParameters = inputParameters;
exporter.audioMix = exportAudioMix;
[exporter exportAsynchronouslyWithCompletionHandler:^(void) {
NSString* message;
switch (exporter.status) {
case AVAssetExportSessionStatusFailed:
message = [NSString stringWithFormat:#"Export failed. Error: %#", exporter.error.description];
[asset release];
break;
case AVAssetExportSessionStatusCompleted: {
[asset release];
[self reallyConvert:currentFileNum];
message = [NSString stringWithFormat:#"Export completed: %#", outPath];
break;
}
case AVAssetExportSessionStatusCancelled:
message = [NSString stringWithFormat:#"Export cancelled!"];
[asset release];
break;
default:
NSLog(#"Export 4 unhandled status: %d", exporter.status);
[asset release];
break;
}
}];
You need to select the track. Instead of calling:
AVMutableAudioMixInputParameters *exportAudioMixInputParameters = [AVMutableAudioMixInputParameters audioMixInputParameters];
Call:
AVAssetTrack *assetTrack = [[asset tracksWithMediaType:AVMediaTypeAudio]objectAtIndex:0];
AVMutableAudioMixInputParameters *exportAudioMixInputParameters = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:assetTrack];
In your existing code you can also specify the track like this:
exportAudioMixInputParameters.trackID = [[[asset tracksWithMediaType:AVMediaTypeAudio]objectAtIndex:0] trackID];
Good luck!
Here is the solution.
setVolumeRampFromStartVolume didn't work for me, so I set the volume at several points instead:
AVMutableAudioMix *exportAudioMix = [AVMutableAudioMix audioMix];
AVMutableAudioMixInputParameters *exportAudioMixInputParameters = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:track];
//fade in
[exportAudioMixInputParameters setVolume:0.0 atTime:CMTimeMakeWithSeconds(start-1, 1)];
[exportAudioMixInputParameters setVolume:0.1 atTime:CMTimeMakeWithSeconds(start, 1)];
[exportAudioMixInputParameters setVolume:0.5 atTime:CMTimeMakeWithSeconds(start+1, 1)];
[exportAudioMixInputParameters setVolume:1.0 atTime:CMTimeMakeWithSeconds(start+2, 1)];
//fade out
[exportAudioMixInputParameters setVolume:1.0 atTime:CMTimeMakeWithSeconds((start+length-2), 1)];
[exportAudioMixInputParameters setVolume:0.5 atTime:CMTimeMakeWithSeconds((start+length-1), 1)];
[exportAudioMixInputParameters setVolume:0.1 atTime:CMTimeMakeWithSeconds((start+length), 1)];
exportAudioMix.inputParameters = [NSArray arrayWithObject:exportAudioMixInputParameters];
// configure export session output with all our parameters
exportSession.outputURL = [NSURL fileURLWithPath:filePath]; // output path
exportSession.outputFileType = AVFileTypeAppleM4A; // output file type
exportSession.timeRange = exportTimeRange; // trim time ranges
exportSession.audioMix = exportAudioMix; // fade in audio mix
// perform the export
[exportSession exportAsynchronouslyWithCompletionHandler:^{
if (AVAssetExportSessionStatusCompleted == exportSession.status) {
NSLog(#"AVAssetExportSessionStatusCompleted");
} else if (AVAssetExportSessionStatusFailed == exportSession.status) {
NSLog(#"AVAssetExportSessionStatusFailed");
} else {
NSLog(#"Export Session Status: %d", exportSession.status);
}
}];
I've made the same mistake as you dozens of times!
Apple's API is really easy to trip over here. Note the difference between the two constructors:
CMTimeRangeMake(start, duration) builds the range from a start time and a duration, while
CMTimeRangeFromTimeToTime(start, end) builds it from a start time and an end time.
It's very easy to pass an end time where a duration is expected, or a duration where an end time is expected.
And most of the time the end time is also the duration (when the start time is 0), which is why so many people (including me) mix the two up.
And no Apple, that's not intuitive at all!
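For illustration (a small standalone example, not tied to this question's variables), these two expressions describe the same one-second fade range:
CMTime start    = CMTimeMakeWithSeconds(5.0, 600);   // fade starts at 5 s
CMTime duration = CMTimeMakeWithSeconds(1.0, 600);   // fade lasts 1 s
CMTime end      = CMTimeAdd(start, duration);        // fade ends at 6 s
CMTimeRange rangeA = CMTimeRangeMake(start, duration);        // start + duration
CMTimeRange rangeB = CMTimeRangeFromTimeToTime(start, end);   // start + end
// CMTimeRangeEqual(rangeA, rangeB) is true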
This is my working code, just take it and have a nice day!
+(void)makeAudioFadeOutWithSourceURL:(NSURL*)sourceURL destinationURL:(NSURL*)destinationURL fadeOutBeginSecond:(NSInteger)beginTime fadeOutEndSecond:(NSInteger)endTime fadeOutBeginVolume:(CGFloat)beginVolume fadeOutEndVolume:(CGFloat)endVolume callback:(void(^)(BOOL))callback
{
NSAssert(callback, @"need callback");
NSParameterAssert(beginVolume >= 0 && beginVolume <= 1);
NSParameterAssert(endVolume >= 0 && endVolume <= 1);
BOOL sourceExist = [[NSFileManager defaultManager] fileExistsAtPath:sourceURL.path];
NSAssert(sourceExist, @"source does not exist");
AVURLAsset *asset = [AVAsset assetWithURL:sourceURL];
AVAssetExportSession* exporter = [AVAssetExportSession exportSessionWithAsset:asset presetName:AVAssetExportPresetAppleM4A];
exporter.outputURL = destinationURL;
exporter.outputFileType = AVFileTypeAppleM4A;
AVMutableAudioMix *exportAudioMix = [AVMutableAudioMix audioMix];
AVMutableAudioMixInputParameters *exportAudioMixInputParameters = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:asset.tracks.lastObject];
[exportAudioMixInputParameters setVolumeRampFromStartVolume:beginVolume toEndVolume:endVolume timeRange:CMTimeRangeMake(CMTimeMakeWithSeconds(beginTime, 1), CMTimeSubtract(CMTimeMakeWithSeconds(endTime, 1), CMTimeMakeWithSeconds(beginTime, 1)))];
NSArray *audioMixParameters = @[exportAudioMixInputParameters];
exportAudioMix.inputParameters = audioMixParameters;
exporter.audioMix = exportAudioMix;
[exporter exportAsynchronouslyWithCompletionHandler:^(void){
AVAssetExportSessionStatus status = exporter.status;
if (status != AVAssetExportSessionStatusCompleted) {
if (callback) {
callback(NO);
}
}
else {
if (callback) {
callback(YES);
}
}
NSError *error = exporter.error;
NSLog(#"export done,error %#,status %d",error,status);
}];
}
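A hypothetical call site (the class name MyAudioHelper and the file paths are placeholders), fading the last two seconds of a ten-second clip from full volume to silence:
NSURL *src = [NSURL fileURLWithPath:sourcePath];       // placeholder path
NSURL *dst = [NSURL fileURLWithPath:fadedOutputPath];  // placeholder path, should end in .m4a
[MyAudioHelper makeAudioFadeOutWithSourceURL:src
                              destinationURL:dst
                          fadeOutBeginSecond:8
                            fadeOutEndSecond:10
                          fadeOutBeginVolume:1.0
                            fadeOutEndVolume:0.0
                                    callback:^(BOOL success) {
    NSLog(@"fade-out export finished: %d", success);
}];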

Make movie file with picture Array and song file, using AVAsset

I'm trying to make a movie file using a picture array and an audio file. To make the movie from the picture array I used the big post by zoul here. Everything is perfect; I have my movie with my pictures. However, when I try to add an audio track I have lots of problems. To explain, I put my code below.
When I call this method, the picture array and the song file are ready:
-(void) writeImagesToMovieAtPath:(NSString *) path withSize:(CGSize) size
{
NSString *documentsDirectoryPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSArray *dirContents = [[NSFileManager defaultManager] directoryContentsAtPath:documentsDirectoryPath];
for (NSString *tString in dirContents) {
if ([tString isEqualToString:#"essai.mp4"])
{
[[NSFileManager defaultManager]removeItemAtPath:[NSString stringWithFormat:#"%#/%#",documentsDirectoryPath,tString] error:nil];
}
}
NSLog(#"Write Started");
NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:path] fileType:AVFileTypeMPEG4
error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:size.width], AVVideoWidthKey,
[NSNumber numberWithInt:size.height], AVVideoHeightKey,
nil];
AudioChannelLayout channelLayout;
memset(&channelLayout, 0, sizeof(AudioChannelLayout));
channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
NSDictionary *audioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
[NSNumber numberWithFloat:44100.0] ,AVSampleRateKey,
[NSNumber numberWithInt: 1] ,AVNumberOfChannelsKey,
[NSNumber numberWithInt:192000],AVEncoderBitRateKey,
[NSData dataWithBytes:&channelLayout length:sizeof(AudioChannelLayout)],AVChannelLayoutKey,
nil];
AVAssetWriterInput* videoWriterInput = [[AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings] retain];
AVAssetWriterInput* audioWriterInput = [[AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeAudio
outputSettings:audioSettings] retain];
NSURL* fileURL = [[NSBundle mainBundle] URLForResource:@"Big_Voice_1" withExtension:@"caf"];
NSLog(@"%@",fileURL);
AVAsset *asset = [[AVURLAsset URLAssetWithURL:fileURL
options:nil] retain];
AVAssetReader *audioReader = [[AVAssetReader assetReaderWithAsset:asset error:&error] retain];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
sourcePixelBufferAttributes:nil];
AVAssetTrack* audioTrack = [asset.tracks objectAtIndex:0];
AVAssetReaderOutput *readerOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];
[audioReader addOutput:readerOutput];
NSParameterAssert(videoWriterInput);
NSParameterAssert(audioWriterInput);
NSParameterAssert([videoWriter canAddInput:audioWriterInput]);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
audioWriterInput.expectsMediaDataInRealTime = NO;
videoWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:audioWriterInput];
[videoWriter addInput:videoWriterInput];
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
//Video encoding
CVPixelBufferRef buffer = NULL;
//convert uiimage to CGImage.
int frameCount = 0;
for(int i = 0; i<20; i++)
{
buffer = [self pixelBufferFromCGImage:[[m_PictArray objectAtIndex:i] CGImage] andSize:size];
BOOL append_ok = NO;
int j = 0;
while (!append_ok && j < 30)
{
if (adaptor.assetWriterInput.readyForMoreMediaData)
{
printf("appending %d attemp %d\n", frameCount, j);
CMTime frameTime = CMTimeMake(frameCount,(int32_t) 10);
//CVPixelBufferPoolCreatePixelBuffer (kCFAllocatorDefault, adaptor.pixelBufferPool, &buffer);
append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
CVPixelBufferPoolRef bufferPool = adaptor.pixelBufferPool;
NSParameterAssert(bufferPool != NULL);
[NSThread sleepForTimeInterval:0.05];
}
else
{
printf("adaptor not ready %d, %d\n", frameCount, j);
[NSThread sleepForTimeInterval:0.1];
}
j++;
}
if (!append_ok) {
printf("error appending image %d times %d\n", frameCount, j);
}
frameCount++;
}
//Finish the session:
[videoWriterInput markAsFinished];
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer = NULL;
//Write all picture array in movie file.
int frameCount = 0;
for(int i = 0; i<[m_PictArray count]; i++)
{
buffer = [self pixelBufferFromCGImage:[[m_PictArray objectAtIndex:i] CGImage] andSize:size];
BOOL append_ok = NO;
int j = 0;
while (!append_ok && j < 30)
{
if (adaptor.assetWriterInput.readyForMoreMediaData)
{
printf("appending %d attemp %d\n", frameCount, j);
CMTime frameTime = CMTimeMake(frameCount,(int32_t) 10);
append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
CVPixelBufferPoolRef bufferPool = adaptor.pixelBufferPool;
NSParameterAssert(bufferPool != NULL);
[NSThread sleepForTimeInterval:0.05];
}
else
{
printf("adaptor not ready %d, %d\n", frameCount, j);
[NSThread sleepForTimeInterval:0.1];
}
j++;
}
if (!append_ok) {
printf("error appending image %d times %d\n", frameCount, j);
}
frameCount++;
}
//Finish writing picture:
[videoWriterInput markAsFinished];
I finish write picture in movie file and I want copy audio in the file and I do this :
[audioReader startReading];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
dispatch_queue_t mediaInputQueue = dispatch_queue_create("mediaInputQueue", NULL);
[audioWriterInput requestMediaDataWhenReadyOnQueue:mediaInputQueue usingBlock:^
{
NSLog(#"Request");
NSLog(#"Asset Writer ready :%d",audioWriterInput.readyForMoreMediaData);
while (audioWriterInput.readyForMoreMediaData) {
NSLog(#"Ready");
CMSampleBufferRef nextBuffer = [readerOutput copyNextSampleBuffer];
if (nextBuffer) {
NSLog(#"NextBuffer");
[audioWriterInput appendSampleBuffer:nextBuffer];
}
}
}
];
[audioWriterInput markAsFinished];
[videoWriter finishWriting];
However, the readyForMoreMediaData status of the audio AVAssetWriterInput is always NO.
My question: how do I add audio to a video file using AVFoundation?
So please, can someone help me by telling me if I forgot something or if something is wrong?
Thank you very much
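One detail that stands out in the code above (my own reading, separate from the accepted answer below): markAsFinished and finishWriting are called immediately after requestMediaDataWhenReadyOnQueue:usingBlock: returns, before the block has had a chance to append anything, and startWriting / startSessionAtSourceTime: are also invoked a second time. The usual pattern is to finish the input and the writer from inside the block, once copyNextSampleBuffer returns NULL. A rough sketch, reusing the variables from the question:
[audioReader startReading];
[audioWriterInput requestMediaDataWhenReadyOnQueue:mediaInputQueue usingBlock:^{
    while (audioWriterInput.readyForMoreMediaData) {
        CMSampleBufferRef nextBuffer = [readerOutput copyNextSampleBuffer];
        if (nextBuffer) {
            [audioWriterInput appendSampleBuffer:nextBuffer];
            CFRelease(nextBuffer);
        } else {
            // No more audio samples: finish the input and the file here,
            // not synchronously right after requesting the data.
            [audioWriterInput markAsFinished];
            [videoWriter finishWritingWithCompletionHandler:^{
                NSLog(@"writing finished, status %ld", (long)videoWriter.status);
            }];
            break;
        }
    }
}];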
I finally found out how to make a movie with a picture array and an audio file. So if you want to do the same thing, I put my code here (be careful with memory):
First, make a movie file with your picture array using zoul's post here:
-(void) writeImagesToMovieAtPath:(NSString *) path withSize:(CGSize) size
{
NSString *documentsDirectoryPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSArray *dirContents = [[NSFileManager defaultManager] contentsOfDirectoryAtPath:documentsDirectoryPath error:nil];
for (NSString *tString in dirContents)
{
if ([tString isEqualToString:#"essai.mp4"])
{
[[NSFileManager defaultManager]removeItemAtPath:[NSString stringWithFormat:#"%#/%#",documentsDirectoryPath,tString] error:nil];
}
}
NSLog(#"Write Started");
NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:path] fileType:AVFileTypeMPEG4
error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:size.width], AVVideoWidthKey,
[NSNumber numberWithInt:size.height], AVVideoHeightKey,
nil];
AVAssetWriterInput* videoWriterInput = [[AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings] retain];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
sourcePixelBufferAttributes:nil];
NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
videoWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:videoWriterInput];
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
//Video encoding
CVPixelBufferRef buffer = NULL;
//convert uiimage to CGImage.
int frameCount = 0;
for(int i = 0; i<[m_PictArray count]; i++)
{
buffer = [self pixelBufferFromCGImage:[[m_PictArray objectAtIndex:i] CGImage] andSize:size];
BOOL append_ok = NO;
int j = 0;
while (!append_ok && j < 30)
{
if (adaptor.assetWriterInput.readyForMoreMediaData)
{
printf("appending %d attemp %d\n", frameCount, j);
CMTime frameTime = CMTimeMake(frameCount,(int32_t) 10);
append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
CVPixelBufferPoolRef bufferPool = adaptor.pixelBufferPool;
NSParameterAssert(bufferPool != NULL);
[NSThread sleepForTimeInterval:0.05];
}
else
{
printf("adaptor not ready %d, %d\n", frameCount, j);
[NSThread sleepForTimeInterval:0.1];
}
j++;
}
if (!append_ok)
{
printf("error appending image %d times %d\n", frameCount, j);
}
frameCount++;
CVBufferRelease(buffer);
}
[videoWriterInput markAsFinished];
[videoWriter finishWriting];
[videoWriterInput release];
[videoWriter release];
[m_PictArray removeAllObjects];
NSLog(#"Write Ended");
}
After that you must put the movie file and the audio file together. To do this, follow my code:
-(void)CompileFilesToMakeMovie
{
AVMutableComposition* mixComposition = [AVMutableComposition composition];
NSString* audio_inputFileName = @"deformed.caf";
NSString* audio_inputFilePath = [Utilities documentsPath:audio_inputFileName];
NSURL* audio_inputFileUrl = [NSURL fileURLWithPath:audio_inputFilePath];
NSString* video_inputFileName = @"essai.mp4";
NSString* video_inputFilePath = [Utilities documentsPath:video_inputFileName];
NSURL* video_inputFileUrl = [NSURL fileURLWithPath:video_inputFilePath];
NSString* outputFileName = @"outputFile.mov";
NSString* outputFilePath = [Utilities documentsPath:outputFileName];
NSURL* outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
CMTime nextClipStartTime = kCMTimeZero;
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
//nextClipStartTime = CMTimeAdd(nextClipStartTime, a_timeRange.duration);
AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];
CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
_assetExport.outputFileType = @"com.apple.quicktime-movie";
_assetExport.outputURL = outputFileUrl;
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
[self saveVideoToAlbum:outputFilePath];
}
];
}
Sorry if there are some leaks; I'm still optimizing the memory management.
The AVChannelLayoutKey should point to an NSData instance containing an AudioChannelLayout.
Yours points to an NSNumber.
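For reference, here is how the question's audioSettings dictionary could be written with AVChannelLayoutKey wrapped as described (the other values are taken from the question's code; I've used 2 channels to match the stereo layout, where the question used 1):
AudioChannelLayout channelLayout;
memset(&channelLayout, 0, sizeof(AudioChannelLayout));
channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
NSDictionary *audioSettings = @{
    AVFormatIDKey         : @(kAudioFormatMPEG4AAC),
    AVSampleRateKey       : @44100.0f,
    AVNumberOfChannelsKey : @2,   // matches the stereo channel layout below
    AVEncoderBitRateKey   : @192000,
    // Must be an NSData wrapping an AudioChannelLayout, not an NSNumber:
    AVChannelLayoutKey    : [NSData dataWithBytes:&channelLayout length:sizeof(channelLayout)]
};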