How to append two audio files? - iPhone

I want to append two audio files on the iPhone. Please help me do this.

I found the solution; the following sample code does it.
NSError *error = nil;
AVMutableComposition *composition = [AVMutableComposition composition];
AVURLAsset *audioAsset1 = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:filePath1] options:nil];
AVURLAsset *audioAsset2 = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:filePath2] options:nil];
AVMutableCompositionTrack *audioTrack1 = [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                   preferredTrackID:kCMPersistentTrackID_Invalid];
// Insert the first asset at the start of the composition track.
[audioTrack1 insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAsset1.duration)
                     ofTrack:[[audioAsset1 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                      atTime:kCMTimeZero
                       error:&error];
// Insert the second asset immediately after the first.
[audioTrack1 insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAsset2.duration)
                     ofTrack:[[audioAsset2 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                      atTime:audioAsset1.duration
                       error:&error];
Finally, we have to export the composition as a single file using AVAssetExportSession.
NOTE: This works only for .m4a output files.
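A minimal export sketch, assuming the composition built above and a writable outputPath of your choosing (outputPath is a placeholder, not from the original answer), might look like this:
AVAssetExportSession *exportSession = [AVAssetExportSession exportSessionWithAsset:composition
                                                                         presetName:AVAssetExportPresetAppleM4A];
exportSession.outputURL = [NSURL fileURLWithPath:outputPath];   // e.g. a path in Documents ending in .m4a
exportSession.outputFileType = AVFileTypeAppleM4A;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
    if (exportSession.status == AVAssetExportSessionStatusCompleted) {
        NSLog(@"Export finished: %@", outputPath);
    } else {
        NSLog(@"Export failed: %@", exportSession.error);
    }
}];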

- (BOOL)combineVoices1
{
    NSError *error = nil;
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];

    // Create the AVMutableComposition. It will hold our multiple AVMutableCompositionTracks.
    AVMutableComposition *composition = [[AVMutableComposition alloc] init];

    // First clip on its own track.
    AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionAudioTrack setPreferredVolume:0.8];
    NSString *soundOne = [[NSBundle mainBundle] pathForResource:@"test1" ofType:@"caf"];
    NSURL *url = [NSURL fileURLWithPath:soundOne];
    AVAsset *avAsset = [AVURLAsset URLAssetWithURL:url options:nil];
    AVAssetTrack *clipAudioTrack = [[avAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset.duration) ofTrack:clipAudioTrack atTime:kCMTimeZero error:&error];

    // Second clip on a second track, so it plays in parallel with the first.
    AVMutableCompositionTrack *compositionAudioTrack1 = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionAudioTrack1 setPreferredVolume:0.3];
    NSString *soundOne1 = [[NSBundle mainBundle] pathForResource:@"test" ofType:@"caf"];
    NSURL *url1 = [NSURL fileURLWithPath:soundOne1];
    AVAsset *avAsset1 = [AVURLAsset URLAssetWithURL:url1 options:nil];
    AVAssetTrack *clipAudioTrack1 = [[avAsset1 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    [compositionAudioTrack1 insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset1.duration) ofTrack:clipAudioTrack1 atTime:kCMTimeZero error:&error];

    // Third clip on a third track.
    AVMutableCompositionTrack *compositionAudioTrack2 = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionAudioTrack2 setPreferredVolume:1.0];
    NSString *soundOne2 = [[NSBundle mainBundle] pathForResource:@"song" ofType:@"caf"];
    NSURL *url2 = [NSURL fileURLWithPath:soundOne2];
    AVAsset *avAsset2 = [AVURLAsset URLAssetWithURL:url2 options:nil];
    AVAssetTrack *clipAudioTrack2 = [[avAsset2 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    [compositionAudioTrack2 insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset2.duration) ofTrack:clipAudioTrack2 atTime:kCMTimeZero error:&error];

    // Create the export session and write the composition out as a single M4A file.
    AVAssetExportSession *exportSession = [AVAssetExportSession exportSessionWithAsset:composition presetName:AVAssetExportPresetAppleM4A];
    if (nil == exportSession) return NO;

    NSString *soundOneNew = [documentsDirectory stringByAppendingPathComponent:@"combined10.m4a"];
    //NSLog(@"Output file path - %@", soundOneNew);
    exportSession.outputURL = [NSURL fileURLWithPath:soundOneNew]; // output path
    exportSession.outputFileType = AVFileTypeAppleM4A;             // output file type

    // Perform the export.
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        if (AVAssetExportSessionStatusCompleted == exportSession.status) {
            NSLog(@"AVAssetExportSessionStatusCompleted");
        } else if (AVAssetExportSessionStatusFailed == exportSession.status) {
            // A failure may happen because of an event out of your control,
            // for example an interruption such as an incoming phone call.
            // Make sure you handle this case appropriately.
            NSLog(@"AVAssetExportSessionStatusFailed");
        } else {
            NSLog(@"Export Session Status: %ld", (long)exportSession.status);
        }
    }];
    return YES;
}

Related

Merging Audio with Video Objective-C

I'm attempting to merge an audio file with a video file using:
+ (void)CompileFilesToMakeMovie:(NSString *)audioPath {
    NSLog(@"a");
    AVMutableComposition *mixComposition = [AVMutableComposition composition];
    NSURL *audio_inputFileUrl = [[NSURL alloc] initFileURLWithPath:audioPath];
    NSString *videoPath = [[NSBundle bundleForClass:[self class]] pathForResource:@"input" ofType:@"mov"];
    NSURL *video_inputFileUrl = [NSURL fileURLWithPath:videoPath];
    NSArray *dirPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *docsDir = [dirPaths objectAtIndex:0];
    NSString *outputFilePath = [docsDir stringByAppendingPathComponent:@"OutputFile.mov"];
    NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
        [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
    NSLog(@"b");
    CMTime nextClipStartTime = kCMTimeZero;
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:video_inputFileUrl options:nil];
    CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
    AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
    NSLog(@"c");
    AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audio_inputFileUrl options:nil];
    CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
    AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];
    NSLog(@"d");
    AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    _assetExport.outputFileType = @"com.apple.quicktime-movie";
    _assetExport.outputURL = outputFileUrl;
    [_assetExport exportAsynchronouslyWithCompletionHandler:
     ^(void) {
         UISaveVideoAtPathToSavedPhotosAlbum(outputFilePath, nil, nil, nil);
     }];
}
But I am getting an error after "c" is logged:
'NSRangeException', reason: '-[__NSArrayM objectAtIndex:]: index 0 beyond bounds for empty array'.
The audio file is located at
NSArray *dirPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *docsDir = [dirPaths objectAtIndex:0];
NSString *soundFilePath = [docsDir stringByAppendingPathComponent:@"%1.iaff"];
Any tips?
Thanks.
First of all, I should say the audio file extension you have given is incorrect:
NSString *soundFilePath = [docsDir stringByAppendingPathComponent:@"%1.iaff"];
Make it .aiff:
NSString *soundFilePath = [docsDir stringByAppendingPathComponent:@"1.aiff"];
If you are not sure about the code you used, this code works for me:
-(void)mergeAudio:(NSURL *)audioURL withVideo:(NSURL *)videoURL andSaveToPathUrl:(NSString *)savePath {
    AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audioURL options:nil];
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:videoURL options:nil];
    AVMutableComposition *mixComposition = [AVMutableComposition composition];

    // Audio track.
    AVMutableCompositionTrack *compositionCommentaryTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                                         preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionCommentaryTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAsset.duration)
                                        ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                                         atTime:kCMTimeZero error:nil];

    // Video track (note: the video is inserted for the audio asset's duration).
    AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAsset.duration)
                                   ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                                    atTime:kCMTimeZero error:nil];

    AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                          presetName:AVAssetExportPresetPassthrough];
    NSURL *saveUrl = [NSURL fileURLWithPath:savePath];
    if ([[NSFileManager defaultManager] fileExistsAtPath:savePath])
    {
        [[NSFileManager defaultManager] removeItemAtPath:savePath error:nil];
    }
    _assetExport.outputFileType = @"com.apple.quicktime-movie";
    _assetExport.outputURL = saveUrl;
    _assetExport.shouldOptimizeForNetworkUse = YES;
    [_assetExport exportAsynchronouslyWithCompletionHandler:
     ^(void) {
         NSLog(@"fileSaved !");
     }];
}
Pass in the videoURL, the audioURL and the save path for the final file.
This happens because the audio or the video file has not yet been completely written when you try to merge them. Use completion handlers to make sure the audio and video files at the given paths have been fully written before you start the merge.
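As a defensive check (a sketch, not part of the original answer), you can also verify that the asset actually contains an audio track before indexing into the tracks array - an empty array is exactly what raises the NSRangeException:
// Guard against the empty-track case before calling objectAtIndex:0.
AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audio_inputFileUrl options:nil];
NSArray *audioTracks = [audioAsset tracksWithMediaType:AVMediaTypeAudio];
if (audioTracks.count == 0) {
    // The file is missing, still being written, or in a format AVFoundation cannot read.
    NSLog(@"No audio track found at %@", audio_inputFileUrl);
    return;
}
AVAssetTrack *audioTrack = [audioTracks objectAtIndex:0];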

Is it possible to combine 4 sound track in Single track? [duplicate]

I want to merge a .caf file and an .mp3 file into a single .mp3 file on the iPhone, or I can convert them to .aac and then merge them. How can I do this?
(Just like karaoke: I want to merge my voice and the music together.)
You need to decode both files to LPCM (plain old ints), add/mix them together then re-encode. The iPhone SDK does not support MP3 encoding, so you should re-encode to AAC.
The Apple sample code iPhoneExtAudioFileConvertTest would be a good place to start.
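The mixing step itself is just adding samples. A minimal sketch of that step, assuming both files have already been decoded to 16-bit LPCM at the same sample rate and channel count (the helper name is hypothetical), might look like this:
// Mix two 16-bit LPCM buffers of equal length by summing samples with clipping.
static void MixPCM16(const SInt16 *a, const SInt16 *b, SInt16 *out, size_t sampleCount)
{
    for (size_t i = 0; i < sampleCount; i++) {
        SInt32 sum = (SInt32)a[i] + (SInt32)b[i];
        if (sum > INT16_MAX) sum = INT16_MAX;   // clip instead of wrapping around
        if (sum < INT16_MIN) sum = INT16_MIN;
        out[i] = (SInt16)sum;
    }
}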
You can use this method
- (BOOL)combineVoices1
{
    NSError *error = nil;
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];

    // Create the AVMutableComposition. It will hold our multiple AVMutableCompositionTracks.
    AVMutableComposition *composition = [[AVMutableComposition alloc] init];

    // First clip on its own track.
    AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionAudioTrack setPreferredVolume:0.8];
    NSString *soundOne = [[NSBundle mainBundle] pathForResource:@"test1" ofType:@"caf"];
    NSURL *url = [NSURL fileURLWithPath:soundOne];
    AVAsset *avAsset = [AVURLAsset URLAssetWithURL:url options:nil];
    AVAssetTrack *clipAudioTrack = [[avAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset.duration) ofTrack:clipAudioTrack atTime:kCMTimeZero error:&error];

    // Second clip on a second track, so it plays in parallel with the first.
    AVMutableCompositionTrack *compositionAudioTrack1 = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionAudioTrack1 setPreferredVolume:0.3];
    NSString *soundOne1 = [[NSBundle mainBundle] pathForResource:@"test" ofType:@"caf"];
    NSURL *url1 = [NSURL fileURLWithPath:soundOne1];
    AVAsset *avAsset1 = [AVURLAsset URLAssetWithURL:url1 options:nil];
    AVAssetTrack *clipAudioTrack1 = [[avAsset1 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    [compositionAudioTrack1 insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset1.duration) ofTrack:clipAudioTrack1 atTime:kCMTimeZero error:&error];

    // Third clip on a third track.
    AVMutableCompositionTrack *compositionAudioTrack2 = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionAudioTrack2 setPreferredVolume:1.0];
    NSString *soundOne2 = [[NSBundle mainBundle] pathForResource:@"song" ofType:@"caf"];
    NSURL *url2 = [NSURL fileURLWithPath:soundOne2];
    AVAsset *avAsset2 = [AVURLAsset URLAssetWithURL:url2 options:nil];
    AVAssetTrack *clipAudioTrack2 = [[avAsset2 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    [compositionAudioTrack2 insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset2.duration) ofTrack:clipAudioTrack2 atTime:kCMTimeZero error:&error];

    // Create the export session and write the composition out as a single M4A file.
    AVAssetExportSession *exportSession = [AVAssetExportSession exportSessionWithAsset:composition presetName:AVAssetExportPresetAppleM4A];
    if (nil == exportSession) return NO;

    NSString *soundOneNew = [documentsDirectory stringByAppendingPathComponent:@"combined10.m4a"];
    //NSLog(@"Output file path - %@", soundOneNew);
    exportSession.outputURL = [NSURL fileURLWithPath:soundOneNew]; // output path
    exportSession.outputFileType = AVFileTypeAppleM4A;             // output file type

    // Perform the export.
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        if (AVAssetExportSessionStatusCompleted == exportSession.status) {
            NSLog(@"AVAssetExportSessionStatusCompleted");
        } else if (AVAssetExportSessionStatusFailed == exportSession.status) {
            // A failure may happen because of an event out of your control,
            // for example an interruption such as an incoming phone call.
            // Make sure you handle this case appropriately.
            NSLog(@"AVAssetExportSessionStatusFailed");
        } else {
            NSLog(@"Export Session Status: %ld", (long)exportSession.status);
        }
    }];
    return YES;
}
You can use the SDAVAssetExportSession library with the following code to export many files to AAC:
-(void)mergeAudioFiles
{
    NSFileManager *fm = [[NSFileManager alloc] init];
    NSError *error;
    NSArray *filesNames = ...;               // NSArray of input file paths (placeholder)
    NSString *filePath = @"Dest File Name";  // destination file name (placeholder)
    NSString *pathToSave = [NSString stringWithFormat:@"%@%@", filePath, @".m4a"];

    // Append every input file, one after the other, onto a single composition track.
    CMTime startTime = kCMTimeZero;
    AVMutableComposition *composition = [AVMutableComposition composition];
    AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    float audioEndTime = 0;
    for (NSString *fileName in filesNames) {
        NSURL *audioUrl = [NSURL fileURLWithPath:fileName];
        AVURLAsset *audioasset = [[AVURLAsset alloc] initWithURL:audioUrl options:nil];
        CMTimeRange timeRange = CMTimeRangeMake(kCMTimeZero, audioasset.duration);
        AVAssetTrack *audioAssetTrack = [[audioasset tracksWithMediaType:AVMediaTypeAudio] lastObject];
        [compositionAudioTrack insertTimeRange:timeRange ofTrack:audioAssetTrack atTime:startTime error:&error];
        startTime = CMTimeAdd(startTime, timeRange.duration);

        CMTime assetTime2 = [audioasset duration];
        Float64 duration2 = CMTimeGetSeconds(assetTime2);
        audioEndTime += duration2;
    }

    NSURL *exportUrl = [NSURL fileURLWithPath:pathToSave];
    float audioStartTime = 0;
    CMTime startTime1 = CMTimeMake((int)(floor(audioStartTime * 100)), 100);
    CMTime stopTime = CMTimeMake((int)(ceil(audioEndTime * 100)), 100);
    CMTimeRange exportTimeRange = CMTimeRangeFromTimeToTime(startTime1, stopTime);

    SDAVAssetExportSession *encoder = [[SDAVAssetExportSession alloc] initWithAsset:composition];
    encoder.outputFileType = AVFileTypeAppleM4A;
    encoder.outputURL = exportUrl;
    encoder.audioSettings = @{
        AVFormatIDKey: @(kAudioFormatMPEG4AAC),
        AVNumberOfChannelsKey: @2,
        AVSampleRateKey: @44100,
        AVEncoderBitRateKey: @128000,
    };
    encoder.timeRange = exportTimeRange;

    dispatch_semaphore_t semaphore = dispatch_semaphore_create(0);
    NSLog(@"Starting Audio Merge");
    [encoder exportAsynchronouslyWithCompletionHandler:^
    {
        if (encoder.status == AVAssetExportSessionStatusCompleted)
        {
            NSLog(@"Audio Merge succeeded");
            NSError *err = NULL;
            BOOL result = [fm moveItemAtPath:pathToSave toPath:filePath error:&err];
            if (!result) {
                NSLog(@"Error: %@", err);
            }
            NSLog(@"Audio Copied");
        } else if (encoder.status == AVAssetExportSessionStatusCancelled) {
            NSLog(@"Audio export cancelled");
        } else {
            NSLog(@"Audio export failed with error: %@ (%ld)", encoder.error.localizedDescription, (long)encoder.error.code);
        }
        dispatch_semaphore_signal(semaphore);
    }];
    NSLog(@"Audio Wait to Finish");
    dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER);

    // Cleanup: delete the input files.
    for (NSString *fileName in filesNames) {
        [fm removeItemAtPath:fileName error:&error];
    }
    NSLog(@"Audio Merge Finished");
}
Code for merging two audio files:
-(void)mergeTwoAudioFile
{
    NSError *error;
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [[paths objectAtIndex:0] stringByAppendingPathComponent:@"Music_Directory"];

    // First audio file.
    NSString *firstPath = [documentsDirectory stringByAppendingPathComponent:@"audio01.m4a"];
    NSURL *audioUrl1 = [NSURL fileURLWithPath:firstPath];
    AVMutableComposition *mixComposition = [AVMutableComposition composition];
    AVURLAsset *audioasset1 = [[AVURLAsset alloc] initWithURL:audioUrl1 options:nil];
    AVAssetTrack *audioAssetTrack1 = [[audioasset1 tracksWithMediaType:AVMediaTypeAudio] lastObject];
    AVMutableCompositionTrack *audioCompositionTrack1 = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    CMTime tempTime1 = mixComposition.duration;
    [audioCompositionTrack1 insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioasset1.duration) ofTrack:audioAssetTrack1 atTime:tempTime1 error:&error];

    // Second audio file.
    NSString *secondPath = [documentsDirectory stringByAppendingPathComponent:@"audio02.m4a"];
    NSURL *audioUrl2 = [NSURL fileURLWithPath:secondPath];
    AVURLAsset *audioasset2 = [[AVURLAsset alloc] initWithURL:audioUrl2 options:nil];
    AVAssetTrack *audioAssetTrack2 = [[audioasset2 tracksWithMediaType:AVMediaTypeAudio] lastObject];
    AVMutableCompositionTrack *audioCompositionTrack2 = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    CMTime tempTime2 = mixComposition.duration;
    [audioCompositionTrack2 insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioasset2.duration) ofTrack:audioAssetTrack2 atTime:tempTime2 error:&error];

    NSString *pathToSave = [documentsDirectory stringByAppendingPathComponent:@"finalTest.m4a"];
    NSURL *movieUrl = [NSURL fileURLWithPath:pathToSave];
    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetAppleM4A];
    exporter.shouldOptimizeForNetworkUse = YES;
    exporter.outputURL = movieUrl;
    exporter.outputFileType = AVFileTypeAppleM4A;

    // Export range: from 0 to the combined duration of both assets.
    float audioStartTime = 0;
    CMTime assetTime1 = [audioasset1 duration];
    Float64 duration1 = CMTimeGetSeconds(assetTime1);
    CMTime assetTime2 = [audioasset2 duration];
    Float64 duration2 = CMTimeGetSeconds(assetTime2);
    float audioEndTime = duration1 + duration2;
    CMTime startTime = CMTimeMake((int)(floor(audioStartTime * 100)), 100);
    CMTime stopTime = CMTimeMake((int)(ceil(audioEndTime * 100)), 100);
    CMTimeRange exportTimeRange = CMTimeRangeFromTimeToTime(startTime, stopTime);
    exporter.timeRange = exportTimeRange;

    [exporter exportAsynchronouslyWithCompletionHandler:^(void) {
        NSString *message;
        switch (exporter.status) {
            case AVAssetExportSessionStatusFailed:
                message = [NSString stringWithFormat:@"Export failed. Error: %@", exporter.error.description];
                NSLog(@"%@", message);
                break;
            case AVAssetExportSessionStatusCompleted:
                message = @"Export completed";
                NSLog(@"%@", message);
                break;
            case AVAssetExportSessionStatusCancelled:
                message = @"Export cancelled!";
                NSLog(@"%@", message);
                break;
            default:
                NSLog(@"Export unhandled status: %ld", (long)exporter.status);
                break;
        }
    }];
}

How to concatenate 2 or 3 audio files in iOS?

I am a newcomer to Objective-C with only 5 months of experience in iPhone development.
What I need:
I need to concatenate 2 or more audio files into one, and export the result in aiff, mp3, caf or m4a format.
For example:
The first audio file contains "You need", the second "download" and the third "document".
Which audio parts are used depends on the user's actions.
I have spent 2 days on this without luck. This is my last resort.
I would really appreciate a piece of code.
The code below can be used to merge audio files.
Input files: the IDs of the files to merge are supplied in the array audioIds.
E.g. audio1.mp3, audio2.mp3 … audioN.mp3 must be available in the Documents folder.
Output file: combined.m4a
- (BOOL)combineVoices
{
    NSError *error = nil;
    BOOL ok = NO;
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];
    CMTime nextClipStartTime = kCMTimeZero;

    // Create the AVMutableComposition. It will hold our single AVMutableCompositionTrack.
    AVMutableComposition *composition = [[AVMutableComposition alloc] init];
    AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

    for (int i = 0; i < [self.audioIds count]; i++) {
        int key = [[self.audioIds objectAtIndex:i] intValue];
        NSString *audioFileName = [NSString stringWithFormat:@"audio%d", key];

        // Build the file name with path.
        NSString *soundOne = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.mp3", audioFileName]];
        //NSLog(@"voice file - %@", soundOne);

        NSURL *url = [NSURL fileURLWithPath:soundOne];
        AVAsset *avAsset = [AVURLAsset URLAssetWithURL:url options:nil];
        NSArray *tracks = [avAsset tracksWithMediaType:AVMediaTypeAudio];
        if ([tracks count] == 0)
            return NO;

        // Append this clip at the end of the composition track.
        CMTimeRange timeRangeInAsset = CMTimeRangeMake(kCMTimeZero, [avAsset duration]);
        AVAssetTrack *clipAudioTrack = [[avAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
        ok = [compositionAudioTrack insertTimeRange:timeRangeInAsset ofTrack:clipAudioTrack atTime:nextClipStartTime error:&error];
        if (!ok) {
            NSLog(@"Current audio track error: %@", error);
        }
        nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRangeInAsset.duration);
    }

    // Create the export session.
    // No need for a retain here; the session will be retained by the
    // completion handler since it is referenced there.
    AVAssetExportSession *exportSession = [AVAssetExportSession exportSessionWithAsset:composition presetName:AVAssetExportPresetAppleM4A];
    if (nil == exportSession) return NO;

    NSString *soundOneNew = [documentsDirectory stringByAppendingPathComponent:@"combined.m4a"];
    //NSLog(@"Output file path - %@", soundOneNew);

    // Configure export session output with all our parameters.
    exportSession.outputURL = [NSURL fileURLWithPath:soundOneNew]; // output path
    exportSession.outputFileType = AVFileTypeAppleM4A;             // output file type

    // Perform the export.
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        if (AVAssetExportSessionStatusCompleted == exportSession.status) {
            NSLog(@"AVAssetExportSessionStatusCompleted");
        } else if (AVAssetExportSessionStatusFailed == exportSession.status) {
            // A failure may happen because of an event out of your control,
            // for example an interruption such as an incoming phone call.
            // Make sure you handle this case appropriately.
            NSLog(@"AVAssetExportSessionStatusFailed");
        } else {
            NSLog(@"Export Session Status: %ld", (long)exportSession.status);
        }
    }];
    return YES;
}
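A hypothetical call site (assuming audioIds is an NSArray property holding NSNumber IDs) might look like this:
// Merge audio1.mp3, audio3.mp3 and audio7.mp3 from the Documents folder into combined.m4a.
self.audioIds = @[@1, @3, @7];
if (![self combineVoices]) {
    NSLog(@"Could not start the merge - check that every input file exists and has an audio track");
}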
You can use this method to merge 3 sounds together.
- (BOOL)combineVoices1
{
    NSError *error = nil;
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];

    // Create the AVMutableComposition. It will hold our multiple AVMutableCompositionTracks.
    AVMutableComposition *composition = [[AVMutableComposition alloc] init];

    // First clip on its own track.
    AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionAudioTrack setPreferredVolume:0.8];
    NSString *soundOne = [[NSBundle mainBundle] pathForResource:@"test1" ofType:@"caf"];
    NSURL *url = [NSURL fileURLWithPath:soundOne];
    AVAsset *avAsset = [AVURLAsset URLAssetWithURL:url options:nil];
    AVAssetTrack *clipAudioTrack = [[avAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset.duration) ofTrack:clipAudioTrack atTime:kCMTimeZero error:&error];

    // Second clip on a second track, so it plays in parallel with the first.
    AVMutableCompositionTrack *compositionAudioTrack1 = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionAudioTrack1 setPreferredVolume:0.3];
    NSString *soundOne1 = [[NSBundle mainBundle] pathForResource:@"test" ofType:@"caf"];
    NSURL *url1 = [NSURL fileURLWithPath:soundOne1];
    AVAsset *avAsset1 = [AVURLAsset URLAssetWithURL:url1 options:nil];
    AVAssetTrack *clipAudioTrack1 = [[avAsset1 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    [compositionAudioTrack1 insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset1.duration) ofTrack:clipAudioTrack1 atTime:kCMTimeZero error:&error];

    // Third clip on a third track.
    AVMutableCompositionTrack *compositionAudioTrack2 = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionAudioTrack2 setPreferredVolume:1.0];
    NSString *soundOne2 = [[NSBundle mainBundle] pathForResource:@"song" ofType:@"caf"];
    NSURL *url2 = [NSURL fileURLWithPath:soundOne2];
    AVAsset *avAsset2 = [AVURLAsset URLAssetWithURL:url2 options:nil];
    AVAssetTrack *clipAudioTrack2 = [[avAsset2 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    [compositionAudioTrack2 insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset2.duration) ofTrack:clipAudioTrack2 atTime:kCMTimeZero error:&error];

    // Create the export session and write the composition out as a single M4A file.
    AVAssetExportSession *exportSession = [AVAssetExportSession exportSessionWithAsset:composition presetName:AVAssetExportPresetAppleM4A];
    if (nil == exportSession) return NO;

    NSString *soundOneNew = [documentsDirectory stringByAppendingPathComponent:@"combined10.m4a"];
    //NSLog(@"Output file path - %@", soundOneNew);
    exportSession.outputURL = [NSURL fileURLWithPath:soundOneNew]; // output path
    exportSession.outputFileType = AVFileTypeAppleM4A;             // output file type

    // Perform the export.
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        if (AVAssetExportSessionStatusCompleted == exportSession.status) {
            NSLog(@"AVAssetExportSessionStatusCompleted");
        } else if (AVAssetExportSessionStatusFailed == exportSession.status) {
            // A failure may happen because of an event out of your control,
            // for example an interruption such as an incoming phone call.
            // Make sure you handle this case appropriately.
            NSLog(@"AVAssetExportSessionStatusFailed");
        } else {
            NSLog(@"Export Session Status: %ld", (long)exportSession.status);
        }
    }];
    return YES;
}
You should probably look into this post for doing the same:
Combine two audio files into one in objective c
The answer is similar to what Dimitar has suggested, but there are two important things to keep in mind: first, it works only for the mp3 format, and second, the bit rate of all the files you are trying to concatenate should be the same, or else only part of your file will play in the final output. Playback will stop where the bit rate changes.
SSteve - files such as WAV files have their own headers, and if you just write one file after another, only the first file will play and playback will then stop, even though the file info shows a larger file size. This is because the header of the first file has not been updated to cover the appended data.
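For illustration only (not from the original answer): if you did concatenate the raw PCM data of two canonical 44-byte-header WAV files, you would still have to patch two little-endian size fields in the leading header afterwards, roughly like this:
// Sketch: patch the header of a naively concatenated canonical PCM WAV file.
// Assumes a plain 44-byte header with no extra chunks and a little-endian host (iOS is little-endian).
// combinedPath is a hypothetical path to the concatenated file.
NSMutableData *wav = [NSMutableData dataWithContentsOfFile:combinedPath];
uint32_t riffSize = (uint32_t)wav.length - 8;   // RIFF chunk size, bytes 4..7
uint32_t dataSize = (uint32_t)wav.length - 44;  // data chunk size, bytes 40..43
[wav replaceBytesInRange:NSMakeRange(4, 4) withBytes:&riffSize];
[wav replaceBytesInRange:NSMakeRange(40, 4) withBytes:&dataSize];
[wav writeToFile:combinedPath atomically:YES];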
I have taken two different mp3 files, each on its own AVMutableCompositionTrack, and both tracks are stored in the same AVMutableComposition.
When I press the button, the path of the new file is printed to the console.
-(IBAction)play
{
    [self mixAudio];
}

-(void)mixAudio
{
    CFAbsoluteTime currentTime = CFAbsoluteTimeGetCurrent();
    AVMutableComposition *composition = [[AVMutableComposition alloc] init];

    // First clip on its own track.
    AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionAudioTrack setPreferredVolume:0.8];
    NSString *soundOne = [[NSBundle mainBundle] pathForResource:@"KICK1" ofType:@"mp3"];
    NSURL *url = [NSURL fileURLWithPath:soundOne];
    AVAsset *avAsset = [AVURLAsset URLAssetWithURL:url options:nil];
    NSArray *tracks = [avAsset tracksWithMediaType:AVMediaTypeAudio];
    AVAssetTrack *clipAudioTrack = [tracks objectAtIndex:0];
    [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset.duration) ofTrack:clipAudioTrack atTime:kCMTimeZero error:nil];

    // Second clip on a second track.
    AVMutableCompositionTrack *compositionAudioTrack1 = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionAudioTrack1 setPreferredVolume:0.8];
    NSString *soundOne1 = [[NSBundle mainBundle] pathForResource:@"KICK2" ofType:@"mp3"];
    NSURL *url1 = [NSURL fileURLWithPath:soundOne1];
    AVAsset *avAsset1 = [AVURLAsset URLAssetWithURL:url1 options:nil];
    NSArray *tracks1 = [avAsset1 tracksWithMediaType:AVMediaTypeAudio];
    AVAssetTrack *clipAudioTrack1 = [tracks1 objectAtIndex:0];
    [compositionAudioTrack1 insertTimeRange:CMTimeRangeMake(kCMTimeZero, avAsset1.duration) ofTrack:clipAudioTrack1 atTime:kCMTimeZero error:nil];

    // Export to <app>/Library/output.m4a.
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSLibraryDirectory, NSUserDomainMask, YES);
    NSString *libraryDirectory = [paths objectAtIndex:0];
    NSString *requiredOutputPath = [libraryDirectory stringByAppendingPathComponent:@"output.m4a"];
    NSURL *audioFileOutput = [NSURL fileURLWithPath:requiredOutputPath];
    [[NSFileManager defaultManager] removeItemAtURL:audioFileOutput error:NULL];

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetAppleM4A];
    exporter.outputURL = audioFileOutput;
    exporter.outputFileType = AVFileTypeAppleM4A;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        NSLog(@"Output path is \n %@", requiredOutputPath);
        NSLog(@"export complete: %lf", CFAbsoluteTimeGetCurrent() - currentTime);

        // Play the exported file.
        NSError *error;
        audioPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:audioFileOutput error:&error];
        audioPlayer.numberOfLoops = 0;
        [audioPlayer play];
    }];
}
Have you tried something like this:
AudioFileCreateWithURL      // create the output file
For each input file:
    AudioFileOpenURL        // open the input file
    repeat
        AudioFileReadBytes  // read from the input file
        AudioFileWriteBytes // write to the output file
    until eof(input file)
    AudioFileClose          // close the input file
AudioFileClose              // close the output file
This would probably require that the input files are all the same format and would create the output file in that same format. If you need to convert the format, that might be better done after creating the output file.
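A rough sketch of that approach using Audio File Services (this helper is hypothetical; it assumes all inputs are LPCM CAF files sharing the exact same format, and compressed formats such as AAC or MP3 would need the packet-based read/write calls instead):
// Requires the AudioToolbox framework (#import <AudioToolbox/AudioToolbox.h>).
- (BOOL)concatenateAudioFiles:(NSArray *)inputPaths toFile:(NSString *)outputPath
{
    AudioFileID outputFile = NULL;
    SInt64 writeOffset = 0;

    for (NSString *inputPath in inputPaths) {
        NSURL *inputURL = [NSURL fileURLWithPath:inputPath];
        AudioFileID inputFile = NULL;
        if (AudioFileOpenURL((__bridge CFURLRef)inputURL, kAudioFileReadPermission, 0, &inputFile) != noErr) return NO;

        // Use the first file's data format for the output file.
        if (outputFile == NULL) {
            AudioStreamBasicDescription format;
            UInt32 size = sizeof(format);
            AudioFileGetProperty(inputFile, kAudioFilePropertyDataFormat, &size, &format);
            NSURL *outputURL = [NSURL fileURLWithPath:outputPath];
            if (AudioFileCreateWithURL((__bridge CFURLRef)outputURL, kAudioFileCAFType, &format,
                                       kAudioFileFlags_EraseFile, &outputFile) != noErr) {
                AudioFileClose(inputFile);
                return NO;
            }
        }

        // Copy the audio data bytes in 32 KB chunks.
        char buffer[32 * 1024];
        SInt64 readOffset = 0;
        while (YES) {
            UInt32 bytesRead = sizeof(buffer);
            OSStatus status = AudioFileReadBytes(inputFile, false, readOffset, &bytesRead, buffer);
            if (bytesRead == 0) break;                      // nothing left in this input file
            AudioFileWriteBytes(outputFile, false, writeOffset, &bytesRead, buffer);
            readOffset += bytesRead;
            writeOffset += bytesRead;
            if (status == kAudioFileEndOfFileError) break;  // short read on the last chunk
        }
        AudioFileClose(inputFile);
    }

    if (outputFile) AudioFileClose(outputFile);
    return YES;
}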
The easiest way to implement concatenation of multiple AAC files:
- (NSString *)concatenatedAACVoicesPath {
    NSMutableData *concatenatedData = [[NSMutableData alloc] init];
    NSArray *aacPathArr = [self queryAAC];
    for (NSString *path in aacPathArr) {
        NSData *data = [[NSData alloc] initWithContentsOfFile:path];
        [concatenatedData appendData:data];
    }
    NSString *fileNamePath = [NSString stringWithFormat:@"%@/%@.aac", [NSString createPath:currRecordDocName], currRecordDocName];
    [concatenatedData writeToFile:fileNamePath atomically:YES];
    return fileNamePath;
}
I have an idea, though I'm not sure it will work. Get NSData from these 3 files, append the data into another NSData and then write it out. Something like:
NSMutableData *concatenatedData = [[NSMutableData alloc] init];
NSData *data1 = [[NSData alloc] initWithContentsOfFile:(NSString *)path];
NSData *data2 = [[NSData alloc] initWithContentsOfFile:(NSString *)path];
NSData *data3 = [[NSData alloc] initWithContentsOfFile:(NSString *)path];
[concatenatedData appendData:data1];
[concatenatedData appendData:data2];
[concatenatedData appendData:data3];
[concatenatedData writeToFile:@"/path/to/concatenatedData.mp3" atomically:YES];
It's a theory, and I'm not sure it will work :). It does work if I open an mp3 in a hex editor, copy everything and paste it at the end - then I have the same sound twice. Please try it and let us know if it works.

iPhone AVComposition issue

I'm trying to create a video that shows two videos one after the other using AVComposition on the iPhone. This code works; however, I can only see one of the videos for the entire duration of the newly created video.
- (void)startEdit {
    AVMutableComposition *mixComposition = [AVMutableComposition composition];

    NSString *a_inputFileName = @"export.mov";
    NSString *a_inputFilePath = [NSTemporaryDirectory() stringByAppendingPathComponent:a_inputFileName];
    NSURL *a_inputFileUrl = [NSURL fileURLWithPath:a_inputFilePath];

    NSString *b_inputFileName = @"output.mov";
    NSString *b_inputFilePath = [NSTemporaryDirectory() stringByAppendingPathComponent:b_inputFileName];
    NSURL *b_inputFileUrl = [NSURL fileURLWithPath:b_inputFilePath];

    NSString *outputFileName = @"outputFile.mov";
    NSString *outputFilePath = [NSTemporaryDirectory() stringByAppendingPathComponent:outputFileName];
    NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
        [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];

    CMTime nextClipStartTime = kCMTimeZero;
    AVURLAsset *a_videoAsset = [[AVURLAsset alloc] initWithURL:a_inputFileUrl options:nil];
    CMTimeRange a_timeRange = CMTimeRangeMake(kCMTimeZero, a_videoAsset.duration);
    AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [a_compositionVideoTrack insertTimeRange:a_timeRange ofTrack:[[a_videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
    nextClipStartTime = CMTimeAdd(nextClipStartTime, a_timeRange.duration);

    AVURLAsset *b_videoAsset = [[AVURLAsset alloc] initWithURL:b_inputFileUrl options:nil];
    CMTimeRange b_timeRange = CMTimeRangeMake(kCMTimeZero, b_videoAsset.duration);
    AVMutableCompositionTrack *b_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [b_compositionVideoTrack insertTimeRange:b_timeRange ofTrack:[[b_videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];

    AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetLowQuality];
    _assetExport.outputFileType = @"com.apple.quicktime-movie";
    _assetExport.outputURL = outputFileUrl;
    [_assetExport exportAsynchronouslyWithCompletionHandler:
     ^(void) {
         [self saveVideoToAlbum:outputFilePath];
     }];
}

- (void)saveVideoToAlbum:(NSString *)path {
    if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(path)) {
        UISaveVideoAtPathToSavedPhotosAlbum(path, self, @selector(video:didFinishSavingWithError:contextInfo:), nil);
    }
}

- (void)video:(NSString *)videoPath didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo {
    NSLog(@"Finished saving video with error: %@", error);
}
I've posted the whole code as it may help someone else.
Shouldn't
nextClipStartTime = CMTimeAdd(nextClipStartTime, a_timeRange.duration);
[b_compositionVideoTrack insertTimeRange:b_timeRange ofTrack:[[b_videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
add the second video to the end of the first?
Cheers
Figured it out. Should have only had one AVMutableCompositionTrack.
Like so:
CMTime nextClipStartTime = kCMTimeZero;
AVURLAsset* a_videoAsset = [[AVURLAsset alloc]initWithURL:a_inputFileUrl options:nil];
CMTimeRange a_timeRange = CMTimeRangeMake(kCMTimeZero,a_videoAsset.duration);
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:a_timeRange ofTrack:[[a_videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
nextClipStartTime = CMTimeAdd(nextClipStartTime, a_timeRange.duration);
AVURLAsset* b_videoAsset = [[AVURLAsset alloc]initWithURL:b_inputFileUrl options:nil];
CMTimeRange b_timeRange = CMTimeRangeMake(kCMTimeZero, b_videoAsset.duration);
[a_compositionVideoTrack insertTimeRange:b_timeRange ofTrack:[[b_videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
I haven't spotted the flaw yet, but I do have a couple of suggestions:
First, capture the error in insertTimeRange and every other opportunity and inspect it.
Second, for the simple case of appending videos, you can use AVMutableComposition without so much track mucking. Use "insertTimeRange:ofAsset:atTime:error:" with AVAssets initialized from the files and you will simplify your code greatly. If you need to do something more complicated such as crossfades, you'll need to use a video composition and audio mix as well, and at that point you can deal with the complexity of tracks.
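A minimal sketch of that asset-level approach (reusing the a_inputFileUrl and b_inputFileUrl names from the question) might look like this:
NSError *error = nil;
AVMutableComposition *mixComposition = [AVMutableComposition composition];
AVURLAsset *assetA = [AVURLAsset URLAssetWithURL:a_inputFileUrl options:nil];
AVURLAsset *assetB = [AVURLAsset URLAssetWithURL:b_inputFileUrl options:nil];

// Insert whole assets back to back; the composition creates and manages the tracks itself.
[mixComposition insertTimeRange:CMTimeRangeMake(kCMTimeZero, assetA.duration) ofAsset:assetA atTime:kCMTimeZero error:&error];
[mixComposition insertTimeRange:CMTimeRangeMake(kCMTimeZero, assetB.duration) ofAsset:assetB atTime:mixComposition.duration error:&error];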
