How to change a video's (.mp4) meta-info using the AVAssetWriter API?
I don't want to re-encode; I only want to modify the video's meta-info.
How should I continue from the following code?
AVAssetWriter *writer = [AVAssetWriter assetWriterWithURL:[NSURL URLWithString:myPath] fileType:AVFileTypeQuickTimeMovie error:nil];
If I'm mistaken, please give me a hint.
Thanks!!
Refer to the following code.
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:@"your path"] options:nil];
NSMutableArray *metadata = [NSMutableArray array];
AVMutableMetadataItem *metaItem = [AVMutableMetadataItem metadataItem];
metaItem.key = AVMetadataCommonKeyPublisher;
metaItem.keySpace = AVMetadataKeySpaceCommon;
metaItem.value = @"your_value";
[metadata addObject:metaItem];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetPassthrough];
exportSession.outputURL = [NSURL fileURLWithPath:@"your output path"];
CMTime start = CMTimeMakeWithSeconds(0.0, BASIC_TIMESCALE); // BASIC_TIMESCALE: your own timescale constant (e.g. 600)
CMTimeRange range = CMTimeRangeMake(start, [asset duration]);
exportSession.timeRange = range;
exportSession.outputFileType = AVFileTypeAppleM4V; // or AVFileTypeMPEG4 / AVFileTypeQuickTimeMovie, depending on the container you want
exportSession.metadata = metadata;
exportSession.shouldOptimizeForNetworkUse = YES;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
switch ([exportSession status])
{
case AVAssetExportSessionStatusCompleted:
    NSLog(@"Export success");
    break;
case AVAssetExportSessionStatusFailed:
    NSLog(@"Export failed: %@", [[exportSession error] localizedDescription]);
    break;
case AVAssetExportSessionStatusCancelled:
    NSLog(@"Export canceled");
    break;
default:
    break;
}
}];
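If you want to keep the file's existing metadata and only change one entry, here is a minimal sketch using the same asset and exportSession as above (assuming iOS 8 or later for -[AVAsset metadata]; on older SDKs, asset.commonMetadata can serve the same purpose):
// Start from the asset's existing metadata so only the publisher entry changes.
NSMutableArray *mergedMetadata = [[asset metadata] mutableCopy];
AVMutableMetadataItem *publisherItem = [AVMutableMetadataItem metadataItem];
publisherItem.key = AVMetadataCommonKeyPublisher;
publisherItem.keySpace = AVMetadataKeySpaceCommon;
publisherItem.value = @"your_value";
[mergedMetadata addObject:publisherItem];
exportSession.metadata = mergedMetadata;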
I am trying to make one long sound file by concatenating several smaller sound files - all are in the m4a format, and the result should also be in the m4a format.
Here is the code (the audFiles array holds the names of the sound files to join).
Note I only print the file sizes for peace of mind...
CMTime nextClipStartTime = kCMTimeZero;
AVMutableComposition *combinedSounds = [AVMutableComposition composition];
NSString *tempDir = NSTemporaryDirectory();
NSArray *audFiles; // holds the names of the sound files to join (populated elsewhere)
for (int i = 0; i < [audFiles count]; i++)
{
NSString *addSound = [tempDir stringByAppendingString:audFiles[i]];
if ([[NSFileManager defaultManager] fileExistsAtPath:addSound] == YES)
{
NSDictionary *fileAttributes = [[NSFileManager defaultManager] attributesOfItemAtPath:addSound error:nil];
if (fileAttributes != nil)
{
NSNumber *fileSize = [fileAttributes objectForKey:NSFileSize];
NSLog(@"file %@ %@", addSound, fileSize);
NSURL *assetURL = [[NSURL alloc] initFileURLWithPath:addSound];
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:assetURL options:nil];
if (asset != nil)
{
CMTimeRange timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
NSLog(#"asset %d length %lld", i, asset.duration.value);
if (asset.duration.value > 0)
{
AVAssetTrack *audTrack = [asset tracksWithMediaType:AVMediaTypeAudio][0];
AVMutableCompositionTrack *audioTrack = [combinedSounds addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
if ([audioTrack insertTimeRange:timeRange ofTrack:audTrack atTime:nextClipStartTime error:nil] == NO)
{
NSLog(#"insertTimeRange %d FAILED", i);
}
nextClipStartTime = CMTimeAdd(nextClipStartTime, asset.duration);
nextClipStartTime = CMTimeAdd(nextClipStartTime, CMTimeMake(0.1, 1));
NSLog(#"nextClipStartTime %lld", nextClipStartTime.value);
}
}
}
}
}
NSString *finalSound = [tempDir stringByAppendingString:@"result.m4a"];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:combinedSounds presetName:AVAssetExportPresetPassthrough];
NSString *exported = [tempDir stringByAppendingString:finalSound];
[[NSFileManager defaultManager] removeItemAtPath:exported error:nil];
NSURL *exportedURL = [[NSURL alloc] initFileURLWithPath:exported];
exportSession.outputURL = exportedURL;
exportSession.shouldOptimizeForNetworkUse = YES;
exportSession.outputFileType = AVFileTypeAppleM4A;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
switch (exportSession.status)
{
case AVAssetExportSessionStatusFailed:
{
NSLog(#"exportSession FAIL");
break;
}
case AVAssetExportSessionStatusCompleted:
{
NSLog(#"exportSession SUCCESS");
}
}
}];
"exportSession SUCCESS" is reported, and the exported file exists and can be played, but it only contains the first of the constituent files.
Any ideas what I am doing wrong?
Thanks.
Step 1: Add AVFoundation.framework and CoreMedia.framework to your project.
Step 2: #import <AVFoundation/AVFoundation.h> in your .h file.
Step 3: Write the following methods in your .m file.
Step 4: Call this function from viewDidAppear to create the merged audio file (a call-site sketch follows the code below).
// Get Asset Detail
- (AVAsset *)getAvAssetForFile:(NSString *)fileName andType:(NSString *)fileType
{
NSString *URLPath = [[NSBundle mainBundle] pathForResource:fileName ofType:fileType];
NSURL *assetURL = [NSURL fileURLWithPath:URLPath];
AVAsset *asset = [AVAsset assetWithURL:assetURL];
return asset;
}
- (void)mergeAudioFiles
{
// An array holding the list of audio clips to be merged; they can be of different types
NSMutableArray *listOfAudio = [[NSMutableArray alloc] initWithCapacity:0];
// Create an AVAsset object for each of your audio files and add it to the array
[listOfAudio addObject:[self getAvAssetForFile:@"audioFile1" andType:@"caf"]];
[listOfAudio addObject:[self getAvAssetForFile:@"audioFile2" andType:@"caf"]];
[listOfAudio addObject:[self getAvAssetForFile:@"audioFile3" andType:@"aiff"]];
AVMutableComposition *composition = [AVMutableComposition composition];
CMTime current = kCMTimeZero;
NSError *compositionError = nil;
for(AVAsset *asset in listOfAudio) {
BOOL result = [composition insertTimeRange:CMTimeRangeMake(kCMTimeZero, [asset duration])
ofAsset:asset
atTime:current
error:&compositionError];
if(!result) {
if(compositionError) {
// manage the composition error case
NSLog(#"Error in Merging : %#",[compositionError debugDescription]);
}
} else {
current = CMTimeAdd(current, [asset duration]);
}
}
AVAssetExportSession *exporter = [[AVAssetExportSession alloc]
initWithAsset: composition
presetName: AVAssetExportPresetAppleM4A];
exporter.outputFileType = @"com.apple.m4a-audio";
// Set Output FileName
NSString *fileName = @"myAudioTrack";
NSString *exportFile = [DOC_DIR_PATH stringByAppendingFormat:@"/%@.m4a", fileName]; // DOC_DIR_PATH: assumed Documents-directory macro
// set up export
NSURL *exportURL = [NSURL fileURLWithPath:exportFile];
exporter.outputURL = exportURL;
// do the export
[exporter exportAsynchronouslyWithCompletionHandler:^{
int exportStatus = exporter.status;
switch (exportStatus) {
case AVAssetExportSessionStatusFailed: NSLog(@"AVAssetExportSessionStatusFailed: %@", exporter.debugDescription); break;
case AVAssetExportSessionStatusCompleted: NSLog(@"AVAssetExportSessionStatusCompleted"); break;
case AVAssetExportSessionStatusUnknown: NSLog(@"AVAssetExportSessionStatusUnknown"); break;
case AVAssetExportSessionStatusExporting: NSLog(@"AVAssetExportSessionStatusExporting"); break;
case AVAssetExportSessionStatusCancelled: NSLog(@"AVAssetExportSessionStatusCancelled"); break;
case AVAssetExportSessionStatusWaiting: NSLog(@"AVAssetExportSessionStatusWaiting"); break;
default: NSLog(@"didn't get export status"); break;
}
}];
}
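A hypothetical call site for Step 4 (assuming this code lives in a view controller):
- (void)viewDidAppear:(BOOL)animated
{
    [super viewDidAppear:animated];
    [self mergeAudioFiles];
}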
So I have two audio files, same format, potentially different length. I would like to combine these files (overlay the audio from one onto the other, NOT join them at ends).
Let's say I have two files:
Audio File A, length 30 seconds, size 220k
Audio File B, length 45 seconds, size 300k
What I'd like is a combined audio file:
Audio File C, length 45 seconds, size 300k (I recognize this could be more)
Appreciate everyone's help!
Here's what I did in my app.
- (void) setUpAndAddAudioAtPath:(NSURL*)assetURL toComposition:(AVMutableComposition *)composition
{
AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:assetURL options:nil];
AVMutableCompositionTrack *track = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *sourceAudioTrack = [[songAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
NSError *error = nil;
BOOL ok = NO;
CMTime startTime = CMTimeMakeWithSeconds(0, 1);
CMTime trackDuration = songAsset.duration;
//CMTime longestTime = CMTimeMake(848896, 44100); //(19.24 seconds)
CMTimeRange tRange = CMTimeRangeMake(startTime, trackDuration);
//Set Volume
AVMutableAudioMixInputParameters *trackMix = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:track];
[trackMix setVolume:0.8f atTime:startTime];
[self.audioMixParams addObject:trackMix];
//Insert audio into track
ok = [track insertTimeRange:tRange ofTrack:sourceAudioTrack atTime:CMTimeMake(0, 44100) error:&error];
}
- (IBAction)saveRecording
{
AVMutableComposition *composition = [AVMutableComposition composition];
audioMixParams = [[NSMutableArray alloc] initWithObjects:nil];
// Implement the following code when you want to merge another audio file
//Add Audio Tracks to Composition
NSString *URLPath1 = [[NSBundle mainBundle] pathForResource:@"mysound" ofType:@"mp3"];
NSString *URLPath2 = [[NSBundle mainBundle] pathForResource:@"mysound2" ofType:@"mp3"];
NSURL *assetURL1 = [NSURL fileURLWithPath:URLPath1];
[self setUpAndAddAudioAtPath:assetURL1 toComposition:composition];
NSURL *assetURL2 = [NSURL fileURLWithPath:URLPath2];
[self setUpAndAddAudioAtPath:assetURL2 toComposition:composition];
AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];
audioMix.inputParameters = [NSArray arrayWithArray:audioMixParams];
//If you need to query what formats you can export to, here's a way to find out
NSLog (#"compatible presets for songAsset: %#",
[AVAssetExportSession exportPresetsCompatibleWithAsset:composition]);
AVAssetExportSession *exporter = [[AVAssetExportSession alloc]
initWithAsset: composition
presetName: AVAssetExportPresetAppleM4A];
exporter.audioMix = audioMix;
exporter.outputFileType = @"com.apple.m4a-audio";
NSURL *exportURL = [NSURL fileURLWithPath:exportFile]; // exportFile: your output path string, defined elsewhere
exporter.outputURL = exportURL;
// do the export
[exporter exportAsynchronouslyWithCompletionHandler:^{
int exportStatus = exporter.status;
NSError *exportError = exporter.error;
switch (exportStatus) {
case AVAssetExportSessionStatusFailed:
break;
case AVAssetExportSessionStatusCompleted: NSLog(@"AVAssetExportSessionStatusCompleted");
break;
case AVAssetExportSessionStatusUnknown: NSLog(@"AVAssetExportSessionStatusUnknown"); break;
case AVAssetExportSessionStatusExporting: NSLog(@"AVAssetExportSessionStatusExporting"); break;
case AVAssetExportSessionStatusCancelled: NSLog(@"AVAssetExportSessionStatusCancelled"); break;
case AVAssetExportSessionStatusWaiting: NSLog(@"AVAssetExportSessionStatusWaiting"); break;
default: NSLog(@"didn't get export status"); break;
}
}];
}
Beware that I did do this a while ago, and you might have to work with it a tiny bit to make it work. But it did work at one point. Let me know if you're having problems.
If an audio track might not be present in the selected asset, you can use this check to see whether a particular video contains sound.
if([[songAsset tracksWithMediaType:AVMediaTypeAudio] firstObject]==NULL)
{
NSLog(#"Sound is not Present");
}
else
{
NSLog(#"Sound is Present");
//You will initalise all things
AVAssetTrack *sourceAudioTrack = [[songAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
}
I am trying to cut an audio file for an iPhone project. I can cut it and save it, but any fade in / fade out that I try to apply doesn't work; the audio file is saved cut, but not faded.
I am using the following code:
//
// NO PROBLEMS TO SEE HERE, MOVE ON
//
NSArray *documentsFolders = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
int currentFileNum = 10;
NSURL *url = [NSURL fileURLWithPath:[[documentsFolders objectAtIndex:0] stringByAppendingPathComponent:[NSString stringWithFormat:@"%@%d.%@", AUDIO_SOURCE_FILE_NAME, currentFileNum, AUDIO_SOURCE_FILE_EXTENSION]]];
NSDictionary *options = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES]
forKey:AVURLAssetPreferPreciseDurationAndTimingKey];
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:url options:options];
AVAssetExportSession* exporter = [AVAssetExportSession exportSessionWithAsset:asset presetName:AVAssetExportPresetAppleM4A];
for (NSString* filetype in exporter.supportedFileTypes) {
if ([filetype isEqualToString:AVFileTypeAppleM4A]) {
exporter.outputFileType = AVFileTypeAppleM4A;
break;
}
}
if (exporter.outputFileType == nil) {
NSLog(#"Needed output file type not found? (%#)", AVFileTypeAppleM4A);
//return;
}
NSString* outPath = [[documentsFolders objectAtIndex:0] stringByAppendingPathComponent:[NSString stringWithFormat:@"%@%d.%@", AUDIO_CUTTED_FILE_NAME, currentFileNum, AUDIO_SOURCE_FILE_EXTENSION]];
NSURL* const outUrl = [NSURL fileURLWithPath:outPath];
exporter.outputURL = outUrl;
float endTrimTime = CMTimeGetSeconds(asset.duration);
float startTrimTime = fminf(AUDIO_DURATION, endTrimTime);
CMTime startTrimCMTime=CMTimeSubtract(asset.duration, CMTimeMake(startTrimTime, 1));
exporter.timeRange = CMTimeRangeMake(startTrimCMTime, asset.duration);
//
// TRYING TO APPLY FADEIN FADEOUT, NOT WORKING, NO RESULTS, "CODE IGNORED"
//
AVMutableAudioMix *exportAudioMix = [AVMutableAudioMix audioMix];
NSMutableArray* inputParameters = [NSMutableArray arrayWithCapacity:1];
CMTime startFadeInTime = startTrimCMTime;
CMTime endFadeInTime = CMTimeMake(startTrimTime+1, 1);
CMTime startFadeOutTime = CMTimeMake(endTrimTime-1, 1);
CMTime endFadeOutTime = CMTimeMake(endTrimTime, 1);
CMTimeRange fadeInTimeRange = CMTimeRangeFromTimeToTime(startFadeInTime, endFadeInTime);
CMTimeRange fadeOutTimeRange = CMTimeRangeFromTimeToTime(startFadeOutTime, endFadeOutTime);
AVMutableAudioMixInputParameters *exportAudioMixInputParameters = [AVMutableAudioMixInputParameters audioMixInputParameters];
[exportAudioMixInputParameters setVolume:0.0 atTime:CMTimeMakeWithSeconds(startTrimTime-0.01, 1)];
[exportAudioMixInputParameters setVolumeRampFromStartVolume:0.0 toEndVolume:1.0 timeRange:fadeInTimeRange];
[exportAudioMixInputParameters setVolumeRampFromStartVolume:1.0 toEndVolume:0.0 timeRange:fadeOutTimeRange];
[inputParameters insertObject:exportAudioMixInputParameters atIndex:0];
exportAudioMix.inputParameters = inputParameters;
exporter.audioMix = exportAudioMix;
[exporter exportAsynchronouslyWithCompletionHandler:^(void) {
NSString* message;
switch (exporter.status) {
case AVAssetExportSessionStatusFailed:
message = [NSString stringWithFormat:#"Export failed. Error: %#", exporter.error.description];
[asset release];
break;
case AVAssetExportSessionStatusCompleted: {
[asset release];
[self reallyConvert:currentFileNum];
message = [NSString stringWithFormat:#"Export completed: %#", outPath];
break;
}
case AVAssetExportSessionStatusCancelled:
message = [NSString stringWithFormat:#"Export cancelled!"];
[asset release];
break;
default:
NSLog(#"Export 4 unhandled status: %d", exporter.status);
[asset release];
break;
}
}];
You need to select the track. Instead of calling:
AVMutableAudioMixInputParameters *exportAudioMixInputParameters = [AVMutableAudioMixInputParameters audioMixInputParameters];
Call:
AVAssetTrack *assetTrack = [[asset tracksWithMediaType:AVMediaTypeAudio]objectAtIndex:0];
AVMutableAudioMixInputParameters *exportAudioMixInputParameters = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:assetTrack];
In your existing code you can also specify the track like this:
exportAudioMixInputParameters.trackID = [[[asset tracksWithMediaType:AVMediaTypeAudio]objectAtIndex:0] trackID];
Good luck!
Here is the solution.
setVolumeRampFromStartVolume didn't work for me, so this sets stepped volumes instead.
AVMutableAudioMix *exportAudioMix = [AVMutableAudioMix audioMix];
AVMutableAudioMixInputParameters *exportAudioMixInputParameters = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:track];
//fade in
[exportAudioMixInputParameters setVolume:0.0 atTime:CMTimeMakeWithSeconds(start-1, 1)];
[exportAudioMixInputParameters setVolume:0.1 atTime:CMTimeMakeWithSeconds(start, 1)];
[exportAudioMixInputParameters setVolume:0.5 atTime:CMTimeMakeWithSeconds(start+1, 1)];
[exportAudioMixInputParameters setVolume:1.0 atTime:CMTimeMakeWithSeconds(start+2, 1)];
//fade out
[exportAudioMixInputParameters setVolume:1.0 atTime:CMTimeMakeWithSeconds((start+length-2), 1)];
[exportAudioMixInputParameters setVolume:0.5 atTime:CMTimeMakeWithSeconds((start+length-1), 1)];
[exportAudioMixInputParameters setVolume:0.1 atTime:CMTimeMakeWithSeconds((start+length), 1)];
exportAudioMix.inputParameters = [NSArray arrayWithObject:exportAudioMixInputParameters];
// configure export session output with all our parameters
exportSession.outputURL = [NSURL fileURLWithPath:filePath]; // output path
exportSession.outputFileType = AVFileTypeAppleM4A; // output file type
exportSession.timeRange = exportTimeRange; // trim time ranges
exportSession.audioMix = exportAudioMix; // fade in audio mix
// perform the export
[exportSession exportAsynchronouslyWithCompletionHandler:^{
if (AVAssetExportSessionStatusCompleted == exportSession.status) {
NSLog(#"AVAssetExportSessionStatusCompleted");
} else if (AVAssetExportSessionStatusFailed == exportSession.status) {
NSLog(#"AVAssetExportSessionStatusFailed");
} else {
NSLog(#"Export Session Status: %d", exportSession.status);
}
}];
I've made the same mistake as you dozens of times!
Apple's API is really weird on this:
CMTimeRange fadeInTimeRange = CMTimeRangeFromTimeToTime(startFadeInTime, endFadeInTime);
CMTimeRange fadeOutTimeRange = CMTimeRangeFromTimeToTime(startFadeOutTime, endFadeOutTime);
Should be:
CMTimeRangeMake(startFadeInTime, fadeInDuration);
CMTimeRangeMake(startFadeOutTime, fadeOutDuration);
A CMTimeRange is created from a start and a duration, not from a start and an end (CMTimeRangeFromTimeToTime is the separate variant that takes an end time)!
But most of the time the end time is also the duration (when the start time is 0), which is why so many people (including me) make the mistake.
And no, Apple, that's not intuitive at all!
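A small worked example of the two constructors (the values here are illustrative):
CMTime fadeStart    = CMTimeMakeWithSeconds(5.0, 600); // fade begins at 5 s
CMTime fadeDuration = CMTimeMakeWithSeconds(1.0, 600); // fade lasts 1 s
// start + duration:
CMTimeRange byDuration = CMTimeRangeMake(fadeStart, fadeDuration);
// start + end time:
CMTimeRange byEndTime = CMTimeRangeFromTimeToTime(fadeStart, CMTimeAdd(fadeStart, fadeDuration));
// Both describe the same one-second range:
NSLog(@"equal: %d", CMTimeRangeEqual(byDuration, byEndTime));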
This is my working code, just take it and have a nice day!
+(void)makeAudioFadeOutWithSourceURL:(NSURL*)sourceURL destinationURL:(NSURL*)destinationURL fadeOutBeginSecond:(NSInteger)beginTime fadeOutEndSecond:(NSInteger)endTime fadeOutBeginVolume:(CGFloat)beginVolume fadeOutEndVolume:(CGFloat)endVolume callback:(void(^)(BOOL))callback
{
NSAssert(callback, @"need callback");
NSParameterAssert(beginVolume >= 0 && beginVolume <=1);
NSParameterAssert(endVolume >= 0 && endVolume <= 1);
BOOL sourceExist = [[NSFileManager defaultManager] fileExistsAtPath:sourceURL.path];
NSAssert(sourceExist, @"source does not exist");
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:sourceURL options:nil];
AVAssetExportSession* exporter = [AVAssetExportSession exportSessionWithAsset:asset presetName:AVAssetExportPresetAppleM4A];
exporter.outputURL = destinationURL;
exporter.outputFileType = AVFileTypeAppleM4A;
AVMutableAudioMix *exportAudioMix = [AVMutableAudioMix audioMix];
AVMutableAudioMixInputParameters *exportAudioMixInputParameters = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:asset.tracks.lastObject];
[exportAudioMixInputParameters setVolumeRampFromStartVolume:beginVolume toEndVolume:endVolume timeRange:CMTimeRangeMake(CMTimeMakeWithSeconds(beginTime, 1), CMTimeSubtract(CMTimeMakeWithSeconds(endTime, 1), CMTimeMakeWithSeconds(beginTime, 1)))];
NSArray *audioMixParameters = #[exportAudioMixInputParameters];
exportAudioMix.inputParameters = audioMixParameters;
exporter.audioMix = exportAudioMix;
[exporter exportAsynchronouslyWithCompletionHandler:^(void){
AVAssetExportSessionStatus status = exporter.status;
if (status != AVAssetExportSessionStatusCompleted) {
if (callback) {
callback(NO);
}
}
else {
if (callback) {
callback(YES);
}
}
NSError *error = exporter.error;
NSLog(#"export done,error %#,status %d",error,status);
}];
}
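For reference, a hypothetical call site (the class name AudioExporter and both file URLs are placeholders):
NSURL *src = [NSURL fileURLWithPath:@"/path/to/source.m4a"];
NSURL *dst = [NSURL fileURLWithPath:@"/path/to/faded.m4a"];
[AudioExporter makeAudioFadeOutWithSourceURL:src
                              destinationURL:dst
                          fadeOutBeginSecond:10
                            fadeOutEndSecond:15
                          fadeOutBeginVolume:1.0
                            fadeOutEndVolume:0.0
                                    callback:^(BOOL success) {
    NSLog(@"fade-out export %@", success ? @"succeeded" : @"failed");
}];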
I've looked and looked for an answer, but can't seem to find one. Lots have asked, but none have gotten answers. I have an app that records audio using AVAudioRecorder. Now I just want to merge two or more recordings into one file that can be sent out via email. Does anyone have any clue as to how this can be done?
(This answer suggests using something called Audio Queue Services, but I don't know anything about that.)
It's not quite as easy as you would think. I used the AVFoundation framework to do exactly what you're asking about to create iAmRingtones. It required creating AVAssets from the audio files and setting up an AVAssetExportSession. The end result was great, but it certainly took a bit of work. Here's more or less how we created the export functionality in our app:
- (void) setUpAndAddAudioAtPath:(NSURL*)assetURL toComposition:(AVMutableComposition *)composition {
AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:assetURL options:nil];
AVMutableCompositionTrack *track = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *sourceAudioTrack = [[songAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
NSError *error = nil;
BOOL ok = NO;
CMTime startTime = CMTimeMakeWithSeconds(0, 1);
CMTime trackDuration = songAsset.duration;
CMTime longestTime = CMTimeMake(848896, 44100); //(19.24 seconds)
CMTimeRange tRange = CMTimeRangeMake(startTime, trackDuration);
//Set Volume
AVMutableAudioMixInputParameters *trackMix = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:track];
[trackMix setVolume:0.8f atTime:startTime];
[audioMixParams addObject:trackMix];
//Insert audio into track
ok = [track insertTimeRange:tRange ofTrack:sourceAudioTrack atTime:CMTimeMake(0, 44100) error:&error];
}
The above method gets called twice (once for each audio track) from the following method:
- (void) exportAudio {
AVMutableComposition *composition = [AVMutableComposition composition];
audioMixParams = [[NSMutableArray alloc] initWithObjects:nil];
//Add Audio Tracks to Composition
NSString *URLPath1 = pathToYourAudioFile1;
NSURL *assetURL1 = [NSURL fileURLWithPath:URLPath1];
[self setUpAndAddAudioAtPath:assetURL1 toComposition:composition];
NSString *URLPath2 = pathToYourAudioFile2;
NSURL *assetURL2 = [NSURL fileURLWithPath:URLPath2];
[self setUpAndAddAudioAtPath:assetURL2 toComposition:composition];
AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];
audioMix.inputParameters = [NSArray arrayWithArray:audioMixParams];
//If you need to query what formats you can export to, here's a way to find out
NSLog (#"compatible presets for songAsset: %#",
[AVAssetExportSession exportPresetsCompatibleWithAsset:composition]);
AVAssetExportSession *exporter = [[AVAssetExportSession alloc]
initWithAsset: composition
presetName: AVAssetExportPresetAppleM4A];
exporter.audioMix = audioMix;
exporter.outputFileType = @"com.apple.m4a-audio";
NSString *fileName = @"someFilename";
NSString *exportFile = [[util getDocumentsDirectory] stringByAppendingFormat:@"/%@.m4a", fileName];
// set up export
myDeleteFile(exportFile); // myDeleteFile: the author's helper that removes any existing file at this path
NSURL *exportURL = [NSURL fileURLWithPath:exportFile];
exporter.outputURL = exportURL;
// do the export
[exporter exportAsynchronouslyWithCompletionHandler:^{
int exportStatus = exporter.status;
switch (exportStatus) {
case AVAssetExportSessionStatusFailed: {
    NSError *exportError = exporter.error;
    NSLog(@"AVAssetExportSessionStatusFailed: %@", exportError);
    break;
}
case AVAssetExportSessionStatusCompleted: NSLog(@"AVAssetExportSessionStatusCompleted"); break;
case AVAssetExportSessionStatusUnknown: NSLog(@"AVAssetExportSessionStatusUnknown"); break;
case AVAssetExportSessionStatusExporting: NSLog(@"AVAssetExportSessionStatusExporting"); break;
case AVAssetExportSessionStatusCancelled: NSLog(@"AVAssetExportSessionStatusCancelled"); break;
case AVAssetExportSessionStatusWaiting: NSLog(@"AVAssetExportSessionStatusWaiting"); break;
default: NSLog(@"didn't get export status"); break;
}
}];
// start up the export progress bar
progressView.hidden = NO;
progressView.progress = 0.0;
[NSTimer scheduledTimerWithTimeInterval:0.1
target:self
selector:@selector(updateExportProgress:)
userInfo:exporter
repeats:YES];
}
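The updateExportProgress: selector is not shown in the original answer; here is a minimal sketch of what it might look like (progressView is assumed to be the same UIProgressView used above):
- (void)updateExportProgress:(NSTimer *)timer
{
    AVAssetExportSession *session = (AVAssetExportSession *)timer.userInfo;
    progressView.progress = session.progress; // progress runs from 0.0 to 1.0
    if (session.status != AVAssetExportSessionStatusExporting) {
        [timer invalidate];
        progressView.hidden = YES;
    }
}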
How to merge any number of audio files sequentially, whose paths are contained in an array called recordingsArray:
#pragma mark mergeRecording
- (void) mergeRecording
{
AVMutableComposition *composition = [AVMutableComposition composition];
[self buildSequenceComposition:composition]; //given Below
NSLog (#"compatible presets for songAsset: %#",[AVAssetExportSession exportPresetsCompatibleWithAsset:composition]);
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset: composition presetName: AVAssetExportPresetAppleM4A];
exporter.outputFileType = @"com.apple.m4a-audio";
//File Name
NSString *recordingFileName = [self setRecordingFileName];
self.recordingTimeLbl.text = @"00:00:00";
NSString *exportFile = [NSTemporaryDirectory() stringByAppendingFormat:@"/%@.m4a", recordingFileName];
// set up export
[[NSFileManager defaultManager] removeItemAtPath:exportFile error:NULL]; // remove any previous output file
NSURL *exportURL = [NSURL fileURLWithPath:exportFile];
exporter.outputURL = exportURL;
NSData *sound1Data = [[NSData alloc] initWithContentsOfURL: exportURL];
NSLog(#"Length %i",sound1Data.length);
[exporter exportAsynchronouslyWithCompletionHandler:^{
int exportStatus = exporter.status;
switch (exportStatus) {
case AVAssetExportSessionStatusFailed:
NSLog (#"AVAssetExportSessionStatusFailed:");
break;
case AVAssetExportSessionStatusCompleted: NSLog(@"AVAssetExportSessionStatusCompleted"); break;
case AVAssetExportSessionStatusUnknown: NSLog(@"AVAssetExportSessionStatusUnknown"); break;
case AVAssetExportSessionStatusExporting: NSLog(@"AVAssetExportSessionStatusExporting"); break;
case AVAssetExportSessionStatusCancelled: NSLog(@"AVAssetExportSessionStatusCancelled"); break;
case AVAssetExportSessionStatusWaiting: NSLog(@"AVAssetExportSessionStatusWaiting"); break;
default: NSLog(@"didn't get export status"); break;
}
}];
// start up the export progress bar
[NSTimer scheduledTimerWithTimeInterval:0.1 target:self selector:@selector(updateProgress:) userInfo:exporter repeats:NO];
}
- (NSString *) setRecordingFileName
{
NSDate *todaysDate = [NSDate date];
NSDateFormatter *dateFormat = [[NSDateFormatter alloc] init];
[dateFormat setDateFormat:@"dd-MM-yyyy"];
NSString *dateString11 = [dateFormat stringFromDate:todaysDate];
NSCalendar *gregorian = [[NSCalendar alloc] initWithCalendarIdentifier:NSGregorianCalendar];
NSDateComponents *dateComponents = [gregorian components:(NSHourCalendarUnit | NSMinuteCalendarUnit | NSSecondCalendarUnit) fromDate:todaysDate];
NSInteger hour = [dateComponents hour];
NSInteger minute = [dateComponents minute];
NSInteger second = [dateComponents second];
[gregorian release];
NSLog(#"Date: %# \n Time : %#-%#-%#",dateString11,[NSString stringWithFormat:#"%i",hour],[NSString stringWithFormat:#"%i",minute],[NSString stringWithFormat:#"%i",second]);
NSString *recordingFileName = @"Any Name";
if(recordingFileName.length > 0)
{
recordingFileName = [NSString stringWithFormat:#"%#AND%#AND%#-%#-%#", recordingFileName, dateString11, [NSString stringWithFormat:#"%i",hour], [NSString stringWithFormat:#"%i",minute], [NSString stringWithFormat:#"%i",second]];
}
else
{
recordingFileName = [NSString stringWithFormat:#"%#AND%#-%#-%#",dateString11,[NSString stringWithFormat:#"%i",hour],[NSString stringWithFormat:#"%i",minute],[NSString stringWithFormat:#"%i",second]];
}
return recordingFileName;
}
- (void)updateProgress:(id)timer
{
AVAssetExportSession *session;
if([timer isKindOfClass:[NSTimer class]])
session = (AVAssetExportSession *)[timer userInfo];
else if([timer isKindOfClass:[AVAssetExportSession class]])
session = timer;
if (session.status == AVAssetExportSessionStatusExporting)
{
NSArray *modes = [[[NSArray alloc] initWithObjects:NSDefaultRunLoopMode, UITrackingRunLoopMode, nil] autorelease];
[self performSelector:@selector(updateProgress:) withObject:session afterDelay:0.5 inModes:modes];
}
else if(session.status == AVAssetExportSessionStatusCompleted)
{
NSLog(#"Exporting Ended");
NSURL *exportURL = session.outputURL;
NSData *sound1Data = [[NSData alloc] initWithContentsOfURL: exportURL];
NSLog(#"Length %i \n Path %#",sound1Data.length,exportURL);
[self.activityIndicator stopAnimating];
self.activityIndicator.hidden = YES;
NSLog(#"Merging Complete");
for(int x = 0 ; x < [recordingsArray count] ; x++)
{
NSURL *recordingPathUrl = [recordingsArray objectAtIndex:x];
BOOL yes = [[NSFileManager defaultManager] removeItemAtPath:recordingPathUrl.relativePath error:NULL];
if (yes)
{
NSLog(#"File Removed at Path %#",recordingPathUrl.relativePath);
}
else
{
NSLog(#"Problem During Removal of Recording At Path %#",recordingPathUrl.relativePath);
}
}
NSString *exportFile = [NSString stringWithFormat:@"%@", exportURL];
NSString *recordingFileName = [self setRecordingFileName];
BOOL isInserted = [[DbFile sharedDatabase] insertRecordingDataIntoTable:recordingFileName recordingPath:exportFile];
if(isInserted)
{
NSLog(#"Recording Inserted In Database");
}
else
{
NSLog(#"Recording Inserted In Database");
}
if([timer isKindOfClass:[NSTimer class]])
[timer invalidate];
}
else if(session.status == AVAssetExportSessionStatusFailed)
{
[self.activityIndicator stopAnimating];
NSLog(#"Recording Export Failed");
UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:@"Error" message:@"Recording Export Failed" delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil];
[alertView show];
[alertView release];
if([timer isKindOfClass:[NSTimer class]])
[timer invalidate];
}
else if(session.status == AVAssetExportSessionStatusCancelled)
{
[self.activityIndicator stopAnimating];
NSLog(#"Recording Export Cancelled");
UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:#"Error" message:#"Recording Export Cancelled" delegate:nil cancelButtonTitle:#"OK" otherButtonTitles: nil];
[alertView show];
[alertView release];
if([timer isKindOfClass:[NSTimer class]])
[timer invalidate];
}
}
- (void) buildSequenceComposition:(AVMutableComposition *)composition
{
AVMutableCompositionTrack *audioTrack1 = [composition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID:kCMPersistentTrackID_Invalid];
CMTime nextClipStartTime = kCMTimeZero;
for (NSURL *recordingURL in recordingsArray)
{
AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:recordingURL options:nil];
CMTimeRange timeRangeInAsset;
timeRangeInAsset = CMTimeRangeMake(kCMTimeZero, [audioAsset duration]);
AVAssetTrack *clipAudioTrack = [[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
[audioTrack1 insertTimeRange:timeRangeInAsset ofTrack:clipAudioTrack atTime:nextClipStartTime error:nil];
nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRangeInAsset.duration);
}
}
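For completeness, recordingsArray is assumed to hold NSURL objects pointing at the recordings, e.g. (paths are placeholders):
recordingsArray = [NSMutableArray arrayWithObjects:
                   [NSURL fileURLWithPath:@"/path/to/recording1.m4a"],
                   [NSURL fileURLWithPath:@"/path/to/recording2.m4a"], nil];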
Can we store an iPod music library file (a song) in the iPhone app's folder?
If yes, please suggest how I can do that.
Please Help
Thanks
I show an audio list in a table view; when the user taps a song in the table, I hold that song in NSData for upload:
-(void)startVideoStart:(NSNumber*)number
{
MPMediaItem *song = [miPodSongsArray objectAtIndex:[number intValue]];
songTitle = [song valueForProperty: MPMediaItemPropertyTitle];
NSURL *url = [song valueForProperty:MPMediaItemPropertyAssetURL];
[NSThread detachNewThreadSelector:@selector(loadAudioInBackground:) toTarget:self withObject:url];
[self performSelector:@selector(UploadSong:) withObject:songDict afterDelay:10];
}
-(void)loadAudioInBackground:(NSURL*)url
{
NSAutoreleasePool *pool = [[NSAutoreleasePool alloc]init];
NSLog(#"%#",url);
AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL: url options:nil];
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSLog (#"compatible presets for songAsset: %#",[AVAssetExportSession exportPresetsCompatibleWithAsset:songAsset]);
AVAssetExportSession *exporter = [[AVAssetExportSession alloc]
initWithAsset: songAsset
presetName: AVAssetExportPresetPassthrough];
NSLog (#"created exporter. supportedFileTypes: %#", exporter.supportedFileTypes);
exporter.outputFileType = #"com.apple.m4a-audio";
NSString *exportFile = [documentsDirectory stringByAppendingPathComponent: #"item.m4a"];
NSError *error1;
if([[NSFileManager defaultManager] fileExistsAtPath:exportFile])
{
[[NSFileManager defaultManager] removeItemAtPath:exportFile error:&error1];
}
NSURL* exportURL = [[NSURL fileURLWithPath:exportFile] retain];
exporter.outputURL = exportURL;
// do the export
[exporter exportAsynchronouslyWithCompletionHandler:^{
NSData *data1 = [NSData dataWithContentsOfFile:[documentsDirectory stringByAppendingPathComponent:@"item.m4a"]];
//NSLog(@"%@", data1);
if (songDict) {
[songDict release];
songDict=nil;
}
songDict= [[NSMutableDictionary alloc]init];
[songDict setValue:data1 forKey:@"AudioData"];
[songDict setValue:songTitle forKey:@"AudioName"];
[songDict setValue:[[mAppDelegate.userInfoArray objectAtIndex:1] valueForKey:@"user_id"] forKey:@"user_id"];
//NSLog(@"%@", infoDict);
mAppDelegate.uploadType = @"Audio";
int exportStatus = exporter.status;
switch (exportStatus) {
case AVAssetExportSessionStatusFailed: {
// log error to text view
NSError *exportError = exporter.error;
NSLog (#"AVAssetExportSessionStatusFailed: %#", exportError);
// errorView.text = exportError ? [exportError description] : #"Unknown failure";
//errorView.hidden = NO;
break;
}
case AVAssetExportSessionStatusCompleted: {
NSLog (#"AVAssetExportSessionStatusCompleted");
break;
}
case AVAssetExportSessionStatusUnknown: { NSLog(@"AVAssetExportSessionStatusUnknown");
break;
}
case AVAssetExportSessionStatusExporting: { NSLog(@"AVAssetExportSessionStatusExporting");
break;
}
case AVAssetExportSessionStatusCancelled: { NSLog(@"AVAssetExportSessionStatusCancelled");
break;
}
case AVAssetExportSessionStatusWaiting: {
NSLog (#"AVAssetExportSessionStatusWaiting");
break;
}
default:
{ NSLog (#"didn't get export status");
break;
}
}
}];
[pool release];
}
Please have a look at TSLibraryImport as well. It handles the AVAssetExportSession code for you.