Concatenating sound files on the iPhone (m4a) fails

I am trying to make one long sound file by concatenating several smaller sound files - all are in the m4a format, and the result should also be in the m4a format.
Here is the code (the audFiles array holds the names of the sound files to join).
Note I only print the file sizes for peace of mind...
CMTime nextClipStartTime = kCMTimeZero;
AVMutableComposition *combinedSounds = [AVMutableComposition composition];
NSString *tempDir = NSTemporaryDirectory();
NSArray *audFiles; // populated elsewhere with the names of the sound files to join
for (int i = 0; i < [audFiles count]; i++)
{
    NSString *addSound = [tempDir stringByAppendingString:audFiles[i]];
    if ([[NSFileManager defaultManager] fileExistsAtPath:addSound] == YES)
    {
        NSDictionary *fileAttributes = [[NSFileManager defaultManager] attributesOfItemAtPath:addSound error:nil];
        if (fileAttributes != nil)
        {
            NSNumber *fileSize = [fileAttributes objectForKey:NSFileSize];
            NSLog(@"file %@ %@", addSound, fileSize);
            NSURL *assetURL = [[NSURL alloc] initFileURLWithPath:addSound];
            AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:assetURL options:nil];
            if (asset != nil)
            {
                CMTimeRange timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
                NSLog(@"asset %d length %lld", i, asset.duration.value);
                if (asset.duration.value > 0)
                {
                    AVAssetTrack *audTrack = [asset tracksWithMediaType:AVMediaTypeAudio][0];
                    AVMutableCompositionTrack *audioTrack = [combinedSounds addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
                    if ([audioTrack insertTimeRange:timeRange ofTrack:audTrack atTime:nextClipStartTime error:nil] == NO)
                    {
                        NSLog(@"insertTimeRange %d FAILED", i);
                    }
                    nextClipStartTime = CMTimeAdd(nextClipStartTime, asset.duration);
                    nextClipStartTime = CMTimeAdd(nextClipStartTime, CMTimeMake(0.1, 1)); // note: CMTimeMake takes an int64 value, so 0.1 truncates to 0
                    NSLog(@"nextClipStartTime %lld", nextClipStartTime.value);
                }
            }
        }
    }
}
NSString *finalSound = [tempDir stringByAppendingString:@"result.m4a"];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:combinedSounds presetName:AVAssetExportPresetPassthrough];
NSString *exported = [tempDir stringByAppendingString:finalSound];
[[NSFileManager defaultManager] removeItemAtPath:exported error:nil];
NSURL *exportedURL = [[NSURL alloc] initFileURLWithPath:exported];
exportSession.outputURL = exportedURL;
exportSession.shouldOptimizeForNetworkUse = YES;
exportSession.outputFileType = AVFileTypeAppleM4A;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
    switch (exportSession.status)
    {
        case AVAssetExportSessionStatusFailed:
        {
            NSLog(@"exportSession FAIL");
            break;
        }
        case AVAssetExportSessionStatusCompleted:
        {
            NSLog(@"exportSession SUCCESS");
        }
    }
}];
"exportSession SUCCESS" is reported, and the exported file exists and can be played, but it only contains the first of the constituent files.
Any ideas what I am doing wrong?
Thanks.

Step 1: Add AVFoundation.framework and CoreMedia.framework to your project.
Step 2: #import <AVFoundation/AVFoundation.h> in your .h file.
Step 3: Write the following methods in your .m file.
Step 4: Call mergeAudioFiles (for example from viewDidAppear:) to create the merged audio file.
// Get an AVAsset for a bundled audio file
- (AVAsset *)getAvAssetForFile:(NSString *)fileName andType:(NSString *)fileType
{
    NSString *URLPath = [[NSBundle mainBundle] pathForResource:fileName ofType:fileType];
    NSURL *assetURL = [NSURL fileURLWithPath:URLPath];
    AVAsset *asset = [AVAsset assetWithURL:assetURL];
    return asset;
}
- (void)mergeAudioFiles
{
    // An array with the list of audio clips to be merged; they can be of different types
    NSMutableArray *listOfAudio = [[NSMutableArray alloc] initWithCapacity:0];
    // Create an AVAsset object for each of your audio files and add it to the array
    [listOfAudio addObject:[self getAvAssetForFile:@"audioFile1" andType:@"caf"]];
    [listOfAudio addObject:[self getAvAssetForFile:@"audioFile2" andType:@"caf"]];
    [listOfAudio addObject:[self getAvAssetForFile:@"audioFile3" andType:@"aiff"]];

    AVMutableComposition *composition = [AVMutableComposition composition];
    CMTime current = kCMTimeZero;
    NSError *compositionError = nil;
    for (AVAsset *asset in listOfAudio) {
        BOOL result = [composition insertTimeRange:CMTimeRangeMake(kCMTimeZero, [asset duration])
                                           ofAsset:asset
                                            atTime:current
                                             error:&compositionError];
        if (!result) {
            if (compositionError) {
                // handle the composition error case
                NSLog(@"Error in merging: %@", [compositionError debugDescription]);
            }
        } else {
            current = CMTimeAdd(current, [asset duration]);
        }
    }

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc]
                                      initWithAsset:composition
                                      presetName:AVAssetExportPresetAppleM4A];
    exporter.outputFileType = @"com.apple.m4a-audio"; // same as AVFileTypeAppleM4A
    // Set the output file name (DOC_DIR_PATH is the app's Documents directory, defined elsewhere)
    NSString *fileName = @"myAudioTrack";
    NSString *exportFile = [DOC_DIR_PATH stringByAppendingFormat:@"/%@.m4a", fileName];
    // Set up the export
    NSURL *exportURL = [NSURL fileURLWithPath:exportFile];
    exporter.outputURL = exportURL;
    // Do the export
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        int exportStatus = exporter.status;
        switch (exportStatus) {
            case AVAssetExportSessionStatusFailed:    NSLog(@"AVAssetExportSessionStatusFailed: %@", exporter.debugDescription); break;
            case AVAssetExportSessionStatusCompleted: NSLog(@"AVAssetExportSessionStatusCompleted"); break;
            case AVAssetExportSessionStatusUnknown:   NSLog(@"AVAssetExportSessionStatusUnknown"); break;
            case AVAssetExportSessionStatusExporting: NSLog(@"AVAssetExportSessionStatusExporting"); break;
            case AVAssetExportSessionStatusCancelled: NSLog(@"AVAssetExportSessionStatusCancelled"); break;
            case AVAssetExportSessionStatusWaiting:   NSLog(@"AVAssetExportSessionStatusWaiting"); break;
            default: NSLog(@"didn't get export status"); break;
        }
    }];
}
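DOC_DIR_PATH in the snippet above is presumably a macro or property from the answerer's own project pointing at the app's Documents directory. A minimal sketch of how that path could be built, assuming the standard Documents folder is what you want (the file name is just an example):
// Hypothetical stand-in for DOC_DIR_PATH: the app's Documents directory.
NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSString *exportFile = [documentsDirectory stringByAppendingPathComponent:@"myAudioTrack.m4a"];
// Remove any previous export at the same path; AVAssetExportSession fails if the file already exists.
[[NSFileManager defaultManager] removeItemAtPath:exportFile error:nil];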

Related

iOS: lowering the bitrate of an MPMediaItem containing iPod music

I'm making an app which adds theme music to a video.
Some users complain that if their music is in Apple Lossless format, the resulting video is too large.
I found that this is because the AVMutableComposition I use just puts the original music format into the video I generate.
So is there any way I can lower the bitrate of the music in the MPMediaItem, or change the format it is encoded in?
Here is a snippet of the code I use to add the music to the video.
AVMutableComposition *mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                                preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                               ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                                atTime:kCMTimeZero error:nil];
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                                preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                               ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                                atTime:kCMTimeZero error:nil];
AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                        presetName:AVAssetExportPresetPassthrough];
NSURL *exportUrl = [NSURL fileURLWithPath:_videoOutputPath];
if ([[NSFileManager defaultManager] fileExistsAtPath:_videoOutputPath]) {
    [[NSFileManager defaultManager] removeItemAtPath:_videoOutputPath error:nil];
}
_assetExport.outputFileType = @"com.apple.quicktime-movie"; // AVFileTypeQuickTimeMovie
_assetExport.outputURL = exportUrl;
_assetExport.shouldOptimizeForNetworkUse = YES;
[_assetExport exportAsynchronouslyWithCompletionHandler:^(void) { }];
I finally got it; this is the code I use:
#import <AVFoundation/AVFoundation.h>

static NSString * const kWriterInputIsReadyForMoreData = @"readyForMoreMediaData";

@implementation AudioUtil
{
    AVAssetReader *_assetReader;
    AVAssetWriter *_assetWriter;
    AVAssetWriterInput *_assetWriterInput;
    AVAssetReaderTrackOutput *_readerOutput;
    void (^_callback)(BOOL);
    CMSampleBufferRef _sampleBufferToAppend;
}

- (void)downSamplingAudioWithSourceURL:(NSURL *)sourceURL destinationURL:(NSURL *)destURL timeRange:(CMTimeRange)timeRange callBack:(void (^)(BOOL))callback
{
    NSError *error = nil;
    _callback = callback;
    [[NSFileManager defaultManager] removeItemAtURL:destURL error:nil];

    // initialize the reader
    AVURLAsset *inputAsset = [AVURLAsset assetWithURL:sourceURL];
    _assetReader = [[AVAssetReader alloc] initWithAsset:inputAsset error:&error];
    _assetReader.timeRange = timeRange;
    AVAssetTrack *track = [[inputAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    NSMutableDictionary *audioReadSettings = [NSMutableDictionary dictionary];
    audioReadSettings[AVFormatIDKey] = @(kAudioFormatLinearPCM);
    audioReadSettings[AVNumberOfChannelsKey] = @([QLVideoFormatProvider audioChannelCount]);
    _readerOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:track outputSettings:audioReadSettings];
    NSAssert([_assetReader canAddOutput:_readerOutput], @"reader can't add output");
    [_assetReader addOutput:_readerOutput];

    // initialize the writer
    _assetWriter = [[AVAssetWriter alloc] initWithURL:destURL fileType:[QLVideoFormatProvider audioFileType] error:nil];
    NSMutableDictionary *audioOutputSettings = [NSMutableDictionary dictionary];
    audioOutputSettings[AVFormatIDKey] = [QLVideoFormatProvider audioFormatKeyForEncoder];
    audioOutputSettings[AVNumberOfChannelsKey] = @([QLVideoFormatProvider audioChannelCount]);
    audioOutputSettings[AVSampleRateKey] = @([QLVideoFormatProvider audioSampleRate]);
    audioOutputSettings[AVEncoderBitRateKey] = @([QLVideoFormatProvider audioBitrate]);
    _assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSettings];
    [_assetWriter addInput:_assetWriterInput];

    // start
    [_assetWriter startWriting];
    [_assetWriter startSessionAtSourceTime:kCMTimeZero];
    BOOL canStartReading = [_assetReader startReading];
    NSLog(@"can start reading %d", canStartReading);
    if (!canStartReading) {
        callback(NO);
        return;
    }
    [_assetWriterInput addObserver:self forKeyPath:kWriterInputIsReadyForMoreData options:NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew context:NULL];
    _sampleBufferToAppend = [_readerOutput copyNextSampleBuffer];
    [self appendBufferToAppend];
}
- (void)appendBufferToAppend
{
    if ([_assetWriterInput isReadyForMoreMediaData]) {
        if (_sampleBufferToAppend) {
            [_assetWriterInput appendSampleBuffer:_sampleBufferToAppend];
            CFRelease(_sampleBufferToAppend);
        }
        _sampleBufferToAppend = [_readerOutput copyNextSampleBuffer];
        if (_sampleBufferToAppend) {
            [self appendBufferToAppend];
        }
        else {
            [_assetWriter finishWritingWithCompletionHandler:^{
                if (_callback) {
                    _callback(_assetWriter.status == AVAssetWriterStatusCompleted);
                }
            }];
        }
    }
    else {
        // not ready yet; wait for the KVO notification on readyForMoreMediaData
    }
}

- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
    if ([keyPath isEqualToString:kWriterInputIsReadyForMoreData]) {
        if ([change[NSKeyValueChangeNewKey] boolValue] == YES) {
            [self appendBufferToAppend];
        }
    }
}

@end
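QLVideoFormatProvider is the answerer's own helper class that supplies the desired output format (file type, codec, channel count, sample rate, and bitrate); it is not part of AVFoundation. Here is a hedged sketch of how the helper above might be called from the question's video code (AudioUtil instantiation, _audioInputPath, _audioOutputPath, and videoAsset are placeholders, not part of the original answer):
// Hypothetical call site for the down-sampling helper above.
AudioUtil *audioUtil = [[AudioUtil alloc] init];
NSURL *sourceURL = [NSURL fileURLWithPath:_audioInputPath];   // audio exported from the MPMediaItem
NSURL *destURL   = [NSURL fileURLWithPath:_audioOutputPath];  // re-encoded (smaller) audio file
CMTimeRange range = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
[audioUtil downSamplingAudioWithSourceURL:sourceURL destinationURL:destURL timeRange:range callBack:^(BOOL success) {
    NSLog(@"down-sampling finished: %d", success);
    // The file at destURL can now be inserted into the AVMutableComposition with
    // insertTimeRange:ofTrack:atTime:error:, as in the original video snippet.
}];
Note also that the class adds itself as a KVO observer on the writer input but never removes the observer; in a real implementation you would call removeObserver:forKeyPath: once writing has finished.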

How can I overlap and combine audio files for iPhone in Xcode?

So I have two audio files, same format, potentially different length. I would like to combine these files (overlay the audio from one onto the other, NOT join them at ends).
Let's say I have two files:
Audio File A, length 30 seconds, size 220k
Audio File B, length 45 seconds, size 300k
What I'd like is a combined audio file:
Audio File C, length 45 seconds, size 300k (I recognize this could be more)
Appreciate everyone's help!
Here's what I did in my app.
- (void) setUpAndAddAudioAtPath:(NSURL*)assetURL toComposition:(AVMutableComposition *)composition
{
AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:assetURL options:nil];
AVMutableCompositionTrack *track = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *sourceAudioTrack = [[songAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
NSError *error = nil;
BOOL ok = NO;
CMTime startTime = CMTimeMakeWithSeconds(0, 1);
CMTime trackDuration = songAsset.duration;
//CMTime longestTime = CMTimeMake(848896, 44100); //(19.24 seconds)
CMTimeRange tRange = CMTimeRangeMake(startTime, trackDuration);
//Set Volume
AVMutableAudioMixInputParameters *trackMix = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:track];
[trackMix setVolume:0.8f atTime:startTime];
[self.audioMixParams addObject:trackMix];
//Insert audio into track
ok = [track insertTimeRange:tRange ofTrack:sourceAudioTrack atTime:CMTimeMake(0, 44100) error:&error];
}
- (IBAction)saveRecording
{
    AVMutableComposition *composition = [AVMutableComposition composition];
    audioMixParams = [[NSMutableArray alloc] initWithObjects:nil];

    // Implement the following when you want to merge another audio file:
    // add the audio tracks to the composition
    NSString *URLPath1 = [[NSBundle mainBundle] pathForResource:@"mysound" ofType:@"mp3"];
    NSString *URLPath2 = [[NSBundle mainBundle] pathForResource:@"mysound2" ofType:@"mp3"];
    NSURL *assetURL1 = [NSURL fileURLWithPath:URLPath1];
    [self setUpAndAddAudioAtPath:assetURL1 toComposition:composition];
    NSURL *assetURL2 = [NSURL fileURLWithPath:URLPath2];
    [self setUpAndAddAudioAtPath:assetURL2 toComposition:composition];

    AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];
    audioMix.inputParameters = [NSArray arrayWithArray:audioMixParams];

    // If you need to query which formats you can export to, here's a way to find out
    NSLog(@"compatible presets for composition: %@",
          [AVAssetExportSession exportPresetsCompatibleWithAsset:composition]);

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc]
                                      initWithAsset:composition
                                      presetName:AVAssetExportPresetAppleM4A];
    exporter.audioMix = audioMix;
    exporter.outputFileType = @"com.apple.m4a-audio";
    NSURL *exportURL = [NSURL fileURLWithPath:exportFile];
    exporter.outputURL = exportURL;

    // do the export
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        int exportStatus = exporter.status;
        NSError *exportError = exporter.error;
        switch (exportStatus) {
            case AVAssetExportSessionStatusFailed:
                NSLog(@"AVAssetExportSessionStatusFailed: %@", exportError);
                break;
            case AVAssetExportSessionStatusCompleted: NSLog(@"AVAssetExportSessionStatusCompleted"); break;
            case AVAssetExportSessionStatusUnknown:   NSLog(@"AVAssetExportSessionStatusUnknown"); break;
            case AVAssetExportSessionStatusExporting: NSLog(@"AVAssetExportSessionStatusExporting"); break;
            case AVAssetExportSessionStatusCancelled: NSLog(@"AVAssetExportSessionStatusCancelled"); break;
            case AVAssetExportSessionStatusWaiting:   NSLog(@"AVAssetExportSessionStatusWaiting"); break;
            default: NSLog(@"didn't get export status"); break;
        }
    }];
}
Beware that I did do this a while ago, and you might have to work with it a tiny bit to make it work. But it did work at one point. Let me know if you're having problems.
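Note that audioMixParams (accessed as a property in setUpAndAddAudioAtPath: and as an ivar in saveRecording) and exportFile are not declared in the snippet; they are presumably a property and an ivar of the answerer's class. A minimal sketch of the missing setup, assuming the export should go into the Documents directory (the file name is just an example):
// Assumed declarations and setup (not part of the original snippet):
@property (nonatomic, strong) NSMutableArray *audioMixParams;

// Somewhere before starting the export:
NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSString *exportFile = [documentsDirectory stringByAppendingPathComponent:@"combined.m4a"];
// AVAssetExportSession will not overwrite an existing file, so remove any previous export first.
[[NSFileManager defaultManager] removeItemAtPath:exportFile error:nil];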
If no audio track is present in the selected asset, objectAtIndex:0 will throw an exception. You can use this check to test whether a particular video actually contains sound:
if ([[songAsset tracksWithMediaType:AVMediaTypeAudio] firstObject] == NULL)
{
    NSLog(@"Sound is not present");
}
else
{
    NSLog(@"Sound is present");
    // You can then initialise everything as usual
    AVAssetTrack *sourceAudioTrack = [[songAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
}
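The same guard can be folded into setUpAndAddAudioAtPath:toComposition: from the first answer, so that assets without an audio track are skipped instead of raising an exception; a hedged sketch:
// Hypothetical guard at the top of setUpAndAddAudioAtPath:toComposition:
AVAssetTrack *sourceAudioTrack = [[songAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
if (sourceAudioTrack == nil) {
    NSLog(@"No audio track in %@, skipping", assetURL);
    return;
}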

How to change video metadata using AVAssetWriter?

How can I change a video's (.mp4) metadata using the AVAssetWriter API?
I don't want to re-encode; I only want to modify the video's metadata.
How do I continue from the following code?
AVAssetWriter *writer = [AVAssetWriter assetWriterWithURL:[NSURL URLWithString:myPath] fileType:AVFileTypeQuickTimeMovie error:nil];
If I'm mistaken, please give me a hint.
Thanks!!
Refer to the following code; it uses AVAssetExportSession with the passthrough preset, so the video is not re-encoded.
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:@"your path"] options:nil];
NSMutableArray *metadata = [NSMutableArray array];
AVMutableMetadataItem *metaItem = [AVMutableMetadataItem metadataItem];
metaItem.key = AVMetadataCommonKeyPublisher;
metaItem.keySpace = AVMetadataKeySpaceCommon;
metaItem.value = @"your_value";
[metadata addObject:metaItem];

AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetPassthrough];
exportSession.outputURL = [NSURL fileURLWithPath:@"your output path"];
CMTime start = CMTimeMakeWithSeconds(0.0, BASIC_TIMESCALE);
CMTimeRange range = CMTimeRangeMake(start, [asset duration]);
exportSession.timeRange = range;
exportSession.outputFileType = AVFileTypeAppleM4V; // or AVFileTypeMPEG4 / AVFileTypeQuickTimeMovie, depending on the container
exportSession.metadata = metadata;
exportSession.shouldOptimizeForNetworkUse = YES;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
    switch ([exportSession status])
    {
        case AVAssetExportSessionStatusCompleted:
            NSLog(@"Export success");
            break;
        case AVAssetExportSessionStatusFailed:
            NSLog(@"Export failed: %@", [[exportSession error] localizedDescription]);
            break;
        case AVAssetExportSessionStatusCancelled:
            NSLog(@"Export canceled");
            break;
        default:
            break;
    }
}];
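BASIC_TIMESCALE is presumably a project-specific macro of the answerer's, not an AVFoundation constant; any positive timescale works for a zero start time, for example:
// Assumed definition; 600 is the conventional QuickTime timescale.
#define BASIC_TIMESCALE 600
Since the range here starts at zero and covers the whole asset, the explicit timeRange assignment can usually be omitted; by default the export session processes the entire asset.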

Is there a way to save iPod music files to iPhone App using iPhone SDK?

Is there a way to save iPod music files to iPhone App using iPhone SDK?
I show the audio list in a table view; when I tap a song in the table view, I hold that song in NSData for upload.
- (void)startVideoStart:(NSNumber *)number
{
    MPMediaItem *song = [miPodSongsArray objectAtIndex:[number intValue]];
    songTitle = [song valueForProperty:MPMediaItemPropertyTitle];
    NSURL *url = [song valueForProperty:MPMediaItemPropertyAssetURL];
    [NSThread detachNewThreadSelector:@selector(loadAudioInBackground:) toTarget:self withObject:url];
    // NOTE: this waits a fixed 10 seconds for the export to finish; see the note below the method
    [self performSelector:@selector(UploadSong:) withObject:songDict afterDelay:10];
}
- (void)loadAudioInBackground:(NSURL *)url
{
    NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
    NSLog(@"%@", url);
    AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:url options:nil];
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];
    NSLog(@"compatible presets for songAsset: %@", [AVAssetExportSession exportPresetsCompatibleWithAsset:songAsset]);

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc]
                                      initWithAsset:songAsset
                                      presetName:AVAssetExportPresetPassthrough];
    NSLog(@"created exporter. supportedFileTypes: %@", exporter.supportedFileTypes);
    exporter.outputFileType = @"com.apple.m4a-audio";
    NSString *exportFile = [documentsDirectory stringByAppendingPathComponent:@"item.m4a"];
    NSError *error1;
    if ([[NSFileManager defaultManager] fileExistsAtPath:exportFile])
    {
        [[NSFileManager defaultManager] removeItemAtPath:exportFile error:&error1];
    }
    NSURL *exportURL = [[NSURL fileURLWithPath:exportFile] retain];
    exporter.outputURL = exportURL;

    // do the export
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        NSData *data1 = [NSData dataWithContentsOfFile:[documentsDirectory stringByAppendingPathComponent:@"item.m4a"]];
        //NSLog(@"%@", data1);
        if (songDict) {
            [songDict release];
            songDict = nil;
        }
        songDict = [[NSMutableDictionary alloc] init];
        [songDict setValue:data1 forKey:@"AudioData"];
        [songDict setValue:songTitle forKey:@"AudioName"];
        [songDict setValue:[[mAppDelegate.userInfoArray objectAtIndex:1] valueForKey:@"user_id"] forKey:@"user_id"];
        //NSLog(@"%@", infoDict);
        mAppDelegate.uploadType = @"Audio";

        int exportStatus = exporter.status;
        switch (exportStatus) {
            case AVAssetExportSessionStatusFailed: {
                // log the error
                NSError *exportError = exporter.error;
                NSLog(@"AVAssetExportSessionStatusFailed: %@", exportError);
                // errorView.text = exportError ? [exportError description] : @"Unknown failure";
                // errorView.hidden = NO;
                break;
            }
            case AVAssetExportSessionStatusCompleted: {
                NSLog(@"AVAssetExportSessionStatusCompleted");
                break;
            }
            case AVAssetExportSessionStatusUnknown:   { NSLog(@"AVAssetExportSessionStatusUnknown"); break; }
            case AVAssetExportSessionStatusExporting: { NSLog(@"AVAssetExportSessionStatusExporting"); break; }
            case AVAssetExportSessionStatusCancelled: { NSLog(@"AVAssetExportSessionStatusCancelled"); break; }
            case AVAssetExportSessionStatusWaiting:   { NSLog(@"AVAssetExportSessionStatusWaiting"); break; }
            default: { NSLog(@"didn't get export status"); break; }
        }
    }];
    [pool release];
}
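One caveat with the snippet above: startVideoStart: fires UploadSong: after a fixed 10-second delay, which races the asynchronous export. A more robust approach (a sketch, reusing UploadSong: and songDict from the code above) is to trigger the upload from the export completion handler once the export has actually finished:
// Inside exportAsynchronouslyWithCompletionHandler:, after building songDict:
if (exporter.status == AVAssetExportSessionStatusCompleted) {
    // Upload only once the .m4a file is really on disk; hop to the main thread for any UI work.
    dispatch_async(dispatch_get_main_queue(), ^{
        [self UploadSong:songDict];
    });
}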

copy ipod music library audio file to Iphone App folder?

Can we store an iPod music library file (a song) in the iPhone app's folder?
If yes, please suggest how I can do that.
Please Help
Thanks
The answer is the same as for the previous question: get the song's MPMediaItemPropertyAssetURL, then export it with AVAssetExportSession using the passthrough preset into the app's Documents directory as an .m4a file (see the startVideoStart: / loadAudioInBackground: code above).
Please also have a look at TSLibraryImport; it handles the AVAssetExportSession code for you.