Is it possible to access the sound-file gallery just like the image gallery in my iOS app? I have tried searching the net but have not found any good content about it.
You can refer to the AddMusic sample application from Apple to see how it's done.
Yes, you can upload it to a server. Use the code below.
When you select a song from the picker:
- (void)mediaPicker:(MPMediaPickerController *)mediaPicker didPickMediaItems:(MPMediaItemCollection *)mediaItemCollection
{
    [self dismissModalViewControllerAnimated:YES];
    if ([mediaItemCollection count] < 1)
    {
        return;
    }
    // Take the first picked item (retained under MRC)
    MPMediaItem *song = [[[mediaItemCollection items] objectAtIndex:0] retain];
    [self uploadMusicFile:song];
}
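For completeness, presenting the picker itself might look roughly like this (a minimal sketch, not from the original answer):
MPMediaPickerController *picker = [[MPMediaPickerController alloc] initWithMediaTypes:MPMediaTypeMusic];
picker.delegate = self;
picker.allowsPickingMultipleItems = NO;
[self presentModalViewController:picker animated:YES];
[picker release]; // under MRC; the controller is retained while presented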
- (void)uploadMusicFile:(MPMediaItem *)song
{
    // Configure the audio session for playback
    AVAudioSession *audioSession = [AVAudioSession sharedInstance];
    [audioSession setCategory:AVAudioSessionCategoryPlayback error:nil];
    NSURL *assetURL = [song valueForProperty:MPMediaItemPropertyAssetURL];
    AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:assetURL options:nil];
    // Use the current Unix time to build a unique file name
    NSTimeInterval seconds = [[NSDate date] timeIntervalSince1970];
    NSString *intervalSeconds = [NSString stringWithFormat:@"%0.0f", seconds];
    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:songAsset presetName:AVAssetExportPresetPassthrough];
    exporter.outputFileType = @"public.mpeg-4";
    NSString *exportFile = [DOCUMENTS_FOLDER stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.mp4", intervalSeconds]];
    NSURL *exportURL = [[NSURL fileURLWithPath:exportFile] retain];
    exporter.outputURL = exportURL;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        NSData *data = [NSData dataWithContentsOfURL:exportURL];
        // Upload `data` to your server here
    }];
}
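The upload code itself is left open above. As a starting point only, a minimal synchronous sketch could look like this (the endpoint URL is a placeholder assumption; a real app should upload asynchronously):
NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:[NSURL URLWithString:@"http://example.com/upload"]]; // placeholder URL
[request setHTTPMethod:@"POST"];
[request setHTTPBody:data];
NSURLResponse *response = nil;
NSError *uploadError = nil;
[NSURLConnection sendSynchronousRequest:request returningResponse:&response error:&uploadError];
if (uploadError) {
    NSLog(@"upload failed: %@", uploadError);
}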
Related
I am capturing video and it works fine, but I want to get a thumbnail of that video and show it in an image view. Any idea how to do this? Below is my code.
if ([type isEqualToString:(NSString *)kUTTypeVideo] || [type isEqualToString:(NSString *)kUTTypeMovie])
{
    NSURL *videoURL = [info objectForKey:UIImagePickerControllerMediaURL];
    NSLog(@"found a video");
    videoData = [[NSData dataWithContentsOfURL:videoURL] retain];
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];
    NSDateFormatter *dateFormat = [[[NSDateFormatter alloc] init] autorelease];
    [dateFormat setDateFormat:@"dd-MM-yyyy_HH:mm:ss"];
    NSDate *now = [[[NSDate alloc] init] autorelease];
    NSString *myDate = [dateFormat stringFromDate:now];
    NSString *dataPath = [documentsDirectory stringByAppendingPathComponent:@"Default Album"];
    if (![[NSFileManager defaultManager] fileExistsAtPath:dataPath])
        [[NSFileManager defaultManager] createDirectoryAtPath:dataPath withIntermediateDirectories:NO attributes:nil error:nil];
    NSString *test = @"test";
    NSString *testUser = [test stringByReplacingOccurrencesOfString:@" " withString:@""];
    videopath = [[[NSString alloc] initWithString:[NSString stringWithFormat:@"%@/%@.mov", documentsDirectory, testUser]] autorelease];
    BOOL success = [videoData writeToFile:videopath atomically:NO];
    NSLog(@"Success: %@", success ? @"YES" : @"NO");
    NSLog(@"video path --> %@", videopath);
    NSURL *movieURL = [NSURL fileURLWithPath:videopath];
    AVURLAsset *avUrl = [AVURLAsset assetWithURL:movieURL];
    CMTime time = [avUrl duration];
    int seconds = ceil((double)time.value / time.timescale);
    // durationTime = [NSString stringWithFormat:@"%d", seconds];
    // insertTime = [NSString stringWithFormat:@"%d", seconds];
    NSString *messageA = [NSString stringWithFormat:@"You have recorded a video of a duration of %d seconds", seconds];
    UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Alert" message:messageA
                                                   delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil];
    [alert show];
    NSDate *date = [NSDate date];
    NSDateFormatter *formatter = [[NSDateFormatter alloc] init];
    [formatter setDateFormat:@"yyyy-MM-dd HH:mm:ss"];
    //[formatter setDateFormat:@"MM-dd-yyyy"];
    NSString *str = [formatter stringFromDate:date];
}
Try this one:
#import <AssetsLibrary/AssetsLibrary.h>
#import <AVFoundation/AVFoundation.h>
#import <AudioToolbox/AudioToolbox.h>
#import <CoreMedia/CoreMedia.h>
#import <MediaPlayer/MediaPlayer.h>
// Create a thumbnail from the video
AVAsset *asset = [AVAsset assetWithURL:url]; // url = your video URL here
AVAssetImageGenerator *imageGenerator = [[AVAssetImageGenerator alloc] initWithAsset:asset];
CMTime time = CMTimeMake(1, 5); // grabs a frame 1/5 of a second into the video
CGImageRef imageRef = [imageGenerator copyCGImageAtTime:time actualTime:NULL error:NULL];
UIImage *thumbnail = [UIImage imageWithCGImage:imageRef];
CGImageRelease(imageRef); // the copied CGImageRef must be released manually
cell.backGround.image = thumbnail; // assign to whichever image view you want
Hope it helps.
Note that MPMoviePlayerController (see below) is a much faster option; the code above can take a good 5-10 seconds to generate a thumbnail. You can then show the result in a UIImageView:
NSURL *videoURL = [NSURL fileURLWithPath:url];
MPMoviePlayerController *player = [[MPMoviePlayerController alloc] initWithContentURL:videoURL];
UIImage *thumbnail = [player thumbnailImageAtTime:1.0 timeOption:MPMovieTimeOptionNearestKeyFrame];
//Player autoplays audio on init
[player stop];
[player release];
#import <AVFoundation/AVFoundation.h>
- (UIImage *)generateThumbImage:(NSString *)filepath {
    NSURL *url = [NSURL fileURLWithPath:filepath];
    AVAsset *asset = [AVAsset assetWithURL:url];
    AVAssetImageGenerator *imageGenerator = [[AVAssetImageGenerator alloc] initWithAsset:asset];
    CMTime time = [asset duration];
    time.value = 1000; // in units of the asset's timescale, not milliseconds
    CGImageRef imageRef = [imageGenerator copyCGImageAtTime:time actualTime:NULL error:NULL];
    UIImage *thumbnail = [UIImage imageWithCGImage:imageRef];
    CGImageRelease(imageRef); // CGImageRef won't be released by ARC
    return thumbnail;
}
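Example usage, assuming videoPath points to a local movie file and imageView is the UIImageView you want to fill:
imageView.image = [self generateThumbImage:videoPath];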
I have tried the following code.
This is my delegate method of MPMediaPickerController:
- (void)mediaPicker:(MPMediaPickerController *)mediaPicker didPickMediaItems:(MPMediaItemCollection *)mediaItemCollection {
    // Dismiss the media item picker.
    [self dismissModalViewControllerAnimated:YES];
    NSLog(@"%@ %lu", mediaItemCollection, (unsigned long)mediaItemCollection.count);
    NSArray *newMediaItem = [mediaItemCollection items];
    MPMediaItem *item = [[newMediaItem objectAtIndex:0] retain];
    [self uploadMusicFile:item];
}
This is my custom method to convert the MPMediaItem to NSData:
- (void)uploadMusicFile:(MPMediaItem *)song
{
    NSURL *url = [song valueForProperty:MPMediaItemPropertyAssetURL];
    AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:url options:nil];
    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:songAsset
                                                                      presetName:AVAssetExportPresetPassthrough];
    exporter.outputFileType = @"public.mpeg-4";
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];
    NSString *exportFile = [documentsDirectory stringByAppendingPathComponent:@"exported.mp4"];
    NSURL *exportURL = [[NSURL fileURLWithPath:exportFile] retain];
    exporter.outputURL = exportURL;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        NSData *data = [NSData dataWithContentsOfFile:[documentsDirectory stringByAppendingPathComponent:@"exported.mp4"]];
        NSLog(@"%@", data);
    }];
}
I am getting "null" in the NSLog.
I have also checked this post:
how to convert nsdata to MPMediaitem song iOS Sdk
but am not getting a solution.
I am using Xcode 4.6 and iOS 6.1.
Can anyone tell me what is wrong here?
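One way to narrow this down (a suggestion, not from the original post): check the exporter's status and error in the completion handler instead of only reading the file. AVAssetExportSession fails silently if, for example, the output file already exists or the item is DRM-protected (in which case MPMediaItemPropertyAssetURL returns nil):
[exporter exportAsynchronouslyWithCompletionHandler:^{
    if (exporter.status == AVAssetExportSessionStatusCompleted) {
        NSData *data = [NSData dataWithContentsOfURL:exportURL];
        NSLog(@"exported %lu bytes", (unsigned long)[data length]);
    } else {
        NSLog(@"export failed with status %d: %@", (int)exporter.status, exporter.error);
    }
}];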
I have created a simple app that triggers 3 different sounds based on the x, y, and z axes of the accelerometer, like an instrument. At the moment, if I set the update interval of the accelerometer too low, it plays the sounds too often, and if I set it too high, it isn't responsive enough. I am a complete beginner at Objective-C and iPhone development, as you can probably tell from the code!
- (void)viewDidLoad
{
    [super viewDidLoad];
    // Do any additional setup after loading the view.
    UIAccelerometer *accelerometer = [UIAccelerometer sharedAccelerometer];
    [accelerometer setUpdateInterval:25.0 / 10.0f];
    [[AVAudioSession sharedInstance] setDelegate:self];
    [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback error:nil];
    [accelerometer setDelegate:self];
    UInt32 audioRouteOverride = kAudioSessionOverrideAudioRoute_Speaker;
    AudioSessionSetProperty(kAudioSessionProperty_OverrideAudioRoute, sizeof(audioRouteOverride), &audioRouteOverride);
    player.volume = 0.5;
    player.numberOfLoops = 0;
    player.delegate = self;
}
- (void)accelerometer:(UIAccelerometer *)acel didAccelerate:(UIAcceleration *)aceler
{
    if (aceler.x > 0.5) {
        NSString *soundFilePath = [[NSBundle mainBundle] pathForResource:@"snare" ofType:@"mp3"];
        NSURL *soundFileURL = [NSURL fileURLWithPath:soundFilePath];
        player = [[AVAudioPlayer alloc] initWithContentsOfURL:soundFileURL error:nil];
        NSLog(@"acceleration.x = %+.6f greater", aceler.x);
        [player play];
    }
    else if (aceler.y > 0.5) {
        NSString *soundFilePath = [[NSBundle mainBundle] pathForResource:@"kick2" ofType:@"mp3"];
        NSURL *soundFileURL = [NSURL fileURLWithPath:soundFilePath];
        player = [[AVAudioPlayer alloc] initWithContentsOfURL:soundFileURL error:nil];
        NSLog(@"acceleration.y = %+.6f greater", aceler.y);
        [player play];
    }
    else if (aceler.z > 0.5) {
        NSString *soundFilePath = [[NSBundle mainBundle] pathForResource:@"hat" ofType:@"mp3"];
        NSURL *soundFileURL = [NSURL fileURLWithPath:soundFilePath];
        player = [[AVAudioPlayer alloc] initWithContentsOfURL:soundFileURL error:nil];
        NSLog(@"acceleration.z = %+.6f greater", aceler.z);
        [player play];
    }
    else {
        [player stop];
    }
}
Setting the accelerometer's update frequency to a low value won't help here. Imagine the following situation:
time: ------------------------>
real world acceleration event: ___X_____X_____X_____X___
acceleration update: X_____X_____X_____X_____X
sound output started: _________________________
This sketch represents a user shaking the device at the same frequency as the accelerometer updates, but with each shake event occurring right in the middle between two accelerometer updates. As a result, the shake events won't be registered and no sound is played.
Now consider a high accelerometer update frequency, in contrast to the prior approach:
real world acceleration event: ___X_____X_____X_____X___
acceleration update: XXXXXXXXXXXXXXXXXXXXXXXXX
sound output started: ___X_____X_____X_____X___
In this situation, essentially every real-world event results in a sound being played.
The amended code is the following:
- (void)viewDidLoad
{
    [super viewDidLoad];
    NSLog(@"viewDidLoad");
    UIAccelerometer *accelerometer = [UIAccelerometer sharedAccelerometer];
    //[accelerometer setUpdateInterval: 25.0 / 10.0f];
    [accelerometer setUpdateInterval:0.01f];
    [[AVAudioSession sharedInstance] setDelegate:self];
    [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback error:nil];
    [accelerometer setDelegate:self];
    UInt32 audioRouteOverride = kAudioSessionOverrideAudioRoute_Speaker;
    AudioSessionSetProperty(kAudioSessionProperty_OverrideAudioRoute, sizeof(audioRouteOverride), &audioRouteOverride);
    //player.volume = 0.5;
    //player.numberOfLoops = 0;
    //player.delegate = self;
}
- (void)accelerometer:(UIAccelerometer *)acel didAccelerate:(UIAcceleration *)aceler
{
    if ((aceler.x > ACC_THRESHOLD) && ((playerX == nil) || (playerX.isPlaying == NO))) {
        NSString *soundFilePath = [[NSBundle mainBundle] pathForResource:@"snare" ofType:@"mp3"];
        NSURL *soundFileURL = [NSURL fileURLWithPath:soundFilePath];
        playerX = [[AVAudioPlayer alloc] initWithContentsOfURL:soundFileURL error:nil];
        NSLog(@"acceleration.x = %+.6f greater", aceler.x);
        playerX.delegate = self;
        [playerX play];
    }
    if ((aceler.y > ACC_THRESHOLD) && ((playerY == nil) || (playerY.isPlaying == NO))) {
        NSString *soundFilePath = [[NSBundle mainBundle] pathForResource:@"kick2" ofType:@"mp3"];
        NSURL *soundFileURL = [NSURL fileURLWithPath:soundFilePath];
        playerY = [[AVAudioPlayer alloc] initWithContentsOfURL:soundFileURL error:nil];
        NSLog(@"acceleration.y = %+.6f greater", aceler.y);
        playerY.delegate = self;
        [playerY play];
    }
    if ((aceler.z > ACC_THRESHOLD) && ((playerZ == nil) || (playerZ.isPlaying == NO))) {
        NSString *soundFilePath = [[NSBundle mainBundle] pathForResource:@"hat" ofType:@"mp3"];
        NSURL *soundFileURL = [NSURL fileURLWithPath:soundFilePath];
        playerZ = [[AVAudioPlayer alloc] initWithContentsOfURL:soundFileURL error:nil];
        NSLog(@"acceleration.z = %+.6f greater", aceler.z);
        playerZ.delegate = self;
        [playerZ play];
    }
    //else {
    //    [player stop];
    //}
}
Please note that multiple sounds may be triggered by just one event, as each axis is now evaluated separately. A threshold constant has been introduced via #define ACC_THRESHOLD 0.5f. A new sound for a given axis is started only after the previous play has finished.
After this general change to the event handling, you may start optimizing with a signal filter; see the sketch below.
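For instance, a basic high-pass filter (a minimal sketch, not from the original answer; accelX is an assumed instance variable and kFilteringFactor an assumed tuning constant) separates sudden shakes from gravity before the threshold check:
#define kFilteringFactor 0.1
// Rolling low-pass estimate of gravity; subtracting it leaves the
// instantaneous (high-pass) component that corresponds to shakes.
accelX = (aceler.x * kFilteringFactor) + (accelX * (1.0 - kFilteringFactor));
double highPassX = aceler.x - accelX;
if (highPassX > ACC_THRESHOLD) {
    // trigger the sound for the x axis
}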
Additionally, you may use AVAudioPlayer's delegate methods for more detailed sound handling:
#pragma mark -
#pragma mark AVAudioPlayerDelegate methods
- (void)audioPlayerDidFinishPlaying:(AVAudioPlayer *)player successfully:(BOOL)flag {
    NSLog(@"audioPlayerDidFinishPlaying: %i", flag);
}
- (void)audioPlayerDecodeErrorDidOccur:(AVAudioPlayer *)player error:(NSError *)error {
    NSLog(@"audioPlayerDecodeErrorDidOccur: %@", error.description);
}
I am able to record video and output the movie to a file correctly. However, I have a problem with video recording (no video output) when trying to use AVAudioPlayer to play some audio. Does this mean that I cannot use AVCaptureSession and AVAudioPlayer at the same time? Here is my code to create the capture session and to play the audio. The video capture is started first; then, during capturing, I want to play some audio. Thanks so much for any help.
Code to create the capture session to record video (with audio) and output to a .mov file:
- (void)addVideoInput
{
    AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    //... also some code for setting up videoDevice/frontCamera
    NSError *error;
    AVCaptureDeviceInput *videoIn = [AVCaptureDeviceInput deviceInputWithDevice:frontCamera error:&error];
    AVCaptureDeviceInput *audioIn = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&error];
    if (!error) {
        if ([captureSession canAddInput:audioIn])
            [captureSession addInput:audioIn];
        else
            NSLog(@"Couldn't add audio input.");
        if ([captureSession canAddInput:videoIn])
            [captureSession addInput:videoIn];
        else
            NSLog(@"Couldn't add video input.");
    }
}
- (void)addVideoOutput
{
    AVCaptureMovieFileOutput *m_captureFileOutput = [[AVCaptureMovieFileOutput alloc] init];
    [captureSession addOutput:m_captureFileOutput];
    [captureSession startRunning];
    NSString *docDir = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
    NSString *fileRoot = [docDir stringByAppendingPathComponent:@"Movie.mov"];
    NSURL *fileURL = [NSURL fileURLWithPath:fileRoot];
    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in [m_captureFileOutput connections])
    {
        for (AVCaptureInputPort *port in [connection inputPorts])
        {
            if ([[port mediaType] isEqual:AVMediaTypeVideo])
            {
                videoConnection = connection;
            }
        }
    }
    [videoConnection setVideoOrientation:AVCaptureVideoOrientationLandscapeRight];
    [m_captureFileOutput startRecordingToOutputFileURL:fileURL recordingDelegate:self];
    [m_captureFileOutput release];
}
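Note that startRecordingToOutputFileURL:recordingDelegate: requires the delegate to implement the AVCaptureFileOutputRecordingDelegate callback; a minimal sketch:
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
    if (error) {
        NSLog(@"Recording failed: %@", error);
    }
}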
Code to play the audio; this function is called during the video capture session. If I don't call this function, the video is recorded and I am able to save the .mov file. However, if I call this function, there is no output .mov file.
- (void)playAudio
{
    NSString *soundFilePath = [[NSBundle mainBundle] pathForResource:@"AudioFile" ofType:@"mp3"];
    NSURL *fileURL = [[NSURL alloc] initFileURLWithPath:soundFilePath];
    AVAudioPlayer *newPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:fileURL error:nil];
    [fileURL release];
    self.audioPlayer = newPlayer;
    [newPlayer release];
    [audioPlayer setDelegate:self];
    [audioPlayer prepareToPlay];
    [audioPlayer play];
}
I fixed the problem by setting up the audio session. I called the following function before creating the audio player object. That way, I was able to record video (with audio) and play audio at the same time.
- (void)setupAudioSession {
    static BOOL audioSessionSetup = NO;
    if (audioSessionSetup) {
        return;
    }
    [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback error:nil];
    // Allow our playback audio to mix with the capture session's audio
    UInt32 doSetProperty = 1;
    AudioSessionSetProperty(kAudioSessionProperty_OverrideCategoryMixWithOthers, sizeof(doSetProperty), &doSetProperty);
    [[AVAudioSession sharedInstance] setActive:YES error:nil];
    audioSessionSetup = YES;
}
- (void)playAudio
{
    [self setupAudioSession];
    NSString *soundFilePath = [[NSBundle mainBundle] pathForResource:@"AudioFile" ofType:@"mp3"];
    NSURL *fileURL = [[NSURL alloc] initFileURLWithPath:soundFilePath];
    AVAudioPlayer *newPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:fileURL error:nil];
    [fileURL release];
    self.audioPlayer = newPlayer;
    [newPlayer release];
    [audioPlayer setDelegate:self];
    [audioPlayer prepareToPlay];
    [audioPlayer play];
}
Here I create the video successfully, and I merge the video and audio into MOV format; the file is exported using AVAssetExportSession. But when the file is played in the media player, it does not play; it just displays a blank screen.
Here is the merging code for the video and audio:
-(void)combine:(NSString *)audiopathvalue videoURL:(NSString *)videopathValue
{
    // 1. Create an AVMutableComposition
    CFAbsoluteTime currentTime = CFAbsoluteTimeGetCurrent(); // Debug purpose - used to calculate the total time taken
    NSError *error = nil;
    AVMutableComposition *saveComposition = [AVMutableComposition composition];
    // 2. Get the video and audio file paths
    NSString *tempPath = NSTemporaryDirectory();
    NSString *videoPath = videopathValue;
    NSString *audioPath = audiopathvalue;
    // 3. Create the video asset
    NSURL *url1 = [[NSURL alloc] initFileURLWithPath:videoPath];
    AVURLAsset *video = [AVURLAsset URLAssetWithURL:url1 options:nil];
    [url1 release];
    // 4. Get the AVMutableCompositionTrack for video and add the video track to it.
    // insertTimeRange:ofTrack:atTime: decides what portion of the video is added and where the video track appears in the final video.
    AVMutableCompositionTrack *compositionVideoTrack = [saveComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *clipVideoTrack = [[video tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [video duration]) ofTrack:clipVideoTrack atTime:kCMTimeZero error:nil];
    NSLog(@"%f %@", CMTimeGetSeconds([video duration]), error);
    // 5. Create the audio asset
    NSLog(@"audioPath:%@", audioPath);
    NSURL *url2 = [[NSURL alloc] initFileURLWithPath:audioPath];
    AVURLAsset *audio = [AVURLAsset URLAssetWithURL:url2 options:nil];
    [url2 release];
    // 6. Get the AVMutableCompositionTrack for audio and add the audio track to it.
    AVMutableCompositionTrack *compositionAudioTrack = [saveComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *clipAudioTrack = [[audio tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [audio duration]) ofTrack:clipAudioTrack atTime:kCMTimeZero error:nil];
    NSLog(@"%f %@", CMTimeGetSeconds([audio duration]), error);
    // 7. Get the file path for the final video.
    NSString *path = [tempPath stringByAppendingPathComponent:@"mergedvideo.MOV"];
    if ([[NSFileManager defaultManager] fileExistsAtPath:path])
    {
        [[NSFileManager defaultManager] removeItemAtPath:path error:nil];
    }
    NSURL *url = [[NSURL alloc] initFileURLWithPath:path];
    // 8. Create the AVAssetExportSession and set its preset.
    // The completion handler will be called upon completion of the export.
    AVAssetExportSession *exporter = [[[AVAssetExportSession alloc] initWithAsset:saveComposition presetName:AVAssetExportPresetHighestQuality] autorelease];
    exporter.outputURL = url;
    exporter.outputFileType = @"com.apple.quicktime-movie";
    NSLog(@"file type %@", exporter.outputFileType);
    exporter.shouldOptimizeForNetworkUse = YES;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        switch ([exporter status]) {
            case AVAssetExportSessionStatusFailed:
                NSLog(@"Export failed: %@", [[exporter error] localizedDescription]);
                NSLog(@"ExportSessionError: %@", exporter.error);
                break;
            case AVAssetExportSessionStatusCancelled:
                NSLog(@"Export canceled");
                break;
            case AVAssetExportSessionStatusCompleted:
            {
                NSLog(@"Export Completed");
                ImageToAirPlayAppDelegate *theApp_iphone = (ImageToAirPlayAppDelegate *)[[UIApplication sharedApplication] delegate];
                [theApp_iphone call];
                break;
            }
            default:
                break;
        }
        //[exporter release];
    }];
}
The video path contains a series of images, and the audio path contains a single audio file.
The function (not in your code):
- (void) captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
Try doing the processing there.
It gives you the outputFileURL, which is the one you should use in your mix. There is no reason to use an NSString in the combine function.
I also recommend using AVFileTypeQuickTimeMovie instead of @"com.apple.quicktime-movie". It is the same value, but easier to handle in case you want to experiment with other formats.
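That is, instead of the raw UTI string:
exporter.outputFileType = AVFileTypeQuickTimeMovie; // equivalent to @"com.apple.quicktime-movie"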
To see the available file types, just use:
NSLog(@"%@", [exporter supportedFileTypes]);