I have to split a video into two parts. I am using this trim function twice; is that a good approach? I have read through the AVFoundation framework but didn't find any direct way to split a video, yet there are iPhone apps which split video very smoothly. Are they using the trim function? I need suggestions.
-(void)splitSecondVideo{
NSString *deleteVideo = [_videoPath path];
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:_videoPath options:nil];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc]initWithAsset:asset presetName:AVAssetExportPresetLowQuality];
NSString *outputURL = nil;
NSString *videoFileName = nil;
videoFileName = [SPUtility getDefaultVideoName];
outputURL = [NSString stringWithFormat:@"%@/%@.mov", _clipFolderPath, videoFileName];
NSFileManager *manager = [NSFileManager defaultManager];
// Make sure the clip folder exists, then remove any previous file at the output path
[manager createDirectoryAtPath:_clipFolderPath withIntermediateDirectories:YES attributes:nil error:nil];
[manager removeItemAtPath:outputURL error:nil];
exportSession.outputURL = [NSURL fileURLWithPath:outputURL];
exportSession.shouldOptimizeForNetworkUse = YES;
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
// Trim to the second half: start at the midpoint, run for the remaining duration
double fullDuration = CMTimeGetSeconds([asset duration]);
double halfDuration = fullDuration / 2.0;
CMTime secondHalfStart = CMTimeMakeWithSeconds(halfDuration, 600);
CMTime secondHalfDuration = CMTimeMakeWithSeconds(fullDuration - halfDuration, 600);
CMTimeRange secondRange = CMTimeRangeMake(secondHalfStart, secondHalfDuration);
exportSession.timeRange = secondRange;
[exportSession exportAsynchronouslyWithCompletionHandler:^(void)
{
switch (exportSession.status) {
case AVAssetExportSessionStatusCompleted:
[manager removeItemAtPath:deleteVideo error:nil];
NSLog(@"Export Complete %d %@", exportSession.status, exportSession.error);
break;
case AVAssetExportSessionStatusFailed:
NSLog(@"Failed: %@", exportSession.error);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(@"Canceled: %@", exportSession.error);
break;
default:
break;
}
}];
}
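For what it's worth, splitting by exporting two time ranges is essentially the trim approach run twice. A minimal sketch of how the two ranges could be computed; the -exportRange:ofAsset:toPath: helper and the two output paths are hypothetical, each standing in for one AVAssetExportSession as in the method above:
// Sketch only: compute the two halves in the asset's own timescale and export each range
- (void)splitAsset:(AVURLAsset *)asset intoFirstPath:(NSString *)firstPath secondPath:(NSString *)secondPath
{
CMTime full = asset.duration;
CMTime half = CMTimeMake(full.value / 2, full.timescale); // midpoint, no rounding to whole seconds
CMTimeRange firstHalf = CMTimeRangeMake(kCMTimeZero, half);
CMTimeRange secondHalf = CMTimeRangeMake(half, CMTimeSubtract(full, half));
[self exportRange:firstHalf ofAsset:asset toPath:firstPath]; // hypothetical helper
[self exportRange:secondHalf ofAsset:asset toPath:secondPath]; // hypothetical helper
}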
I want to trim a video:
-(void)trimVideo:(NSURL*)outputURL
{
//[[NSFileManager defaultManager] removeItemAtURL:outputURL error:nil];
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:outputURL options:nil];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetLowQuality];
NSString * outputFilePath = NSHomeDirectory();
outputFilePath = [outputFilePath stringByAppendingPathComponent:@"Library"];
outputFilePath = [outputFilePath stringByAppendingPathComponent:@"temp.mov"];
NSURL * outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
exportSession.outputURL = outputFileUrl;
exportSession.shouldOptimizeForNetworkUse = YES;
exportSession.outputFileType = AVFileTypeMPEG4;
CMTime start = CMTimeMakeWithSeconds(1.0, 600);
CMTime duration = CMTimeMakeWithSeconds(3.0, 600);
CMTimeRange range = CMTimeRangeMake(start, duration);
exportSession.timeRange = range;
[exportSession exportAsynchronouslyWithCompletionHandler:^(void)
{
NSLog(@"Export Complete %d %@", exportSession.status, exportSession.error);
//[exportSession release];
}];
}
But I get the error:
Export Complete 4 Error Domain=AVFoundationErrorDomain Code=-11823 "Cannot Save" UserInfo=0x2008f420 {NSLocalizedRecoverySuggestion=Try saving again., NSLocalizedDescription=Cannot Save}
Not exactly sure how to resolve.
This did the trick:
-(void)trimVideo:(NSURL*)videoToTrimURL
{
//[[NSFileManager defaultManager] removeItemAtURL:outputURL error:nil];
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:videoToTrimURL options:nil];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetHighestQuality];
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *outputURL = paths[0];
NSFileManager *manager = [NSFileManager defaultManager];
[manager createDirectoryAtPath:outputURL withIntermediateDirectories:YES attributes:nil error:nil];
outputURL = [outputURL stringByAppendingPathComponent:@"output.mp4"];
// Remove Existing File
[manager removeItemAtPath:outputURL error:nil];
exportSession.outputURL = [NSURL fileURLWithPath:outputURL];
exportSession.shouldOptimizeForNetworkUse = YES;
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
CMTime start = CMTimeMakeWithSeconds(1.0, 600);
CMTime duration = CMTimeMakeWithSeconds(3.0, 600);
CMTimeRange range = CMTimeRangeMake(start, duration);
exportSession.timeRange = range;
[exportSession exportAsynchronouslyWithCompletionHandler:^(void)
{
switch (exportSession.status) {
case AVAssetExportSessionStatusCompleted:
[self writeVideoToPhotoLibrary:[NSURL fileURLWithPath:outputURL]];
NSLog(@"Export Complete %d %@", exportSession.status, exportSession.error);
break;
case AVAssetExportSessionStatusFailed:
NSLog(@"Failed: %@", exportSession.error);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(@"Canceled: %@", exportSession.error);
break;
default:
break;
}
//[exportSession release];
}];
}
I am developing an iPhone app which records its activity (screen recording) and uploads it to YouTube. The video initially has no sound; before uploading, the app mixes some sound into the video. The output video plays on my iPhone and iPad without any problem, but the uploaded video plays without sound (there is sound only at the very start). My video format is .mov.
My code to mix the sound and video is below, as in purplelilgirl's tutorial:
-(NSString*) processVideo: (NSURL*) videoUrl{
NSLog(@"started processing %@", videoUrl);
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL: videoUrl options:nil];
AVMutableComposition* mixComposition = [AVMutableComposition composition];
NSError * error = nil;
for (NSMutableDictionary * audioInfo in audioInfoArray){
// NSString *pathString = [[NSHomeDirectory() stringByAppendingString:@"/Documents/"] stringByAppendingString: [audioInfo objectForKey: @"fileName"]];
// NSString *pathString = [audioInfo objectForKey: @"filePath"];
NSURL *audioUrl = [audioInfo objectForKey: @"filePath"];
// NSLog(@"audioUrl %@", audioUrl);
AVURLAsset * urlAsset = [AVURLAsset URLAssetWithURL:audioUrl options:nil];
AVAssetTrack * audioAssetTrack = [[urlAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID: kCMPersistentTrackID_Invalid];
// NSLog(@"%lf", [[audioInfo objectForKey: @"startTime"] doubleValue]);
CMTime audioStartTime = CMTimeMake(([[audioInfo objectForKey: @"startTime"] doubleValue]*TIME_SCALE), TIME_SCALE);
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero,urlAsset.duration) ofTrack:audioAssetTrack atTime:audioStartTime error:&error];
}
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
atTime:kCMTimeZero error:&error];
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPresetPassthrough];
NSString* videoName = @"export.mov";
NSString *exportPath = [[NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0] stringByAppendingPathComponent:videoName];
NSURL *exportUrl = [NSURL fileURLWithPath:exportPath];
if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath])
{
[[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
}
_assetExport.outputFileType = @"com.apple.quicktime-movie";
// _assetExport.outputFileType = AVFileTypeMPEG4;
NSLog(@"file type %@", _assetExport.outputFileType);
_assetExport.outputURL = exportUrl;
_assetExport.shouldOptimizeForNetworkUse = YES;
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
switch (_assetExport.status)
{
case AVAssetExportSessionStatusCompleted:
//export complete
NSLog(@"Export Complete");
//[self uploadToYouTube];
break;
case AVAssetExportSessionStatusFailed:
NSLog(@"Export Failed");
NSLog(@"ExportSessionError: %@", [_assetExport.error localizedDescription]);
//export error (see exportSession.error)
break;
case AVAssetExportSessionStatusCancelled:
NSLog(@"Export cancelled");
NSLog(@"ExportSessionError: %@", [_assetExport.error localizedDescription]);
//export cancelled
break;
}
}];
NSLog(@"completed processing exportPath %@ ", exportPath); // note: this runs before the asynchronous export has finished
return exportPath;
}
What is wrong with my code? Can you help me?
What codec is the audio encoded with? As I understand it, if you are using some form of Apple proprietary codec, that could be causing your issues.
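One way to check, assuming the urlAsset from the question is already loaded, is to read the format description of its first audio track (a sketch, not part of the original code):
// Sketch: log the four-character code of the first audio track (e.g. 'aac ', 'lpcm')
NSArray *audioTracks = [urlAsset tracksWithMediaType:AVMediaTypeAudio];
if ([audioTracks count] > 0) {
AVAssetTrack *track = [audioTracks objectAtIndex:0];
CMAudioFormatDescriptionRef desc = (CMAudioFormatDescriptionRef)[track.formatDescriptions objectAtIndex:0]; // add __bridge under ARC
FourCharCode codec = CMFormatDescriptionGetMediaSubType(desc);
NSLog(@"Audio codec: %c%c%c%c", (int)((codec >> 24) & 0xFF), (int)((codec >> 16) & 0xFF), (int)((codec >> 8) & 0xFF), (int)(codec & 0xFF));
}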
I used the following code to generate a video from an array of images and (pre-recorded) audio.
- (void)viewDidLoad
{
imagearray=[[NSMutableArray alloc]initWithObjects:@"Quiz pic1.jpg",@"Quiz pic2.jpg",@"Quiz pic3.jpg",@"Quiz pic6.jpg",@"Quiz pic7.jpg",nil];
image1array=[[NSMutableArray alloc]init];
for (int i=0; i<[imagearray count]; i++)
{
UIImage *aimage=[UIImage imageNamed:[imagearray objectAtIndex:i]];
[image1array addObject:aimage];
}
NSLog(@"%@", image1array);
ImageVideoPath=@"/Users/image/Library/Application Support/iPhone Simulator/4.3/Applications/6CC91208-5819-4BFF-B868-6605887861EB/Output";
FinalVideoPath=@"/Users/image/Library/Application Support/iPhone Simulator/4.3/Applications/6CC91208-5819-4BFF-B868-6605887861EB/VideoOutput";
CGSize size;
UIImage *image=[UIImage imageNamed:[imagearray objectAtIndex:0]];
size=image.size;
NSString *audioFilePath;
int duration=10;
//[self pixelBufferFromCGImage:[[image1array objectAtIndex:0] CGImage]];
[self writeImageAndAudioAsMovie:image andAudio:audioFilePath duration:duration];
//[self pixelBufferFromCGImage:[image CGImage] andSize:size];
[super viewDidLoad];
}
- (void)writeImageAndAudioAsMovie:(UIImage*)image andAudio:(NSString *)audioFilePath duration:(int)duration {
NSLog(@"start make movie: length:%d", duration);
NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:ImageVideoPath] fileType:AVFileTypeQuickTimeMovie
error:&error];
NSParameterAssert(videoWriter);
if ([[NSFileManager defaultManager] fileExistsAtPath:ImageVideoPath])
[[NSFileManager defaultManager] removeItemAtPath:ImageVideoPath error:nil];
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:image.size.width],AVVideoWidthKey,[NSNumber numberWithInt:image.size.height], AVVideoHeightKey,nil];
AVAssetWriterInput* writerInput = [[AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings] retain];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput sourcePixelBufferAttributes:nil];
NSParameterAssert(writerInput);
NSParameterAssert([videoWriter canAddInput:writerInput]);
writerInput.expectsMediaDataInRealTime = YES;
[videoWriter setShouldOptimizeForNetworkUse:YES];
[videoWriter addInput:writerInput];
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
//Write samples:
CVPixelBufferRef buffer = [self pixelBufferFromCGImage:image.CGImage];
[adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
//Finish the session:
[videoWriter endSessionAtSourceTime:CMTimeMake(duration, 1)];
[writerInput markAsFinished];
[videoWriter finishWriting];
CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
[videoWriter release];
[writerInput release];
audioFilePath=[[NSBundle mainBundle]pathForResource:@"Video" ofType:@"mp3"];
NSLog(@"%@", audioFilePath);
[self addAudioToFileAtPath:ImageVideoPath andAudioPath:audioFilePath];
}
-(CVPixelBufferRef)pixelBufferFromCGImage: (CGImageRef) image{
float width = CGImageGetWidth(image);
float height = CGImageGetHeight(image);
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
nil];
CVPixelBufferRef pxbuffer = NULL;
CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, width,height, kCVPixelFormatType_32ARGB,(CFDictionaryRef)options,&pxbuffer);
NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
NSParameterAssert(pxdata != NULL);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata,width,height,8,4*width,rgbColorSpace,kCGImageAlphaNoneSkipFirst);
NSParameterAssert(context);
CGContextDrawImage(context, CGRectMake(0, 0,width, height), image);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
return pxbuffer;
}
-(void) addAudioToFileAtPath:(NSString *)vidoPath andAudioPath:(NSString *)audioFilePath{
AVMutableComposition* mixComposition = [AVMutableComposition composition];
NSLog(@"%@ %@", ImageVideoPath, audioFilePath);
NSURL* audio_inputFileUrl = [NSURL fileURLWithPath:audioFilePath];
NSURL* video_inputFileUrl = [NSURL fileURLWithPath:ImageVideoPath];
NSLog(@"%@", video_inputFileUrl);
NSString *outputFilePath = FinalVideoPath;
NSURL* outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
NSLog(@"asset:%@", videoAsset);
NSArray *tracks1=[videoAsset tracksWithMediaType:AVMediaTypeVideo];
if ([tracks1 count]>0)
{
//CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
AVAssetTrack *videoAssetTrack=[tracks1 objectAtIndex:0];
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero,videoAsset.duration) ofTrack:videoAssetTrack atTime:kCMTimeZero error:nil];
}
NSArray *tracks = [audioAsset tracksWithMediaType:AVMediaTypeAudio];
if([tracks count]>0)
{
AVAssetTrack * audioAssetTrack = [tracks objectAtIndex:0];
AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID: kCMPersistentTrackID_Invalid];
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero,audioAsset.duration) ofTrack:audioAssetTrack atTime:kCMTimeZero error:nil];
//nextClipStartTime = CMTimeAdd(nextClipStartTime, a_timeRange.duration);
[audioAsset release];audioAsset = nil;
}
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
_assetExport.outputFileType = AVFileTypeMPEG4;
_assetExport.outputURL = outputFileUrl;
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
switch (_assetExport.status)
{
case AVAssetExportSessionStatusCompleted:
//export complete
NSLog(@"Export Complete");
break;
case AVAssetExportSessionStatusFailed:
NSLog(@"Export Failed");
NSLog(@"ExportSessionError: %@", [_assetExport.error localizedDescription]);
//export error (see exportSession.error)
break;
case AVAssetExportSessionStatusCancelled:
NSLog(@"Export Cancelled");
NSLog(@"ExportSessionError: %@", [_assetExport.error localizedDescription]);
//export cancelled
break;
}
}];
}
I found that the video file created by the writeImageAndAudioAsMovie method doesn't play in any player on my machine...
I don't know what I'm missing. Any suggestions, please?
The problem lies in two places:
1. The path you are providing should be somewhere you are allowed to write, such as the Documents directory, and it must have the appropriate extension. Since you are creating a HighestQuality video, it should be .mov.
2. You should provide the proper outputFileType, matching the extension and the preset type. In your case it should be _assetExport.outputFileType = AVFileTypeQuickTimeMovie;.
Try with these changes.
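A minimal sketch of that combination (the file name is arbitrary):
// Sketch: write into Documents with a .mov extension and a matching container type,
// since the session was created with AVAssetExportPresetHighestQuality
NSString *docs = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSString *exportPath = [docs stringByAppendingPathComponent:@"final.mov"]; // arbitrary name
_assetExport.outputURL = [NSURL fileURLWithPath:exportPath];
_assetExport.outputFileType = AVFileTypeQuickTimeMovie; // matches the .mov extension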
Update:
To remove the crash, you need to replace the AVAssetTrack code in the addAudioToFileAtPath method with the following:
NSArray *tracks = [videoAsset tracksWithMediaType:AVMediaTypeAudio];
if([tracks count]>0)
{
AVAssetTrack * audioAssetTrack = [tracks objectAtIndex:0];
AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID: kCMPersistentTrackID_Invalid];
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero,videoAsset.duration) ofTrack:audioAssetTrack atTime:kCMTimeZero error:nil];
//nextClipStartTime = CMTimeAdd(nextClipStartTime, a_timeRange.duration);
[audioAsset release];audioAsset = nil;
}
Regarding video types:
Facebook supports QuickTime video (mov/qt); see http://www.facebook.com/help/?faq=218673814818907
To support other types of video you will need to change the presetName when creating the AVAssetExportSession object, and the extension of the output file. Please go through this document:
http://www.google.co.in/url?sa=t&rct=j&q=AVAssetExportSession++class&source=web&cd=1&ved=0CCYQFjAA&url=http%3A%2F%2Fdeveloper.apple.com%2Flibrary%2Fios%2FDOCUMENTATION%2FAVFoundation%2FReference%2FAVAssetExportSession_Class%2FReference%2FReference.html&ei=xXxPT5akDsG8rAeck5XUDQ&usg=AFQjCNH1HqxIiT1kYJom6kZ82NS-qjVSyQ&cad=rja
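If you are unsure which combinations are valid, you can also query them at run time (a sketch, assuming the mixComposition from the code above):
// Sketch: list the presets that work with this asset, and the container types
// the chosen session can actually write
NSArray *presets = [AVAssetExportSession exportPresetsCompatibleWithAsset:mixComposition];
NSLog(@"Compatible presets: %@", presets);
AVAssetExportSession *session = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
NSLog(@"Supported file types: %@", session.supportedFileTypes);
[session release]; // MRC, as in the surrounding code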
Update 1:
Here we access each image and append it to the buffer, showing each one for some time (I have divided the duration equally among the images):
for (int i=0; i<[image1array count]; i++)
{
int time = (int)i*(duration/[image1array count]);
CVPixelBufferRef buffer = [self pixelBufferFromCGImage:[[image1array objectAtIndex:i] CGImage]];
[adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(time, 1)];
}
Update 2:
Here is the code in which I have made a few changes to compose a mixed asset.
-(void) addAudioToFileAtPath:(NSString *)vidoPath andAudioPath:(NSString *)audioFilePath{
NSURL* audio_inputFileUrl = [NSURL fileURLWithPath:audioFilePath];
NSURL* video_inputFileUrl = [NSURL fileURLWithPath:ImageVideoPath];
NSURL* outputFileUrl = [NSURL fileURLWithPath:FinalVideoPath];
AVMutableComposition *composition = [AVMutableComposition composition];
AVAsset * audioAsset = [AVURLAsset URLAssetWithURL:audio_inputFileUrl options:nil];;
AVAsset * videoAsset = [AVURLAsset URLAssetWithURL:video_inputFileUrl options:nil];
AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID:kCMPersistentTrackID_Invalid];
NSError *error = nil;
BOOL ok = NO;
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
AVAssetTrack *sourceVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
ok = [compositionVideoTrack insertTimeRange:video_timeRange ofTrack:sourceVideoTrack atTime:kCMTimeZero error:&error];
if (!ok) {
// Deal with the error.
NSLog(@"Error : %@ : %lld", error, videoAsset.duration.value);
}
CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
AVAssetTrack *sourceAudioTrack = [[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
ok = [compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:sourceAudioTrack atTime:kCMTimeZero error:&error];
if (!ok) {
// Deal with the error.
NSLog(@"Error : %@ : %lld", error, audioAsset.duration.value);
}
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetHighestQuality];
_assetExport.outputFileType = AVFileTypeQuickTimeMovie;
_assetExport.outputURL = outputFileUrl;
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
switch (_assetExport.status)
{
case AVAssetExportSessionStatusCompleted:
//export complete
NSLog(@"Export Complete");
break;
case AVAssetExportSessionStatusFailed:
NSLog(@"Export Failed");
NSLog(@"ExportSessionError: %@", [_assetExport.error localizedDescription]);
//export error (see exportSession.error)
break;
case AVAssetExportSessionStatusCancelled:
NSLog(@"Export Cancelled");
NSLog(@"ExportSessionError: %@", [_assetExport.error localizedDescription]);
//export cancelled
break;
}
NSLog(@"Error : %@", _assetExport.error);
}];
}
Thanks,
One thing that caught my eye was that you're calling CVPixelBufferPoolRelease(adaptor.pixelBufferPool); in your writeImageAndAudioAsMovie:andAudio:duration: method but since you didn't create adaptor.pixelBufferPool, you don't own it and therefore shouldn't be releasing it, right? Seems suspect to me.
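For reference, a sketch of the ownership pattern under the Core Foundation Create Rule, assuming the append loop from the question:
// Sketch: release the buffer you created, but leave adaptor.pixelBufferPool alone
// because the adaptor owns it
CVPixelBufferRef buffer = [self pixelBufferFromCGImage:image.CGImage]; // returned with a +1 retain count
[adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
CVPixelBufferRelease(buffer); // balances the Create in pixelBufferFromCGImage
// no CVPixelBufferPoolRelease(adaptor.pixelBufferPool) here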
I am developing a media server for the PlayStation 3 on iPhone.
I've learned that the PS3 doesn't support .mov files, so I have to convert the video to MP4 or some other format the PS3 supports.
This is what I have done, but it crashes if I set a different file type than the source file's:
AVURLAsset *avAsset = [AVURLAsset URLAssetWithURL:videoURL options:nil];
NSArray *compatiblePresets = [AVAssetExportSession exportPresetsCompatibleWithAsset:avAsset];
if ([compatiblePresets containsObject:AVAssetExportPresetLowQuality])
{
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc]initWithAsset:avAsset presetName:AVAssetExportPresetLowQuality];
exportSession.outputURL = [NSURL fileURLWithPath:videoPath];
exportSession.outputFileType = AVFileTypeMPEG4;
CMTime start = CMTimeMakeWithSeconds(1.0, 600);
CMTime duration = CMTimeMakeWithSeconds(3.0, 600);
CMTimeRange range = CMTimeRangeMake(start, duration);
exportSession.timeRange = range;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
switch ([exportSession status]) {
case AVAssetExportSessionStatusFailed:
NSLog(@"Export failed: %@", [[exportSession error] localizedDescription]);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(@"Export canceled");
break;
default:
break;
}
[exportSession release];
}];
}
If I set AVFileTypeMPEG4 here, it crashes saying "Invalid file type", so I have to set it to AVFileTypeQuickTimeMovie, which gives me a MOV file.
Is it possible on iOS to convert a video from MOV to MP4 through AVAssetExportSession, without any third-party libraries?
For presetName, use AVAssetExportPresetPassthrough instead of AVAssetExportPresetLowQuality:
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc]initWithAsset:avAsset presetName:AVAssetExportPresetPassthrough];
MOV is very similar to MP4; you might be able to just change the extension and have it work. Windows Phone can't play .mov files but can play MP4, and all I did to get that to work was change the extension from .mov to .mp4; it works fine, and these were videos shot on the iPhone. If anything, you can also try exporting with AVAssetExportSession; there is a file type in there for MP4 and M4A, as you can see from the file format UTIs.
Hope it helps.
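If you want to try that rename directly, something like this would do it (the file names below are placeholders):
// Sketch: rename the exported .mov to a .mp4 path without re-encoding;
// whether players accept it still depends on the codecs inside
NSString *docs = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSString *srcPath = [docs stringByAppendingPathComponent:@"movie.mov"]; // placeholder
NSString *dstPath = [docs stringByAppendingPathComponent:@"movie.mp4"]; // placeholder
NSError *moveError = nil;
if (![[NSFileManager defaultManager] moveItemAtPath:srcPath toPath:dstPath error:&moveError]) {
NSLog(@"Rename failed: %@", moveError);
}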
You can convert a video to MP4 with AVAsset:
AVURLAsset *avAsset = [AVURLAsset URLAssetWithURL:videoURL options:nil];
NSArray *compatiblePresets = [AVAssetExportSession
exportPresetsCompatibleWithAsset:avAsset];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc]initWithAsset:avAsset presetName:AVAssetExportPresetLowQuality];
NSString* documentsDirectory=[NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSURL *url = [NSURL fileURLWithPath:[documentsDirectory stringByAppendingPathComponent:@"output.mp4"]]; // build an output file URL in Documents (file name is arbitrary)
exportSession.outputURL = url;
//set the output file format if you want to make it in other file format (ex .3gp)
exportSession.outputFileType = AVFileTypeMPEG4;
exportSession.shouldOptimizeForNetworkUse = YES;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
switch ([exportSession status])
{
case AVAssetExportSessionStatusFailed:
NSLog(@"Export session failed");
break;
case AVAssetExportSessionStatusCancelled:
NSLog(@"Export canceled");
break;
case AVAssetExportSessionStatusCompleted:
{
//Video conversion finished
NSLog(@"Successful!");
}
break;
default:
break;
}
}];
To easily convert video to mp4 use this link.
You can also find sample project to convert video to mp4.
You need AVMutableComposition to do this, because an asset can't be transcoded to MP4 directly under iOS 5.0.
- (BOOL)encodeVideo:(NSURL *)videoURL
{
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:videoURL options:nil];
// Create the composition and tracks
AVMutableComposition *composition = [AVMutableComposition composition];
AVMutableCompositionTrack *videoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *audioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
NSArray *assetVideoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
if (assetVideoTracks.count <= 0)
{
NSLog(@"Error reading the transformed video track");
return NO;
}
// Insert the tracks in the composition's tracks
AVAssetTrack *assetVideoTrack = [assetVideoTracks firstObject];
[videoTrack insertTimeRange:assetVideoTrack.timeRange ofTrack:assetVideoTrack atTime:CMTimeMake(0, 1) error:nil];
[videoTrack setPreferredTransform:assetVideoTrack.preferredTransform];
AVAssetTrack *assetAudioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
[audioTrack insertTimeRange:assetAudioTrack.timeRange ofTrack:assetAudioTrack atTime:CMTimeMake(0, 1) error:nil];
// Export to mp4
NSString *mp4Quality = [MGPublic isIOSAbove:@"6.0"] ? AVAssetExportPresetMediumQuality : AVAssetExportPresetPassthrough;
NSString *exportPath = [NSString stringWithFormat:@"%@/%@.mp4",
[NSHomeDirectory() stringByAppendingString:@"/tmp"],
[BSCommon uuidString]];
NSURL *exportUrl = [NSURL fileURLWithPath:exportPath];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition presetName:mp4Quality];
exportSession.outputURL = exportUrl;
CMTimeRange range = CMTimeRangeMake(kCMTimeZero, [asset duration]); // export the full duration
exportSession.timeRange = range;
exportSession.outputFileType = AVFileTypeMPEG4;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
switch ([exportSession status])
{
case AVAssetExportSessionStatusCompleted:
NSLog(@"MP4 Successful!");
break;
case AVAssetExportSessionStatusFailed:
NSLog(@"Export failed: %@", [[exportSession error] localizedDescription]);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(@"Export canceled");
break;
default:
break;
}
}];
return YES;
}
Use the below code
NSURL * mediaURL = [info objectForKey:UIImagePickerControllerMediaURL];
AVAsset *video = [AVAsset assetWithURL:mediaURL];
AVAssetExportSession *exportSession = [AVAssetExportSession exportSessionWithAsset:video presetName:AVAssetExportPresetMediumQuality];
exportSession.shouldOptimizeForNetworkUse = YES;
exportSession.outputFileType = AVFileTypeMPEG4;
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *basePath = ([paths count] > 0) ? [paths objectAtIndex:0] : nil;
basePath = [basePath stringByAppendingPathComponent:@"videos"];
if (![[NSFileManager defaultManager] fileExistsAtPath:basePath])
[[NSFileManager defaultManager] createDirectoryAtPath:basePath withIntermediateDirectories:YES attributes:nil error:nil];
compressedVideoUrl=nil;
compressedVideoUrl = [NSURL fileURLWithPath:basePath];
long CurrentTime = [[NSDate date] timeIntervalSince1970];
NSString *strImageName = [NSString stringWithFormat:@"%ld",CurrentTime];
compressedVideoUrl=[compressedVideoUrl URLByAppendingPathComponent:[NSString stringWithFormat:@"%@.mp4",strImageName]];
exportSession.outputURL = compressedVideoUrl;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
NSLog(@"done processing video!");
NSLog(@"%@", compressedVideoUrl);
if(!dataMovie)
dataMovie = [[NSMutableData alloc] init];
dataMovie = [NSData dataWithContentsOfURL:compressedVideoUrl];
}];
Here is the code
func encodeVideo(videoURL: NSURL) {
let avAsset = AVURLAsset(URL: videoURL, options: nil)
var startDate = NSDate()
//Create Export session
exportSession = AVAssetExportSession(asset: avAsset, presetName: AVAssetExportPresetPassthrough)
// exportSession = AVAssetExportSession(asset: composition, presetName: mp4Quality)
//Creating temp path to save the converted video
let documentsDirectory = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0]
let myDocumentPath = NSURL(fileURLWithPath: documentsDirectory).URLByAppendingPathComponent("temp.mp4").absoluteString
let url = NSURL(fileURLWithPath: myDocumentPath)
let documentsDirectory2 = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0] as NSURL
let filePath = documentsDirectory2.URLByAppendingPathComponent("rendered-Video.mp4")
deleteFile(filePath)
//Check if the file already exists then remove the previous file
if NSFileManager.defaultManager().fileExistsAtPath(myDocumentPath) {
do {
try NSFileManager.defaultManager().removeItemAtPath(myDocumentPath)
}
catch let error {
print(error)
}
}
exportSession!.outputURL = filePath
exportSession!.outputFileType = AVFileTypeMPEG4
exportSession!.shouldOptimizeForNetworkUse = true
let range = CMTimeRangeMake(kCMTimeZero, avAsset.duration) // export the full duration
exportSession!.timeRange = range
exportSession!.exportAsynchronouslyWithCompletionHandler({() -> Void in
switch self.exportSession!.status {
case .Failed:
print(self.exportSession!.error)
case .Cancelled:
print("Export canceled")
case .Completed:
//Video conversion finished
var endDate = NSDate()
var time = endDate.timeIntervalSinceDate(startDate)
print(time)
print("Successful!")
print(self.exportSession!.outputURL)
default:
break
}
})
}
func deleteFile(filePath:NSURL) {
guard NSFileManager.defaultManager().fileExistsAtPath(filePath.path!) else {
return
}
do {
try NSFileManager.defaultManager().removeItemAtPath(filePath.path!)
}catch{
fatalError("Unable to delete file: \(error) : \(__FUNCTION__).")
}
}
Just wanted to say that the URL cannot be like
[NSURL URLWithString: [@"~/Documents/movie.mov" stringByExpandingTildeInPath]]
It must be like
[NSURL fileURLWithPath: [@"~/Documents/movie.mov" stringByExpandingTildeInPath]]
Took me a while to figure that out :-)
I'm syncing audio and video files using AVURLAsset. I'm getting the exported file, but only the audio is exported and not the video. How do I solve this issue? Please help me. Thanks in advance.
I'm using the code below:
moviePlayer = [[VideoPlay alloc]initWithNibName:@"VideoPlay" bundle:nil];
if(sp==1){
NSURL *VUrl = [NSURL URLWithString:elements.videoUrl];
NSURL *AUrl = [NSURL URLWithString:elements.audioUrl1 ];
NSLog(@"%@--%@", AUrl, VUrl);
AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:AUrl options:nil];
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:VUrl options:nil];
AVMutableComposition* mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionCommentaryTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionCommentaryTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAsset.duration)
ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
atTime:kCMTimeZero error:nil];
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
atTime:kCMTimeZero error:nil];
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPresetPassthrough];
NSString *videoName = @"export.m4v";
NSString *exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:videoName];
NSURL *exportUrl = [NSURL fileURLWithPath:exportPath];
if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath])
{
[[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
}
NSLog(@"Export Status %d-- ", _assetExport.status);
_assetExport.outputFileType = @"com.apple.quicktime-movie";
NSLog(@"file type %@", _assetExport.outputFileType);
_assetExport.outputURL = exportUrl;
_assetExport.shouldOptimizeForNetworkUse = YES;
[_assetExport exportAsynchronouslyWithCompletionHandler:
^(void ) {
NSLog(@"hello");
switch (_assetExport.status)
{
case AVAssetExportSessionStatusFailed:
{
NSLog(@"FAIL %@", _assetExport.error);
if ([[NSFileManager defaultManager] fileExistsAtPath:[_assetExport.outputURL path]])
{
[[NSFileManager defaultManager] removeItemAtPath:[_assetExport.outputURL path] error:nil];
}
// [self performSelectorOnMainThread:@selector(ritenta)
//                        withObject:nil
//                     waitUntilDone:NO];
break;
}
case AVAssetExportSessionStatusCompleted:
{
// [self performSelectorOnMainThread:@selector(saveVideoToAlbum:)
//                        withObject:exportPath
//                     waitUntilDone:NO];
break;
}
case AVAssetExportSessionStatusCancelled:
{
NSLog(@"CANCELED");
break;
}
}
NSLog(@"Export Status %d-- %@", _assetExport.status, _assetExport.outputURL);
if(_assetExport.status==3){ // 3 == AVAssetExportSessionStatusCompleted
moviePlayer.videolink = _assetExport.outputURL;
[self presentModalViewController:moviePlayer animated:YES];
[moviePlayer readyPlayer];
}
}
];
}
Simplify your code to the smallest working sample that shows the problem.
Check the return errors.
Try a different export preset instead of AVAssetExportPresetPassthrough.
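A minimal sketch of the last two points, using the composition from the question (the preset choice is just an example):
// Sketch: surface the errors instead of passing nil, and try a non-passthrough preset
NSError *insertError = nil;
BOOL ok = [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
atTime:kCMTimeZero error:&insertError];
if (!ok) {
NSLog(@"Video insert failed: %@", insertError);
}
AVAssetExportSession *session = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality]; // example preset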
Try a different export preset instead of AVAssetExportPresetPassthrough,
and NSString *videoName = @"export.mp4" instead of NSString *videoName = @"export.m4v";