How to fix video orientation issue in iOS - iPhone

I am working on an app in which the user picks a video from Photos and uploads it to a server. Since my server is a .NET server, the video gets rotated. I suspect the cause is the same as in the image case (see my earlier answer https://stackoverflow.com/a/10601175/1030951), so I googled and found code to fix video orientation. I took the code from RayWenderlich.com and modified it as follows. The output video now has the correct orientation, but it is mute: it plays, but without audio. Kindly help me if I am missing something.
I pass the info dictionary from the -(void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info method:
- (void)fix:(NSDictionary*)pobjInfoDirectory withFileName:(NSString*)pstrOutputFileName
{
firstAsset = [AVAsset assetWithURL:[pobjInfoDirectory objectForKey:UIImagePickerControllerMediaURL]];
if(firstAsset !=nil)
{
//Create AVMutableComposition Object. This object will hold our multiple AVMutableCompositionTracks.
AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init];
//VIDEO TRACK
AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
AVMutableVideoCompositionInstruction * MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, firstAsset.duration);
//FIXING ORIENTATION//
AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
AVAssetTrack *FirstAssetTrack = [[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation FirstAssetOrientation_ = UIImageOrientationUp;
BOOL isFirstAssetPortrait_ = NO;
CGAffineTransform firstTransform = FirstAssetTrack.preferredTransform;
if(firstTransform.a == 0 && firstTransform.b == 1.0 && firstTransform.c == -1.0 && firstTransform.d == 0)
{
FirstAssetOrientation_= UIImageOrientationRight; isFirstAssetPortrait_ = YES;
}
if(firstTransform.a == 0 && firstTransform.b == -1.0 && firstTransform.c == 1.0 && firstTransform.d == 0)
{
FirstAssetOrientation_ = UIImageOrientationLeft; isFirstAssetPortrait_ = YES;
}
if(firstTransform.a == 1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == 1.0)
{
FirstAssetOrientation_ = UIImageOrientationUp;
}
if(firstTransform.a == -1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == -1.0)
{
FirstAssetOrientation_ = UIImageOrientationDown;
}
CGFloat FirstAssetScaleToFitRatio = 320.0/FirstAssetTrack.naturalSize.width;
if(isFirstAssetPortrait_)
{
FirstAssetScaleToFitRatio = 320.0/FirstAssetTrack.naturalSize.height;
CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
[FirstlayerInstruction setTransform:CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:kCMTimeZero];
}
else
{
CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
[FirstlayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor),CGAffineTransformMakeTranslation(0, 160)) atTime:kCMTimeZero];
}
[FirstlayerInstruction setOpacity:0.0 atTime:firstAsset.duration];
MainInstruction.layerInstructions = [NSArray arrayWithObjects:FirstlayerInstruction,nil];
AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
MainCompositionInst.frameDuration = CMTimeMake(1, 30);
MainCompositionInst.renderSize = CGSizeMake(320.0, 480.0);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"mergeVideo-%d.mov",arc4random() % 1000]];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.videoComposition = MainCompositionInst;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^
{
dispatch_async(dispatch_get_main_queue(), ^{
[self exportDidFinish:exporter];
});
}];
}
}
- (void)exportDidFinish:(AVAssetExportSession*)session
{
if(session.status == AVAssetExportSessionStatusCompleted)
{
NSURL *outputURL = session.outputURL;
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputURL])
{
if ([self.delegate respondsToSelector:@selector(videoExported)])
[self.delegate videoExported];
}
}
firstAsset = nil;
}

Add this after the //VIDEO TRACK part:
//AUDIO TRACK
AVMutableCompositionTrack *firstAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[firstAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];
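One caveat: some clips carry no audio track at all, in which case objectAtIndex:0 throws a range exception. A guarded version, as a sketch:
//AUDIO TRACK (guarded)
NSArray *audioTracks = [firstAsset tracksWithMediaType:AVMediaTypeAudio];
if (audioTracks.count > 0)
{
    AVMutableCompositionTrack *firstAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    NSError *audioError = nil;
    // Insert the whole audio track alongside the video.
    [firstAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:[audioTracks objectAtIndex:0] atTime:kCMTimeZero error:&audioError];
}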

AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:[NSURL URLWithString:@"your video url here..."] options:nil];
Add this after creating the AVMutableCompositionTrack.
Use setPreferredTransform: to carry over the orientation of the source video you want to export:
// Grab the source track from AVURLAsset for example.
AVAssetTrack *assetVideoTrack = [videoAsset tracksWithMediaType:AVMediaTypeVideo].lastObject;
// Grab the composition video track from AVMutableComposition you already made.
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition tracksWithMediaType:AVMediaTypeVideo].lastObject;
// Apply the original transform.
if (assetVideoTrack && compositionVideoTrack) {
[compositionVideoTrack setPreferredTransform:assetVideoTrack.preferredTransform];
}
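If you take this route, note that an exporter's videoComposition overrides track transforms, so the copied preferredTransform only takes effect when you export without a custom video composition. A minimal export sketch under that assumption (outputPath is a placeholder):
// Sketch: export the composition relying on the copied preferredTransform,
// i.e. without attaching an AVMutableVideoComposition.
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                  presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL = [NSURL fileURLWithPath:outputPath]; // outputPath: your own .mov path
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^{
    NSLog(@"Export finished with status %d", (int)exporter.status);
}];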


Adding Title on each Video

I am trying to merge multiple videos into one with AVMutableComposition, and that is working fine. Now I want to add a different title to each video.
Any help will be appreciated.
Thank you.
This is what I have tried so far:
for(int i=0;i< [arrSelectedUrls count];i++)
{
AVAsset *currentAsset = [AVAsset assetWithURL:[arrSelectedUrls objectAtIndex:i]]; // I use the for loop to get each asset
/* Current Asset is the asset of the video From the Url Using AVAsset */
// AVURLAsset *newAudioAsset = [AVURLAsset URLAssetWithURL:[arrSelectedUrls objectAtIndex:i] options:nil];
BOOL hasAudio = [currentAsset tracksWithMediaType:AVMediaTypeAudio].count > 0;
AVMutableCompositionTrack *currentTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:i];
[currentTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, currentAsset.duration) ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:duration error:nil];
// [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, currentAsset.duration) ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:duration error:nil];
audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
if(hasAudio)
{
[audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, currentAsset.duration) ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:duration error:nil];
}
float sect = CMTimeGetSeconds(currentAsset.duration);
NSString *strSect = [NSString stringWithFormat:@"%f",sect];
[arrDuration addObject:@"0.5"];
[arrDuration addObject:@"0.5"];
[arrDuration addObject:@"0.5"];
[arrDuration addObject:@"0.5"];
AVMutableVideoCompositionLayerInstruction *currentAssetLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:currentTrack];
AVAssetTrack *currentAssetTrack = [[currentAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation currentAssetOrientation = UIImageOrientationUp;
BOOL isCurrentAssetPortrait = NO;
CGAffineTransform currentTransform = currentAssetTrack.preferredTransform;
if(currentTransform.a == 0 && currentTransform.b == 1.0 && currentTransform.c == -1.0 && currentTransform.d == 0) {currentAssetOrientation= UIImageOrientationRight; isCurrentAssetPortrait = YES;}
if(currentTransform.a == 0 && currentTransform.b == -1.0 && currentTransform.c == 1.0 && currentTransform.d == 0) {currentAssetOrientation = UIImageOrientationLeft; isCurrentAssetPortrait = YES;}
if(currentTransform.a == 1.0 && currentTransform.b == 0 && currentTransform.c == 0 && currentTransform.d == 1.0) {currentAssetOrientation = UIImageOrientationUp;}
if(currentTransform.a == -1.0 && currentTransform.b == 0 && currentTransform.c == 0 && currentTransform.d == -1.0) {currentAssetOrientation = UIImageOrientationDown;}
CGFloat FirstAssetScaleToFitRatio = 320.0/320.0;
if(isCurrentAssetPortrait){
FirstAssetScaleToFitRatio = 320.0/320.0;
CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
[currentAssetLayerInstruction setTransform:CGAffineTransformConcat(currentAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:duration];
}else{
CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
[currentAssetLayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(currentAssetTrack.preferredTransform, FirstAssetScaleFactor),CGAffineTransformMakeTranslation(0, 0)) atTime:duration];
}
duration=CMTimeAdd(duration, currentAsset.duration);
[currentAssetLayerInstruction setOpacity:0.0 atTime:duration];
[arrayInstruction addObject:currentAssetLayerInstruction];
NSLog(@"%lld", duration.value/duration.timescale);
CATextLayer *titleLayer = [CATextLayer layer];
if (i==0) {
titleLayer.string = @"www.miivdo.com";
}
if (i==1) {
titleLayer.string = @"www.mail.com";
}
//titleLayer.backgroundColor = (__bridge CGColorRef)([UIColor redColor]);
CGSize videoSize = [currentAssetTrack naturalSize];
titleLayer.fontSize = videoSize.height / 14;
// titleLayer.foregroundColor = (__bridge CGColorRef)([UIColor redColor]);
titleLayer.shadowOpacity = 0.5;
titleLayer.alignmentMode = kCAAlignmentRight;
titleLayer.bounds = CGRectMake(0, 0, 320, 50); //You may need to adjust this for proper display
parentLayer = [CALayer layer];
videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:titleLayer];
}
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, duration);
MainInstruction.layerInstructions = arrayInstruction;
MainCompositionInst = [AVMutableVideoComposition videoComposition];
MainCompositionInst.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
MainCompositionInst.frameDuration = CMTimeMake(1,30);
MainCompositionInst.renderSize = CGSizeMake(320.0, 320.0);
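After this, the composition still has to be exported; the export step looks roughly like the following (the output file name is illustrative):
// Sketch: export mixComposition with the title-carrying video composition.
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *outPath = [[paths objectAtIndex:0] stringByAppendingPathComponent:@"titledMerge.mov"];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                  presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL = [NSURL fileURLWithPath:outPath];
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.videoComposition = MainCompositionInst;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^{ /* handle exporter.status */ }];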
You can try this one:
- (void)MergeAndSave
{
for(int i=0;i< [arrSelectedUrls count];i++)
{
NSURL *url;
CALayer * parentLayer;
CALayer * videoLayer;
AVSynchronizedLayer *animationLayer = [AVSynchronizedLayer layer];
UIImage *image1 = [UIImage imageNamed:@"Fire1.jpeg"];
UIImage *image2 = [UIImage imageNamed:@"Fire2.jpeg"];
UIImage *image3 = [UIImage imageNamed:@"Fire3.jpeg"];
UIImage *image4 = [UIImage imageNamed:@"Fire4.jpeg"];
//int numberOfFile = [arrSelectedUrls count]; // Number Of Video You want to merge
AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init];
NSMutableArray *starImageArray = [NSMutableArray arrayWithObjects:(id)image1.CGImage,(id)image2.CGImage,(id)image3.CGImage,(id)image4.CGImage, nil];
NSMutableArray *arrDuration = [[NSMutableArray alloc] init];
NSMutableArray *arrayInstruction = [[NSMutableArray alloc] init];
AVMutableVideoCompositionInstruction * MainInstruction =
[AVMutableVideoCompositionInstruction videoCompositionInstruction];
AVMutableCompositionTrack *audioTrack;
audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID:kCMPersistentTrackID_Invalid];
CMTime duration = kCMTimeZero;
AVAsset *currentAsset = [AVAsset assetWithURL:[arrSelectedUrls objectAtIndex:i]]; // I use the for loop to get each asset
/* Current Asset is the asset of the video From the Url Using AVAsset */
// AVURLAsset *newAudioAsset = [AVURLAsset URLAssetWithURL:[arrSelectedUrls objectAtIndex:i] options:nil];
BOOL hasAudio = [currentAsset tracksWithMediaType:AVMediaTypeAudio].count > 0;
AVMutableCompositionTrack *currentTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:i];
// [currentTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, currentAsset.duration) ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:duration error:nil];
[currentTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, currentAsset.duration) ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:duration error:nil];
//
//// [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, currentAsset.duration) ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:duration error:nil];
// audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
//
// // [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, mixComposition.duration) ofTrack:[[mixComposition tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:duration error:nil];
if(hasAudio)
{
[audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, currentAsset.duration) ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:duration error:nil];
audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, mixComposition.duration) ofTrack:[[mixComposition tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:duration error:nil];
}
float sect = CMTimeGetSeconds(currentAsset.duration);
NSString *strSect = [NSString stringWithFormat:@"%f",sect];
[arrDuration addObject:@"0.5"];
[arrDuration addObject:@"0.5"];
[arrDuration addObject:@"0.5"];
[arrDuration addObject:@"0.5"];
AVMutableVideoCompositionLayerInstruction *currentAssetLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:currentTrack];
AVAssetTrack *currentAssetTrack = [[currentAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation currentAssetOrientation = UIImageOrientationUp;
BOOL isCurrentAssetPortrait = NO;
CGAffineTransform currentTransform = currentAssetTrack.preferredTransform;
if(currentTransform.a == 0 && currentTransform.b == 1.0 && currentTransform.c == -1.0 && currentTransform.d == 0) {currentAssetOrientation= UIImageOrientationRight; isCurrentAssetPortrait = YES;}
if(currentTransform.a == 0 && currentTransform.b == -1.0 && currentTransform.c == 1.0 && currentTransform.d == 0) {currentAssetOrientation = UIImageOrientationLeft; isCurrentAssetPortrait = YES;}
if(currentTransform.a == 1.0 && currentTransform.b == 0 && currentTransform.c == 0 && currentTransform.d == 1.0) {currentAssetOrientation = UIImageOrientationUp;}
if(currentTransform.a == -1.0 && currentTransform.b == 0 && currentTransform.c == 0 && currentTransform.d == -1.0) {currentAssetOrientation = UIImageOrientationDown;}
CGFloat FirstAssetScaleToFitRatio = 320.0/320.0;
if(isCurrentAssetPortrait){
FirstAssetScaleToFitRatio = 320.0/320.0;
CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
[currentAssetLayerInstruction setTransform:CGAffineTransformConcat(currentAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:duration];
}else{
CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
[currentAssetLayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(currentAssetTrack.preferredTransform, FirstAssetScaleFactor),CGAffineTransformMakeTranslation(0, 0)) atTime:duration];
}
duration=CMTimeAdd(duration, currentAsset.duration);
[currentAssetLayerInstruction setOpacity:0.0 atTime:duration];
[arrayInstruction addObject:currentAssetLayerInstruction];
NSLog(@"%lld", duration.value/duration.timescale);
CATextLayer *titleLayer = [CATextLayer layer];
titleLayer.string = [NSString stringWithFormat:@"Final%@",[Titlearry objectAtIndex:i]];
CGSize videoSize = [currentAssetTrack naturalSize];
titleLayer.fontSize = videoSize.height / 14;
titleLayer.shadowOpacity = 0.5;
titleLayer.alignmentMode = kCAAlignmentRight;
titleLayer.bounds = CGRectMake(0, 0, 320, 500); //You may need to adjust this for proper display
parentLayer = [CALayer layer];
videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:titleLayer];
// new addition From Ritesh //
double time1 = 0.01;
// in ms, (0.2*1000)/1000 == 200/1000 == 0.2
// CMTime time2 = CMTimeMake(time1*1000, 1000);
NSMutableArray * keyTimesArray = [[NSMutableArray alloc]init];
for (int z = 1; z<4; z++)
{
NSNumber *temp = [NSNumber numberWithFloat:(time1+(float)z/30)];
[keyTimesArray addObject:temp];
}
animationLayer.opacity = 1.0;
// animationLayer.backgroundColor = [UIColor yellowColor].CGColor;
[animationLayer setFrame:CGRectMake(0, 0, 320, 50)];
[parentLayer addSublayer:animationLayer];
CAKeyframeAnimation *changeImageAnimation = [CAKeyframeAnimation animationWithKeyPath:@"contents"];
[changeImageAnimation setDelegate:self];
changeImageAnimation.calculationMode = kCAAnimationDiscrete;
[animationLayer setContents:[starImageArray lastObject]];
changeImageAnimation.duration = 10.0f;
changeImageAnimation.repeatCount = 30;
changeImageAnimation.values = [NSArray arrayWithArray:starImageArray];
//changeImageAnimation.removedOnCompletion = YES;
// [changeImageAnimation setKeyTimes:arrDuration];
[changeImageAnimation setBeginTime:1.0];
[changeImageAnimation setRemovedOnCompletion:NO];
[changeImageAnimation setDelegate:self];
[animationLayer addAnimation:changeImageAnimation forKey:@"contents"];
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, duration);
MainInstruction.layerInstructions = arrayInstruction;
AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
MainCompositionInst.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
MainCompositionInst.frameDuration = CMTimeMake(1,30);
MainCompositionInst.renderSize = CGSizeMake(320.0, 320.0);
// NSString *myPathDocs = [[[AppDelegate sharedAppDelegate] applicationCacheDirectory] stringByAppendingPathComponent:[NSString stringWithFormat:@"mergeVideo%-dtemp.mp4",arc4random() % 10000]];
NSArray *dirPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *docsDir = [dirPaths objectAtIndex:0];
NSString *outputFilePath = [docsDir stringByAppendingPathComponent:[NSString stringWithFormat:@"merge%@.mov",[Titlearry objectAtIndex:i]]];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
url = [NSURL fileURLWithPath:outputFilePath];
[UrlArray addObject:outputFilePath];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.videoComposition = MainCompositionInst;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^
{
switch (exporter.status)
{
case AVAssetExportSessionStatusCompleted:
{
if(i == [arrSelectedUrls count]-1)
{
[self mergeAllVideoClipscompletionCallback];
}
}
break;
case AVAssetExportSessionStatusFailed:
NSLog(#"Failed:%#", exporter.error.description);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(#"Canceled:%#", exporter.error);
break;
case AVAssetExportSessionStatusExporting:
NSLog(#"Exporting!");
break;
case AVAssetExportSessionStatusWaiting:
NSLog(#"Waiting");
break;
default:
break;
}
}];
}
// [self performSelector:@selector(MergeAndExport) withObject:nil afterDelay:3.0];
// [self MergeAndExport];
}
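The commented-out calls above hint at the sequencing: MergeAndExport must only run after every per-clip export has finished. mergeAllVideoClipscompletionCallback is referenced in the completion handler but not shown; one plausible wiring (this body is an assumption):
- (void)mergeAllVideoClipscompletionCallback
{
    // Assumption: reached once the last per-clip AVAssetExportSession completes,
    // so it is now safe to stitch the exported files together.
    dispatch_async(dispatch_get_main_queue(), ^{
        [self MergeAndExport];
    });
}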
-(void)MergeAndExport
{
CALayer * parentLayer;
CALayer * videoLayer;
//int numberOfFile = [arrSelectedUrls count]; // Number Of Video You want to merge
AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init];
NSMutableArray *arrayInstruction = [[NSMutableArray alloc] init];
AVMutableVideoCompositionInstruction * MainInstruction =
[AVMutableVideoCompositionInstruction videoCompositionInstruction];
AVMutableCompositionTrack *audioTrack;
audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID:kCMPersistentTrackID_Invalid];
CMTime duration = kCMTimeZero;
for(int i=0;i< [arrSelectedUrls count];i++)
{
// AVAsset *currentAsset = [AVAsset assetWithURL:[arrSelectedUrls objectAtIndex:i]];
NSURL *url=[NSURL fileURLWithPath:[UrlArray objectAtIndex:i]];
AVURLAsset *currentAsset=[AVURLAsset URLAssetWithURL:url options:nil];
// I use the for loop to get each asset
/* Current Asset is the asset of the video From the Url Using AVAsset */
// AVURLAsset *newAudioAsset = [AVURLAsset URLAssetWithURL:[arrSelectedUrls objectAtIndex:i] options:nil];
BOOL hasAudio = [currentAsset tracksWithMediaType:AVMediaTypeAudio].count > 0;
AVMutableCompositionTrack *currentTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[currentTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, currentAsset.duration) ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:duration error:nil];
// audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
// [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, mixComposition.duration) ofTrack:[[mixComposition tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:duration error:nil];
if(hasAudio)
{
[audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, currentAsset.duration) ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:duration error:nil];
audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, mixComposition.duration) ofTrack:[[mixComposition tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:duration error:nil];
}
AVMutableVideoCompositionLayerInstruction *currentAssetLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:currentTrack];
AVAssetTrack *currentAssetTrack = [[currentAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation currentAssetOrientation = UIImageOrientationUp;
BOOL isCurrentAssetPortrait = NO;
CGAffineTransform currentTransform = currentAssetTrack.preferredTransform;
if(currentTransform.a == 0 && currentTransform.b == 1.0 && currentTransform.c == -1.0 && currentTransform.d == 0) {currentAssetOrientation= UIImageOrientationRight; isCurrentAssetPortrait = YES;}
if(currentTransform.a == 0 && currentTransform.b == -1.0 && currentTransform.c == 1.0 && currentTransform.d == 0) {currentAssetOrientation = UIImageOrientationLeft; isCurrentAssetPortrait = YES;}
if(currentTransform.a == 1.0 && currentTransform.b == 0 && currentTransform.c == 0 && currentTransform.d == 1.0) {currentAssetOrientation = UIImageOrientationUp;}
if(currentTransform.a == -1.0 && currentTransform.b == 0 && currentTransform.c == 0 && currentTransform.d == -1.0) {currentAssetOrientation = UIImageOrientationDown;}
CGFloat FirstAssetScaleToFitRatio = 320.0/320.0;
if(isCurrentAssetPortrait){
FirstAssetScaleToFitRatio = 320.0/320.0;
CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
[currentAssetLayerInstruction setTransform:CGAffineTransformConcat(currentAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:duration];
}else{
CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
[currentAssetLayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(currentAssetTrack.preferredTransform, FirstAssetScaleFactor),CGAffineTransformMakeTranslation(0, 0)) atTime:duration];
}
duration=CMTimeAdd(duration, currentAsset.duration);
[currentAssetLayerInstruction setOpacity:0.0 atTime:duration];
[arrayInstruction addObject:currentAssetLayerInstruction];
NSLog(@"%lld", duration.value/duration.timescale);
CATextLayer *titleLayer = [CATextLayer layer];
titleLayer.string = @"www.miivdo.com";
//titleLayer.backgroundColor = (__bridge CGColorRef)([UIColor redColor]);
CGSize videoSize = [currentAssetTrack naturalSize];
titleLayer.fontSize = videoSize.height / 14;
// titleLayer.foregroundColor = (__bridge CGColorRef)([UIColor redColor]);
titleLayer.shadowOpacity = 0.5;
titleLayer.alignmentMode = kCAAlignmentRight;
titleLayer.bounds = CGRectMake(0, 0, 320, 50); //You may need to adjust this for proper display
parentLayer = [CALayer layer];
videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:titleLayer];
}
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, duration);
MainInstruction.layerInstructions = arrayInstruction;
AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
MainCompositionInst.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
MainCompositionInst.frameDuration = CMTimeMake(1, 30);
MainCompositionInst.renderSize = CGSizeMake(320.0, 320.0);
// NSString *myPathDocs = [[[AppDelegate sharedAppDelegate] applicationCacheDirectory] stringByAppendingPathComponent:[NSString stringWithFormat:@"mergeVideo%-dtemp.mp4",arc4random() % 10000]];
NSArray *dirPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *docsDir = [dirPaths objectAtIndex:0];
NSString *outputFilePath = [docsDir stringByAppendingPathComponent:@"merge.mov"];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
NSURL *url = [NSURL fileURLWithPath:outputFilePath];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.videoComposition = MainCompositionInst;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^
{
switch (exporter.status)
{
case AVAssetExportSessionStatusCompleted:
{
NSURL *outputURL = exporter.outputURL;
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputURL]) {
[self writeExportedVideoToAssetsLibrary:outputURL];
//
}
}
break;
case AVAssetExportSessionStatusFailed:
NSLog(#"Failed:%#", exporter.error.description);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(#"Canceled:%#", exporter.error);
break;
case AVAssetExportSessionStatusExporting:
NSLog(#"Exporting!");
break;
case AVAssetExportSessionStatusWaiting:
NSLog(#"Waiting");
break;
default:
break;
}
}];
}
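writeExportedVideoToAssetsLibrary: is referenced in the completion handler but not shown; a plausible implementation using the (now deprecated) ALAssetsLibrary API would be:
- (void)writeExportedVideoToAssetsLibrary:(NSURL *)outputURL
{
    // Sketch: save the exported movie into the Saved Photos album.
    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
    [library writeVideoAtPathToSavedPhotosAlbum:outputURL
                                completionBlock:^(NSURL *assetURL, NSError *error) {
        if (error) {
            NSLog(@"Error saving video to library: %@", error);
        }
    }];
}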

How to merge Audio and video?

I'm merging video with video, and audio with video. It works fine in the video-with-video case, but when an audio file is merged the result shows a black screen. I don't know what I'm doing wrong in this code:
-(void)mergeAllMediaAtTime:(NSMutableArray*)startTimeArray {
NSURL *firstURL = [NSURL fileURLWithPath:[urlArray objectAtIndex:counter]];
firstAsset = [AVAsset assetWithURL:firstURL];
NSString* videoDirPath = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/Crop Videos"];
NSString* fileName = [VideoAndAudioNameArray objectAtIndex:counter];
NSString *pSecondVideoPath = [videoDirPath stringByAppendingPathComponent:fileName];
NSURL *secondURL = [NSURL fileURLWithPath:pSecondVideoPath];
secondAsset = [AVAsset assetWithURL:secondURL];
if(firstAsset !=nil && secondAsset!=nil)
{
AVVideoComposition *originalComposition = [AVVideoComposition videoCompositionWithPropertiesOfAsset:firstAsset];
//Create AVMutableComposition Object. This object will hold our multiple AVMutableCompositionTracks.
AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init];
//VIDEO TRACK
AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack* track = [[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:track atTime:kCMTimeZero error:nil];
int time = [[startTimeArray objectAtIndex:counter] intValue];
CMTime pTime = CMTimeMake(time, 1);
///////////////////////
AVMutableCompositionTrack *secondTrack;
if ([[fileName pathExtension] isEqualToString:@"mov"])
{
secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, secondAsset.duration) ofTrack:[[secondAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:pTime error:nil];
}
// If Audio file
else
{
secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, secondAsset.duration) ofTrack:[[secondAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:pTime error:nil];
NSLog(#"Audio file's Merging");
}
/****** First Video *********/
AVMutableVideoCompositionInstruction * MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, firstAsset.duration);
MainInstruction.backgroundColor = [[UIColor clearColor] CGColor];
//FIXING ORIENTATION//
AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
AVAssetTrack *FirstAssetTrack = [[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
[FirstlayerInstruction setTransform:FirstAssetTrack.preferredTransform atTime:kCMTimeZero];
[FirstlayerInstruction setOpacity:0.0 atTime:firstAsset.duration];
if ([[fileName pathExtension] isEqualToString:@"mov"])
{
/****** Second Video *********/
AVMutableVideoCompositionLayerInstruction *SecondlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:secondTrack];
AVAssetTrack *SecondAssetTrack = [[secondAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
[SecondlayerInstruction setOpacity:1.0 atTime:pTime];
UIImageOrientation SecondAssetOrientation_ = UIImageOrientationUp;
BOOL isSecondAssetPortrait_ = NO;
CGAffineTransform secondTransform = SecondAssetTrack.preferredTransform;
if(secondTransform.a == 0 && secondTransform.b == 1.0 && secondTransform.c == -1.0 && secondTransform.d == 0) {SecondAssetOrientation_= UIImageOrientationRight; isSecondAssetPortrait_ = YES;}
if(secondTransform.a == 0 && secondTransform.b == -1.0 && secondTransform.c == 1.0 && secondTransform.d == 0) {SecondAssetOrientation_ = UIImageOrientationLeft; isSecondAssetPortrait_ = YES;}
if(secondTransform.a == 1.0 && secondTransform.b == 0 && secondTransform.c == 0 && secondTransform.d == 1.0) {SecondAssetOrientation_ = UIImageOrientationUp;}
if(secondTransform.a == -1.0 && secondTransform.b == 0 && secondTransform.c == 0 && secondTransform.d == -1.0) {SecondAssetOrientation_ = UIImageOrientationDown;}
CGFloat SecondAssetScaleToFitRatioOfWidth = nRenderWidth/SecondAssetTrack.naturalSize.width;
if(isSecondAssetPortrait_)
{
CGFloat SecondAssetScaleToFitRatioOfHeight = nRenderWidth/SecondAssetTrack.naturalSize.height;
CGAffineTransform SecondAssetScaleFactor = CGAffineTransformMakeScale(SecondAssetScaleToFitRatioOfWidth,SecondAssetScaleToFitRatioOfHeight);
[SecondlayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformMakeScale(1.0f,1.0f), SecondAssetScaleFactor) atTime:kCMTimeZero];
//CGAffineTransformConcat(CGAffineTransformMakeScale(1.0f,1.0f), SecondAssetScaleFactor)
}
else
{
CGFloat SecondAssetScaleToFitRatioOfWidth = nRenderWidth/SecondAssetTrack.naturalSize.width;
CGFloat SecondAssetScaleToFitRatioOfHeight = nRenderWidth/SecondAssetTrack.naturalSize.height;
CGAffineTransform SecondAssetScaleFactor = CGAffineTransformMakeScale(SecondAssetScaleToFitRatioOfWidth,SecondAssetScaleToFitRatioOfHeight);
//[SecondlayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(SecondAssetTrack.preferredTransform, SecondAssetScaleFactor),CGAffineTransformMakeTranslation(0, 160)) atTime:firstAsset.duration];
[SecondlayerInstruction setTransform:CGAffineTransformConcat(SecondAssetScaleFactor ,CGAffineTransformMakeTranslation(1, 1)) atTime:kCMTimeZero];
//CGAffineTransformConcat(CGAffineTransformMakeScale(1.0f,1.0f),CGAffineTransformMakeTranslation(0, 100))
}
[SecondlayerInstruction setOpacity:0.0 atTime:CMTimeAdd(pTime, secondAsset.duration)];
MainInstruction.layerInstructions = [NSArray arrayWithObjects:SecondlayerInstruction, FirstlayerInstruction,nil];
}
AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
MainCompositionInst.frameDuration = originalComposition.frameDuration;
MainCompositionInst.renderScale = 1.0;
MainCompositionInst.renderSize = CGSizeMake(nRenderWidth, nRenderHeight);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"mergeVideo_%d.mov",arc4random() % 1000]];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
[urlArray addObject:myPathDocs];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.videoComposition = MainCompositionInst;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^
{
dispatch_async(dispatch_get_main_queue(), ^{
[self exportDidFinish:exporter];
});
}];
}
}
In that code, your audio starts at the end of the video frames. Use "atTime:kCMTimeZero" instead, as shown in the code below:
else
{
secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, secondAsset.duration) ofTrack:[[secondAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];
NSLog(#"Audio file's Merging");
}

How to set the video orientation from landscape to portrait using CGAffineTransform or any other way?

I am merging videos using AVMutableComposition and the videos get merged, but after merging the video is stored in landscape (the default mode for AVAsset). I want the video to be fixed in portrait mode. Is there any way to set the video orientation to portrait mode?
Thanks in advance.
Here is the code I used for merging the videos:
NSArray *arrVideoUrl=[objApp.dictSelectedVideos allKeys];
NSMutableArray *arrVideoAsset=[[NSMutableArray alloc]init];
for (int i=0; i<objApp.dictSelectedVideos.count; i++) {
AVURLAsset *video=[[AVURLAsset alloc]initWithURL:[arrVideoUrl objectAtIndex:i] options:nil];
[arrVideoAsset addObject:video];
}
AVURLAsset *music_track=[[AVURLAsset alloc]initWithURL:songUrl options:nil];
//to mix up the media items
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
//Setting Audio track
AVMutableCompositionTrack *AudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
//setting the track to add the videos
AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVURLAsset *tempVideo1=[arrVideoAsset objectAtIndex:0];
AVURLAsset *tempVideo2=[arrVideoAsset objectAtIndex:1];
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, tempVideo1.duration)ofTrack:[[tempVideo1 tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
if (arrVideoUrl.count==1)
{
[AudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, CMTimeAdd(kCMTimeZero, tempVideo2.duration))ofTrack:[[music_track tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];
}
else{
for (int j=1; j<=arrVideoAsset.count-1; j++)
{
AVURLAsset *tempVideoNext=[arrVideoAsset objectAtIndex:j];
AVURLAsset *tempVideoPrev=[arrVideoAsset objectAtIndex:j-1];
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, tempVideoNext.duration)ofTrack:[[tempVideoNext tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:tempVideoPrev.duration error:nil];
}
[AudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, CMTimeAdd(tempVideo1.duration, tempVideo2.duration))ofTrack:[[music_track tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];
}
I also found another solution: if you want to change the orientation from landscape to portrait, use this code:
AVMutableVideoComposition *videoComp = [AVMutableVideoComposition videoComposition];
videoComp.renderSize = videoSize;
videoComp.frameDuration = CMTimeMake(1, 30);
videoComp.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
videoComp.instructions = [NSArray arrayWithObject: instruction];
UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp;
BOOL isVideoAssetPortrait_ = NO;
CGAffineTransform videoTransform = clipVideoTrack.preferredTransform;
if(videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {videoAssetOrientation_= UIImageOrientationRight; isVideoAssetPortrait_ = YES;}
if(videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {videoAssetOrientation_ = UIImageOrientationLeft; isVideoAssetPortrait_ = YES;}
if(videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) {videoAssetOrientation_ = UIImageOrientationUp;}
if(videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) {videoAssetOrientation_ = UIImageOrientationDown;}
CGFloat FirstAssetScaleToFitRatio = 320.0 / clipVideoTrack.naturalSize.width;
if(isVideoAssetPortrait_) {
FirstAssetScaleToFitRatio = 320.0/clipVideoTrack.naturalSize.height;
CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
[layerInstruction setTransform:CGAffineTransformConcat(clipVideoTrack.preferredTransform, FirstAssetScaleFactor) atTime:kCMTimeZero];
}else{
CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
[layerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(clipVideoTrack.preferredTransform, FirstAssetScaleFactor),CGAffineTransformMakeTranslation(0, 160)) atTime:kCMTimeZero];
}
// [layerInstruction setOpacity:0.0 atTime:kCMTimeZero];
AVAssetExportSession *assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];//AVAssetExportPresetPassthrough
assetExport.videoComposition = videoComp;
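The snippet stops right after configuring the exporter; assuming the same flow as the other answers here, the remaining step looks like this (the output path is illustrative):
// Sketch: start the export and check its status when done.
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *outPath = [[paths objectAtIndex:0] stringByAppendingPathComponent:@"portraitMerge.mov"];
assetExport.outputURL = [NSURL fileURLWithPath:outPath];
assetExport.outputFileType = AVFileTypeQuickTimeMovie;
assetExport.shouldOptimizeForNetworkUse = YES;
[assetExport exportAsynchronouslyWithCompletionHandler:^{
    if (assetExport.status != AVAssetExportSessionStatusCompleted) {
        NSLog(@"Export failed: %@", assetExport.error);
    }
}];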

Rotate video in iOS

I am new to iOS.
I have an app that records video and plays it back on iPad.
I need to open the camera in a view, so I used AVCaptureSession for that.
With my code I can record and play video, but the recorded video comes out rotated.
I use LandscapeRight for recording.
Here is my code:
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
captureVideoPreviewLayer.orientation=AVCaptureVideoOrientationLandscapeRight;
captureVideoPreviewLayer.frame = vwvideo.bounds;
[vwvideo.layer addSublayer:captureVideoPreviewLayer];
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input) {
// Handle the error appropriately.
NSLog(#"ERROR: trying to open camera: %#", error);
}
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
NSString *documentsDirectory = [paths objectAtIndex:0];
NSDateFormatter *dateFormat = [[[NSDateFormatter alloc] init] autorelease];
[dateFormat setDateFormat:@"yyyy-MM-dd HH.mm.SS"];
NSDate *now = [[[NSDate alloc] init] autorelease];
theDate = [dateFormat stringFromDate:now];
NSString *tempPath = [NSString stringWithFormat:@"%@/%@.mp4",documentsDirectory,theDate];
[tempPath retain];
NSLog(#"Path::%#",tempPath);
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:tempPath];
[session addInput:input];
[session addOutput:movieFileOutput];
[session commitConfiguration];
[session startRunning];
[movieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
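Side note: the preview layer's orientation only affects the on-screen preview, not the file AVCaptureMovieFileOutput writes. Setting the output connection's orientation before recording may avoid the rotation altogether (a sketch; the connection only exists once the output has been added to the session):
// Sketch: rotate the recorded file itself, not just the preview.
AVCaptureConnection *videoConnection = [movieFileOutput connectionWithMediaType:AVMediaTypeVideo];
if ([videoConnection isVideoOrientationSupported]) {
    videoConnection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
}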
http://www.raywenderlich.com/13418/how-to-play-record-edit-videos-in-ios/videoplayrecord
If you go through the code of the sample project given in that tutorial, it is easy to fix the rotated video to the correct orientation with the following snippet (RecordVideo.m file):
//FIXING ORIENTATION//
AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
AVAssetTrack *FirstAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation FirstAssetOrientation_ = UIImageOrientationUp;
BOOL isFirstAssetPortrait_ = NO;
CGAffineTransform firstTransform = FirstAssetTrack.preferredTransform;
if(firstTransform.a == 0 && firstTransform.b == 1.0 && firstTransform.c == -1.0 && firstTransform.d == 0) {FirstAssetOrientation_= UIImageOrientationRight; isFirstAssetPortrait_ = YES;}
if(firstTransform.a == 0 && firstTransform.b == -1.0 && firstTransform.c == 1.0 && firstTransform.d == 0) {FirstAssetOrientation_ = UIImageOrientationLeft; isFirstAssetPortrait_ = YES;}
if(firstTransform.a == 1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == 1.0) {FirstAssetOrientation_ = UIImageOrientationUp;}
if(firstTransform.a == -1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == -1.0) {FirstAssetOrientation_ = UIImageOrientationDown;}
CGFloat FirstAssetScaleToFitRatio = 1.0;
if(isFirstAssetPortrait_){
FirstAssetScaleToFitRatio = 1.0;
CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
[FirstlayerInstruction setTransform:CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:kCMTimeZero];
}else{
CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
[FirstlayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor),CGAffineTransformMakeTranslation(0, 160)) atTime:kCMTimeZero];
}
[FirstlayerInstruction setOpacity:0.0 atTime:videoAsset.duration];
// Audio tracks need no layer instruction, so only the video layer instruction goes in.
MainInstruction.layerInstructions = [NSArray arrayWithObjects:FirstlayerInstruction,nil];
AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
MainCompositionInst.frameDuration = CMTimeMake(1, 30);
MainCompositionInst.renderSize = CGSizeMake(360.0, 480.0);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"orientationFixedVideo-%d.mov",arc4random() % 1000]];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.videoComposition = MainCompositionInst;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^
{
dispatch_async(dispatch_get_main_queue(), ^{
[self exportDidFinish:exporter];
});
}];
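For context, the snippet assumes mixComposition, firstTrack and MainInstruction were created earlier in the same method from videoAsset; roughly (a sketch following the tutorial's naming, with the audio track included so the fixed video keeps its sound):
// Sketch: the composition setup the orientation-fixing snippet relies on.
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
// Keep the audio so the fixed video is not mute.
if ([videoAsset tracksWithMediaType:AVMediaTypeAudio].count > 0) {
    AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];
}
AVMutableVideoCompositionInstruction *MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);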

Save audio with fade in fade out with setVolumeRampFromStartVolume not working in iOS

I am trying to cut an audio file for an iPhone project. I can cut it and save it, but any fade in / fade out that I try to apply doesn't work; the audio file is saved trimmed, but not faded.
I am using the following code:
//
// NO PROBLEMS TO SEE HERE, MOVE ON
//
NSArray *documentsFolders = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
int currentFileNum = 10;
NSURL *url = [NSURL fileURLWithPath: [[documentsFolders objectAtIndex:0] stringByAppendingPathComponent:[NSString stringWithFormat:@"%@%d.%@", AUDIO_SOURCE_FILE_NAME ,currentFileNum, AUDIO_SOURCE_FILE_EXTENSION ]]];
NSDictionary *options = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES]
forKey:AVURLAssetPreferPreciseDurationAndTimingKey];
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:url options:options];
AVAssetExportSession* exporter = [AVAssetExportSession exportSessionWithAsset:asset presetName:AVAssetExportPresetAppleM4A];
for (NSString* filetype in exporter.supportedFileTypes) {
if ([filetype isEqualToString:AVFileTypeAppleM4A]) {
exporter.outputFileType = AVFileTypeAppleM4A;
break;
}
}
if (exporter.outputFileType == nil) {
NSLog(#"Needed output file type not found? (%#)", AVFileTypeAppleM4A);
//return;
}
NSString* outPath = [[documentsFolders objectAtIndex:0] stringByAppendingPathComponent:[NSString stringWithFormat:@"%@%d.%@", AUDIO_CUTTED_FILE_NAME ,currentFileNum, AUDIO_SOURCE_FILE_EXTENSION ]];
NSURL* const outUrl = [NSURL fileURLWithPath:outPath];
exporter.outputURL = outUrl;
float endTrimTime = CMTimeGetSeconds(asset.duration);
float startTrimTime = fminf(AUDIO_DURATION, endTrimTime);
CMTime startTrimCMTime=CMTimeSubtract(asset.duration, CMTimeMake(startTrimTime, 1));
exporter.timeRange = CMTimeRangeMake(startTrimCMTime, asset.duration);
//
// TRYING TO APPLY FADEIN FADEOUT, NOT WORKING, NO RESULTS, "CODE IGNORED"
//
AVMutableAudioMix *exportAudioMix = [AVMutableAudioMix audioMix];
NSMutableArray* inputParameters = [NSMutableArray arrayWithCapacity:1];
CMTime startFadeInTime = startTrimCMTime;
CMTime endFadeInTime = CMTimeMake(startTrimTime+1, 1);
CMTime startFadeOutTime = CMTimeMake(endTrimTime-1, 1);
CMTime endFadeOutTime = CMTimeMake(endTrimTime, 1);
CMTimeRange fadeInTimeRange = CMTimeRangeFromTimeToTime(startFadeInTime, endFadeInTime);
CMTimeRange fadeOutTimeRange = CMTimeRangeFromTimeToTime(startFadeOutTime, endFadeOutTime);
AVMutableAudioMixInputParameters *exportAudioMixInputParameters = [AVMutableAudioMixInputParameters audioMixInputParameters];
[exportAudioMixInputParameters setVolume:0.0 atTime:CMTimeMakeWithSeconds(startTrimTime-0.01, 1)];
[exportAudioMixInputParameters setVolumeRampFromStartVolume:0.0 toEndVolume:1.0 timeRange:fadeInTimeRange];
[exportAudioMixInputParameters setVolumeRampFromStartVolume:1.0 toEndVolume:0.0 timeRange:fadeOutTimeRange];
[inputParameters insertObject:exportAudioMixInputParameters atIndex:0];
exportAudioMix.inputParameters = inputParameters;
exporter.audioMix = exportAudioMix;
[exporter exportAsynchronouslyWithCompletionHandler:^(void) {
NSString* message;
switch (exporter.status) {
case AVAssetExportSessionStatusFailed:
message = [NSString stringWithFormat:@"Export failed. Error: %@", exporter.error.description];
[asset release];
break;
case AVAssetExportSessionStatusCompleted: {
[asset release];
[self reallyConvert:currentFileNum];
message = [NSString stringWithFormat:@"Export completed: %@", outPath];
break;
}
case AVAssetExportSessionStatusCancelled:
message = @"Export cancelled!";
[asset release];
break;
default:
NSLog(#"Export 4 unhandled status: %d", exporter.status);
[asset release];
break;
}
}];
You need to select the track. Instead of calling:
AVMutableAudioMixInputParameters *exportAudioMixInputParameters = [AVMutableAudioMixInputParameters audioMixInputParameters];
Call:
AVAssetTrack *assetTrack = [[asset tracksWithMediaType:AVMediaTypeAudio]objectAtIndex:0];
AVMutableAudioMixInputParameters *exportAudioMixInputParameters = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:assetTrack];
In your existing code you can also specify the track like this:
exportAudioMixInputParameters.trackID = [[[asset tracksWithMediaType:AVMediaTypeAudio]objectAtIndex:0] trackID];
Good luck!
Here is the solution.
setVolumeRampFromStartVolume didn't work for me, so I approximate the fades with stepped setVolume: calls instead:
AVMutableAudioMix *exportAudioMix = [AVMutableAudioMix audioMix];
AVMutableAudioMixInputParameters *exportAudioMixInputParameters = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:track];
//fade in
[exportAudioMixInputParameters setVolume:0.0 atTime:CMTimeMakeWithSeconds(start-1, 1)];
[exportAudioMixInputParameters setVolume:0.1 atTime:CMTimeMakeWithSeconds(start, 1)];
[exportAudioMixInputParameters setVolume:0.5 atTime:CMTimeMakeWithSeconds(start+1, 1)];
[exportAudioMixInputParameters setVolume:1.0 atTime:CMTimeMakeWithSeconds(start+2, 1)];
//fade out
[exportAudioMixInputParameters setVolume:1.0 atTime:CMTimeMakeWithSeconds((start+length-2), 1)];
[exportAudioMixInputParameters setVolume:0.5 atTime:CMTimeMakeWithSeconds((start+length-1), 1)];
[exportAudioMixInputParameters setVolume:0.1 atTime:CMTimeMakeWithSeconds((start+length), 1)];
exportAudioMix.inputParameters = [NSArray arrayWithObject:exportAudioMixInputParameters];
// configure export session output with all our parameters
exportSession.outputURL = [NSURL fileURLWithPath:filePath]; // output path
exportSession.outputFileType = AVFileTypeAppleM4A; // output file type
exportSession.timeRange = exportTimeRange; // trim time ranges
exportSession.audioMix = exportAudioMix; // fade in audio mix
// perform the export
[exportSession exportAsynchronouslyWithCompletionHandler:^{
if (AVAssetExportSessionStatusCompleted == exportSession.status) {
NSLog(#"AVAssetExportSessionStatusCompleted");
} else if (AVAssetExportSessionStatusFailed == exportSession.status) {
NSLog(#"AVAssetExportSessionStatusFailed");
} else {
NSLog(#"Export Session Status: %d", exportSession.status);
}
}];
I've made the same mistake as you dozens of times!
Apple's API is really weird on this:
CMTimeRange fadeInTimeRange = CMTimeRangeFromTimeToTime(startFadeInTime, endFadeInTime);
CMTimeRange fadeOutTimeRange = CMTimeRangeFromTimeToTime(startFadeOutTime, endFadeOutTime);
Should be:
CMTimeRangeMake(startFadeInTime, fadeInDuration);
CMTimeRangeMake(startFadeOutTime, fadeOutDuration);
A CMTimeRange is built from a start and a duration, not from a start and an end time!
But most of the time the end time also equals the duration (whenever the start time is 0), which is why so many people (including me) make the mistake.
And no, Apple, that's not intuitive at all!
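For reference, both constructors exist; they differ only in how the second argument is interpreted (a minimal illustration):
// These two ranges are identical: CMTimeRangeMake takes (start, duration),
// CMTimeRangeFromTimeToTime takes (start, end).
CMTimeRange a = CMTimeRangeMake(CMTimeMake(5, 1), CMTimeMake(2, 1));           // start 5 s, duration 2 s
CMTimeRange b = CMTimeRangeFromTimeToTime(CMTimeMake(5, 1), CMTimeMake(7, 1)); // start 5 s, end 7 s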
This is my working code, just take it and have a nice day!
+(void)makeAudioFadeOutWithSourceURL:(NSURL*)sourceURL destinationURL:(NSURL*)destinationURL fadeOutBeginSecond:(NSInteger)beginTime fadeOutEndSecond:(NSInteger)endTime fadeOutBeginVolume:(CGFloat)beginVolume fadeOutEndVolume:(CGFloat)endVolume callback:(void(^)(BOOL))callback
{
NSAssert(callback, @"need callback");
NSParameterAssert(beginVolume >= 0 && beginVolume <=1);
NSParameterAssert(endVolume >= 0 && endVolume <= 1);
BOOL sourceExist = [[NSFileManager defaultManager] fileExistsAtPath:sourceURL.path];
NSAssert(sourceExist, @"source does not exist");
AVURLAsset *asset = [AVAsset assetWithURL:sourceURL];
AVAssetExportSession* exporter = [AVAssetExportSession exportSessionWithAsset:asset presetName:AVAssetExportPresetAppleM4A];
exporter.outputURL = destinationURL;
exporter.outputFileType = AVFileTypeAppleM4A;
AVMutableAudioMix *exportAudioMix = [AVMutableAudioMix audioMix];
AVMutableAudioMixInputParameters *exportAudioMixInputParameters = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:asset.tracks.lastObject];
[exportAudioMixInputParameters setVolumeRampFromStartVolume:beginVolume toEndVolume:endVolume timeRange:CMTimeRangeMake(CMTimeMakeWithSeconds(beginTime, 1), CMTimeSubtract(CMTimeMakeWithSeconds(endTime, 1), CMTimeMakeWithSeconds(beginTime, 1)))];
NSArray *audioMixParameters = @[exportAudioMixInputParameters];
exportAudioMix.inputParameters = audioMixParameters;
exporter.audioMix = exportAudioMix;
[exporter exportAsynchronouslyWithCompletionHandler:^(void){
AVAssetExportSessionStatus status = exporter.status;
if (status != AVAssetExportSessionStatusCompleted) {
if (callback) {
callback(NO);
}
}
else {
if (callback) {
callback(YES);
}
}
NSError *error = exporter.error;
NSLog(#"export done,error %#,status %d",error,status);
}];
}
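A usage example (the class name AudioFader and the file names are assumptions; substitute your own):
// Sketch: fade the last three seconds of a clip down to silence.
NSString *docs = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSURL *src = [NSURL fileURLWithPath:[docs stringByAppendingPathComponent:@"input.m4a"]];  // existing audio file
NSURL *dst = [NSURL fileURLWithPath:[docs stringByAppendingPathComponent:@"faded.m4a"]];  // output path
[AudioFader makeAudioFadeOutWithSourceURL:src
                           destinationURL:dst
                       fadeOutBeginSecond:27
                         fadeOutEndSecond:30
                       fadeOutBeginVolume:1.0
                         fadeOutEndVolume:0.0
                                 callback:^(BOOL success) {
    NSLog(@"fade export %@", success ? @"succeeded" : @"failed");
}];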