I am new to iOS.
I have an app that records video and plays it back on an iPad.
I need to open the camera inside a view, so I used AVCaptureSession for that.
With my current code I can record and play video, but the recorded video comes out rotated. I record using LandscapeRight.
Here is my code:
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
captureVideoPreviewLayer.orientation=AVCaptureVideoOrientationLandscapeRight;
captureVideoPreviewLayer.frame = vwvideo.bounds;
[vwvideo.layer addSublayer:captureVideoPreviewLayer];
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input) {
// Handle the error appropriately.
NSLog(#"ERROR: trying to open camera: %#", error);
}
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
NSString *documentsDirectory = [paths objectAtIndex:0];
NSDateFormatter *dateFormat = [[[NSDateFormatter alloc] init] autorelease];
[dateFormat setDateFormat:@"yyyy-MM-dd HH.mm.ss"];
NSDate *now = [[[NSDate alloc] init] autorelease];
theDate = [dateFormat stringFromDate:now];
NSString *tempPath = [NSString stringWithFormat:@"%@/%@.mp4",documentsDirectory,theDate];
[tempPath retain];
NSLog(@"Path::%@",tempPath);
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:tempPath];
[session beginConfiguration];
[session addInput:input];
[session addOutput:movieFileOutput];
[session commitConfiguration];
[session startRunning];
[movieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
http://www.raywenderlich.com/13418/how-to-play-record-edit-videos-in-ios/videoplayrecord
If you go through the code of the sample project from that tutorial, it is easy to fix the rotated video to the correct orientation with the following snippet (from the RecordVideo.m file).
//FIXING ORIENTATION//
AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
AVAssetTrack *FirstAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation FirstAssetOrientation_ = UIImageOrientationUp;
BOOL isFirstAssetPortrait_ = NO;
CGAffineTransform firstTransform = FirstAssetTrack.preferredTransform;
if(firstTransform.a == 0 && firstTransform.b == 1.0 && firstTransform.c == -1.0 && firstTransform.d == 0) {FirstAssetOrientation_= UIImageOrientationRight; isFirstAssetPortrait_ = YES;}
if(firstTransform.a == 0 && firstTransform.b == -1.0 && firstTransform.c == 1.0 && firstTransform.d == 0) {FirstAssetOrientation_ = UIImageOrientationLeft; isFirstAssetPortrait_ = YES;}
if(firstTransform.a == 1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == 1.0) {FirstAssetOrientation_ = UIImageOrientationUp;}
if(firstTransform.a == -1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == -1.0) {FirstAssetOrientation_ = UIImageOrientationDown;}
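// How these checks work: the preferredTransform's (a, b, c, d) encode the track's rotation.
// (0, 1, -1, 0) is a 90° rotation (portrait), (0, -1, 1, 0) is -90° (portrait upside down),
// (1, 0, 0, 1) is the identity (landscape, no rotation), and (-1, 0, 0, -1) is 180°.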
CGFloat FirstAssetScaleToFitRatio = 1.0;
if(isFirstAssetPortrait_){
FirstAssetScaleToFitRatio = 1.0;
CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
[FirstlayerInstruction setTransform:CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:kCMTimeZero];
}else{
CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
[FirstlayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor),CGAffineTransformMakeTranslation(0, 160)) atTime:kCMTimeZero];
}
[FirstlayerInstruction setOpacity:0.0 atTime:videoAsset.duration];
// (The tutorial's second layer instruction is not needed here; only one track is composited.)
MainInstruction.layerInstructions = [NSArray arrayWithObjects:FirstlayerInstruction, nil];
AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
MainCompositionInst.frameDuration = CMTimeMake(1, 30);
MainCompositionInst.renderSize = CGSizeMake(360.0, 480.0);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"orientationFixedVideo-%d.mov",arc4random() % 1000]];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.videoComposition = MainCompositionInst;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^
{
dispatch_async(dispatch_get_main_queue(), ^{
[self exportDidFinish:exporter];
});
}];
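A simpler alternative, assuming you control the capture side: set the orientation on the movie file output's connection before you start recording, so the file is written upright in the first place. A minimal sketch (movieFileOutput is the output from the question's code):
AVCaptureConnection *outputConnection = [movieFileOutput connectionWithMediaType:AVMediaTypeVideo];
if ([outputConnection isVideoOrientationSupported])
{
[outputConnection setVideoOrientation:AVCaptureVideoOrientationLandscapeRight];
}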
Related
I am trying to merge multiple videos into one with AVMutableComposition, and that is working fine. Now I want to add a different title to each video.
Any help will be appreciated.
Thank you.
This is what I have tried so far:
for(int i=0;i< [arrSelectedUrls count];i++)
{
AVAsset *currentAsset = [AVAsset assetWithURL:[arrSelectedUrls objectAtIndex:i]]; // loop over the selected URLs to get each asset
/* currentAsset is the video asset created from the URL using AVAsset */
// AVURLAsset *newAudioAsset = [AVURLAsset URLAssetWithURL:[arrSelectedUrls objectAtIndex:i] options:nil];
BOOL hasAudio = [currentAsset tracksWithMediaType:AVMediaTypeAudio].count > 0;
AVMutableCompositionTrack *currentTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:i];
[currentTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, currentAsset.duration) ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:duration error:nil];
// [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, currentAsset.duration) ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:duration error:nil];
audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
if(hasAudio)
{
[audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, currentAsset.duration) ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:duration error:nil];
}
float sect = CMTimeGetSeconds(currentAsset.duration);
NSString *strSect = [NSString stringWithFormat:@"%f",sect];
[arrDuration addObject:@"0.5"];
[arrDuration addObject:@"0.5"];
[arrDuration addObject:@"0.5"];
[arrDuration addObject:@"0.5"];
AVMutableVideoCompositionLayerInstruction *currentAssetLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:currentTrack];
AVAssetTrack *currentAssetTrack = [[currentAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation currentAssetOrientation = UIImageOrientationUp;
BOOL isCurrentAssetPortrait = NO;
CGAffineTransform currentTransform = currentAssetTrack.preferredTransform;
if(currentTransform.a == 0 && currentTransform.b == 1.0 && currentTransform.c == -1.0 && currentTransform.d == 0) {currentAssetOrientation= UIImageOrientationRight; isCurrentAssetPortrait = YES;}
if(currentTransform.a == 0 && currentTransform.b == -1.0 && currentTransform.c == 1.0 && currentTransform.d == 0) {currentAssetOrientation = UIImageOrientationLeft; isCurrentAssetPortrait = YES;}
if(currentTransform.a == 1.0 && currentTransform.b == 0 && currentTransform.c == 0 && currentTransform.d == 1.0) {currentAssetOrientation = UIImageOrientationUp;}
if(currentTransform.a == -1.0 && currentTransform.b == 0 && currentTransform.c == 0 && currentTransform.d == -1.0) {currentAssetOrientation = UIImageOrientationDown;}
CGFloat FirstAssetScaleToFitRatio = 320.0/320.0;
if(isCurrentAssetPortrait){
FirstAssetScaleToFitRatio = 320.0/320.0;
CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
[currentAssetLayerInstruction setTransform:CGAffineTransformConcat(currentAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:duration];
}else{
CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
[currentAssetLayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(currentAssetTrack.preferredTransform, FirstAssetScaleFactor),CGAffineTransformMakeTranslation(0, 0)) atTime:duration];
}
duration=CMTimeAdd(duration, currentAsset.duration);
[currentAssetLayerInstruction setOpacity:0.0 atTime:duration];
[arrayInstruction addObject:currentAssetLayerInstruction];
NSLog(#"%lld", duration.value/duration.timescale);
CATextLayer *titleLayer = [CATextLayer layer];
if (i==0) {
titleLayer.string = @"www.miivdo.com";
}
if (i==1) {
titleLayer.string = @"www.mail.com";
}
//titleLayer.backgroundColor = (__bridge CGColorRef)([UIColor redColor]);
CGSize videoSize = [currentAssetTrack naturalSize];
titleLayer.fontSize = videoSize.height / 14;
// titleLayer.foregroundColor = (__bridge CGColorRef)([UIColor redColor]);
titleLayer.shadowOpacity = 0.5;
titleLayer.alignmentMode = kCAAlignmentRight;
titleLayer.bounds = CGRectMake(0, 0, 320, 50); //You may need to adjust this for proper display
parentLayer = [CALayer layer];
videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:titleLayer];
}
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, duration);
MainInstruction.layerInstructions = arrayInstruction;
MainCompositionInst = [AVMutableVideoComposition videoComposition];
MainCompositionInst.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
MainCompositionInst.frameDuration = CMTimeMake(1,30);
MainCompositionInst.renderSize = CGSizeMake(320.0, 320.0);
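One likely reason only a single title shows up with this attempt: parentLayer and videoLayer are recreated on every pass through the loop, so the animation tool built after the loop only references the last pair. A hedged sketch of the usual structure, creating the container layers once before the loop (the answer below sidesteps this by exporting each clip separately):
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, 320.0, 320.0);
videoLayer.frame = CGRectMake(0, 0, 320.0, 320.0);
[parentLayer addSublayer:videoLayer];
// inside the loop, add each clip's titleLayer to the same parentLayer and
// control when it is visible (e.g. with opacity animations timed to that clip)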
You can try this one:
- (void)MergeAndSave
{
for(int i=0;i< [arrSelectedUrls count];i++)
{
NSURL *url;
CALayer * parentLayer;
CALayer * videoLayer;
AVSynchronizedLayer *animationLayer = [AVSynchronizedLayer layer];
UIImage *image1 = [UIImage imageNamed:@"Fire1.jpeg"];
UIImage *image2 = [UIImage imageNamed:@"Fire2.jpeg"];
UIImage *image3 = [UIImage imageNamed:@"Fire3.jpeg"];
UIImage *image4 = [UIImage imageNamed:@"Fire4.jpeg"];
//int numberOfFile = [arrSelectedUrls count]; // Number Of Video You want to merge
AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init];
NSMutableArray *starImageArray = [NSMutableArray arrayWithObjects:(id)image1.CGImage,(id)image2.CGImage,(id)image3.CGImage,(id)image4.CGImage, nil];
NSMutableArray *arrDuration = [[NSMutableArray alloc] init];
NSMutableArray *arrayInstruction = [[NSMutableArray alloc] init];
AVMutableVideoCompositionInstruction * MainInstruction =
[AVMutableVideoCompositionInstruction videoCompositionInstruction];
AVMutableCompositionTrack *audioTrack;
audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID:kCMPersistentTrackID_Invalid];
CMTime duration = kCMTimeZero;
AVAsset *currentAsset = [AVAsset assetWithURL:[arrSelectedUrls objectAtIndex:i]]; // loop over the selected URLs to get each asset
/* currentAsset is the video asset created from the URL using AVAsset */
// AVURLAsset *newAudioAsset = [AVURLAsset URLAssetWithURL:[arrSelectedUrls objectAtIndex:i] options:nil];
BOOL hasAudio = [currentAsset tracksWithMediaType:AVMediaTypeAudio].count > 0;
AVMutableCompositionTrack *currentTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:i];
// [currentTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, currentAsset.duration) ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:duration error:nil];
[currentTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, currentAsset.duration) ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:duration error:nil];
//
//// [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, currentAsset.duration) ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:duration error:nil];
// audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
//
// // [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, mixComposition.duration) ofTrack:[[mixComposition tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:duration error:nil];
if(hasAudio)
{
[audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, currentAsset.duration) ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:duration error:nil];
audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, mixComposition.duration) ofTrack:[[mixComposition tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:duration error:nil];
}
float sect = CMTimeGetSeconds(currentAsset.duration);
NSString *strSect = [NSString stringWithFormat:@"%f",sect];
[arrDuration addObject:@"0.5"];
[arrDuration addObject:@"0.5"];
[arrDuration addObject:@"0.5"];
[arrDuration addObject:@"0.5"];
AVMutableVideoCompositionLayerInstruction *currentAssetLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:currentTrack];
AVAssetTrack *currentAssetTrack = [[currentAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation currentAssetOrientation = UIImageOrientationUp;
BOOL isCurrentAssetPortrait = NO;
CGAffineTransform currentTransform = currentAssetTrack.preferredTransform;
if(currentTransform.a == 0 && currentTransform.b == 1.0 && currentTransform.c == -1.0 && currentTransform.d == 0) {currentAssetOrientation= UIImageOrientationRight; isCurrentAssetPortrait = YES;}
if(currentTransform.a == 0 && currentTransform.b == -1.0 && currentTransform.c == 1.0 && currentTransform.d == 0) {currentAssetOrientation = UIImageOrientationLeft; isCurrentAssetPortrait = YES;}
if(currentTransform.a == 1.0 && currentTransform.b == 0 && currentTransform.c == 0 && currentTransform.d == 1.0) {currentAssetOrientation = UIImageOrientationUp;}
if(currentTransform.a == -1.0 && currentTransform.b == 0 && currentTransform.c == 0 && currentTransform.d == -1.0) {currentAssetOrientation = UIImageOrientationDown;}
CGFloat FirstAssetScaleToFitRatio = 320.0/320.0;
if(isCurrentAssetPortrait){
FirstAssetScaleToFitRatio = 320.0/320.0;
CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
[currentAssetLayerInstruction setTransform:CGAffineTransformConcat(currentAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:duration];
}else{
CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
[currentAssetLayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(currentAssetTrack.preferredTransform, FirstAssetScaleFactor),CGAffineTransformMakeTranslation(0, 0)) atTime:duration];
}
duration=CMTimeAdd(duration, currentAsset.duration);
[currentAssetLayerInstruction setOpacity:0.0 atTime:duration];
[arrayInstruction addObject:currentAssetLayerInstruction];
NSLog(#"%lld", duration.value/duration.timescale);
CATextLayer *titleLayer = [CATextLayer layer];
titleLayer.string = [NSString stringWithFormat:#"Final%#",[Titlearry objectAtIndex:i]];
CGSize videoSize = [currentAssetTrack naturalSize];
titleLayer.fontSize = videoSize.height / 14;
titleLayer.shadowOpacity = 0.5;
titleLayer.alignmentMode = kCAAlignmentRight;
titleLayer.bounds = CGRectMake(0, 0, 320, 500); //You may need to adjust this for proper display
parentLayer = [CALayer layer];
videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:titleLayer];
// new addition From Ritesh //
double time1 = 0.01;
// in ms, (0.2*1000)/1000 == 200/1000 == 0.2
// CMTime time2 = CMTimeMake(time1*1000, 1000);
NSMutableArray * keyTimesArray = [[NSMutableArray alloc]init];
for (int z = 1; z<4; z++)
{
NSNumber *temp = [NSNumber numberWithFloat:(time1+(float)z/30)];
[keyTimesArray addObject:temp];
}
animationLayer.opacity = 1.0;
// animationLayer.backgroundColor = [UIColor yellowColor].CGColor;
[animationLayer setFrame:CGRectMake(0, 0, 320, 50)];
[parentLayer addSublayer:animationLayer];
CAKeyframeAnimation *changeImageAnimation = [CAKeyframeAnimation animationWithKeyPath:@"contents"];
[changeImageAnimation setDelegate:self];
changeImageAnimation.calculationMode = kCAAnimationDiscrete;
[animationLayer setContents:[starImageArray lastObject]];
changeImageAnimation.duration = 10.0f;
changeImageAnimation.repeatCount = 30;
changeImageAnimation.values = [NSArray arrayWithArray:starImageArray];
//changeImageAnimation.removedOnCompletion = YES;
// [changeImageAnimation setKeyTimes:arrDuration];
[changeImageAnimation setBeginTime:1.0];
[changeImageAnimation setRemovedOnCompletion:NO];
[changeImageAnimation setDelegate:self];
[animationLayer addAnimation:changeImageAnimation forKey:@"contents"];
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, duration);
MainInstruction.layerInstructions = arrayInstruction;
AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
MainCompositionInst.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
MainCompositionInst.frameDuration = CMTimeMake(1,30);
MainCompositionInst.renderSize = CGSizeMake(320.0, 320.0);
// NSString *myPathDocs = [[[AppDelegate sharedAppDelegate] applicationCacheDirectory] stringByAppendingPathComponent:[NSString stringWithFormat:@"mergeVideo%-dtemp.mp4",arc4random() % 10000]];
NSArray *dirPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *docsDir = [dirPaths objectAtIndex:0];
NSString *outputFilePath = [docsDir stringByAppendingPathComponent:[NSString stringWithFormat:@"merge%@.mov",[Titlearry objectAtIndex:i]]];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
url = [NSURL fileURLWithPath:outputFilePath];
[UrlArray addObject:outputFilePath];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.videoComposition = MainCompositionInst;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^
{
switch (exporter.status)
{
case AVAssetExportSessionStatusCompleted:
{
if(i == [arrSelectedUrls count]-1)
{
[self mergeAllVideoClipscompletionCallback];
}
}
break;
case AVAssetExportSessionStatusFailed:
NSLog(#"Failed:%#", exporter.error.description);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(#"Canceled:%#", exporter.error);
break;
case AVAssetExportSessionStatusExporting:
NSLog(#"Exporting!");
break;
case AVAssetExportSessionStatusWaiting:
NSLog(#"Waiting");
break;
default:
break;
}
}];
}
// [self performSelector:@selector(MergeAndExport) withObject:nil afterDelay:3.0];
// [self MergeAndExport];
}
-(void)MergeAndExport
{
CALayer * parentLayer;
CALayer * videoLayer;
//int numberOfFile = [arrSelectedUrls count]; // Number Of Video You want to merge
AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init];
NSMutableArray *arrayInstruction = [[NSMutableArray alloc] init];
AVMutableVideoCompositionInstruction * MainInstruction =
[AVMutableVideoCompositionInstruction videoCompositionInstruction];
AVMutableCompositionTrack *audioTrack;
audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
preferredTrackID:kCMPersistentTrackID_Invalid];
CMTime duration = kCMTimeZero;
for(int i=0;i< [arrSelectedUrls count];i++)
{
// AVAsset *currentAsset = [AVAsset assetWithURL:[arrSelectedUrls objectAtIndex:i]];
NSURL *url=[NSURL fileURLWithPath:[UrlArray objectAtIndex:i]];
AVURLAsset *currentAsset=[AVURLAsset URLAssetWithURL:url options:nil];
// loop over the previously exported URLs to get each asset
/* currentAsset is the video asset created from the URL using AVAsset */
// AVURLAsset *newAudioAsset = [AVURLAsset URLAssetWithURL:[arrSelectedUrls objectAtIndex:i] options:nil];
BOOL hasAudio = [currentAsset tracksWithMediaType:AVMediaTypeAudio].count > 0;
AVMutableCompositionTrack *currentTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[currentTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, currentAsset.duration) ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:duration error:nil];
// audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
// [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, mixComposition.duration) ofTrack:[[mixComposition tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:duration error:nil];
if(hasAudio)
{
[audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, currentAsset.duration) ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:duration error:nil];
audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, mixComposition.duration) ofTrack:[[mixComposition tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:duration error:nil];
}
AVMutableVideoCompositionLayerInstruction *currentAssetLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:currentTrack];
AVAssetTrack *currentAssetTrack = [[currentAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation currentAssetOrientation = UIImageOrientationUp;
BOOL isCurrentAssetPortrait = NO;
CGAffineTransform currentTransform = currentAssetTrack.preferredTransform;
if(currentTransform.a == 0 && currentTransform.b == 1.0 && currentTransform.c == -1.0 && currentTransform.d == 0) {currentAssetOrientation= UIImageOrientationRight; isCurrentAssetPortrait = YES;}
if(currentTransform.a == 0 && currentTransform.b == -1.0 && currentTransform.c == 1.0 && currentTransform.d == 0) {currentAssetOrientation = UIImageOrientationLeft; isCurrentAssetPortrait = YES;}
if(currentTransform.a == 1.0 && currentTransform.b == 0 && currentTransform.c == 0 && currentTransform.d == 1.0) {currentAssetOrientation = UIImageOrientationUp;}
if(currentTransform.a == -1.0 && currentTransform.b == 0 && currentTransform.c == 0 && currentTransform.d == -1.0) {currentAssetOrientation = UIImageOrientationDown;}
CGFloat FirstAssetScaleToFitRatio = 320.0/320.0;
if(isCurrentAssetPortrait){
FirstAssetScaleToFitRatio = 320.0/320.0;
CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
[currentAssetLayerInstruction setTransform:CGAffineTransformConcat(currentAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:duration];
}else{
CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
[currentAssetLayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(currentAssetTrack.preferredTransform, FirstAssetScaleFactor),CGAffineTransformMakeTranslation(0, 0)) atTime:duration];
}
duration=CMTimeAdd(duration, currentAsset.duration);
[currentAssetLayerInstruction setOpacity:0.0 atTime:duration];
[arrayInstruction addObject:currentAssetLayerInstruction];
NSLog(#"%lld", duration.value/duration.timescale);
CATextLayer *titleLayer = [CATextLayer layer];
titleLayer.string = #"www.miivdo.com";
//titleLayer.backgroundColor = (__bridge CGColorRef)([UIColor redColor]);
CGSize videoSize = [currentAssetTrack naturalSize];
titleLayer.fontSize = videoSize.height / 14;
// titleLayer.foregroundColor = (__bridge CGColorRef)([UIColor redColor]);
titleLayer.shadowOpacity = 0.5;
titleLayer.alignmentMode = kCAAlignmentRight;
titleLayer.bounds = CGRectMake(0, 0, 320, 50); //You may need to adjust this for proper display
parentLayer = [CALayer layer];
videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:titleLayer];
}
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, duration);
MainInstruction.layerInstructions = arrayInstruction;
AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
MainCompositionInst.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
MainCompositionInst.frameDuration = CMTimeMake(1, 30);
MainCompositionInst.renderSize = CGSizeMake(320.0, 320.0);
// NSString *myPathDocs = [[[AppDelegate sharedAppDelegate] applicationCacheDirectory] stringByAppendingPathComponent:[NSString stringWithFormat:@"mergeVideo%-dtemp.mp4",arc4random() % 10000]];
NSArray *dirPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *docsDir = [dirPaths objectAtIndex:0];
NSString *outputFilePath = [docsDir stringByAppendingPathComponent:@"merge.mov"];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
NSURL *url = [NSURL fileURLWithPath:outputFilePath];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.videoComposition = MainCompositionInst;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^
{
switch (exporter.status)
{
case AVAssetExportSessionStatusCompleted:
{
NSURL *outputURL = exporter.outputURL;
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputURL]) {
[self writeExportedVideoToAssetsLibrary:outputURL];
//
}
}
break;
case AVAssetExportSessionStatusFailed:
NSLog(#"Failed:%#", exporter.error.description);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(#"Canceled:%#", exporter.error);
break;
case AVAssetExportSessionStatusExporting:
NSLog(#"Exporting!");
break;
case AVAssetExportSessionStatusWaiting:
NSLog(#"Waiting");
break;
default:
break;
}
}];
}
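For context on the flow above: MergeAndSave exports each titled clip to its own merge<Title>.mov and collects the paths in UrlArray; once the last export completes (the mergeAllVideoClipscompletionCallback call), MergeAndExport can stitch those intermediate files into a single merge.mov and save it to the assets library.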
I'm merging video with video, and audio with video. The video-with-video case works fine, but when an audio file is merged the result shows a black screen. I don't know what I'm doing wrong in this code:
-(void)mergeAllMediaAtTime:(NSMutableArray*)startTimeArray {
NSURL *firstURL = [NSURL fileURLWithPath:[urlArray objectAtIndex:counter]];
firstAsset = [AVAsset assetWithURL:firstURL];
NSString* videoDirPath = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/Crop Videos"];
NSString* fileName = [VideoAndAudioNameArray objectAtIndex:counter];
NSString *pSecondVideoPath = [videoDirPath stringByAppendingPathComponent:fileName];
NSURL *secondURL = [NSURL fileURLWithPath:pSecondVideoPath];
secondAsset = [AVAsset assetWithURL:secondURL];
if(firstAsset !=nil && secondAsset!=nil)
{
AVVideoComposition *originalComposition = [AVVideoComposition videoCompositionWithPropertiesOfAsset:firstAsset];
//Create AVMutableComposition Object.This object will hold our multiple AVMutableCompositionTrack.
AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init];
//VIDEO TRACK
AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack* track = [[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:track atTime:kCMTimeZero error:nil];
int time = [[startTimeArray objectAtIndex:counter] intValue];
CMTime pTime = CMTimeMake(time, 1);
///////////////////////
AVMutableCompositionTrack *secondTrack;
if ([[fileName pathExtension] isEqualToString:@"mov"])
{
secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, secondAsset.duration) ofTrack:[[secondAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:pTime error:nil];
}
// If Audio file
else
{
secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, secondAsset.duration) ofTrack:[[secondAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:pTime error:nil];
NSLog(#"Audio file's Merging");
}
/****** First Video *********/
AVMutableVideoCompositionInstruction * MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, firstAsset.duration);
MainInstruction.backgroundColor = [[UIColor clearColor] CGColor];
//FIXING ORIENTATION//
AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
AVAssetTrack *FirstAssetTrack = [[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
[FirstlayerInstruction setTransform:FirstAssetTrack.preferredTransform atTime:kCMTimeZero];
[FirstlayerInstruction setOpacity:0.0 atTime:firstAsset.duration];
if ([[fileName pathExtension] isEqualToString:@"mov"])
{
/****** Second Video *********/
AVMutableVideoCompositionLayerInstruction *SecondlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:secondTrack];
AVAssetTrack *SecondAssetTrack = [[secondAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
[SecondlayerInstruction setOpacity:1.0 atTime:pTime];
UIImageOrientation SecondAssetOrientation_ = UIImageOrientationUp;
BOOL isSecondAssetPortrait_ = NO;
CGAffineTransform secondTransform = SecondAssetTrack.preferredTransform;
if(secondTransform.a == 0 && secondTransform.b == 1.0 && secondTransform.c == -1.0 && secondTransform.d == 0) {SecondAssetOrientation_= UIImageOrientationRight; isSecondAssetPortrait_ = YES;}
if(secondTransform.a == 0 && secondTransform.b == -1.0 && secondTransform.c == 1.0 && secondTransform.d == 0) {SecondAssetOrientation_ = UIImageOrientationLeft; isSecondAssetPortrait_ = YES;}
if(secondTransform.a == 1.0 && secondTransform.b == 0 && secondTransform.c == 0 && secondTransform.d == 1.0) {SecondAssetOrientation_ = UIImageOrientationUp;}
if(secondTransform.a == -1.0 && secondTransform.b == 0 && secondTransform.c == 0 && secondTransform.d == -1.0) {SecondAssetOrientation_ = UIImageOrientationDown;}
CGFloat SecondAssetScaleToFitRatioOfWidth = nRenderWidth/SecondAssetTrack.naturalSize.width;
if(isSecondAssetPortrait_)
{
CGFloat SecondAssetScaleToFitRatioOfHeight = nRenderWidth/SecondAssetTrack.naturalSize.height;
CGAffineTransform SecondAssetScaleFactor = CGAffineTransformMakeScale(SecondAssetScaleToFitRatioOfWidth,SecondAssetScaleToFitRatioOfHeight);
[SecondlayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformMakeScale(1.0f,1.0f), SecondAssetScaleFactor) atTime:kCMTimeZero];
//CGAffineTransformConcat(CGAffineTransformMakeScale(1.0f,1.0f), SecondAssetScaleFactor)
}
else
{
CGFloat SecondAssetScaleToFitRatioOfWidth = nRenderWidth/SecondAssetTrack.naturalSize.width;
CGFloat SecondAssetScaleToFitRatioOfHeight = nRenderWidth/SecondAssetTrack.naturalSize.height;
CGAffineTransform SecondAssetScaleFactor = CGAffineTransformMakeScale(SecondAssetScaleToFitRatioOfWidth,SecondAssetScaleToFitRatioOfHeight);
//[SecondlayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(SecondAssetTrack.preferredTransform, SecondAssetScaleFactor),CGAffineTransformMakeTranslation(0, 160)) atTime:firstAsset.duration];
[SecondlayerInstruction setTransform:CGAffineTransformConcat(SecondAssetScaleFactor ,CGAffineTransformMakeTranslation(1, 1)) atTime:kCMTimeZero];
//CGAffineTransformConcat(CGAffineTransformMakeScale(1.0f,1.0f),CGAffineTransformMakeTranslation(0, 100))
}
[SecondlayerInstruction setOpacity:0.0 atTime:CMTimeAdd(pTime, secondAsset.duration)];
MainInstruction.layerInstructions = [NSArray arrayWithObjects:SecondlayerInstruction, FirstlayerInstruction,nil];
}
AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
MainCompositionInst.frameDuration = originalComposition.frameDuration;
MainCompositionInst.renderScale = 1.0;
MainCompositionInst.renderSize = CGSizeMake(nRenderWidth, nRenderHeight);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"mergeVideo_%d.mov",arc4random() % 1000]];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
[urlArray addObject:myPathDocs];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.videoComposition = MainCompositionInst;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^
{
dispatch_async(dispatch_get_main_queue(), ^{
[self exportDidFinish:exporter];
});
}];
}
}
In that case, you are starting the audio at the end of the video frames. Use atTime:kCMTimeZero instead, as shown in the code below:
else
{
secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, secondAsset.duration) ofTrack:[[secondAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];
NSLog(#"Audio file's Merging");
}
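With the audio inserted at kCMTimeZero it plays alongside the video from the start, instead of extending the composition past the last video frame, which is what was rendering as a black screen.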
I have to implement functionality to repeatedly pause and resume video capture in a single session, with each new segment (the segments captured after each pause) appended to the same video file, using AVFoundation. Currently, every time I press "stop" and then "record" again, it just saves a new video file to my iPhone's Documents directory and starts capturing to a new file. I need to be able to press the "record/stop" button over and over, capture video & audio only while recording is active, and then, when the "done" button is pressed, have a single AV file with all the segments together. And all of this needs to happen in the same capture session / preview session.
I am not using AVAssetWriterInput.
The only way I can think of to do this is, when the "done" button is pressed, to take each individual output file and combine them into a single file.
This code works on iOS 5 but not on iOS 6. On iOS 6, the first time I pause recording (stop recording), the AVCaptureFileOutputRecordingDelegate method (captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error:) is called. But after that, when I start recording again, the delegate method is called again, and it is then not called when I stop recording.
I need a solution for this issue. Please help me.
//View LifeCycle
- (void)viewDidLoad
{
[super viewDidLoad];
self.finalRecordedVideoName = [self stringWithNewUUID];
arrVideoName = [[NSMutableArray alloc]initWithCapacity:0];
arrOutputUrl = [[NSMutableArray alloc] initWithCapacity:0];
CaptureSession = [[AVCaptureSession alloc] init];
captureDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
if ([captureDevices count] > 0)
{
NSError *error;
VideoInputDevice = [[AVCaptureDeviceInput alloc] initWithDevice:[self backFacingCamera] error:&error];
if (!error)
{
if ([CaptureSession canAddInput:VideoInputDevice])
[CaptureSession addInput:VideoInputDevice];
else
NSLog(#"Couldn't add video input");
}
else
{
NSLog(#"Couldn't create video input");
}
}
else
{
NSLog(#"Couldn't create video capture device");
}
//ADD VIDEO PREVIEW LAYER
NSLog(#"Adding video preview layer");
AVCaptureVideoPreviewLayer *layer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:CaptureSession];
[self setPreviewLayer:layer];
UIDeviceOrientation currentOrientation = [UIDevice currentDevice].orientation;
NSLog(#"%d",currentOrientation);
if (currentOrientation == UIDeviceOrientationPortrait)
{
PreviewLayer.orientation = AVCaptureVideoOrientationPortrait;
}
else if (currentOrientation == UIDeviceOrientationPortraitUpsideDown)
{
PreviewLayer.orientation = AVCaptureVideoOrientationPortraitUpsideDown;
}
else if (currentOrientation == UIDeviceOrientationLandscapeRight)
{
PreviewLayer.orientation = AVCaptureVideoOrientationLandscapeRight;
}
else if (currentOrientation == UIDeviceOrientationLandscapeLeft)
{
PreviewLayer.orientation = AVCaptureVideoOrientationLandscapeLeft;
}
[[self PreviewLayer] setVideoGravity:AVLayerVideoGravityResizeAspectFill];
//ADD MOVIE FILE OUTPUT
NSLog(#"Adding movie file output");
MovieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
VideoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
[VideoDataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
[VideoDataOutput setVideoSettings:videoSettings];
Float64 TotalSeconds = 60; //Total seconds
int32_t preferredTimeScale = 30; //Frames per second
CMTime maxDuration = CMTimeMakeWithSeconds(TotalSeconds, preferredTimeScale);//<<SET MAX DURATION
MovieFileOutput.maxRecordedDuration = maxDuration;
MovieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024; //<<SET MIN FREE SPACE IN BYTES FOR RECORDING TO CONTINUE ON A VOLUME
//SET THE CONNECTION PROPERTIES (output properties)
[self CameraSetOutputProperties]; //(called as a separate method since it must also be done after switching cameras)
AVCaptureConnection *videoConnection = nil;
for ( AVCaptureConnection *connection in [MovieFileOutput connections] )
{
NSLog(#"%#", connection);
for ( AVCaptureInputPort *port in [connection inputPorts] )
{
NSLog(#"%#", port);
if ( [[port mediaType] isEqual:AVMediaTypeVideo] )
{
videoConnection = connection;
}
}
}
if([videoConnection isVideoOrientationSupported]) // **Here it is, it's always false**
{
[videoConnection setVideoOrientation:[[UIDevice currentDevice] orientation]];
}
NSLog(@"Setting image quality");
[CaptureSession setSessionPreset:AVCaptureSessionPresetLow];
//----- DISPLAY THE PREVIEW LAYER -----
CGRect layerRect = CGRectMake(5, 5, 299, ([[UIScreen mainScreen] bounds].size.height == 568)?438:348);
[self.PreviewLayer setBounds:layerRect];
[self.PreviewLayer setPosition:CGPointMake(CGRectGetMidX(layerRect),CGRectGetMidY(layerRect))];
if ([CaptureSession canAddOutput:MovieFileOutput])
[CaptureSession addOutput:MovieFileOutput];
[CaptureSession addOutput:VideoDataOutput];
//We use this instead so it goes on a layer behind our UI controls (avoids us having to manually bring each control to the front):
CameraView = [[UIView alloc] init];
[videoPreviewLayer addSubview:CameraView];
[videoPreviewLayer sendSubviewToBack:CameraView];
[[CameraView layer] addSublayer:PreviewLayer];
//----- START THE CAPTURE SESSION RUNNING -----
[CaptureSession startRunning];
}
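One likely reason isVideoOrientationSupported is always false in viewDidLoad above: the connections loop runs before MovieFileOutput has been added to the session, and AVCaptureSession only creates the AVCaptureConnection objects when addOutput: is called, so [MovieFileOutput connections] is still empty at that point. Querying the connection after addOutput: should work; a minimal sketch:
[CaptureSession addOutput:MovieFileOutput];
AVCaptureConnection *videoConnection = [MovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
if ([videoConnection isVideoOrientationSupported])
{
[videoConnection setVideoOrientation:AVCaptureVideoOrientationPortrait];
}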
#pragma mark - IBAction Methods
-(IBAction)btnStartAndStopPressed:(id)sender
{
UIButton *StartAndStopButton = (UIButton*)sender;
if ([StartAndStopButton isSelected] == NO)
{
[StartAndStopButton setSelected:YES];
[btnPauseAndResume setEnabled:YES];
[btnBack setEnabled:NO];
[btnSwitchCameraInput setHidden:YES];
NSDate *date = [NSDate date];
NSLog(#" date %#",date);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *recordedFileName = nil;
recordedFileName = [NSString stringWithFormat:#"output%#.mov",date];
NSString *documentsDirectory = [paths objectAtIndex:0];
self.outputPath = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:#"%#",recordedFileName]];
NSLog(#"%#",self.outputPath);
[arrVideoName addObject:recordedFileName];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:self.outputPath];
if ([[NSFileManager defaultManager] fileExistsAtPath:self.outputPath])
{
NSError *error;
if ([[NSFileManager defaultManager] removeItemAtPath:self.outputPath error:&error] == NO)
{
//Error - handle if required
}
}
//Start recording
[MovieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
recordingTimer = [NSTimer scheduledTimerWithTimeInterval:1.0 target:self selector:@selector(VideoRecording) userInfo:nil repeats:YES];
}
else
{
[StartAndStopButton setSelected:NO];
[btnPauseAndResume setEnabled:NO];
[btnBack setEnabled:YES];
[btnSwitchCameraInput setHidden:NO];
NSLog(#"STOP RECORDING");
WeAreRecording = NO;
[MovieFileOutput stopRecording];
[((ActOutAppDelegate *)ActOut_AppDelegate) showLoadingViewOnView:self.view withLabel:#"Please wait...."];
if ([recordingTimer isValid])
{
[recordingTimer invalidate];
recordingTimer = nil;
recordingTime = 30;
}
stopRecording = YES;
}
}
- (IBAction)btnPauseAndResumePressed:(id)sender
{
UIButton *PauseAndResumeButton = (UIButton*)sender;
if (PauseAndResumeButton.selected == NO)
{
PauseAndResumeButton.selected = YES;
NSLog(#"recording paused");
WeAreRecording = NO;
[MovieFileOutput stopRecording];
[self pauseTimer:recordingTimer];
[btnStartAndStop setEnabled:NO];
[btnBack setEnabled:YES];
[btnSwitchCameraInput setHidden:NO];
}
else
{
PauseAndResumeButton.selected = NO;
NSLog(#"recording resumed");
[btnStartAndStop setEnabled:YES];
[btnBack setEnabled:NO];
[btnSwitchCameraInput setHidden:YES];
WeAreRecording = YES;
NSDate *date = [NSDate date];
NSLog(@" date %@",date);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,NSUserDomainMask, YES);
NSString *recordedFileName = nil;
recordedFileName = [NSString stringWithFormat:@"output%@.mov",date];
NSString *documentsDirectory = [paths objectAtIndex:0];
self.outputPath = [documentsDirectory stringByAppendingPathComponent:recordedFileName];
NSLog(@"%@",self.outputPath);
[arrVideoName addObject:recordedFileName];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:self.outputPath];
if ([[NSFileManager defaultManager] fileExistsAtPath:self.outputPath])
{
NSError *error;
if ([[NSFileManager defaultManager] removeItemAtPath:self.outputPath error:&error] == NO)
{
//Error - handle if required
}
}
[self resumeTimer:recordingTimer];
//Start recording
[MovieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
}
}
- (void) CameraSetOutputProperties
{
//SET THE CONNECTION PROPERTIES (output properties)
AVCaptureConnection *CaptureConnection = [MovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
[CaptureConnection setVideoOrientation:AVCaptureVideoOrientationPortrait];
//Set frame rate (if required)
CMTimeShow(CaptureConnection.videoMinFrameDuration);
CMTimeShow(CaptureConnection.videoMaxFrameDuration);
if (CaptureConnection.supportsVideoMinFrameDuration)
CaptureConnection.videoMinFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
if (CaptureConnection.supportsVideoMaxFrameDuration)
CaptureConnection.videoMaxFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
CMTimeShow(CaptureConnection.videoMinFrameDuration);
CMTimeShow(CaptureConnection.videoMaxFrameDuration);
}
- (AVCaptureDevice *) CameraWithPosition:(AVCaptureDevicePosition) Position
{
NSArray *Devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *Device in Devices)
{
if ([Device position] == Position)
{
NSLog(#"%d",Position);
return Device;
}
}
return nil;
}
#pragma mark - AVCaptureFileOutputRecordingDelegate Method
-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
if(videoWriterInput.readyForMoreMediaData && WeAreRecording) [videoWriterInput appendSampleBuffer:sampleBuffer];
for(AVCaptureConnection *captureConnection in [captureOutput connections])
{
if ([captureConnection isVideoOrientationSupported])
{
AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationLandscapeLeft;
[captureConnection setVideoOrientation:orientation];
}
}
UIDeviceOrientation curOr = [[UIDevice currentDevice] orientation];
CGAffineTransform t;
if (curOr == UIDeviceOrientationPortrait)
{
t = CGAffineTransformMakeRotation(-M_PI / 2);
}
else if (curOr == UIDeviceOrientationPortraitUpsideDown)
{
t = CGAffineTransformMakeRotation(M_PI / 2);
}
else if (curOr == UIDeviceOrientationLandscapeRight)
{
t = CGAffineTransformMakeRotation(M_PI);
}
else
{
t = CGAffineTransformMakeRotation(0);
}
}
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
NSLog(#"didFinishRecordingToOutputFileAtURL - enter");
NSLog(#"output file url : %#", [outputFileURL absoluteString]);
BOOL RecordedSuccessfully = YES;
if ([error code] != noErr)
{
// A problem occurred: Find out if the recording was successful.
id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
if (value)
{
RecordedSuccessfully = [value boolValue];
}
}
AVCaptureConnection *videoConnection=nil;
for ( AVCaptureConnection *connection in [MovieFileOutput connections] )
{
NSLog(#"%#", connection);
for ( AVCaptureInputPort *port in [connection inputPorts] )
{
NSLog(#"%#", port);
if ( [[port mediaType] isEqual:AVMediaTypeVideo] )
{
videoConnection = connection;
}
}
}
if([videoConnection isVideoOrientationSupported]) // **Here it is, it's always false**
{
[videoConnection setVideoOrientation:[[UIDevice currentDevice] orientation]];
}
NSLog(@"Setting image quality");
NSData *videoData = [NSData dataWithContentsOfURL:outputFileURL];
[videoData writeToFile:self.outputPath atomically:NO];
[arrOutputUrl addObject:outputFileURL];
if (stopRecording)
{
[self mergeMultipleVideo];
}
}
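A small aside, assuming the segment files can stay where AVFoundation wrote them: reading the finished file into an NSData and writing it back out loads the whole segment into memory. Moving the file with NSFileManager is a lighter sketch:
NSError *moveError = nil;
[[NSFileManager defaultManager] moveItemAtURL:outputFileURL toURL:[NSURL fileURLWithPath:self.outputPath] error:&moveError];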
//Method to merge the recorded video segments
-(void)mergeMultipleVideo
{
mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
CMTime nextClipStartTime = kCMTimeZero;
NSLog(#"Array of output file url : %#", arrOutputUrl);
if (arrOutputUrl.count > 0)
{
for(int i = 0 ;i < [arrOutputUrl count];i++)
{
AVURLAsset* VideoAsset = [[AVURLAsset alloc]initWithURL:[arrOutputUrl objectAtIndex:i] options:nil];
CMTimeRange timeRangeInAsset;
timeRangeInAsset = CMTimeRangeMake(kCMTimeZero, [VideoAsset duration]);
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, VideoAsset.duration) ofTrack:[[VideoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRangeInAsset.duration);
}
}
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.mov",self.finalRecordedVideoName]];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
exportSession.outputURL=url;
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
exportSession.shouldOptimizeForNetworkUse = YES;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
[self exportDidFinish:exportSession path:myPathDocs];
});
}];
}
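Note that mergeMultipleVideo only copies video tracks, so the merged file will be silent even if the segments were recorded with sound. If the segments contain audio, a parallel audio composition track can carry it over; a hedged sketch (create the audio track once next to compositionVideoTrack, then insert inside the same loop):
AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
// inside the for loop, after the video insert:
NSArray *audioTracks = [VideoAsset tracksWithMediaType:AVMediaTypeAudio];
if (audioTracks.count > 0)
{
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, VideoAsset.duration) ofTrack:[audioTracks objectAtIndex:0] atTime:nextClipStartTime error:nil];
}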
-(void)exportDidFinish:(AVAssetExportSession*)session path:(NSString*)outputVideoPath
{
NSLog(#"session.status : %d",session.status);
if (session.status == AVAssetExportSessionStatusCompleted)
{
NSURL *outputURL = session.outputURL;
NSData *videoData = [NSData dataWithContentsOfURL:outputURL];
[videoData writeToFile:outputVideoPath atomically:NO];
if ([arrVideoName count] > 0)
{
for (int i = 0; i < [arrVideoName count]; i++)
{
NSArray* documentPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString* fullFilePath = [[documentPaths objectAtIndex:0] stringByAppendingPathComponent:[arrVideoName objectAtIndex:i]];
NSLog(@"Full path of file to be deleted: %@",fullFilePath);
NSFileManager *fileManager = [NSFileManager defaultManager];
NSError *error;
if ([fileManager fileExistsAtPath:fullFilePath])
{
[fileManager removeItemAtPath:fullFilePath error:&error];
}
}
[arrVideoName removeAllObjects];
}
if (arrOutputUrl.count > 0)
{
[arrOutputUrl removeAllObjects];
}
[((ActOutAppDelegate *)ActOut_AppDelegate) removeLoadingViewfromView:self.view];
[self.view addSubview:afterRecordingPopupView];
}
}
Look at the AVCaptureConnection's enabled property. For your output connection, set enabled to NO instead of stopping the session.
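A minimal sketch of that approach, using the question's MovieFileOutput name: toggle the connection while the session keeps running, so pausing doesn't tear down the preview.
AVCaptureConnection *connection = [MovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
connection.enabled = NO; // pause: frames stop flowing to the file output
// ... later ...
connection.enabled = YES; // resume: capture continues in the same session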
I am working on an app in which the user picks a video from Photos and uploads it to a server. Since my server is a .NET server, the video gets rotated. The cause of the problem is probably the same as in the image case (you may refer to my earlier answer https://stackoverflow.com/a/10601175/1030951 ), so I googled and found code to fix the video orientation. I took code from RayWenderlich.com and modified it in the following way. The output video now works fine, but it is mute: it plays, but without audio. Kindly help me if I am missing something.
I pass the info dictionary from the -(void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info method:
- (void)fix:(NSDictionary*)pobjInfoDirectory withFileName:(NSString*)pstrOutputFileName
{
firstAsset = [AVAsset assetWithURL:[pobjInfoDirectory objectForKey:UIImagePickerControllerMediaURL]];
if(firstAsset !=nil)
{
//Create AVMutableComposition Object.This object will hold our multiple AVMutableCompositionTrack.
AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init];
//VIDEO TRACK
AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
AVMutableVideoCompositionInstruction * MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, firstAsset.duration);
//FIXING ORIENTATION//
AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
AVAssetTrack *FirstAssetTrack = [[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation FirstAssetOrientation_ = UIImageOrientationUp;
BOOL isFirstAssetPortrait_ = NO;
CGAffineTransform firstTransform = FirstAssetTrack.preferredTransform;
if(firstTransform.a == 0 && firstTransform.b == 1.0 && firstTransform.c == -1.0 && firstTransform.d == 0)
{
FirstAssetOrientation_= UIImageOrientationRight; isFirstAssetPortrait_ = YES;
}
if(firstTransform.a == 0 && firstTransform.b == -1.0 && firstTransform.c == 1.0 && firstTransform.d == 0)
{
FirstAssetOrientation_ = UIImageOrientationLeft; isFirstAssetPortrait_ = YES;
}
if(firstTransform.a == 1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == 1.0)
{
FirstAssetOrientation_ = UIImageOrientationUp;
}
if(firstTransform.a == -1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == -1.0)
{
FirstAssetOrientation_ = UIImageOrientationDown;
}
CGFloat FirstAssetScaleToFitRatio = 320.0/FirstAssetTrack.naturalSize.width;
if(isFirstAssetPortrait_)
{
FirstAssetScaleToFitRatio = 320.0/FirstAssetTrack.naturalSize.height;
CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
[FirstlayerInstruction setTransform:CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:kCMTimeZero];
}
else
{
CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
[FirstlayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor),CGAffineTransformMakeTranslation(0, 160)) atTime:kCMTimeZero];
}
[FirstlayerInstruction setOpacity:0.0 atTime:firstAsset.duration];
MainInstruction.layerInstructions = [NSArray arrayWithObjects:FirstlayerInstruction,nil];
AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
MainCompositionInst.frameDuration = CMTimeMake(1, 30);
MainCompositionInst.renderSize = CGSizeMake(320.0, 480.0);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"mergeVideo-%d.mov",arc4random() % 1000]];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.videoComposition = MainCompositionInst;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^
{
dispatch_async(dispatch_get_main_queue(), ^{
[self exportDidFinish:exporter];
});
}];
}
}
- (void)exportDidFinish:(AVAssetExportSession*)session
{
if(session.status == AVAssetExportSessionStatusCompleted)
{
NSURL *outputURL = session.outputURL;
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputURL])
{
if ([self.delegate respondsToSelector:@selector(videoExported)])
[self.delegate videoExported];
}
}
firstAsset = nil;
}
Add this after the //VIDEO TRACK part
//AUDIO TRACK
AVMutableCompositionTrack *firstAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[firstAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];
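With the audio composition track inserted over the same time range as the video, the exported file keeps the original soundtrack; the orientation-fixing layer instructions are unaffected, since they only apply to the video track.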
AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:@"your video url here..."] options:nil];
Add this after the AVMutableCompositionTrack is created: set the composition track's preferredTransform from the source video you want to export, so the export keeps the same orientation.
// Grab the source track from AVURLAsset for example.
AVAssetTrack *assetVideoTrack = [videoAsset tracksWithMediaType:AVMediaTypeVideo].lastObject;
// Grab the composition video track from AVMutableComposition you already made.
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition tracksWithMediaType:AVMediaTypeVideo].lastObject;
// Apply the original transform.
if (assetVideoTrack && compositionVideoTrack) {
[compositionVideoTrack setPreferredTransform:assetVideoTrack.preferredTransform];
}
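With the transform copied over, a plain export of the composition should keep the source orientation without building any AVMutableVideoComposition. A minimal sketch (the output path is illustrative):
// Sketch: the copied preferredTransform is enough to keep the original
// orientation in the exported file.
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL = [NSURL fileURLWithPath:outputPath]; // outputPath: your destination file
exporter.outputFileType = AVFileTypeQuickTimeMovie;
[exporter exportAsynchronouslyWithCompletionHandler:^{
    NSLog(@"Export status: %ld, error: %@", (long)exporter.status, exporter.error);
}];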
I am trying to cut an audio file for an iPhone project. I can cut it and save it, but any fade in / fade out I try to apply doesn't work: the audio file is saved cut, but not faded.
I am using the following code:
//
// NO PROBLEMS TO SEE HERE, MOVE ON
//
NSArray *documentsFolders = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
int currentFileNum = 10;
NSURL *url = [NSURL fileURLWithPath:[[documentsFolders objectAtIndex:0] stringByAppendingPathComponent:[NSString stringWithFormat:@"%@%d.%@", AUDIO_SOURCE_FILE_NAME, currentFileNum, AUDIO_SOURCE_FILE_EXTENSION]]];
NSDictionary *options = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES]
                                                    forKey:AVURLAssetPreferPreciseDurationAndTimingKey];
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:url options:options];
AVAssetExportSession *exporter = [AVAssetExportSession exportSessionWithAsset:asset presetName:AVAssetExportPresetAppleM4A];
for (NSString *filetype in exporter.supportedFileTypes) {
    if ([filetype isEqualToString:AVFileTypeAppleM4A]) {
        exporter.outputFileType = AVFileTypeAppleM4A;
        break;
    }
}
if (exporter.outputFileType == nil) {
    NSLog(@"Needed output file type not found? (%@)", AVFileTypeAppleM4A);
    //return;
}
NSString *outPath = [[documentsFolders objectAtIndex:0] stringByAppendingPathComponent:[NSString stringWithFormat:@"%@%d.%@", AUDIO_CUTTED_FILE_NAME, currentFileNum, AUDIO_SOURCE_FILE_EXTENSION]];
NSURL *const outUrl = [NSURL fileURLWithPath:outPath];
exporter.outputURL = outUrl;
float endTrimTime = CMTimeGetSeconds(asset.duration);
float startTrimTime = fminf(AUDIO_DURATION, endTrimTime);
CMTime startTrimCMTime = CMTimeSubtract(asset.duration, CMTimeMake(startTrimTime, 1));
exporter.timeRange = CMTimeRangeMake(startTrimCMTime, asset.duration);
//
// TRYING TO APPLY FADEIN FADEOUT, NOT WORKING, NO RESULTS, "CODE IGNORED"
//
AVMutableAudioMix *exportAudioMix = [AVMutableAudioMix audioMix];
NSMutableArray* inputParameters = [NSMutableArray arrayWithCapacity:1];
CMTime startFadeInTime = startTrimCMTime;
CMTime endFadeInTime = CMTimeMake(startTrimTime+1, 1);
CMTime startFadeOutTime = CMTimeMake(endTrimTime-1, 1);
CMTime endFadeOutTime = CMTimeMake(endTrimTime, 1);
CMTimeRange fadeInTimeRange = CMTimeRangeFromTimeToTime(startFadeInTime, endFadeInTime);
CMTimeRange fadeOutTimeRange = CMTimeRangeFromTimeToTime(startFadeOutTime, endFadeOutTime);
AVMutableAudioMixInputParameters *exportAudioMixInputParameters = [AVMutableAudioMixInputParameters audioMixInputParameters];
[exportAudioMixInputParameters setVolume:0.0 atTime:CMTimeMakeWithSeconds(startTrimTime-0.01, 1)];
[exportAudioMixInputParameters setVolumeRampFromStartVolume:0.0 toEndVolume:1.0 timeRange:fadeInTimeRange];
[exportAudioMixInputParameters setVolumeRampFromStartVolume:1.0 toEndVolume:0.0 timeRange:fadeOutTimeRange];
[inputParameters insertObject:exportAudioMixInputParameters atIndex:0];
exportAudioMix.inputParameters = inputParameters;
exporter.audioMix = exportAudioMix;
[exporter exportAsynchronouslyWithCompletionHandler:^(void) {
    NSString *message;
    switch (exporter.status) {
        case AVAssetExportSessionStatusFailed:
            message = [NSString stringWithFormat:@"Export failed. Error: %@", exporter.error.description];
            [asset release];
            break;
        case AVAssetExportSessionStatusCompleted: {
            [asset release];
            [self reallyConvert:currentFileNum];
            message = [NSString stringWithFormat:@"Export completed: %@", outPath];
            break;
        }
        case AVAssetExportSessionStatusCancelled:
            message = [NSString stringWithFormat:@"Export cancelled!"];
            [asset release];
            break;
        default:
            NSLog(@"Export 4 unhandled status: %ld", (long)exporter.status);
            [asset release];
            break;
    }
}];
You need to select the track. Instead of calling:
AVMutableAudioMixInputParameters *exportAudioMixInputParameters = [AVMutableAudioMixInputParameters audioMixInputParameters];
Call:
AVAssetTrack *assetTrack = [[asset tracksWithMediaType:AVMediaTypeAudio]objectAtIndex:0];
AVMutableAudioMixInputParameters *exportAudioMixInputParameters = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:assetTrack];
In your existing code you can also specify the track like this:
exportAudioMixInputParameters.trackID = [[[asset tracksWithMediaType:AVMediaTypeAudio]objectAtIndex:0] trackID];
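Put together with the fade ranges from your code, the whole audio-mix setup might look like this (a sketch reusing your variable names, untested):
// Bind the input parameters to the asset's audio track so the volume ramps
// are actually applied during export.
AVAssetTrack *assetTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
AVMutableAudioMixInputParameters *exportAudioMixInputParameters =
    [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:assetTrack];
[exportAudioMixInputParameters setVolumeRampFromStartVolume:0.0 toEndVolume:1.0 timeRange:fadeInTimeRange];
[exportAudioMixInputParameters setVolumeRampFromStartVolume:1.0 toEndVolume:0.0 timeRange:fadeOutTimeRange];
AVMutableAudioMix *exportAudioMix = [AVMutableAudioMix audioMix];
exportAudioMix.inputParameters = @[exportAudioMixInputParameters];
exporter.audioMix = exportAudioMix;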
Good luck!
Here is the solution. In my case setVolumeRampFromStartVolume:toEndVolume:timeRange: didn't work, so I set the volume in discrete steps instead.
AVMutableAudioMix *exportAudioMix = [AVMutableAudioMix audioMix];
AVMutableAudioMixInputParameters *exportAudioMixInputParameters = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:track];
//fade in
[exportAudioMixInputParameters setVolume:0.0 atTime:CMTimeMakeWithSeconds(start-1, 1)];
[exportAudioMixInputParameters setVolume:0.1 atTime:CMTimeMakeWithSeconds(start, 1)];
[exportAudioMixInputParameters setVolume:0.5 atTime:CMTimeMakeWithSeconds(start+1, 1)];
[exportAudioMixInputParameters setVolume:1.0 atTime:CMTimeMakeWithSeconds(start+2, 1)];
//fade out
[exportAudioMixInputParameters setVolume:1.0 atTime:CMTimeMakeWithSeconds((start+length-2), 1)];
[exportAudioMixInputParameters setVolume:0.5 atTime:CMTimeMakeWithSeconds((start+length-1), 1)];
[exportAudioMixInputParameters setVolume:0.1 atTime:CMTimeMakeWithSeconds((start+length), 1)];
exportAudioMix.inputParameters = [NSArray arrayWithObject:exportAudioMixInputParameters];
// configure export session output with all our parameters
exportSession.outputURL = [NSURL fileURLWithPath:filePath]; // output path
exportSession.outputFileType = AVFileTypeAppleM4A; // output file type
exportSession.timeRange = exportTimeRange; // trim time ranges
exportSession.audioMix = exportAudioMix; // fade in audio mix
// perform the export
[exportSession exportAsynchronouslyWithCompletionHandler:^{
    if (AVAssetExportSessionStatusCompleted == exportSession.status) {
        NSLog(@"AVAssetExportSessionStatusCompleted");
    } else if (AVAssetExportSessionStatusFailed == exportSession.status) {
        NSLog(@"AVAssetExportSessionStatusFailed");
    } else {
        NSLog(@"Export Session Status: %ld", (long)exportSession.status);
    }
}];
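Note that the snippet above leans on context not shown here: track, start, length, exportTimeRange, filePath, and exportSession all come from the surrounding project. A hedged guess at that setup (names match the snippet, values are illustrative):
// Assumed setup for the snippet above.
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:sourceURL options:nil]; // sourceURL: your input file
AVAssetTrack *track = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
CGFloat start = 0.0;                               // trim start, in seconds
CGFloat length = CMTimeGetSeconds(asset.duration); // trim length, in seconds
CMTimeRange exportTimeRange = CMTimeRangeMake(CMTimeMakeWithSeconds(start, 1),
                                              CMTimeMakeWithSeconds(length, 1));
AVAssetExportSession *exportSession = [AVAssetExportSession exportSessionWithAsset:asset
                                                                        presetName:AVAssetExportPresetAppleM4A];
NSString *filePath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"faded.m4a"]; // illustrative destination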
I've made the same mistake as you dozens of times! Apple's API is really weird on this:
CMTimeRange fadeInTimeRange = CMTimeRangeFromTimeToTime(startFadeInTime, endFadeInTime);
CMTimeRange fadeOutTimeRange = CMTimeRangeFromTimeToTime(startFadeOutTime, endFadeOutTime);
Should be:
CMTimeRangeMake(startFadeInTime, fadeInDuration);
CMTimeRangeMake(startFadeOutTime, fadeOutDuration);
CMTimeRangeMake creates a range from a start and a duration, while CMTimeRangeFromTimeToTime creates one from a start and an end time!
But most of the time the end time is also the duration (whenever the start time is 0), which is why so many people (including me) mix the two up.
And no Apple, that's not intuitive at all!
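To see the difference side by side, both of these describe the same one-second range starting at second 5 (timescale 600 is just the usual video-friendly choice):
// CMTimeRangeMake takes a start time and a DURATION...
CMTimeRange byDuration = CMTimeRangeMake(CMTimeMakeWithSeconds(5, 600),
                                         CMTimeMakeWithSeconds(1, 600));
// ...whereas CMTimeRangeFromTimeToTime takes a start time and an END time.
CMTimeRange byEndTime = CMTimeRangeFromTimeToTime(CMTimeMakeWithSeconds(5, 600),
                                                  CMTimeMakeWithSeconds(6, 600));
// CMTimeRangeEqual(byDuration, byEndTime) is true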
This is my working code, just take it and have a nice day!
+ (void)makeAudioFadeOutWithSourceURL:(NSURL *)sourceURL destinationURL:(NSURL *)destinationURL fadeOutBeginSecond:(NSInteger)beginTime fadeOutEndSecond:(NSInteger)endTime fadeOutBeginVolume:(CGFloat)beginVolume fadeOutEndVolume:(CGFloat)endVolume callback:(void (^)(BOOL))callback
{
    NSAssert(callback, @"need callback");
    NSParameterAssert(beginVolume >= 0 && beginVolume <= 1);
    NSParameterAssert(endVolume >= 0 && endVolume <= 1);
    BOOL sourceExists = [[NSFileManager defaultManager] fileExistsAtPath:sourceURL.path];
    NSAssert(sourceExists, @"source does not exist");
    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:sourceURL options:nil];
    AVAssetExportSession *exporter = [AVAssetExportSession exportSessionWithAsset:asset presetName:AVAssetExportPresetAppleM4A];
    exporter.outputURL = destinationURL;
    exporter.outputFileType = AVFileTypeAppleM4A;
    // Ramp the volume from beginVolume to endVolume across the fade-out window
    AVMutableAudioMix *exportAudioMix = [AVMutableAudioMix audioMix];
    AVMutableAudioMixInputParameters *exportAudioMixInputParameters = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:asset.tracks.lastObject];
    [exportAudioMixInputParameters setVolumeRampFromStartVolume:beginVolume
                                                    toEndVolume:endVolume
                                                      timeRange:CMTimeRangeMake(CMTimeMakeWithSeconds(beginTime, 1),
                                                                                CMTimeSubtract(CMTimeMakeWithSeconds(endTime, 1),
                                                                                               CMTimeMakeWithSeconds(beginTime, 1)))];
    exportAudioMix.inputParameters = @[exportAudioMixInputParameters];
    exporter.audioMix = exportAudioMix;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        AVAssetExportSessionStatus status = exporter.status;
        if (callback) {
            callback(status == AVAssetExportSessionStatusCompleted);
        }
        NSLog(@"Export done. Error: %@, status: %ld", exporter.error, (long)status);
    }];
}
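For completeness, a call site might look like this (AudioFader is a placeholder for whatever class the method lives on; paths are illustrative):
// Example call: fade from full volume to silence between seconds 10 and 12.
NSString *docs = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSURL *src = [NSURL fileURLWithPath:[docs stringByAppendingPathComponent:@"input.m4a"]];
NSURL *dst = [NSURL fileURLWithPath:[docs stringByAppendingPathComponent:@"faded.m4a"]];
[AudioFader makeAudioFadeOutWithSourceURL:src
                           destinationURL:dst
                       fadeOutBeginSecond:10
                         fadeOutEndSecond:12
                       fadeOutBeginVolume:1.0
                         fadeOutEndVolume:0.0
                                 callback:^(BOOL success) {
    NSLog(@"Fade-out export %@", success ? @"succeeded" : @"failed");
}];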