I am trying to combine several video clips into one using AVFoundation.
I can create a single video with AVMutableComposition using the code below:
AVMutableComposition *composition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
CMTime startTime = kCMTimeZero;
BOOL ok = NO;
/* videoClipPaths is an array of paths of the recorded video clips */
//for loop to combine clips into a single video
for (NSInteger i=0; i < [videoClipPaths count]; i++) {
NSString *path = (NSString*)[videoClipPaths objectAtIndex:i];
NSURL *url = [[NSURL alloc] initFileURLWithPath:path];
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:nil];
[url release];
AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVAssetTrack *audioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
//set the orientation
if(i == 0)
{
[compositionVideoTrack setPreferredTransform:videoTrack.preferredTransform];
}
ok = [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [asset duration]) ofTrack:videoTrack atTime:startTime error:nil];
ok = [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [asset duration]) ofTrack:audioTrack atTime:startTime error:nil];
startTime = CMTimeAdd(startTime, [asset duration]);
}
//export the combined video
NSString *combinedPath = /* path of the combined video*/;
NSURL *url = [[NSURL alloc] initFileURLWithPath: combinedPath];
AVAssetExportSession *exporter = [[[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPreset640x480] autorelease];
exporter.outputURL = url;
[url release];
exporter.outputFileType = [[exporter supportedFileTypes] objectAtIndex:0];
[exporter exportAsynchronouslyWithCompletionHandler:^(void){[self combineVideoFinished:exporter.outputURL status:exporter.status error:exporter.error];}];
The code above works fine if all the video clips were recorded in the same orientation (portrait or landscape). However, if I have a mixture of orientations in the clips, the final video will have part of it rotated 90 degrees to the right (or left).
I was wondering if there is a way to transform all clips to the same orientation (e.g. the orientation of the first clip) while composing them. From what I read in the Xcode documentation, AVMutableVideoCompositionLayerInstruction seems like it can be used to transform an AVAsset, but I am not sure how to create and apply several different layer instructions to the corresponding clips and then use them in the composition (AVMutableComposition*).
Any help would be appreciated!
This is what I do. I then use an AVAssetExportSession to create the actual file. But I warn you, the CGAffineTransforms are sometimes applied late, so you'll see a frame or two of the original before the video transforms. I have no clue why this happens; a different combination of videos will yield the expected result, but sometimes it's off.
AVMutableComposition *composition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.frameDuration = CMTimeMake(1,30);
videoComposition.renderScale = 1.0;
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];
// Get only the paths the user selected
NSMutableArray *array = [NSMutableArray array];
for (NSString *string in videoPathArray) {
if (![string isEqualToString:@""]) {
[array addObject:string];
}
}
self.videoPathArray = array;
float time = 0;
for (int i = 0; i<self.videoPathArray.count; i++) {
AVURLAsset *sourceAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:[videoPathArray objectAtIndex:i]] options:[NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES] forKey:AVURLAssetPreferPreciseDurationAndTimingKey]];
NSError *error = nil;
BOOL ok = NO;
AVAssetTrack *sourceVideoTrack = [[sourceAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
CGSize temp = CGSizeApplyAffineTransform(sourceVideoTrack.naturalSize, sourceVideoTrack.preferredTransform);
CGSize size = CGSizeMake(fabsf(temp.width), fabsf(temp.height));
CGAffineTransform transform = sourceVideoTrack.preferredTransform;
videoComposition.renderSize = sourceVideoTrack.naturalSize;
if (size.width > size.height) {
[layerInstruction setTransform:transform atTime:CMTimeMakeWithSeconds(time, 30)];
} else {
float s = size.width/size.height;
CGAffineTransform new = CGAffineTransformConcat(transform, CGAffineTransformMakeScale(s,s));
float x = (size.height - size.width*s)/2;
CGAffineTransform newer = CGAffineTransformConcat(new, CGAffineTransformMakeTranslation(x, 0));
[layerInstruction setTransform:newer atTime:CMTimeMakeWithSeconds(time, 30)];
}
ok = [compositionVideoTrack insertTimeRange:sourceVideoTrack.timeRange ofTrack:sourceVideoTrack atTime:[composition duration] error:&error];
if (!ok) {
// Deal with the error.
NSLog(#"something went wrong");
}
NSLog(#"\n source asset duration is %f \n source vid track timerange is %f %f \n composition duration is %f \n composition vid track time range is %f %f",CMTimeGetSeconds([sourceAsset duration]), CMTimeGetSeconds(sourceVideoTrack.timeRange.start),CMTimeGetSeconds(sourceVideoTrack.timeRange.duration),CMTimeGetSeconds([composition duration]), CMTimeGetSeconds(compositionVideoTrack.timeRange.start),CMTimeGetSeconds(compositionVideoTrack.timeRange.duration));
time += CMTimeGetSeconds(sourceVideoTrack.timeRange.duration);
}
instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
instruction.timeRange = compositionVideoTrack.timeRange;
videoComposition.instructions = [NSArray arrayWithObject:instruction];
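For reference, here is a minimal export sketch in the same spirit (the exportPath variable and the preset choice are assumptions); the important part is assigning the video composition to the export session, otherwise the layer-instruction transforms are ignored:
NSURL *outputURL = [NSURL fileURLWithPath:exportPath];
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL = outputURL;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
// This line is what applies the transforms built above.
exporter.videoComposition = videoComposition;
[exporter exportAsynchronouslyWithCompletionHandler:^{
    NSLog(@"export finished with status %ld, error %@", (long)exporter.status, exporter.error);
}];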
Here is @bogardon's answer in Swift 4+:
import ARKit
class ARKitSampleViewController: UIViewController {
var label: UILabel?
var planeFound = false
func plane(from anchor: ARPlaneAnchor?) -> SCNNode? {
let plane = SCNPlane(width: CGFloat(anchor?.extent.x ?? 0.0), height: CGFloat(anchor?.extent.z ?? 0.0))
plane.firstMaterial?.diffuse.contents = UIColor.clear
let planeNode = SCNNode(geometry: plane)
planeNode.position = SCNVector3Make(anchor?.center.x ?? 0.0, 0, anchor?.center.z ?? 0.0)
// SCNPlanes are vertically oriented in their local coordinate space.
// Rotate it to match the horizontal orientation of the ARPlaneAnchor.
planeNode.transform = SCNMatrix4MakeRotation(-.pi * 0.5, 1, 0, 0)
return planeNode
}
// MARK: - ARSCNViewDelegate
func renderer(_ renderer: SCNSceneRenderer, didAdd node: SCNNode, for anchor: ARAnchor) {
if planeFound == false {
if (anchor is ARPlaneAnchor) {
DispatchQueue.main.async(execute: {
self.planeFound = true
self.label?.text = "DANCEFLOOR FOUND. LET'S BOOGIE"
let overlay = UIView(frame: self.view.frame)
overlay.backgroundColor = UIColor.black
overlay.alpha = 0
if let label = self.label {
self.view.insertSubview(overlay, belowSubview: label)
}
UIView.animate(withDuration: 1.5, delay: 2, options: .curveEaseIn, animations: {
self.label?.alpha = 0
overlay.alpha = 0.5
}) { finished in
let planeAnchor = anchor as? ARPlaneAnchor
// Show the disco ball here
}
})
}
}
}
}
Related
I'm using AVFoundation to put a watermark in my movies. This works well with the code that's been going around on the internet and from Apple. But I don't want to show the watermark the whole time, and I want to show different watermarks in the same movie.
I have an AVAsset:
NSString *path = [[NSBundle mainBundle] pathForResource:@"test" ofType:@"MOV"];
NSURL *url = [[NSURL alloc] initFileURLWithPath: path];
avasset_camera = [AVAsset assetWithURL:url];
An AVMutableComposition:
AVMutableComposition *mix = [AVMutableComposition composition];
The UIImage is converted to a CALayer and then added to another layer to use with the animationTool:
UIImage *myImage = [UIImage imageNamed:@"watermark.png"];
CALayer *aLayer = [CALayer layer];
aLayer.contents = (id)myImage.CGImage;
aLayer.frame = CGRectMake(0, 0, 568, 320);
aLayer.opacity = 1.0;
CGSize videoSize = [avasset_camera naturalSize];
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:aLayer];
And then an AVMutableVideoComposition:
AVMutableVideoComposition* videoComp = [[AVMutableVideoComposition videoComposition] retain];
videoComp.renderSize = videoSize;
videoComp.frameDuration = CMTimeMake(1, 30);
AVVideoCompositionCoreAnimationTool *animationVideoTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
videoComp.animationTool = animationVideoTool;
The instruction for the VideoComposition:
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, avasset_camera.duration);
And the instruction for the layer:
AVAssetTrack *videoTrack = [[mix tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
videoComp.instructions = [NSArray arrayWithObject: instruction];
And then export it with an AVAssetExportSession with its videoComposition property set.
This results in a video with the watermark for the complete video. What I want to achieve is a video from the camera with the watermark for the first 5 seconds. It then disappears for some time, and then another image (also a watermark) is shown.
I'm stuck... I've watched the WWDC video on AVFoundation countless times, but it lacks in-depth detail.
When I change the timeRange of the instruction it doesn't export, because the duration (range) has to be the same as that of the AVAssetTrack. I've been trying to insert multiple instructions, but so far with no success.
AVMutableVideoCompositionLayerInstruction has a method, setOpacityRampFromStartOpacity:toEndOpacity:timeRange:, which you can set for different segments (non-overlapping within each layer instruction). A not-ideal but workable solution would be to create two video tracks, one with the original video and another with the watermark, and ramp the opacity as needed (showing only the original on some segments and the watermarked track on others).
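For example, here is a minimal sketch of that two-track idea, reusing mix and videoComp from your code and replacing the single instruction/layerInstruction you create above. The variables plainTrack and watermarkedTrack (two composition tracks built from the same source, one pre-rendered with the watermark) and the 5-second timings are assumptions:
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, mix.duration);
// One layer instruction per composition track; the first one in the array is composited on top.
AVMutableVideoCompositionLayerInstruction *watermarkedLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:watermarkedTrack];
AVMutableVideoCompositionLayerInstruction *plainLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:plainTrack];
// Watermarked track fully visible for the first 5 seconds, then faded out over 1 second
// so the plain track underneath shows through.
[watermarkedLayer setOpacity:1.0 atTime:kCMTimeZero];
[watermarkedLayer setOpacityRampFromStartOpacity:1.0 toEndOpacity:0.0 timeRange:CMTimeRangeMake(CMTimeMake(5, 1), CMTimeMake(1, 1))];
// Later, ramp it back in to show the second watermark (ramps must not overlap within one layer instruction).
[watermarkedLayer setOpacityRampFromStartOpacity:0.0 toEndOpacity:1.0 timeRange:CMTimeRangeMake(CMTimeMake(15, 1), CMTimeMake(1, 1))];
instruction.layerInstructions = @[watermarkedLayer, plainLayer];
videoComp.instructions = @[instruction];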
I'm trying to add a fade-in to a WAV file and then export a new file with the added fade using AVAssetExportSession. All the examples I have seen export as m4a. Is it even possible to do this with WAV or AIF?
The error I get is:
AVAssetExportSessionStatusFailed Error Domain=AVFoundationErrorDomain Code=-11822 "Cannot Open" UserInfo=0x1f01c9f0 {NSLocalizedDescription=Cannot Open, NSLocalizedFailureReason=This media format is not supported.}
My code looks like this:
NSString *inpath = [path stringByAppendingFormat:@"/%@",file];
NSString *ename = [file stringByDeletingPathExtension];
NSString *incname = [ename stringByAppendingString:@"1t"];
NSString *outname = [incname stringByAppendingPathExtension:@"wav"];
NSString *outpath = [path stringByAppendingFormat:@"/%@",outname];
NSURL *urlpath = [NSURL fileURLWithPath:inpath];
NSURL *urlout = [NSURL fileURLWithPath:outpath];
NSDictionary *options = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES]
forKey:AVURLAssetPreferPreciseDurationAndTimingKey];
AVURLAsset *anAsset = [[AVURLAsset alloc] initWithURL:urlpath options:options];
//check the soundfile is greater than 50seconds
CMTime assetTime = [anAsset duration];
Float64 duration = CMTimeGetSeconds(assetTime);
if (duration < 50.0) return NO;
// get the first audio track
NSArray *tracks = [anAsset tracksWithMediaType:AVMediaTypeAudio];
if ([tracks count] == 0) return NO;
AVAssetTrack *track = [tracks objectAtIndex:0];
// create trim time range - 20 seconds starting from 30 seconds into the asset
CMTime startTime = CMTimeMake(30, 1);
CMTime stopTime = CMTimeMake(50, 1);
CMTimeRange exportTimeRange = CMTimeRangeFromTimeToTime(startTime, stopTime);
// create fade in time range - 10 seconds starting at the beginning of trimmed asset
CMTime startFadeInTime = startTime;
CMTime endFadeInTime = CMTimeMake(40, 1);
CMTimeRange fadeInTimeRange = CMTimeRangeFromTimeToTime(startFadeInTime,
endFadeInTime);
// setup audio mix
AVMutableAudioMix *exportAudioMix = [AVMutableAudioMix audioMix];
AVMutableAudioMixInputParameters *exportAudioMixInputParameters =
[AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:track];
[exportAudioMixInputParameters setVolumeRampFromStartVolume:0.0 toEndVolume:1.0 timeRange:fadeInTimeRange];
exportAudioMix.inputParameters = [NSArray arrayWithObject:exportAudioMixInputParameters];
AVAssetExportSession *exportSession = [AVAssetExportSession
exportSessionWithAsset:anAsset presetName:AVAssetExportPresetPassthrough];
//NSArray *listof = [AVAssetExportSession exportPresetsCompatibleWithAsset:anAsset];
//NSLog(#"LISTOF %#",listof);
id desc = [track.formatDescriptions objectAtIndex:0];
const AudioStreamBasicDescription *audioDesc = CMAudioFormatDescriptionGetStreamBasicDescription((CMAudioFormatDescriptionRef)desc);
FourCharCode formatID = audioDesc->mFormatID;
NSString *fileType = nil;
NSString *ex = nil;
switch (formatID) {
case kAudioFormatLinearPCM:
{
UInt32 flags = audioDesc->mFormatFlags;
if (flags & kAudioFormatFlagIsBigEndian) {
fileType = #"public.aiff-audio";
ex = #"aif";
} else {
fileType = #"com.microsoft.waveform-audio";
ex = #"wav";
}
}
break;
case kAudioFormatMPEGLayer3:
fileType = #"com.apple.quicktime-movie";
ex = #"mp3";
break;
case kAudioFormatMPEG4AAC:
fileType = #"com.apple.m4a-audio";
ex = #"m4a";
break;
case kAudioFormatAppleLossless:
fileType = #"com.apple.m4a-audio";
ex = #"m4a";
break;
default:
break;
}
exportSession.outputFileType = fileType;
exportSession.outputURL = urlout;
//exportSession.outputFileType = AVFileTypeWAVE; // output file type
exportSession.timeRange = exportTimeRange; // trim time range
exportSession.audioMix = exportAudioMix; // fade in audio mix
// perform the export
[exportSession exportAsynchronouslyWithCompletionHandler:^{
if (AVAssetExportSessionStatusCompleted == exportSession.status) {
NSLog(#"AVAssetExportSessionStatusCompleted");
} else if (AVAssetExportSessionStatusFailed == exportSession.status) {
// a failure may happen because of an event out of your control
// for example, an interruption like a phone call comming in
// make sure and handle this case appropriately
NSLog(#"AVAssetExportSessionStatusFailed %#",exportSession.error);
} else {
NSLog(#"Export Session Status: %d", exportSession.status);
}
}];
return YES;
}
You can't do that with AVAssetExportSession because the presets are quite fixed in their usage. A preset value of AVAssetExportPresetPassthrough will keep your input formats on output.
As your task involves manipulating the audio sample buffers directly, you should use the other setup AVFoundation gives you: a paired AVAssetReader and AVAssetWriter.
You'll find proper sample code in AVReaderWriterOSX from the Apple developer sample code. This should also work on iOS, although you will have different I/O format settings available. Decompressing the audio to PCM and writing it back out as an uncompressed .wav file should be possible.
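For what it's worth, here is a rough, untested sketch of that reader/writer route, reusing anAsset, track, exportAudioMix, exportTimeRange and urlout from your code. The linear PCM output settings are assumptions and should be matched to your source file:
NSError *rwError = nil;
AVAssetReader *reader = [AVAssetReader assetReaderWithAsset:anAsset error:&rwError];
// Passing nil audio settings yields decompressed linear PCM in the file's native layout.
AVAssetReaderAudioMixOutput *readerOutput = [AVAssetReaderAudioMixOutput assetReaderAudioMixOutputWithAudioTracks:@[track] audioSettings:nil];
readerOutput.audioMix = exportAudioMix; // the fade is applied while reading
[reader addOutput:readerOutput];
reader.timeRange = exportTimeRange; // trim, as with the export session
NSDictionary *pcmSettings = @{ AVFormatIDKey : @(kAudioFormatLinearPCM),
                               AVSampleRateKey : @44100,
                               AVNumberOfChannelsKey : @2,
                               AVLinearPCMBitDepthKey : @16,
                               AVLinearPCMIsFloatKey : @NO,
                               AVLinearPCMIsBigEndianKey : @NO,
                               AVLinearPCMIsNonInterleaved : @NO };
AVAssetWriter *writer = [AVAssetWriter assetWriterWithURL:urlout fileType:AVFileTypeWAVE error:&rwError];
AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:pcmSettings];
[writer addInput:writerInput];
[reader startReading];
[writer startWriting];
// Start the session at the trim start so the trimmed buffers' timestamps line up.
[writer startSessionAtSourceTime:exportTimeRange.start];
dispatch_queue_t queue = dispatch_queue_create("wav.export", NULL);
[writerInput requestMediaDataWhenReadyOnQueue:queue usingBlock:^{
    while ([writerInput isReadyForMoreMediaData]) {
        CMSampleBufferRef buffer = [readerOutput copyNextSampleBuffer];
        if (buffer) {
            [writerInput appendSampleBuffer:buffer];
            CFRelease(buffer);
        } else {
            [writerInput markAsFinished];
            [writer finishWritingWithCompletionHandler:^{
                NSLog(@"Finished writing, writer status %ld", (long)writer.status);
            }];
            break;
        }
    }
}];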
I currently have an iPhone app that lets the users take video, upload it to the server, and allows others to view their video from the app. Never had an issue with the video's orientation until I went to make a web site to view the different videos (along with other content).
I consume the videos from the web service and load them with ajax using videojs, but every single one of them is rotated left 90 degrees. From what I've read, it sounds like orientation information can be read in iOS, but not on a website. Is there any way to save a new orientation for the video on the iPhone before sending it to the server?
//change Orientation Of video
//in videoorientation.h
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
@interface videoorientationViewController : UIViewController
@property AVMutableComposition *mutableComposition;
@property AVMutableVideoComposition *mutableVideoComposition;
@property AVMutableAudioMix *mutableAudioMix;
@property AVAssetExportSession *exportSession;
- (void)performWithAsset : (NSURL *)moviename;
@end
In viewcontroller.m:
- (void)performWithAsset : (NSURL *)moviename
{
self.mutableComposition=nil;
self.mutableVideoComposition=nil;
self.mutableAudioMix=nil;
// NSString* filename = [NSString stringWithFormat:@"temp1.mov"];
//
// NSLog(@"file name== %@",filename);
//
// [[NSUserDefaults standardUserDefaults]setObject:filename forKey:@"currentName"];
// NSString* path = [NSTemporaryDirectory() stringByAppendingPathComponent:filename];
// NSLog(@"file number %i",_currentFile);
// NSURL* url = [NSURL fileURLWithPath:path];
// NSString *videoURL = [[NSBundle mainBundle] pathForResource:@"Movie" ofType:@"m4v"];
AVAsset *asset = [[AVURLAsset alloc] initWithURL:moviename options:nil];
AVMutableVideoCompositionInstruction *instruction = nil;
AVMutableVideoCompositionLayerInstruction *layerInstruction = nil;
CGAffineTransform t1;
CGAffineTransform t2;
AVAssetTrack *assetVideoTrack = nil;
AVAssetTrack *assetAudioTrack = nil;
// Check if the asset contains video and audio tracks
if ([[asset tracksWithMediaType:AVMediaTypeVideo] count] != 0) {
assetVideoTrack = [asset tracksWithMediaType:AVMediaTypeVideo][0];
}
if ([[asset tracksWithMediaType:AVMediaTypeAudio] count] != 0) {
assetAudioTrack = [asset tracksWithMediaType:AVMediaTypeAudio][0];
}
CMTime insertionPoint = kCMTimeZero;
NSError *error = nil;
// Step 1
// Create a composition with the given asset and insert audio and video tracks into it from the asset
if (!self.mutableComposition) {
// Check whether a composition has already been created, i.e, some other tool has already been applied
// Create a new composition
self.mutableComposition = [AVMutableComposition composition];
// Insert the video and audio tracks from AVAsset
if (assetVideoTrack != nil) {
AVMutableCompositionTrack *compositionVideoTrack = [self.mutableComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [asset duration]) ofTrack:assetVideoTrack atTime:insertionPoint error:&error];
}
if (assetAudioTrack != nil) {
AVMutableCompositionTrack *compositionAudioTrack = [self.mutableComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [asset duration]) ofTrack:assetAudioTrack atTime:insertionPoint error:&error];
}
}
// Step 2
// Translate the composition to compensate the movement caused by rotation (since rotation would cause it to move out of frame)
t1 = CGAffineTransformMakeTranslation(assetVideoTrack.naturalSize.height, 0.0);
float width=assetVideoTrack.naturalSize.width;
float height=assetVideoTrack.naturalSize.height;
float toDiagonal=sqrt(width*width+height*height);
float toDiagonalAngle = radiansToDegrees(acosf(width/toDiagonal));
float toDiagonalAngle2=90-radiansToDegrees(acosf(width/toDiagonal));
float toDiagonalAngleComple;
float toDiagonalAngleComple2;
float finalHeight = 0.0;
float finalWidth = 0.0;
float degrees=90;
if (degrees >= 0 && degrees <= 90) {
toDiagonalAngleComple=toDiagonalAngle+degrees;
toDiagonalAngleComple2=toDiagonalAngle2+degrees;
finalHeight=ABS(toDiagonal*sinf(degreesToRadians(toDiagonalAngleComple)));
finalWidth=ABS(toDiagonal*sinf(degreesToRadians(toDiagonalAngleComple2)));
t1 = CGAffineTransformMakeTranslation(height*sinf(degreesToRadians(degrees)), 0.0);
}
else if (degrees > 90 && degrees <= 180) {
float degrees2 = degrees-90;
toDiagonalAngleComple=toDiagonalAngle+degrees2;
toDiagonalAngleComple2=toDiagonalAngle2+degrees2;
finalHeight=ABS(toDiagonal*sinf(degreesToRadians(toDiagonalAngleComple2)));
finalWidth=ABS(toDiagonal*sinf(degreesToRadians(toDiagonalAngleComple)));
t1 = CGAffineTransformMakeTranslation(width*sinf(degreesToRadians(degrees2))+height*cosf(degreesToRadians(degrees2)), height*sinf(degreesToRadians(degrees2)));
}
else if (degrees >= -90 && degrees < 0) {
float degrees2 = degrees-90;
float degreesabs = ABS(degrees);
toDiagonalAngleComple=toDiagonalAngle+degrees2;
toDiagonalAngleComple2=toDiagonalAngle2+degrees2;
finalHeight=ABS(toDiagonal*sinf(degreesToRadians(toDiagonalAngleComple2)));
finalWidth=ABS(toDiagonal*sinf(degreesToRadians(toDiagonalAngleComple)));
t1 = CGAffineTransformMakeTranslation(0, width*sinf(degreesToRadians(degreesabs)));
}
else if (degrees >= -180 && degrees < -90) {
float degreesabs = ABS(degrees);
float degreesplus = degreesabs-90;
toDiagonalAngleComple=toDiagonalAngle+degrees;
toDiagonalAngleComple2=toDiagonalAngle2+degrees;
finalHeight=ABS(toDiagonal*sinf(degreesToRadians(toDiagonalAngleComple)));
finalWidth=ABS(toDiagonal*sinf(degreesToRadians(toDiagonalAngleComple2)));
t1 = CGAffineTransformMakeTranslation(width*sinf(degreesToRadians(degreesplus)), height*sinf(degreesToRadians(degreesplus))+width*cosf(degreesToRadians(degreesplus)));
}
// Rotate transformation
t2 = CGAffineTransformRotate(t1, degreesToRadians(degrees));
//t2 = CGAffineTransformRotate(t1, -90);
// Step 3
// Set the appropriate render sizes and rotational transforms
if (!self.mutableVideoComposition) {
// Create a new video composition
self.mutableVideoComposition = [AVMutableVideoComposition videoComposition];
// self.mutableVideoComposition.renderSize = CGSizeMake(assetVideoTrack.naturalSize.height,assetVideoTrack.naturalSize.width);
self.mutableVideoComposition.renderSize = CGSizeMake(finalWidth,finalHeight);
self.mutableVideoComposition.frameDuration = CMTimeMake(1,30);
// The rotate transform is set on a layer instruction
instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [self.mutableComposition duration]);
layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:(self.mutableComposition.tracks)[0]];
[layerInstruction setTransform:t2 atTime:kCMTimeZero];
} else {
self.mutableVideoComposition.renderSize = CGSizeMake(self.mutableVideoComposition.renderSize.height, self.mutableVideoComposition.renderSize.width);
// Extract the existing layer instruction on the mutableVideoComposition
instruction = (self.mutableVideoComposition.instructions)[0];
layerInstruction = (instruction.layerInstructions)[0];
// Check if a transform already exists on this layer instruction, this is done to add the current transform on top of previous edits
CGAffineTransform existingTransform;
if (![layerInstruction getTransformRampForTime:[self.mutableComposition duration] startTransform:&existingTransform endTransform:NULL timeRange:NULL]) {
[layerInstruction setTransform:t2 atTime:kCMTimeZero];
} else {
// Note: the point of origin for rotation is the upper left corner of the composition, t3 is to compensate for origin
CGAffineTransform t3 = CGAffineTransformMakeTranslation(-1*assetVideoTrack.naturalSize.height/2, 0.0);
CGAffineTransform newTransform = CGAffineTransformConcat(existingTransform, CGAffineTransformConcat(t2, t3));
[layerInstruction setTransform:newTransform atTime:kCMTimeZero];
}
}
// Step 4
// Add the transform instructions to the video composition
instruction.layerInstructions = @[layerInstruction];
self.mutableVideoComposition.instructions = @[instruction];
// Step 5
// Notify AVSEViewController about rotation operation completion
// [[NSNotificationCenter defaultCenter] postNotificationName:AVSEEditCommandCompletionNotification object:self];
[self performWithAssetExport];
}
- (void)performWithAssetExport
{
// Step 1
// Create an outputURL to which the exported movie will be saved
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *outputURL = paths[0];
NSFileManager *manager = [NSFileManager defaultManager];
[manager createDirectoryAtPath:outputURL withIntermediateDirectories:YES attributes:nil error:nil];
outputURL = [outputURL stringByAppendingPathComponent:#"output.mov"];
// Remove Existing File
[manager removeItemAtPath:outputURL error:nil];
// Step 2
// Create an export session with the composition and write the exported movie to the photo library
self.exportSession = [[AVAssetExportSession alloc] initWithAsset:[self.mutableComposition copy] presetName:AVAssetExportPreset1280x720];
self.exportSession.videoComposition = self.mutableVideoComposition;
self.exportSession.audioMix = self.mutableAudioMix;
self.exportSession.outputURL = [NSURL fileURLWithPath:outputURL];
self.exportSession.outputFileType=AVFileTypeQuickTimeMovie;
[self.exportSession exportAsynchronouslyWithCompletionHandler:^(void){
switch (self.exportSession.status) {
case AVAssetExportSessionStatusCompleted:
//[self playfunction];
[[NSNotificationCenter defaultCenter]postNotificationName:#"Backhome" object:nil];
// Step 3
// Notify AVSEViewController about export completion
break;
case AVAssetExportSessionStatusFailed:
NSLog(#"Failed:%#",self.exportSession.error);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(#"Canceled:%#",self.exportSession.error);
break;
default:
break;
}
}];
}
It appears that this issue occurs because videojs has trouble reading the orientation metadata; more information here: http://help.videojs.com/discussions/problems/1508-video-orientation-for-iphone-wrong
Based on the implied solution, you should make sure that when you save the video you are using AVFoundation to set the orientation value. Information on how to do that is available in this previous Stack Overflow post: How do I set the orientation for a frame-by-frame-generated video using AVFoundation?
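As a hypothetical illustration of that approach: if you write the recording yourself with an AVAssetWriter, you can set the transform on the video writer input before writing, so the rotation is baked into the file's track metadata (assetWriter, videoSettings, and the 90-degree angle below are assumptions for a portrait recording):
AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
writerInput.transform = CGAffineTransformMakeRotation(M_PI_2); // mark the track as rotated 90 degrees
[assetWriter addInput:writerInput];
If the web player ignores rotation metadata entirely, the more robust fix is to re-export the clip through a video composition that physically applies the transform, as in the code above.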
This question already has an answer here:
Grabbing First Frame of a Video - Thumbnail Resolution - iPhone
(1 answer)
I am using code based on the following thread to generate a video thumbnail:
Getting a thumbnail from a video url or data in iPhone SDK
My code is as follows:
if (selectionType == kUserSelectedMedia) {
NSURL * assetURL = [info objectForKey:UIImagePickerControllerReferenceURL];
AVURLAsset *asset=[[AVURLAsset alloc] initWithURL:assetURL options:nil];
AVAssetImageGenerator *generator = [[AVAssetImageGenerator alloc] initWithAsset:asset];
generator.appliesPreferredTrackTransform=TRUE;
[asset release];
CMTime thumbTime = CMTimeMakeWithSeconds(0,30);
//NSLog(#"Starting Async Queue");
AVAssetImageGeneratorCompletionHandler handler = ^(CMTime requestedTime, CGImageRef im, CMTime actualTime, AVAssetImageGeneratorResult result, NSError *error){
if (result != AVAssetImageGeneratorSucceeded) {
NSLog(#"couldn't generate thumbnail, error:%#", error);
}
//NSLog(#"Updating UI");
selectMediaButton.hidden = YES;
selectedMedia.hidden = NO;
cancelMediaChoiceButton.hidden = NO;
whiteBackgroundMedia.hidden = NO;
//Convert CGImage thumbnail to UIImage.
UIImage * thumbnail = [UIImage imageWithCGImage:im];
int checkSizeW = thumbnail.size.width;
int checkSizeH = thumbnail.size.height;
NSLog(#"Image width is %d", checkSizeW);
NSLog(#"Image height is %d", checkSizeH);
if (checkSizeW >=checkSizeH) {
//This is a landscape image or video.
NSLog(#"This is a landscape image - will resize automatically");
}
if (checkSizeH >=checkSizeW) {
//This is a portrait image or video.
selectedIntroHeight = thumbnail.size.height;
}
//Set the image once resized.
selectedMedia.image = thumbnail;
//Set out confirm button BOOL to YES and check if we need to display confirm button.
mediaReady = YES;
[self checkIfConfirmButtonShouldBeDisplayed];
//[button setImage:[UIImage imageWithCGImage:im] forState:UIControlStateNormal];
//thumbImg=[[UIImage imageWithCGImage:im] retain];
[generator release];
};
CGSize maxSize = CGSizeMake(320, 180);
generator.maximumSize = maxSize;
[generator generateCGImagesAsynchronouslyForTimes:[NSArray arrayWithObject:[NSValue valueWithCMTime:thumbTime]] completionHandler:handler];
}
}
The issue is that there is a delay of about 5-10 seconds in generating the thumbnail image. Is there any way I could improve the speed of this code and generate the thumbnail more quickly?
Thank you.
This is generic code; just pass the path of the media file and set the ratio between 0 and 1.0.
+ (UIImage*)previewFromFileAtPath:(NSString*)path ratio:(CGFloat)ratio
{
AVAsset *asset = [AVURLAsset assetWithURL:[NSURL fileURLWithPath:path]];
AVAssetImageGenerator *imageGenerator = [[AVAssetImageGenerator alloc]initWithAsset:asset];
CMTime duration = asset.duration;
CGFloat durationInSeconds = duration.value / duration.timescale;
CMTime time = CMTimeMakeWithSeconds(durationInSeconds * ratio, (int)duration.value);
CGImageRef imageRef = [imageGenerator copyCGImageAtTime:time actualTime:NULL error:NULL];
UIImage *thumbnail = [UIImage imageWithCGImage:imageRef];
CGImageRelease(imageRef);
return thumbnail;
}
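A hypothetical call site (the VideoUtils class name and the videoPath variable are assumptions, since the snippet above does not show its enclosing class):
UIImage *preview = [VideoUtils previewFromFileAtPath:videoPath ratio:0.5]; // frame from the middle of the clip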
Swift solution:
func previewImageForLocalVideo(url:NSURL) -> UIImage?
{
let asset = AVAsset(URL: url)
let imageGenerator = AVAssetImageGenerator(asset: asset)
imageGenerator.appliesPreferredTrackTransform = true
var time = asset.duration
//If possible - take not the first frame (it could be completely black or white on camara's videos)
time.value = min(time.value, 2)
do {
let imageRef = try imageGenerator.copyCGImageAtTime(time, actualTime: nil)
return UIImage(CGImage: imageRef)
}
catch let error as NSError
{
print("Image generation failed with error \(error)")
return nil
}
}
I have an application that lets the user append multiple video assets and add one or more audio tracks to a composition. All seems to work; I can play the resulting composition using AVPlayer (although the audio level seems low). After exporting the composition to a file, the audio track is missing.
My code is largely based on the AVEditDemo sample code from the WWDC10 sessions. I have double-checked my code against the AVEditDemo code and cannot find what the problem could be. I have also checked forums, but there are not many AVFoundation-related posts/solutions.
Any help is most welcome. Cheers,
Jean-Pierre
Method to build the composition with extra audio tracks
Notes:
compositionArray: contains assets to build the composition.
AssetView: object containing a AVURLAsset.
- (AVMutableComposition *)buildCompositionObjects
{
// no assets available, return nil
if ([compositionArray count] < 1)
{
return nil;
}
// get the asset video size
AssetView * view = [compositionArray objectAtIndex:0];
AVURLAsset * asset = view.asset;
CGSize videoSize = [asset naturalSize];
// create new composition
AVMutableComposition * cmp = [AVMutableComposition composition];
// set the size
cmp.naturalSize = videoSize;
// build composition
[self buildComposition:cmp];
// add any extra audio track
[self addAudioTrackToComposition:cmp];
// return the new composition
return cmp;
}
Method to build the base composition
- (void) buildComposition:(AVMutableComposition *)cmp
{
// set the start time of contiguous tracks
CMTime nextClipStartTime = kCMTimeZero;
// clear the composition
[cmp removeTimeRange:CMTimeRangeMake(CMTimeMake(0, 600), cmp.duration)];
// add audio and video tracks
AVMutableCompositionTrack *compositionVideoTrack = [cmp addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *compositionAudioTrack = [cmp addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
// loop through all available assets
for (AssetView * view in compositionArray)
{
AVURLAsset *asset = view.asset;
CMTimeRange timeRangeInAsset;
timeRangeInAsset = CMTimeRangeMake(kCMTimeZero, [asset duration]);
AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
[compositionVideoTrack insertTimeRange:timeRangeInAsset ofTrack:clipVideoTrack atTime:nextClipStartTime error:nil];
// make sure there is an audio track. Had to do this because of the missing audio track issue; it crashes (out of bounds) if the check is not done.
if ([[asset tracksWithMediaType:AVMediaTypeAudio] count] > 0)
{
AVAssetTrack *clipAudioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
[compositionAudioTrack insertTimeRange:timeRangeInAsset ofTrack:clipAudioTrack atTime:nextClipStartTime error:nil];
}
// adjust next asset start
nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRangeInAsset.duration);
}
}
Method to add the additional audio tracks
- (void)addAudioTrackToComposition:(AVMutableComposition *)cmp
{
// no audio track, return
if ([audioTracks count] < 1)
{
return;
}
// base track ID for additional audio tracks
long baseTrackID = 100;
for (AVURLAsset * audioAsset in audioTracks)
{
// make sure the audio track fits in the composition
CMTimeRange commentaryTimeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
if (CMTIME_COMPARE_INLINE(CMTimeRangeGetEnd(commentaryTimeRange), >, [cmp duration]))
{
commentaryTimeRange.duration = CMTimeSubtract([cmp duration], commentaryTimeRange.start);
}
// Add the audio track.
AVMutableCompositionTrack *compositionCommentaryTrack = [cmp addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:baseTrackID++];
[compositionCommentaryTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, commentaryTimeRange.duration) ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:commentaryTimeRange.start error:nil];
}
}
Method to export the composition
- (void) save
{
NSString * eventFolder = [NSString stringWithFormat:@"%@/%@-%@",
DOCUMENTS_FOLDER,
event.title,
[StringUtils stringForDate:event.timeStamp]];
NSString * exportVideoPath = [NSString stringWithFormat:@"%@/Edits/%@.MOV", eventFolder, [StringUtils stringForDate:[NSDate date]]];
video.path = exportVideoPath;
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetHighestQuality];
NSURL *exportURL = [NSURL fileURLWithPath:exportVideoPath];
exportSession.outputURL = exportURL;
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
[exportSession exportAsynchronouslyWithCompletionHandler:^
{
switch (exportSession.status)
{
case AVAssetExportSessionStatusFailed:
{
NSLog (#"FAIL");
[self performSelectorOnMainThread:#selector (doPostExportFailed:)
withObject:nil
waitUntilDone:NO];
break;
}
case AVAssetExportSessionStatusCompleted:
{
NSLog (#"SUCCESS");
[self performSelectorOnMainThread:#selector (doPostExportSuccess:)
withObject:nil
waitUntilDone:NO];
break;
}
case AVAssetExportSessionStatusCancelled:
{
NSLog (#"CANCELED");
[self performSelectorOnMainThread:#selector (doPostExportCancelled:)
withObject:nil
waitUntilDone:NO];
break;
}
};
}];
}
I did not get any reply, but I got it working by adding one line of code:
exportSession.shouldOptimizeForNetworkUse = YES;
before
[exportSession exportAsynchronouslyWithCompletionHandler:^
I'm not sure why this fixes the problem, since it seems totally unrelated, but I was able to export a dozen compositions with up to 5 extra audio tracks without any issues.
I hope this helps other people who have been scratching their heads for days.
Cheers