I currently have an iPhone app that lets users take video, upload it to the server, and allows others to view their videos from the app. I never had an issue with the videos' orientation until I went to make a web site to view the different videos (along with other content).
I consume the videos from the web service and load them with AJAX using videojs, but every single one of them is rotated 90 degrees to the left. From what I've read, it sounds like the orientation information can be read on iOS, but not on a website. Is there any way to save a new orientation for the video on the iPhone before sending it to the server?
// Change the orientation of a video
// In videoorientationViewController.h
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>

@interface videoorientationViewController : UIViewController

@property (nonatomic, strong) AVMutableComposition *mutableComposition;
@property (nonatomic, strong) AVMutableVideoComposition *mutableVideoComposition;
@property (nonatomic, strong) AVMutableAudioMix *mutableAudioMix;
@property (nonatomic, strong) AVAssetExportSession *exportSession;

- (void)performWithAsset:(NSURL *)moviename;

@end
In videoorientationViewController.m:
// Helper macros assumed to be defined elsewhere in the project
#define degreesToRadians(x) (M_PI * (x) / 180.0)
#define radiansToDegrees(x) ((x) * 180.0 / M_PI)
- (void)performWithAsset:(NSURL *)moviename
{
self.mutableComposition = nil;
self.mutableVideoComposition = nil;
self.mutableAudioMix = nil;
// NSString *filename = [NSString stringWithFormat:@"temp1.mov"];
// NSLog(@"file name == %@", filename);
// [[NSUserDefaults standardUserDefaults] setObject:filename forKey:@"currentName"];
// NSString *path = [NSTemporaryDirectory() stringByAppendingPathComponent:filename];
// NSLog(@"file number %i", _currentFile);
// NSURL *url = [NSURL fileURLWithPath:path];
// NSString *videoURL = [[NSBundle mainBundle] pathForResource:@"Movie" ofType:@"m4v"];
AVAsset *asset = [[AVURLAsset alloc] initWithURL:moviename options:nil];
AVMutableVideoCompositionInstruction *instruction = nil;
AVMutableVideoCompositionLayerInstruction *layerInstruction = nil;
CGAffineTransform t1;
CGAffineTransform t2;
AVAssetTrack *assetVideoTrack = nil;
AVAssetTrack *assetAudioTrack = nil;
// Check if the asset contains video and audio tracks
if ([[asset tracksWithMediaType:AVMediaTypeVideo] count] != 0) {
assetVideoTrack = [asset tracksWithMediaType:AVMediaTypeVideo][0];
}
if ([[asset tracksWithMediaType:AVMediaTypeAudio] count] != 0) {
assetAudioTrack = [asset tracksWithMediaType:AVMediaTypeAudio][0];
}
CMTime insertionPoint = kCMTimeZero;
NSError *error = nil;
// Step 1
// Create a composition with the given asset and insert audio and video tracks into it from the asset
if (!self.mutableComposition) {
// Check whether a composition has already been created, i.e, some other tool has already been applied
// Create a new composition
self.mutableComposition = [AVMutableComposition composition];
// Insert the video and audio tracks from AVAsset
if (assetVideoTrack != nil) {
AVMutableCompositionTrack *compositionVideoTrack = [self.mutableComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [asset duration]) ofTrack:assetVideoTrack atTime:insertionPoint error:&error];
}
if (assetAudioTrack != nil) {
AVMutableCompositionTrack *compositionAudioTrack = [self.mutableComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [asset duration]) ofTrack:assetAudioTrack atTime:insertionPoint error:&error];
}
}
// Step 2
// Translate the composition to compensate the movement caused by rotation (since rotation would cause it to move out of frame)
t1 = CGAffineTransformMakeTranslation(assetVideoTrack.naturalSize.height, 0.0);
float width=assetVideoTrack.naturalSize.width;
float height=assetVideoTrack.naturalSize.height;
float toDiagonal=sqrt(width*width+height*height);
float toDiagonalAngle = radiansToDegrees(acosf(width/toDiagonal));
float toDiagonalAngle2=90-radiansToDegrees(acosf(width/toDiagonal));
float toDiagonalAngleComple;
float toDiagonalAngleComple2;
float finalHeight = 0.0;
float finalWidth = 0.0;
float degrees=90;
if (degrees >= 0 && degrees <= 90) {
toDiagonalAngleComple=toDiagonalAngle+degrees;
toDiagonalAngleComple2=toDiagonalAngle2+degrees;
finalHeight=ABS(toDiagonal*sinf(degreesToRadians(toDiagonalAngleComple)));
finalWidth=ABS(toDiagonal*sinf(degreesToRadians(toDiagonalAngleComple2)));
t1 = CGAffineTransformMakeTranslation(height*sinf(degreesToRadians(degrees)), 0.0);
}
else if (degrees > 90 && degrees <= 180) {
float degrees2 = degrees-90;
toDiagonalAngleComple=toDiagonalAngle+degrees2;
toDiagonalAngleComple2=toDiagonalAngle2+degrees2;
finalHeight=ABS(toDiagonal*sinf(degreesToRadians(toDiagonalAngleComple2)));
finalWidth=ABS(toDiagonal*sinf(degreesToRadians(toDiagonalAngleComple)));
t1 = CGAffineTransformMakeTranslation(width*sinf(degreesToRadians(degrees2))+height*cosf(degreesToRadians(degrees2)), height*sinf(degreesToRadians(degrees2)));
}
else if (degrees >= -90 && degrees < 0) {
float degrees2 = degrees-90;
float degreesabs = ABS(degrees);
toDiagonalAngleComple=toDiagonalAngle+degrees2;
toDiagonalAngleComple2=toDiagonalAngle2+degrees2;
finalHeight=ABS(toDiagonal*sinf(degreesToRadians(toDiagonalAngleComple2)));
finalWidth=ABS(toDiagonal*sinf(degreesToRadians(toDiagonalAngleComple)));
t1 = CGAffineTransformMakeTranslation(0, width*sinf(degreesToRadians(degreesabs)));
}
else if (degrees >= -180 && degrees < -90) {
float degreesabs = ABS(degrees);
float degreesplus = degreesabs-90;
toDiagonalAngleComple=toDiagonalAngle+degrees;
toDiagonalAngleComple2=toDiagonalAngle2+degrees;
finalHeight=ABS(toDiagonal*sinf(degreesToRadians(toDiagonalAngleComple)));
finalWidth=ABS(toDiagonal*sinf(degreesToRadians(toDiagonalAngleComple2)));
t1 = CGAffineTransformMakeTranslation(width*sinf(degreesToRadians(degreesplus)), height*sinf(degreesToRadians(degreesplus))+width*cosf(degreesToRadians(degreesplus)));
}
// Rotate transformation
t2 = CGAffineTransformRotate(t1, degreesToRadians(degrees));
//t2 = CGAffineTransformRotate(t1, -90);
// Step 3
// Set the appropriate render sizes and rotational transforms
if (!self.mutableVideoComposition) {
// Create a new video composition
self.mutableVideoComposition = [AVMutableVideoComposition videoComposition];
// self.mutableVideoComposition.renderSize = CGSizeMake(assetVideoTrack.naturalSize.height,assetVideoTrack.naturalSize.width);
self.mutableVideoComposition.renderSize = CGSizeMake(finalWidth,finalHeight);
self.mutableVideoComposition.frameDuration = CMTimeMake(1,30);
// The rotate transform is set on a layer instruction
instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [self.mutableComposition duration]);
layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:(self.mutableComposition.tracks)[0]];
[layerInstruction setTransform:t2 atTime:kCMTimeZero];
} else {
self.mutableVideoComposition.renderSize = CGSizeMake(self.mutableVideoComposition.renderSize.height, self.mutableVideoComposition.renderSize.width);
// Extract the existing layer instruction on the mutableVideoComposition
instruction = (self.mutableVideoComposition.instructions)[0];
layerInstruction = (instruction.layerInstructions)[0];
// Check if a transform already exists on this layer instruction, this is done to add the current transform on top of previous edits
CGAffineTransform existingTransform;
if (![layerInstruction getTransformRampForTime:[self.mutableComposition duration] startTransform:&existingTransform endTransform:NULL timeRange:NULL]) {
[layerInstruction setTransform:t2 atTime:kCMTimeZero];
} else {
// Note: the point of origin for rotation is the upper left corner of the composition, t3 is to compensate for origin
CGAffineTransform t3 = CGAffineTransformMakeTranslation(-1*assetVideoTrack.naturalSize.height/2, 0.0);
CGAffineTransform newTransform = CGAffineTransformConcat(existingTransform, CGAffineTransformConcat(t2, t3));
[layerInstruction setTransform:newTransform atTime:kCMTimeZero];
}
}
// Step 4
// Add the transform instructions to the video composition
instruction.layerInstructions = @[layerInstruction];
self.mutableVideoComposition.instructions = @[instruction];
// Step 5
// Notify AVSEViewController about rotation operation completion
// [[NSNotificationCenter defaultCenter] postNotificationName:AVSEEditCommandCompletionNotification object:self];
[self performWithAssetExport];
}
- (void)performWithAssetExport
{
// Step 1
// Create an outputURL to which the exported movie will be saved
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *outputURL = paths[0];
NSFileManager *manager = [NSFileManager defaultManager];
[manager createDirectoryAtPath:outputURL withIntermediateDirectories:YES attributes:nil error:nil];
outputURL = [outputURL stringByAppendingPathComponent:@"output.mov"];
// Remove Existing File
[manager removeItemAtPath:outputURL error:nil];
// Step 2
// Create an export session with the composition and write the exported movie to the photo library
self.exportSession = [[AVAssetExportSession alloc] initWithAsset:[self.mutableComposition copy] presetName:AVAssetExportPreset1280x720];
self.exportSession.videoComposition = self.mutableVideoComposition;
self.exportSession.audioMix = self.mutableAudioMix;
self.exportSession.outputURL = [NSURL fileURLWithPath:outputURL];
self.exportSession.outputFileType=AVFileTypeQuickTimeMovie;
[self.exportSession exportAsynchronouslyWithCompletionHandler:^(void){
switch (self.exportSession.status) {
case AVAssetExportSessionStatusCompleted:
//[self playfunction];
[[NSNotificationCenter defaultCenter] postNotificationName:@"Backhome" object:nil];
// Step 3
// Notify AVSEViewController about export completion
break;
case AVAssetExportSessionStatusFailed:
NSLog(#"Failed:%#",self.exportSession.error);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(#"Canceled:%#",self.exportSession.error);
break;
default:
break;
}
}];
}
It appears that this issue occurs because videojs has trouble reading the orientation metadata. More information here: http://help.videojs.com/discussions/problems/1508-video-orientation-for-iphone-wrong
Based on the implied solution, you should make sure that when you save the video you use AVFoundation to set the orientation value. Information on how to do that is available in this previous Stack Overflow post: How do I set the orientation for a frame-by-frame-generated video using AVFoundation?
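As a rough illustration of that approach (this is only a sketch with assumed output settings, not the exact code from the linked post), the orientation can be baked into the movie header at write time by setting a rotation transform on the AVAssetWriterInput before appending frames:
#import <AVFoundation/AVFoundation.h>

// Sketch: record a 90-degree rotation in the output movie's track transform so
// players that honor the transform display the video upright.
NSError *error = nil;
AVAssetWriter *writer = [AVAssetWriter assetWriterWithURL:outputURL // assumed file URL
                                                 fileType:AVFileTypeQuickTimeMovie
                                                    error:&error];
NSDictionary *settings = @{ AVVideoCodecKey  : AVVideoCodecH264,
                            AVVideoWidthKey  : @1280,
                            AVVideoHeightKey : @720 };
AVAssetWriterInput *videoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                    outputSettings:settings];
videoInput.transform = CGAffineTransformMakeRotation(M_PI_2); // orientation metadata
[writer addInput:videoInput];
// ... start writing and append sample buffers as usual ...
Note that some web players ignore this track-level transform; in that case, physically re-encoding the frames with an AVMutableVideoComposition (as in the code above) is the more reliable fix.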
Related
I'm trying to add a fade-in to a WAV file and then export a new file with the added fade using AVAssetExportSession. All the examples I have seen export as M4A. Is it even possible to do this with WAV or AIFF?
The error I get is:
AVAssetExportSessionStatusFailed Error Domain=AVFoundationErrorDomain Code=-11822 "Cannot Open" UserInfo=0x1f01c9f0 {NSLocalizedDescription=Cannot Open, NSLocalizedFailureReason=This media format is not supported.}
My code looks like this:
NSString *inpath = [path stringByAppendingFormat:@"/%@", file];
NSString *ename = [file stringByDeletingPathExtension];
NSString *incname = [ename stringByAppendingString:@"1t"];
NSString *outname = [incname stringByAppendingPathExtension:@"wav"];
NSString *outpath = [path stringByAppendingFormat:@"/%@", outname];
NSURL *urlpath = [NSURL fileURLWithPath:inpath];
NSURL *urlout = [NSURL fileURLWithPath:outpath];
NSDictionary *options = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES]
forKey:AVURLAssetPreferPreciseDurationAndTimingKey];
AVURLAsset *anAsset = [[AVURLAsset alloc] initWithURL:urlpath options:options];
//check the soundfile is greater than 50seconds
CMTime assetTime = [anAsset duration];
Float64 duration = CMTimeGetSeconds(assetTime);
if (duration < 50.0) return NO;
// get the first audio track
NSArray *tracks = [anAsset tracksWithMediaType:AVMediaTypeAudio];
if ([tracks count] == 0) return NO;
AVAssetTrack *track = [tracks objectAtIndex:0];
// create trim time range - 20 seconds starting from 30 seconds into the asset
CMTime startTime = CMTimeMake(30, 1);
CMTime stopTime = CMTimeMake(50, 1);
CMTimeRange exportTimeRange = CMTimeRangeFromTimeToTime(startTime, stopTime);
// create fade in time range - 10 seconds starting at the beginning of trimmed asset
CMTime startFadeInTime = startTime;
CMTime endFadeInTime = CMTimeMake(40, 1);
CMTimeRange fadeInTimeRange = CMTimeRangeFromTimeToTime(startFadeInTime,
endFadeInTime);
// setup audio mix
AVMutableAudioMix *exportAudioMix = [AVMutableAudioMix audioMix];
AVMutableAudioMixInputParameters *exportAudioMixInputParameters =
[AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:track];
[exportAudioMixInputParameters setVolumeRampFromStartVolume:0.0 toEndVolume:1.0 timeRange:fadeInTimeRange];
exportAudioMix.inputParameters = [NSArray arrayWithObject:exportAudioMixInputParameters];
AVAssetExportSession *exportSession = [AVAssetExportSession
exportSessionWithAsset:anAsset presetName:AVAssetExportPresetPassthrough];
//NSArray *listof = [AVAssetExportSession exportPresetsCompatibleWithAsset:anAsset];
//NSLog(@"LISTOF %@", listof);
id desc = [track.formatDescriptions objectAtIndex:0];
const AudioStreamBasicDescription *audioDesc = CMAudioFormatDescriptionGetStreamBasicDescription((CMAudioFormatDescriptionRef)desc);
FourCharCode formatID = audioDesc->mFormatID;
NSString *fileType = nil;
NSString *ex = nil;
switch (formatID) {
case kAudioFormatLinearPCM:
{
UInt32 flags = audioDesc->mFormatFlags;
if (flags & kAudioFormatFlagIsBigEndian) {
fileType = #"public.aiff-audio";
ex = #"aif";
} else {
fileType = #"com.microsoft.waveform-audio";
ex = #"wav";
}
}
break;
case kAudioFormatMPEGLayer3:
fileType = #"com.apple.quicktime-movie";
ex = #"mp3";
break;
case kAudioFormatMPEG4AAC:
fileType = #"com.apple.m4a-audio";
ex = #"m4a";
break;
case kAudioFormatAppleLossless:
fileType = #"com.apple.m4a-audio";
ex = #"m4a";
break;
default:
break;
}
exportSession.outputFileType = fileType;
exportSession.outputURL = urlout;
//exportSession.outputFileType = AVFileTypeWAVE; // output file type
exportSession.timeRange = exportTimeRange; // trim time range
exportSession.audioMix = exportAudioMix; // fade in audio mix
// perform the export
[exportSession exportAsynchronouslyWithCompletionHandler:^{
if (AVAssetExportSessionStatusCompleted == exportSession.status) {
NSLog(#"AVAssetExportSessionStatusCompleted");
} else if (AVAssetExportSessionStatusFailed == exportSession.status) {
// a failure may happen because of an event out of your control,
// for example, an interruption like a phone call coming in;
// make sure to handle this case appropriately
NSLog(@"AVAssetExportSessionStatusFailed %@", exportSession.error);
} else {
NSLog(#"Export Session Status: %d", exportSession.status);
}
}];
return YES;
}
You can't do that with AVAssetExportSession, because the presets are quite rigid in their usage. A preset value of AVAssetExportPresetPassthrough will keep your input formats on output.
Since your task is to manipulate the audio sample buffers directly, you should use the other option AVFoundation gives you: a paired AVAssetReader and AVAssetWriter setup.
You'll find proper sample code in AVReaderWriterOSX from the Apple developer sample code. It should also work on iOS, although different I/O format settings are available there. Decompressing the audio to PCM and writing it back out as an uncompressed .wav file should be possible.
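For illustration, here is a minimal sketch of that reader/writer pairing, assuming hypothetical inputURL and outputURL variables and standard 16-bit stereo output settings; the fade itself would be applied by attaching your AVAudioMix to the reader output:
#import <AVFoundation/AVFoundation.h>

// Sketch: decode an asset's audio to linear PCM and write it out as a WAV file.
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:inputURL options:nil]; // assumed input URL
NSError *error = nil;
AVAssetReader *reader = [AVAssetReader assetReaderWithAsset:asset error:&error];
AVAssetReaderAudioMixOutput *readerOutput =
    [AVAssetReaderAudioMixOutput assetReaderAudioMixOutputWithAudioTracks:[asset tracksWithMediaType:AVMediaTypeAudio]
                                                            audioSettings:@{ AVFormatIDKey : @(kAudioFormatLinearPCM) }];
// readerOutput.audioMix = exportAudioMix; // attach the fade-in volume ramp here
[reader addOutput:readerOutput];

AVAssetWriter *writer = [AVAssetWriter assetWriterWithURL:outputURL // assumed output URL ending in .wav
                                                 fileType:AVFileTypeWAVE
                                                    error:&error];
NSDictionary *wavSettings = @{ AVFormatIDKey              : @(kAudioFormatLinearPCM),
                               AVSampleRateKey            : @44100,
                               AVNumberOfChannelsKey      : @2,
                               AVLinearPCMBitDepthKey     : @16,
                               AVLinearPCMIsFloatKey      : @NO,
                               AVLinearPCMIsBigEndianKey  : @NO,
                               AVLinearPCMIsNonInterleaved: @NO };
AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                                                     outputSettings:wavSettings];
[writer addInput:writerInput];

[reader startReading];
[writer startWriting];
[writer startSessionAtSourceTime:kCMTimeZero];
[writerInput requestMediaDataWhenReadyOnQueue:dispatch_queue_create("audio.export", NULL) usingBlock:^{
    while (writerInput.isReadyForMoreMediaData) {
        CMSampleBufferRef buffer = [readerOutput copyNextSampleBuffer];
        if (buffer) {
            [writerInput appendSampleBuffer:buffer];
            CFRelease(buffer);
        } else {
            [writerInput markAsFinished];
            [writer finishWritingWithCompletionHandler:^{ /* check writer.status and writer.error */ }];
            break;
        }
    }
}];
The trim can be handled by setting reader.timeRange before calling startReading.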
I am aware of how to save metadata using ALAssets. But I want to save an image, or upload it somewhere, with its EXIF data intact. I have the EXIF data as an NSDictionary, but how can I inject it properly into a UIImage (or, more likely, an NSData JPEG representation)?
I am using UIImagePickerController to get the image from the camera and my flow is a bit different than the one described by Chiquis. Here it is:
- (void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info {
UIImage *image = info[@"UIImagePickerControllerOriginalImage"];
NSString *fullPhotoFilename = ...; // generate the photo name and path here
NSData *photoData = [UIImage taggedImageData:UIImageJPEGRepresentation(image, 1.0)
                                    metadata:info[@"UIImagePickerControllerMediaMetadata"]
                                 orientation:image.imageOrientation];
[photoData writeToFile:fullPhotoFilename atomically:YES];
}
And here is the UIImage category used to combine the image data with its metadata:
#import <ImageIO/ImageIO.h>
#import "UIImage+Tagging.h"
#import "LocationHelper.h"
@implementation UIImage (Tagging)
+ (NSData *)writeMetadataIntoImageData:(NSData *)imageData metadata:(NSMutableDictionary *)metadata {
// create an imagesourceref
CGImageSourceRef source = CGImageSourceCreateWithData((__bridge CFDataRef) imageData, NULL);
// this is the type of image (e.g., public.jpeg)
CFStringRef UTI = CGImageSourceGetType(source);
// create a new data object and write the new image into it
NSMutableData *dest_data = [NSMutableData data];
CGImageDestinationRef destination = CGImageDestinationCreateWithData((__bridge CFMutableDataRef)dest_data, UTI, 1, NULL);
if (!destination) {
NSLog(#"Error: Could not create image destination");
}
// add the image contained in the image source to the destination, overidding the old metadata with our modified metadata
CGImageDestinationAddImageFromSource(destination, source, 0, (__bridge CFDictionaryRef) metadata);
BOOL success = NO;
success = CGImageDestinationFinalize(destination);
if (!success) {
NSLog(#"Error: Could not create data from image destination");
}
CFRelease(destination);
CFRelease(source);
return dest_data;
}
+ (NSData *)taggedImageData:(NSData *)imageData metadata:(NSDictionary *)metadata orientation:(UIImageOrientation)orientation {
CLLocationManager *locationManager = [CLLocationManager new];
CLLocation *location = [locationManager location];
NSMutableDictionary *newMetadata = [NSMutableDictionary dictionaryWithDictionary:metadata];
if (!newMetadata[(NSString *)kCGImagePropertyGPSDictionary] && location) {
newMetadata[(NSString *)kCGImagePropertyGPSDictionary] = [LocationHelper gpsDictionaryForLocation:location];
}
// Reference: http://sylvana.net/jpegcrop/exif_orientation.html
int newOrientation;
switch (orientation) {
case UIImageOrientationUp:
newOrientation = 1;
break;
case UIImageOrientationDown:
newOrientation = 3;
break;
case UIImageOrientationLeft:
newOrientation = 8;
break;
case UIImageOrientationRight:
newOrientation = 6;
break;
case UIImageOrientationUpMirrored:
newOrientation = 2;
break;
case UIImageOrientationDownMirrored:
newOrientation = 4;
break;
case UIImageOrientationLeftMirrored:
newOrientation = 5;
break;
case UIImageOrientationRightMirrored:
newOrientation = 7;
break;
default:
newOrientation = -1;
}
if (newOrientation != -1) {
newMetadata[(NSString *)kCGImagePropertyOrientation] = @(newOrientation);
}
NSData *newImageData = [self writeMetadataIntoImageData:imageData metadata:newMetadata];
return newImageData;
}

@end
And finally, here is the method I am using to generate the needed GPS dictionary:
+ (NSDictionary *)gpsDictionaryForLocation:(CLLocation *)location {
NSTimeZone *timeZone = [NSTimeZone timeZoneWithName:#"UTC"];
NSDateFormatter *formatter = [[NSDateFormatter alloc] init];
[formatter setTimeZone:timeZone];
[formatter setDateFormat:#"HH:mm:ss.SS"];
NSDictionary *gpsDict = #{(NSString *)kCGImagePropertyGPSLatitude: #(fabs(location.coordinate.latitude)),
(NSString *)kCGImagePropertyGPSLatitudeRef: ((location.coordinate.latitude >= 0) ? #"N" : #"S"),
(NSString *)kCGImagePropertyGPSLongitude: #(fabs(location.coordinate.longitude)),
(NSString *)kCGImagePropertyGPSLongitudeRef: ((location.coordinate.longitude >= 0) ? #"E" : #"W"),
(NSString *)kCGImagePropertyGPSTimeStamp: [formatter stringFromDate:[location timestamp]],
(NSString *)kCGImagePropertyGPSAltitude: #(fabs(location.altitude)),
};
return gpsDict;
}
Hope it helps someone. Thanks to Gustavo Ambrozio, Chiquis, and several other SO members, I was able to piece it together and use it in my project.
UIImage does not contain metadata information (it is stripped). So if you want to save it without using the image picker method (not to the camera roll), follow the answer here to write to a file with the metadata intact:
Problem setting exif data for an image
No idea why this would be downvoted, but here is the method.
In this case I'm getting the image through AVFoundation, and this is what goes in the
[[self stillImageOutput] captureStillImageAsynchronouslyFromConnection:videoConnection
completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error)
{
// code here
}
block code:
CFDictionaryRef metaDict = CMCopyDictionaryOfAttachments(NULL, imageSampleBuffer, kCMAttachmentMode_ShouldPropagate);
CFMutableDictionaryRef mutable = CFDictionaryCreateMutableCopy(NULL, 0, metaDict);
// Create formatted date
NSTimeZone *timeZone = [NSTimeZone timeZoneWithName:#"UTC"];
NSDateFormatter *formatter = [[NSDateFormatter alloc] init];
[formatter setTimeZone:timeZone];
[formatter setDateFormat:#"HH:mm:ss.SS"];
// Create GPS Dictionary
NSDictionary *gpsDict = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithFloat:fabs(loc.coordinate.latitude)], kCGImagePropertyGPSLatitude
, ((loc.coordinate.latitude >= 0) ? #"N" : #"S"), kCGImagePropertyGPSLatitudeRef
, [NSNumber numberWithFloat:fabs(loc.coordinate.longitude)], kCGImagePropertyGPSLongitude
, ((loc.coordinate.longitude >= 0) ? #"E" : #"W"), kCGImagePropertyGPSLongitudeRef
, [formatter stringFromDate:[loc timestamp]], kCGImagePropertyGPSTimeStamp
, [NSNumber numberWithFloat:fabs(loc.altitude)], kCGImagePropertyGPSAltitude
, nil];
// The gps info goes into the gps metadata part
CFDictionarySetValue(mutable, kCGImagePropertyGPSDictionary, (__bridge void *)gpsDict);
// Here just as an example im adding the attitude matrix in the exif comment metadata
CMRotationMatrix m = att.rotationMatrix;
GLKMatrix4 attMat = GLKMatrix4Make(m.m11, m.m12, m.m13, 0, m.m21, m.m22, m.m23, 0, m.m31, m.m32, m.m33, 0, 0, 0, 0, 1);
NSMutableDictionary *EXIFDictionary = (__bridge NSMutableDictionary*)CFDictionaryGetValue(mutable, kCGImagePropertyExifDictionary);
[EXIFDictionary setValue:NSStringFromGLKMatrix4(attMat) forKey:(NSString *)kCGImagePropertyExifUserComment];
CFDictionarySetValue(mutable, kCGImagePropertyExifDictionary, (__bridge void *)EXIFDictionary);
NSData *jpeg = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer] ;
After this code you will have your image in the jpeg NSData and the corresponding dictionary for that image in the mutable CFDictionary.
All you have to do now is:
CGImageSourceRef source = CGImageSourceCreateWithData((__bridge CFDataRef)jpeg, NULL);
CFStringRef UTI = CGImageSourceGetType(source); //this is the type of image (e.g., public.jpeg)
NSMutableData *dest_data = [NSMutableData data];
CGImageDestinationRef destination = CGImageDestinationCreateWithData((__bridge CFMutableDataRef)dest_data,UTI,1,NULL);
if(!destination) {
NSLog(#"***Could not create image destination ***");
}
//add the image contained in the image source to the destination, overidding the old metadata with our modified metadata
CGImageDestinationAddImageFromSource(destination,source,0, (CFDictionaryRef) mutable);
//tell the destination to write the image data and metadata into our data object.
//It will return false if something goes wrong
BOOL success = CGImageDestinationFinalize(destination);
if(!success) {
NSLog(#"***Could not create data from image destination ***");
}
//now we have the data ready to go, so do whatever you want with it
//here we just write it to disk at the same path we were passed
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0]; // Get documents folder
NSString *dataPath = [documentsDirectory stringByAppendingPathComponent:@"ImagesFolder"];
NSError *error;
if (![[NSFileManager defaultManager] fileExistsAtPath:dataPath])
[[NSFileManager defaultManager] createDirectoryAtPath:dataPath withIntermediateDirectories:NO attributes:nil error:&error]; //Create folder
// NSString *imageName = #"ImageName";
NSString *fullPath = [dataPath stringByAppendingPathComponent:[NSString stringWithFormat:#"%#.jpg", name]]; //add our image to the path
[dest_data writeToFile:fullPath atomically:YES];
//cleanup
CFRelease(destination);
CFRelease(source);
Note how I'm not saving using ALAssets but directly into a folder of my choice.
By the way, most of this code can be found in the link I posted at first.
There is an easier way. If you need to save some EXIF data, you can use the SimpleExif pod.
First create an ExifContainer:
ExifContainer *container = [[ExifContainer alloc] init];
and populate it with all required data:
[container addUserComment:@"A long time ago, in a galaxy far, far away"];
[container addCreationDate:[NSDate dateWithTimeIntervalSinceNow:-10000000]];
[container addLocation:locations[0]];
Then you can add this data to an image:
NSData *imageData = [[UIImage imageNamed:@"DemoImage"] addExif:container];
Then you just save this data as a JPEG.
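For completeness, a minimal way to persist that NSData to disk (the file name here is just an example):
NSString *path = [NSTemporaryDirectory() stringByAppendingPathComponent:@"tagged.jpg"]; // any writable path
[imageData writeToFile:path atomically:YES];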
I faced the same problem. Now I can upload files with EXIF data, and you can also compress the photo if you need to. This solved the issue for me:
// Get your image.
UIImage *loImgPhoto = [self getImageFromAsset:loPHAsset];
// Get your metadata (includes the EXIF data).
CGImageSourceRef loImageOriginalSource = CGImageSourceCreateWithData((__bridge CFDataRef)loDataFotoOriginal, NULL); // loDataFotoOriginal is assumed to be the original photo's NSData
NSDictionary *loDicMetadata = (__bridge NSDictionary *)CGImageSourceCopyPropertiesAtIndex(loImageOriginalSource, 0, NULL);
// Set your compression quality (0.0 to 1.0).
NSMutableDictionary *loDicMutableMetadata = [loDicMetadata mutableCopy];
[loDicMutableMetadata setObject:@(lfCompressionQualityValue) forKey:(__bridge NSString *)kCGImageDestinationLossyCompressionQuality];
// Create an image destination.
NSMutableData *loNewImageDataWithExif = [NSMutableData data];
CGImageDestinationRef loImgDestination = CGImageDestinationCreateWithData((__bridge CFMutableDataRef)loNewImageDataWithExif, CGImageSourceGetType(loImageOriginalSource), 1, NULL);
// Add your image to the destination.
CGImageDestinationAddImage(loImgDestination, loImgPhoto.CGImage, (__bridge CFDictionaryRef) loDicMutableMetadata);
// Finalize the destination.
if (CGImageDestinationFinalize(loImgDestination))
{
NSLog(#"Successful image creation.");
// process the image rendering, adjustment data creation and finalize the asset edit.
//Upload photo with EXIF metadata
[self myUploadMethod:loNewImageDataWithExif];
}
else
{
NSLog(#"Error -> failed to finalize the image.");
}
CFRelease(loImageOriginalSource);
CFRelease(loImgDestination);
getImageFromAsset method:
-(UIImage *)getImageFromAsset:(PHAsset *)aPHAsset
{
__block UIImage *limgImageResult;
PHImageRequestOptions *lPHImageRequestOptions = [PHImageRequestOptions new];
lPHImageRequestOptions.synchronous = YES;
[self.imageManager requestImageForAsset:aPHAsset
targetSize:PHImageManagerMaximumSize
contentMode:PHImageContentModeDefault//PHImageContentModeAspectFit
options:lPHImageRequestOptions
resultHandler:^(UIImage *limgImage, NSDictionary *info) {
limgImageResult = limgImage;
}];
return limgImageResult;
}
Here are the basics of setting Make and Model metadata on a .jpg file in Swift 3: https://gist.github.com/lacyrhoades/09d8a367125b6225df5038aec68ed9e7 The higher-level approaches, like using the ExifContainer pod, did not work for me.
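The same idea as a rough Objective-C sketch (jpegData is assumed to be your image's NSData): write Make and Model into the TIFF dictionary and rewrite the file with CGImageDestination:
#import <ImageIO/ImageIO.h>

NSDictionary *metadata = @{ (NSString *)kCGImagePropertyTIFFDictionary :
                                @{ (NSString *)kCGImagePropertyTIFFMake  : @"Apple",
                                   (NSString *)kCGImagePropertyTIFFModel : @"iPhone" } };
CGImageSourceRef source = CGImageSourceCreateWithData((__bridge CFDataRef)jpegData, NULL); // assumed input data
NSMutableData *output = [NSMutableData data];
CGImageDestinationRef destination = CGImageDestinationCreateWithData((__bridge CFMutableDataRef)output,
                                                                     CGImageSourceGetType(source), 1, NULL);
// The keys provided here override the corresponding properties from the source image.
CGImageDestinationAddImageFromSource(destination, source, 0, (__bridge CFDictionaryRef)metadata);
CGImageDestinationFinalize(destination);
CFRelease(destination);
CFRelease(source);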
I am trying to combine several video clips into one using AVFoundation.
I can create a single video using AVMutableComposition using the code below
AVMutableComposition *composition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
CMTime startTime = kCMTimeZero;
/*videoClipPaths is a array of paths of the video clips recorded*/
//for loop to combine clips into a single video
for (NSInteger i=0; i < [videoClipPaths count]; i++) {
NSString *path = (NSString*)[videoClipPaths objectAtIndex:i];
NSURL *url = [[NSURL alloc] initFileURLWithPath:path];
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:nil];
[url release];
AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVAssetTrack *audioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
//set the orientation
if(i == 0)
{
[compositionVideoTrack setPreferredTransform:videoTrack.preferredTransform];
}
ok = [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [asset duration]) ofTrack:videoTrack atTime:startTime error:nil];
ok = [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [asset duration]) ofTrack:audioTrack atTime:startTime error:nil];
startTime = CMTimeAdd(startTime, [asset duration]);
}
//export the combined video
NSString *combinedPath = /* path of the combined video*/;
NSURL *url = [[NSURL alloc] initFileURLWithPath: combinedPath];
AVAssetExportSession *exporter = [[[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPreset640x480] autorelease];
exporter.outputURL = url;
[url release];
exporter.outputFileType = [[exporter supportedFileTypes] objectAtIndex:0];
[exporter exportAsynchronouslyWithCompletionHandler:^(void){[self combineVideoFinished:exporter.outputURL status:exporter.status error:exporter.error];}];
The code above works fine if all the video clips were recorded in the same orientation (portrait or landscape). However, if I have a mixture of orientations in the clips, the final video will have part of it rotated 90 degrees to the right (or left).
I was wondering whether there is a way to transform all clips to the same orientation (e.g. the orientation of the first clip) while composing them. From what I read in the Xcode documentation, AVMutableVideoCompositionLayerInstruction seems to be usable to transform an AVAsset, but I am not sure how to create and apply several different layer instructions to the corresponding clips and then use them in the composition (AVMutableComposition *).
Any help would be appreciated!
This is what I do. I then use an AVAssetExportSession to create the actual file. But I warn you, the CGAffineTransforms are sometimes applied late, so you'll see a frame or two of the original before the video transforms. I have no clue why this happens; a different combination of videos will yield the expected result, but sometimes it's off.
AVMutableComposition *composition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.frameDuration = CMTimeMake(1,30);
videoComposition.renderScale = 1.0;
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];
// Get only the paths the user selected
NSMutableArray *array = [NSMutableArray array];
for (NSString *string in videoPathArray) {
if (![string isEqualToString:@""]) {
[array addObject:string];
}
}
self.videoPathArray = array;
float time = 0;
for (int i = 0; i<self.videoPathArray.count; i++) {
AVURLAsset *sourceAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:[videoPathArray objectAtIndex:i]] options:[NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES] forKey:AVURLAssetPreferPreciseDurationAndTimingKey]];
NSError *error = nil;
BOOL ok = NO;
AVAssetTrack *sourceVideoTrack = [[sourceAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
CGSize temp = CGSizeApplyAffineTransform(sourceVideoTrack.naturalSize, sourceVideoTrack.preferredTransform);
CGSize size = CGSizeMake(fabsf(temp.width), fabsf(temp.height));
CGAffineTransform transform = sourceVideoTrack.preferredTransform;
videoComposition.renderSize = sourceVideoTrack.naturalSize;
if (size.width > size.height) {
[layerInstruction setTransform:transform atTime:CMTimeMakeWithSeconds(time, 30)];
} else {
float s = size.width/size.height;
CGAffineTransform new = CGAffineTransformConcat(transform, CGAffineTransformMakeScale(s,s));
float x = (size.height - size.width*s)/2;
CGAffineTransform newer = CGAffineTransformConcat(new, CGAffineTransformMakeTranslation(x, 0));
[layerInstruction setTransform:newer atTime:CMTimeMakeWithSeconds(time, 30)];
}
ok = [compositionVideoTrack insertTimeRange:sourceVideoTrack.timeRange ofTrack:sourceVideoTrack atTime:[composition duration] error:&error];
if (!ok) {
// Deal with the error.
NSLog(#"something went wrong");
}
NSLog(#"\n source asset duration is %f \n source vid track timerange is %f %f \n composition duration is %f \n composition vid track time range is %f %f",CMTimeGetSeconds([sourceAsset duration]), CMTimeGetSeconds(sourceVideoTrack.timeRange.start),CMTimeGetSeconds(sourceVideoTrack.timeRange.duration),CMTimeGetSeconds([composition duration]), CMTimeGetSeconds(compositionVideoTrack.timeRange.start),CMTimeGetSeconds(compositionVideoTrack.timeRange.duration));
time += CMTimeGetSeconds(sourceVideoTrack.timeRange.duration);
}
instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
instruction.timeRange = compositionVideoTrack.timeRange;
videoComposition.instructions = [NSArray arrayWithObject:instruction];
Here is @bogardon's answer in Swift 4+:
import ARKit
class ARKitSampleViewController: UIViewController {
var label: UILabel?
var planeFound = false
func plane(from anchor: ARPlaneAnchor?) -> SCNNode? {
let plane = SCNPlane(width: CGFloat(anchor?.extent.x ?? 0.0), height: CGFloat(anchor?.extent.z ?? 0.0))
plane.firstMaterial?.diffuse.contents = UIColor.clear
let planeNode = SCNNode(geometry: plane)
planeNode.position = SCNVector3Make(anchor?.center.x ?? 0.0, 0, anchor?.center.z ?? 0.0)
// SCNPlanes are vertically oriented in their local coordinate space.
// Rotate it to match the horizontal orientation of the ARPlaneAnchor.
planeNode.transform = SCNMatrix4MakeRotation(-.pi * 0.5, 1, 0, 0)
return planeNode
}
// MARK: - ARSCNViewDelegate
func renderer(_ renderer: SCNSceneRenderer, didAdd node: SCNNode, for anchor: ARAnchor) {
if planeFound == false {
if (anchor is ARPlaneAnchor) {
DispatchQueue.main.async(execute: {
self.planeFound = true
self.label?.text = "DANCEFLOOR FOUND. LET'S BOOGIE"
let overlay = UIView(frame: self.view.frame)
overlay.backgroundColor = UIColor.black
overlay.alpha = 0
if let label = self.label {
self.view.insertSubview(overlay, belowSubview: label)
}
UIView.animate(withDuration: 1.5, delay: 2, options: .curveEaseIn, animations: {
self.label?.alpha = 0
overlay.alpha = 0.5
}) { finished in
let planeAnchor = anchor as? ARPlaneAnchor
// Show the disco ball here
}
})
}
}
}
}
I have an application that allows appending multiple video assets and adding one or more audio tracks to a composition. It all seems to work; I can play the resulting composition using AVPlayer (although the audio level seems low). After exporting the composition to a file, the audio track is missing.
My code is largely based on the AVEditDemo sample code from the WWDC10 sessions. I have double-checked my code against the AVEditDemo code and cannot find what the problem could be. I have also checked forums, but there are not many AVFoundation-related posts/solutions.
Any help is most welcome. Cheers,
Jean-Pierre
Method to build the composition with extra audio tracks
Notes:
compositionArray: contains assets to build the composition.
AssetView: object containing an AVURLAsset.
- (AVMutableComposition *)buildCompositionObjects
{
// no assets available, return nil
if ([compositionArray count] < 1)
{
return nil;
}
// get the asset video size
AssetView * view = [compositionArray objectAtIndex:0];
AVURLAsset * asset = view.asset;
CGSize videoSize = [asset naturalSize];
// create new composition
AVMutableComposition * cmp = [AVMutableComposition composition];
// set the size
cmp.naturalSize = videoSize;
// build composition
[self buildComposition:cmp];
// add any extra audio track
[self addAudioTrackToComposition:cmp];
// return the new composition
return cmp;
}
Method to build the base composition
- (void) buildComposition:(AVMutableComposition *)cmp
{
// set the start time of contiguous tracks
CMTime nextClipStartTime = kCMTimeZero;
// clear the composition
[cmp removeTimeRange:CMTimeRangeMake(CMTimeMake(0, 600), cmp.duration)];
// add audio and video tracks
AVMutableCompositionTrack *compositionVideoTrack = [cmp addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *compositionAudioTrack = [cmp addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
// loop through all available assets
for (AssetView * view in compositionArray)
{
AVURLAsset *asset = view.asset;
CMTimeRange timeRangeInAsset;
timeRangeInAsset = CMTimeRangeMake(kCMTimeZero, [asset duration]);
AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
[compositionVideoTrack insertTimeRange:timeRangeInAsset ofTrack:clipVideoTrack atTime:nextClipStartTime error:nil];
// make sure there is an audio track. Had to do this because of the missing audio track issue; it crashes if the check is not done (out of bounds).
if ([[asset tracksWithMediaType:AVMediaTypeAudio] count] > 0)
{
AVAssetTrack *clipAudioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
[compositionAudioTrack insertTimeRange:timeRangeInAsset ofTrack:clipAudioTrack atTime:nextClipStartTime error:nil];
}
// adjust next asset start
nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRangeInAsset.duration);
}
}
Method to add the additional audio tracks
- (void)addAudioTrackToComposition:(AVMutableComposition *)cmp
{
// no audio track, return
if ([audioTracks count] < 1)
{
return;
}
// base track ID for additional audio tracks
long baseTrackID = 100;
for (AVURLAsset * audioAsset in audioTracks)
{
// make sure the audio track fits in the composition
CMTimeRange commentaryTimeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
if (CMTIME_COMPARE_INLINE(CMTimeRangeGetEnd(commentaryTimeRange), >, [cmp duration]))
{
commentaryTimeRange.duration = CMTimeSubtract([cmp duration], commentaryTimeRange.start);
}
// Add the audio track.
AVMutableCompositionTrack *compositionCommentaryTrack = [cmp addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:baseTrackID++];
[compositionCommentaryTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, commentaryTimeRange.duration) ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:commentaryTimeRange.start error:nil];
}
}
Method to export the composition
- (void) save
{
NSString * eventFolder = [NSString stringWithFormat:@"%@/%@-%@",
DOCUMENTS_FOLDER,
event.title,
[StringUtils stringForDate:event.timeStamp]];
NSString * exportVideoPath = [NSString stringWithFormat:@"%@/Edits/%@.MOV", eventFolder, [StringUtils stringForDate:[NSDate date]]];
video.path = exportVideoPath;
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetHighestQuality];
NSURL *exportURL = [NSURL fileURLWithPath:exportVideoPath];
exportSession.outputURL = exportURL;
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
[exportSession exportAsynchronouslyWithCompletionHandler:^
{
switch (exportSession.status)
{
case AVAssetExportSessionStatusFailed:
{
NSLog (#"FAIL");
[self performSelectorOnMainThread:#selector (doPostExportFailed:)
withObject:nil
waitUntilDone:NO];
break;
}
case AVAssetExportSessionStatusCompleted:
{
NSLog (#"SUCCESS");
[self performSelectorOnMainThread:#selector (doPostExportSuccess:)
withObject:nil
waitUntilDone:NO];
break;
}
case AVAssetExportSessionStatusCancelled:
{
NSLog (#"CANCELED");
[self performSelectorOnMainThread:#selector (doPostExportCancelled:)
withObject:nil
waitUntilDone:NO];
break;
}
};
}];
}
I did not get any reply. I got it working by adding one line of code:
exportSession.shouldOptimizeForNetworkUse = YES;
before
[exportSession exportAsynchronouslyWithCompletionHandler:^
I'm not sure why this fixes the problem, since it seems totally unrelated to it, but I was able to export a dozen compositions with up to 5 extra audio tracks without problems.
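In context, the export setup then looks roughly like this (a sketch of the save method above with the extra line added):
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition
                                                                       presetName:AVAssetExportPresetHighestQuality];
exportSession.outputURL = exportURL;
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
exportSession.shouldOptimizeForNetworkUse = YES; // the added line
[exportSession exportAsynchronouslyWithCompletionHandler:^{
    // handle exportSession.status as before
}];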
I hope this helps other people who have been scratching their heads for days.
Cheers
Using the new Assets Library framework available in iOS 4, I see that I can get the URL for a given video using UIImagePickerControllerReferenceURL. The URL returned is in the following format:
assets-library://asset/asset.M4V?id=1000000004&ext=M4V
I am trying to upload this video to a website, so as a quick proof of concept I am trying the following:
NSData *data = [NSData dataWithContentsOfURL:videourl];
[data writeToFile:tmpfile atomically:NO];
The data object is never initialized in this case. Has anyone managed to access the URL directly via the new Assets Library? Thanks for your help.
I use the following category on ALAsset:
static const NSUInteger BufferSize = 1024*1024;
@implementation ALAsset (Export)
- (BOOL) exportDataToURL: (NSURL*) fileURL error: (NSError**) error
{
[[NSFileManager defaultManager] createFileAtPath:[fileURL path] contents:nil attributes:nil];
NSFileHandle *handle = [NSFileHandle fileHandleForWritingToURL:fileURL error:error];
if (!handle) {
return NO;
}
ALAssetRepresentation *rep = [self defaultRepresentation];
uint8_t *buffer = calloc(BufferSize, sizeof(*buffer));
NSUInteger offset = 0, bytesRead = 0;
do {
@try {
bytesRead = [rep getBytes:buffer fromOffset:offset length:BufferSize error:error];
[handle writeData:[NSData dataWithBytesNoCopy:buffer length:bytesRead freeWhenDone:NO]];
offset += bytesRead;
} @catch (NSException *exception) {
free(buffer);
return NO;
}
} while (bytesRead > 0);
free(buffer);
return YES;
}
@end
This is not the best way to do this. I am answering this question in case another SO user comes across the same issue.
Basically my need was to be able to spool the video file to a temp file so I could upload it to a website using ASIHTTPFormDataRequest. There is probably a way of streaming from the asset URL to the ASIHTTPFormDataRequest upload, but I could not figure it out. Instead I wrote the following function to drop the file into a temp file to add to ASIHTTPFormDataRequest.
+(NSString*) videoAssetURLToTempFile:(NSURL*)url
{
NSString * surl = [url absoluteString];
NSString * ext = [surl substringFromIndex:[surl rangeOfString:@"ext="].location + 4];
NSTimeInterval ti = [[NSDate date] timeIntervalSinceReferenceDate];
NSString * filename = [NSString stringWithFormat:@"%f.%@", ti, ext];
NSString * tmpfile = [NSTemporaryDirectory() stringByAppendingPathComponent:filename];
ALAssetsLibraryAssetForURLResultBlock resultblock = ^(ALAsset *myasset)
{
ALAssetRepresentation * rep = [myasset defaultRepresentation];
NSUInteger size = [rep size];
const int bufferSize = 8192;
NSLog(#"Writing to %#",tmpfile);
FILE* f = fopen([tmpfile cStringUsingEncoding:1], "wb+");
if (f == NULL) {
NSLog(#"Can not create tmp file.");
return;
}
Byte * buffer = (Byte*)malloc(bufferSize);
int read = 0, offset = 0, written = 0;
NSError* err;
if (size != 0) {
do {
read = [rep getBytes:buffer
fromOffset:offset
length:bufferSize
error:&err];
written = fwrite(buffer, sizeof(char), read, f);
offset += read;
} while (read != 0);
}
fclose(f);
};
ALAssetsLibraryAccessFailureBlock failureblock = ^(NSError *myerror)
{
NSLog(#"Can not get asset - %#",[myerror localizedDescription]);
};
if(url)
{
ALAssetsLibrary* assetslibrary = [[[ALAssetsLibrary alloc] init] autorelease];
[assetslibrary assetForURL:url
resultBlock:resultblock
failureBlock:failureblock];
}
return tmpfile;
}
Here is a clean Swift solution to get videos as NSData.
It uses the Photos framework, since ALAssetsLibrary is deprecated as of iOS 9:
IMPORTANT
The Assets Library framework is deprecated as of iOS 9.0. Instead, use the Photos framework, which in iOS 8.0 and later provides more features and better performance for working with a user's photo library. For more information, see Photos Framework Reference.
import Photos
func imagePickerController(picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [String : AnyObject]) {
self.dismissViewControllerAnimated(true, completion: nil)
if let referenceURL = info[UIImagePickerControllerReferenceURL] as? NSURL {
let fetchResult = PHAsset.fetchAssetsWithALAssetURLs([referenceURL], options: nil)
if let phAsset = fetchResult.firstObject as? PHAsset {
PHImageManager.defaultManager().requestAVAssetForVideo(phAsset, options: PHVideoRequestOptions(), resultHandler: { (asset, audioMix, info) -> Void in
if let asset = asset as? AVURLAsset {
let videoData = NSData(contentsOfURL: asset.URL)
// optionally, write the video to the temp directory
let videoPath = NSTemporaryDirectory() + "tmpMovie.MOV"
let videoURL = NSURL(fileURLWithPath: videoPath)
let writeResult = videoData?.writeToURL(videoURL, atomically: true)
if let writeResult = writeResult where writeResult {
print("success")
}
else {
print("failure")
}
}
})
}
}
}
There you go...
AVAssetExportSession* m_session=nil;
-(void)export:(ALAsset*)asset withHandler:(void (^)(NSURL* url, NSError* error))handler
{
ALAssetRepresentation* representation=asset.defaultRepresentation;
m_session=[AVAssetExportSession exportSessionWithAsset:[AVURLAsset URLAssetWithURL:representation.url options:nil] presetName:AVAssetExportPresetPassthrough];
m_session.outputFileType=AVFileTypeQuickTimeMovie;
m_session.outputURL=[NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:[NSString stringWithFormat:@"%f.mov", [NSDate timeIntervalSinceReferenceDate]]]];
[m_session exportAsynchronouslyWithCompletionHandler:^
{
if (m_session.status!=AVAssetExportSessionStatusCompleted)
{
NSError* error=m_session.error;
m_session=nil;
handler(nil,error);
return;
}
NSURL* url=m_session.outputURL;
m_session=nil;
handler(url,nil);
}];
}
You can use a different preset name if you wish to re-encode the movie (AVAssetExportPresetMediumQuality, for example).
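For example, swapping the preset name in the call above (this is the only change; everything else stays the same):
m_session = [AVAssetExportSession exportSessionWithAsset:[AVURLAsset URLAssetWithURL:representation.url options:nil]
                                              presetName:AVAssetExportPresetMediumQuality]; // re-encodes instead of passthrough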
Here is the Objective-C version of Alonzo's answer, using the Photos framework:
-(NSURL*)createVideoCopyFromReferenceUrl:(NSURL*)inputUrlFromVideoPicker{
NSURL __block *videoURL;
PHFetchResult *phAssetFetchResult = [PHAsset fetchAssetsWithALAssetURLs:@[inputUrlFromVideoPicker] options:nil];
PHAsset *phAsset = [phAssetFetchResult firstObject];
dispatch_group_t group = dispatch_group_create();
dispatch_group_enter(group);
[[PHImageManager defaultManager] requestAVAssetForVideo:phAsset options:nil resultHandler:^(AVAsset *asset, AVAudioMix *audioMix, NSDictionary *info) {
if ([asset isKindOfClass:[AVURLAsset class]]) {
NSURL *url = [(AVURLAsset *)asset URL];
NSLog(#"Final URL %#",url);
NSData *videoData = [NSData dataWithContentsOfURL:url];
// optionally, write the video to the temp directory
NSString *videoPath = [NSTemporaryDirectory() stringByAppendingPathComponent:[NSString stringWithFormat:@"%f.mp4", [NSDate timeIntervalSinceReferenceDate]]];
videoURL = [NSURL fileURLWithPath:videoPath];
BOOL writeResult = [videoData writeToURL:videoURL atomically:true];
if(writeResult) {
NSLog(#"video success");
}
else {
NSLog(#"video failure");
}
dispatch_group_leave(group);
// use URL to get file content
}
}];
dispatch_group_wait(group, DISPATCH_TIME_FOREVER);
return videoURL;
}
This is based on Zoul's answer. Thanks!
Similar code in Xamarin C#:
IntPtr buffer = CFAllocator.Malloc.Allocate(representation.Size);
NSError error;
nuint buffered = representation.GetBytes(buffer, Convert.ToInt64(0.0),Convert.ToUInt32(representation.Size),out error);
NSData sourceData = NSData.FromBytesNoCopy(buffer,buffered,true);
NSFileManager fileManager = NSFileManager.DefaultManager;
NSFileAttributes attr = NSFileAttributes.FromDictionary(NSDictionary.FromFile(outputPath));
fileManager.CreateFile(outputPath, sourceData,attr);