Please tell me where the leak is in this code...
// Here I create a video from images in the Documents directory
- (void) testCompressionSession:(NSString *)path
{
if ([[NSFileManager defaultManager] fileExistsAtPath:path]) {
[[NSFileManager defaultManager] removeItemAtPath:path error:nil];
}
NSArray *array = [dictInfo objectForKey:@"sortedKeys"];
NSString *betaCompressionDirectory = path;
NSError *error = nil;
unlink([betaCompressionDirectory UTF8String]);
NSLog(#"array = %#",array);
NSData *imgDataTmp = [NSData dataWithContentsOfFile:[projectPath stringByAppendingPathComponent:[array objectAtIndex:0]]];
NSLog(#"link : %#",[projectPath stringByAppendingPathComponent:[array objectAtIndex:0]]);
CGSize size = CGSizeMake([UIImage imageWithData:imgDataTmp].size.width, [UIImage imageWithData:imgDataTmp].size.height);
//----initialize compression engine
NSLog(#"size : w : %f, h : %f",size.width,size.height);
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:betaCompressionDirectory]
fileType:AVFileTypeQuickTimeMovie
error:&error];
NSParameterAssert(videoWriter);
if(error)
NSLog(#"error = %#", [error localizedDescription]);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:size.width], AVVideoWidthKey,
[NSNumber numberWithInt:size.height], AVVideoHeightKey, nil];
AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
NSParameterAssert(writerInput);
NSParameterAssert([videoWriter canAddInput:writerInput]);
if ([videoWriter canAddInput:writerInput])
NSLog(#"I can add this input");
else
NSLog(#"i can't add this input");
[videoWriter addInput:writerInput];
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
dispatch_queue_t dispatchQueue = dispatch_queue_create("mediaInputQueue", NULL);
[writerInput requestMediaDataWhenReadyOnQueue:dispatchQueue usingBlock:^{
//BOOL isEffect = NO;
int i = 0;
float totalTime = 0.0f;
float nextTime = 0;
if ([writerInput isReadyForMoreMediaData]) {
while (1)
{
if (i <= [array count] && i > 0) {
nextTime = [[dictInfo objectForKey:[array objectAtIndex:i-1]] floatValue];
}
totalTime += i == 0 ? 0 : nextTime;
CMTime presentTime=CMTimeMake(totalTime, 1);
printf("presentTime : %f ",CMTimeGetSeconds(presentTime));
if (i >= [array count])
{
NSData *imgData = [NSData dataWithContentsOfFile:[projectPath stringByAppendingPathComponent:[array objectAtIndex:i-1]]];
UIImage* tmpImg = [UIImage imageWithData:imgData];
tmpImg = [self imageWithImage:tmpImg scaledToSize:size];
while ( !writerInput.readyForMoreMediaData)
{
sleep(0.01);
}
CVPixelBufferRef buffer = NULL;
buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:[tmpImg CGImage] size:size];
[adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(totalTime-nextTime+(nextTime/2.0), 1)];
NSLog(#"%f",totalTime-nextTime+(nextTime/2.0));
[writerInput markAsFinished];
[videoWriter finishWriting];
//CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
[videoWriter release];
break;
} else {
NSData *imgData = [NSData dataWithContentsOfFile:[projectPath stringByAppendingPathComponent:[array objectAtIndex:i]]];
UIImage* tmpImg = [UIImage imageWithData:imgData];
//tmpImg = [self imageWithImage:tmpImg scaledToSize:size];
//UIImageWriteToSavedPhotosAlbum(tmpImg, nil, nil, nil);
while (!adaptor.assetWriterInput.readyForMoreMediaData && !writerInput.readyForMoreMediaData)
{
sleep(0.01);
}
CVPixelBufferRef buffer = NULL;
buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:[tmpImg CGImage] size:size];
if (buffer)
{
if(![adaptor appendPixelBuffer:buffer withPresentationTime:presentTime])
NSLog(#"FAIL");
else
NSLog(#"Success:%d",i);
CVPixelBufferRelease(buffer);
}
}
i++;
}
}
}];
// And here I create a CVPixelBufferRef from a CGImageRef
- (CVPixelBufferRef )pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size
{
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil];
CVPixelBufferRef pxbuffer = NULL;
CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height, kCVPixelFormatType_32ARGB, (CFDictionaryRef) options, &pxbuffer);
NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
NSParameterAssert(pxdata != NULL);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8, 4*size.width, rgbColorSpace, kCGImageAlphaPremultipliedFirst);
NSParameterAssert(context);
CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
return pxbuffer;
The leak log is:
CVObject CFRetain 00:37.957.985 2 0x1ecae0 0 CoreVideo CVPixelBufferPool::createPixelBuffer(__CFAllocator const*, __CFDictionary const*, int*)
Malloc 96 Bytes Malloc 00:40.015.872 1 0x1f0750 96 CoreVideo CVBuffer::init()
CVPixelBuffer Malloc 00:40.969.716 1 0x1f2570 96 CoreVideo CVObject::alloc(unsigned long, __CFAllocator const*, unsigned long, unsigned long)
Look here:
CVPixelBufferRef buffer = NULL;
CVPixelBufferPoolCreatePixelBuffer(NULL, adaptor.pixelBufferPool, &buffer);
CVPixelBufferLockBaseAddress(buffer, 0);
buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:[tmpImg CGImage] size:size];
First a pixel buffer gets created and its address put into the buffer variable; then the same variable gets overwritten by pixelBufferFromCGImage, so its previous content can no longer be released.
EDIT
you've just removed the code I referred to, so my answer is no longer applicable.
Now this part:
CVPixelBufferRef buffer = NULL;
buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:[tmpImg CGImage] size:size];
[adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(totalTime-nextTime+(nextTime/2.0), 1)];
NSLog(#"%f",totalTime-nextTime+(nextTime/2.0));
...
You have a commented-out CVPixelBufferPoolRelease(adaptor.pixelBufferPool), which is okay, since in this version you have no pixel buffer pool, but what I miss here is a call to CVPixelBufferRelease(buffer).
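For reference, the append path with the missing release added would look roughly like this (a minimal sketch using the variables from your code):

CVPixelBufferRef buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:[tmpImg CGImage] size:size];
if (buffer)
{
    [adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(totalTime-nextTime+(nextTime/2.0), 1)];
    // pixelBufferFromCGImage returns a buffer created with CVPixelBufferCreate,
    // so the caller owns it and must release it after appending:
    CVPixelBufferRelease(buffer);
}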
Related
I am trying to create a movie from some pictures. It works just fine with HD pictures ({720, 1280}) or lower resolutions, but when I try to create the movie with full HD pictures ({1080, 1920}), the video is scrambled. Here is a link to see how it looks: http://www.youtube.com/watch?v=BfYldb8e_18 . Do you have any ideas what I may be doing wrong?
- (void) createMovieWithOptions:(NSDictionary *) options
{
@autoreleasepool {
NSString *path = [options valueForKey:@"path"];
CGSize size = [(NSValue *)[options valueForKey:@"size"] CGSizeValue];
NSArray *imageArray = [options valueForKey:@"pictures"];
NSInteger recordingFPS = [[options valueForKey:@"fps"] integerValue];
BOOL success=YES;
NSError *error = nil;
AVAssetWriter *assetWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
fileType:AVFileTypeQuickTimeMovie
error:&error];
NSParameterAssert(assetWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithFloat:size.width], AVVideoWidthKey,
[NSNumber numberWithFloat:size.height], AVVideoHeightKey,
nil];
AVAssetWriterInput *videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];
// Configure settings for the pixel buffer adaptor.
NSDictionary* bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
sourcePixelBufferAttributes:bufferAttributes];
NSParameterAssert(videoWriterInput);
NSParameterAssert([assetWriter canAddInput:videoWriterInput]);
videoWriterInput.expectsMediaDataInRealTime = NO;
[assetWriter addInput:videoWriterInput];
//Start a session:
[assetWriter startWriting];
[assetWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer = NULL;
//convert uiimage to CGImage.
int frameCount = 0;
float progress = 0;
float progressFromFrames = _progressView.progress; //only for create iflipbook movie
for(UIImage * img in imageArray)
{
if([[NSThread currentThread] isCancelled])
{
[NSThread exit];
}
[condCreateMovie lock];
if(isCreateMoviePaused)
{
[condCreateMovie wait];
}
uint64_t totalFreeSpace=[Utils getFreeDiskspace];
if(((totalFreeSpace/1024ll)/1024ll)<50)
{
success=NO;
break;
}
// @autoreleasepool {
NSLog(#"size:%#",NSStringFromCGSize(img.size));
buffer = [[MovieWritter sharedMovieWritter] pixelBufferFromCGImage:[img CGImage] andSize:size];
BOOL append_ok = NO;
int j = 0;
while (!append_ok && j < 60)
{
if(adaptor.assetWriterInput.readyForMoreMediaData)
{
CMTime frameTime = CMTimeMake(frameCount, recordingFPS);
append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
CVPixelBufferRelease(buffer);
[NSThread sleepForTimeInterval:0.1];
if(isCreatingiFlipBookFromImported)
progress = (float)frameCount/(float)[imageArray count]/2.0 + progressFromFrames;
else
progress = (float)frameCount/(float)[imageArray count];
[[NSNotificationCenter defaultCenter] postNotificationName:@"movieCreationProgress" object:[NSNumber numberWithFloat:progress]];
}
else
{
[NSThread sleepForTimeInterval:0.5];
}
j++;
}
if (!append_ok)
{
NSLog(#"error appending image %d times %d\n", frameCount, j);
}
frameCount++;
[condCreateMovie unlock];
}
//Finish the session:
[videoWriterInput markAsFinished];
[assetWriter finishWriting];
NSDictionary *dict = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:success], @"success",
path, @"path", nil];
[[NSNotificationCenter defaultCenter] postNotificationName:@"movieCreationFinished" object:dict];
}
}
Edit: here is the code for [[MovieWritter sharedMovieWritter] pixelBufferFromCGImage:]
- (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image andSize:(CGSize) size
{
@autoreleasepool {
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
nil];
CVPixelBufferRef pxbuffer = NULL;
CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width,
size.height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef) options,
&pxbuffer);
NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
NSParameterAssert(pxdata != NULL);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata, size.width,
size.height, 8, 4*size.width, rgbColorSpace,
kCGImageAlphaNoneSkipFirst);
NSParameterAssert(context);
CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
CGImageGetHeight(image)), image);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
return pxbuffer;
}
}
I had the same problem and this answer resolved it: the size of the video must be a multiple of 16.
Pretty sure that this is either a HW limitation or a bug. Please file a Radar.
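If you want to keep arbitrary source images, one workaround is to round the requested dimensions up to the nearest multiple of 16 before configuring the writer. A minimal sketch (the rounding helper is mine, not from the original code):

// Round a dimension up to the nearest multiple of 16 so the H.264
// hardware encoder gets a size it is known to handle.
static inline NSInteger roundUpToMultipleOf16(NSInteger dimension) {
    return ((dimension + 15) / 16) * 16;
}

CGSize encodeSize = CGSizeMake(roundUpToMultipleOf16((NSInteger)size.width),
                               roundUpToMultipleOf16((NSInteger)size.height));
// 1080 is not divisible by 16, so {1080, 1920} becomes {1088, 1920}.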
how about something like this to get pixel buffer
// you could use a CGImageRef here instead
CFDataRef imageData= CGDataProviderCopyData(CGImageGetDataProvider(imageView.image.CGImage));
NSLog (#"copied image data");
cvErr = CVPixelBufferCreateWithBytes(kCFAllocatorDefault,
FRAME_WIDTH,
FRAME_HEIGHT,
kCVPixelFormatType_32BGRA,
(void*)CFDataGetBytePtr(imageData),
CGImageGetBytesPerRow(imageView.image.CGImage),
NULL,
NULL,
NULL,
&pixelBuffer);
NSLog (#"CVPixelBufferCreateWithBytes returned %d", cvErr);
CFAbsoluteTime thisFrameWallClockTime = CFAbsoluteTimeGetCurrent();
CFTimeInterval elapsedTime = thisFrameWallClockTime - firstFrameWallClockTime;
NSLog (#"elapsedTime: %f", elapsedTime);
CMTime presentationTime = CMTimeMake(elapsedTime * TIME_SCALE, TIME_SCALE);
// write the sample
BOOL appended = [assetWriterPixelBufferAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:presentationTime];
CVPixelBufferRelease(pixelBuffer);
CFRelease(imageData);
if (appended) {
NSLog (#"appended sample at time %lf", CMTimeGetSeconds(presentationTime));
} else {
NSLog (#"failed to append");
[self stopRecording];
self.startStopButton.selected = NO;
}
You may also want to set the capture session preset, although AVCaptureSessionPresetHigh is usually suitable and is the default.
/*
Constants to define capture setting presets using the sessionPreset property.
NSString *const AVCaptureSessionPresetPhoto;
NSString *const AVCaptureSessionPresetHigh;
NSString *const AVCaptureSessionPresetMedium;
NSString *const AVCaptureSessionPresetLow;
NSString *const AVCaptureSessionPreset352x288;
NSString *const AVCaptureSessionPreset640x480;
NSString *const AVCaptureSessionPreset1280x720;
NSString *const AVCaptureSessionPreset1920x1080;
NSString *const AVCaptureSessionPresetiFrame960x540;
NSString *const AVCaptureSessionPresetiFrame1280x720;
*/
//set it like this
self.captureSession.sessionPreset = AVCaptureSessionPreset1920x1080;
// or like this when you define the AVCaptureSession
[self.captureSession setSessionPreset:AVCaptureSessionPreset1920x1080];
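Since not every device supports every preset, you may want to guard the assignment (a small sketch):

// Check support before assigning; fall back to a preset every device has.
if ([self.captureSession canSetSessionPreset:AVCaptureSessionPreset1920x1080]) {
    self.captureSession.sessionPreset = AVCaptureSessionPreset1920x1080;
} else {
    self.captureSession.sessionPreset = AVCaptureSessionPresetHigh;
}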
I am relatively new to programming, and although I am OK with normal functions, I am completely new to video editing.
So I have managed to find some code online to do the jobs shown below:
- (void)writeImagesAsMovie:(NSArray *)array {
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDirectory, YES);
NSString *documentDirectory = [paths objectAtIndex:0];
NSString *saveLocation = [documentDirectory stringByAppendingString:@"/temp.mov"];
if ([[NSFileManager defaultManager] fileExistsAtPath:saveLocation]) {
[[NSFileManager defaultManager] removeItemAtPath:saveLocation error:NULL];
}
UIImage *first = [array objectAtIndex:0];
CGSize frameSize = first.size;
NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:saveLocation] fileType:AVFileTypeQuickTimeMovie
error:&error];
if(error) {
NSLog(#"error creating AssetWriter: %#",[error description]);
}
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:frameSize.width], AVVideoWidthKey,
[NSNumber numberWithInt:frameSize.height], AVVideoHeightKey,
nil];
AVAssetWriterInput *writerInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];
NSMutableDictionary *attributes = [[NSMutableDictionary alloc] init];
[attributes setObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB] forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
[attributes setObject:[NSNumber numberWithUnsignedInt:frameSize.width] forKey:(NSString*)kCVPixelBufferWidthKey];
[attributes setObject:[NSNumber numberWithUnsignedInt:frameSize.height] forKey:(NSString*)kCVPixelBufferHeightKey];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
sourcePixelBufferAttributes:attributes];
[videoWriter addInput:writerInput];
// fixes all errors
writerInput.expectsMediaDataInRealTime = YES;
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer = NULL;
buffer = [self pixelBufferFromCGImage:[first CGImage]];
BOOL result = [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
if (result == NO) // fails on 3GS, but works on iPhone 4
NSLog(@"failed to append buffer");
if(buffer) {
CVBufferRelease(buffer);
}
//int reverseSort = NO;
NSArray *newArray = array;
int fps = 10;
int i = 0;
for (UIImage *image in newArray)
{
[NSThread sleepForTimeInterval:0.02];
if (adaptor.assetWriterInput.readyForMoreMediaData) {
i++;
CMTime frameTime = CMTimeMake(1, fps);
CMTime lastTime = CMTimeMake(i, fps);
CMTime presentTime = CMTimeAdd(lastTime, frameTime);
UIImage *imgFrame = image;//[UIImage imageWithContentsOfFile:filePath] ;
buffer = [self pixelBufferFromCGImage:[imgFrame CGImage]];
BOOL result = [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
if (result == NO) // fails on 3GS, but works on iPhone 4
{
NSLog(@"failed to append buffer");
NSLog(@"The error is %@", [videoWriter error]);
[NSThread sleepForTimeInterval:0.5];
}
if(buffer) {
CVBufferRelease(buffer);
}
} else {
NSLog(#"error");
i--;
}
}
//Finish the session:
[writerInput markAsFinished];
[videoWriter finishWriting];
CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
NSLog(#"Movie created successfully");
}
- (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image
{
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
nil];
CVPixelBufferRef pxbuffer = NULL;
CVPixelBufferCreate(kCFAllocatorDefault, CGImageGetWidth(image),
CGImageGetHeight(image), kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef) options,
&pxbuffer);
CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata, CGImageGetWidth(image),
CGImageGetHeight(image), 8, 4*CGImageGetWidth(image), rgbColorSpace,
kCGImageAlphaNoneSkipFirst);
CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
// CGAffineTransform flipVertical = CGAffineTransformMake(
// 1, 0, 0, -1, 0, CGImageGetHeight(image)
// );
// CGContextConcatCTM(context, flipVertical);
// CGAffineTransform flipHorizontal = CGAffineTransformMake(
// -1.0, 0.0, 0.0, 1.0, CGImageGetWidth(image), 0.0
// );
//
// CGContextConcatCTM(context, flipHorizontal);
CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
CGImageGetHeight(image)), image);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
return pxbuffer;
}
But the problem I am having is that the output video is somehow corrupted (it does play, although it has the funny lines shown below):
I would be so grateful for any help
Many Thanks
Thomas
I have been seeing problems with the H264 video encoding hardware where it can corrupt input that does not match a known aspect ratio. For example, my testing shows that if one video dimension is smaller than 128 pixels, the video will not encode.
What I have seen working is 128x128, 192x128, 240x160, 480x320, and others.
See this page on aspect ratios
P.S.
You will likely want to use AVAssetWriterInputPixelBufferAdaptor, since it contains a pixel buffer pool that you can use via CVPixelBufferPoolCreatePixelBuffer(). Also, you will want to assert(adaptor.pixelBufferPool) after calling startSessionAtSourceTime: to ensure that your adaptor can write to the writer.
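A minimal sketch of that pattern, assuming the writer and adaptor from the code above:

[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
// The adaptor's pool only exists once the session has started.
assert(adaptor.pixelBufferPool);

CVPixelBufferRef buffer = NULL;
// Recycling buffers from the pool avoids a fresh CVPixelBufferCreate per frame.
CVReturn status = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault,
                                                     adaptor.pixelBufferPool,
                                                     &buffer);
if (status == kCVReturnSuccess && buffer) {
    // ... render a frame into the buffer, append it, then release it:
    CVPixelBufferRelease(buffer);
}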
I'm trying to create a video from a single image, and save it to my photos library, I've been googling around for ages - and cannot find a solution.
I have this code:
@autoreleasepool {
NSString *path = [NSHomeDirectory() stringByAppendingPathComponent:[NSString stringWithFormat:@"Documents/movie2.mp4"]];
UIImage *img = [UIImage imageWithData:[[[self imageDataArrya] objectAtIndex:0] imageData]];
[self writeImageAsMovie:img toPath:path size:CGSizeMake(640, 960) duration:10];
UISaveVideoAtPathToSavedPhotosAlbum(path, self, @selector(video:didFinishSavingWithError:contextInfo:), nil);
}
I call the above-mentioned method in a background thread. This is the code for writeImageAsMovie:
- (void)writeImageAsMovie:(UIImage*)image toPath:(NSString*)path size:(CGSize)size duration:(int)duration {
NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:path] fileType:AVFileTypeQuickTimeMovie
error:&error];
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:size.width], AVVideoWidthKey,
[NSNumber numberWithInt:size.height], AVVideoHeightKey,
nil];
[self setInput:[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings]];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:input
sourcePixelBufferAttributes:nil];
[videoWriter addInput:input];
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer = [self pixelBufferFromCGImage:image.CGImage];
[adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
[adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(duration-1, 2)];
[input markAsFinished];
[videoWriter endSessionAtSourceTime:CMTimeMake(duration, 2)];
[videoWriter finishWriting];
}
The utility method for converting an image to a CVPixelBufferRef:
- (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image {
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
nil];
CVPixelBufferRef pxbuffer = NULL;
CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
self.view.frame.size.width,
self.view.frame.size.height,
kCVPixelFormatType_32ARGB,
(__bridge CFDictionaryRef) options,
&pxbuffer);
CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata, self.view.frame.size.width,
self.view.frame.size.height, 8, 4*self.view.frame.size.width, rgbColorSpace,
kCGImageAlphaNoneSkipFirst);
CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
CGImageGetHeight(image)), image);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
return pxbuffer;
}
Now if I try to run the code from the Simulator, it gives me an error saying that the data is corrupt.
If I run it on my device, it saves a 2-second video to my photo library, but it's only green; my image isn't in there.
Any help will be appreciated :)
I totally got this working - sorry I didn't see your reply before today.
This is what I used:
Create a Temp File
NSString *path = [NSHomeDirectory() stringByAppendingPathComponent:[NSString stringWithFormat:@"Documents/flipimator-tempfile.mp4"]];
//overwrites it if it already exists.
if([fileManager fileExistsAtPath:path])
[fileManager removeItemAtPath:path error:NULL];
Call the export images method to save images to the temp file:
[self exportImages:frames
asVideoToPath:path
withFrameSize:imageSize
framesPerSecond:fps];
Save the temp file to the photo album:
UISaveVideoAtPathToSavedPhotosAlbum(path, self, @selector(video:didFinishSavingWithError:contextInfo:), nil);
- (void)video:(NSString *)videoPath didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo {
NSLog(@"Finished saving video with error: %@", error);
UIAlertView *alert = [[UIAlertView alloc]initWithTitle:@"Done"
message:@"Movie successfully exported."
delegate:nil
cancelButtonTitle:@"OK"
otherButtonTitles:nil, nil];
[alert show];
}
Code for the exportImages method:
- (void)exportImages:(NSArray *)imageArray
asVideoToPath:(NSString *)path
withFrameSize:(CGSize)imageSize
framesPerSecond:(NSUInteger)fps {
NSLog(#"Start building video from defined frames.");
NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:path] fileType:AVFileTypeQuickTimeMovie
error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:imageSize.width], AVVideoWidthKey,
[NSNumber numberWithInt:imageSize.height], AVVideoHeightKey,
nil];
AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
sourcePixelBufferAttributes:nil];
NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
videoWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:videoWriterInput];
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer = NULL;
//convert uiimage to CGImage.
int frameCount = 0;
for(UIImage * img in imageArray) {
buffer = [self pixelBufferFromCGImage:[img CGImage] andSize:imageSize];
BOOL append_ok = NO;
int j = 0;
while (!append_ok && j < 30) {
if (adaptor.assetWriterInput.readyForMoreMediaData) {
//print out status::
NSString *border = @"**************************************************";
NSLog(@"\n%@\nProcessing video frame (%d,%d).\n%@", border, frameCount, [imageArray count], border);
CMTime frameTime = CMTimeMake(frameCount,(int32_t) fps);
append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
if(!append_ok){
NSError *error = videoWriter.error;
if(error!=nil) {
NSLog(#"Unresolved error %#,%#.", error, [error userInfo]);
}
}
}
else {
printf("adaptor not ready %d, %d\n", frameCount, j);
[NSThread sleepForTimeInterval:0.1];
}
j++;
}
if (!append_ok) {
printf("error appending image %d times %d\n, with error.", frameCount, j);
}
frameCount++;
}
//Finish the session:
[videoWriterInput markAsFinished];
[videoWriter finishWriting];
NSLog(#"Write Ended");
}
Parameters to the method:
imageArray : NSArray of UIImage.
path : Temporary path to write to while you process (temp defined above).
imageSize : The size of the video in pixels (width, and height).
fps : How many images should be displayed per second in the video.
Hope it helps!
Sorry about the formatting - I'm still very new to StackOverflow.com.
This is where I used the code: http://www.youtube.com/watch?v=DDckJyF2bnA
I have successfully created video from images using the following code
-(void)writeImageAsMovie:(NSArray *)array toPath:(NSString*)path size:(CGSize)size duration:(int)duration
{
NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:path] fileType:AVFileTypeQuickTimeMovie
error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:size.width], AVVideoWidthKey,
[NSNumber numberWithInt:size.height], AVVideoHeightKey,
nil];
AVAssetWriterInput* writerInput = [[AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings] retain];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
sourcePixelBufferAttributes:nil];
NSParameterAssert(writerInput);
NSParameterAssert([videoWriter canAddInput:writerInput]);
[videoWriter addInput:writerInput];
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer = NULL;
buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:0] CGImage]];
[adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
//Write samples:
for (int i = 0;i<[array count]; i++)
{
if([writerInput isReadyForMoreMediaData])
{
NSLog(#"inside for loop %d",i);
CMTime frameTime = CMTimeMake(1, 20);
CMTime lastTime=CMTimeMake(i, 20); //i is from 0 to 24 of the loop above
CMTime presentTime=CMTimeAdd(lastTime, frameTime);
buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:i] CGImage]];
[adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
}
else
{
NSLog(#"error");
i--;
}
}
NSLog(#"outside for loop");
//Finish the session:
[writerInput markAsFinished];
[videoWriter finishWriting];
}
Here I have used CVPixelBufferRef. Instead of this, I want to use the CVPixelBufferPoolRef in conjunction with AVAssetWriterInputPixelBufferAdaptor.
Can anybody provide an example which I can debug and use?
You are passing a nil sourcePixelBufferAttributes, because of which the pixel buffer pool will not get created:
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput sourcePixelBufferAttributes:nil];
Instead pass some attributes, for example:
NSDictionary *bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];
Then you can use the pool to create the pixel buffers, like:
CVPixelBufferPoolCreatePixelBuffer (NULL, adaptor.pixelBufferPool, &pixelBuffer);
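Putting it together, a pool-backed variant of pixelBufferFromCGImage: might look like this (a sketch, assuming adaptor is reachable from the method and the session has already started; error handling kept minimal):

- (CVPixelBufferRef)pooledPixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size
{
    CVPixelBufferRef pxbuffer = NULL;
    // Reuse a buffer from the adaptor's pool instead of calling CVPixelBufferCreate.
    CVReturn status = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault,
                                                         adaptor.pixelBufferPool,
                                                         &pxbuffer);
    if (status != kCVReturnSuccess || pxbuffer == NULL)
        return NULL;
    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8,
                                                 CVPixelBufferGetBytesPerRow(pxbuffer),
                                                 rgbColorSpace, kCGImageAlphaNoneSkipFirst);
    CGContextDrawImage(context, CGRectMake(0, 0, size.width, size.height), image);
    CGContextRelease(context);
    CGColorSpaceRelease(rgbColorSpace);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    // The caller owns the returned buffer and must CVPixelBufferRelease it after appending.
    return pxbuffer;
}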
@Atulkumar V. Jain: great! Good luck ^^
@Brian: you are right, thanks. I corrected it and got it working. Here is the working code (if someone else needs it :-) )
CVPixelBufferRef buffer = NULL;
buffer = [self pixelBufferFromCGImage:[[imagesArray objectAtIndex:0] CGImage]];
CVPixelBufferPoolCreatePixelBuffer (NULL, adaptor_.pixelBufferPool, &buffer);
[adaptor_ appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
__block UInt64 convertedByteCount = 0;
dispatch_queue_t mediaInputQueue = dispatch_queue_create("mediaInputQueue", NULL);
static int i = 1;
int frameNumber = [imagesArray count];
[writerInput requestMediaDataWhenReadyOnQueue:mediaInputQueue usingBlock:^{
while (1){
if (i == frameNumber) {
break;
}
if ([writerInput isReadyForMoreMediaData]) {
CVPixelBufferRef sampleBuffer = [self pixelBufferFromCGImage:[[imagesArray objectAtIndex:i] CGImage]];
NSLog(#"inside for loop %d",i);
CMTime frameTime = CMTimeMake(1, 20);
CMTime lastTime=CMTimeMake(i, 20); //i is from 0 to 19 of the loop above
CMTime presentTime=CMTimeAdd(lastTime, frameTime);
if (sampleBuffer) {
[adaptor_ appendPixelBuffer:sampleBuffer withPresentationTime:presentTime];
i++;
CFRelease(sampleBuffer);
} else {
break;
}
}
}
NSLog (#"done");
[writerInput markAsFinished];
[videoWriter finishWriting];
CVPixelBufferPoolRelease(adaptor_.pixelBufferPool);
[videoWriter release];
[writerInput release];
[imagesArray removeAllObjects];
}];
Instead of using "for", use this code:
dispatch_queue_t mediaInputQueue = dispatch_queue_create("mediaInputQueue", NULL);
[writerInput requestMediaDataWhenReadyOnQueue:mediaInputQueue usingBlock:^{
CVPixelBufferRef buffer = NULL;
buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:0] CGImage]];
CVPixelBufferPoolCreatePixelBuffer (NULL, adaptor.pixelBufferPool, &buffer);
[adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
int i = 1;
while (writerInput.readyForMoreMediaData) {
NSLog(#"inside for loop %d",i);
CMTime frameTime = CMTimeMake(1, 20);
CMTime lastTime=CMTimeMake(i, 20); //i is from 0 to 19 of the loop above
CMTime presentTime=CMTimeAdd(lastTime, frameTime);
if (i >= [array count]) {
buffer = NULL;
}else {
buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:i] CGImage]];
}
//CVBufferRetain(buffer);
if (buffer) {
// append buffer
[adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
i++;
} else {
// done!
//Finish the session:
[writerInput markAsFinished];
[videoWriter finishWriting];
CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
[videoWriter release];
[writerInput release];
NSLog (#"Done");
[imageArray removeAllObjects];
break;
}
}
}];
I got it all working!
Here is the sample code link: git@github.com:RudyAramayo/AVAssetWriterInputPixelBufferAdaptorSample.git
Here is the code you need:
- (void) testCompressionSession
{
CGSize size = CGSizeMake(480, 320);
NSString *betaCompressionDirectory = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/Movie.m4v"];
NSError *error = nil;
unlink([betaCompressionDirectory UTF8String]);
//----initialize compression engine
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:betaCompressionDirectory]
fileType:AVFileTypeQuickTimeMovie
error:&error];
NSParameterAssert(videoWriter);
if(error)
NSLog(#"error = %#", [error localizedDescription]);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:size.width], AVVideoWidthKey,
[NSNumber numberWithInt:size.height], AVVideoHeightKey, nil];
AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
NSParameterAssert(writerInput);
NSParameterAssert([videoWriter canAddInput:writerInput]);
if ([videoWriter canAddInput:writerInput])
NSLog(#"I can add this input");
else
NSLog(#"i can't add this input");
[videoWriter addInput:writerInput];
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
//---
// insert demo debugging code to write the same image repeated as a movie
CGImageRef theImage = [[UIImage imageNamed:@"Lotus.png"] CGImage];
dispatch_queue_t dispatchQueue = dispatch_queue_create("mediaInputQueue", NULL);
int __block frame = 0;
[writerInput requestMediaDataWhenReadyOnQueue:dispatchQueue usingBlock:^{
while ([writerInput isReadyForMoreMediaData])
{
if(++frame >= 120)
{
[writerInput markAsFinished];
[videoWriter finishWriting];
[videoWriter release];
break;
}
CVPixelBufferRef buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:theImage size:size];
if (buffer)
{
if(![adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(frame, 20)])
NSLog(#"FAIL");
else
NSLog(#"Success:%d", frame);
CFRelease(buffer);
}
}
}];
NSLog(#"outside for loop");
}
- (CVPixelBufferRef )pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size
{
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil];
CVPixelBufferRef pxbuffer = NULL;
CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height, kCVPixelFormatType_32ARGB, (CFDictionaryRef) options, &pxbuffer);
// CVReturn status = CVPixelBufferPoolCreatePixelBuffer(NULL, adaptor.pixelBufferPool, &pxbuffer);
NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
NSParameterAssert(pxdata != NULL);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8, 4*size.width, rgbColorSpace, kCGImageAlphaPremultipliedFirst);
NSParameterAssert(context);
CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
return pxbuffer;
}
I have a sequence of images (PNG format) in my iPhone app. It is saved in the app's sandbox doc folder. How do I create an animation file from these images?
I would prefer an animated GIF, but other types like video (mov/avi/etc.) are OK too. I don't need audio, just images. Thanks.
This works for me
-(void)viewDidLoad {
[super viewDidLoad];
// here you can replace with your images
imagesArray=[[NSMutableArray alloc]initWithObjects:@"BALL_EASY.png",@"Rat.png", @"HARD_CARD.png", @"EASY_CARD.png", @"MEDIUM_CARD.png", @"BattleMapSplashScreen.png", @"MENU_Bee.png", nil];
NSString *fileName =@"myMovie.txt";
NSArray *documentPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDir = [documentPaths objectAtIndex:0];
NSString *filePath = [documentsDir stringByAppendingPathComponent:fileName];
[self writeImageAsMovie:imagesArray toPath:filePath size:CGSizeMake(320.0, 440.0)];
}
-(CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image
{
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
nil];
CVPixelBufferRef pxbuffer = NULL;
CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, self.view.frame.size.width,
self.view.frame.size.height, kCVPixelFormatType_32ARGB, (CFDictionaryRef) options,
&pxbuffer);
NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
NSParameterAssert(pxdata != NULL);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata,self.view.frame.size.width,
self.view.frame.size.height, 8, 4*self.view.frame.size.width, rgbColorSpace,
kCGImageAlphaNoneSkipFirst);
NSParameterAssert(context);
CGContextConcatCTM(context, self.view.transform);
CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
CGImageGetHeight(image)), image);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
return pxbuffer;
}
-(void)writeImageAsMovie:(NSArray *)array toPath:(NSString*)path size:(CGSize)size
{
NSMutableDictionary *attributes = [[NSMutableDictionary alloc]init];
[attributes setObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB] forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
[attributes setObject:[NSNumber numberWithUnsignedInt:320] forKey:(NSString*)kCVPixelBufferWidthKey];
[attributes setObject:[NSNumber numberWithUnsignedInt:416] forKey:(NSString*)kCVPixelBufferHeightKey];
NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:path] fileType:AVFileTypeQuickTimeMovie
error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:size.width], AVVideoWidthKey,
[NSNumber numberWithInt:size.height], AVVideoHeightKey,
nil];
AVAssetWriterInput* writerInput = [[AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings] retain];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
sourcePixelBufferAttributes:attributes];
NSParameterAssert(writerInput);
NSParameterAssert([videoWriter canAddInput:writerInput]);
[videoWriter addInput:writerInput];
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer = NULL;
buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:0] CGImage]];
[adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
//Write samples:
for (int i = 0;i<[array count]; i++)
{
if([writerInput isReadyForMoreMediaData])
{
NSLog(#"inside for loop %d",i);
CMTime frameTime = CMTimeMake(1, 20);
CMTime lastTime=CMTimeMake(i, 20); //i is from 0 to 19 of the loop above
CMTime presentTime=CMTimeAdd(lastTime, frameTime);
buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:i] CGImage]];
[adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
if(buffer)
CVBufferRelease(buffer);
}
else
{
NSLog(#"error");
i--;
}
}
//Finish the session:
[writerInput markAsFinished];
[videoWriter finishWriting];
NSURL *pathURL = [NSURL fileURLWithPath:path];
AVURLAsset *url = [[AVURLAsset alloc] initWithURL:pathURL options:nil];
[url release];
CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
[videoWriter release];
[writerInput release];
}