Application crashes when making a video from images - iPhone

Please find the code below.
-(void) writeImagesAsMovie:(NSArray *)array toPath:(NSString*)path numPhoto:(NSInteger)totPics {
ALAsset *asset = [assets objectAtIndex:0]; // "assets" appears to be an ivar; the "array" parameter is unused as posted
ALAssetRepresentation *assetRepresentation = [asset defaultRepresentation];
UIImage *getImage = [UIImage imageWithCGImage:[assetRepresentation fullScreenImage] scale:[assetRepresentation scale] orientation:(UIImageOrientation)[assetRepresentation orientation]];
UIImage *first = [getImage imageByScalingAndCroppingForSize:CGSizeMake(720.0, 960.0)];
CGSize frameSize = CGSizeMake(first.size.width,first.size.height);
NSLog(#"frameSize = %#",NSStringFromCGSize(frameSize));
NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:path] fileType:AVFileTypeQuickTimeMovie
error:&error];
if(error) {
NSLog(#"error creating AssetWriter: %#",[error description]);
}
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:frameSize.width], AVVideoWidthKey,
[NSNumber numberWithInt:frameSize.height], AVVideoHeightKey,
AVVideoScalingModeResizeAspect,AVVideoScalingModeKey,
nil];
AVAssetWriterInput* writerInput = [AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];
CGAffineTransform transform = CGAffineTransformIdentity;
UIImageOrientation orient = first.imageOrientation;
CGSize imageSize = first.size;
switch(orient) {
case UIImageOrientationUp: //EXIF = 1
transform = CGAffineTransformIdentity;
break;
case UIImageOrientationUpMirrored: //EXIF = 2
transform = CGAffineTransformMakeTranslation(imageSize.width, 0.0);
transform = CGAffineTransformScale(transform, -1.0, 1.0);
break;
case UIImageOrientationDown: //EXIF = 3
transform = CGAffineTransformMakeTranslation(imageSize.width, imageSize.height);
transform = CGAffineTransformRotate(transform, M_PI);
break;
case UIImageOrientationDownMirrored: //EXIF = 4
transform = CGAffineTransformMakeTranslation(0.0, imageSize.height);
transform = CGAffineTransformScale(transform, 1.0, -1.0);
break;
case UIImageOrientationLeftMirrored: //EXIF = 5
transform = CGAffineTransformMakeTranslation(imageSize.height, imageSize.width);
transform = CGAffineTransformScale(transform, -1.0, 1.0);
transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
break;
case UIImageOrientationLeft: //EXIF = 6
transform = CGAffineTransformMakeTranslation(0.0, imageSize.width);
transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
break;
case UIImageOrientationRightMirrored: //EXIF = 7
transform = CGAffineTransformMakeScale(-1.0, 1.0);
transform = CGAffineTransformRotate(transform, M_PI / 2.0);
break;
case UIImageOrientationRight: //EXIF = 8
transform = CGAffineTransformMakeTranslation(imageSize.height, 0.0);
transform = CGAffineTransformRotate(transform, M_PI / 2.0);
break;
default:
[NSException raise:NSInternalInconsistencyException format:@"Invalid image orientation"];
}
writerInput.transform = transform;
NSMutableDictionary *attributes = [[NSMutableDictionary alloc] init];
[attributes setObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB] forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
[attributes setObject:[NSNumber numberWithUnsignedInt:frameSize.width] forKey:(NSString*)kCVPixelBufferWidthKey];
[attributes setObject:[NSNumber numberWithUnsignedInt:frameSize.height] forKey:(NSString*)kCVPixelBufferHeightKey];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
sourcePixelBufferAttributes:attributes];
[videoWriter addInput:writerInput];
// fixes all errors
writerInput.expectsMediaDataInRealTime = YES;
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer = NULL;
buffer = [self pixelBufferFromCGImage:[first CGImage]];
BOOL result = [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
if (result == NO)
NSLog(#"failed to append buffer");
if(buffer) {
CVBufferRelease(buffer);
}
int fps = 2;
for(int i=0; i<totPics; i++)
{
if (adaptor.assetWriterInput.readyForMoreMediaData) {
CMTime frameTime = CMTimeMake(1, fps);
CMTime lastTime = CMTimeMake(i, fps);
CMTime presentTime = CMTimeAdd(lastTime, frameTime);
NSLog(#"presentTime = %f",CMTimeGetSeconds(presentTime));
ALAsset *asset = [assets objectAtIndex:i];
ALAssetRepresentation *assetRepresentation = [asset defaultRepresentation];
UIImage *imgGetFrame = [UIImage imageWithCGImage:[assetRepresentation fullScreenImage] scale:[assetRepresentation scale] orientation:(UIImageOrientation)[assetRepresentation orientation]];
UIImage *imgFrame = [imgGetFrame imageByScalingAndCroppingForSize:CGSizeMake(720.0, 960.0)];
buffer = [self pixelBufferFromCGImage:[imgFrame CGImage]];
BOOL result = [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
if (result == NO) // fails on 3GS, but works on iPhone 4
{
NSLog(@"failed to append buffer");
NSLog(@"The error is %@", [videoWriter error]);
}
if(buffer) {
CVBufferRelease(buffer);
}
} else {
NSLog(#"error");
}
}
//Finish the session:
[writerInput markAsFinished];
[videoWriter finishWritingWithCompletionHandler:^{
NSLog(#"Complete");
}];
CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
}
-(CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image {
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
nil];
CVPixelBufferRef pxbuffer = NULL;
CVPixelBufferCreate(kCFAllocatorDefault, CGImageGetWidth(image),
CGImageGetHeight(image), kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef) options,
&pxbuffer);
CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata, CGImageGetWidth(image),
CGImageGetHeight(image), 8, 4*CGImageGetWidth(image), rgbColorSpace,
kCGImageAlphaNoneSkipFirst);
CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
CGImageGetHeight(image)), image);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
return pxbuffer;
}
Image cropping method:
-(UIImage*)imageByScalingAndCroppingForSize:(CGSize)targetSize {
UIImage *sourceImage = self;
UIImage *newImage = nil;
CGSize imageSize = sourceImage.size;
CGFloat width = imageSize.width;
CGFloat height = imageSize.height;
CGFloat targetWidth = targetSize.width;
CGFloat targetHeight = targetSize.height;
CGFloat scaleFactor = 0.0;
CGFloat scaledWidth = targetWidth;
CGFloat scaledHeight = targetHeight;
CGPoint thumbnailPoint = CGPointMake(0.0,0.0);
if (CGSizeEqualToSize(imageSize, targetSize) == NO)
{
CGFloat widthFactor = targetWidth / width;
CGFloat heightFactor = targetHeight / height;
if (widthFactor > heightFactor)
{
scaleFactor = widthFactor; // scale to fit height
}
else
{
scaleFactor = heightFactor; // scale to fit width
}
scaledWidth = width * scaleFactor;
scaledHeight = height * scaleFactor;
// center the image
if (widthFactor > heightFactor)
{
thumbnailPoint.y = (targetHeight - scaledHeight) * 0.5;
}
else
{
if (widthFactor < heightFactor)
{
thumbnailPoint.x = (targetWidth - scaledWidth) * 0.5;
}
}
}
UIGraphicsBeginImageContext(targetSize); // this will crop
CGRect thumbnailRect = CGRectZero;
thumbnailRect.origin = thumbnailPoint;
thumbnailRect.size.width = scaledWidth;
thumbnailRect.size.height = scaledHeight;
[sourceImage drawInRect:thumbnailRect];
newImage = UIGraphicsGetImageFromCurrentImageContext();
if(newImage == nil) {
NSLog(#"could not scale image");
}
//pop the context to get back to the default
UIGraphicsEndImageContext();
return newImage;
}
I pass the images from the user's photo library and add each image after cropping it to 720 x 960.
When I take 100 images, I get a memory warning. When I profile the application in Instruments, it uses around 400 MB. So please help me if anyone has an idea what I am doing wrong.

The problem is that you are using up all the app memory in your video processing loop. You cannot allocate hundreds of images in memory at the same time; the code will crash when run on an iOS device. Please read my blog post on the subject at video_and_memory_usage_on_ios_devices, and then change your for loop so that an autorelease pool is created for each iteration of the loop to fix the runaway memory usage. Also note that kCVPixelFormatType_32ARGB will be very slow; you should use kCVPixelFormatType_32BGRA instead.
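A minimal sketch of that change, assuming the same variables (assets, adaptor, videoWriter, totPics, fps) and helper methods as the question's code. The @autoreleasepool block lets each frame's UIImage and CGImage temporaries be freed at the end of that iteration instead of accumulating until the loop ends:

int fps = 2;
for (int i = 0; i < totPics; i++) {
    @autoreleasepool {
        if (adaptor.assetWriterInput.readyForMoreMediaData) {
            // same presentation time as before: frame i lands at (i+1)/fps seconds
            CMTime presentTime = CMTimeMake(i + 1, fps);
            ALAsset *asset = [assets objectAtIndex:i];
            ALAssetRepresentation *rep = [asset defaultRepresentation];
            UIImage *img = [UIImage imageWithCGImage:[rep fullScreenImage]
                                               scale:[rep scale]
                                         orientation:(UIImageOrientation)[rep orientation]];
            UIImage *frame = [img imageByScalingAndCroppingForSize:CGSizeMake(720.0, 960.0)];
            CVPixelBufferRef buffer = [self pixelBufferFromCGImage:[frame CGImage]];
            if (![adaptor appendPixelBuffer:buffer withPresentationTime:presentTime]) {
                NSLog(@"failed to append buffer: %@", [videoWriter error]);
            }
            if (buffer) {
                CVBufferRelease(buffer);
            }
        }
    }
}

If you switch the buffer attributes to kCVPixelFormatType_32BGRA, note that the CGBitmapContextCreate call in pixelBufferFromCGImage: also needs a matching bitmapInfo (for example kCGBitmapByteOrder32Little | kCGImageAlphaNoneSkipFirst) so the channel order agrees with the buffer format.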

Related

Image not returned by didFinishPickingMediaWithInfo: on iPad

On the iPad, the
- (void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info
method does not give an image. The info dictionary is:
> dic = {
> UIImagePickerControllerMediaType = "public.image";
> UIImagePickerControllerReferenceURL = "assets-library://asset/asset.JPG?id=1D8618CC-CDF1-478C-A36D-455A66501A02&ext=JPG";
> }
So I used the referenceURL, as answered in this question.
My code for that is:
-(void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info {
editPhoto=YES;
[self dismissModalViewControllerAnimated:YES];
NSLog(#"dic = %#",info);
defaultImageView.image = [info valueForKey:#"UIImagePickerControllerOriginalImage"];
if(defaultImageView.image == nil)
{
NSLog(#"image from assets");
NSURL *imageSource = [info objectForKey:#"UIImagePickerControllerReferenceURL"];
[self findLargeImage:imageSource];
}
NSLog(#"image picked in block");
}
-(UIImage*)findLargeImage :(NSURL*)path
{
__block UIImage * largeimage=nil;
ALAssetsLibraryAssetForURLResultBlock resultblock = ^(ALAsset *myasset)
{
ALAssetRepresentation *rep = [myasset defaultRepresentation];
CGImageRef iref = [rep fullResolutionImage];
if (iref) {
largeimage =[UIImage imageWithCGImage:iref];
defaultImageView.image = [ImageOreintation fixOrientation:largeimage];
int width = defaultImageView.image.size.width;
int height = defaultImageView.image.size.height;
int frameWidth = (158.0/height) * width;
if(frameWidth>212)
{
defaultImageView.frame = CGRectMake(26, 10, 212, 158);
}
else
{
defaultImageView.frame = CGRectMake(132-frameWidth/2, 10, frameWidth, 158);
}
NSLog(#"in find large image3");
}
};
NSLog(#"in find large image4");
ALAssetsLibraryAccessFailureBlock failureblock = ^(NSError *myerror)
{
NSLog(#"booya, cant get image - %#",[myerror localizedDescription]);
};
ALAssetsLibrary* assetslibrary = [[ALAssetsLibrary alloc] init];
[assetslibrary assetForURL:path resultBlock:resultblock failureBlock:failureblock];
return largeimage ;
}
I get the image this way, but the image orientation is upside down.
Please help me either get the image directly from the picker, or change the orientation of the image that I get from the URL.
Thank you.
You can try using this category to fix the orientation of the images.
Header File
#import <UIKit/UIKit.h>
@interface UIImage (Orientation)
- (UIImage *)fixOrientation;
@end
Implementation File
#import "UIImage+Orientation.h"
@implementation UIImage (Orientation)
- (UIImage *)fixOrientation {
if (self.imageOrientation == UIImageOrientationUp) return self;
CGAffineTransform transform = CGAffineTransformIdentity;
switch (self.imageOrientation) {
case UIImageOrientationDown:
case UIImageOrientationDownMirrored:
transform = CGAffineTransformTranslate(transform, self.size.width, self.size.height);
transform = CGAffineTransformRotate(transform, M_PI);
break;
case UIImageOrientationLeft:
case UIImageOrientationLeftMirrored:
transform = CGAffineTransformTranslate(transform, self.size.width, 0);
transform = CGAffineTransformRotate(transform, M_PI_2);
break;
case UIImageOrientationRight:
case UIImageOrientationRightMirrored:
transform = CGAffineTransformTranslate(transform, 0, self.size.height);
transform = CGAffineTransformRotate(transform, -M_PI_2);
break;
case UIImageOrientationUp:
case UIImageOrientationUpMirrored:
break;
}
switch (self.imageOrientation) {
case UIImageOrientationUpMirrored:
case UIImageOrientationDownMirrored:
transform = CGAffineTransformTranslate(transform, self.size.width, 0);
transform = CGAffineTransformScale(transform, -1, 1);
break;
case UIImageOrientationLeftMirrored:
case UIImageOrientationRightMirrored:
transform = CGAffineTransformTranslate(transform, self.size.height, 0);
transform = CGAffineTransformScale(transform, -1, 1);
break;
case UIImageOrientationUp:
case UIImageOrientationDown:
case UIImageOrientationLeft:
case UIImageOrientationRight:
break;
}
CGContextRef ctx = CGBitmapContextCreate(NULL, self.size.width, self.size.height,
CGImageGetBitsPerComponent(self.CGImage), 0,
CGImageGetColorSpace(self.CGImage),
CGImageGetBitmapInfo(self.CGImage));
CGContextConcatCTM(ctx, transform);
switch (self.imageOrientation) {
case UIImageOrientationLeft:
case UIImageOrientationLeftMirrored:
case UIImageOrientationRight:
case UIImageOrientationRightMirrored:
CGContextDrawImage(ctx, CGRectMake(0,0,self.size.height,self.size.width), self.CGImage);
break;
default:
CGContextDrawImage(ctx, CGRectMake(0,0,self.size.width,self.size.height), self.CGImage);
break;
}
CGImageRef cgimg = CGBitmapContextCreateImage(ctx);
UIImage *img = [UIImage imageWithCGImage:cgimg];
CGContextRelease(ctx);
CGImageRelease(cgimg);
return img;
}
@end
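A minimal usage sketch (my example, assuming the header above is imported wherever the image is loaded):

#import "UIImage+Orientation.h"
// ...
// returns a copy redrawn so that imageOrientation is UIImageOrientationUp
defaultImageView.image = [largeimage fixOrientation];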
I found the solution to this orientation problem. When taking the image from the ALAssetRepresentation, I had used fullResolutionImage. Instead of that, I used fullScreenImage, and it solved the problem.
Here is the code for that method.
-(UIImage*)findLargeImage :(NSURL*)path
{
__block UIImage * largeimage=nil;
ALAssetsLibraryAssetForURLResultBlock resultblock = ^(ALAsset *myasset)
{
ALAssetRepresentation *rep = [myasset defaultRepresentation];
largeimage = [UIImage imageWithCGImage:[rep fullScreenImage] scale:[rep scale] orientation:UIImageOrientationUp];
};
NSLog(#"in find large image4");
ALAssetsLibraryAccessFailureBlock failureblock = ^(NSError *myerror)
{
NSLog(#"cant get image - %#",[myerror localizedDescription]);
};
ALAssetsLibrary* assetslibrary = [[ALAssetsLibrary alloc] init];
[assetslibrary assetForURL:path resultBlock:resultblock failureBlock:failureblock];
return largeimage ;
}
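One caveat worth noting: assetForURL:resultBlock:failureBlock: may invoke its result block asynchronously, so this method can return before largeimage has been set. A sketch of a completion-based variant (the completion parameter is my addition, not part of the original post):

-(void)findLargeImage:(NSURL *)path completion:(void (^)(UIImage *image))completion
{
    ALAssetsLibrary *assetslibrary = [[ALAssetsLibrary alloc] init];
    [assetslibrary assetForURL:path resultBlock:^(ALAsset *myasset) {
        ALAssetRepresentation *rep = [myasset defaultRepresentation];
        UIImage *img = [UIImage imageWithCGImage:[rep fullScreenImage]
                                           scale:[rep scale]
                                     orientation:UIImageOrientationUp];
        // hand the image back through the block instead of a return value
        if (completion) completion(img);
    } failureBlock:^(NSError *myerror) {
        NSLog(@"cant get image - %@", [myerror localizedDescription]);
        if (completion) completion(nil);
    }];
}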

Capture overlay image without setting drawInRect

I need to capture the overlay image without setting drawInRect. When I set drawInRect, it gives output at the size that was set. I need to take the picture at a new size without using the following code.
- (void)captureStillImageWithOverlay:(UIImage*)overlay
{
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in [[self stillImageOutput] connections]) {
for (AVCaptureInputPort *port in [connection inputPorts]) {
if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
videoConnection = connection;
break;
}
}
if (videoConnection) {
break;
}
}
NSLog(#"about to request a capture from: %#", [self stillImageOutput]);
[[self stillImageOutput] captureStillImageAsynchronouslyFromConnection:videoConnection
completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
CFDictionaryRef exifAttachments = CMGetAttachment(imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
if (exifAttachments) {
NSLog(#"attachements: %#", exifAttachments);
} else {
NSLog(#"no attachments");
}
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
UIImage *image = [[UIImage alloc] initWithData:imageData];
CGSize imageSize = [image size];
CGSize overlaySize = [overlay size];
UIGraphicsBeginImageContext(imageSize);
[image drawInRect:CGRectMake(0, 0, imageSize.width, imageSize.height)];
CGFloat xScaleFactor = imageSize.width / 320;
CGFloat yScaleFactor = imageSize.height / 480;
[overlay drawInRect:CGRectMake(30 * xScaleFactor, 100 * yScaleFactor, overlaySize.width * xScaleFactor, overlaySize.height * yScaleFactor)]; // rect used in AROverlayViewController was (30,100,260,200)
// [overlay drawInRect:CGRectMake(30 * xScaleFactor, 100 * yScaleFactor, overlaySize.width * xScaleFactor, overlaySize.width* yScaleFactor)];
UIImage *combinedImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
// NSData * data = UIImagePNGRepresentation(image);
// [data writeToFile:#"foo.png" atomically:YES];
[self setStillImage:combinedImage];
[image release];
[[NSNotificationCenter defaultCenter] postNotificationName:kImageCapturedSuccessfully object:nil];
}];
}
Try this code:
UIGraphicsBeginImageContext(self.view.bounds.size);
// retrieve the current graphics context
CGContextRef context = UIGraphicsGetCurrentContext();
// render view into context
[self.view.layer renderInContext:context];
// create image from context
UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
image=[self cropImage:image];
UIGraphicsEndImageContext();
- (UIImage *)cropImage:(UIImage *)oldImage
{
CGSize imageSize = oldImage.size;
UIGraphicsBeginImageContextWithOptions(CGSizeMake( imageSize.width,imageSize.height - 150),NO,0.);
[oldImage drawAtPoint:CGPointMake( 0, -80) blendMode:kCGBlendModeCopy alpha:1.];
UIImage *croppedImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return croppedImage;
}
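As written, cropImage: trims rather than rescales: drawing the snapshot at (0, -80) into a context that is 150 points shorter discards the top 80 points and the bottom 70 points of the view, so a 320 x 480 snapshot becomes a 320 x 330 image covering y = 80 to 410 of the original.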

Memory Leak CGBitmapContextCreateImage

Instruments tells me that there is a leak caused by CGBitmapContextCreateImage in my resizedImage method. However, after a lot of research and trial and error, I have come to the conclusion that it is caused somewhere else in the call chain.
The call chain is as follows:
takeFoto -> saveFoto -> setImage_bg -> bolly -> resizedImage
Here is all the related code
-(void)takeFoto
{
[stillImageOutput captureStillImageAsynchronouslyFromConnection:self.videoConnection completionHandler:
^(CMSampleBufferRef imageSampleBuffer, NSError *error)
{
NSData* idata = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];//[od copy];
UIImage *image = [UIImage imageWithData:idata];
CGImageRef cgi = [image CGImage];
CGImageRef cgi2 = CGImageCreateWithImageInRect(cgi, CGRectMake(0, 0, CGImageGetWidth(cgi), CGImageGetHeight(cgi)));
UIImageOrientation iori;
if(self.devOri==UIInterfaceOrientationLandscapeRight)
{
if([self isFrontCamera]) iori = UIImageOrientationDownMirrored;
else iori = UIImageOrientationUp;
}
else if(self.devOri==UIInterfaceOrientationLandscapeLeft)
{
if([self isFrontCamera]) iori = UIImageOrientationUpMirrored;
else iori = UIImageOrientationDown;
}
else if(self.devOri==UIInterfaceOrientationPortraitUpsideDown)
{
if([self isFrontCamera]) iori = UIImageOrientationRightMirrored;
else iori = UIImageOrientationLeft;
}
else
{
if([self isFrontCamera]) iori = UIImageOrientationLeftMirrored;
else iori = UIImageOrientationRight;
}
UIImage *scaledImage = [[UIImage alloc] initWithCGImage:cgi2 scale:1 orientation:iori];
CGImageRelease(cgi2);
self.foto = scaledImage;
[scaledImage release];
[parent saveFoto];
}];
}
-(void)saveFoto
{
NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
[self setImage_bg:captureManager.foto];
[pool release];
}
-(void)setImage_bg:(UIImage*)oimg
{
CGSize sz = savePreview.frame.size;
UIImage* img = [oimg copy];
[savePreview setImage:[filters resizeImage:img size:sz]];
sz = CGSizeMake(64, 64);
UIImage* img2 = [filters resizeImage:img size:sz];
bolPrv.image = [filters bolly:img2];
[img release];
}
//filters bolly
-(UIImage*)bolly:(UIImage*)img
{
CIImage *beginImage = [CIImage imageWithCGImage:img.CGImage];
CIContext *context = [CIContext contextWithOptions:nil];
UIImage* bb = [UIImage fromFile:@"bollywoodBlend3.png"];
UIImage* bb2 = [bb resizedImage:img.size interpolationQuality:kCGInterpolationMedium];
CIFilter *filter = [CIFilter filterWithName:#"CIOverlayBlendMode"
keysAndValues: kCIInputImageKey, beginImage,
#"inputBackgroundImage", [CIImage imageWithCGImage:bb2.CGImage], nil];
CIImage *outputImage = filter.outputImage;
filter = [CIFilter filterWithName:#"CIColorControls"
keysAndValues: kCIInputImageKey, outputImage,
#"inputSaturation", [NSNumber numberWithFloat:1.8],
#"inputBrightness", [NSNumber numberWithFloat:0.1],
#"inputContrast", [NSNumber numberWithFloat:1.5],
nil];
outputImage = filter.outputImage;
CGImageRef cgimg = [context createCGImage:outputImage fromRect:[outputImage extent]];
UIImage *newImg = [UIImage imageWithCGImage:cgimg];
CGImageRelease(cgimg);
return [newImg autorelease];
}
// filters resizeImage (same code used for resizedImage)
- (UIImage *)resizeImage:(UIImage*)img size:(CGSize)newSize
{
CGRect newRect = CGRectIntegral(CGRectMake(0, 0, newSize.width, newSize.height));
CGImageRef imageRef = img.CGImage;
// Build a context that's the same dimensions as the new size
CGColorSpaceRef csr = CGImageGetColorSpace(imageRef);
CGContextRef bitmap = CGBitmapContextCreate(NULL,
newRect.size.width,
newRect.size.height,
CGImageGetBitsPerComponent(imageRef),
4*newRect.size.width,
csr,
CGImageGetBitmapInfo(imageRef));
// Draw into the context; this scales the image
CGContextDrawImage(bitmap, newRect, imageRef);
// CGImageSourceCreateThumbnailAtIndex
// Get the resized image from the context and a UIImage
CGImageRef newImageRef = CGBitmapContextCreateImage(bitmap);
UIImage *newImage = [UIImage imageWithCGImage:newImageRef scale:img.scale orientation:img.imageOrientation];
// Clean up
CGImageRelease(newImageRef);
CGContextRelease(bitmap);
return newImage;
}
- (UIImage *)resizedImage:(CGSize)newSize
transform:(CGAffineTransform)transform
drawTransposed:(BOOL)transpose
interpolationQuality:(CGInterpolationQuality)quality
{
CGRect newRect = CGRectIntegral(CGRectMake(0, 0, newSize.width, newSize.height));
CGRect transposedRect = CGRectMake(0, 0, newRect.size.height, newRect.size.width);
CGImageRef imageRef = self.CGImage;
// Build a context that's the same dimensions as the new size
CGColorSpaceRef csr = CGImageGetColorSpace(imageRef);
CGContextRef bitmap = CGBitmapContextCreate(NULL,
newRect.size.width,
newRect.size.height,
CGImageGetBitsPerComponent(imageRef),
4*newRect.size.width,
csr,
CGImageGetBitmapInfo(imageRef));
// Rotate and/or flip the image if required by its orientation
CGContextConcatCTM(bitmap, transform);
// Set the quality level to use when rescaling
CGContextSetInterpolationQuality(bitmap, quality);
// Draw into the context; this scales the image
CGContextDrawImage(bitmap, transpose ? transposedRect : newRect, imageRef);
// CGImageSourceCreateThumbnailAtIndex
// Get the resized image from the context and a UIImage
CGImageRef newImageRef = CGBitmapContextCreateImage(bitmap);
UIImage *newImage = [UIImage imageWithCGImage:newImageRef];
// Clean up
CGImageRelease(newImageRef);
CGContextRelease(bitmap);
return newImage;
}
// UIImage from file
+(UIImage*)fromFile:(NSString*)fname
{
NSString* bundlePath = [[NSBundle mainBundle] bundlePath];
return [UIImage imageWithContentsOfFile:[NSString stringWithFormat:@"%@/%@", bundlePath,fname]];
}

CVPixelBuffer leak

I create and use a CVPixelBuffer like so:
//Create buffer
CVPixelBufferRef conversionBuffer = nil;
if (indx < [self.sideImageList count] - 1)
{
UIImageView *tempImageView = [self.sideImageList objectAtIndex:indx + 1];
CGRect originalBackgroundFrame = [tempImageView frame];
UIView *tempView = [tempImageView.subviews objectAtIndex:0];
[tempView removeFromSuperview];
tempImageView.layer.borderColor = [UIColor clearColor].CGColor;
[tempImageView setFrame:CGRectMake(0, 0, kVideoWidth, kVideoHeight)];
UIImage *backgroundImage = [UIImage imageFromView:tempImageView];
[tempImageView addSubview:tempView];
tempImageView.layer.borderColor = [UIColor yellowColor].CGColor;
[tempImageView setFrame:originalBackgroundFrame];
UIImage *resizedBackground = [ICVideoViewController imageWithImage:backgroundImage
scaledToSize:CGSizeMake(kVideoWidth, kVideoHeight)];
UIImage *appendedImage = [ICVideoViewController appendImage:resizedBackground
to:conversionImage
atPoint:CGPointMake((kVideoWidth - conversionImage.size.width)/2,
(kVideoHeight - conversionImage.size.height)/2)
otherPoint:CGPointZero];
//Fill Buffer
conversionBuffer = [ICVideoViewController pixelBufferFromCGImage:[appendedImage CGImage] size:CGSizeMake(kVideoWidth, kVideoHeight)];
}
else
{
//Fill buffer
conversionBuffer = [ICVideoViewController pixelBufferFromCGImage:[conversionImage CGImage] size:CGSizeMake(kVideoWidth, kVideoHeight)];
}
while (adaptor.assetWriterInput.readyForMoreMediaData==NO)
{
[NSThread sleepForTimeInterval:0.1];
}
if (indx == [self.sideImageList count] - 1)
{
// NOTE: breaking here skips the CVPixelBufferRelease below, so the
// buffer created in the else branch above leaks on the last iteration
break;
}
[adaptor appendPixelBuffer:conversionBuffer withPresentationTime:CMTimeMake((frameRate*i)+frames-1, frameRate)];
while (adaptor.assetWriterInput.readyForMoreMediaData==NO)
{}
//release buffer
CVPixelBufferRelease(conversionBuffer);
The method used to populate the buffer is below. I got this method from somewhere, but there doesn't seem to be anything wrong with it:
+ (CVPixelBufferRef) pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size
{
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
nil];
CVPixelBufferRef pxbuffer = NULL;
CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width,
size.height, kCVPixelFormatType_32ARGB, (CFDictionaryRef) options,
&pxbuffer);
NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
NSParameterAssert(pxdata != NULL);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata, size.width,
size.height, 8, 4*size.width, rgbColorSpace,
kCGImageAlphaNoneSkipFirst);
NSParameterAssert(context);
CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
CGImageGetHeight(image)), image);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
return pxbuffer;
}
I seem to be getting a leak in this method (according to the Leaks tool) and I still don't know what I'm doing wrong.

How to resize CIImage?

I need to resize several images in one for loop, but if I use UIGraphicsGetImageFromCurrentImageContext I don't have enough memory for all the images, because the returned images are autoreleased and are only released when the loop terminates. I need another method for resizing. Any ideas? Thanks.
-(UIImage *)imageWithImage:(UIImage *)image scaledToSize:(CGSize)newSize {
CGSize targetSize = newSize;
CGSize imageSize = image.size;
CGFloat width = imageSize.width;
CGFloat height = imageSize.height;
CGFloat targetWidth = targetSize.width;
CGFloat targetHeight = targetSize.height;
CGFloat scaleFactor = 0.0;
CGFloat scaledWidth = targetWidth;
CGFloat scaledHeight = targetHeight;
CGPoint thumbnailPoint = CGPointMake(0.0,0.0);
if (CGSizeEqualToSize(imageSize, targetSize) == NO)
{
CGFloat widthFactor = targetWidth / width;
CGFloat heightFactor = targetHeight / height;
if (widthFactor > heightFactor)
scaleFactor = widthFactor; // scale to fit height
else
scaleFactor = heightFactor; // scale to fit width
scaledWidth = width * scaleFactor;
scaledHeight = height * scaleFactor;
// center the image
if (widthFactor > heightFactor)
{
thumbnailPoint.y = (targetHeight - scaledHeight) * 0.5;
}
else
if (widthFactor < heightFactor)
{
thumbnailPoint.x = (targetWidth - scaledWidth) * 0.5;
}
}
UIGraphicsBeginImageContext(targetSize); // this will crop
CGRect thumbnailRect = CGRectZero;
thumbnailRect.origin = thumbnailPoint;
thumbnailRect.size.width = scaledWidth;
thumbnailRect.size.height = scaledHeight;
[image drawInRect:thumbnailRect];
UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
//pop the context to get back to the default
UIGraphicsEndImageContext();
return newImage;
}
-(void)preparePhotosForResolution:(CGSize)resolution {
NSLog(#"Resolution : %f,%f",resolution.width,resolution.height);
NSFileManager *fm = [NSFileManager defaultManager];
NSString *tmpPath = [projectPath stringByAppendingPathComponent:@"temp"];
[fm removeItemAtPath:tmpPath error:nil];
[fm createDirectoryAtPath:tmpPath withIntermediateDirectories:YES attributes:nil error:nil];
for (int i = 0; i < [sortArray count]; i++) {
UIImage *image = [[UIImage alloc] initWithContentsOfFile:[projectPath stringByAppendingPathComponent:[sortArray objectAtIndex:i]]];
UIImage *newImage = [self imageWithImage:image scaledToSize:resolution];
[image release];
NSData *imgData = UIImagePNGRepresentation(newImage);
[fm createFileAtPath:[tmpPath stringByAppendingPathComponent:[sortArray objectAtIndex:i]] contents:imgData attributes:nil];
}
}
You can resize a CIImage directly with Core Image's CILanczosScaleTransform filter:
CIFilter *filter = [CIFilter filterWithName:@"CILanczosScaleTransform"];
[filter setValue:newImage forKey:@"inputImage"];
[filter setValue:@(scale) forKey:@"inputScale"];
[filter setValue:@1.0 forKey:@"inputAspectRatio"];
newImage = filter.outputImage;
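Note that filter.outputImage is a CIImage, not a UIImage; to display or encode the result you still have to render it through a CIContext. A minimal sketch (creating the context once, outside any loop, since CIContext creation is expensive):

CIContext *ciContext = [CIContext contextWithOptions:nil];
// render the filtered CIImage into a CGImage, then wrap it in a UIImage
CGImageRef cgimg = [ciContext createCGImage:newImage fromRect:[newImage extent]];
UIImage *resized = [UIImage imageWithCGImage:cgimg];
CGImageRelease(cgimg); // createCGImage returns a +1 reference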