Why Does this Cropping Code Result in a Rotated Image? (iPhone)

The title to this question should be rather clear. The code below performs a cropping operation and then displays the new cropped photo in an Image View. The problem is that the image, once cropped, is displayed with a different orientation than the original source image. Why is that? And what should I do about it?
-(void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info
{
    [self.popoverController dismissPopoverAnimated:true];
    NSString *mediaType = [info objectForKey:UIImagePickerControllerMediaType];
    [self dismissModalViewControllerAnimated:YES];
    if ([mediaType isEqualToString:(NSString *)kUTTypeImage]) {
        UIImage *image = [info objectForKey:UIImagePickerControllerOriginalImage];
        CGRect rect = CGRectMake(0, ((image.size.height - image.size.width) / 2), image.size.width, image.size.width);
        CGImageRef subImageRef = CGImageCreateWithImageInRect(image.CGImage, rect);
        CGRect smallBounds = CGRectMake(rect.origin.x, rect.origin.y, CGImageGetWidth(subImageRef), CGImageGetHeight(subImageRef));
        UIGraphicsBeginImageContext(smallBounds.size);
        CGContextRef context = UIGraphicsGetCurrentContext();
        CGContextDrawImage(context, smallBounds, subImageRef);
        croppedImage = [UIImage imageWithCGImage:subImageRef];
        UIGraphicsEndImageContext();
        imageView.image = croppedImage;
    }
}
EDIT:
Based on the comments suggesting that the root cause of the issue is the loss of the EXIF orientation tag, I attempted to correct the orientation with the following code. It still doesn't fix the problem, but I think it is a step in the right direction. Perhaps it will help someone propose a new answer to the question.
if (image.imageOrientation == UIImageOrientationLeft) {
    NSLog(@"left");
    CGContextRotateCTM(context, radians(90));
} else if (image.imageOrientation == UIImageOrientationRight) {
    NSLog(@"right");
    CGContextRotateCTM(context, radians(-90));
} else if (image.imageOrientation == UIImageOrientationUp) {
    NSLog(@"up");
    // NOTHING
} else if (image.imageOrientation == UIImageOrientationDown) {
    NSLog(@"down");
    CGContextRotateCTM(context, radians(-180.));
}
See https://stackoverflow.com/a/5184134/549273

I think your best bet will be to get the orientation from the incoming image, and then create your new image with the appropriate rotation.
Grab the metadata from the info dictionary passed in:
NSDictionary *metadata = [info objectForKey:UIImagePickerControllerMediaMetadata];
NSNumber *orientation = [metadata objectForKey:@"Orientation"];
Then when you create your UIImage:
croppedImage = [UIImage imageWithCGImage:subImageRef
scale:1.0
orientation:[orientation intValue]];
Hope that's a starting point for you, at least.
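A related note (not part of this answer): the picked UIImage already exposes its orientation through its imageOrientation property, so another option is to pass that straight through when wrapping the cropped CGImage, which avoids mapping the raw EXIF number onto the UIImageOrientation enum yourself. A minimal sketch, using the image, rect, croppedImage, and imageView names from the question:
// Alternative sketch: reuse the source UIImage's own orientation and scale when
// wrapping the cropped CGImage. Note that the crop rect is interpreted in the raw
// CGImage's pixel coordinates, which may be transposed relative to image.size for
// camera photos.
CGImageRef subImageRef = CGImageCreateWithImageInRect(image.CGImage, rect);
croppedImage = [UIImage imageWithCGImage:subImageRef
                                   scale:image.scale
                             orientation:image.imageOrientation];
CGImageRelease(subImageRef);   // CGImageCreateWithImageInRect returns a +1 reference
imageView.image = croppedImage;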

What you should do is fix the image orientation before cropping it. I had the same problem; since I'm using Swift, I had to adapt this answer:
iOS UIImagePickerController result image orientation after upload
The code works just fine. Just remember that it returns a new image rather than editing the image in place; I struggled to get it right because of that simple mistake. And by the way, at least in Swift, you should not use UIImageOrientation but UIImageOrientation.rawValue.
I'll leave the Swift code here, hope it helps:
extension UIImage {
public func fixOrientation() -> UIImage?{
if self.imageOrientation.rawValue == UIImageOrientation.Up.rawValue {
return self
}
var transform : CGAffineTransform = CGAffineTransformIdentity
switch self.imageOrientation.rawValue {
case UIImageOrientation.Down.rawValue:
transform = CGAffineTransformTranslate(transform, self.size.width, self.size.height)
transform = CGAffineTransformRotate(transform, CGFloat(M_PI))
case UIImageOrientation.DownMirrored.rawValue:
transform = CGAffineTransformTranslate(transform, self.size.width, self.size.height)
transform = CGAffineTransformRotate(transform, CGFloat(M_PI))
case UIImageOrientation.Left.rawValue:
transform = CGAffineTransformTranslate(transform, self.size.width, 0)
transform = CGAffineTransformRotate(transform, CGFloat(M_PI_2))
case UIImageOrientation.LeftMirrored.rawValue:
transform = CGAffineTransformTranslate(transform, self.size.width, 0)
transform = CGAffineTransformRotate(transform, CGFloat(M_PI_2))
case UIImageOrientation.Right.rawValue:
print("working on right orientation")
transform = CGAffineTransformTranslate(transform, 0, self.size.height)
transform = CGAffineTransformRotate(transform, CGFloat(-M_PI_2))
case UIImageOrientation.RightMirrored.rawValue:
transform = CGAffineTransformTranslate(transform, 0, self.size.height)
transform = CGAffineTransformRotate(transform, CGFloat(-M_PI_2))
case UIImageOrientation.Up.rawValue: break
case UIImageOrientation.UpMirrored.rawValue: break
default: break
}
switch self.imageOrientation.rawValue {
case UIImageOrientation.UpMirrored.rawValue:
transform = CGAffineTransformTranslate(transform, self.size.width, 0)
transform = CGAffineTransformScale(transform, -1, 1)
case UIImageOrientation.DownMirrored.rawValue:
transform = CGAffineTransformTranslate(transform, self.size.width, 0)
transform = CGAffineTransformScale(transform, -1, 1)
case UIImageOrientation.LeftMirrored.rawValue:
transform = CGAffineTransformTranslate(transform, self.size.height, 0)
transform = CGAffineTransformScale(transform, -1, 1)
case UIImageOrientation.RightMirrored.rawValue:
transform = CGAffineTransformTranslate(transform, self.size.height, 0)
transform = CGAffineTransformScale(transform, -1, 1)
case UIImageOrientation.Up.rawValue: break
case UIImageOrientation.Down.rawValue: break
case UIImageOrientation.Left.rawValue: break
case UIImageOrientation.Right.rawValue: break
default: break
}
let bitmapInfo = CGImageGetBitmapInfo(self.CGImage)
let ctx = CGBitmapContextCreate(nil, Int(self.size.width), Int(self.size.height), CGImageGetBitsPerComponent(self.CGImage), 0, CGImageGetColorSpace(self.CGImage),
bitmapInfo.rawValue)
CGContextConcatCTM(ctx, transform)
switch self.imageOrientation {
case UIImageOrientation.Left:
CGContextDrawImage(ctx, CGRectMake(0, 0, self.size.height, self.size.width), self.CGImage)
case UIImageOrientation.LeftMirrored:
CGContextDrawImage(ctx, CGRectMake(0, 0, self.size.height, self.size.width), self.CGImage)
case UIImageOrientation.Right:
CGContextDrawImage(ctx, CGRectMake(0, 0, self.size.height, self.size.width), self.CGImage)
case UIImageOrientation.RightMirrored:
CGContextDrawImage(ctx, CGRectMake(0, 0, self.size.height, self.size.width), self.CGImage)
default:
CGContextDrawImage(ctx, CGRectMake(0, 0, self.size.width, self.size.height), self.CGImage)
}
let cgimg = CGBitmapContextCreateImage(ctx)
let image = UIImage(CGImage: cgimg!)
return image
}
}


setting UIImageView content mode after applying a CIFilter

Thanks for looking.
Here's my code
CIImage *result = _vignette.outputImage;
self.mainImageView.image = nil;
//self.mainImageView.contentMode = UIViewContentModeScaleAspectFit;
self.mainImageView.image = [UIImage imageWithCIImage:result];
self.mainImageView.contentMode = UIViewContentModeScaleAspectFit;
Here _vignette is a correctly set-up filter, and the image effect is applied to the image correctly.
I'm using a source image with a resolution of 500x375. My imageView has almost the iPhone screen's resolution, so to avoid stretching I'm using AspectFit.
But after applying the effect, when I assign the result image back to my imageView, it stretches. No matter which UIViewContentMode I use, it doesn't work; it seems it always applies ScaleToFill regardless of the mode I've given.
Any idea why is this happening? Any suggestion is highly appreciated.
(1) Aspect Fit does stretch the image - to fit. If you don't want the image stretched at all, use Center (for example).
(2) imageWithCIImage gives you a very weird beast, a UIImage not based on CGImage, and so not susceptible to the normal rules of layer display. It is really nothing but a thin wrapper around CIImage, which is not what you want. You must convert (render) the CIFilter output thru CGImage to UIImage, thus giving you a UIImage that actually has some bits (CGImage, a bitmap). My discussion here gives you code that demonstrates:
http://www.apeth.com/iOSBook/ch15.html#_cifilter_and_ciimage
In other words, at some point you must call CIContext createCGImage:fromRect: to generate a CGImageRef from the output of your CIFilter, and pass that on into a UIImage. Until you do that, you don't have the output of your filter operations as a real UIImage.
Alternatively, you can draw the image from imageWithCIImage into a graphics context. For example, you can draw it into an image graphics context and then use that image.
What you can't do is display the image from imageWithCIImage directly. That's because it isn't an image! It has no underlying bitmap (CGImage). There's no there there. All it is is a set of CIFilter instructions for deriving the image.
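For reference, here is a minimal Objective-C sketch of that render step, using the _vignette filter and mainImageView names from the question:
CIImage *result = _vignette.outputImage;
CIContext *ciContext = [CIContext contextWithOptions:nil];
CGImageRef cgImage = [ciContext createCGImage:result fromRect:[result extent]];   // now backed by a real bitmap
self.mainImageView.image = [UIImage imageWithCGImage:cgImage];
self.mainImageView.contentMode = UIViewContentModeScaleAspectFit;
CGImageRelease(cgImage);   // createCGImage:fromRect: returns a +1 Core Foundation reference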
This is an answer for Swift, inspired by this question and the solution by user1951992.
let imageView = UIImageView(frame: CGRect(x: 100, y: 200, width: 100, height: 50))
imageView.contentMode = .scaleAspectFit

// Just an example of a CIFilter.
// NOTE: we're force-unwrapping here because we're sure there is a filter
// (and an image) named exactly this.
let filter = CIFilter(name: "CISepiaTone")!
let image = UIImage(named: "image")!
filter.setValue(CIImage(image: image), forKey: kCIInputImageKey)
filter.setValue(0.5, forKey: kCIInputIntensityKey)

// Render the filter output through a CIContext to get a bitmap-backed CGImage.
guard let outputCIImage = filter.outputImage,
      let outputCGImage = CIContext().createCGImage(outputCIImage, from: outputCIImage.extent) else {
    return
}
imageView.image = UIImage(cgImage: outputCGImage)
I just spent all day on this. I was getting orientation problems followed by poor quality output.
After snapping an image using the camera, I do this.
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
UIImage *image = [[UIImage alloc] initWithData:imageData];
This causes a rotation problem so I needed to do this to it.
UIImage *scaleAndRotateImage(UIImage *image)
{
int kMaxResolution = image.size.height; // Or whatever
CGImageRef imgRef = image.CGImage;
CGFloat width = CGImageGetWidth(imgRef);
CGFloat height = CGImageGetHeight(imgRef);
CGAffineTransform transform = CGAffineTransformIdentity;
CGRect bounds = CGRectMake(0, 0, width, height);
if (width > kMaxResolution || height > kMaxResolution) {
CGFloat ratio = width/height;
if (ratio > 1) {
bounds.size.width = kMaxResolution;
bounds.size.height = bounds.size.width / ratio;
}
else {
bounds.size.height = kMaxResolution;
bounds.size.width = bounds.size.height * ratio;
}
}
CGFloat scaleRatio = bounds.size.width / width;
CGSize imageSize = CGSizeMake(CGImageGetWidth(imgRef), CGImageGetHeight(imgRef));
CGFloat boundHeight;
UIImageOrientation orient = image.imageOrientation;
switch(orient) {
case UIImageOrientationUp: //EXIF = 1
transform = CGAffineTransformIdentity;
break;
case UIImageOrientationUpMirrored: //EXIF = 2
transform = CGAffineTransformMakeTranslation(imageSize.width, 0.0);
transform = CGAffineTransformScale(transform, -1.0, 1.0);
break;
case UIImageOrientationDown: //EXIF = 3
transform = CGAffineTransformMakeTranslation(imageSize.width, imageSize.height);
transform = CGAffineTransformRotate(transform, M_PI);
break;
case UIImageOrientationDownMirrored: //EXIF = 4
transform = CGAffineTransformMakeTranslation(0.0, imageSize.height);
transform = CGAffineTransformScale(transform, 1.0, -1.0);
break;
case UIImageOrientationLeftMirrored: //EXIF = 5
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeTranslation(imageSize.height, imageSize.width);
transform = CGAffineTransformScale(transform, -1.0, 1.0);
transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
break;
case UIImageOrientationLeft: //EXIF = 6
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeTranslation(0.0, imageSize.width);
transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
break;
case UIImageOrientationRightMirrored: //EXIF = 7
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeScale(-1.0, 1.0);
transform = CGAffineTransformRotate(transform, M_PI / 2.0);
break;
case UIImageOrientationRight: //EXIF = 8
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeTranslation(imageSize.height, 0.0);
transform = CGAffineTransformRotate(transform, M_PI / 2.0);
break;
default:
[NSException raise:NSInternalInconsistencyException format:@"Invalid image orientation"];
}
UIGraphicsBeginImageContext(bounds.size);
CGContextRef context = UIGraphicsGetCurrentContext();
if (orient == UIImageOrientationRight || orient == UIImageOrientationLeft) {
CGContextScaleCTM(context, -scaleRatio, scaleRatio);
CGContextTranslateCTM(context, -height, 0);
}
else {
CGContextScaleCTM(context, scaleRatio, -scaleRatio);
CGContextTranslateCTM(context, 0, -height);
}
CGContextConcatCTM(context, transform);
CGContextDrawImage(UIGraphicsGetCurrentContext(), CGRectMake(0, 0, width, height), imgRef);
UIImage *imageCopy = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return imageCopy;
}
Then when I apply my filter, I do this (with special thanks to this thread - specifically matt and Wex)
-(UIImage*)processImage:(UIImage*)image {
    CIImage *inImage = [CIImage imageWithCGImage:image.CGImage];
    CIFilter *filter = [CIFilter filterWithName:@"CIColorControls" keysAndValues:
                        kCIInputImageKey, inImage,
                        @"inputContrast", [NSNumber numberWithFloat:1.0],
                        nil];
    CIImage *outImage = [filter outputImage];
    // Juicy bit: render through a CIContext so the result is backed by a real CGImage
    CGImageRef cgimageref = [[CIContext contextWithOptions:nil] createCGImage:outImage fromRect:[outImage extent]];
    UIImage *result = [UIImage imageWithCGImage:cgimageref];
    CGImageRelease(cgimageref);
    return result;
}
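Putting the pieces of this answer together, a short usage sketch (the imageView outlet is an assumption; image and the two methods are the ones defined above):
UIImage *uprightImage = scaleAndRotateImage(image);        // fix the orientation first
UIImage *filteredImage = [self processImage:uprightImage]; // then run the CIColorControls filter
self.imageView.image = filteredImage;                      // `imageView` is an assumed outlet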

image position not correct after scaling

I am using UIViewContentModeScaleToFill to scale the image. After scaling, the image does not show in the correct position. Help me.
Below is my code:
UIImage *image=[UIImage imageNamed:@"image2.png"];
UIImageView *imageView = [[UIImageView alloc] initWithImage:image];
imageView.frame=CGRectMake(0, 0, 200, 200);
imageView.contentMode = UIViewContentModeScaleAspectFit;
//change width of frame
CGRect frame = imageView.frame;
imageView.frame = frame;
[self.view addSubview:imageView];
After scaling:
refer to image 1
Before scaling:
refer to image 2
I want scaling image starting from origin point (0,0).
Hi, try the code below; I hope it helps you get the idea.
UIImage *image=[UIImage imageNamed:@"image2.png"];
UIImageView *imageView = [[UIImageView alloc] initWithImage:image];
// imageView.contentMode = UIViewContentModeScaleToFill;
imageView.contentMode = UIViewContentModeScaleAspectFit;
imageView.frame = self.view.bounds;
[self.view addSubview:imageView];
Update after the comment below:
I want to crop the image without changing its aspect ratio. The width is 200px, and the height will change according to the width. – shivam
If you want to crop, try the category code below:
UIImage+MyImage.h
#import <UIKit/UIKit.h>
@interface UIImage (MyImage)
+ (UIImage*)imageFromView:(UIView*)view;
+ (UIImage*)imageFromView:(UIView*)view scaledToSize:(CGSize)newSize;
+ (UIImage*)imageWithImage:(UIImage*)image scaledToSize:(CGSize)newSize;
+ (void)beginImageContextWithSize:(CGSize)size;
+ (UIImage *)croppedImage:(UIImage *)myImage :(CGRect)bounds;
+ (UIImage *) imageFromColor:(UIColor *)color;
@end
UIImage+MyImage.m
#import "UIImage+MyImage.h"
@implementation UIImage (MyImage)
+ (void)beginImageContextWithSize:(CGSize)size
{
if ([[UIScreen mainScreen] respondsToSelector:@selector(scale)]) {
if ([[UIScreen mainScreen] scale] == 2.0) {
UIGraphicsBeginImageContextWithOptions(size, YES, 2.0);
} else {
UIGraphicsBeginImageContext(size);
}
} else {
UIGraphicsBeginImageContext(size);
}
}
+ (void)endImageContext
{
UIGraphicsEndImageContext();
}
+ (UIImage*)imageFromView:(UIView*)view
{
[self beginImageContextWithSize:[view bounds].size];
BOOL hidden = [view isHidden];
[view setHidden:NO];
[[view layer] renderInContext:UIGraphicsGetCurrentContext()];
UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
[self endImageContext];
[view setHidden:hidden];
return image;
}
+ (UIImage*)imageFromView:(UIView*)view scaledToSize:(CGSize)newSize
{
UIImage *image = [self imageFromView:view];
if ([view bounds].size.width != newSize.width ||
[view bounds].size.height != newSize.height) {
image = [self imageWithImage:image scaledToSize:newSize];
}
return image;
}
+ (UIImage*)imageWithImage:(UIImage*)image scaledToSize:(CGSize)newSize
{
[self beginImageContextWithSize:newSize];
[image drawInRect:CGRectMake(0,0,newSize.width,newSize.height)];
UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
[self endImageContext];
return newImage;
}
+ (UIImage *)croppedImage:(UIImage *)myImage :(CGRect)bounds {
CGImageRef imageRef = CGImageCreateWithImageInRect(myImage.CGImage, bounds);
UIImage *croppedImage = [UIImage imageWithCGImage:imageRef];
CGImageRelease(imageRef);
CGSize asd = croppedImage.size;
return croppedImage;
}
+ (UIImage *) imageFromColor:(UIColor *)color {
CGRect rect = CGRectMake(0, 0, 1, 1);
UIGraphicsBeginImageContext(rect.size);
CGContextRef context = UIGraphicsGetCurrentContext();
CGContextSetFillColorWithColor(context, [color CGColor]);
// [[UIColor colorWithRed:222./255 green:227./255 blue: 229./255 alpha:1] CGColor]) ;
CGContextFillRect(context, rect);
UIImage *img = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return img;
}
@end
The call will be:
imageView.image = [UIImage croppedImage:imageView.image :CGRectMake(0, 0, 200, 200)];
First of all, you've got UIViewContentModeScaleAspectFit in your code, not UIViewContentModeScaleToFill as you've said. Post an original image, please.
Second, the 'change width of frame' part is useless: imageView.frame.size.width is already set to 200, and content modes don't change a view's frame at all, they only change the way it shows its content.
I don't know what 'correct position' you are talking about, but try making the imageView fullscreen like this:
UIImageView *imageView = [[UIImageView alloc] initWithImage:image];
imageView.contentMode = UIViewContentModeScaleToFill;
imageView.frame = self.view.frame;
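If the goal from the comment above is a fixed 200-point width with the height following the image's aspect ratio (so the view starts at the origin and nothing is stretched), here is a minimal sketch along those lines (not from the original answers):
UIImage *image = [UIImage imageNamed:@"image2.png"];
CGFloat targetWidth = 200.0;
CGFloat targetHeight = targetWidth * image.size.height / image.size.width;   // keep the aspect ratio
UIImageView *imageView = [[UIImageView alloc] initWithImage:image];
imageView.contentMode = UIViewContentModeScaleAspectFit;
imageView.frame = CGRectMake(0, 0, targetWidth, targetHeight);
[self.view addSubview:imageView];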
You can use this code to fix the unwanted image rotation.
Swift 5 Extension:
extension UIImage {
/// Fix image orientation to portrait up
func fixedOrientation() -> UIImage? {
guard imageOrientation != UIImage.Orientation.up else {
// This is default orientation, don't need to do anything
return self.copy() as? UIImage
}
guard let cgImage = self.cgImage else {
// CGImage is not available
return nil
}
guard let colorSpace = cgImage.colorSpace, let ctx = CGContext(data: nil, width: Int(size.width), height: Int(size.height), bitsPerComponent: cgImage.bitsPerComponent, bytesPerRow: 0, space: colorSpace, bitmapInfo: CGImageAlphaInfo.premultipliedLast.rawValue) else {
return nil // Not able to create CGContext
}
var transform: CGAffineTransform = CGAffineTransform.identity
switch imageOrientation {
case .down, .downMirrored:
transform = transform.translatedBy(x: size.width, y: size.height)
transform = transform.rotated(by: CGFloat.pi)
case .left, .leftMirrored:
transform = transform.translatedBy(x: size.width, y: 0)
transform = transform.rotated(by: CGFloat.pi / 2.0)
case .right, .rightMirrored:
transform = transform.translatedBy(x: 0, y: size.height)
transform = transform.rotated(by: CGFloat.pi / -2.0)
case .up, .upMirrored:
break
@unknown default:
fatalError("Missing...")
break
}
// Flip image one more time if needed to, this is to prevent flipped image
switch imageOrientation {
case .upMirrored, .downMirrored:
transform = transform.translatedBy(x: size.width, y: 0)
transform = transform.scaledBy(x: -1, y: 1)
case .leftMirrored, .rightMirrored:
transform = transform.translatedBy(x: size.height, y: 0)
transform = transform.scaledBy(x: -1, y: 1)
case .up, .down, .left, .right:
break
@unknown default:
fatalError("Missing...")
break
}
ctx.concatenate(transform)
switch imageOrientation {
case .left, .leftMirrored, .right, .rightMirrored:
ctx.draw(cgImage, in: CGRect(x: 0, y: 0, width: size.height, height: size.width))
default:
ctx.draw(cgImage, in: CGRect(x: 0, y: 0, width: size.width, height: size.height))
break
}
guard let newCGImage = ctx.makeImage() else { return nil }
return UIImage.init(cgImage: newCGImage, scale: 1, orientation: .up)
}
}
Objective-C code:
-(UIImage *)scaleAndRotateImage:(UIImage *)image{
// No-op if the orientation is already correct
if (image.imageOrientation == UIImageOrientationUp) return image;
// We need to calculate the proper transformation to make the image upright.
// We do it in 2 steps: Rotate if Left/Right/Down, and then flip if Mirrored.
CGAffineTransform transform = CGAffineTransformIdentity;
switch (image.imageOrientation) {
case UIImageOrientationDown:
case UIImageOrientationDownMirrored:
transform = CGAffineTransformTranslate(transform, image.size.width, image.size.height);
transform = CGAffineTransformRotate(transform, M_PI);
break;
case UIImageOrientationLeft:
case UIImageOrientationLeftMirrored:
transform = CGAffineTransformTranslate(transform, image.size.width, 0);
transform = CGAffineTransformRotate(transform, M_PI_2);
break;
case UIImageOrientationRight:
case UIImageOrientationRightMirrored:
transform = CGAffineTransformTranslate(transform, 0, image.size.height);
transform = CGAffineTransformRotate(transform, -M_PI_2);
break;
case UIImageOrientationUp:
case UIImageOrientationUpMirrored:
break;
}
switch (image.imageOrientation) {
case UIImageOrientationUpMirrored:
case UIImageOrientationDownMirrored:
transform = CGAffineTransformTranslate(transform, image.size.width, 0);
transform = CGAffineTransformScale(transform, -1, 1);
break;
case UIImageOrientationLeftMirrored:
case UIImageOrientationRightMirrored:
transform = CGAffineTransformTranslate(transform, image.size.height, 0);
transform = CGAffineTransformScale(transform, -1, 1);
break;
case UIImageOrientationUp:
case UIImageOrientationDown:
case UIImageOrientationLeft:
case UIImageOrientationRight:
break;
}
// Now we draw the underlying CGImage into a new context, applying the transform
// calculated above.
CGContextRef ctx = CGBitmapContextCreate(NULL, image.size.width, image.size.height,
CGImageGetBitsPerComponent(image.CGImage), 0,
CGImageGetColorSpace(image.CGImage),
CGImageGetBitmapInfo(image.CGImage));
CGContextConcatCTM(ctx, transform);
switch (image.imageOrientation) {
case UIImageOrientationLeft:
case UIImageOrientationLeftMirrored:
case UIImageOrientationRight:
case UIImageOrientationRightMirrored:
// Grr...
CGContextDrawImage(ctx, CGRectMake(0,0,image.size.height,image.size.width), image.CGImage);
break;
default:
CGContextDrawImage(ctx, CGRectMake(0,0,image.size.width,image.size.height), image.CGImage);
break;
}
// And now we just create a new UIImage from the drawing context
CGImageRef cgimg = CGBitmapContextCreateImage(ctx);
UIImage *img = [UIImage imageWithCGImage:cgimg];
CGContextRelease(ctx);
CGImageRelease(cgimg);
return img;
}
Use of the code:
UIImage *img=[info objectForKey:UIImagePickerControllerOriginalImage];
img=[self scaleAndRotateImage:img];
NSData *image = UIImageJPEGRepresentation(img, 0.1);

How to scale image?

How do I scale an image with good quality? I am using the following code, but when I take an image with the camera it shows in a different frame.
-(UIImage *)resizeImage:(UIImage *)image {
int w = image.size.width;
int h = image.size.height;
CGImageRef imageRef = [image CGImage];
int width, height;
int destWidth = 640;
int destHeight = 480;
if(w > h){
width = destWidth;
height = h*destWidth/w;
} else {
height = destHeight;
width = w*destHeight/h;
}
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef bitmap;
bitmap = CGBitmapContextCreate(NULL, width, height, 8, 4 * width, colorSpace, kCGImageAlphaPremultipliedFirst);
if (image.imageOrientation == UIImageOrientationLeft) {
CGContextRotateCTM (bitmap, M_PI/2);
CGContextTranslateCTM (bitmap, 0, -height);
} else if (image.imageOrientation == UIImageOrientationRight) {
CGContextRotateCTM (bitmap, -M_PI/2);
CGContextTranslateCTM (bitmap, -width, 0);
} else if (image.imageOrientation == UIImageOrientationUp) {
} else if (image.imageOrientation == UIImageOrientationDown) {
CGContextTranslateCTM (bitmap, width,height);
CGContextRotateCTM (bitmap, -M_PI);
}
CGContextDrawImage(bitmap, CGRectMake(0, 0, width, height), imageRef);
CGImageRef ref = CGBitmapContextCreateImage(bitmap);
UIImage *result = [UIImage imageWithCGImage:ref];
CGContextRelease(bitmap);
CGImageRelease(ref);
return result;
}
If there is any way to reduce the size without affecting the quality, please suggest it.
Thanks in advance
In order to reduce size, you will have to compromise on the resolution of the image. I suggest you calculate a new WIDTH and HEIGHT for the image.
Here is something that worked for me; try this:
-(UIImage*) resizeImage:(CGImageRef)image toWidth:(int)width andHeight:(int)height
{
// create context, keeping original image properties
CGColorSpaceRef colorspace = CGImageGetColorSpace(image);
CGContextRef context = CGBitmapContextCreate(NULL, width, height, CGImageGetBitsPerComponent(image), CGImageGetBytesPerRow(image),
colorspace,
CGImageGetAlphaInfo(image));
if(context == NULL)
{
NSLog(@"Could not re-size");
return nil;
}
// draw image to context (resizing it)
CGContextDrawImage(context, CGRectMake(0, 0, width, height), image);
// extract resulting image from context
CGImageRef imgRef = CGBitmapContextCreateImage(context);
CGContextRelease(context);
UIImage* resizedImage = [UIImage imageWithCGImage:imgRef];
CGImageRelease(imgRef);
return resizedImage;
}
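For example, a usage sketch for the helper above (the "photo.jpg" asset name and the 640-point cap are just placeholders); the new width and height are derived from the original aspect ratio so the image is not distorted:
UIImage *sourceImage = [UIImage imageNamed:@"photo.jpg"];   // placeholder image name
CGFloat maxSide = 640.0;                                    // pick whatever cap you need
CGFloat scale = maxSide / MAX(sourceImage.size.width, sourceImage.size.height);
int newWidth  = (int)(sourceImage.size.width * scale);
int newHeight = (int)(sourceImage.size.height * scale);
UIImage *smallImage = [self resizeImage:sourceImage.CGImage toWidth:newWidth andHeight:newHeight];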
I have used the code below to scale the camera image. Please try it if you find it helpful.
- (void)imagePickerController:(UIImagePickerController *)picker
didFinishPickingImage:(UIImage *)image
editingInfo:(NSDictionary *)editingInfo
{
//// ADD THIS LINE AFTER YOU HAVE CAPTURED THE IMAGE
image = [self scaleAndRotateImage:image];
[self useImage:image];
[[picker parentViewController] dismissModalViewControllerAnimated:YES];
}
/// THIS METHOD SCALES AND ROTATES THE CAPTURED IMAGE INTO AN UPRIGHT ORIENTATION
- (UIImage *)scaleAndRotateImage:(UIImage *)image
{
int kMaxResolution = 320; // Or whatever
CGImageRef imgRef = image.CGImage;
CGFloat width = CGImageGetWidth(imgRef);
CGFloat height = CGImageGetHeight(imgRef);
CGAffineTransform transform = CGAffineTransformIdentity;
CGRect bounds = CGRectMake(0, 0, width, height);
if (width > kMaxResolution || height > kMaxResolution) {
CGFloat ratio = width/height;
if (ratio > 1) {
bounds.size.width = kMaxResolution;
bounds.size.height = bounds.size.width / ratio;
}
else {
bounds.size.height = kMaxResolution;
bounds.size.width = bounds.size.height * ratio;
}
}
CGFloat scaleRatio = bounds.size.width / width;
CGSize imageSize = CGSizeMake(CGImageGetWidth(imgRef), CGImageGetHeight(imgRef));
CGFloat boundHeight;
UIImageOrientation orient = image.imageOrientation;
switch(orient) {
case UIImageOrientationUp: //EXIF = 1
transform = CGAffineTransformIdentity;
break;
case UIImageOrientationUpMirrored: //EXIF = 2
transform = CGAffineTransformMakeTranslation(imageSize.width, 0.0);
transform = CGAffineTransformScale(transform, -1.0, 1.0);
break;
case UIImageOrientationDown: //EXIF = 3
transform = CGAffineTransformMakeTranslation(imageSize.width, imageSize.height);
transform = CGAffineTransformRotate(transform, M_PI);
break;
case UIImageOrientationDownMirrored: //EXIF = 4
transform = CGAffineTransformMakeTranslation(0.0, imageSize.height);
transform = CGAffineTransformScale(transform, 1.0, -1.0);
break;
case UIImageOrientationLeftMirrored: //EXIF = 5
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeTranslation(imageSize.height, imageSize.width);
transform = CGAffineTransformScale(transform, -1.0, 1.0);
transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
break;
case UIImageOrientationLeft: //EXIF = 6
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeTranslation(0.0, imageSize.width);
transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
break;
case UIImageOrientationRightMirrored: //EXIF = 7
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeScale(-1.0, 1.0);
transform = CGAffineTransformRotate(transform, M_PI / 2.0);
break;
case UIImageOrientationRight: //EXIF = 8
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeTranslation(imageSize.height, 0.0);
transform = CGAffineTransformRotate(transform, M_PI / 2.0);
break;
default:
[NSException raise:NSInternalInconsistencyException format:@"Invalid image orientation"];
}
UIGraphicsBeginImageContext(bounds.size);
CGContextRef context = UIGraphicsGetCurrentContext();
if (orient == UIImageOrientationRight || orient == UIImageOrientationLeft) {
CGContextScaleCTM(context, -scaleRatio, scaleRatio);
CGContextTranslateCTM(context, -height, 0);
}
else {
CGContextScaleCTM(context, scaleRatio, -scaleRatio);
CGContextTranslateCTM(context, 0, -height);
}
CGContextConcatCTM(context, transform);
CGContextDrawImage(UIGraphicsGetCurrentContext(), CGRectMake(0, 0, width, height), imgRef);
UIImage *imageCopy = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return imageCopy;
}

imageOrientation returns Up, but photo shows up upside down

I'm grabbing some photos from the iPad photo library. I took a few test photos in landscape mode, both with the iPad home button on the left side, and then the right side.
So in my app, for some reason, some of the photos show up upside down. I checked their imageOrientation property, and they are all 0 (meaning they are UIImageOrientationUp). So that means I can't even tell which photos I need to rotate.
What's going on, and how can I tell which photos need to be rotated? Thanks
A UIImage has a property imageOrientation, which instructs the UIImageView and other UIImage consumers to rotate the raw image data. There's a good chance that this flag is being saved to the exif data in the uploaded jpeg image, but the program you use to view it is not honoring that flag.
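As a quick sanity check (a sketch, not part of the original answer), you can inspect the orientation flag actually stored in the JPEG bytes with ImageIO; here the bytes come from UIImageJPEGRepresentation just to have something to inspect, but if you still have the original file data it is better to read that directly:
#import <ImageIO/ImageIO.h>

// `photo` stands for the UIImage you pulled from the photo library (an assumption).
NSData *jpegData = UIImageJPEGRepresentation(photo, 0.9);
CGImageSourceRef source = CGImageSourceCreateWithData((__bridge CFDataRef)jpegData, NULL);
NSDictionary *properties = (__bridge_transfer NSDictionary *)CGImageSourceCopyPropertiesAtIndex(source, 0, NULL);
NSLog(@"EXIF orientation in the data: %@ (1 = up)", properties[(NSString *)kCGImagePropertyOrientation]);
NSLog(@"UIImage imageOrientation: %ld", (long)photo.imageOrientation);
CFRelease(source);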
To rotate the UIImage to display properly when uploaded, you can use a category like this:
In the .h file, UIImagefixOrientation.h:
@interface UIImage (fixOrientation)
- (UIImage *)fixOrientation;
@end
And in the .m file, UIImagefixOrientation.m:
@implementation UIImage (fixOrientation)
- (UIImage *)fixOrientation {
// No-op if the orientation is already correct
if (self.imageOrientation == UIImageOrientationUp) return self;
// We need to calculate the proper transformation to make the image upright.
// We do it in 2 steps: Rotate if Left/Right/Down, and then flip if Mirrored.
CGAffineTransform transform = CGAffineTransformIdentity;
switch (self.imageOrientation) {
case UIImageOrientationDown:
case UIImageOrientationDownMirrored:
transform = CGAffineTransformTranslate(transform, self.size.width, self.size.height);
transform = CGAffineTransformRotate(transform, M_PI);
break;
case UIImageOrientationLeft:
case UIImageOrientationLeftMirrored:
transform = CGAffineTransformTranslate(transform, self.size.width, 0);
transform = CGAffineTransformRotate(transform, M_PI_2);
break;
case UIImageOrientationRight:
case UIImageOrientationRightMirrored:
transform = CGAffineTransformTranslate(transform, 0, self.size.height);
transform = CGAffineTransformRotate(transform, -M_PI_2);
break;
}
switch (self.imageOrientation) {
case UIImageOrientationUpMirrored:
case UIImageOrientationDownMirrored:
transform = CGAffineTransformTranslate(transform, self.size.width, 0);
transform = CGAffineTransformScale(transform, -1, 1);
break;
case UIImageOrientationLeftMirrored:
case UIImageOrientationRightMirrored:
transform = CGAffineTransformTranslate(transform, self.size.height, 0);
transform = CGAffineTransformScale(transform, -1, 1);
break;
}
// Now we draw the underlying CGImage into a new context, applying the transform
// calculated above.
CGContextRef ctx = CGBitmapContextCreate(NULL, self.size.width, self.size.height,
CGImageGetBitsPerComponent(self.CGImage), 0,
CGImageGetColorSpace(self.CGImage),
CGImageGetBitmapInfo(self.CGImage));
CGContextConcatCTM(ctx, transform);
switch (self.imageOrientation) {
case UIImageOrientationLeft:
case UIImageOrientationLeftMirrored:
case UIImageOrientationRight:
case UIImageOrientationRightMirrored:
// Grr...
CGContextDrawImage(ctx, CGRectMake(0,0,self.size.height,self.size.width), self.CGImage);
break;
default:
CGContextDrawImage(ctx, CGRectMake(0,0,self.size.width,self.size.height), self.CGImage);
break;
}
// And now we just create a new UIImage from the drawing context
CGImageRef cgimg = CGBitmapContextCreateImage(ctx);
UIImage *img = [UIImage imageWithCGImage:cgimg];
CGContextRelease(ctx);
CGImageRelease(cgimg);
return img;
}
@end
Call this method when saving a captured image or when selecting an image from the gallery.
Without getting into your image code, could it be due to the flipped coordinate system on iOS?
https://developer.apple.com/library/mac/documentation/Cocoa/Conceptual/CocoaDrawingGuide/Transforms/Transforms.html
~snip:
Configuring Your View to Use Flipped Coordinates
The first step you need to take to implement flipped coordinates is to decide the default orientation of your view. If you prefer to use flipped coordinates, there are two ways to configure your view’s coordinate system prior to drawing:
Override your view's isFlipped method and return YES.
Apply a flip transform to your content immediately prior to rendering.
So in your view, you can try adding -(BOOL)isFlipped and return YES and test if that makes any difference.
I was also facing the same problem. Use this function to resolve it:
- (UIImage *)scaleAndRotateImage:(UIImage *) image {
int kMaxResolution = 320;
CGImageRef imgRef = image.CGImage;
CGFloat width = CGImageGetWidth(imgRef);
CGFloat height = CGImageGetHeight(imgRef);
CGAffineTransform transform = CGAffineTransformIdentity;
CGRect bounds = CGRectMake(0, 0, width, height);
if (width > kMaxResolution || height > kMaxResolution) {
CGFloat ratio = width/height;
if (ratio > 1) {
bounds.size.width = kMaxResolution;
bounds.size.height = bounds.size.width / ratio;
}
else {
bounds.size.height = kMaxResolution;
bounds.size.width = bounds.size.height * ratio;
}
}
CGFloat scaleRatio = bounds.size.width / width;
CGSize imageSize = CGSizeMake(CGImageGetWidth(imgRef), CGImageGetHeight(imgRef));
CGFloat boundHeight;
UIImageOrientation orient = image.imageOrientation;
switch(orient) {
case UIImageOrientationUp: //EXIF = 1
transform = CGAffineTransformIdentity;
break;
case UIImageOrientationUpMirrored: //EXIF = 2
transform = CGAffineTransformMakeTranslation(imageSize.width, 0.0);
transform = CGAffineTransformScale(transform, -1.0, 1.0);
break;
case UIImageOrientationDown: //EXIF = 3
transform = CGAffineTransformMakeTranslation(imageSize.width, imageSize.height);
transform = CGAffineTransformRotate(transform, M_PI);
break;
case UIImageOrientationDownMirrored: //EXIF = 4
transform = CGAffineTransformMakeTranslation(0.0, imageSize.height);
transform = CGAffineTransformScale(transform, 1.0, -1.0);
break;
case UIImageOrientationLeftMirrored: //EXIF = 5
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeTranslation(imageSize.height, imageSize.width);
transform = CGAffineTransformScale(transform, -1.0, 1.0);
transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
break;
case UIImageOrientationLeft: //EXIF = 6
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeTranslation(0.0, imageSize.width);
transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
break;
case UIImageOrientationRightMirrored: //EXIF = 7
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeScale(-1.0, 1.0);
transform = CGAffineTransformRotate(transform, M_PI / 2.0);
break;
case UIImageOrientationRight: //EXIF = 8
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeTranslation(imageSize.height, 0.0);
transform = CGAffineTransformRotate(transform, M_PI / 2.0);
break;
default:
[NSException raise:NSInternalInconsistencyException format:@"Invalid image orientation"];
}
UIGraphicsBeginImageContext(bounds.size);
CGContextRef context = UIGraphicsGetCurrentContext();
if (orient == UIImageOrientationRight || orient == UIImageOrientationLeft) {
CGContextScaleCTM(context, -scaleRatio, scaleRatio);
CGContextTranslateCTM(context, -height, 0);
}
else {
CGContextScaleCTM(context, scaleRatio, -scaleRatio);
CGContextTranslateCTM(context, 0, -height);
}
CGContextConcatCTM(context, transform);
CGContextDrawImage(UIGraphicsGetCurrentContext(), CGRectMake(0, 0, width, height), imgRef);
UIImage *imageCopy = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return imageCopy;
}

UIImagePickerController camera device issue

I have the following problem: when I use the UIImagePickerController, AFTER I capture the picture and the shutter appears and disappears, there is a screen with the captured picture and two buttons: "Retake" and "Use". Even if I take the picture in landscapeLeft or landscapeRight, on the above-mentioned screen the taken picture appears in portrait, no matter what.
Now, my question is the following: how can I make the picked image appear in its original orientation?
Thanks
When you pick a photo and UIImagePickerController calls imagePickerController:didFinishPickingImage:editingInfo: on its delegate, you can inspect the UIImage it returns. It has an orientation property; if it's not zero (Up), normalize it. I added an imageWithFixedOrientation method to UIImage via a category. See its code below:
- (UIImage *)imageWithFixedOrientation {
// No-op if the orientation is already correct
if (self.imageOrientation == UIImageOrientationUp) return self;
// We need to calculate the proper transformation to make the image upright.
// We do it in 2 steps: Rotate if Left/Right/Down, and then flip if Mirrored.
CGAffineTransform transform = CGAffineTransformIdentity;
switch (self.imageOrientation) {
case UIImageOrientationDown:
case UIImageOrientationDownMirrored:
transform = CGAffineTransformTranslate(transform, self.size.width, self.size.height);
transform = CGAffineTransformRotate(transform, M_PI);
break;
case UIImageOrientationLeft:
case UIImageOrientationLeftMirrored:
transform = CGAffineTransformTranslate(transform, self.size.width, 0);
transform = CGAffineTransformRotate(transform, M_PI_2);
break;
case UIImageOrientationRight:
case UIImageOrientationRightMirrored:
transform = CGAffineTransformTranslate(transform, 0, self.size.height);
transform = CGAffineTransformRotate(transform, -M_PI_2);
break;
default:
break;
}
switch (self.imageOrientation) {
case UIImageOrientationUpMirrored:
case UIImageOrientationDownMirrored:
transform = CGAffineTransformTranslate(transform, self.size.width, 0);
transform = CGAffineTransformScale(transform, -1, 1);
break;
case UIImageOrientationLeftMirrored:
case UIImageOrientationRightMirrored:
transform = CGAffineTransformTranslate(transform, self.size.height, 0);
transform = CGAffineTransformScale(transform, -1, 1);
break;
default:
break;
}
// Now we draw the underlying CGImage into a new context, applying the transform
// calculated above.
CGContextRef ctx = CGBitmapContextCreate(NULL, self.size.width, self.size.height,
CGImageGetBitsPerComponent(self.CGImage), 0,
CGImageGetColorSpace(self.CGImage),
CGImageGetBitmapInfo(self.CGImage));
CGContextConcatCTM(ctx, transform);
switch (self.imageOrientation) {
case UIImageOrientationLeft:
case UIImageOrientationLeftMirrored:
case UIImageOrientationRight:
case UIImageOrientationRightMirrored:
// Grr...
CGContextDrawImage(ctx, CGRectMake(0,0,self.size.height,self.size.width), self.CGImage);
break;
default:
CGContextDrawImage(ctx, CGRectMake(0,0,self.size.width,self.size.height), self.CGImage);
break;
}
// And now we just create a new UIImage from the drawing context
CGImageRef cgimg = CGBitmapContextCreateImage(ctx);
UIImage *img = [UIImage imageWithCGImage:cgimg];
CGContextRelease(ctx);
CGImageRelease(cgimg);
return img;
}