I have a UIImageView with contentMode = .scaleAspectFit. The image in the image view has larger dimensions than the image view itself. The user can draw some lines, which are saved as a sublayer, and afterwards I need to save the edited image. But the quality of the saved image is worse than the quality of the image I loaded.
What am I doing wrong? I tried to use a transform, but it didn't work.
import UIKit

extension UIImageView {
    var contentClippingRect: CGRect {
        let imgViewSize = self.frame.size
        let imgSize = self.image?.size ?? .zero
        let scaleW = imgViewSize.width / imgSize.width
        let scaleH = imgViewSize.height / imgSize.height
        let aspect = fmin(scaleW, scaleH)
        let width = imgSize.width * aspect
        let height = imgSize.height * aspect
        let imageRect = CGRect(x: (imgViewSize.width - width) / 2 + self.frame.origin.x,
                               y: (imgViewSize.height - height) / 2 + self.frame.origin.y,
                               width: width,
                               height: height)
        return imageRect
    }

    func asImage() -> UIImage {
        let imageRect = self.contentClippingRect
        let renderer = UIGraphicsImageRenderer(bounds: imageRect)
        let renderedImage = renderer.image { rendererContext in
            layer.render(in: rendererContext.cgContext)
        }
        return renderedImage
    }
}
You can use UIGraphicsImageRendererFormat and raise its scale; it will increase the quality of the rendered image a bit.
import UIKit

extension UIImageView {
    var contentClippingRect: CGRect {
        let imgViewSize = self.frame.size
        let imgSize = self.image?.size ?? .zero
        let scaleW = imgViewSize.width / imgSize.width
        let scaleH = imgViewSize.height / imgSize.height
        let aspect = fmin(scaleW, scaleH)
        let width = imgSize.width * aspect
        let height = imgSize.height * aspect
        let imageRect = CGRect(x: (imgViewSize.width - width) / 2 + self.frame.origin.x,
                               y: (imgViewSize.height - height) / 2 + self.frame.origin.y,
                               width: width,
                               height: height)
        return imageRect
    }

    func asImage() -> UIImage {
        let imageRect = self.contentClippingRect
        // add this
        let format = UIGraphicsImageRendererFormat()
        format.scale = 2
        let renderer = UIGraphicsImageRenderer(bounds: imageRect, format: format)
        let renderedImage = renderer.image { rendererContext in
            layer.render(in: rendererContext.cgContext)
        }
        return renderedImage
    }
}
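If a fixed scale of 2 still loses detail, a variation is to derive the scale from the source image so the render matches its native pixel width. A minimal sketch, assuming the extension above is in scope (the method name asImageAtSourceResolution is mine):

extension UIImageView {
    // Renders the aspect-fit region so its output width matches the
    // source image's pixel width (image.size is in points; multiply
    // by image.scale to get pixels).
    func asImageAtSourceResolution() -> UIImage {
        let imageRect = self.contentClippingRect
        let format = UIGraphicsImageRendererFormat()
        if let image = self.image, imageRect.width > 0 {
            format.scale = (image.size.width * image.scale) / imageRect.width
        }
        let renderer = UIGraphicsImageRenderer(bounds: imageRect, format: format)
        return renderer.image { rendererContext in
            layer.render(in: rendererContext.cgContext)
        }
    }
}

Note that the drawn sublayers are still rasterized from their on-screen geometry, so lines drawn at screen resolution cannot gain detail they never had; this only stops the base image from being downsampled.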
I tried to look up the answer on the Internet and did not find it; maybe I phrased my search query wrong.
Can you tell me how to crop a photo to a 3:4 aspect ratio? I can't find a function to do it. Can you share one?
You can use this function to crop your image to any aspect ratio (e.g. 3:4). Pass your image and the desired aspect ratio as parameters, and it will return the cropped image. Note that the function interprets aspectRatio as height divided by width.
func crop(image: UIImage, to aspectRatio: CGFloat) -> UIImage {
    let originalAspectRatio = image.size.height / image.size.width
    var newImagesize = image.size
    if originalAspectRatio > aspectRatio {
        // Image is too tall for the target ratio: reduce the height.
        newImagesize.height = image.size.width * aspectRatio
    } else if originalAspectRatio < aspectRatio {
        // Image is too wide for the target ratio: reduce the width.
        newImagesize.width = image.size.height / aspectRatio
    } else {
        return image
    }
    // Crop around the center of the original image.
    let center = CGPoint(x: image.size.width / 2, y: image.size.height / 2)
    let origin = CGPoint(x: center.x - newImagesize.width / 2, y: center.y - newImagesize.height / 2)
    let cgCroppedImage = image.cgImage!.cropping(to: CGRect(origin: origin, size: newImagesize))!
    let croppedImage = UIImage(cgImage: cgCroppedImage, scale: image.scale, orientation: image.imageOrientation)
    return croppedImage
}
Usage:
let croppedImage = crop(image: yourImage, to: 3/4)
Use the following function. For more details, read this.
func cropImage(_ inputImage: UIImage, toRect cropRect: CGRect, viewWidth: CGFloat, viewHeight: CGFloat) -> UIImage? {
    let imageViewScale = max(inputImage.size.width / viewWidth,
                             inputImage.size.height / viewHeight)

    // Scale cropRect to handle images larger than shown-on-screen size
    let cropZone = CGRect(x: cropRect.origin.x * imageViewScale,
                          y: cropRect.origin.y * imageViewScale,
                          width: cropRect.size.width * imageViewScale,
                          height: cropRect.size.height * imageViewScale)

    // Perform cropping in Core Graphics
    guard let cutImageRef: CGImage = inputImage.cgImage?.cropping(to: cropZone) else {
        return nil
    }

    // Return image to UIImage
    let croppedImage: UIImage = UIImage(cgImage: cutImageRef)
    return croppedImage
}
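For example, to crop the top-left square of what the user sees (the view dimensions and rect below are made up for illustration):

// imageView is 300 x 400 points; crop the 100 x 100-point square at its origin.
let cropped = cropImage(originalImage,
                        toRect: CGRect(x: 0, y: 0, width: 100, height: 100),
                        viewWidth: 300,
                        viewHeight: 400)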
I have large images uploaded by users in Swift, and I need to resize them all to 100x100 px to create thumbnails to store on my server. So far I have found that this resizes an image given a CGSize:
func resizedImage(image: UIImage, size: CGSize) -> UIImage? {
    let renderer = UIGraphicsImageRenderer(size: size)
    return renderer.image { (context) in
        image.draw(in: CGRect(origin: .zero, size: size))
    }
}
Is there any way to create a CGSize knowing that my target size is strictly 100x100px?
Got this to work:
import UIKit
import AVFoundation // for AVMakeRect

extension UIImage {
    func resizedImage(pixelSize: (width: Int, height: Int)) -> UIImage? {
        // Divide by the screen scale so the rendered points map to the
        // requested pixels on this device.
        let size = CGSize(width: CGFloat(pixelSize.width) / UIScreen.main.scale,
                          height: CGFloat(pixelSize.height) / UIScreen.main.scale)
        let rect = AVMakeRect(aspectRatio: self.size,
                              insideRect: CGRect(x: 0, y: 0, width: size.width, height: size.height))
        let renderer = UIGraphicsImageRenderer(size: size)
        return renderer.image { (context) in
            self.draw(in: rect)
        }
    }
}
You should initialize your renderer based on the user's device scale, and multiply the width and height instead of dividing them:
extension UIImage {
    func aspectFitScaled(to size: CGSize) -> UIImage {
        let format = imageRendererFormat
        format.opaque = false
        format.scale = UIScreen.main.scale
        let isLandscape = self.size.width > self.size.height
        let ratio = isLandscape ? size.width / self.size.width : size.height / self.size.height
        // Scale the image's size by the fitting ratio.
        let drawSize = CGSize(width: self.size.width * ratio, height: self.size.height * ratio)
        let x = (size.width - drawSize.width) / 2
        let y = (size.height - drawSize.height) / 2
        let origin = CGPoint(x: x, y: y)
        return UIGraphicsImageRenderer(size: size, format: format).image { _ in
            draw(in: CGRect(origin: origin, size: drawSize))
        }
    }
}
Usage:
class ViewController: UIViewController {
    // imageView frame is 200 x 200
    @IBOutlet weak var imageView: UIImageView!

    override func viewDidLoad() {
        super.viewDidLoad()
        // original image size is (719.0, 808.0)
        let image = UIImage(data: try! Data(contentsOf: URL(string: "https://i.stack.imgur.com/Xs4RX.jpg")!))!
        imageView.backgroundColor = .gray
        let ivImage = image.aspectFitScaled(to: imageView.frame.size)
        imageView.image = ivImage

        print("ivImage.size", ivImage.size)   // (200.0, 200.0)
        print("ivImage.scale", ivImage.scale) // screen scale 3.0 on iPhone 8 Plus

        // Let's check the real image dimensions
        let data = ivImage.jpegData(compressionQuality: 1)!
        let savedSize = UIImage(data: data)!.size
        print("savedSize", savedSize) // savedSize (600.0, 600.0)
    }
}
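If the requirement is exactly 100x100 pixels regardless of the device's screen scale, another option is to pin the renderer format's scale to 1 so one point maps to one pixel. A minimal sketch (the helper name resizedToExactPixels is mine):

import UIKit

extension UIImage {
    // Draws the image into an exact pixel size; with scale = 1 the
    // output's point size equals its pixel size.
    func resizedToExactPixels(_ pixelSize: CGSize) -> UIImage {
        let format = UIGraphicsImageRendererFormat()
        format.scale = 1
        let renderer = UIGraphicsImageRenderer(size: pixelSize, format: format)
        return renderer.image { _ in
            draw(in: CGRect(origin: .zero, size: pixelSize))
        }
    }
}

// let thumbnail = userImage.resizedToExactPixels(CGSize(width: 100, height: 100))

This stretches the image to fill the target; combine it with AVMakeRect as above if you need to preserve the aspect ratio inside the square.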
I have an image in a UIImageView:

imageView.contentMode = .scaleAspectFit
imageView.backgroundColor = .red

Because of .scaleAspectFit, the image view has some red borders, and that's OK.
The user can add some UIViews, like labels or images, over the imageView.
In the final step, I use the following code to save the edited image so the user can share it or save it to the photo library:
private func generateImage() -> UIImage? {
    var finalImage: UIImage?
    UIGraphicsBeginImageContextWithOptions(imageView.frame.size, true, 0)
    imageView.drawHierarchy(in: CGRect(x: 0, y: 0, width: imageView.frame.size.width, height: imageView.frame.size.height), afterScreenUpdates: true)
    finalImage = UIGraphicsGetImageFromCurrentImageContext()
    UIGraphicsEndImageContext()
    return finalImage
}
The problem is that finalImage still has the red borders from the imageView.
You can get the CGRect of the UIImage displayed in a UIImageView with aspect-fit content mode. Create an extension of UIImageView like this:
extension UIImageView {
    var contentClippingRect: CGRect {
        guard let image = image else { return bounds }
        guard contentMode == .scaleAspectFit else { return bounds }
        guard image.size.width > 0 && image.size.height > 0 else { return bounds }

        // Aspect fit uses the smaller of the two ratios, so the whole
        // image fits inside the bounds.
        let scale = min(bounds.width / image.size.width,
                        bounds.height / image.size.height)

        let size = CGSize(width: image.size.width * scale, height: image.size.height * scale)
        let x = (bounds.width - size.width) / 2.0
        let y = (bounds.height - size.height) / 2.0
        return CGRect(x: x, y: y, width: size.width, height: size.height)
    }
}
You can now use imageView.contentClippingRect to read the position and size of the image inside the view.
You have to make minor changes in your method: call your function with contentClippingRect as the bounds, as shown in the sketch below.
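A minimal sketch of that change to generateImage(), assuming the extension above is in scope:

private func generateImage() -> UIImage? {
    // Size the context to the image's visible region only, so the
    // red aspect-fit borders fall outside the rendered area.
    let clippingRect = imageView.contentClippingRect
    UIGraphicsBeginImageContextWithOptions(clippingRect.size, true, 0)
    // Offset the drawing so the clipped region lands at the origin.
    imageView.drawHierarchy(in: CGRect(x: -clippingRect.origin.x,
                                       y: -clippingRect.origin.y,
                                       width: imageView.frame.size.width,
                                       height: imageView.frame.size.height),
                            afterScreenUpdates: true)
    let finalImage = UIGraphicsGetImageFromCurrentImageContext()
    UIGraphicsEndImageContext()
    return finalImage
}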
Let me know in case of any queries.
UPDATE
Please try this UIImageView+Extension; it might help you. It is Objective-C code, so convert it to Swift.
You can try this as well:

import AVFoundation

let image = #imageLiteral(resourceName: "Cat03")
let rect: CGRect = AVMakeRect(aspectRatio: image.size, insideRect: imageView1.frame)
print(rect)

The code above gives you the displayed rect precisely.
I am trying to resize an NSImage using code that I got from this website: https://gist.github.com/eiskalteschatten/dac3190fce5d38fdd3c944b45a4ca469, but it's not working.
Here is the code:
static func redimensionaNSImage(imagem: NSImage, tamanho: NSSize) -> NSImage {
    var imagemRect: CGRect = CGRect(x: 0, y: 0, width: imagem.size.width, height: imagem.size.height)
    let imagemRef = imagem.cgImage(forProposedRect: &imagemRect, context: nil, hints: nil)
    return NSImage(cgImage: imagemRef!, size: tamanho)
}
I forgot to calculate the ratio. Now it's working fine.
static func redimensionaNSImage(imagem: NSImage, tamanho: NSSize) -> NSImage {
    var ratio: Float = 0.0
    let imageWidth = Float(imagem.size.width)
    let imageHeight = Float(imagem.size.height)
    let maxWidth = Float(tamanho.width)
    let maxHeight = Float(tamanho.height)

    // Get ratio (landscape or portrait)
    if imageWidth > imageHeight {
        // Landscape
        ratio = maxWidth / imageWidth
    } else {
        // Portrait
        ratio = maxHeight / imageHeight
    }

    // Calculate new size based on the ratio
    let newWidth = imageWidth * ratio
    let newHeight = imageHeight * ratio

    // Create a new NSSize object with the newly calculated size
    let newSize: NSSize = NSSize(width: Int(newWidth), height: Int(newHeight))

    // Cast the NSImage to a CGImage
    var imageRect: CGRect = CGRect(x: 0, y: 0, width: imagem.size.width, height: imagem.size.height)
    let imageRef = imagem.cgImage(forProposedRect: &imageRect, context: nil, hints: nil)

    // Create NSImage from the CGImage using the new size
    let imageWithNewSize = NSImage(cgImage: imageRef!, size: newSize)

    // Return the new image
    return imageWithNewSize
}
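Usage, for example to fit an image into a 100x100 bounding box (the asset name is hypothetical):

let original = NSImage(named: "photo")!
let thumbnail = redimensionaNSImage(imagem: original,
                                    tamanho: NSSize(width: 100, height: 100))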
I want just a specific part of my captured image to be cropped into a circular shape right before it's displayed.
Here is the code for capturing still images:
@IBAction func takePhoto(sender: UIButton) {
    self.stillImageOutput!.captureStillImageAsynchronouslyFromConnection(self.stillImageOutput!.connectionWithMediaType(AVMediaTypeVideo)) { (buffer: CMSampleBuffer!, error: NSError!) -> Void in
        let image = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(buffer)
        let data_image = UIImage(data: image)
        self.previewImage.image = data_image
    }
}
Thank you
if let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(buffer) {
    if let image = UIImage(data: imageData) {
        // Use the largest centered square of the image for the circle.
        let square = image.size.width < image.size.height
            ? CGSize(width: image.size.width, height: image.size.width)
            : CGSize(width: image.size.height, height: image.size.height)
        let imageView = UIImageView(frame: CGRect(origin: CGPoint(x: 0, y: 0), size: square))
        imageView.contentMode = UIViewContentMode.ScaleAspectFill
        imageView.image = image
        imageView.layer.cornerRadius = square.width / 2
        imageView.layer.masksToBounds = true

        UIGraphicsBeginImageContext(imageView.bounds.size)
        imageView.layer.renderInContext(UIGraphicsGetCurrentContext()!)
        let result = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()

        self.previewImage.image = result
    }
}
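On modern iOS you can get the same circular crop without an offscreen UIImageView by clipping a UIGraphicsImageRenderer context directly. A minimal sketch (the helper name circularCrop is mine; AVCaptureStillImageOutput itself is deprecated in favor of AVCapturePhotoOutput, but the cropping part is independent of how the photo was captured):

import UIKit

func circularCrop(_ image: UIImage) -> UIImage {
    // Use the largest centered square of the image, clipped to a circle.
    let side = min(image.size.width, image.size.height)
    let squareSize = CGSize(width: side, height: side)
    let renderer = UIGraphicsImageRenderer(size: squareSize)
    return renderer.image { _ in
        UIBezierPath(ovalIn: CGRect(origin: .zero, size: squareSize)).addClip()
        // Center the full image so the square covers its middle.
        let origin = CGPoint(x: (side - image.size.width) / 2,
                             y: (side - image.size.height) / 2)
        image.draw(in: CGRect(origin: origin, size: image.size))
    }
}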