I am using the code below to capture the screen in a macOS application:
let img = CGDisplayCreateImage(CGMainDisplayID())
guard let destination = FileManager.default.urls(for: .downloadsDirectory,
                                                 in: .userDomainMask).first?.appendingPathComponent("shot.jpg", isDirectory: false)
else {
    print("Unable to save captured image!")
    return
}

let properties: CFDictionary = [
    kCGImagePropertyPixelWidth: "900",
    kCGImagePropertyPixelHeight: "380"
] as CFDictionary

if let dest = CGImageDestinationCreateWithURL(destination as CFURL, kUTTypeJPEG, 1, properties) {
    CGImageDestinationAddImage(dest, img!, properties)
    CGImageDestinationFinalize(dest)
} else {
    print("Unable to create captured image to the destination!")
}
I have to scale the image to a particular size while saving, so I used a CFDictionary with the width and height properties of the image. But it seems I am doing it wrong. Please help me find the correct solution. Thank you!
First, you can't resize using CGImageDestinationCreateWithURL or CGImageDestinationAddImage. If you look at the docs here and here, you will notice that neither kCGImagePropertyPixelWidth nor kCGImagePropertyPixelHeight is supported.
You will need to resize manually. You can use this tool, or modify it, if you find it helpful. It supports fill (stretch) and fit (scale while keeping the original aspect ratio) content modes. If you specify .fit, it will center the drawing in the resulting image. If you specify .fill, it will fill the whole space, stretching whichever dimension it needs to.
enum ImageResizer {
    enum ContentMode {
        case fill
        case fit
    }

    enum Error: Swift.Error {
        case badOriginal
        case resizeFailed
    }

    static func resize(_ source: CGImage, to targetSize: CGSize, mode: ContentMode) throws -> CGImage {
        let context = CGContext(
            data: nil,
            width: Int(targetSize.width),
            height: Int(targetSize.height),
            bitsPerComponent: source.bitsPerComponent,
            bytesPerRow: 0,
            space: source.colorSpace ?? CGColorSpace(name: CGColorSpace.sRGB)!,
            bitmapInfo: source.bitmapInfo.rawValue
        )
        guard let context = context else {
            throw Error.badOriginal
        }

        let drawingSize: CGSize
        switch mode {
        case .fill:
            drawingSize = targetSize
        case .fit:
            drawingSize = CGSize(width: source.width, height: source.height)
                .scaledToFit(target: targetSize)
        }

        let drawRect = CGRect(origin: .zero, size: targetSize)
            .makeCenteredRect(withSize: drawingSize)

        context.interpolationQuality = .high
        context.draw(source, in: drawRect)

        guard let result = context.makeImage() else {
            throw Error.resizeFailed
        }
        return result
    }
}
ImageResizer depends on these CG extensions for scaling the source image and centering the scaled image:
extension CGSize {
    var maxDimension: CGFloat {
        Swift.max(width, height)
    }

    var minDimension: CGFloat {
        Swift.min(width, height)
    }

    func scaled(by scalar: CGFloat) -> CGSize {
        CGSize(width: width * scalar, height: height * scalar)
    }

    func scaleFactors(to target: CGSize) -> CGSize {
        CGSize(
            width: target.width / width,
            height: target.height / height
        )
    }

    func scaledToFit(target: CGSize) -> CGSize {
        scaled(by: scaleFactors(to: target).minDimension)
    }
}

extension CGRect {
    func makeCenteredRect(withSize size: CGSize) -> CGRect {
        let origin = CGPoint(
            x: midX - size.width / 2.0,
            y: midY - size.height / 2.0
        )
        return CGRect(origin: origin, size: size)
    }
}
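For completeness, here is a minimal sketch of how the resizer plugs into your original save code. UTType.jpeg assumes macOS 11+ (UniformTypeIdentifiers); on older targets keep kUTTypeJPEG. The 900x380 size and "shot.jpg" name come from your question.

import CoreGraphics
import ImageIO
import UniformTypeIdentifiers

if let img = CGDisplayCreateImage(CGMainDisplayID()),
   let resized = try? ImageResizer.resize(img, to: CGSize(width: 900, height: 380), mode: .fit),
   let destination = FileManager.default.urls(for: .downloadsDirectory, in: .userDomainMask)
       .first?.appendingPathComponent("shot.jpg", isDirectory: false),
   let dest = CGImageDestinationCreateWithURL(destination as CFURL,
                                              UTType.jpeg.identifier as CFString, 1, nil) {
    // No resize properties needed here; the image is already the target size
    CGImageDestinationAddImage(dest, resized, nil)
    CGImageDestinationFinalize(dest)
}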
Also, make sure you set up permissions if you're going to save to .downloadsDirectory (for a sandboxed app, that typically means the com.apple.security.files.downloads.read-write entitlement).
Related
I tried to look up the answer on the internet and did not find it; maybe I phrased my search query wrong.
Can you tell me how to crop a photo to a 3:4 aspect ratio? I can't find a function to crop it. Please share.
You can use this function to crop your image to any aspect ratio (e.g. 3:4). Pass your image and the desired aspect ratio as parameters, and it will return the cropped image. Note that the ratio is compared as height/width in the code below.
func crop(image: UIImage, to aspectRatio: CGFloat) -> UIImage {
    let originalAspectRatio = image.size.height / image.size.width
    var newImageSize = image.size

    if originalAspectRatio > aspectRatio {
        // Too tall: trim the height
        newImageSize.height = image.size.width * aspectRatio
    } else if originalAspectRatio < aspectRatio {
        // Too wide: trim the width
        newImageSize.width = image.size.height / aspectRatio
    } else {
        return image
    }

    let center = CGPoint(x: image.size.width / 2, y: image.size.height / 2)
    let origin = CGPoint(x: center.x - newImageSize.width / 2,
                         y: center.y - newImageSize.height / 2)

    // CGImage.cropping(to:) works in pixels, while UIImage.size is in points
    let cropRect = CGRect(x: origin.x * image.scale,
                          y: origin.y * image.scale,
                          width: newImageSize.width * image.scale,
                          height: newImageSize.height * image.scale)

    guard let cgCroppedImage = image.cgImage?.cropping(to: cropRect) else {
        return image
    }
    return UIImage(cgImage: cgCroppedImage, scale: image.scale, orientation: image.imageOrientation)
}
Usage:
let croppedImage = crop(image: yourImage, to: 3/4) // yourImage is the UIImage to crop
Use the following function. For more details read this.
func cropImage(_ inputImage: UIImage, toRect cropRect: CGRect, viewWidth: CGFloat, viewHeight: CGFloat) -> UIImage? {
    let imageViewScale = max(inputImage.size.width / viewWidth,
                             inputImage.size.height / viewHeight)

    // Scale cropRect to handle images larger than shown-on-screen size
    let cropZone = CGRect(x: cropRect.origin.x * imageViewScale,
                          y: cropRect.origin.y * imageViewScale,
                          width: cropRect.size.width * imageViewScale,
                          height: cropRect.size.height * imageViewScale)

    // Perform cropping in Core Graphics
    guard let cutImageRef: CGImage = inputImage.cgImage?.cropping(to: cropZone) else {
        return nil
    }

    // Convert back to UIImage
    let croppedImage: UIImage = UIImage(cgImage: cutImageRef)
    return croppedImage
}
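Hypothetical usage (myImage, selectionRect, and imageView are placeholders for your source image, the crop rectangle in view coordinates, and the displaying view):

let cropped = cropImage(myImage,
                        toRect: selectionRect,
                        viewWidth: imageView.bounds.width,
                        viewHeight: imageView.bounds.height)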
I'm taking snapshots from a PDFView in PDFKit for streaming (20 times per second), and I use this extension:
extension UIView {
    func asImageBackground(viewLayer: CALayer, viewBounds: CGRect) -> UIImage {
        let renderer = UIGraphicsImageRenderer(bounds: viewBounds)
        return renderer.image { rendererContext in
            viewLayer.render(in: rendererContext.cgContext)
        }
    }
}
But the output UIImage from this extension has a high resolution, which makes it difficult to stream. I can reduce it with this extension:
extension UIImage {
    func resize(_ max_size: CGFloat) -> UIImage {
        // adjust for device pixel density
        let max_size_pixels = max_size / UIScreen.main.scale
        // work out aspect ratio
        let aspectRatio = size.width / size.height
        // variables for storing calculated data
        var width: CGFloat
        var height: CGFloat
        var newImage: UIImage
        if aspectRatio > 1 {
            // landscape
            width = max_size_pixels
            height = max_size_pixels / aspectRatio
        } else {
            // portrait
            height = max_size_pixels
            width = max_size_pixels * aspectRatio
        }
        // create an image renderer of the correct size
        let renderer = UIGraphicsImageRenderer(size: CGSize(width: width, height: height),
                                               format: UIGraphicsImageRendererFormat.default())
        // render the image
        newImage = renderer.image { context in
            self.draw(in: CGRect(x: 0, y: 0, width: width, height: height))
        }
        // return the image
        return newImage
    }
}
but it adds additional workload, which makes the process even worse. Is there a better way?
Thanks
You can downsample it using ImageIO, which is recommended by Apple:
extension UIImage {
    func downsample(to resolution: CGSize) -> UIImage? {
        let imageSourceOptions = [kCGImageSourceShouldCache: false] as CFDictionary
        guard let data = self.jpegData(compressionQuality: 0.75) as CFData?,
              let imageSource = CGImageSourceCreateWithData(data, imageSourceOptions) else {
            return nil
        }
        let maxDimensionInPixels = Swift.max(resolution.width, resolution.height) * 3
        let downsampleOptions = [
            kCGImageSourceCreateThumbnailFromImageAlways: true,
            kCGImageSourceShouldCacheImmediately: true,
            kCGImageSourceCreateThumbnailWithTransform: true,
            kCGImageSourceThumbnailMaxPixelSize: maxDimensionInPixels
        ] as CFDictionary
        guard let downsampledImage = CGImageSourceCreateThumbnailAtIndex(imageSource, 0, downsampleOptions) else {
            return nil
        }
        return UIImage(cgImage: downsampledImage)
    }
}
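Hypothetical usage with the snapshot from the question (the 640x360 target is an assumption, not from the original). Note that jpegData re-encodes the image before ImageIO decodes it again, so for a 20 fps stream it is worth measuring whether that round trip is acceptable:

// snapshot is the UIImage returned by asImageBackground(viewLayer:viewBounds:)
let frameToStream = snapshot.downsample(to: CGSize(width: 640, height: 360))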
I'm displaying images in my app that are downloaded from the network, but I'd like to downsample them so they aren't taking up multiple MB of memory. I could previously do this quite easily with UIKit:
func resizedImage(image: UIImage, for size: CGSize) -> UIImage? {
    let renderer = UIGraphicsImageRenderer(size: size)
    return renderer.image { context in
        image.draw(in: CGRect(origin: .zero, size: size))
    }
}
There are other methods as well, but they all depend on knowing the image view's desired size, which isn't straightforward in SwiftUI.
Is there a good API/method specifically for downsampling SwiftUI images?
I ended up solving it with GeometryReader, which isn't ideal since it messes up the layout a bit.
@State var image: UIImage

var body: some View {
    GeometryReader { geo in
        Image(uiImage: self.image)
            .resizable()
            .aspectRatio(contentMode: .fit)
            .onAppear {
                let imageFrame = CGRect(x: 0, y: 0, width: geo.size.width, height: geo.size.height)
                self.downsize(frame: imageFrame) // call whatever downsizing function you want
            }
    }
}
Use the geometry proxy to determine the image's frame, then downsample to that frame. I wish SwiftUI had its own API for this.
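For reference, a minimal sketch of what that downsize function could look like (hypothetical; substitute any of the resizing approaches from the other answers):

// Hypothetical helper on the same view: re-renders the @State image at the measured frame size
private func downsize(frame: CGRect) {
    let renderer = UIGraphicsImageRenderer(size: frame.size)
    image = renderer.image { _ in
        image.draw(in: CGRect(origin: .zero, size: frame.size))
    }
}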
For resizing, use this function. It works fast in lists or LazyVStack as well, and it reduces the memory consumption of the images.
public var body: some View {
    GeometryReader { proxy in
        let image = UIImage(named: imageName)?
            .resize(height: proxy.size.height)
        Image(uiImage: image ?? UIImage())
            .resizable()
            .scaledToFill()
    }
}
public extension UIImage {
    /// Resizes the image by keeping the aspect ratio
    func resize(height: CGFloat) -> UIImage {
        let scale = height / self.size.height
        let width = self.size.width * scale
        let newSize = CGSize(width: width, height: height)
        let renderer = UIGraphicsImageRenderer(size: newSize)
        return renderer.image { _ in
            self.draw(in: CGRect(origin: .zero, size: newSize))
        }
    }
}
This method uses a CIFilter (CILanczosScaleTransform) to scale down a UIImage:
https://developer.apple.com/library/archive/documentation/GraphicsImaging/Reference/CoreImageFilterReference/index.html#//apple_ref/doc/filter/ci/CILanczosScaleTransform
public extension UIImage {
    func downsampled(by reductionAmount: Float) -> UIImage? {
        let image = CIImage(image: self)
        guard let lanczosFilter = CIFilter(name: "CILanczosScaleTransform") else { return nil }
        lanczosFilter.setValue(image, forKey: kCIInputImageKey)
        lanczosFilter.setValue(NSNumber(value: reductionAmount), forKey: kCIInputScaleKey)
        guard let outputImage = lanczosFilter.outputImage else { return nil }
        let context = CIContext(options: [.useSoftwareRenderer: false])
        guard let cgImage = context.createCGImage(outputImage, from: outputImage.extent) else { return nil }
        let scaledImage = UIImage(cgImage: cgImage)
        return scaledImage
    }
}
And then you can use it in a SwiftUI view (a scale of 0.3 yields an image 30% of the original size):
struct ContentView: View {
    var body: some View {
        if let uiImage = UIImage(named: "sample")?.downsampled(by: 0.3) {
            Image(uiImage: uiImage)
        }
    }
}
I have large images uploaded by users in Swift, and I need to resize them all to 100x100 px to create thumbnails to store on my server. So far I have found that this resizes an image given a CGSize:
func resizedImage(image: UIImage, size: CGSize) -> UIImage? {
    let renderer = UIGraphicsImageRenderer(size: size)
    return renderer.image { context in
        image.draw(in: CGRect(origin: .zero, size: size))
    }
}
Is there any way to create a CGSize knowing that my target size is strictly 100x100px?
Got this to work:
import AVFoundation // for AVMakeRect

extension UIImage {
    func resizedImage(pixelSize: (width: Int, height: Int)) -> UIImage? {
        let size = CGSize(width: CGFloat(pixelSize.width) / UIScreen.main.scale,
                          height: CGFloat(pixelSize.height) / UIScreen.main.scale)
        let rect = AVMakeRect(aspectRatio: self.size,
                              insideRect: CGRect(x: 0, y: 0, width: size.width, height: size.height))
        let renderer = UIGraphicsImageRenderer(size: size)
        return renderer.image { context in
            self.draw(in: rect)
        }
    }
}
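Hypothetical usage for the 100x100 px thumbnail case (userPhoto is a placeholder for the uploaded image):

let thumbnail = userPhoto.resizedImage(pixelSize: (width: 100, height: 100))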
You should initialize your renderer based on the user's device scale and multiply its width and height instead of dividing them:
extension UIImage {
    func aspectFitScaled(to size: CGSize) -> UIImage {
        let format = imageRendererFormat
        format.opaque = false
        format.scale = UIScreen.main.scale
        let isLandscape = self.size.width > self.size.height
        let ratio = isLandscape ? size.width / self.size.width : size.height / self.size.height
        let drawSize = CGSize(width: self.size.width * ratio, height: self.size.height * ratio)
        let x = (size.width - drawSize.width) / 2
        let y = (size.height - drawSize.height) / 2
        let origin = CGPoint(x: x, y: y)
        return UIGraphicsImageRenderer(size: size, format: format).image { _ in
            draw(in: CGRect(origin: origin, size: drawSize))
        }
    }
}
Usage:
class ViewController: UIViewController {
    // imageView frame is 200 x 200
    @IBOutlet weak var imageView: UIImageView!

    override func viewDidLoad() {
        super.viewDidLoad()
        // original image size is (719.0, 808.0)
        let image = UIImage(data: try! Data(contentsOf: URL(string: "https://i.stack.imgur.com/Xs4RX.jpg")!))!
        imageView.backgroundColor = .gray
        let ivImage = image.aspectFitScaled(to: imageView.frame.size)
        imageView.image = ivImage
        print("ivImage.size", ivImage.size)   // (200.0, 200.0)
        print("ivImage.scale", ivImage.scale) // screen scale 3.0 on iPhone 8 Plus
        // let's check the real image dimensions
        let data = ivImage.jpegData(compressionQuality: 1)!
        let savedSize = UIImage(data: data)!.size
        print("savedSize", savedSize) // savedSize (600.0, 600.0)
    }
}
I'm porting my SpriteKit app from iOS to macOS. I am designing my main menu in main.storyboard, and I have an image as the background. When I resize the window, however, my image does not fill the whole screen.
I've tried:
.scaleAxesIndependently //???
.scaleNone //Centre
.scaleProportionallyDown //???
.scaleProportionallyUpOrDown //AspectFit
but none are the same as .aspectFill.
I am using Swift.
By subclassing NSImageView and overriding intrinsicContentSize, you can resize the image while keeping its aspect ratio, like so:
class AspectFillImageView: NSImageView {
    override var intrinsicContentSize: CGSize {
        guard let img = self.image else { return .zero }
        let viewWidth = self.frame.size.width
        let ratio = viewWidth / img.size.width
        return CGSize(width: viewWidth, height: img.size.height * ratio)
    }
}
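One caveat worth noting: Auto Layout caches intrinsicContentSize, so if you swap the image at runtime the view won't resize on its own. A minimal sketch of the same subclass with invalidation added (this addition is mine, not from the original answer):

class AspectFillImageView: NSImageView {
    override var intrinsicContentSize: CGSize {
        guard let img = self.image else { return .zero }
        let viewWidth = self.frame.size.width
        let ratio = viewWidth / img.size.width
        return CGSize(width: viewWidth, height: img.size.height * ratio)
    }

    // Recompute the cached intrinsic size whenever the image changes
    override var image: NSImage? {
        didSet { invalidateIntrinsicContentSize() }
    }
}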
If you just want to fill the whole view ignoring the ratio, use this extension instead:
extension NSImage {
    func resize(to size: NSSize) -> NSImage {
        return NSImage(size: size, flipped: false, drawingHandler: {
            self.draw(in: $0)
            return true
        })
    }
}
Extension usage (resize is an instance method, so call it on an NSImage instance; myImage is a placeholder):
let resized = myImage.resize(to: self.view.frame.size)