White strip on iPad when rendering a view to an image - Swift

I am trying to render a black rectangle to an image and save it to the photo library. But every time I render it on my iPad, the picture has a white strip at the top; that doesn't happen when I do the same on an iPhone.
I am using Swift Playgrounds 4, so maybe that's the reason. It's a bit strange, since both views, the small one and the bigger one, are "iPads".
Thank you for your help!
That’s my code so far:
import SwiftUI

struct ContentView: View {
    var body: some View {
        VStack {
            Button("Snapshot") {
                // Save the screenshot
                let image = snapshotView.snapshot()
                UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil)
            }
        }
    }

    var snapshotView: some View {
        VStack {
            Rectangle()
                .frame(width: 200, height: 200)
        }
    }
}
extension View {
    func snapshot() -> UIImage {
        let controller = UIHostingController(rootView: self)
        let view = controller.view
        let targetSize = controller.view.intrinsicContentSize
        view?.bounds = CGRect(origin: .zero, size: targetSize)
        view?.backgroundColor = .clear
        let renderer = UIGraphicsImageRenderer(size: targetSize)
        return renderer.image { _ in
            view?.drawHierarchy(in: controller.view.bounds, afterScreenUpdates: true)
        }
    }
}
[Image of the rendered rectangle]

Related

Saving SwiftUI View as an image to photo album

I'm using an extension to View that I found on hackingwithswift.com:
extension View {
    func snapshot() -> UIImage {
        let controller = UIHostingController(rootView: self)
        let view = controller.view
        let targetSize = controller.view.intrinsicContentSize
        view?.bounds = CGRect(origin: .zero, size: targetSize)
        view?.backgroundColor = .clear
        let renderer = UIGraphicsImageRenderer(size: targetSize)
        return renderer.image { _ in
            view?.drawHierarchy(in: controller.view.bounds, afterScreenUpdates: true)
        }
    }
}
I'm using it in the following way:
I have an object of type Canvas, which contains some drawing; I also added a border to make it more visible. Then I save it to the photo album, but the final photo is out of position in relation to the original. I'm attaching a screenshot of my view and the final photo.
let image = canvas.snapshot()
UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil)
Add .edgesIgnoringSafeArea(.all) to the rootView: self parameter of the UIHostingController init call:
let controller = UIHostingController(rootView: self.edgesIgnoringSafeArea(.all))
That keeps the screenshot from being clipped and offset by the safe-area insets.
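For reference, here is a sketch of the whole helper with that change applied; it is the same hackingwithswift.com extension from above, only the rootView line differs (on iOS 14+ you could write .ignoresSafeArea() instead):
import SwiftUI

extension View {
    func snapshot() -> UIImage {
        // Ignoring the safe area prevents the hosting controller from
        // offsetting the content by the safe-area insets (the white strip).
        let controller = UIHostingController(rootView: self.edgesIgnoringSafeArea(.all))
        let view = controller.view
        let targetSize = controller.view.intrinsicContentSize
        view?.bounds = CGRect(origin: .zero, size: targetSize)
        view?.backgroundColor = .clear
        let renderer = UIGraphicsImageRenderer(size: targetSize)
        return renderer.image { _ in
            view?.drawHierarchy(in: controller.view.bounds, afterScreenUpdates: true)
        }
    }
}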

SwiftUI View to NSImage in AppKit

SwiftUI 2.0 | Swift 5.4 | Xcode 12.4 | macOS Big Sur 11.4
In iOS there is a way to render SwiftUI views to a UIImage, but it works only with UIKit, not AppKit:
extension View {
    func snapshot() -> UIImage {
        let controller = UIHostingController(rootView: self)
        let view = controller.view
        let targetSize = controller.view.intrinsicContentSize
        view?.bounds = CGRect(origin: .zero, size: targetSize)
        view?.backgroundColor = .clear
        let renderer = UIGraphicsImageRenderer(size: targetSize)
        return renderer.image { _ in
            view?.drawHierarchy(in: controller.view.bounds, afterScreenUpdates: true)
        }
    }
}
Is there any workaround to get this working with AppKit? UIGraphicsImageRenderer(size:) doesn't exist in AppKit, and there is no direct equivalent.
If I'm understanding your question correctly, you should be able to use an NSBitmapImageRep to do the trick!
Here's a simple example of generating an NSImage from a SwiftUI view (note: your host view must be visible!):
let view = MyView() // some SwiftUI view
let host = NSHostingView(rootView: view)
// Note: the host must be visible on screen, which I am guessing you
// already have. If not, add it to a window here.
let bitmapRep = host.bitmapImageRepForCachingDisplay(in: host.bounds)
host.cacheDisplay(in: host.bounds, to: bitmapRep!)
let image = NSImage(size: host.frame.size)
image.addRepresentation(bitmapRep!)
Here's an extension on NSHostingView that should safely make snapshots:
extension NSHostingView {
    func snapshot() -> NSImage? {
        // Make sure the view is visible:
        guard self.window != nil else { return nil }
        // Get the bitmap data:
        guard let bitmapRep = self.bitmapImageRepForCachingDisplay(in: self.bounds) else { return nil }
        self.cacheDisplay(in: self.bounds, to: bitmapRep)
        // Create an NSImage from the NSBitmapImageRep:
        let image = NSImage(size: self.frame.size)
        image.addRepresentation(bitmapRep)
        return image
    }
}
I hope this helps. Let me know if you have any questions!
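For completeness, a usage sketch; MyView and the frame are placeholders, and the host must already be installed in a visible window or snapshot() returns nil:
let host = NSHostingView(rootView: MyView())
host.frame = NSRect(x: 0, y: 0, width: 400, height: 300)
// ...add `host` to a visible window first, then:
if let image = host.snapshot() {
    // Use the NSImage, e.g. assign it to an NSImageView or write it to disk.
}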

Render SwiftUI view offscreen and save view as UIImage to share

I'm trying to create a share button with SwiftUI that, when pressed, can share a generated image. I've found some tutorials that can screenshot a currently displayed view and convert it to a UIImage. But I want to create a view programmatically, off screen, and then save it to a UIImage that users can share with a share sheet.
import SwiftUI
import SwiftyJSON
import MapKit

struct ShareRentalView: View {
    @State private var region = MKCoordinateRegion(center: CLLocationCoordinate2D(latitude: 32.786038, longitude: -117.237324), span: MKCoordinateSpan(latitudeDelta: 0.025, longitudeDelta: 0.025))
    @State var coordinates: [JSON] = []
    @State var origin: CGPoint? = nil
    @State var size: CGSize? = nil

    var body: some View {
        GeometryReader { geometry in
            VStack(spacing: 0) {
                ZStack {
                    HistoryMapView(region: region, pointsArray: $coordinates)
                        .frame(height: 300)
                }.frame(height: 300)
            }.onAppear {
                self.origin = geometry.frame(in: .global).origin
                self.size = geometry.size
            }
        }
    }

    func returnScreenShot() -> UIImage {
        return takeScreenshot(origin: self.origin.unsafelyUnwrapped, size: self.size.unsafelyUnwrapped)
    }
}
extension UIView {
    var renderedImage: UIImage {
        // rect of capture
        let rect = self.bounds
        // create the bitmap context
        UIGraphicsBeginImageContextWithOptions(rect.size, false, 0.0)
        let context: CGContext = UIGraphicsGetCurrentContext()!
        self.layer.render(in: context)
        // get an image from the current context's bitmap
        let capturedImage: UIImage = UIGraphicsGetImageFromCurrentImageContext()!
        UIGraphicsEndImageContext()
        return capturedImage
    }
}
extension View {
    func takeScreenshot(origin: CGPoint, size: CGSize) -> UIImage {
        let window = UIWindow(frame: CGRect(origin: origin, size: size))
        let hosting = UIHostingController(rootView: self)
        hosting.view.frame = window.frame
        window.addSubview(hosting.view)
        window.makeKeyAndVisible()
        return hosting.view.renderedImage
    }
}
This is roughly my code idea at the moment. I have a view that, onAppear, sets the CGPoint and CGSize for the screen capture, plus an attached method that then takes the screenshot. The problem is that this view never renders, because I never add it to a parent view, since I don't want it to appear to the user. In the parent view I have:
struct HistoryCell: View {
    ...
    private var shareRental: ShareRentalView? = nil
    private var uiimage: UIImage? = nil
    ...
    init() {
        ...
        self.shareRental = ShareRentalView()
    }

    var body: some View {
        ...
        Button(action: { self.uiimage = self.shareRental?.returnScreenShot() }) { ... }
        ...
    }
}
This doesn't work because the view I want to screenshot is never rendered. Is there a way to render it in memory or off screen and then create an image from it? Or do I need to think of another way of doing this?
This ended up working to get a screenshot of a view that was not presented on the screen and save it as a UIImage:
extension UIView {
    func asImage() -> UIImage {
        let format = UIGraphicsImageRendererFormat()
        format.scale = 1
        return UIGraphicsImageRenderer(size: self.layer.frame.size, format: format).image { _ in
            self.drawHierarchy(in: self.layer.bounds, afterScreenUpdates: true)
        }
    }
}
extension View {
    func asImage() -> UIImage {
        let controller = UIHostingController(rootView: self)
        let size = controller.sizeThatFits(in: UIScreen.main.bounds.size)
        controller.view.bounds = CGRect(origin: .zero, size: size)
        let image = controller.view.asImage()
        return image
    }
}
And then in my parent view:
var shareRental: ShareRentalView?

init() {
    ....
    self.shareRental = ShareRentalView()
}

var body: some View {
    Button(action: {
        let shareImage = self.shareRental?.asImage()
    }) { ... }
}
This gets me almost there. But MKMapSnapshotter has a delay while loading, the image creation happens too fast, and there is no map in the UIImage when it is created.
To get around the delay in map loading, I created a class that builds all the UIImages and stores them in an array.
class MyUser: ObservableObject {
    ...
    public func buildHistoryRental() {
        self.historyRentals.removeAll()
        MapSnapshot().generateSnapshot(completion: self.snapShotRsp)
    }

    private func snapShotRsp(image: UIImage) {
        self.historyRentals.append(image)
    }
}
And then I made a class that creates the snapshot images, like this:
func generateSnapshot(completion: @escaping (UIImage) -> Void) {
    let mapSnapshotOptions = MKMapSnapshotter.Options()
    // Set the region of the map that is rendered (from the polyline).
    let polyLine = MKPolyline(coordinates: &yourCoordinates, count: yourCoordinates.count)
    let region = MKCoordinateRegion(polyLine.boundingMapRect)
    mapSnapshotOptions.region = region
    // Set the scale of the image. We'll just use the scale of the current device, which is 2x scale on Retina screens.
    mapSnapshotOptions.scale = UIScreen.main.scale
    // Set the size of the image output.
    mapSnapshotOptions.size = CGSize(width: IMAGE_VIEW_WIDTH, height: IMAGE_VIEW_HEIGHT)
    // Show buildings and points of interest on the snapshot.
    mapSnapshotOptions.showsBuildings = true
    mapSnapshotOptions.showsPointsOfInterest = true

    let snapshotter = MKMapSnapshotter(options: mapSnapshotOptions)
    var image = UIImage()
    snapshotter.start { snapshot, error in
        if let error = error {
            print("\(error)")
        } else if let snapshot = snapshot {
            image = self.drawLineOnImage(snapshot: snapshot)
        }
        completion(image)
    }
}
func drawLineOnImage(snapshot: MKMapSnapshotter.Snapshot) -> UIImage {
    let image = snapshot.image
    // For the Retina screen.
    UIGraphicsBeginImageContextWithOptions(self.imageView.frame.size, true, 0)
    // Draw the original image into the context.
    image.draw(at: CGPoint.zero)
    // Get the CoreGraphics context.
    let context = UIGraphicsGetCurrentContext()
    // Set the stroke width and color of the context.
    context!.setLineWidth(2.0)
    context!.setStrokeColor(UIColor.orange.cgColor)
    // Here is the trick:
    // We use addLine() and move() to draw the line, which should be easy to understand.
    // The difficult part is that they both take CGPoint parameters, and it would be
    // way too complex to calculate those ourselves,
    // so we use snapshot.point(for:) to save the pain.
    context!.move(to: snapshot.point(for: yourCoordinates[0]))
    for i in 0...yourCoordinates.count - 1 {
        context!.addLine(to: snapshot.point(for: yourCoordinates[i]))
        context!.move(to: snapshot.point(for: yourCoordinates[i]))
    }
    // Apply the stroke to the context.
    context!.strokePath()
    // Get the image from the graphics context.
    let resultImage = UIGraphicsGetImageFromCurrentImageContext()
    // End the graphics context.
    UIGraphicsEndImageContext()
    return resultImage!
}
It's important to return the image asynchronously via the callback. Trying to return the image directly from the function call yielded a blank map.
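To illustrate that point, here is a minimal sketch of the call site, assuming historyRentals is a @Published array on the ObservableObject (the main-queue hop is my own defensive addition):
MapSnapshot().generateSnapshot { image in
    // Hand the image back on the main queue before touching published state;
    // harmless if the snapshotter already calls back there.
    DispatchQueue.main.async {
        self.historyRentals.append(image)
    }
}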

Downsampling Images with SwiftUI

I'm displaying images in my app that are downloaded from the network, but I'd like to downsample them so they aren't taking up multiple MB of memory. I could previously do this quite easily with UIKit:
func resizedImage(image: UIImage, for size: CGSize) -> UIImage? {
    let renderer = UIGraphicsImageRenderer(size: size)
    return renderer.image { (context) in
        image.draw(in: CGRect(origin: .zero, size: size))
    }
}
There are other methods as well, but they all depend on knowing the image view's desired size, which isn't straightforward in SwiftUI.
Is there a good API/method specifically for downsampling SwiftUI images?
I ended up solving it with GeometryReader, which isn't ideal since it messes up the layout a bit.
@State var image: UIImage

var body: some View {
    GeometryReader { geo in
        Image(uiImage: self.image)
            .resizable()
            .aspectRatio(contentMode: .fit)
            .onAppear {
                let imageFrame = CGRect(x: 0, y: 0, width: geo.size.width, height: geo.size.height)
                self.downsize(frame: imageFrame) // call whatever downsizing function you want
            }
    }
}
Use the geometry proxy to determine the image's frame, then downsample to that frame. I wish SwiftUI had its own API for this.
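As an alternative that avoids decoding the full-size image first, ImageIO can produce a downsampled bitmap directly from the encoded data. This is a sketch, not from the original answers; the helper name and parameters are mine:
import ImageIO
import UIKit

func downsampledImage(from data: Data, to pointSize: CGSize, scale: CGFloat) -> UIImage? {
    // Don't decode the full image just to create the source.
    let sourceOptions = [kCGImageSourceShouldCache: false] as CFDictionary
    guard let source = CGImageSourceCreateWithData(data as CFData, sourceOptions) else { return nil }
    let maxDimension = max(pointSize.width, pointSize.height) * scale
    let thumbnailOptions = [
        kCGImageSourceCreateThumbnailFromImageAlways: true,
        kCGImageSourceShouldCacheImmediately: true,
        kCGImageSourceCreateThumbnailWithTransform: true,
        kCGImageSourceThumbnailMaxPixelSize: maxDimension
    ] as CFDictionary
    guard let cgImage = CGImageSourceCreateThumbnailAtIndex(source, 0, thumbnailOptions) else { return nil }
    return UIImage(cgImage: cgImage)
}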
For resizing, use this function. It works fast even in lists or a LazyVStack and reduces the memory consumption of the images.
public var body: some View {
    GeometryReader { proxy in
        let image = UIImage(named: imageName)?
            .resize(height: proxy.size.height)
        Image(uiImage: image ?? UIImage())
            .resizable()
            .scaledToFill()
    }
}
public extension UIImage {
    /// Resizes the image, keeping the aspect ratio.
    func resize(height: CGFloat) -> UIImage {
        let scale = height / self.size.height
        let width = self.size.width * scale
        let newSize = CGSize(width: width, height: height)
        let renderer = UIGraphicsImageRenderer(size: newSize)
        return renderer.image { _ in
            self.draw(in: CGRect(origin: .zero, size: newSize))
        }
    }
}
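One caveat worth knowing: UIGraphicsImageRenderer uses the device's screen scale by default, so on a 3x device the rendered bitmap is still three times newSize in each dimension. If the goal is to cap pixel dimensions (and memory), pass an explicit format; a small sketch:
let format = UIGraphicsImageRendererFormat()
format.scale = 1 // one pixel per point, regardless of the device
let renderer = UIGraphicsImageRenderer(size: newSize, format: format)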
This method uses a CIFilter (CILanczosScaleTransform) to scale down a UIImage:
https://developer.apple.com/library/archive/documentation/GraphicsImaging/Reference/CoreImageFilterReference/index.html#//apple_ref/doc/filter/ci/CILanczosScaleTransform
import CoreImage

public extension UIImage {
    func downsampled(by reductionAmount: Float) -> UIImage? {
        let image = CIImage(image: self)
        guard let lanczosFilter = CIFilter(name: "CILanczosScaleTransform") else { return nil }
        lanczosFilter.setValue(image, forKey: kCIInputImageKey)
        lanczosFilter.setValue(NSNumber(value: reductionAmount), forKey: kCIInputScaleKey)
        guard let outputImage = lanczosFilter.outputImage else { return nil }
        let context = CIContext(options: [CIContextOption.useSoftwareRenderer: false])
        guard let cgImage = context.createCGImage(outputImage, from: outputImage.extent) else { return nil }
        let scaledImage = UIImage(cgImage: cgImage)
        return scaledImage
    }
}
And then you can use it in a SwiftUI view:
struct ContentView: View {
    var body: some View {
        if let uiImage = UIImage(named: "sample")?.downsampled(by: 0.3) {
            Image(uiImage: uiImage)
        }
    }
}
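A design note on the CIFilter approach: creating a CIContext is expensive, so if you downsample many images (for example in a list) it is worth reusing one context instead of building it inside downsampled(by:) on every call. A possible sketch, my own addition:
import CoreImage

enum SharedCIContext {
    // Reused across calls; building a CIContext per image is costly.
    static let context = CIContext(options: [.useSoftwareRenderer: false])
}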

How to Screenshot an NSView on macOS?

I currently have a function that creates a screenshot of the entire window in a Mac application. However, I would like to target a particular view. I know in iOS you can do the following:
extension WKWebView {
    var screenshot: UIImage {
        if #available(iOS 10.0, *) {
            return UIGraphicsImageRenderer(size: bounds.size).image { _ in
                drawHierarchy(in: CGRect(origin: .zero, size: bounds.size), afterScreenUpdates: true)
            }
        } else {
            UIGraphicsBeginImageContext(bounds.size)
            drawHierarchy(in: bounds, afterScreenUpdates: true)
            let image = UIGraphicsGetImageFromCurrentImageContext()!
            UIGraphicsEndImageContext()
            return image
        }
    }
}
How would I convert this to macOS, using AppKit instead of UIKit? The following is my current computed property for screenshots of the entire window:
extension WKWebView {
    var screenshot: NSImage {
        let displayID = CGMainDisplayID()
        let imageRef = CGDisplayCreateImage(displayID)
        return NSImage(cgImage: imageRef!, size: (NSScreen.main?.frame.size)!)
    }
}
You can use the NSView method func dataWithPDF(inside rect: NSRect) -> Data:
extension NSView {
    var image: NSImage? {
        return NSImage(data: dataWithPDF(inside: bounds))
    }
}
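As a hedged usage example (the image property comes from the extension above; someView and the output path are placeholders), converting the captured NSImage to PNG data and writing it to disk:
import AppKit

if let image = someView.image,                         // someView is any NSView
   let tiff = image.tiffRepresentation,
   let rep = NSBitmapImageRep(data: tiff),
   let png = rep.representation(using: .png, properties: [:]) {
    try? png.write(to: URL(fileURLWithPath: "/tmp/view.png")) // placeholder path
}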