How to zoom crop image in selected area - swift

I want to crop the image to a selected area. This is the area I want to crop:
This is the result I get:
The cropped image does not match what is shown inside the crop area.
I use a scroll view to zoom and pan the image view; this is how I set up the crop.
The snapshot method is taken from this Stack Overflow answer:
class ALCroppedPhotoViewController: UIViewController {
@IBOutlet weak var containerImage: UIView!
@IBOutlet weak var overlayView: UIView!
@IBOutlet weak var previewImageView: UIImageView!
@IBOutlet weak var rectHoleView: UIView!
@IBOutlet weak var scrollView: UIScrollView!
var data: Data?
var rect: CGRect!
override func viewDidLoad() {
super.viewDidLoad()
previewImageView.image = UIImage(data: data ?? Data())
setupScrollView()
}
override func viewDidLayoutSubviews() {
super.viewDidLayoutSubviews()
let midX = overlayView.bounds.midX
let midY = overlayView.bounds.midY
let center = CGPoint(x: midX, y: midY)
let size: CGFloat = 312
// Create the initial layer from the view bounds.
let maskLayer = CAShapeLayer()
maskLayer.frame = overlayView.bounds
// Create the path.
rect = CGRect(x: center.x - size / 2, y: center.y - size / 2, width: size, height: size)
let path = UIBezierPath(rect: overlayView.bounds)
maskLayer.fillRule = .evenOdd
// Append the overlay image to the path so that it is subtracted.
path.append(UIBezierPath(roundedRect: rect, cornerRadius: 20))
maskLayer.path = path.cgPath
// Set the mask of the view.
overlayView.layer.mask = maskLayer
}
private func setupScrollView() {
scrollView.minimumZoomScale = 1.0
scrollView.maximumZoomScale = 2.0
scrollView.delegate = self
}
func snapshot(in imageView: UIImageView, rect: CGRect) -> UIImage {
assert(imageView.contentMode == .scaleAspectFit)
let image = imageView.image!
// figure out what the scale is
let viewRatio = imageView.bounds.width / imageView.bounds.height
let imageRatio = image.size.width / image.size.height
let scale: CGFloat
if viewRatio > imageRatio {
scale = image.size.height / imageView.bounds.height
} else {
scale = image.size.width / imageView.bounds.width
}
// convert the `rect` into coordinates within the image, itself
let size = rect.size * scale
let origin = CGPoint(x: image.size.width / 2 - (imageView.bounds.midX - rect.minX) * scale,
y: image.size.height / 2 - (imageView.bounds.midY - rect.minY) * scale)
let scaledRect = CGRect(origin: origin, size: size)
// now render the image and grab the appropriate rectangle within
// the image’s coordinate system
let format = UIGraphicsImageRendererFormat()
format.scale = image.scale
format.opaque = false
return UIGraphicsImageRenderer(bounds: scaledRect, format: format).image { _ in
image.draw(at: .zero)
}
}
}
extension CGSize {
static func * (lhs: CGSize, rhs: CGFloat) -> CGSize {
return CGSize(width: lhs.width * rhs, height: lhs.height * rhs)
}
}
The image actually crops successfully, but it fails when I try to zoom in really closely. How can I crop correctly while zoomed in?
This is where the crash starts when I try to print
scrollView.zoomScale:
extension ALCroppedPhotoViewController: UIScrollViewDelegate {
func viewForZooming(in scrollView: UIScrollView) -> UIView? {
print(scrollView.zoomScale)
return previewImageView
}
}

UIScrollView has a zoomScale property that tells you how much the content is currently zoomed. You'll need to factor that into the scale calculation in your snapshot method.
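Rather than reworking the scale arithmetic by hand, one option (a sketch of my own, not the approach spelled out in the answer above) is to convert the crop rect from the overlay view into the image view's own coordinate space before calling snapshot(in:rect:). UIView.convert(_:to:) folds in the scroll view's current zoom transform and content offset, so the aspect-fit maths keep working at any zoomScale. The cropTapped action below is hypothetical:

@IBAction func cropTapped(_ sender: Any) {   // hypothetical trigger
    // `rect` is the hole cut into overlayView, expressed in overlayView's coordinates.
    // Converting it into previewImageView's coordinate space accounts for the
    // scroll view's current zoomScale and contentOffset.
    let rectInImageView = overlayView.convert(rect, to: previewImageView)
    let croppedImage = snapshot(in: previewImageView, rect: rectInImageView)
    // use croppedImage (e.g. show it or pass it back to the previous screen)
}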

Related

CATiledLayer in NSView flashes on changing contentsScale

I have a CATiledLayer inside an NSView, which is the documentView of an NSScrollView.
The storyboard setup is pretty straightforward: add an NSScrollView to the default view controller and assign the View class to the NSView inside the clip view.
The following code draws a number of squares of random color. Scrolling works exactly as it should with CATiledLayer, but zooming doesn't work very well:
I found plenty of CATiledLayer problems, and none of the proposed solutions work for me (such as subclassing with a fadeDuration of 0, or disabling CATransaction actions). I suspect setNeedsDisplay() is what breaks it, but I can't figure out the proper way to handle that. If I use a plain CALayer I don't see the flashing issues, but then I can't deal with large layers containing thousands of boxes.
The View class source:
import Cocoa
import CoreGraphics
import Combine
let rows = 1000
let columns = 1000
let width = 50.0
let height = 50.0
class View: NSView {
typealias Coordinate = (x: Int, y: Int)
private let colors: [[CGColor]]
private let rect = CGRect(origin: .zero, size: CGSize(width: width, height: height))
private var store = Set<AnyCancellable>()
private var scale: CGFloat {
guard let scrollView = self.superview?.superview as? NSScrollView else { fatalError() }
return NSScreen.main!.backingScaleFactor * scrollView.magnification
}
required init?(coder: NSCoder) {
colors = (0..<rows).map { _ in (0..<columns).map { _ in .random } }
super.init(coder: coder)
setFrameSize(NSSize(width: width * CGFloat(columns), height: height * CGFloat(rows)))
wantsLayer = true
NotificationCenter.default.publisher(for: NSScrollView.didEndLiveMagnifyNotification).sink { [unowned self] _ in
self.layer?.contentsScale = scale
self.layer?.setNeedsDisplay()
}.store(in: &store)
}
override func makeBackingLayer() -> CALayer {
let layer = CATiledLayer()
layer.tileSize = CGSize(width: 1000, height: 1000)
return layer
}
override func draw(_ dirtyRect: NSRect) {
guard let context = NSGraphicsContext.current?.cgContext else { return }
let (min, max) = coordinates(in: dirtyRect)
context.translateBy(x: CGFloat(min.x) * width, y: CGFloat(min.y) * height)
(min.y...max.y).forEach { row in
context.saveGState()
(min.x...max.x).forEach { column in
context.setFillColor(colors[row][column])
context.addRect(rect)
context.drawPath(using: .fillStroke)
context.translateBy(x: width, y: 0)
}
context.restoreGState()
context.translateBy(x: 0, y: height)
}
}
private func coordinates(in rect: NSRect) -> (Coordinate, Coordinate) {
var minX = Int(rect.minX / width)
var minY = Int(rect.minY / height)
var maxX = Int(rect.maxX / width)
var maxY = Int(rect.maxY / height)
if minX >= columns {
minX = columns - 1
}
if maxX >= columns {
maxX = columns - 1
}
if minY >= rows {
minY = rows - 1
}
if maxY >= rows {
maxY = rows - 1
}
return ((minX, minY), (maxX, maxY))
}
}
extension CGColor {
class var random: CGColor {
let random = { CGFloat(arc4random_uniform(255)) / 255.0 }
return CGColor(red: random(), green: random(), blue: random(), alpha: random())
}
}
To support zooming into a CATiledLayer, set the layer's levelsOfDetailBias. You don't need to observe the scroll view's magnification notifications, change the layer's contentsScale, or trigger manual redraws.
Here's a quick implementation that shows what kinds of dirtyRects you get at different zoom levels:
class View: NSView {
override init(frame frameRect: NSRect) {
super.init(frame: frameRect)
wantsLayer = true
}
required init?(coder: NSCoder) {
super.init(coder: coder)
wantsLayer = true
}
override func makeBackingLayer() -> CALayer {
let layer = CATiledLayer()
layer.tileSize = CGSize(width: 400, height: 400)
layer.levelsOfDetailBias = 3
return layer
}
override func draw(_ dirtyRect: NSRect) {
let context = NSGraphicsContext.current!
let scale = context.cgContext.ctm.a
NSColor.red.setFill()
dirtyRect.frame(withWidth: 10 / scale, using: .overlay)
NSColor.black.setFill()
let string: NSString = "Scale: \(scale)" as NSString
let attributes = [NSAttributedString.Key.font: NSFont.systemFont(ofSize: 40 / scale)]
let size = string.size(withAttributes: attributes)
string.draw(at: CGPoint(x: dirtyRect.midX - size.width / 2, y: dirtyRect.midY - size.height / 2),
withAttributes: attributes)
}
}
The current drawing context is already scaled to match the current zoom level (and the dirtyRects get smaller and smaller for each additional level of detail). You can extract the current scale from the CGContext's transformation matrix, as shown above, if needed.
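Applied to the View class from the question, that boils down to something like this (a sketch under the same assumptions as the question's code; the Combine observer, the contentsScale update and the manual setNeedsDisplay() all go away). With a bias of 3 the layer keeps re-rendering sharp tiles up to roughly 2^3 = 8x magnification:

override func makeBackingLayer() -> CALayer {
    let layer = CATiledLayer()
    layer.tileSize = CGSize(width: 1000, height: 1000)
    layer.levelsOfDetailBias = 3   // extra magnified levels of detail for zooming in
    return layer
}

override func draw(_ dirtyRect: NSRect) {
    guard let context = NSGraphicsContext.current?.cgContext else { return }
    let scale = context.ctm.a            // scale of the level of detail being drawn
    context.setLineWidth(1 / scale)      // e.g. keep strokes hairline at any zoom
    // ... the existing square-drawing loop from the question ...
}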

Cropping is not working perfectly as per the frame drawn

I am trying to crop a selected portion of an NSImage that is fitted with ProportionallyUpOrDown (aspect-fill) scaling.
I am drawing a selection frame using mouse-dragged events like this:
class CropImageView: NSImageView {
var startPoint: NSPoint!
var shapeLayer: CAShapeLayer!
var flagCheck = false
var finalPoint: NSPoint!
override init(frame frameRect: NSRect) {
super.init(frame: frameRect)
}
required init?(coder: NSCoder) {
super.init(coder: coder)
}
override func draw(_ dirtyRect: NSRect) {
super.draw(dirtyRect)
}
override var image: NSImage? {
set {
self.layer = CALayer()
self.layer?.contentsGravity = .resizeAspectFill
self.layer?.contents = newValue
self.wantsLayer = true
super.image = newValue
}
get {
return super.image
}
}
override func mouseDown(with event: NSEvent) {
self.startPoint = self.convert(event.locationInWindow, from: nil)
if self.shapeLayer != nil {
self.shapeLayer.removeFromSuperlayer()
self.shapeLayer = nil
}
self.flagCheck = true
var pixelColor: NSColor = NSReadPixel(startPoint) ?? NSColor()
shapeLayer = CAShapeLayer()
shapeLayer.lineWidth = 1.0
shapeLayer.fillColor = NSColor.clear.cgColor
if pixelColor == NSColor.black {
pixelColor = NSColor.color_white
} else {
pixelColor = NSColor.black
}
shapeLayer.strokeColor = pixelColor.cgColor
shapeLayer.lineDashPattern = [1]
self.layer?.addSublayer(shapeLayer)
var dashAnimation = CABasicAnimation()
dashAnimation = CABasicAnimation(keyPath: "lineDashPhase")
dashAnimation.duration = 0.75
dashAnimation.fromValue = 0.0
dashAnimation.toValue = 15.0
dashAnimation.repeatCount = 0.0
shapeLayer.add(dashAnimation, forKey: "linePhase")
}
override func mouseDragged(with event: NSEvent) {
let point: NSPoint = self.convert(event.locationInWindow, from: nil)
var newPoint: CGPoint = self.startPoint
let xDiff = point.x - self.startPoint.x
let yDiff = point.y - self.startPoint.y
let dist = min(abs(xDiff), abs(yDiff))
newPoint.x += xDiff > 0 ? dist : -dist
newPoint.y += yDiff > 0 ? dist : -dist
let path = CGMutablePath()
path.move(to: self.startPoint)
path.addLine(to: NSPoint(x: self.startPoint.x, y: newPoint.y))
path.addLine(to: newPoint)
path.addLine(to: NSPoint(x: newPoint.x, y: self.startPoint.y))
path.closeSubpath()
self.shapeLayer.path = path
}
override func mouseUp(with event: NSEvent) {
self.finalPoint = self.convert(event.locationInWindow, from: nil)
}
}
and selected this area, shown in the picture with a black dotted line:
My cropping logic is this:
// resize Image Methods
extension CropProfileView {
func resizeImage(image: NSImage) -> Data {
var scalingFactor: CGFloat = 0.0
if image.size.width >= image.size.height {
scalingFactor = image.size.width/cropImgView.size.width
} else {
scalingFactor = image.size.height/cropImgView.size.height
}
let width = (self.cropImgView.finalPoint.x - self.cropImgView.startPoint.x) * scalingFactor
let height = (self.cropImgView.startPoint.y - self.cropImgView.finalPoint.y) * scalingFactor
let xPos = ((image.size.width/2) - (cropImgView.bounds.midX - self.cropImgView.startPoint.x) * scalingFactor)
let yPos = ((image.size.height/2) - (cropImgView.bounds.midY - (cropImgView.size.height - self.cropImgView.startPoint.y)) * scalingFactor)
var croppedRect: NSRect = NSRect(x: xPos, y: yPos, width: width, height: height)
let imageRef = image.cgImage(forProposedRect: &croppedRect, context: nil, hints: nil)
guard let croppedImage = imageRef?.cropping(to: croppedRect) else {return Data()}
let imageWithNewSize = NSImage(cgImage: croppedImage, size: NSSize(width: width, height: height))
guard let data = imageWithNewSize.tiffRepresentation,
let rep = NSBitmapImageRep(data: data),
let imgData = rep.representation(using: .png, properties: [.compressionFactor: NSNumber(floatLiteral: 0.25)]) else {
return imageWithNewSize.tiffRepresentation ?? Data()
}
return imgData
}
}
With this cropping logic I am getting this output:
I think that because the image is aspect-filled, it is not being cropped to exactly the selected frame. If you look at the output, the x position, width and height are not quite right. Or perhaps I am not calculating these coordinates properly. Please let me know what I am calculating wrong.
Note: the CropImageView class in the question is a subclass of NSImageView but the view is layer-hosting and the image is drawn by the layer, not by NSImageView. imageScaling is not used.
When deciding which scaling factor to use, you have to take the size of the image view into account. If the image size is width 120, height 100 and the image view size is width 120, height 80, then image.size.width >= image.size.height is true and image.size.width/cropImgView.size.width is 1, but the image is still scaled, because image.size.height/cropImgView.size.height is 1.25. Calculate the horizontal and vertical scaling factors and use the larger one.
See How to crop a UIImageView to a new UIImage in 'aspect fill' mode?
Here's the calculation of croppedRect assuming cropImgView.size returns self.layer!.bounds.size.
var scalingWidthFactor: CGFloat = image.size.width/cropImgView.size.width
var scalingHeightFactor: CGFloat = image.size.height/cropImgView.size.height
var xOffset: CGFloat = 0
var yOffset: CGFloat = 0
switch cropImgView.layer?.contentsGravity {
case CALayerContentsGravity.resize: break
case CALayerContentsGravity.resizeAspect:
if scalingWidthFactor > scalingHeightFactor {
scalingHeightFactor = scalingWidthFactor
yOffset = (cropImgView.size.height - (image.size.height / scalingHeightFactor)) / 2
}
else {
scalingWidthFactor = scalingHeightFactor
xOffset = (cropImgView.size.width - (image.size.width / scalingWidthFactor)) / 2
}
case CALayerContentsGravity.resizeAspectFill:
if scalingWidthFactor < scalingHeightFactor {
scalingHeightFactor = scalingWidthFactor
yOffset = (cropImgView.size.height - (image.size.height / scalingHeightFactor)) / 2
}
else {
scalingWidthFactor = scalingHeightFactor
xOffset = (cropImgView.size.width - (image.size.width / scalingWidthFactor)) / 2
}
default:
print("contentsGravity \(String(describing: cropImgView.layer?.contentsGravity)) is not supported")
return nil
}
let width = (self.cropImgView.finalPoint.x - self.cropImgView.startPoint.x) * scalingWidthFactor
let height = (self.cropImgView.startPoint.y - self.cropImgView.finalPoint.y) * scalingHeightFactor
let xPos = (self.cropImgView.startPoint.x - xOffset) * scalingWidthFactor
let yPos = (cropImgView.size.height - self.cropImgView.startPoint.y - yOffset) * scalingHeightFactor
var croppedRect: NSRect = NSRect(x: xPos, y: yPos, width: width, height: height)
Bugfix: cropImgView.finalPoint should be the corner of the selection, not the location of mouseUp. In CropImageView set self.finalPoint = newPoint in mouseDragged instead of mouseUp.
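In code, that change to CropImageView looks like this (the question's mouseDragged with the one-line fix; mouseUp no longer needs to record the point):

override func mouseDragged(with event: NSEvent) {
    let point: NSPoint = self.convert(event.locationInWindow, from: nil)
    var newPoint: CGPoint = self.startPoint
    let xDiff = point.x - self.startPoint.x
    let yDiff = point.y - self.startPoint.y
    let dist = min(abs(xDiff), abs(yDiff))
    newPoint.x += xDiff > 0 ? dist : -dist
    newPoint.y += yDiff > 0 ? dist : -dist
    let path = CGMutablePath()
    path.move(to: self.startPoint)
    path.addLine(to: NSPoint(x: self.startPoint.x, y: newPoint.y))
    path.addLine(to: newPoint)
    path.addLine(to: NSPoint(x: newPoint.x, y: self.startPoint.y))
    path.closeSubpath()
    self.shapeLayer.path = path
    self.finalPoint = newPoint   // the corner of the selection, not the mouse-up location
}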

Using SwiftUI or UIKit to position view elements?

This is more or less a repost of a question that I think could have been reproduced more minimally. I'm trying to form a text bubble using the triangle created by the overridden draw() function and the rest of the callout. I removed all the code that couldn't possibly affect the positioning of either the triangle or the box.
Recap: I'd like to move the triangle created by the draw function outside of the frame created in the initializer (currently it's inside the frame).
If I can add anything to clarify or make this question better, let me know.
class CustomCalloutView: UIView, MGLCalloutView {
let dismissesAutomatically: Bool = false
let isAnchoredToAnnotation: Bool = true
let tipHeight: CGFloat = 10.0
let tipWidth: CGFloat = 20.0
lazy var leftAccessoryView = UIView()
lazy var rightAccessoryView = UIView()
weak var delegate: MGLCalloutViewDelegate?
//MARK: Subviews -
required init() {
// init with 75% of width and 120px tall
super.init(frame: CGRect(origin: CGPoint(x: 0, y: 0), size: CGSize(width:
UIScreen.main.bounds.width * 0.75, height: 120)))
setup()
}
override var center: CGPoint {
set {
var newCenter = newValue
newCenter.y -= bounds.midY
super.center = newCenter
}
get {
return super.center
}
}
func setup() {
// setup this view's properties
self.backgroundColor = UIColor.white
}
func presentCallout(from rect: CGRect, in view: UIView, constrainedTo constrainedRect: CGRect,
animated: Bool) {
//Always, Slightly above center
self.center = view.center.applying(CGAffineTransform(translationX: 0, y: -self.frame.height))
view.addSubview(self)
}
override func draw(_ rect: CGRect) {
// Draw the pointed tip at the bottom.
let fillColor: UIColor = .black
let tipLeft = rect.origin.x + (rect.size.width / 2.0) - (tipWidth / 2.0)
let tipBottom = CGPoint(x: rect.origin.x + (rect.size.width / 2.0), y: rect.origin.y +
rect.size.height)
let heightWithoutTip = rect.size.height - tipHeight - 1
let currentContext = UIGraphicsGetCurrentContext()!
let tipPath = CGMutablePath()
tipPath.move(to: CGPoint(x: tipLeft, y: heightWithoutTip))
tipPath.addLine(to: CGPoint(x: tipBottom.x, y: tipBottom.y))
tipPath.addLine(to: CGPoint(x: tipLeft + tipWidth, y: heightWithoutTip))
tipPath.closeSubpath()
fillColor.setFill()
currentContext.addPath(tipPath)
currentContext.fillPath()
}
}

how to pinch zoom in on code created scrollview

My code creates a scroll view and image views that display a picture array from a previous view controller. I am trying to implement pinch-to-zoom so the user can zoom in on a picture, but whatever I do, it does not work. Any suggestions on what I am doing wrong, or where to implement the zoom code? Thank you!
import UIKit
class DestinationVC: UIViewController {
#IBOutlet weak var myScrollView: UIScrollView!
var mySelectedProtocol:Protocol?
var pageControl:UIPageControl?
var currentPageIndex:Int=0
fileprivate var count:Int=0
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view.
if mySelectedProtocol == nil { self.navigationController?.popViewController(animated: true) }
if mySelectedProtocol!.imagesName!.count == 0 { self.navigationController?.popViewController(animated: true) }
/// We have Data
print("Img Array with Name ==> \(mySelectedProtocol?.imagesName ?? [])")
DispatchQueue.main.async {
self.addPageView()
}
}
func viewForZooming(in scrollView: UIScrollView) -> UIView? {
myScrollView.delegate = self
myScrollView.minimumZoomScale = 1.0
myScrollView.maximumZoomScale = 5.0
return myScrollView
}
private func addPageView() {
myScrollView.backgroundColor=UIColor.black
myScrollView.isUserInteractionEnabled=true
myScrollView.showsHorizontalScrollIndicator=true
myScrollView.isPagingEnabled=true
myScrollView.delegate=self
myScrollView.bounces=false
self.count=mySelectedProtocol!.imagesName!.count
for i in 0..<self.count {
///Get Origin
let xOrigin : CGFloat = CGFloat(i) * myScrollView.frame.size.width
///Create a imageView
let imageView = UIImageView()
imageView.frame = CGRect(x: xOrigin, y: 0, width: myScrollView.frame.size.width, height: myScrollView.frame.size.height)
imageView.contentMode = .scaleAspectFit
imageView.image=UIImage(named: mySelectedProtocol!.imagesName![i])
myScrollView.addSubview(imageView)
}
setUpPageControl()
///Set Content Size to Show
myScrollView.contentSize = CGSize(width: myScrollView.frame.size.width * CGFloat(self.count), height: myScrollView.frame.size.height)
}
private func setUpPageControl() {
if pageControl == nil { pageControl=UIPageControl() }
pageControl!.numberOfPages = self.count
pageControl!.currentPageIndicatorTintColor = UIColor.red
pageControl!.pageIndicatorTintColor = UIColor.white
pageControl!.frame = CGRect(x: 0, y: 20, width: self.view.frame.width, height: self.view.frame.height*0.2)
pageControl!.currentPage=currentPageIndex
self.view.addSubview(pageControl!)
self.view.bringSubview(toFront: pageControl!)
}
}
extension DestinationVC: UIScrollViewDelegate {
func scrollViewWillBeginDragging(_ scrollView: UIScrollView) {
let scrollW : CGFloat = scrollView.frame.size.width
currentPageIndex = Int(scrollView.contentOffset.x / scrollW)
self.pageControl!.currentPage=currentPageIndex
}
func scrollViewDidEndDecelerating(_ scrollView: UIScrollView) {
let scrollW : CGFloat = scrollView.frame.size.width
currentPageIndex = Int(scrollView.contentOffset.x / scrollW)
self.pageControl!.currentPage=currentPageIndex
}
}
You set the minimumZoomScale to 1.0 - that's just 1x, the normal scale. If you want the content to be able to zoom down smaller than that, you could try setting the minimum zoom like this:
let scaleWidth = scrollView.frame.size.width / scrollView.contentSize.width
let scaleHeight = scrollView.frame.size.height / scrollView.contentSize.height
let minScale = min(scaleWidth, scaleHeight)
scrollView.minimumZoomScale = minScale
scrollView.maximumZoomScale = 1.0
scrollView.zoomScale = minScale
And set the maximumZoomScale to 1.0 - the size of the content.
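Separately (my note, not part of the answer above): for pinch zooming to work at all, viewForZooming(in:) has to return the view being zoomed - a subview of the scroll view - rather than the scroll view itself, and the zoom configuration belongs in one-time setup rather than inside that delegate method. A minimal sketch, simplified to a single image view (the paged layout in the question would need a zoomable container per page; zoomableImageView is a hypothetical property):

override func viewDidLoad() {
    super.viewDidLoad()
    myScrollView.delegate = self
    myScrollView.minimumZoomScale = 1.0
    myScrollView.maximumZoomScale = 5.0
}

// UIScrollViewDelegate
func viewForZooming(in scrollView: UIScrollView) -> UIView? {
    return zoomableImageView   // hypothetical: the image view inside the scroll view
}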

How do I create a circle with CALayer?

I have tested the code below, but when I give it constraints it becomes a tiny circle:
override func drawRect(rect: CGRect) {
var path = UIBezierPath(ovalInRect: rect)
fillColor.setFill()
path.fill()
//set up the width and height variables
//for the horizontal stroke
let plusHeight:CGFloat = 300.0
let plusWidth:CGFloat = 450.0
//create the path
var plusPath = UIBezierPath()
//set the path's line width to the height of the stroke
plusPath.lineWidth = plusHeight
//move the initial point of the path
//to the start of the horizontal stroke
plusPath.moveToPoint(CGPoint(
x:self.bounds.width/2 - plusWidth/2 + 0.5,
y:self.bounds.height/2 + 0.5))
//add a point to the path at the end of the stroke
plusPath.addLineToPoint(CGPoint(
x:self.bounds.width/2 + plusWidth/2 + 0.5,
y:self.bounds.height/2 + 0.5))
}
Change radius and fillColor as you want. :)
import Foundation
import UIKit
class CircleLayerView: UIView {
var circleLayer: CAShapeLayer!
override func draw(_ rect: CGRect) {
super.draw(rect)
if circleLayer == nil {
circleLayer = CAShapeLayer()
let radius: CGFloat = 150.0
circleLayer.path = UIBezierPath(roundedRect: CGRect(x: 0, y: 0, width: 2.0 * radius, height: 2.0 * radius), cornerRadius: radius).cgPath
circleLayer.position = CGPoint(x: self.frame.midX - radius, y: self.frame.midY - radius)
circleLayer.fillColor = UIColor.blue.cgColor
self.layer.addSublayer(circleLayer)
}
}
}
The rect being passed into drawRect is the area that needs to be updated, not the size of the drawing. In your case, you would probably just ignore the rect being passed in and set the circle to the size you want.
//// Oval Drawing
let ovalPath = UIBezierPath(ovalIn: CGRect(x: 0, y: 0, width: 300, height: 300))
UIColor.white.setFill()
ovalPath.fill()
The only correct way to do it:
private lazy var whiteCoin: CAShapeLayer = {
let c = CAShapeLayer()
c.path = UIBezierPath(ovalIn: bounds).cgPath
c.fillColor = UIColor.white.cgColor
layer.addSublayer(c)
return c
}()
That literally makes a layer, as you wanted.
In iOS you must correctly resize any layers you are in charge of when the view is resized or redrawn.
How do you do that? It is the very raison d'être of layoutSubviews.
class ProperExample: UIView {
open override func layoutSubviews() {
super.layoutSubviews()
whiteCoin.frame = bounds
}
}
It's that simple.
class ProperExample: UIView {
open override func layoutSubviews() {
super.layoutSubviews()
whiteCoin.frame = bounds
}
private lazy var whiteCoin: CAShapeLayer = {
let c = CAShapeLayer()
c.path = UIBezierPath(ovalIn: bounds).cgPath
c.fillColor = UIColor.white.cgColor
layer.addSublayer(c)
return c
}()
}
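One caveat worth adding (my note, not part of the answer): a CAShapeLayer path does not stretch when the layer's frame changes, so if the view can actually change size, rebuild the path in layoutSubviews as well:

open override func layoutSubviews() {
    super.layoutSubviews()
    whiteCoin.frame = bounds
    // The path lives in the layer's own coordinate space and is not scaled
    // by the frame change, so recreate it for the new size.
    whiteCoin.path = UIBezierPath(ovalIn: bounds).cgPath
}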