Changing a UIView's rotation changes its frame size. How to keep the frame size after rotating? - swift

I'm rotating a UIView with a UIRotationGestureRecognizer, but I have a problem: the rotated UIView's frame size changes during rotation.
targetView = UIView(frame: CGRect(x: self.view.center.x, y: self.view.center.y , width: 100, height: 100))
targetView.backgroundColor = .red
targetView.isUserInteractionEnabled = true
self.view.addSubview(targetView)
targetView.center = self.view.center
let rotate = UIRotationGestureRecognizer(target: self, action: #selector(rotatedView))
targetView.addGestureRecognizer(rotate)
Here is the rotate function:
@objc func rotatedView(_ sender: UIRotationGestureRecognizer) {
var originalRotation = CGFloat()
if sender.state == .began {
sender.rotation = lastRotation
originalRotation = sender.rotation
print("TargetView Frame Size: [START ROTATE] ", self.targetView.frame.size)
print("TargetView Bounse Size: [START ROTATE] ", self.targetView.bounds)
} else if sender.state == .changed {
let newRotation = sender.rotation + originalRotation
sender.view?.transform = CGAffineTransform(rotationAngle: newRotation)
print("TargetView Frame Size: [ROTATETING] ", self.targetView.frame.size)
print("TargetView Bounse Size: [ROTATING ROTATE] ", self.targetView.bounds)
}
}
The size changes:
TargetView Frame Size: [START ROTATE] (100.0, 100.0)
TargetView Frame Size: [ROTATING] (110.564206726133, 110.564206726133)
How can I keep the frame size while rotating, just as it was at the start of the rotation?
EDIT:
I have these gestures together: Rotate, Resize, Move.
Move works well, but Rotate changes the size. I could use bounds here, but once I also resize, bounds is no longer useful.

You need the scaled but non-rotated view size. You can compute that from the bounds.size of the view and the transform.
Example
let view = UIView(frame: CGRect(x: 0, y: 0, width: 100, height: 150))
// Scale view by 3X horizontally and by 4X vertically and
// rotate by 45 degrees
view.transform = CGAffineTransform(scaleX: 3, y: 4).rotated(by: 45.0 / 180.0 * .pi)
let transform = view.transform
// Find the angle, print it in degrees
let angle = atan2(-transform.c, transform.a)
print(angle * 180.0 / .pi) // 44.99999999999999
// Find scaleX and scaleY
let scaleX = transform.a * cos(angle) - transform.c * sin(angle)
let scaleY = transform.d * cos(angle) + transform.b * sin(angle)
print(scaleX) // 3.0
print(scaleY) // 4.0
print(view.frame.size) // (530.3300858899106, 707.1067811865476)
print(view.bounds.size) // (100.0, 150.0)
let adjustedSize = CGSize(width: view.bounds.size.width * scaleX, height: view.bounds.size.height * scaleY)
// print non-rotated but scaled size
print(adjustedSize) // (300.0, 600.0)
Here it is as a handy extension that will work for views that are scaled, rotated, and translated (moved):
extension CGAffineTransform {
var angle: CGFloat { return atan2(-self.c, self.a) }
var angleInDegrees: CGFloat { return self.angle * 180 / .pi }
var scaleX: CGFloat {
let angle = self.angle
return self.a * cos(angle) - self.c * sin(angle)
}
var scaleY: CGFloat {
let angle = self.angle
return self.d * cos(angle) + self.b * sin(angle)
}
}
Now, if you have a UIView:
let angle = view.transform.angleInDegrees
let scaleX = view.transform.scaleX
let scaleY = view.transform.scaleY
let adjustedSize = CGSize(width: view.bounds.size.width * scaleX, height: view.bounds.size.height * scaleY)
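Applied back to the question's gesture handler, here is a minimal sketch (not part of the original answer). It assumes a stored lastRotation property as in the question, and it replaces the whole transform, so combine it with any scaling transform you also apply:
@objc func rotatedView(_ sender: UIRotationGestureRecognizer) {
    guard let view = sender.view else { return }
    switch sender.state {
    case .began:
        sender.rotation = lastRotation
    case .changed:
        view.transform = CGAffineTransform(rotationAngle: sender.rotation)
        // The frame grows to enclose the rotated rect, but the scaled,
        // non-rotated size stays constant:
        let adjustedSize = CGSize(width: view.bounds.width * view.transform.scaleX,
                                  height: view.bounds.height * view.transform.scaleY)
        print("Adjusted size:", adjustedSize) // stays (100.0, 100.0)
    case .ended, .cancelled:
        lastRotation = sender.rotation
    default:
        break
    }
}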

Related

Animating a 360 degree rotation around another view's center point

I'm making a loading spinner animation that pushes a view out from the middle, and then rotates all the way around the center view back to its original location. This is what I am trying to achieve:
The inner arrow moves the view away from the center. I've already achieved this; the part I am stuck on is then rotating the view around the center view. I've read various other StackOverflow posts but have not been close to achieving this.
Code so far:
UIView.animate(withDuration: 0.5) {
self.topView.transform = CGAffineTransform(translationX: 20, y: -20)
} completion: { _ in
self.topView.setAnchorPoint(self.centerView.center)
// Rotate
}
Here is how I am setting the anchor point of the view. I'm using this because the view disappears otherwise when setting its anchor point.
func setAnchorPoint(_ point: CGPoint) {
var newPoint = CGPoint(x: bounds.size.width * point.x, y: bounds.size.height * point.y)
var oldPoint = CGPoint(x: bounds.size.width * layer.anchorPoint.x, y: bounds.size.height * layer.anchorPoint.y);
newPoint = newPoint.applying(transform)
oldPoint = oldPoint.applying(transform)
var position = layer.position
position.x -= oldPoint.x
position.x += newPoint.x
position.y -= oldPoint.y
position.y += newPoint.y
layer.position = position
layer.anchorPoint = point
}
Once the full 360 rotation is complete I would then need to move the view back in towards the center, completing the animation.
For the part when the loading view rotates around the circle view, you can use a UIBezierPath and create a CAKeyframeAnimation based on its path.
Take a look at this implementation. Hope it helps.
class LoadingViewController: UIViewController {
var circlePath: UIBezierPath!
lazy var loader = makeLoader()
lazy var centerView = makeCenterView()
override func viewDidLoad() {
super.viewDidLoad()
setup()
}
func makeLoader() -> UIButton {
let padding: CGFloat = 100
let width = self.view.frame.width - (2 * padding)
let b = UIButton(frame: CGRect(x: padding, y: 200, width: width, height: 50))
b.addTarget(self, action: #selector(didTap), for: .touchUpInside)
b.backgroundColor = .blue
return b
}
func makeCenterView() -> UIView {
let width: CGFloat = 20
let height: CGFloat = 20
let x = self.view.center.x - width/2
let y = self.view.center.y - height/2
let view = UIView(frame: CGRect(x: x, y: y, width: width, height: height))
view.backgroundColor = .green
return view
}
func setup() {
// Create a UIBezierPath centered on the green view, with a radius equal to the distance between the green view and the blue view (the loader).
let arcCenterX = centerView.center.x
let arcCenterY = centerView.center.y
let arcCenter = CGPoint(x: arcCenterX, y: arcCenterY)
let radius = arcCenterY - loader.center.y
let startAngle = -CGFloat.pi/2
let endAngle = CGFloat.pi*(1.5)
let arcPath = UIBezierPath(arcCenter: arcCenter, radius: radius, startAngle: startAngle, endAngle: endAngle, clockwise: true)
self.circlePath = arcPath
self.view.addSubview(loader)
self.view.addSubview(centerView)
}
@objc func didTap() {
//create a CAKeyframeAnimation with a path that matches the UIBezierPath above.
let loadAnimation = CAKeyframeAnimation(keyPath: "position")
loadAnimation.path = self.circlePath.cgPath
loadAnimation.calculationMode = .paced
loadAnimation.duration = 2.0
loadAnimation.rotationMode = .rotateAuto
loadAnimation.repeatCount = Float(CGFloat.greatestFiniteMagnitude)
loader.layer.add(loadAnimation, forKey: "circleAnimation")
}
}
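The animation above repeats forever. For the last step in the question (moving the view back toward the center after one full revolution), one approach is to run the same key-frame animation once inside a CATransaction and animate back in its completion block. A sketch under that assumption (didTapOnce is a hypothetical name, not part of the answer):
@objc func didTapOnce() {
    CATransaction.begin()
    CATransaction.setCompletionBlock {
        // Runs after the single revolution finishes.
        UIView.animate(withDuration: 0.5) {
            self.loader.center = self.centerView.center
        }
    }
    let loadAnimation = CAKeyframeAnimation(keyPath: "position")
    loadAnimation.path = self.circlePath.cgPath
    loadAnimation.calculationMode = .paced
    loadAnimation.duration = 2.0
    loadAnimation.rotationMode = .rotateAuto
    // No repeatCount, so the animation runs exactly once.
    loader.layer.add(loadAnimation, forKey: "circleAnimation")
    CATransaction.commit()
}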

Create centered and rotated UILabels between drawn lines in Core Graphics

I have the following code to draw a circle with segments inside of it:
override func draw(_ rect: CGRect) {
super.draw(rect)
if let context = UIGraphicsGetCurrentContext()
{
let width = fmin(self.frame.size.width, self.frame.size.height)
let offset_x = abs(width - self.frame.size.width)/2
let offset_y = abs(width - self.frame.size.height)/2
let padding = CGFloat(0.5)
let radius_size = (width/2) - (padding*2)
let circle_width = radius_size/4
context.setStrokeColor(UIColor.black.cgColor)
// Draw a circle
for i in 0 ..< 4
{
let offset = CGFloat(i) * circle_width
context.strokeEllipse(in:
CGRect(
x: padding + offset + offset_x,
y: padding + offset + offset_y,
width: (radius_size - offset)*2,
height: (radius_size - offset)*2))
}
let angles: [CGFloat] = [87.0, 112.0, 150]
let angles2: [CGFloat] = [210.0, 250.0, 330.0]
let center = CGPoint(x: width/2 + offset_x, y: width/2 + offset_y)
for angle in angles {
drawLine(context: context, center: center, radius: radius_size, angle: angle)
}
for angle in angles2 {
drawLine(context: context, center: center, radius: radius_size * 3 / 4, angle: angle)
}
context.strokePath()
}
}
func drawLine(context: CGContext, center: CGPoint, radius: CGFloat, angle: CGFloat) {
context.move(to: center)
context.addLine(to: CGPoint(x: center.x + radius * cos(angle * .pi / 180), y: center.y - radius * sin(angle * .pi / 180)))
}
I want to be able to create UILabels centered between the segments, rotated to the right angle. I made an example in a photo editor:
Thanks in advance.
I found the answer:
func drawCurvedText(angle: CGFloat, text: String) {
drawCurvedString(
on: textView.layer,
text: NSAttributedString(
string: text,
attributes: [
NSAttributedString.Key.foregroundColor: UIColor.white,
NSAttributedString.Key.font: UIFont.systemFont(ofSize: 15)
]),
angle: -angle,
radius: 130)
}
func drawCurvedString(on layer: CALayer, text: NSAttributedString, angle: CGFloat, radius: CGFloat) {
var radAngle = angle.radians
let textSize = text.boundingRect(
with: CGSize(width: .greatestFiniteMagnitude, height: .greatestFiniteMagnitude),
options: [.usesLineFragmentOrigin, .usesFontLeading],
context: nil)
.integral
.size
let perimeter: CGFloat = 2 * .pi * radius
let textAngle: CGFloat = textSize.width / perimeter * 2 * .pi
var textRotation: CGFloat = 0
var textDirection: CGFloat = 0
if angle > CGFloat(10).radians, angle < CGFloat(170).radians {
// bottom string
textRotation = 0.5 * .pi
textDirection = -2 * .pi
radAngle += textAngle / 2
} else {
// top string
textRotation = 1.5 * .pi
textDirection = 2 * .pi
radAngle -= textAngle / 2
}
for c in 0..<text.length {
let letter = text.attributedSubstring(from: NSRange(c..<c+1))
let charSize = letter.boundingRect(
with: CGSize(width: .greatestFiniteMagnitude, height: .greatestFiniteMagnitude),
options: [.usesLineFragmentOrigin, .usesFontLeading],
context: nil)
.integral
.size
let letterAngle = (charSize.width / perimeter) * textDirection
let x = radius * cos(radAngle + (letterAngle / 2))
let y = radius * sin(radAngle + (letterAngle / 2))
let singleChar = drawText(
on: layer,
text: letter,
frame: CGRect(
x: (layer.frame.size.width / 2) - (charSize.width / 2) + x,
y: (layer.frame.size.height / 2) - (charSize.height / 2) + y,
width: charSize.width,
height: charSize.height))
layer.addSublayer(singleChar)
singleChar.transform = CATransform3DMakeAffineTransform(CGAffineTransform(rotationAngle: radAngle - textRotation))
radAngle += letterAngle
}
}
func drawText(on layer: CALayer, text: NSAttributedString, frame: CGRect) -> CATextLayer {
let textLayer = CATextLayer()
textLayer.frame = frame
textLayer.string = text
textLayer.alignmentMode = CATextLayerAlignmentMode.center
textLayer.contentsScale = UIScreen.main.scale
return textLayer
}
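Note that drawCurvedString above uses a radians property on CGFloat (and a textView) that isn't shown in the answer. Presumably radians is a degrees-to-radians helper along these lines (an assumption, not part of the original answer):
// Assumed helper (not shown in the answer): converts degrees to radians.
extension CGFloat {
    var radians: CGFloat { return self * .pi / 180 }
}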

SwiftUI - Scale, Zoom and Crop Image

Edited with link to repository.
I am using SwiftUI and so don't have access to the 'cropping view'. I am using gestures instead of ScrollView to capture a zoom level and offset (x and y) of an image. I am unable to return an image which crops properly based on these factors.
It seems as if SwiftUI itself might be a factor. Perhaps the offset of the image within the view needs to be accounted for in determining offsets and zoom levels?
I have the image and I have the following values from the gestures on the view to represent scale and x/y position:
@State var scale: CGFloat = 1.0
@State var currentPosition: CGSize = CGSize.zero
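For context, these two values might be captured along these lines (a sketch with assumed names, not taken from the question's repository):
import SwiftUI

// A sketch of feeding scale and currentPosition from gestures instead of a ScrollView.
struct ZoomPanImage: View {
    @State var scale: CGFloat = 1.0
    @State var currentPosition: CGSize = .zero

    var body: some View {
        Image("landscape")
            .scaleEffect(scale)
            .offset(x: currentPosition.width, y: currentPosition.height)
            .gesture(
                MagnificationGesture()
                    .onChanged { value in self.scale = value }
            )
            .simultaneousGesture(
                DragGesture()
                    .onChanged { value in self.currentPosition = value.translation }
            )
    }
}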
The current attempt, which gets closest for the function called:
func prepareImage( ) {
let imageToManipulate = UIImage(named: "landscape")
let currentPositionWidth = self.currentPosition.width
let currentPositionHeight = self.currentPosition.height
let zoomScale = self.scale
let imsize = imageToManipulate!.size
var scale : CGFloat = self.frameSize.width / imsize.width
if imsize.height * scale < self.frameSize.height {
scale = self.frameSize.height / imsize.height
}
let croppedImsize = CGSize(width: (self.frameSize.width/scale) / zoomScale, height: (self.frameSize.height/scale) / zoomScale)
let xOffset = (( imsize.width - croppedImsize.width ) / 2.0) - (currentPositionWidth / zoomScale)
let yOffset = (( imsize.height - croppedImsize.height) / 2.0) - (currentPositionHeight / zoomScale)
let croppedImrect: CGRect = CGRect(x: xOffset, y: yOffset, width: croppedImsize.width, height: croppedImsize.height)
let r = UIGraphicsImageRenderer(size:croppedImsize)
let croppedIm = r.image { _ in
imageToManipulate!.draw(at: CGPoint(x:-croppedImrect.origin.x, y:-croppedImrect.origin.y))
}
self.croppedImage = croppedIm
self.photoIsFinished = true
}
However, as you will see in the repository, when combining both zoom/scale and x/y offsets it is always 'off' a bit.
As well, when you try to crop to a square image the amount it is 'off' can be quite significant.
Thanks to Asperi's answer, I have implemented a lightweight SwiftUI library to crop images. Here is the library and demo.
The magic is below:
public var body: some View {
GeometryReader { proxy in
// ...
Button(action: {
// how to crop the image according to rectangle area
if self.tempResult == nil {
self.cropTheImageWithImageViewSize(proxy.size)
}
self.resultImage = self.tempResult
}) {
Text("Crop Image")
.padding(.all, 10)
.background(Color.blue)
.foregroundColor(.white)
.shadow(color: .gray, radius: 1)
.padding(.top, 50)
}
}
}
func cropTheImageWithImageViewSize(_ size: CGSize) {
let imsize = inputImage.size
let scale = max(inputImage.size.width / size.width,
inputImage.size.height / size.height)
let zoomScale = self.scale
let currentPositionWidth = self.dragAmount.width * scale
let currentPositionHeight = self.dragAmount.height * scale
let croppedImsize = CGSize(width: (self.cropSize.width * scale) / zoomScale, height: (self.cropSize.height * scale) / zoomScale)
let xOffset = (( imsize.width - croppedImsize.width) / 2.0) - (currentPositionWidth / zoomScale)
let yOffset = (( imsize.height - croppedImsize.height) / 2.0) - (currentPositionHeight / zoomScale)
let croppedImrect: CGRect = CGRect(x: xOffset, y: yOffset, width: croppedImsize.width, height: croppedImsize.height)
if let cropped = inputImage.cgImage?.cropping(to: croppedImrect) {
// The UIImage here can be written to Data as PNG or JPEG
let croppedIm = UIImage(cgImage: cropped)
tempResult = croppedIm
result = Image(uiImage: croppedIm)
}
}
The answer was provided by juanj via the GitHub repository:
let imageToManipulate = UIImage(named: "landscape")
let zoomScale = self.scale
let imsize = imageToManipulate!.size
var scale : CGFloat = self.frameSize.width / imsize.width
if imsize.height * scale < self.frameSize.height {
scale = self.frameSize.height / imsize.height
}
let currentPositionWidth = self.currentPosition.width / scale
let currentPositionHeight = self.currentPosition.height / scale
let croppedImsize = CGSize(width: (self.frameSize.width/scale) / zoomScale, height: (self.frameSize.height/scale) / zoomScale)
let xOffset = (( imsize.width - croppedImsize.width ) / 2.0) - (currentPositionWidth / zoomScale)
let yOffset = (( imsize.height - croppedImsize.height) / 2.0) - (currentPositionHeight / zoomScale)
let croppedImrect: CGRect = CGRect(x: xOffset, y: yOffset, width: croppedImsize.width, height: croppedImsize.height)
let r = UIGraphicsImageRenderer(size:croppedImsize)
let croppedIm = r.image { _ in
imageToManipulate!.draw(at: CGPoint(x:-croppedImrect.origin.x, y:-croppedImrect.origin.y))
}
self.croppedImage = croppedIm
self.photoIsFinished = true
The full code, demonstrating how to allow a user to zoom and pan an image within a frame in a SwiftUI view, and then crop the result to a new image can be viewed in the repository.

Swift: How to anchor a layer to a view?

I've been trying to anchor a CAShapeLayer to a view for a few days already and couldn't find a solution. This is a face-tracking project. I want to create a round CAShapeLayer that I can place in a fixed position in the eyeglassesView. However, the view resizes when you turn your head, so the CAShapeLayer doesn't stay on top of the eyeglasses to serve as the lens. I plan to create some animation, but first I need the CAShapeLayer to stick to its place regardless of the angle I move my face to.
func faceTrackerDidUpdate(points: FacePoints?) {
let eyeCornerDist = sqrt(pow(points.leftEye[0].x - points.rightEye[5].x, 2) + pow(points.leftEye[0].y - points.rightEye[5].y, 2))
let eyeToEyeCenter = CGPointMake((points.leftEye[0].x + points.rightEye[5].x) / 2, (points.leftEye[0].y + points.rightEye[5].y) / 2)
let eyeglassesWidth = 1.5 * eyeCornerDist
let eyeglassesHeight = (eyeglassesView.image!.size.height / eyeglassesView.image!.size.width) * eyeglassesWidth
eyeglassesView.transform = CGAffineTransformIdentity
eyeglassesView.frame = CGRectMake(eyeToEyeCenter.x - eyeglassesWidth / 2, eyeToEyeCenter.y - 0.5 * eyeglassesHeight, eyeglassesWidth, eyeglassesHeight)
let layer = CAShapeLayer()
layer.anchorPoint = CGPointMake(0.0, 1.0)
layer.position = CGPointMake(0.0, 150.0)
layer.path = UIBezierPath(roundedRect: CGRect(x: 0, y: 0, width: 80, height: 80), cornerRadius: 80).CGPath
layer.fillColor = UIColor(white: 1, alpha: 0.08).CGColor
eyeglassesView.layer.addSublayer(layer)
eyeglassesView.hidden = false
setAnchorPoint(CGPointMake(0.5, 1.0), forView: eyeglassesView)
let angle = atan2(points.rightEye[5].y - points.leftEye[0].y, points.rightEye[5].x - points.leftEye[0].x)
eyeglassesView.transform = CGAffineTransformMakeRotation(angle)
}
func setAnchorPoint(anchorPoint: CGPoint, forView view: UIView) {
var newPoint = CGPointMake(view.bounds.size.width * anchorPoint.x, view.bounds.size.height * anchorPoint.y)
var oldPoint = CGPointMake(view.bounds.size.width * view.layer.anchorPoint.x, view.bounds.size.height * view.layer.anchorPoint.y)
newPoint = CGPointApplyAffineTransform(newPoint, view.transform)
oldPoint = CGPointApplyAffineTransform(oldPoint, view.transform)
var position = view.layer.position
position.x -= oldPoint.x
position.x += newPoint.x
position.y -= oldPoint.y
position.y += newPoint.y
view.layer.position = position
view.layer.anchorPoint = anchorPoint
}
I tried different things (position, anchorPoint, masksToBounds, transform) but I really don't know how to make it work. Any help would be greatly appreciated. Let me know if there's anything I'm missing.

Rotate NSImage in Swift, Cocoa, Mac OSX

Is there an easy way to rotate an NSImage in a Mac OSX app? Or just set the orientation from portrait to landscape using Swift?
I am playing around with CATransform3DMakeAffineTransform but I can't get it to work.
CATransform3DMakeAffineTransform(CGAffineTransformMakeRotation(CGFloat(M_PI) * 90/180))
It's the first time I'm working with transformations, so please be patient with me :) Maybe I'm taking the wrong approach...
Can anybody help me please?
Thanks!
public extension NSImage {
public func imageRotatedByDegreess(degrees:CGFloat) -> NSImage {
var imageBounds = NSZeroRect ; imageBounds.size = self.size
let pathBounds = NSBezierPath(rect: imageBounds)
var transform = NSAffineTransform()
transform.rotateByDegrees(degrees)
pathBounds.transformUsingAffineTransform(transform)
let rotatedBounds:NSRect = NSMakeRect(NSZeroPoint.x, NSZeroPoint.y, pathBounds.bounds.size.width, pathBounds.bounds.size.height )
let rotatedImage = NSImage(size: rotatedBounds.size)
//Center the image within the rotated bounds
imageBounds.origin.x = NSMidX(rotatedBounds) - (NSWidth(imageBounds) / 2)
imageBounds.origin.y = NSMidY(rotatedBounds) - (NSHeight(imageBounds) / 2)
// Start a new transform
transform = NSAffineTransform()
// Move coordinate system to the center (since we want to rotate around the center)
transform.translateXBy(+(NSWidth(rotatedBounds) / 2 ), yBy: +(NSHeight(rotatedBounds) / 2))
transform.rotateByDegrees(degrees)
// Move the coordinate system back to normal
transform.translateXBy(-(NSWidth(rotatedBounds) / 2 ), yBy: -(NSHeight(rotatedBounds) / 2))
// Draw the original image, rotated, into the new image
rotatedImage.lockFocus()
transform.concat()
self.drawInRect(imageBounds, fromRect: NSZeroRect, operation: NSCompositingOperation.CompositeCopy, fraction: 1.0)
rotatedImage.unlockFocus()
return rotatedImage
}
}
var image = NSImage(named:"test.png")!.imageRotatedByDegreess(CGFloat(90)) // use only the values 90, 180, or 270
Updated for Swift 3:
public extension NSImage {
public func imageRotatedByDegreess(degrees:CGFloat) -> NSImage {
var imageBounds = NSZeroRect ; imageBounds.size = self.size
let pathBounds = NSBezierPath(rect: imageBounds)
var transform = NSAffineTransform()
transform.rotate(byDegrees: degrees)
pathBounds.transform(using: transform as AffineTransform)
let rotatedBounds:NSRect = NSMakeRect(NSZeroPoint.x, NSZeroPoint.y, pathBounds.bounds.size.width, pathBounds.bounds.size.height )
let rotatedImage = NSImage(size: rotatedBounds.size)
//Center the image within the rotated bounds
imageBounds.origin.x = NSMidX(rotatedBounds) - (NSWidth(imageBounds) / 2)
imageBounds.origin.y = NSMidY(rotatedBounds) - (NSHeight(imageBounds) / 2)
// Start a new transform
transform = NSAffineTransform()
// Move coordinate system to the center (since we want to rotate around the center)
transform.translateX(by: +(NSWidth(rotatedBounds) / 2 ), yBy: +(NSHeight(rotatedBounds) / 2))
transform.rotate(byDegrees: degrees)
// Move the coordinate system back to normal
transform.translateX(by: -(NSWidth(rotatedBounds) / 2 ), yBy: -(NSHeight(rotatedBounds) / 2))
// Draw the original image, rotated, into the new image
rotatedImage.lockFocus()
transform.concat()
self.draw(in: imageBounds, from: NSZeroRect, operation: NSCompositingOperation.copy, fraction: 1.0)
rotatedImage.unlockFocus()
return rotatedImage
}
}
class SomeClass: NSViewController {
var image = NSImage(named:"test.png")!.imageRotatedByDegreess(degrees: CGFloat(90)) // use only the values 90, 180, or 270
}
Thanks for this solution; however, it did not work perfectly for me.
As you may have noticed, pathBounds is not used anywhere. In my opinion it has to be used like so:
let rotatedBounds:NSRect = NSMakeRect(NSZeroPoint.x, NSZeroPoint.y , pathBounds.bounds.size.width, pathBounds.bounds.size.height )
Otherwise the image will be rotated but cropped to square bounds.
Letting IKImageView do the heavy lifting:
import Quartz
extension NSImage {
func imageRotated(by degrees: CGFloat) -> NSImage {
let imageRotator = IKImageView()
var imageRect = CGRect(x: 0, y: 0, width: self.size.width, height: self.size.height)
let cgImage = self.cgImage(forProposedRect: &imageRect, context: nil, hints: nil)
imageRotator.setImage(cgImage, imageProperties: [:])
imageRotator.rotationAngle = CGFloat(-(degrees / 180) * CGFloat(M_PI))
let rotatedCGImage = imageRotator.image().takeUnretainedValue()
return NSImage(cgImage: rotatedCGImage, size: NSSize.zero)
}
}
Here's a simple Swift (4+) solution to drawing an image that is rotated around the center:
extension NSImage {
/// Rotates the image by the specified degrees around the center.
/// Note that if the angle is not a multiple of 90°, parts of the rotated image may be drawn outside the image bounds.
func rotated(by angle: CGFloat) -> NSImage {
let img = NSImage(size: self.size, flipped: false, drawingHandler: { (rect) -> Bool in
let (width, height) = (rect.size.width, rect.size.height)
let transform = NSAffineTransform()
transform.translateX(by: width / 2, yBy: height / 2)
transform.rotate(byDegrees: angle)
transform.translateX(by: -width / 2, yBy: -height / 2)
transform.concat()
self.draw(in: rect)
return true
})
img.isTemplate = self.isTemplate // preserve the underlying image's template setting
return img
}
}
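Usage might look like this (the image name is an assumption):
// Rotate a bundled image by 45 degrees around its center.
let original = NSImage(named: "test")
let turned = original?.rotated(by: 45)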
This one also works for non-square images (Swift 5).
extension NSImage {
func rotated(by degrees : CGFloat) -> NSImage {
var imageBounds = NSRect(x: 0, y: 0, width: size.width, height: size.height)
let rotatedSize = AffineTransform(rotationByDegrees: degrees).transform(size)
let newSize = CGSize(width: abs(rotatedSize.width), height: abs(rotatedSize.height))
let rotatedImage = NSImage(size: newSize)
imageBounds.origin = CGPoint(x: newSize.width / 2 - imageBounds.width / 2, y: newSize.height / 2 - imageBounds.height / 2)
let otherTransform = NSAffineTransform()
otherTransform.translateX(by: newSize.width / 2, yBy: newSize.height / 2)
otherTransform.rotate(byDegrees: degrees)
otherTransform.translateX(by: -newSize.width / 2, yBy: -newSize.height / 2)
rotatedImage.lockFocus()
otherTransform.concat()
draw(in: imageBounds, from: CGRect.zero, operation: NSCompositingOperation.copy, fraction: 1.0)
rotatedImage.unlockFocus()
return rotatedImage
}
}
Building on @FrankByte.com's code, this version should extend correctly in both x and y for any image and any rotation.
extension NSImage {
func rotated(by degrees: CGFloat) -> NSImage {
let sinDegrees = abs(sin(degrees * CGFloat.pi / 180.0))
let cosDegrees = abs(cos(degrees * CGFloat.pi / 180.0))
let newSize = CGSize(width: size.height * sinDegrees + size.width * cosDegrees,
height: size.width * sinDegrees + size.height * cosDegrees)
let imageBounds = NSRect(x: (newSize.width - size.width) / 2,
y: (newSize.height - size.height) / 2,
width: size.width, height: size.height)
let otherTransform = NSAffineTransform()
otherTransform.translateX(by: newSize.width / 2, yBy: newSize.height / 2)
otherTransform.rotate(byDegrees: degrees)
otherTransform.translateX(by: -newSize.width / 2, yBy: -newSize.height / 2)
let rotatedImage = NSImage(size: newSize)
rotatedImage.lockFocus()
otherTransform.concat()
draw(in: imageBounds, from: CGRect.zero, operation: NSCompositingOperation.copy, fraction: 1.0)
rotatedImage.unlockFocus()
return rotatedImage
}
}