I want to make an elastic rubber effect when I swipe over my circle. I create my circle this way:
import UIKit

class ViewController: UIViewController {
    let screenSize: CGRect = UIScreen.mainScreen().bounds
    let g = ShapeView(origin: CGPoint(x: UIScreen.mainScreen().bounds.width / 2, y: UIScreen.mainScreen().bounds.height / 2))

    override func viewDidLoad() {
        super.viewDidLoad()
        self.view.addSubview(g)
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    override func touchesMoved(touches: Set<UITouch>, withEvent event: UIEvent?) {
        for touch: AnyObject in touches {
            let location = touch.locationInView(self.view) // location of your finger
            print(location.x / screenSize.width)
            self.view.test(location.x / screenSize.width, v: g)
        }
    }
}
import UIKit

extension UIView {
    func test(t: CGFloat, v: UIView) {
        v.superview!.bringSubviewToFront(v)
        v.transform = CGAffineTransformMakeScale(t + 1, 1)
    }
}
class ShapeView: UIView {
    let size: CGFloat = 150.0

    init(origin: CGPoint) {
        super.init(frame: CGRectMake(0.0, 0.0, size, size))
        self.center = origin
        self.backgroundColor = UIColor.clearColor()
    }

    // We need to implement init(coder:) to avoid compilation errors
    required init(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    override func drawRect(rect: CGRect) {
        let path = UIBezierPath(ovalInRect: rect)
        path.closePath()
        UIColor.blackColor().setFill()
        path.fill()
    }
}
This works as expected. I get this view:
But what I want is that when I'm swiping over the image, it gets a 'sticky' effect. Something like this:
The blue line is the middle of the actual circle, and it is pulled to the right, so the right half of the circle becomes an ellipse. This happens on touch, and if I move back, it becomes the circle it was again. How can I do this?
Use a scale transform. If the user is pulling to the right, this would be a scale transform whose x component is greater than 1 in proportion to the degree of pull, and whose y component is 1.
Note that transforms are applied around the center by default, so this will also cause the image to shift to the left. I assume that that isn't what you want, so you will also need to compensate at the same time with a translate transform that pushes back to the right.
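For example, a minimal sketch (written with current CGAffineTransform syntax; pull is an assumed 0...1 value derived from the horizontal drag distance, and circleView would be your ShapeView):
import UIKit

// Sketch: stretch the view to the right while keeping its left edge pinned.
// `pull` is assumed to be in 0...1, e.g. the drag distance divided by the screen width.
func applyPull(_ pull: CGFloat, to circleView: UIView) {
    let scaleX = 1 + pull
    // Scaling about the center shifts the left edge left by bounds.width * pull / 2,
    // so translate right by the same amount to compensate.
    let compensation = circleView.bounds.width * pull / 2
    circleView.transform = CGAffineTransform(translationX: compensation, y: 0)
        .scaledBy(x: scaleX, y: 1)
}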
Related
I am creating a calendar app, and I tap to create new events in a UIScrollView. Think of it like the Apple Calendar "day view", where you can create new events and view them in a list on a scroll view. Each event is a UIView with a TextField on it, and right now everything works, but I'm not sure how to make it so the events don't overlap when I create them.
Is there a way to prevent UIViews from overlapping when I create them or move them? I am only moving them up or down, so I just need to keep them from overlapping on the y-axis. Right now I tap to create them, but they can overlap. Is there a way to put them on the same plane or something so they can't be added on top of each other?
It is highly recommended that you post your own code so we can understand the general direction you're heading in. Otherwise, it could be a wasted effort on both of our parts.
For example, you mentioned that "right now everything works", but I have no idea how you're moving the views vertically. I went ahead and subclassed the gesture recognizer, so hopefully it fits the flow of your code.
import UIKit
import UIKit.UIGestureRecognizerSubclass

class ViewController: UIViewController {
    let size = CGSize(width: 200, height: 100)
    var v1: UIView!
    var v2: UIView!

    override func viewDidLoad() {
        super.viewDidLoad()

        self.v1 = UIView(frame: .init(origin: .init(x: 100, y: 100), size: size))
        self.view.addSubview(self.v1)
        self.v1.backgroundColor = .orange

        self.v2 = UIView(frame: .init(origin: .init(x: 100, y: 300), size: size))
        self.view.addSubview(self.v2)
        self.v2.backgroundColor = .purple

        let verticalGesture = VerticalGesture(target: self, action: #selector(dragged))
        self.v2.addGestureRecognizer(verticalGesture)
    }

    @objc func dragged(_ sender: VerticalGesture) {
    }
}
class VerticalGesture: UIPanGestureRecognizer {
    private var touch: UITouch!
    private var currentView: UIView!
    private var currentSuperview: UIView!

    override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent) {
        guard let firstTouch = touches.first, let currentView = self.view, let currentSuperview = currentView.superview else { return }
        self.touch = firstTouch
        self.currentView = currentView
        self.currentSuperview = currentSuperview
        super.touchesBegan(touches, with: event)
    }

    override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent) {
        var center = self.currentView.center
        let currentLocation = self.touch.location(in: self.currentSuperview)
        let previousLocation = self.touch.previousLocation(in: self.currentSuperview)
        let yTranslation = currentLocation.y - previousLocation.y
        center.y += yTranslation
        self.view!.center = center
        super.touchesMoved(touches, with: event)
    }

    override func touchesEnded(_ touches: Set<UITouch>, with event: UIEvent) {
        for subview in self.currentSuperview.subviews {
            // This goes through all the sibling views, including the non-calendar-event views
            // (calendar dates, calendar frame, etc.), so make sure to filter those out.
            if subview != self.currentView! && self.currentView!.frame.intersects(subview.frame) {
                var center = self.currentView.center
                let isHigher = subview.center.y >= center.y
                // isHigher means the view you're dragging sits above the overlapping view,
                // so push it up just above that view; otherwise push it just below.
                if isHigher {
                    center = CGPoint(x: subview.center.x, y: subview.center.y - self.currentView.bounds.height)
                    self.view!.center = center
                } else {
                    center = CGPoint(x: subview.center.x, y: subview.center.y + self.currentView.bounds.height)
                    self.view!.center = center
                }
            }
        }
        super.touchesEnded(touches, with: event)
    }
}
Depending on whether the calendar event you're dragging is above or below the overlapping view, it will move to a position adjacent to it. Feel free to add a transform animation to it.
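For instance, a hedged sketch of an animation helper (the names here are mine, not part of the code above) that you could call from touchesEnded instead of assigning center directly:
import UIKit

// Sketch: animate a view's center with a spring so the dragged event
// eases into its non-overlapping position instead of jumping there.
func snap(_ view: UIView, to center: CGPoint) {
    UIView.animate(withDuration: 0.3,
                   delay: 0,
                   usingSpringWithDamping: 0.7,
                   initialSpringVelocity: 0.4,
                   options: [.allowUserInteraction],
                   animations: { view.center = center })
}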
I want my Swift code to display a UIBezierPath button. The code uses override func draw to draw the button. The code is getting a compile error telling me I am missing a parameter in let customButton = FunkyButton(coder: <#NSCoder#>); you can see the error at the NSCoder placeholder. I don't know what to put for the coder. What do you think I should put?
import UIKit

class ViewController: UIViewController {
    var box = UIImageView()

    override open var shouldAutorotate: Bool {
        return false
    }

    // Specify the orientation.
    override open var supportedInterfaceOrientations: UIInterfaceOrientationMask {
        return .landscapeRight
    }

    let customButton = FunkyButton(coder: <#NSCoder#>)

    override func viewDidLoad() {
        super.viewDidLoad()
        view.addSubview(box)
        // box.frame = CGRect(x: view.frame.width * 0.2, y: view.frame.height * 0.2, width: view.frame.width * 0.2, height: view.frame.height * 0.2)
        box.backgroundColor = .systemTeal

        customButton!.backgroundColor = .systemPink
        self.view.addSubview(customButton!)
        customButton?.addTarget(self, action: #selector(press), for: .touchDown)
    }

    @objc func press() {
        print("hit")
    }
}
class FunkyButton: UIButton {
    var shapeLayer = CAShapeLayer()
    let aPath = UIBezierPath()

    override func draw(_ rect: CGRect) {
        let aPath = UIBezierPath()
        aPath.move(to: CGPoint(x: rect.width * 0.2, y: rect.height * 0.8))
        aPath.addLine(to: CGPoint(x: rect.width * 0.4, y: rect.height * 0.2))

        // design path in layer
        shapeLayer.path = aPath.cgPath
        shapeLayer.strokeColor = UIColor.red.cgColor
        shapeLayer.lineWidth = 1.0
        shapeLayer.path = aPath.cgPath

        // draw is called multiple times, so you need to remove the old layer before adding the new one
        shapeLayer.removeFromSuperlayer()
        layer.addSublayer(shapeLayer)
    }

    required init?(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)
    }

    override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
        if self.isHidden == true || self.alpha < 0.1 || self.isUserInteractionEnabled == false {
            return nil
        }
        if aPath.contains(point) {
            return self
        }
        return nil
    }
}
When instantiating FunkyButton, don't manually call the coder initializer. Just call
let button = FunkyButton()
Or add it in IB and hook up an outlet to it:
@IBOutlet weak var button: FunkyButton!
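So, in the question's viewDidLoad, the programmatic route would look something like this (the frame values are just placeholders):
// Replaces the FunkyButton(coder:) property; create the button in viewDidLoad instead.
let customButton = FunkyButton(frame: CGRect(x: 40, y: 80, width: 200, height: 120))
customButton.backgroundColor = .systemPink
view.addSubview(customButton)
customButton.addTarget(self, action: #selector(press), for: .touchDown)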
In FunkyButton, you shouldn't update the shape layer's path inside the draw(_:) method. During initialization, just add the shape layer to the layer hierarchy, and whenever you update the shape layer's path, it will be rendered for you. No draw(_:) is needed or desired:
@IBDesignable
class FunkyButton: UIButton {
    private let shapeLayer = CAShapeLayer()
    private var path = UIBezierPath()

    // called if button is instantiated programmatically (or as a designable)
    override init(frame: CGRect = .zero) {
        super.init(frame: frame)
        configure()
    }

    // called if button is instantiated via IB
    required init?(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)
        configure()
    }

    // called when the button's frame is set
    override func layoutSubviews() {
        super.layoutSubviews()
        updatePath()
    }

    override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
        guard path.contains(point) else {
            return nil
        }
        return super.hitTest(point, with: event)
    }
}

private extension FunkyButton {
    func configure() {
        shapeLayer.strokeColor = UIColor.red.cgColor
        shapeLayer.lineWidth = 1
        layer.addSublayer(shapeLayer)
    }

    func updatePath() {
        path = UIBezierPath()
        path.move(to: CGPoint(x: bounds.width * 0.2, y: bounds.height * 0.8))
        path.addLine(to: CGPoint(x: bounds.width * 0.4, y: bounds.height * 0.2))
        path.addLine(to: CGPoint(x: bounds.width * 0.2, y: bounds.height * 0.2))
        path.close()
        shapeLayer.path = path.cgPath
    }
}
If you really want to draw your path in draw(_:), that is an acceptable pattern too, but then you wouldn't use CAShapeLayer at all; you would just manually stroke() the UIBezierPath in draw(_:). (If you do implement draw(_:), though, don't use the rect parameter of that method; always refer back to the view's bounds.)
Bottom line, either use draw(_:) (triggered by calling setNeedsDisplay) or use CAShapeLayer (and just update its path), but don't do both.
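For reference, a minimal sketch of that draw(_:)-only variant (the class name is just for illustration; it uses the same path as above):
import UIKit

// Sketch of the draw(_:)-only alternative: no CAShapeLayer, just stroke the
// path directly. Note that it uses bounds, not the rect parameter.
class StrokedFunkyButton: UIButton {
    private var path = UIBezierPath()

    override func draw(_ rect: CGRect) {
        path = UIBezierPath()
        path.move(to: CGPoint(x: bounds.width * 0.2, y: bounds.height * 0.8))
        path.addLine(to: CGPoint(x: bounds.width * 0.4, y: bounds.height * 0.2))
        path.addLine(to: CGPoint(x: bounds.width * 0.2, y: bounds.height * 0.2))
        path.close()

        UIColor.red.setStroke()
        path.lineWidth = 1
        path.stroke()
    }

    override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
        guard path.contains(point) else { return nil }
        return super.hitTest(point, with: event)
    }
}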
A few additional observations about my code snippet:
You do not need to check for !isHidden or isUserInteractionEnabled in hitTest, as this method won't be called if the button is hidden or has user interaction disabled. As the documentation says:
This method ignores view objects that are hidden, that have disabled user interactions, or have an alpha level less than 0.01.
I have also removed the alpha check in hitTest, as that is non-standard behavior. It is not a big deal, but this is the sort of thing that bites you later on (e.g. change button base class and now it behaves differently).
You might as well make it #IBDesignable so that you can see it in Interface Builder (IB). There is no harm if you're only using it programmatically, but why not make it capable of being rendered in IB, too?
I have moved the configuration of the path into layoutSubviews. Anything based upon the bounds of the view should be responsive to changes in the layout. Sure, in your example, you are manually setting the frame, but this is an unnecessary limitation to place on this button class. You might use auto-layout in the future, and using layoutSubviews ensures that it will continue to function as intended. Plus, this way, the path will be updated if the size of the button changes.
There's no point in checking for contains if the path is a line. So, I've added a third point so that I can test whether the hit point falls within the path.
I am new to Swift. I want to draw a circle at the pixel where the user clicks.
Here is my code. It draws a circle, but not where I click.
I want to draw the circle where the user clicks. It already gets the coordinates of the click and prints them to the console, but I want to use them to update the x and y arguments in the ovalsAndCircles() function.
Thanks in advance.
import UIKit

class DemoView: UIView {
    var startX: CGFloat = 0.0
    var startY: CGFloat = 0.0
    var path: UIBezierPath!

    override init(frame: CGRect) {
        super.init(frame: frame)
        self.backgroundColor = UIColor.darkGray
    }

    required init?(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)
    }

    override func draw(_ rect: CGRect) {
        // Specify the fill color and apply it to the path.
        ovalsAndCircles()
        UIColor.orange.setFill()
        path.fill()

        // Specify a border (stroke) color.
        UIColor.purple.setStroke()
        path.stroke()
    }

    func ovalsAndCircles() {
        self.path = UIBezierPath(ovalIn: CGRect(x: startX,
                                                y: startY,
                                                width: 200,
                                                height: 200))
    }

    override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
        let point = touches.first!.location(in: self)
        startX = point.x
        startY = point.y
        print(startY)
        print(startX)
    }
}
(1) Add a UIView control through Interface Builder.
(2) Set the class name of that UIView control to DemoView.
(3) Create a subclass of UIView as DemoView as follows.
import UIKit

class DemoView: UIView {
    let fillColor = UIColor.green
    let strokeColor = UIColor.black
    let radius: CGFloat = 100.0

    override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
        let touch = event?.allTouches?.first
        if let touchPoint = touch?.location(in: self) {
            drawCircle(point: touchPoint)
        }
    }

    func drawCircle(point: CGPoint) {
        // Remove any previously drawn circle before adding the new one.
        if let subLayers = self.layer.sublayers {
            for subLayer in subLayers {
                subLayer.removeFromSuperlayer()
            }
        }

        let circlePath = UIBezierPath(arcCenter: point, radius: radius, startAngle: CGFloat(0), endAngle: CGFloat(Double.pi * 2.0), clockwise: true)
        let shapeLayer = CAShapeLayer()
        shapeLayer.path = circlePath.cgPath
        shapeLayer.fillColor = fillColor.cgColor
        shapeLayer.strokeColor = strokeColor.cgColor
        self.layer.addSublayer(shapeLayer)
    }
}
First, take a look at the coordinate system in iOS: https://developer.apple.com/library/archive/documentation/General/Conceptual/Devpedia-CocoaApp/CoordinateSystem.html
CGRect has an origin property of type CGPoint. Maybe it helps to set the origin of your rect in your draw function:
var rect: CGRect = ....
rect.origin = CGPoint(x: 0.5, y: 0.5) // set the origin to the middle of the rect
You call the touchesBegan method to set the point where the circle should be drawn, but when the user moves their finger over the screen, that won't be recognized. Use the touchesEnded method instead.
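Also, whichever touch method you use, the original DemoView never asks to be redrawn, so draw(_:) won't run again with the new coordinates. A sketch of such a handler inside DemoView (keeping the rest of the question's code as-is):
// Sketch: record the touch point, then mark the view as needing display so
// draw(_:) runs again and ovalsAndCircles() uses the new startX/startY.
override func touchesEnded(_ touches: Set<UITouch>, with event: UIEvent?) {
    guard let point = touches.first?.location(in: self) else { return }
    startX = point.x
    startY = point.y
    setNeedsDisplay()
}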
In my macOS app I need to be able to move an NSImageView around inside an NSView after a mouseDown event (triggered by the user) happens in that NSImageView. When the user triggers a mouseUp event, the view must stay wherever the last mouseDragged event left it. During the move I want the NSImageView to be visible on screen (it should move along with the mouse cursor).
I've read a Handling Mouse Events guide by Apple, https://developer.apple.com/library/content/documentation/Cocoa/Conceptual/EventOverview/HandlingMouseEvents/HandlingMouseEvents.html
Also I've downloaded this sample code: https://developer.apple.com/library/content/samplecode/DragItemAround/Introduction/Intro.html
Both links contain code in Objective-C, and the code for DragItemAround is outdated. I've tried to find solutions on GitHub and in other Stack Overflow threads, but have not found any that work.
I would be glad to hear answers to this question. I'm using Swift 3.
I've created a custom MovableImageView which is a subclass of NSImageView with this code:
import Cocoa

class MovableImageView: NSImageView {

    override func draw(_ dirtyRect: NSRect) {
        super.draw(dirtyRect)

        // Drawing code here.
        // Set up the starting location of the draggable item.
    }

    override func mouseDown(with event: NSEvent) {
        Swift.print("mouseDown")
        //let window = self.window!
        //let startingPoint = event.locationInWindow
    }

    override func mouseDragged(with event: NSEvent) {
        Swift.print("mouseDragged")
        self.backgroundColor = NSColor.black
    }

    override func mouseUp(with event: NSEvent) {
        Swift.print("mouseUp")
    }
}
After that, in Interface Builder, I've set this class on the NSImageView. I've also set constraints for this NSImageView in Interface Builder.
Now I'm trying to figure out how to move the NSImageView inside the NSView (Swift 3, Xcode 8).
You need to save the mouseDown point and use it to compute an offset later. Please check the following code:
class MovableImageView: NSImageView {
    var firstMouseDownPoint: NSPoint = NSZeroPoint

    init() {
        super.init(frame: NSZeroRect)
        self.wantsLayer = true
        self.layer?.backgroundColor = NSColor.red.cgColor
    }

    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    override func draw(_ dirtyRect: NSRect) {
        super.draw(dirtyRect)
        // Drawing code here.
    }

    override func mouseDown(with event: NSEvent) {
        Swift.print("mouseDown")
        firstMouseDownPoint = (self.window?.contentView?.convert(event.locationInWindow, to: self))!
    }

    override func mouseDragged(with event: NSEvent) {
        Swift.print("mouseDragged")
        let newPoint = (self.window?.contentView?.convert(event.locationInWindow, to: self))!
        let offset = NSPoint(x: newPoint.x - firstMouseDownPoint.x, y: newPoint.y - firstMouseDownPoint.y)
        let origin = self.frame.origin
        let size = self.frame.size
        self.frame = NSRect(x: origin.x + offset.x, y: origin.y + offset.y, width: size.width, height: size.height)
    }

    override func mouseUp(with event: NSEvent) {
        Swift.print("mouseUp")
    }
}
In the parent view, just add this MovableImageView as a subview, like this:
let view = MovableImageView()
view.frame = NSRect(x: 0, y: 0, width: 100, height: 100)
self.view.addSubview(view) //Self is your parent view
Constraint version of @brianLikeApple's answer:
class MovableImageView: NSImageView {
    var firstMouseDownPoint: NSPoint = NSZeroPoint

    // These must point at the view's horizontal and vertical position constraints
    // (e.g. connected as outlets from Interface Builder) before dragging.
    var xConstraint: NSLayoutConstraint!
    var yConstraint: NSLayoutConstraint!

    required init?(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)
        self.wantsLayer = true
        self.layer?.backgroundColor = NSColor.red.cgColor
    }

    override func draw(_ dirtyRect: NSRect) {
        super.draw(dirtyRect)
        // Drawing code here.
    }

    override func mouseDown(with event: NSEvent) {
        Swift.print("mouseDown")
        firstMouseDownPoint = (self.window?.contentView?.convert(event.locationInWindow, to: self))!
    }

    override func mouseDragged(with event: NSEvent) {
        Swift.print("mouseDragged")
        let newPoint = (self.window?.contentView?.convert(event.locationInWindow, to: self))!
        let offset = NSPoint(x: newPoint.x - firstMouseDownPoint.x, y: newPoint.y - firstMouseDownPoint.y)
        let origin = self.frame.origin
        let size = self.frame.size
        yConstraint.constant = (self.superview?.frame.height)! - origin.y - offset.y - size.height
        xConstraint.constant = origin.x + offset.x
    }

    override func mouseUp(with event: NSEvent) {
        Swift.print("mouseUp")
    }
}
Here's an NSView you can drag. It uses constraints exactly like @ExeRhythm's answer.
import AppKit

class DraggableNSView: NSView {
    var down: NSPoint = NSZeroPoint

    @IBOutlet var across: NSLayoutConstraint!
    @IBOutlet var up: NSLayoutConstraint!

    override func mouseDown(with e: NSEvent) {
        down = window?.contentView?.convert(e.locationInWindow, to: self) ?? NSZeroPoint
    }

    override func mouseDragged(with e: NSEvent) {
        guard let m = window?.contentView?.convert(e.locationInWindow, to: self) else { return }
        let p = frame.origin + (m - down)
        across.constant = p.x
        up.constant = p.y
    }
}
Don't forget that you must connect the across and up constraints in storyboard!
Note. If you prefer a "down" constraint rather than "up", it is:
// if you prefer a "down" constraint (named downConstraint here so it doesn't clash with the `down` point above):
downConstraint.constant = (superview?.frame.height)! - p.y - frame.size.height
As always, add these two operator functions to your project:
func -(lhs: CGPoint, rhs: CGPoint) -> CGPoint {
    return CGPoint(x: lhs.x - rhs.x, y: lhs.y - rhs.y)
}

func +(lhs: CGPoint, rhs: CGPoint) -> CGPoint {
    return CGPoint(x: lhs.x + rhs.x, y: lhs.y + rhs.y)
}
Note that you can of course do this with any subclass of NSView, such as NSImageView:
class DraggableNSImageView: NSImageView {
    ... same
I am fairly new to Swift, but along the way I noticed something strange. If I have a sprite and a label and enter the exact same position for each of them, they end up at different visual positions. I am looking for someone to explain this to improve my understanding of Swift.
Here's the output:
//
//  GameScene.swift
//  Test Game
//
//  Created by NioPullus on 9/7/15.
//  Copyright (c) 2015 Owen Vnek. All rights reserved.
//

import SpriteKit

class GameScene: SKScene {
    var testSprite: SKSpriteNode = SKSpriteNode(imageNamed: "Spaceship")
    var testLabel: UILabel = UILabel()

    func defineTestSprite(var #sprite: SKSpriteNode) -> SKSpriteNode {
        sprite.position.x = 100
        sprite.position.y = 100
        sprite.xScale = 0.3
        sprite.yScale = 0.3
        return sprite
    }

    func defineTestLabel(var #label: UILabel) -> UILabel {
        label = UILabel(frame: CGRect(x: 100, y: 100, width: 100, height: 100))
        label.text = "Test"
        return label
    }

    override func didMoveToView(view: SKView) {
        self.addChild(defineTestSprite(sprite: self.testSprite))
        self.view?.addSubview(defineTestLabel(label: self.testLabel))
    }

    override func touchesBegan(touches: Set<NSObject>, withEvent event: UIEvent) {
        /* Called when a touch begins */
        for touch in (touches as! Set<UITouch>) {
            let location = touch.locationInNode(self)
        }
    }

    override func update(currentTime: CFTimeInterval) {
        /* Called before each frame is rendered */
    }
}
UIKit and SpriteKit use different coordinate systems, so you wouldn't necessarily expect a sprite with the same x/y values as a subview to appear in the same visual position.
You can use the SKView method convertPoint:toScene: (and convertPoint:fromScene:) to convert between these coordinate spaces.
You can find out more information about the SpriteKit coordinate system here, and the UIView coordinate system here. (As you can see, they are very different.)
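For example, something like this (a sketch in current Swift syntax, run inside the GameScene, where self.view is the SKView presenting it) would place the label at the sprite's visual position:
// Sketch: convert the sprite's scene position into the SKView/UIKit
// coordinate space before positioning the UILabel at the "same" spot.
if let skView = self.view {
    let viewPoint = skView.convert(testSprite.position, from: self)
    testLabel.center = viewPoint
}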