I have an NSView subclass written using Cocoa:
import Cocoa
import SwiftUI
final class EntityViewImpl: NSViewRepresentable {
func makeNSView(context: NSViewRepresentableContext<EntityViewImpl>) -> EntityView {
return EntityView(frame: NSRect(x: 0, y: 0, width: 100, height: 200))
}
func updateNSView(_ nsView: EntityView, context: NSViewRepresentableContext<EntityViewImpl>) {
// do nothing
}
}
class EntityView: NSView {
var oldMousePosition: NSPoint = .zero
var oldFrameOrigin: NSPoint = .zero
override func draw(_ dirtyRect: NSRect) {
super.draw(dirtyRect)
NSColor.white.setFill()
dirtyRect.fill()
}
override func mouseDown(with event: NSEvent) {
self.oldFrameOrigin = self.frame.origin
self.oldMousePosition = event.locationInWindow
}
override func mouseDragged(with event: NSEvent) {
let pointInSuperview = event.locationInWindow
self.frame.origin = NSPoint(x: oldFrameOrigin.x + (pointInSuperview.x - oldMousePosition.x),
y: oldFrameOrigin.y + (pointInSuperview.y - oldMousePosition.y))
}
}
and I want to use this class inside a SwiftUI View body like this:
struct ContentView: View {
var body: some View {
Group {
EntityViewImpl()
.frame(width: 100, height: 100)
}
.frame(maxWidth: .infinity, maxHeight: .infinity)
}
}
This works fine, and I can also drag the EntityView around. However, I can only "grab" it inside its original frame. If I drag it outside those bounds, mouse events no longer trigger on the parts of the view that lie outside the original frame. Do I have to somehow "tell" the ContentView that the NSView moved?
Actually, if you are using SwiftUI, you need to move the superview of EntityView, like this:
override func mouseDown(with event: NSEvent) {
self.oldFrameOrigin = self.superview!.frame.origin
self.oldMousePosition = self.superview!.convert(event.locationInWindow, from: self)
}
override func mouseDragged(with event: NSEvent) {
let pointInSuperview = self.superview!.convert(event.locationInWindow, from: self)
self.superview!.frame.origin = NSPoint(x: oldFrameOrigin.x + (pointInSuperview.x - oldMousePosition.x),
y: oldFrameOrigin.y + (-pointInSuperview.y + oldMousePosition.y))
}
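For reference, here is the question's EntityView with the answer's superview-based handlers folded in (the same code as above, just assembled into one class):
class EntityView: NSView {
    var oldMousePosition: NSPoint = .zero
    var oldFrameOrigin: NSPoint = .zero

    override func draw(_ dirtyRect: NSRect) {
        super.draw(dirtyRect)
        NSColor.white.setFill()
        dirtyRect.fill()
    }

    override func mouseDown(with event: NSEvent) {
        // Track the hosting superview, because SwiftUI wraps the represented view
        // in its own NSView and that wrapper is what needs to move.
        oldFrameOrigin = superview!.frame.origin
        oldMousePosition = superview!.convert(event.locationInWindow, from: self)
    }

    override func mouseDragged(with event: NSEvent) {
        let pointInSuperview = superview!.convert(event.locationInWindow, from: self)
        // The y delta is inverted, kept exactly as in the answer above.
        superview!.frame.origin = NSPoint(x: oldFrameOrigin.x + (pointInSuperview.x - oldMousePosition.x),
                                          y: oldFrameOrigin.y + (-pointInSuperview.y + oldMousePosition.y))
    }
}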
I'm trying to animate a sprite node within SpriteKit, used from SwiftUI, but am not having much success. I created a touchable sprite, and that works. But when I try a texture animation, nothing happens. Manually changing the textures also does nothing.
import SwiftUI
import SpriteKit
class GameScene: SKScene {
private var cat1: Cat!
private var cat2: Cat!
private var catAnimation: SKAction!
func createCats() {
var textures:[SKTexture] = []
for i in 1...4 {
textures.append(SKTexture(imageNamed: "img\(i)"))
}
print("tex \(textures.count)")
catAnimation = SKAction.animate(withNormalTextures: textures, timePerFrame: 1)
cat1 = Cat(texture: textures.first, color: UIColor.red, size: CGSize(width: 48, height: 48))
cat1.position = CGPoint(x: 64, y: 128)
cat1.color = .magenta
cat1.isUserInteractionEnabled = true
cat1.delegate = self
addChild(cat1)
cat2 = Cat(texture: textures.first, color: UIColor.red, size: CGSize(width: 48, height: 48))
cat2.position = CGPoint(x: 128, y: 128)
cat2.color = .green
cat2.isUserInteractionEnabled = true
cat2.delegate = self
addChild(cat2)
}
override func didMove(to view: SKView) {
createCats()
}
}
extension GameScene: CatDelegate {
func catTouched(cat: Cat) {
print("cat of color \(cat.color)")
cat.run(SKAction.repeat(catAnimation, count: 4))
}
}
protocol CatDelegate {
func catTouched(cat: Cat)
}
class Cat: SKSpriteNode {
var delegate: CatDelegate!
var isEnabled = true
var sendTouchesToScene = true
var colour: SKColor = .blue
var textures:[SKTexture] = []
var count = 0
override init(texture: SKTexture!, color: UIColor, size: CGSize) {
super.init(texture: texture, color: color, size: size)
for i in 1...4 {
textures.append(SKTexture(imageNamed: "img\(i)"))
}
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent!) {
guard isEnabled else { return }
//send touch to scene if you need to handle further touches by the scene
if sendTouchesToScene {
super.touchesBegan(touches, with: event)
}
self.colorBlendFactor = 1.0
//handle touches for cat
delegate?.catTouched(cat: self)
}
override func touchesEnded(_ touches: Set<UITouch>, with event: UIEvent?) {
//
}
}
struct ContentView: View {
var scene: SKScene {
let scene = GameScene()
scene.size = CGSize(width: 256, height: 256)
scene.scaleMode = .fill
scene.backgroundColor = .white
return scene
}
var body: some View {
SpriteView(scene: scene)
.frame(width: 256, height: 256)
.border(Color.red)
}
}
struct ContentView_Previews: PreviewProvider {
static var previews: some View {
ContentView()
}
}
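One thing worth flagging in the code above (an observation, not from the original post): SKAction.animate(withNormalTextures:timePerFrame:) animates a sprite's normal-map textures, not its visible texture. If the goal is an ordinary frame-by-frame animation, the call would usually be SKAction.animate(with:timePerFrame:). A minimal sketch, using a hypothetical helper name:
import SpriteKit

extension GameScene {
    // Hypothetical helper: builds the frame animation against the sprite's
    // visible texture slot rather than its normal-map slot.
    func makeCatAnimation() -> SKAction {
        let textures = (1...4).map { SKTexture(imageNamed: "img\($0)") }
        return SKAction.animate(with: textures, timePerFrame: 1)
    }
}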
I am subclassing UIView and am trying to "drag" a CGRect back and forth across the screen. Basically, I want to move (redraw) the rectangle every time I drag my finger. So far, I have this code:
var rectangle: CGRect {
get {
return CGRect(x: 200,
y: 200,
width: frame.width / 6,
height: 15)
}
set {}
}
override func draw(_ rect: CGRect) {
let gesture = UIPanGestureRecognizer(target: self, action: #selector(dragRectangle(recognizer:)))
addGestureRecognizer(gesture)
drawRectangle(rect)
}
func drawRectangle(_ rect: CGRect) {
let path = UIBezierPath(rect: rectangle)
UIColor.black.set()
path.fill()
}
@objc func dragRectangle(recognizer: UIPanGestureRecognizer) {
let translation = recognizer.translation(in: self)
rectangle = CGRect(x: rectangle.midX + translation.x, y: rectangle.midY + translation.y, width: rectangle.width, height: rectangle.height)
setNeedsDisplay()
recognizer.setTranslation(CGPoint.zero, in: self)
}
This is my first time using UIPanGestureRecognizer, so I'm not sure of all the details that go into this. I have set breakpoints in drawRectangle and confirmed that this is being called. However, the rectangle on the screen does not move at all, no matter how many times I try to drag it. What's wrong?
This is how you can do it easily. Just copy, paste, and run this:
//
// RootController.swift
// SampleApp
//
// Created by Chanaka Caldera on 24/6/19.
// Copyright © 2019 homeapps. All rights reserved.
//
import UIKit
class RootController: UIViewController {
private var draggableView: UIView!
private var pangesture: UIPanGestureRecognizer!
override func viewDidLoad() {
super.viewDidLoad()
setdragview()
}
}
extension RootController {
fileprivate func setdragview() {
draggableView = UIView()
draggableView.translatesAutoresizingMaskIntoConstraints = false
draggableView.backgroundColor = .lightGray
view.addSubview(draggableView)
let draggableviewConstraints = [draggableView.leftAnchor.constraint(equalTo: view.leftAnchor,constant: 10),
draggableView.topAnchor.constraint(equalTo: view.topAnchor, constant: 64),
draggableView.widthAnchor.constraint(equalToConstant: 100),
draggableView.heightAnchor.constraint(equalToConstant: 40)]
NSLayoutConstraint.activate(draggableviewConstraints)
pangesture = UIPanGestureRecognizer()
draggableView.isUserInteractionEnabled = true
draggableView.addGestureRecognizer(pangesture)
pangesture.addTarget(self, action: #selector(draggableFunction(_:)))
}
}
extension RootController {
@objc fileprivate func draggableFunction(_ sender: UIPanGestureRecognizer) {
view.bringSubviewToFront(draggableView)
let translation = sender.translation(in: self.view)
draggableView.center = CGPoint(x: draggableView.center.x + translation.x, y: draggableView.center.y + translation.y)
sender.setTranslation(CGPoint.zero, in: self.view)
print("drag works : \(translation.x)")
}
}
Hope this will help. Cheers!
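One caveat worth noting with this approach (an editorial note, not part of the original answer): the pan handler moves the view by changing its center while the view is positioned by active Auto Layout constraints, so a later layout pass can snap it back to its constrained position. A sketch of the same drag driven through the constraint constants instead (class and property names here are illustrative):
import UIKit

class ConstraintDragController: UIViewController {
    private var draggableView: UIView!
    private var leftConstraint: NSLayoutConstraint!
    private var topConstraint: NSLayoutConstraint!

    override func viewDidLoad() {
        super.viewDidLoad()
        draggableView = UIView()
        draggableView.translatesAutoresizingMaskIntoConstraints = false
        draggableView.backgroundColor = .lightGray
        view.addSubview(draggableView)

        // Keep references to the two position constraints so the pan handler
        // can adjust their constants.
        leftConstraint = draggableView.leftAnchor.constraint(equalTo: view.leftAnchor, constant: 10)
        topConstraint = draggableView.topAnchor.constraint(equalTo: view.topAnchor, constant: 64)
        NSLayoutConstraint.activate([leftConstraint,
                                     topConstraint,
                                     draggableView.widthAnchor.constraint(equalToConstant: 100),
                                     draggableView.heightAnchor.constraint(equalToConstant: 40)])

        draggableView.addGestureRecognizer(UIPanGestureRecognizer(target: self, action: #selector(drag(_:))))
    }

    // Drag by adjusting the constraint constants rather than the frame/center.
    @objc private func drag(_ sender: UIPanGestureRecognizer) {
        let translation = sender.translation(in: view)
        leftConstraint.constant += translation.x
        topConstraint.constant += translation.y
        sender.setTranslation(.zero, in: view)
    }
}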
Try it like this (see the comments throughout the code):
@IBDesignable
class Rectangle: UIView {
@IBInspectable var color: UIColor = .clear {
didSet { backgroundColor = color }
}
// draw your view using the background color
override func draw(_ rect: CGRect) {
backgroundColor?.set()
UIBezierPath(rect: rect).fill()
}
// add the gesture recognizer to your view
override func didMoveToSuperview() {
addGestureRecognizer(UIPanGestureRecognizer(target: self, action: #selector(pan)))
}
// your gesture selector
@objc func pan(_ gesture: UIPanGestureRecognizer) {
// update your view frame origin
frame.origin += gesture.translation(in: self)
// reset the gesture translation
gesture.setTranslation(.zero, in: self)
}
}
extension CGPoint {
static func +=(lhs: inout CGPoint, rhs: CGPoint) {
lhs.x += rhs.x
lhs.y += rhs.y
}
}
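If you are not wiring this up in Interface Builder, a minimal usage sketch could look like this (the view controller name and frame values are illustrative, not part of the answer):
import UIKit

class RectangleDemoViewController: UIViewController {
    override func viewDidLoad() {
        super.viewDidLoad()
        // Create the draggable rectangle and drop it onto the view;
        // didMoveToSuperview() attaches the pan gesture recognizer.
        let rectangle = Rectangle(frame: CGRect(x: 50, y: 100, width: 120, height: 60))
        rectangle.color = .black
        view.addSubview(rectangle)
    }
}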
To draw rectangles on your view while panning, you can do as follows (note that this Rectangle has fill, stroke, and line-width properties beyond the class shown above, presumably the version from the linked sample project):
import UIKit
class ViewController: UIViewController {
var rectangles: [Rectangle] = []
override func viewDidLoad() {
super.viewDidLoad()
view.addGestureRecognizer(UIPanGestureRecognizer(target: self, action: #selector(pan)))
}
@objc func pan(_ gesture: UIPanGestureRecognizer) {
switch gesture.state {
case .began:
let rectangle = Rectangle(frame: .init(origin: gesture.location(in: view), size: .init(width: 0, height: 0)))
rectangle.fillColor = .red
rectangle.strokeColor = .white
rectangle.lineWidth = 3
view.addSubview(rectangle)
rectangles.append(rectangle)
case .changed:
let distance = gesture.translation(in: view)
let index = rectangles.index(before: rectangles.endIndex)
let frame = rectangles[index].frame
rectangles[index].frame = .init(origin: frame.origin, size: .init(width: frame.width + distance.x, height: frame.height + distance.y))
rectangles[index].setNeedsDisplay()
gesture.setTranslation(.zero, in: view)
case .ended:
break
default:
break
}
}
}
Sample Project
I figured out most of the reason why the rectangle wasn't moving. It turns out that I had misunderstood how variable getters and setters work in Swift. So I changed this code:
var rectangle: CGRect {
get {
return CGRect(x: 200,
y: 200,
width: frame.width / 6,
height: 15)
}
set {}
}
to be a lazy variable:
lazy var rectangle: CGRect = {
return CGRect(x: 200,
y: 200,
width: self.frame.width / 6,
height: 15)
}()
The reason I needed get and set in the first place was that I use frame in my calculations, and that is not available until the view itself has been fully instantiated. I also tweaked this code a bit:
rectangle = CGRect(x: rectangle.midX + translation.x, y: rectangle.midY + translation.y, width: rectangle.width, height: rectangle.height)
and used minX instead of midX:
rectangle = CGRect(x: rectangle.minX + translation.x, y: rectangle.minY + translation.y, width: rectangle.width, height: rectangle.height)
This is because CGRect's x and y initializer parameters set its origin, i.e. its minX and minY.
This is good progress (at least the rectangle moves). However, I am not able to figure out why the rectangle only jumps to its new position after I have released the mouse, resulting in choppy movement.
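A tiny illustration of the minX/midX point above (the numbers are made up):
import CoreGraphics

let r = CGRect(x: 200, y: 200, width: 60, height: 15)
// The initializer's x/y become the rectangle's origin, i.e. its minX/minY...
print(r.minX, r.minY)   // 200.0 200.0
// ...while midX/midY are the center, offset by half the width/height.
print(r.midX, r.midY)   // 230.0 207.5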
In my macOS app I need to be able to move an NSImageView around inside an NSView after a mouseDown event (triggered by the user) happens in that NSImageView. When the user triggers the mouseUp event, the view must stay wherever the last mouseDragged event left it. During the move I want the NSImageView to remain visible on screen (it should move along with the mouse cursor).
I've read a Handling Mouse Events guide by Apple, https://developer.apple.com/library/content/documentation/Cocoa/Conceptual/EventOverview/HandlingMouseEvents/HandlingMouseEvents.html
Also I've downloaded this sample code: https://developer.apple.com/library/content/samplecode/DragItemAround/Introduction/Intro.html
Both links contain code in Objective-C, and the DragItemAround code is outdated. I've searched for solutions on GitHub and in other Stack Overflow threads, but have not found any that work.
I would be glad to hear answers to this question. I'm using Swift 3.
I've created a custom MovableImageView which is a subclass of NSImageView with this code:
import Cocoa
class MovableImageView: NSImageView {
override func draw(_ dirtyRect: NSRect) {
super.draw(dirtyRect)
// Drawing code here.
// setup the starting location of the
// draggable item
}
override func mouseDown(with event: NSEvent) {
Swift.print("mouseDown")
//let window = self.window!
//let startingPoint = event.locationInWindow
}
override func mouseDragged(with event: NSEvent) {
Swift.print("mouseDragged")
self.backgroundColor = NSColor.black
}
override func mouseUp(with event: NSEvent) {
Swift.print("mouseUp")
}
}
After that I set this class on the NSImageView in Interface Builder. I've also set constraints for this NSImageView in Interface Builder.
Now I'm trying to figure out how to move the NSImageView inside the NSView (Swift 3, Xcode 8).
You need to save the mouseDown point and use it as the offset later. Please check the following code:
class MovableImageView: NSImageView {
var firstMouseDownPoint: NSPoint = NSZeroPoint
init() {
super.init(frame: NSZeroRect)
self.wantsLayer = true
self.layer?.backgroundColor = NSColor.red.cgColor
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
override func draw(_ dirtyRect: NSRect) {
super.draw(dirtyRect)
// Drawing code here.
}
override func mouseDown(with event: NSEvent) {
Swift.print("mouseDown")
firstMouseDownPoint = (self.window?.contentView?.convert(event.locationInWindow, to: self))!
}
override func mouseDragged(with event: NSEvent) {
Swift.print("mouseDragged")
let newPoint = (self.window?.contentView?.convert(event.locationInWindow, to: self))!
let offset = NSPoint(x: newPoint.x - firstMouseDownPoint.x, y: newPoint.y - firstMouseDownPoint.y)
let origin = self.frame.origin
let size = self.frame.size
self.frame = NSRect(x: origin.x + offset.x, y: origin.y + offset.y, width: size.width, height: size.height)
}
override func mouseUp(with event: NSEvent) {
Swift.print("mouseUp")
}
}
In the parent view, just add this MovableImageView as a subview, like this:
let view = MovableImageView()
view.frame = NSRect(x: 0, y: 0, width: 100, height: 100)
self.view.addSubview(view) //Self is your parent view
Constraint version of @brianLikeApple's answer:
class MovableImageView: NSImageView {
var firstMouseDownPoint: NSPoint = NSZeroPoint
var xConstraint: NSLayoutConstraint!
var yConstraint: NSLayoutConstraint!
required init?(coder aDecoder: NSCoder) {
super.init(coder: aDecoder)
self.wantsLayer = true
self.layer?.backgroundColor = NSColor.red.cgColor
}
override func draw(_ dirtyRect: NSRect) {
super.draw(dirtyRect)
// Drawing code here.
}
override func mouseDown(with event: NSEvent) {
Swift.print("mouseDown")
firstMouseDownPoint = (self.window?.contentView?.convert(event.locationInWindow, to: self))!
}
override func mouseDragged(with event: NSEvent) {
Swift.print("mouseDragged")
let newPoint = (self.window?.contentView?.convert(event.locationInWindow, to: self))!
let offset = NSPoint(x: newPoint.x - firstMouseDownPoint.x, y: newPoint.y - firstMouseDownPoint.y)
let origin = self.frame.origin
let size = self.frame.size
yConstraint.constant = (self.superview?.frame.height)! - origin.y - offset.y - size.height
xConstraint.constant = origin.x + offset.x
}
override func mouseUp(with event: NSEvent) {
Swift.print("mouseUp")
}
}
Here's an NSView you can drag. It uses constraints exactly like @ExeRhythm's answer.
import AppKit
class DraggableNSView: NSView {
var down: NSPoint = NSZeroPoint
@IBOutlet var across: NSLayoutConstraint!
@IBOutlet var up: NSLayoutConstraint!
override func mouseDown(with e: NSEvent) {
down = window?.contentView?.convert(e.locationInWindow, to: self) ?? NSZeroPoint
}
override func mouseDragged(with e: NSEvent) {
guard let m = window?.contentView?.convert(e.locationInWindow, to: self) else { return }
let p = frame.origin + (m - down)
across.constant = p.x
up.constant = p.y
}
}
Don't forget that you must connect the across and up constraints in storyboard!
Note: if you prefer a "down" constraint rather than "up", it is:
// if you prefer a "down" constraint:
down.constant = (superview?.frame.height)! - p.y - frame.size.height
As always, add these two functions to your project:
func -(lhs: CGPoint, rhs: CGPoint) -> CGPoint {
return CGPoint(x: lhs.x - rhs.x, y: lhs.y - rhs.y)
}
func +(lhs: CGPoint, rhs: CGPoint) -> CGPoint {
return CGPoint(x: lhs.x + rhs.x, y: lhs.y + rhs.y)
}
Note that you can, of course, do this with any subclass of NSView, such as NSImageView:
class DraggableNSImageView: NSImageView {
... same
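Spelled out, that subclass is the same body as DraggableNSView above with only the superclass changed (it relies on the same CGPoint + and - operators):
import AppKit

class DraggableNSImageView: NSImageView {
    var down: NSPoint = NSZeroPoint

    @IBOutlet var across: NSLayoutConstraint!
    @IBOutlet var up: NSLayoutConstraint!

    override func mouseDown(with e: NSEvent) {
        down = window?.contentView?.convert(e.locationInWindow, to: self) ?? NSZeroPoint
    }

    override func mouseDragged(with e: NSEvent) {
        guard let m = window?.contentView?.convert(e.locationInWindow, to: self) else { return }
        let p = frame.origin + (m - down)
        across.constant = p.x
        up.constant = p.y
    }
}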
I'm trying to make two NSViews draggable, and I was wondering how to code that in Swift for a macOS app. I'm thinking of putting the two NSViews inside a parent NSView and making them draggable within it. For example, create NSViewA with NSViewB and NSViewC nested inside it (both draggable within NSViewA). I would like to code this in Swift. Cheers!
You can do this fairly easily if you take advantage of the information passed on the responder chain. Consider this custom NSView class.
class DraggableView : NSView {
var startPoint: NSPoint?
var frameOrigin: NSPoint?
override func mouseDown(with event: NSEvent) {
startPoint = event.locationInWindow
frameOrigin = frame.origin
}
override func mouseDragged(with event: NSEvent) {
let offset = event.locationInWindow - startPoint!
frame.origin = frameOrigin! + offset
}
override func mouseUp(with event: NSEvent) {
startPoint = nil
frameOrigin = nil
}
}
To make the math work for NSPoint, I overloaded the operators in an extension:
extension NSPoint {
static func -(pointA: NSPoint, pointB: NSPoint) -> NSPoint {
return NSPoint(x: pointA.x - pointB.x, y: pointA.y - pointB.y)
}
static func +(pointA: NSPoint, pointB: NSPoint) -> NSPoint {
return NSPoint(x: pointA.x + pointB.x, y: pointA.y + pointB.y)
}
}
Then your ViewController class is just setup code:
class ViewController: NSViewController {
let viewA: DraggableView = DraggableView()
let viewB: DraggableView = DraggableView()
override func viewDidLoad() {
super.viewDidLoad()
viewA.frame = NSRect(origin: .zero, size: NSSize(width: 100, height: 100))
viewB.frame = NSRect(origin: NSPoint(x: 125, y: 0), size: NSSize(width: 100, height: 100))
viewA.wantsLayer = true
viewB.wantsLayer = true
viewA.layer?.backgroundColor = NSColor.blue.cgColor
viewB.layer?.backgroundColor = NSColor.green.cgColor
view.addSubview(viewA)
view.addSubview(viewB)
}
}
I'm new to Swift programming and have been stuck on this issue for the last week. I am trying to center on a hero while applying an impulse to him. My code works if I create the hero in the scene class and center on him, but it doesn't work when I put the hero in another class and try to center on him from the scene class. Centering works if I just move him by changing his x or y position, but not if I apply an impulse. It seems that when the hero is in a separate class and an impulse is applied, his x and y while he is in motion show up as nil, whereas when he is in the same class, his x and y while in motion are fine. Any help is much appreciated.
Code where centerOnNode works with applyImpulse when everything is in the same class:
class GameScene: SKScene {
var hero:SKSpriteNode?
var mazeWorld:SKNode?
var player=Player()
var heroLocation:CGPoint = CGPointZero
override func didMoveToView(view: SKView) {
mazeWorld = childNodeWithName("mazeWorld")
heroLocation = mazeWorld!.childNodeWithName("sPoint")!.position
self.anchorPoint = CGPoint(x: 0.5,y: 0.3)
hero?.position = heroLocation
hero = SKSpriteNode(imageNamed:"hero_walk_down_0")
hero!.physicsBody?.dynamic = false
hero!.physicsBody?.affectedByGravity = true
hero!.physicsBody?.pinned = false
hero!.physicsBody = SKPhysicsBody(rectangleOfSize: hero!.size)
hero!.physicsBody?.mass = 1.0;
hero!.setScale(0.8)
mazeWorld!.addChild(hero!)
}
override func touchesBegan(touches: NSSet, withEvent event: UIEvent)
{
/* Called when a touch begins */
for touch: AnyObject in touches {
hero!.physicsBody?.applyImpulse(CGVector(dx: 240, dy: 550))
}
}
override func update(currentTime: CFTimeInterval) {
/* Called before each frame is rendered */
}
func centerOnNode(node:SKNode){
let cameraPositionInScene:CGPoint = self.convertPoint(node.position, fromNode: mazeWorld!)
mazeWorld!.position = CGPoint(x:mazeWorld!.position.x - cameraPositionInScene.x, y: mazeWorld!.position.y - cameraPositionInScene.y)
}
override func didSimulatePhysics() {
self.centerOnNode(hero!)
}
}
Here is the code where centerOnNode doesn't work with applyImpulse, for some reason:
import SpriteKit
class GameScene: SKScene {
var hero:SKSpriteNode?
var mazeWorld:SKNode?
var player=Player()
var heroLocation:CGPoint = CGPointZero
override func didMoveToView(view: SKView) {
mazeWorld = childNodeWithName("mazeWorld")
heroLocation = mazeWorld!.childNodeWithName("sPoint")!.position
self.anchorPoint = CGPoint(x: 0.5,y: 0.3)
player.position = heroLocation
mazeWorld!.addChild(player)
}
override func touchesBegan(touches: NSSet, withEvent event: UIEvent)
{
/* Called when a touch begins */
for touch: AnyObject in touches {
player.jumpRight()
}
}
override func update(currentTime: CFTimeInterval) {
/* Called before each frame is rendered */
}
func centerOnNode(node:SKNode){
let cameraPositionInScene:CGPoint = self.convertPoint(node.position, fromNode: mazeWorld!)
mazeWorld!.position = CGPoint(x:mazeWorld!.position.x - cameraPositionInScene.x, y: mazeWorld!.position.y - cameraPositionInScene.y)
}
override func didSimulatePhysics() {
self.centerOnNode(player)
}
}
import Foundation
import SpriteKit
class Player: SKNode{
var objectSprite:SKSpriteNode?
override init()
{
super.init()
objectSprite = SKSpriteNode(imageNamed:"hero_walk_down_0")
objectSprite!.physicsBody?.dynamic = false
objectSprite!.physicsBody?.affectedByGravity = true
objectSprite!.physicsBody?.pinned = false
objectSprite!.physicsBody = SKPhysicsBody(rectangleOfSize: objectSprite!.size)
objectSprite!.physicsBody?.mass = 1.0
objectSprite!.setScale(0.8)
addChild(objectSprite!)
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
func jumpRight()
{
objectSprite!.physicsBody?.applyImpulse(CGVector(dx: 240, dy: 550))
}
}
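An observation on the non-working version above (an editorial note, not from the original post): the impulse in jumpRight() is applied to objectSprite, which is a child of the Player node, so player.position itself never changes and centerOnNode(player) has nothing to follow. Note also that the physicsBody?. property assignments made before SKPhysicsBody(...) is assigned are no-ops. A minimal sketch, in current Swift syntax, of attaching the body to the Player node itself so that the node being centered on is the one the physics engine moves:
import SpriteKit

class Player: SKNode {
    let objectSprite: SKSpriteNode

    override init() {
        objectSprite = SKSpriteNode(imageNamed: "hero_walk_down_0")
        super.init()

        // Create the body first, then configure it, and attach it to the
        // Player node itself so an impulse moves the node the camera follows.
        physicsBody = SKPhysicsBody(rectangleOf: objectSprite.size)
        physicsBody?.mass = 1.0
        physicsBody?.affectedByGravity = true

        objectSprite.setScale(0.8)
        addChild(objectSprite)
    }

    required init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    func jumpRight() {
        physicsBody?.applyImpulse(CGVector(dx: 240, dy: 550))
    }
}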