keyDown not called for objects inside an SKScene - Swift

I was working on a little game when I ran into a problem. I made a little test project in order to isolate the problem.
I have the following class:
class Sprite: SKSpriteNode {

    override var acceptsFirstResponder: Bool { get { return true } }

    init() {
        super.init(texture: nil, color: .blue, size: CGSize(width: 200, height: 100))
        isUserInteractionEnabled = true
    }

    override func keyDown(with event: NSEvent) {
        position.x += 100
        print("keydown")
    }

    required init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }
}
I create an instance of this class in the following scene:
class GameScene: SKScene {

    override var acceptsFirstResponder: Bool { get { return true } }
    let x = Sprite()

    override func didMove(to view: SKView) {
        anchorPoint = CGPoint(x: 0.5, y: 0.5)
        isUserInteractionEnabled = true
        addChild(x)
    }

    override func keyDown(with event: NSEvent) {
        x.position.y += 50
    }
}
As you can see, I have a keyDown method in both classes. The one in GameScene gets called, as I can clearly see the sprite (x) move upwards. The keyDown method in my Sprite class, however, never gets called. I have searched the web, and the only answer I've really found is that acceptsFirstResponder must be overridden to return true, but this doesn't help in my case. Any suggestions?

Oh my... I found a simple solution and I certainly don't know how I didn't think of it before.
Since GameScene's keyDown method actually gets called, I just called the sprite's keyDown method from there and passed the argument:
class GameScene: SKScene {

    let x = Sprite()

    override func didMove(to view: SKView) {
        anchorPoint = CGPoint(x: 0.5, y: 0.5)
        addChild(x)
    }

    override func keyDown(with event: NSEvent) {
        x.position.y += 50
        x.keyDown(with: event)
    }
}
And it didn't require me to override any properties (I guess the scene is the first responder by default - please correct me if that's wrong).
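If you ever need more than one node to respond, a minimal sketch of a more general version of the same idea (this is an illustration, not part of the original answer) is to let the scene, which actually receives the event as first responder, forward keyDown to every child that has user interaction enabled:

class GameScene: SKScene {
    let x = Sprite()

    override func didMove(to view: SKView) {
        anchorPoint = CGPoint(x: 0.5, y: 0.5)
        addChild(x)
    }

    override func keyDown(with event: NSEvent) {
        // Forward the key event to every child that opted in to user interaction.
        // (On macOS, SKNode inherits from NSResponder, so keyDown(with:) is available.)
        for child in children where child.isUserInteractionEnabled {
            child.keyDown(with: event)
        }
    }
}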

SpriteKit isUserInteractionEnabled=false Blocks Touches

I am building a SpriteKit game. The game scene is the parent of a transparent layer above it whose role is to display occasional messages to the player. I want this layer to be transparent and inert most of the time, and of course never receive touches. As such, I have isUserInteractionEnabled set to false. However, when I add this layer to the scene, it blocks all touches below it. What gives?
Edit: the parent of the MessageLayer is the game scene, and the game scene also has isUserInteractionEnabled = false, so I do not believe that the MessageLayer is inheriting the behavior.
Here is my code:
import Foundation
import SpriteKit

class MessageLayer: SKSpriteNode {

    init() {
        let size = CGSize(width: screenWidth, height: screenHeight)
        super.init(texture: nil, color: UIColor.blue, size: size)
        self.isUserInteractionEnabled = false
        self.name = "MessageLayer"
        alpha = 0.6
        zPosition = +200
    }

    override init(texture: SKTexture!, color: UIColor, size: CGSize) {
        super.init(texture: texture, color: color, size: size)
    }

    required init(coder aDecoder: NSCoder) {
        super.init(texture: nil, color: UIColor.clear, size: CGSize(width: 100, height: 100))
    }

    // MARK: - Functions

    func alphaUp() {
        alpha = 0.6
    }

    func alphaDown() {
        alpha = 0.0
    }
}
Just relay the touches to the scene / other nodes you want to do stuff with.
class TransparentNode: SKSpriteNode {

    init(color: SKColor = .clear, size: CGSize) {
        super.init(texture: nil, color: color, size: size)
        isUserInteractionEnabled = true
    }

    // touchesBegan on iOS:
    override func mouseDown(with event: NSEvent) {
        // Relay the event to the scene:
        scene!.mouseDown(with: event)
    }

    required init?(coder aDecoder: NSCoder) { fatalError() }
}

class GameScene: SKScene {

    lazy var transparentNode: SKSpriteNode = TransparentNode(size: self.size)

    // touchesBegan on iOS:
    override func mouseDown(with event: NSEvent) {
        print("ho")
    }

    override func didMove(to view: SKView) {
        addChild(transparentNode)
        isUserInteractionEnabled = false
    }
}
And with that setup, you can see that the invisible node relays events properly.
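The question itself is about touches on iOS, while the snippet above uses macOS mouse events, so here is a hedged sketch of the same relay idea using touches; it mirrors the mouseDown version and is only an illustration:

class TransparentNode: SKSpriteNode {
    init(color: SKColor = .clear, size: CGSize) {
        super.init(texture: nil, color: color, size: size)
        isUserInteractionEnabled = true
    }

    // Relay touches to the scene instead of swallowing them.
    override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
        scene?.touchesBegan(touches, with: event)
    }

    required init?(coder aDecoder: NSCoder) { fatalError() }
}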

How to move NSImageView inside NSView in Cocoa?

In my macOS app I need to be able to move an NSImageView around inside an NSView after a mouseDown event (triggered by the user) happens in the NSImageView. When the user triggers a mouseUp event, the view should move in the direction of the last mouseDragged event and stay there. During the move I want the NSImageView to be visible on screen (it should move along with the mouse cursor).
I've read a Handling Mouse Events guide by Apple, https://developer.apple.com/library/content/documentation/Cocoa/Conceptual/EventOverview/HandlingMouseEvents/HandlingMouseEvents.html
Also I've downloaded this sample code: https://developer.apple.com/library/content/samplecode/DragItemAround/Introduction/Intro.html
Both links contain Objective-C code, and the DragItemAround sample is outdated. I've searched GitHub and other StackOverflow threads, but have not found any working solutions.
I'd be glad to hear answers to this question. I'm using Swift 3.
I've created a custom MovableImageView which is a subclass of NSImageView with this code:
import Cocoa

class MovableImageView: NSImageView {

    override func draw(_ dirtyRect: NSRect) {
        super.draw(dirtyRect)
        // Drawing code here.
        // Set up the starting location of the draggable item.
    }

    override func mouseDown(with event: NSEvent) {
        Swift.print("mouseDown")
        //let window = self.window!
        //let startingPoint = event.locationInWindow
    }

    override func mouseDragged(with event: NSEvent) {
        Swift.print("mouseDragged")
        self.backgroundColor = NSColor.black
    }

    override func mouseUp(with event: NSEvent) {
        Swift.print("mouseUp")
    }
}
After that, in Interface Builder, I set this class on the NSImageView. I've also set constraints for the NSImageView in Interface Builder.
Now I'm trying to figure out: how do I move the NSImageView inside the NSView? (Swift 3, Xcode 8)
You need to save the mouseDown point and use it for offset later. Please check the following code:
class MovableImageView: NSImageView {

    var firstMouseDownPoint: NSPoint = NSZeroPoint

    init() {
        super.init(frame: NSZeroRect)
        self.wantsLayer = true
        self.layer?.backgroundColor = NSColor.red.cgColor
    }

    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    override func draw(_ dirtyRect: NSRect) {
        super.draw(dirtyRect)
        // Drawing code here.
    }

    override func mouseDown(with event: NSEvent) {
        Swift.print("mouseDown")
        firstMouseDownPoint = (self.window?.contentView?.convert(event.locationInWindow, to: self))!
    }

    override func mouseDragged(with event: NSEvent) {
        Swift.print("mouseDragged")
        let newPoint = (self.window?.contentView?.convert(event.locationInWindow, to: self))!
        let offset = NSPoint(x: newPoint.x - firstMouseDownPoint.x, y: newPoint.y - firstMouseDownPoint.y)
        let origin = self.frame.origin
        let size = self.frame.size
        self.frame = NSRect(x: origin.x + offset.x, y: origin.y + offset.y, width: size.width, height: size.height)
    }

    override func mouseUp(with event: NSEvent) {
        Swift.print("mouseUp")
    }
}
In the parent view, just add this MovableImageView as a subview, like this:
let view = MovableImageView()
view.frame = NSRect(x: 0, y: 0, width: 100, height: 100)
self.view.addSubview(view) //Self is your parent view
Constraint version of @brianLikeApple's answer:
class MovableImageView: NSImageView {

    var firstMouseDownPoint: NSPoint = NSZeroPoint
    var xConstraint: NSLayoutConstraint!
    var yConstraint: NSLayoutConstraint!

    required init?(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)
        self.wantsLayer = true
        self.layer?.backgroundColor = NSColor.red.cgColor
    }

    override func draw(_ dirtyRect: NSRect) {
        super.draw(dirtyRect)
        // Drawing code here.
    }

    override func mouseDown(with event: NSEvent) {
        Swift.print("mouseDown")
        firstMouseDownPoint = (self.window?.contentView?.convert(event.locationInWindow, to: self))!
    }

    override func mouseDragged(with event: NSEvent) {
        Swift.print("mouseDragged")
        let newPoint = (self.window?.contentView?.convert(event.locationInWindow, to: self))!
        let offset = NSPoint(x: newPoint.x - firstMouseDownPoint.x, y: newPoint.y - firstMouseDownPoint.y)
        let origin = self.frame.origin
        let size = self.frame.size
        yConstraint.constant = (self.superview?.frame.height)! - origin.y - offset.y - size.height
        xConstraint.constant = origin.x + offset.x
    }

    override func mouseUp(with event: NSEvent) {
        Swift.print("mouseUp")
    }
}
Here's an NSView you can drag. It uses constraints exactly like @ExeRhythm's answer.
import AppKit

class DraggableNSView: NSView {

    var down: NSPoint = NSZeroPoint

    @IBOutlet var across: NSLayoutConstraint!
    @IBOutlet var up: NSLayoutConstraint!

    override func mouseDown(with e: NSEvent) {
        down = window?.contentView?.convert(e.locationInWindow, to: self) ?? NSZeroPoint
    }

    override func mouseDragged(with e: NSEvent) {
        guard let m = window?.contentView?.convert(e.locationInWindow, to: self) else { return }
        let p = frame.origin + (m - down)
        across.constant = p.x
        up.constant = p.y
    }
}
Don't forget that you must connect the across and up constraints in storyboard!
Note: if you prefer a "down" constraint rather than an "up" one, it is:
// if you prefer a "down" constraint:
down.constant = (superview?.frame.height)! - p.y - frame.size.height
As always, add these two functions to your project:
func -(lhs: CGPoint, rhs: CGPoint) -> CGPoint {
    return CGPoint(x: lhs.x - rhs.x, y: lhs.y - rhs.y)
}

func +(lhs: CGPoint, rhs: CGPoint) -> CGPoint {
    return CGPoint(x: lhs.x + rhs.x, y: lhs.y + rhs.y)
}
Note that you can of course do this with any subclass of NSView, such as NSImageView:
class DraggableNSImageView: NSImageView {
    ... same as above
}
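If you add the view in code rather than in a storyboard, a rough sketch of wiring up the two constraints programmatically could look like the following; parentView, the anchor choice (leading and bottom, to match across/up), and the constants are assumptions for illustration, and the signs may need flipping depending on your layout:

let draggable = DraggableNSView(frame: NSRect(x: 0, y: 0, width: 100, height: 100))
draggable.translatesAutoresizingMaskIntoConstraints = false
parentView.addSubview(draggable)   // parentView is whatever container you drag inside

// Hypothetical programmatic equivalents of the storyboard outlets.
let across = draggable.leadingAnchor.constraint(equalTo: parentView.leadingAnchor, constant: 0)
let up = draggable.bottomAnchor.constraint(equalTo: parentView.bottomAnchor, constant: 0)

NSLayoutConstraint.activate([
    across,
    up,
    draggable.widthAnchor.constraint(equalToConstant: 100),
    draggable.heightAnchor.constraint(equalToConstant: 100)
])

draggable.across = across
draggable.up = up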

Changing texture for subclass of SKSpriteNode?

The update function in the class below, which is a subclass of SKSpriteNode, is supposed to change the node's texture. According to this SO answer, updating the texture property is sufficient to change an SKSpriteNode's texture. However, this code doesn't work.
Any ideas why?
class CharacterNode: SKSpriteNode {

    let spriteSize = CGSize(width: 70, height: 100)
    var level = 0

    init(level: Int) {
        self.level = level
        super.init(texture: nil, color: UIColor.clear, size: spriteSize)
    }

    required init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    func update(level: Int) {
        self.level = level
        self.texture = textureForLevel(level: level)
    }

    func textureForLevel(level: Int) -> SKTexture {
        return SKTexture(imageNamed: "TestTexture")
    }
}
Your code works; in fact, that SO answer is correct.
Regarding your SKSpriteNode subclass: update(level:) is a custom method that you added. The instance method update(_:) that SpriteKit calls automatically each frame exists only on SKScene, so your method is never executed unless you call it explicitly.
To test your class, you can change your code as below (I've changed only the textureForLevel method for this example):
class CharacterNode: SKSpriteNode {

    let spriteSize = CGSize(width: 70, height: 100)
    var level = 0

    init(level: Int) {
        self.level = level
        super.init(texture: nil, color: UIColor.clear, size: spriteSize)
    }

    required init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    func update(level: Int) {
        self.level = level
        self.texture = textureForLevel(level: level)
    }

    func textureForLevel(level: Int) -> SKTexture {
        if level == 3 {
            return SKTexture(imageNamed: "TestTexture3.jpg")
        }
        return SKTexture(imageNamed: "TestTexture.jpg")
    }
}

class GameScene: SKScene {
    override func didMove(to view: SKView) {
        let characterNode = CharacterNode(level: 2)
        characterNode.update(level: 2)
        addChild(characterNode)
        characterNode.position = CGPoint(x: self.frame.midX, y: self.frame.midY)
        characterNode.run(SKAction.wait(forDuration: 2.0), completion: {
            characterNode.update(level: 3)
        })
    }
}
As you can see, when you launch GameScene a CharacterNode sprite is shown in the center of the screen, and after 2 seconds its texture changes.
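If you want the node to react every frame rather than via a one-off action, one option (a sketch, not part of the original answer) is to drive the custom method from the scene's update(_:), which SpriteKit does call automatically once per frame:

class GameScene: SKScene {
    let characterNode = CharacterNode(level: 1)
    var currentLevel = 1

    override func didMove(to view: SKView) {
        addChild(characterNode)
        characterNode.position = CGPoint(x: frame.midX, y: frame.midY)
    }

    override func update(_ currentTime: TimeInterval) {
        // SKScene.update(_:) runs once per frame; forward the game state
        // to the node's custom update(level:) whenever it changes.
        if characterNode.level != currentLevel {
            characterNode.update(level: currentLevel)
        }
    }
}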

How to make NSView draggable in Swift (OSX Mac app)?

I'm trying to make two NSViews draggable, and I was wondering how to code that in Swift for a macOS app. I'm thinking of putting the two NSViews inside a parent NSView and making them draggable within it. For example, create NSViewA with NSViewB and NSViewC nested inside it (both draggable within NSViewA). I would like to code this in Swift. Cheers!
You can do this fairly easily if you take advantage of the information passed on the responder chain. Consider this custom NSView class.
class DraggableView: NSView {

    var startPoint: NSPoint?
    var frameOrigin: NSPoint?

    override func mouseDown(with event: NSEvent) {
        startPoint = event.locationInWindow
        frameOrigin = frame.origin
    }

    override func mouseDragged(with event: NSEvent) {
        let offset = event.locationInWindow - startPoint!
        frame.origin = frameOrigin! + offset
    }

    override func mouseUp(with event: NSEvent) {
        startPoint = nil
        frameOrigin = nil
    }
}
To make the math work for NSPoint, I overloaded the operators in an extension:
extension NSPoint {
    static func -(pointA: NSPoint, pointB: NSPoint) -> NSPoint {
        return NSPoint(x: pointA.x - pointB.x, y: pointA.y - pointB.y)
    }

    static func +(pointA: NSPoint, pointB: NSPoint) -> NSPoint {
        return NSPoint(x: pointA.x + pointB.x, y: pointA.y + pointB.y)
    }
}
Then your ViewController class is just setup code:
class ViewController: NSViewController {

    let viewA: DraggableView = DraggableView()
    let viewB: DraggableView = DraggableView()

    override func viewDidLoad() {
        super.viewDidLoad()

        viewA.frame = NSRect(origin: .zero, size: NSSize(width: 100, height: 100))
        viewB.frame = NSRect(origin: NSPoint(x: 125, y: 0), size: NSSize(width: 100, height: 100))

        viewA.wantsLayer = true
        viewB.wantsLayer = true
        viewA.layer?.backgroundColor = NSColor.blue.cgColor
        viewB.layer?.backgroundColor = NSColor.green.cgColor

        view.addSubview(viewA)
        view.addSubview(viewB)
    }
}

Swift Center on Camera not working when ApplyingImpulse in another class

I'm new to Swift programming and have been stuck on this issue for the last week. I am trying to centre the camera on a hero while applying an impulse to him. My code works if I create the hero in the scene class and centre on him, but it doesn't work when I put the hero in another class and try to centre on him from the scene class. Centring works if I just move him by changing his x or y position, but not if I apply an impulse. It seems that when the hero is in a separate class and an impulse is applied, his x and y show up as nil while he is in motion, whereas when he is in the same class they show up fine. Any help is much appreciated.
Code where centering works with applyImpulse when everything is in the same class:
class GameScene: SKScene {

    var hero: SKSpriteNode?
    var mazeWorld: SKNode?
    var player = Player()
    var heroLocation: CGPoint = CGPointZero

    override func didMoveToView(view: SKView) {
        mazeWorld = childNodeWithName("mazeWorld")
        heroLocation = mazeWorld!.childNodeWithName("sPoint")!.position
        self.anchorPoint = CGPoint(x: 0.5, y: 0.3)

        hero?.position = heroLocation
        hero = SKSpriteNode(imageNamed: "hero_walk_down_0")
        hero!.physicsBody?.dynamic = false
        hero!.physicsBody?.affectedByGravity = true
        hero!.physicsBody?.pinned = false
        hero!.physicsBody = SKPhysicsBody(rectangleOfSize: hero!.size)
        hero!.physicsBody?.mass = 1.0
        hero!.setScale(0.8)
        mazeWorld!.addChild(hero!)
    }

    override func touchesBegan(touches: NSSet, withEvent event: UIEvent) {
        /* Called when a touch begins */
        for touch: AnyObject in touches {
            hero!.physicsBody?.applyImpulse(CGVector(dx: 240, dy: 550))
        }
    }

    override func update(currentTime: CFTimeInterval) {
        /* Called before each frame is rendered */
    }

    func centerOnNode(node: SKNode) {
        let cameraPositionInScene: CGPoint = self.convertPoint(node.position, fromNode: mazeWorld!)
        mazeWorld!.position = CGPoint(x: mazeWorld!.position.x - cameraPositionInScene.x, y: mazeWorld!.position.y - cameraPositionInScene.y)
    }

    override func didSimulatePhysics() {
        self.centerOnNode(hero!)
    }
}
Here is the code where centering does not work with applyImpulse, for some reason:
import SpriteKit

class GameScene: SKScene {

    var hero: SKSpriteNode?
    var mazeWorld: SKNode?
    var player = Player()
    var heroLocation: CGPoint = CGPointZero

    override func didMoveToView(view: SKView) {
        mazeWorld = childNodeWithName("mazeWorld")
        heroLocation = mazeWorld!.childNodeWithName("sPoint")!.position
        self.anchorPoint = CGPoint(x: 0.5, y: 0.3)

        player.position = heroLocation
        mazeWorld!.addChild(player)
    }

    override func touchesBegan(touches: NSSet, withEvent event: UIEvent) {
        /* Called when a touch begins */
        for touch: AnyObject in touches {
            player.jumpRight()
        }
    }

    override func update(currentTime: CFTimeInterval) {
        /* Called before each frame is rendered */
    }

    func centerOnNode(node: SKNode) {
        let cameraPositionInScene: CGPoint = self.convertPoint(node.position, fromNode: mazeWorld!)
        mazeWorld!.position = CGPoint(x: mazeWorld!.position.x - cameraPositionInScene.x, y: mazeWorld!.position.y - cameraPositionInScene.y)
    }

    override func didSimulatePhysics() {
        self.centerOnNode(player)
    }
}
import Foundation
import SpriteKit

class Player: SKNode {

    var objectSprite: SKSpriteNode?

    override init() {
        super.init()
        objectSprite = SKSpriteNode(imageNamed: "hero_walk_down_0")
        objectSprite!.physicsBody?.dynamic = false
        objectSprite!.physicsBody?.affectedByGravity = true
        objectSprite!.physicsBody?.pinned = false
        objectSprite!.physicsBody = SKPhysicsBody(rectangleOfSize: objectSprite!.size)
        objectSprite!.physicsBody?.mass = 1.0
        objectSprite!.setScale(0.8)
        addChild(objectSprite!)
    }

    required init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    func jumpRight() {
        objectSprite!.physicsBody?.applyImpulse(CGVector(dx: 240, dy: 550))
    }
}
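A hedged guess at the cause, based only on the code shown: the impulse is applied to objectSprite's physics body, so the child sprite moves around inside the Player node while the Player node's own position never changes, which leaves centerOnNode(player) with nothing to follow. If that is what's happening, one minimal sketch of a fix is to centre on the inner sprite and convert from its own parent rather than assuming the node is a direct child of mazeWorld:

func centerOnNode(node: SKNode) {
    // Convert from the node's actual parent so this also works for
    // objectSprite, which lives inside Player rather than mazeWorld.
    let cameraPositionInScene: CGPoint = self.convertPoint(node.position, fromNode: node.parent!)
    mazeWorld!.position = CGPoint(x: mazeWorld!.position.x - cameraPositionInScene.x,
                                  y: mazeWorld!.position.y - cameraPositionInScene.y)
}

override func didSimulatePhysics() {
    // Follow the sprite that actually carries the physics body.
    if let sprite = player.objectSprite {
        self.centerOnNode(sprite)
    }
}

Alternatively, you could give the Player node itself the physics body, so that player.position is what the impulse moves.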