I added a purchased 3D zombie model and I want to add it multiple times so there is a group of zombies chasing the player (his camera). The DAE file (zombie) has a couple of animations (attack, walk, run and so on). I added a couple of these zombie models and they all appear at the positions I wanted them to appear (previously determined coordinates stored in the zombies array).
When they appear on the screen they play all the animations as they are defined in the DAE file.
And now my question: is it possible to add this DAE zombie model but have it play only a specified frame range, for example frames 2-122? My code so far (I deleted what was unnecessary):
import UIKit
import CoreLocation
import SceneKit
import ARKit
@available(iOS 11.0, *)
class VRViewController: UIViewController, CLLocationManagerDelegate, ARSCNViewDelegate
{
/* VARIABLES */
var number:Int = 10
var speed:Int = 4
var zombies = [[AnyObject]]()
var timer = Timer()
var nodes = [SCNNode]()
let configuration = ARWorldTrackingConfiguration()
@IBOutlet weak var sceneView: ARSCNView!
override func viewWillAppear(_ animated: Bool)
{
super.viewWillAppear(animated)
// Run the view's session
sceneView.session.run(configuration)
}
override func viewDidLoad()
{
super.viewDidLoad()
// Set the view's delegate
sceneView.delegate = self
// Create a new scene
let scene = SCNScene()
// Set the scene to the view
sceneView.scene = scene
sceneView.autoenablesDefaultLighting = true
/* sceneView.debugOptions = [.showConstraints, .showLightExtents, ARSCNDebugOptions.showFeaturePoints, ARSCNDebugOptions.showWorldOrigin] */
}
override func viewDidAppear(_ animated: Bool)
{
super.viewDidAppear(animated)
setZombies()
timer = Timer.scheduledTimer(timeInterval: 1, target: self, selector: #selector(VRViewController.restartSession), userInfo: nil, repeats: true)
}
override func didReceiveMemoryWarning()
{
super.didReceiveMemoryWarning()
}
/* MAIN FUNCTIONS */
func setZombies()
{
for i in 0..<number
{
let zombiePlane = zombies[i][0] as! CGPoint
let thetaPlane = zombies[i][1] as! Double
let xPlane = abs(zombiePlane.x - center.x)
let yPlane = abs(zombiePlane.y - center.y)
let distance = sqrt((xPlane*xPlane) + (yPlane*yPlane))
var theta3D:Double = thetaPlane * (180/Double.pi) - diff2D - 90 /* degrees */
theta3D = theta3D * (Double.pi/180) /* radians */
let x3D = Float(distance) * Float(cos(theta3D))
let z3D = Float(distance) * Float(sin(theta3D))
addZombies(i:i, x: x3D, y: -1.5, z: z3D)
}
}
func addZombies(i:Int, x: Float, y: Float, z: Float) {
guard let zombieScene = SCNScene(named: "art.scnassets/zombie/StrongZombie.DAE") else { return }
let zombieNode = SCNNode()
let zombieSceneChildNodes = zombieScene.rootNode.childNodes
for childNode in zombieSceneChildNodes {
zombieNode.addChildNode(childNode)
}
zombieNode.position = SCNVector3(x, y, z)
zombieNode.scale = SCNVector3(0.1, 0.1, 0.1)
sceneView.scene.rootNode.addChildNode(zombieNode)
nodes.append(zombieNode)
}
@objc func restartSession()
{
/* self.sceneView.session.run(configuration, options: [.resetTracking]) */
for i in 0..<number
{
let theta3D = atan2(nodes[i].position.z, nodes[i].position.x)
let movement = (Float(speed)/5)
print(speed)
let distance = sqrt((nodes[i].position.x)*(nodes[i].position.x) + (nodes[i].position.z)*(nodes[i].position.z)) - movement
let x3D = Float(distance) * Float(cos(theta3D))
let z3D = Float(distance) * Float(sin(theta3D))
nodes[i].position = SCNVector3(x:x3D, y:-10, z:z3D)
}
}
/* SESSION FUNCTIONS */
func session(_ session: ARSession, didFailWithError error: Error)
{
// Present an error message to the user
}
func sessionWasInterrupted(_ session: ARSession)
{
// Inform the user that the session has been interrupted, for example, by presenting an overlay
}
func sessionInterruptionEnded(_ session: ARSession)
{
// Reset tracking and/or remove existing anchors if consistent tracking is required
}
}
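One common way to restrict playback to a frame range is to load the full CAAnimation from the DAE through SCNSceneSource and wrap it in a CAAnimationGroup trimmed to the corresponding time window. The sketch below is only a starting point under assumptions: the animation identifier ("unnamed_animation__0"), the resource lookup, and the 30 fps frame rate all depend on how the DAE was exported.
func trimmedZombieAnimation(fromFrame start: Int, toFrame end: Int, fps: Double = 30) -> CAAnimation? {
    // Assumed resource lookup; adjust to however StrongZombie.DAE is bundled in your project
    guard let url = Bundle.main.url(forResource: "StrongZombie", withExtension: "DAE", subdirectory: "art.scnassets/zombie"),
          let source = SCNSceneSource(url: url, options: nil),
          let full = source.entryWithIdentifier("unnamed_animation__0", withClass: CAAnimation.self) else {
        return nil
    }
    // Convert the frame range to seconds and clip the full animation to that window
    full.timeOffset = Double(start) / fps
    let trimmed = CAAnimationGroup()
    trimmed.animations = [full]
    trimmed.duration = Double(end - start) / fps
    trimmed.repeatCount = .greatestFiniteMagnitude
    return trimmed
}
In addZombies(i:x:y:z:) you would also likely need to strip the animations that auto-play with the imported child nodes (childNode.removeAllAnimations()) and then add the trimmed one instead, e.g. if let walk = trimmedZombieAnimation(fromFrame: 2, toFrame: 122) { zombieNode.addAnimation(walk, forKey: "walk") }.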
Related
I want to show a 3D model when a QR code is read and matches. But after reading the QR code the camera cannot see the surface, and when I look at the output I get the error "VIO error callback: 161457.637109, 1, Frame processing rate has fallen below pre-set threshold". The camera works very slowly, and I think that is part of the problem. Even though the barcode is read, the 3D model never shows up afterwards.
import UIKit
import ARKit
import SceneKit
import Vision
enum FunctionMode {
case none
case placeObject(String)
case measure
}
class ARKitTestViewController: UIViewController {
@IBOutlet var sceneView: ARSCNView!
@IBOutlet weak var crosshair: UIView!
@IBOutlet weak var messageLabel: UILabel!
@IBOutlet weak var trackingInfo: UILabel!
var currentMode: FunctionMode = .none
var objects: [SCNNode] = []
// Current touch location
private var currTouchLocation: CGPoint?
let sequenceHandler = VNSequenceRequestHandler()
var isObjectAdded: Bool = false
var isQRCodeFound: Bool = false
var viewCenter:CGPoint = CGPoint()
override func viewDidLoad() {
super.viewDidLoad()
runARSession()
trackingInfo.text = ""
messageLabel.text = ""
viewCenter = CGPoint(x: view.bounds.width / 2.0, y: view.bounds.height / 2.0)
}
@IBAction func didTapReset(_ sender: Any) {
removeAllObjects()
}
func removeAllObjects() {
for object in objects {
object.removeFromParentNode()
}
objects = []
}
// MARK: - barcode handling
func searchQRCode(){
guard let frame = sceneView.session.currentFrame else {
return
}
let handler = VNImageRequestHandler(ciImage: CIImage(cvPixelBuffer: frame.capturedImage), options: [.properties : ""])
//DispatchQueue.global(qos: .userInteractive).async {
do {
try handler.perform([self.barcodeRequest])
} catch {
print(error)
}
//}
}
lazy var barcodeRequest: VNDetectBarcodesRequest = {
return VNDetectBarcodesRequest(completionHandler: self.handleBarcodes)
}()
func handleBarcodes(request: VNRequest, error: Error?) {
//print("handleBarcodes called")
guard let observations = request.results as? [VNBarcodeObservation]
else { fatalError("unexpected result type from VNBarcodeRequest") }
guard observations.first != nil else {
/*DispatchQueue.main.async {
print("No Barcode detected.")
}*/
return
}
// Loop through the found results
for result in request.results! {
print("Barcode detected")
// Cast the result to a barcode-observation
if let barcode = result as? VNBarcodeObservation {
if let payload = barcode.payloadStringValue {
let screenCentre : CGPoint = CGPoint(x: self.sceneView.bounds.midX, y: self.sceneView.bounds.midY)
let hitTestResults = sceneView.hitTest(screenCentre, types: [.existingPlaneUsingExtent])
//check payload
if let hitResult = hitTestResults.first {
// Get Coordinates of HitTest
let transform : matrix_float4x4 = hitResult.worldTransform
let worldCoord : SCNVector3 = SCNVector3Make(transform.columns.3.x, transform.columns.3.y, transform.columns.3.z)
let plane = SCNPlane(width: 0.1, height: 0.1)
let material = SCNMaterial()
material.diffuse.contents = UIColor.red
plane.materials = [material]
// Holder node
let node = SCNNode()
//node.transform = SCNMatrix4MakeRotation(-Float.pi / 2, 1, 0, 0)
//node.geometry = plane
sceneView.scene.rootNode.addChildNode(node)
node.position = worldCoord
//check payload
if(payload == "target_1"){
//Add 3D object
let objectScene = SCNScene(named: "Models.scnassets/candle/candle.scn")!
if let objectNode = objectScene.rootNode.childNode(withName: "candle", recursively: true) {
node.addChildNode(objectNode)
}
}
if(payload == "target_2"){
//Add 3D object
let objectScene = SCNScene(named: "Models.scnassets/lamp/lamp.scn")!
if let objectNode = objectScene.rootNode.childNode(withName: "lamp", recursively: true) {
node.addChildNode(objectNode)
}
}
isQRCodeFound = true
}
}
}
}
}
// MARK: - AR functions
func runARSession() {
// Registers ARKitTestViewController as ARSCNView delegate. You’ll use this later to render objects.
sceneView.delegate = self
// Uses ARWorldTrackingConfiguration to make use of all degrees of movement and give the best results. Remember, it supports A9 processors and up.
let configuration = ARWorldTrackingConfiguration()
// Turns on the automatic horizontal plane detection. You’ll use this to render planes for debugging and to place objects in the world.
configuration.planeDetection = .horizontal
// This turns on the light estimation calculations. ARSCNView uses that automatically and lights your objects based on the estimated light conditions in the real world.
configuration.isLightEstimationEnabled = true
// run(_:options) starts the ARKit session along with capturing video. This method will cause your device to ask for camera capture permission. If the user denies this request, ARKit won’t work.
sceneView.session.run(configuration)
// ARSCNView has an extra feature of rendering feature points. This turns it on for debug builds.
#if DEBUG
sceneView.debugOptions = ARSCNDebugOptions.showFeaturePoints
#endif
}
//Function that gives the user some feedback of the current tracking status.
func updateTrackingInfo() {
// You can get the current ARFrame thanks to the currentFrame property on the ARSession object.
guard let frame = sceneView.session.currentFrame else {
return
}
// The trackingState property can be found in the current frame’s ARCamera object. The trackingState enum value limited has an associated TrackingStateReason value which tells you the specific tracking problem.
switch frame.camera.trackingState {
case .limited(let reason):
switch reason {
case .excessiveMotion:
trackingInfo.text = "Limited Tracking: Excessive Motion"
case .insufficientFeatures:
trackingInfo.text =
"Limited Tracking: Insufficient Details"
default:
trackingInfo.text = "Limited Tracking"
}
default:
trackingInfo.text = "Good tracking conditions"
}
// You turned on light estimation in the ARWorldTrackingConfiguration, so it’s measured and provided in each ARFrame in the lightEstimate property.
guard
let lightEstimate = frame.lightEstimate?.ambientIntensity
else {
return
}
// ambientIntensity is given in lumen units. Less than 100 lumens is usually too dark, so you communicate this to the user.
if lightEstimate < 100 {
trackingInfo.text = "Limited Tracking: Too Dark"
}
}
}
extension ARKitTestViewController: ARSCNViewDelegate {
func renderer(_ renderer: SCNSceneRenderer, didAdd node: SCNNode, for anchor: ARAnchor) {
DispatchQueue.main.async {
if let planeAnchor = anchor as? ARPlaneAnchor {
#if DEBUG
let planeNode = createPlaneNode(center: planeAnchor.center, extent: planeAnchor.extent)
node.addChildNode(planeNode)
#endif
// The else branch means the ARAnchor is not an ARPlaneAnchor subclass, but a regular ARAnchor instance you added in touchesBegan(_:with:)
} else {
// currentMode is an ARKitTestViewController property already added in the starter. It represents the current UI state: the placeObject value if the object button is selected, or the measure value if the measuring button is selected. The switch executes different code depending on the UI state.
switch self.currentMode {
case .none:
break
// placeObject has an associated string value which represents the path to the 3D model .scn file. You can browse all the 3D models in Models.scnassets.
case .placeObject(let name):
// nodeWithModelName(_:) creates a new 3D model SCNNode with the given path name. It’s a helper function provided with the starter project.
let modelClone = nodeWithModelName(name)
// Append the node to the objects array provided with the starter.
self.objects.append(modelClone)
// Finally, you add your new object node to the SCNNode provided to the delegate method.
node.addChildNode(modelClone)
// You’ll implement measuring later.
case .measure:
break
}
}
}
}
func renderer(_ renderer: SCNSceneRenderer, didUpdate node: SCNNode, for anchor: ARAnchor) {
DispatchQueue.main.async {
if let planeAnchor = anchor as? ARPlaneAnchor {
// Update the child node, which is the plane node you added earlier in renderer(_:didAdd:for:). updatePlaneNode(_:center:extent:) is a function included with the starter that updates the coordinates and size of the plane to the updated values contained in ARPlaneAnchor.
updatePlaneNode(node.childNodes[0], center: planeAnchor.center, extent: planeAnchor.extent)
}
}
}
func renderer(_ renderer: SCNSceneRenderer, didRemove node: SCNNode,
for anchor: ARAnchor) {
guard anchor is ARPlaneAnchor else { return }
// Removes the plane from the node if the corresponding ARAnchorPlane has been removed. removeChildren(inNode:) was provided with the starter project as well.
removeChildren(inNode: node)
}
func renderer(_ renderer: SCNSceneRenderer, updateAtTime time: TimeInterval) {
DispatchQueue.main.async {
// Updates tracking info for each rendered frame.
self.updateTrackingInfo()
if(!self.isQRCodeFound){
self.searchQRCode()
}
// If the dot in the middle hit tests with existingPlaneUsingExtent type, it turns green to indicate high quality hit testing to the user.
if let _ = self.sceneView.hitTest(
self.viewCenter,
types: [.existingPlaneUsingExtent]).first {
self.crosshair.backgroundColor = UIColor.green
} else {
self.crosshair.backgroundColor = UIColor(white: 0.34, alpha: 1)
}
}
}
func session(_ session: ARSession, didFailWithError error: Error) {
print("ARSession error: \(error.localizedDescription)")
let message = error.localizedDescription
messageLabel.text = message
DispatchQueue.main.asyncAfter(deadline: .now() + 2) {
if self.messageLabel.text == message {
self.messageLabel.text = ""
}
}
}
// sessionWasInterrupted(_:) is called when a session is interrupted, like when your app is backgrounded.
func sessionWasInterrupted(_ session: ARSession) {
print("Session interrupted")
let message = "Session interrupted"
messageLabel.text = message
DispatchQueue.main.asyncAfter(deadline: .now() + 2) {
if self.messageLabel.text == message {
self.messageLabel.text = ""
}
}
}
func sessionInterruptionEnded(_ session: ARSession) {
print("Session resumed")
let message = "Session resumed"
messageLabel.text = message
DispatchQueue.main.asyncAfter(deadline: .now() + 2) {
if self.messageLabel.text == message {
self.messageLabel.text = ""
}
}
// When sessionInterruptionEnded(_:) is called, you should remove all your objects and restart the AR session by calling the runSession() method you implemented before. removeAllObjects() is a helper method provided with the starter project.
removeAllObjects()
runARSession()
}
}
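The renderer callback above fires on every frame, and searchQRCode() performs the Vision request synchronously on that thread, which can easily starve the camera pipeline and trigger the "Frame processing rate has fallen below pre-set threshold" warning. One possible mitigation, sketched here as an assumption rather than a confirmed fix, is to run the barcode request on a background queue and skip frames while a request is still in flight:
private var isScanning = false   // hypothetical throttle flag

func searchQRCode() {
    guard !isScanning, let frame = sceneView.session.currentFrame else { return }
    isScanning = true
    let pixelBuffer = frame.capturedImage
    DispatchQueue.global(qos: .userInitiated).async {
        let handler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, options: [:])
        do {
            try handler.perform([self.barcodeRequest])
        } catch {
            print(error)
        }
        DispatchQueue.main.async { self.isScanning = false }
    }
}
Note that handleBarcodes(request:error:) would then run off the main thread, so its hit testing and scene changes should be dispatched back to the main queue.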
I want to achieve the billboard effect in RealityKit (the plane always looks at the camera). I used the Entity.look() method, but the result is weird, I can't even see the plane. The code I used is below; what is the problem?
import SwiftUI
import RealityKit
import ARKit
struct ARViewContainer: UIViewRepresentable {
func makeUIView(context: Context) -> ARView {
let arView = ARView(frame: .zero)
let config = ARWorldTrackingConfiguration()
config.planeDetection = .horizontal
arView.session.run(config, options:[ ])
arView.session.delegate = arView
arView.createPlane()
return arView
}
func updateUIView(_ uiView: ARView, context: Context) { }
}
var planeMesh = MeshResource.generatePlane(width: 0.15, height: 0.15)
var planeMaterial = SimpleMaterial(color:.white,isMetallic: false)
var planeEntity = ModelEntity(mesh:planeMesh,materials:[planeMaterial])
var arCameraPostion : SIMD3<Float>!
var isPlaced = false
extension ARView : ARSessionDelegate{
func createPlane(){
let planeAnchor = AnchorEntity(plane:.horizontal)
planeAnchor.addChild(planeEntity)
self.scene.addAnchor(planeAnchor)
//planeAnchor.transform.rotation = simd_quatf(angle: .pi, axis: [0,1,0])
}
public func session(_ session: ARSession, didUpdate frame: ARFrame){
guard let arCamera = session.currentFrame?.camera else { return }
if isPlaced {
arCameraPostion = SIMD3(arCamera.transform.columns.3.x,0,arCamera.transform.columns.3.z)
planeEntity.look(at: arCameraPostion, from: planeEntity.position, upVector: [0, 1, 0],relativeTo: nil)
}
}
public func session(_ session: ARSession, didAdd anchors: [ARAnchor]) {
isPlaced = true
}
}
session(_:didUpdate:) method
Try the following logic to implement a "billboard" behavior for the RealityKit camera. You can use this code as a starting point. It generates a rotation of the model around its local Y axis based on the camera position.
import UIKit
import RealityKit
import ARKit
class ViewController: UIViewController {
@IBOutlet var arView: ARView!
var model = Entity()
override func viewDidLoad() {
super.viewDidLoad()
arView.session.delegate = self
let config = ARWorldTrackingConfiguration()
arView.session.run(config)
self.model = try! ModelEntity.load(named: "drummer")
let anchor = AnchorEntity(world: [0, 0, 0])
anchor.addChild(self.model)
arView.scene.anchors.append(anchor)
}
}
The pivot point of the model must be at its center (not at some distance from the model).
extension ViewController: ARSessionDelegate {
func session(_ session: ARSession, didUpdate frame: ARFrame) {
let camTransform: float4x4 = arView.cameraTransform.matrix
let alongXZPlane: simd_float4 = camTransform.columns.3
let yaw: Float = atan2(alongXZPlane.x - model.position.x,
alongXZPlane.z - model.position.z)
print(yaw)
// Identity matrix 4x4
var positionAndScale = matrix_identity_float4x4
// position
positionAndScale.columns.3.z = -0.25
// scale
positionAndScale.columns.0.x = 0.01
positionAndScale.columns.1.y = 0.01
positionAndScale.columns.2.z = 0.01
// orientation matrix
let orientation = Transform(pitch: 0, yaw: yaw, roll: 0).matrix
// matrices multiplication
let transform = simd_mul(positionAndScale, orientation)
self.model.transform.matrix = transform
}
}
subscribe(to:on:_:) method
Alternatively, you can implement a subscription to the event stream.
import UIKit
import RealityKit
import Combine
class ViewController: UIViewController {
@IBOutlet var arView: ARView!
var model = Entity()
var subs: [AnyCancellable] = []
override func viewDidLoad() {
super.viewDidLoad()
self.model = try! ModelEntity.load(named: "drummer")
let anchor = AnchorEntity(world: [0, 0, 0])
anchor.addChild(self.model)
arView.scene.anchors.append(anchor)
arView.scene.subscribe(to: SceneEvents.Update.self) { _ in
let camTransform: float4x4 = self.arView.cameraTransform.matrix
let alongXZPlane: simd_float4 = camTransform.columns.3
let yaw: Float = atan2(alongXZPlane.x - self.model.position.x,
alongXZPlane.z - self.model.position.z)
var positionAndScale = matrix_identity_float4x4
positionAndScale.columns.3.z = -0.25
positionAndScale.columns.0.x = 0.01
positionAndScale.columns.1.y = 0.01
positionAndScale.columns.2.z = 0.01
let orientation = Transform(pitch: 0, yaw: yaw, roll: 0).matrix
let transform = simd_mul(positionAndScale, orientation)
self.model.transform.matrix = transform
}.store(in: &subs)
}
}
I am currently experimenting with some code that I found on the internet for a game where you have to click on one set of items and avoid clicking on the other. I am trying to add a timer to the game so that it lasts a total of 30 seconds, but I am really struggling to do so as I am quite inexperienced with this programming language.
import UIKit
import QuartzCore
import SceneKit
class GameViewController: UIViewController, SCNSceneRendererDelegate {
var gameView:SCNView!
var SceneGame:SCNScene!
var NodeCamera:SCNNode!
var targetCreationTime:TimeInterval = 0
override func viewDidLoad() {
super.viewDidLoad()
View_in()
initScene()
initCamera()
}
func View_in(){
gameView = self.view as! SCNView
gameView.allowsCameraControl = true
gameView.autoenablesDefaultLighting = true
gameView.delegate = self
}
func initScene (){
SceneGame = SCNScene()
gameView.scene = SceneGame
gameView.isPlaying = true
}
func initCamera(){
NodeCamera = SCNNode()
NodeCamera.camera = SCNCamera()
NodeCamera.position = SCNVector3(x:0, y:5, z:10)
SceneGame.rootNode.addChildNode(NodeCamera)
}
func createTarget(){
let geometry:SCNGeometry = SCNPyramid( width: 1, height: 1, length: 1)
let randomColor = arc4random_uniform(2) == 0 ? UIColor.green : UIColor.red
geometry.materials.first?.diffuse.contents = randomColor
let geometryNode = SCNNode(geometry: geometry)
geometryNode.physicsBody = SCNPhysicsBody(type: .dynamic, shape: nil)
if randomColor == UIColor.red {
geometryNode.name = "enemy"
}else{
geometryNode.name = "friend"
}
SceneGame.rootNode.addChildNode(geometryNode)
let randomDirection:Float = arc4random_uniform(2) == 0 ? -1.0 : 1.0
let force = SCNVector3(x: randomDirection, y: 15, z: 0)
geometryNode.physicsBody?.applyForce(force, at: SCNVector3(x: 0.05, y: 0.05, z: 0.05), asImpulse: true)
}
func renderer(_ renderer: SCNSceneRenderer, updateAtTime time: TimeInterval) {
if time > targetCreationTime{
createTarget()
targetCreationTime = time + 0.6
}
cleanUp()
}
override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
let touch = touches.first!
let location = touch.location(in: gameView)
let hitList = gameView.hitTest(location, options: nil)
if let hitObject = hitList.first{
let node = hitObject.node
if node.name == "friend" {
node.removeFromParentNode()
self.gameView.backgroundColor = UIColor.black
}else {
node.removeFromParentNode()
self.gameView.backgroundColor = UIColor.red
}
}
}
func cleanUp() {
for node in SceneGame.rootNode.childNodes {
if node.presentation.position.y < -2 {
node.removeFromParentNode()
}
}
}
override var shouldAutorotate: Bool {
return true
}
override var prefersStatusBarHidden: Bool {
return true
}
override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
if UIDevice.current.userInterfaceIdiom == .phone {
return .allButUpsideDown
} else {
return .all
}
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Release any cached data, images, etc that aren't in use.
}
}
You could use a Timer object, documented here. Just set up the timer when you want the game to start, probably once you've finished all your initializations. When you set up the timer, just wait for it to call back to your code when it finishes and run whatever logic you want to use to terminate your game.
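For example, a one-shot Timer fired after your setup in viewDidLoad could end the game after 30 seconds. This is only a minimal sketch; endGame() is a hypothetical method where you would stop spawning targets and show the result:
// At the end of viewDidLoad(), once everything is initialized
Timer.scheduledTimer(withTimeInterval: 30, repeats: false) { [weak self] _ in
    // Fires once, 30 seconds after the game starts
    self?.endGame()   // hypothetical: stop creating targets, show a game-over screen, etc.
}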
EDIT
Create a variable representing the time at which you want your game to end:
var time: CGFloat = 60
Then add an SCNAction to your scene so that every second it decreases this variable's value, for example in viewDidLoad:
// Wait one second before decreasing the time
let wait = SCNAction.wait(forDuration: 1)
// This is the heart of this answer:
// an action that reduces the time and, once it drops below 1 (i.e. it reached zero), does whatever you want
let reduceTime = SCNAction.run { _ in
self.time -= 1
if self.time < 1 {
// Do whatever you want here,
// for example show a game over scene or something else
}
}
SceneGame.rootNode.runAction(SCNAction.repeatForever(SCNAction.sequence([wait, reduceTime])))
If you want, you can show the remaining time using an SKLabelNode on a HUD, which is an SKScene used as an overlay.
You can check this tutorial for how to create a HUD.
Alternatively, you can use an SCNText; this is the documentation about it.
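For the SCNText route, a rough sketch might attach a text node to the camera and update its string from the reduceTime action. The font, scale, and position below are assumptions you would tune for your scene:
// e.g. in viewDidLoad(), alongside the timer setup above
let timeText = SCNText(string: "\(Int(time))", extrusionDepth: 0.5)
timeText.font = UIFont.systemFont(ofSize: 10)
let timeNode = SCNNode(geometry: timeText)
timeNode.scale = SCNVector3(0.05, 0.05, 0.05)
timeNode.position = SCNVector3(-1, 2, -5)      // roughly upper-left, in front of the camera
NodeCamera.addChildNode(timeNode)              // parent to the camera node so it stays on screen

// Inside reduceTime's closure, after decreasing self.time:
timeText.string = "\(Int(self.time))"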
I have two ARSCNView sessions side by side for a stereo view. I am trying to create a box in the stereo view and then make it spin.
All works fine until I move the parentNode using self.parentNode.runAction.
The movement only occurs in the right side view (SceneView2). No movement occurs in the left side view. The views are also offset. I need the movement to be synchronized in both the left and the right views.
Thanks.
Here is the code:
import UIKit
import ARKit
import SceneKit
import CoreLocation
import GLKit
class ViewController1: UIViewController, ARSCNViewDelegate {
@IBOutlet weak var sceneView: ARSCNView!
@IBOutlet weak var SceneView2: ARSCNView!
@IBOutlet weak var Label: UILabel!
var parentNode: SCNNode!
override func viewDidLoad() {
super.viewDidLoad()
addBox()
// Set the view's delegate
sceneView.delegate = self
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
let configuration = ARWorldTrackingConfiguration()
configuration.worldAlignment = .gravityAndHeading
configuration.planeDetection = .horizontal
sceneView.session.run(configuration)
movebox()
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
sceneView.session.pause()
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Release any cached data, images, etc that aren't in use.
}
func session(_ session: ARSession, didFailWithError error: Error) {
// Present an error message to the user
}
func sessionWasInterrupted(_ session: ARSession) {
// Inform the user that the session has been interrupted, for example, by presenting an overlay
}
func sessionInterruptionEnded(_ session: ARSession) {
// Reset tracking and/or remove existing anchors if consistent tracking is required
}
// UPDATE EVERY FRAME:
func renderer(_ renderer: SCNSceneRenderer, updateAtTime time: TimeInterval) {
DispatchQueue.main.async {
self.updateFrame()
//self.movebox()
}
}
func updateFrame() {
// Clone pointOfView for Second View
let pointOfView : SCNNode = (sceneView.pointOfView?.clone())!
// Determine Adjusted Position for Right Eye
let orientation : SCNQuaternion = pointOfView.orientation
let orientationQuaternion : GLKQuaternion = GLKQuaternionMake(orientation.x, orientation.y, orientation.z, orientation.w)
let eyePos : GLKVector3 = GLKVector3Make(1.0, 0.0, 0.0)
let rotatedEyePos : GLKVector3 = GLKQuaternionRotateVector3(orientationQuaternion, eyePos)
let rotatedEyePosSCNV : SCNVector3 = SCNVector3Make(rotatedEyePos.x, rotatedEyePos.y, rotatedEyePos.z)
let mag : Float = 0.066 // This is the value for the distance between two pupils (in metres). The Interpupilary Distance (IPD).
pointOfView.position.x += rotatedEyePosSCNV.x * mag
pointOfView.position.y += rotatedEyePosSCNV.y * mag
pointOfView.position.z += rotatedEyePosSCNV.z * mag
// Set PointOfView for SecondView
SceneView2.pointOfView = pointOfView
}
func addBox() {
let sideMaterial = SCNMaterial()
sideMaterial.diffuse.contents = UIColor.orange
sideMaterial.locksAmbientWithDiffuse = true;
let box = SCNBox(width: 0.3, height: 0.6, length: 0.1, chamferRadius: 0.005)
box.materials = [sideMaterial, sideMaterial, sideMaterial, sideMaterial, sideMaterial, sideMaterial]
let boxNode = SCNNode()
boxNode.geometry = box
boxNode.position = SCNVector3(0, 0, -0.2)
let scene = SCNScene()
scene.rootNode.addChildNode(boxNode)
parentNode = scene.rootNode
parentNode.position = SCNVector3(0, 0, -1.0)
sceneView.scene = scene
// Set up SceneView2 (Right Eye)
SceneView2.scene = scene
SceneView2.showsStatistics = sceneView.showsStatistics
SceneView2.isPlaying = true // Turn on isPlaying to ensure this ARSCNView receives updates.
}
func movebox() {
DispatchQueue.main.async {
let rotate = SCNAction.rotateBy(x: 0, y: 5, z: 0, duration: 20)
let moveSequence = SCNAction.sequence([rotate])
let moveLoop = SCNAction.repeatForever(moveSequence)
self.parentNode.runAction(moveLoop)
}
}
}
I am trying to develop a game as a complete beginner. I set up a game scene that correctly references objects called taxiNode and BlockNode.
I now want to make things interactive and add an impulse to the taxiNode when tapping the BlockNode. For that I set up func interact() within my BlockNode class, but I cannot access my taxiNode.
Here is my code for the BlockNode class:
import SpriteKit
class BlockNode: SKSpriteNode, CustomNodeEvents, InteractiveNode {
func didMoveToScene() {
print("block added")
userInteractionEnabled = true
}
func interact() {
taxiNode.physicsBody!.applyForce(CGVectorMake(0, 400))
}
override func touchesEnded(touches: Set<UITouch>, withEvent event: UIEvent?) {
super.touchesEnded(touches, withEvent: event)
print("destroy block")
//interact()
}
}
My GameScene class looks like this:
import SpriteKit
struct PhysicsCategory {
static let None: UInt32 = 0
static let Taxi: UInt32 = 0b1 // 1
static let Block: UInt32 = 0b10 // 2
static let Edge: UInt32 = 0b100 // 4
/* static let Edge: UInt32 = 0b1000 // 8
static let Label: UInt32 = 0b10000 // 16
static let Spring:UInt32 = 0b100000 // 32
static let Hook: UInt32 = 0b1000000 // 64 */
}
protocol CustomNodeEvents {
func didMoveToScene()
}
protocol InteractiveNode {
func interact()
}
class GameScene: SKScene, SKPhysicsContactDelegate {
var taxiNode: TaxiNode!
override func didMoveToView(view: SKView) {
/* Setup your scene here */
// Calculate playable margin
let maxAspectRatio: CGFloat = 16.0/9.0 // iPhone 5
let maxAspectRatioHeight = size.width / maxAspectRatio
let playableMargin: CGFloat = (size.height - maxAspectRatioHeight)/2
let playableRect = CGRect(x: 0, y: playableMargin,
width: size.width, height: size.height-playableMargin*2)
physicsBody = SKPhysicsBody(edgeLoopFromRect: playableRect)
physicsWorld.contactDelegate = self
physicsBody!.categoryBitMask = PhysicsCategory.Edge
enumerateChildNodesWithName("//*", usingBlock: {node, _ in
if let customNode = node as? CustomNodeEvents {
customNode.didMoveToScene()
}
})
taxiNode = childNodeWithName("taxi") as! TaxiNode
}
override func touchesBegan(touches: Set<UITouch>, withEvent event: UIEvent?) {
}
override func update(currentTime: CFTimeInterval) {
/* Called before each frame is rendered */
}
}
And I get the following error within my BlockNode class:
"Use of unresolved identifier 'taxiNode'"
Does anyone have a clue what I need to fix to address the taxiNode and make it receive my impulse?
Look up variable scope to learn more.
Your BlockNode does not know what the taxi node is, nor should it.
What you need to do here is let your BlockNode know what the taxi is.
To do this, you have to pass it in.
First, establish the function correctly:
func interact(taxiNode : TaxiNode) {
taxiNode.physicsBody!.applyForce(CGVectorMake(0, 400))
}
Then when you need to interact:
blockNode.interact(taxiNode)
Make sure you fix your protocol to handle this.
protocol InteractiveNode {
func interact(taxiNode:TaxiNode)
}
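Since your BlockNode enables userInteractionEnabled and handles its own touches, one way to obtain the taxi to pass in is through the node's scene reference. A minimal sketch in the same Swift 2-era style as the question, assuming the block lives inside a GameScene:
override func touchesEnded(touches: Set<UITouch>, withEvent event: UIEvent?) {
    super.touchesEnded(touches, withEvent: event)
    // The block handles its own touches, so look the taxi up through the scene and pass it in
    if let gameScene = scene as? GameScene {
        interact(gameScene.taxiNode)
    }
}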