Trouble with object recognition in UITapGestureRecognizer - swift

I am trying to call the handleTap() function shown below and have the hit-test result pick up objects that live in another class that subclasses SCNScene:
override func viewDidLoad()
{
    super.viewDidLoad()
    ...
    // add a tap gesture recognizer
    let tapGesture = UITapGestureRecognizer(target: self, action: #selector(handleTap(_:)))
    self.sceneView.addGestureRecognizer(tapGesture)
}
// (HANDLES A TAP BY THE USER) NOT WORKING
@objc func handleTap(_ gestureRecognize: UIGestureRecognizer)
{
    print("tap")
    // retrieve the SCNView
    let scnView = self.view as! SCNView
    // check what nodes are tapped
    let p = gestureRecognize.location(in: scnView)
    let hitResults = scnView.hitTest(p, options: [:])
    // check that we clicked on at least one object
    if hitResults.count > 0
    {
        // retrieve the first clicked object
        let result = hitResults[0]
        // get its material (result is an SCNHitTestResult, so node is non-optional)
        let material = result.node.geometry!.firstMaterial!
        // highlight it
        SCNTransaction.begin()
        SCNTransaction.animationDuration = 0.2
        // (rest of the highlight code omitted in the question)
        SCNTransaction.commit()
    }
}
The function is called, but it is supposed to highlight a node that is located in another class, AppScene, which subclasses SCNScene:
class AppScene: SCNScene
{
//objects are located in here
}
I am trying to find a way to make the handleTap() function search for objects that are children of AppScene(), as it doesn't work right now.
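One possible direction, sketched under assumptions rather than taken from an accepted answer: the handler hit-tests self.view (force-cast to SCNView), which may not be the sceneView the recognizer was added to, and a hit test only finds nodes in whatever scene that view presents. Assuming sceneView.scene is set to an AppScene instance, hit-testing sceneView itself should reach AppScene's children (the red emission highlight is a hypothetical choice):
@objc func handleTap(_ gestureRecognize: UIGestureRecognizer)
{
    // hit-test the same view the recognizer is attached to
    let p = gestureRecognize.location(in: self.sceneView)
    let hitResults = self.sceneView.hitTest(p, options: [:])
    // nodes added under AppScene's rootNode show up here,
    // provided sceneView.scene is the AppScene instance
    guard let result = hitResults.first else { return }
    let material = result.node.geometry!.firstMaterial!
    SCNTransaction.begin()
    SCNTransaction.animationDuration = 0.2
    material.emission.contents = UIColor.red // hypothetical highlight color
    SCNTransaction.commit()
}
This only finds AppScene's nodes if sceneView.scene is actually the AppScene instance (e.g. sceneView.scene = AppScene() in viewDidLoad), since hitTest only searches the scene presented by that view.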


Collide RealityKit 3D model with LiDAR mesh

I spent many days trying to understand and follow examples, without success.
My goal is to place a virtual AR object in the real world previously scanned with the LiDAR. With showSceneUnderstanding I can see the real-time mesh being created; that's fine.
With a tap function I can insert a usdz file; that's also fine.
Because I have toyModel.physicsBody?.mode = .kinematic and self.arView.installGestures(for: toyRobot), I can move/scale the model.
Now I want to be able to move the model AND have it collide with the mesh generated by the LiDAR: when I move the model into a scanned wall, for example, it should be stopped by the mesh.
Here is my complete code:
import UIKit
import RealityKit
import ARKit

class ViewController: UIViewController, ARSessionDelegate {
    @IBOutlet var arView: ARView!
    var tapRecognizer = UITapGestureRecognizer()

    override func viewDidLoad() {
        super.viewDidLoad()
        self.arView.session.delegate = self
        // Scene Understanding options
        self.arView.environment.sceneUnderstanding.options.insert([.physics, .collision, .occlusion])
        // Only for dev
        self.arView.debugOptions.insert(.showSceneUnderstanding)
        self.tapRecognizer = UITapGestureRecognizer(target: self, action: #selector(placeObject(_:)))
        self.arView.addGestureRecognizer(self.tapRecognizer)
    }

    @objc func placeObject(_ sender: UITapGestureRecognizer) {
        // Perform a ray cast against the mesh (sceneUnderstanding)
        // Note: Ray-cast option ".estimatedPlane" with alignment ".any" also takes the mesh into account.
        let tapLocation = sender.location(in: arView)
        if let result = arView.raycast(from: tapLocation, allowing: .estimatedPlane, alignment: .any).first {
            // Load the "Toy robot"
            let toyRobot = try! ModelEntity.loadModel(named: "toy_robot_vintage.usdz")
            // Add gestures to the toy (only available if physicsBody mode == .kinematic)
            self.arView.installGestures(for: toyRobot)
            // Toy anchor to place the toy on the surface
            let toyAnchor = AnchorEntity(world: result.worldTransform)
            toyAnchor.addChild(toyRobot)
            // Create a "physics" model of the toy in order to add a physics mode
            guard let toyModel = toyAnchor.children.first as? HasPhysics else {
                return
            }
            // Because toyModel is a fresh new model we need to init physics
            toyModel.generateCollisionShapes(recursive: true)
            toyModel.physicsBody = .init()
            // Add the physics body mode
            toyModel.physicsBody?.mode = .kinematic
            let test = ShapeResource.generateConvex(from: toyRobot.model!.mesh)
            toyModel.components[CollisionComponent.self] = CollisionComponent(shapes: [test], mode: .default, filter: .default)
            // Finally add the toy anchor to the scene
            self.arView.scene.addAnchor(toyAnchor)
        }
    }
}
Does anyone know if it's possible to achieve that? Many thanks in advance!
Following the previous discussion with @AndyFedoroff, I added a convex raycast in order to always collide the placed 3D object with the mesh created by the LiDAR. Here is my full code. I don't know if I'm doing this right... In any case, it still doesn't work.
import UIKit
import RealityKit
import ARKit

class ViewController: UIViewController, ARSessionDelegate {
    @IBOutlet var arView: ARView!
    var tapRecognizer = UITapGestureRecognizer()
    var panGesture = UIPanGestureRecognizer()
    var panGestureEntity: Entity? = nil

    override func viewDidLoad() {
        super.viewDidLoad()
        self.arView.session.delegate = self
        // Scene Understanding options
        self.arView.environment.sceneUnderstanding.options.insert([.physics, .collision, .occlusion])
        // Only for dev
        self.arView.debugOptions.insert(.showSceneUnderstanding)
        self.tapRecognizer = UITapGestureRecognizer(target: self, action: #selector(self.placeObject))
        self.arView.addGestureRecognizer(self.tapRecognizer)
        self.panGesture = UIPanGestureRecognizer(target: self, action: #selector(self.didPan))
        self.arView.addGestureRecognizer(self.panGesture)
    }

    @objc func placeObject(_ sender: UITapGestureRecognizer) {
        // Perform a ray cast against the mesh (sceneUnderstanding)
        // Note: Ray-cast option ".estimatedPlane" with alignment ".any" also takes the mesh into account.
        let tapLocation = sender.location(in: arView)
        if let result = self.arView.raycast(from: tapLocation, allowing: .estimatedPlane, alignment: .any).first {
            // Load the "Toy robot"
            let toyRobot = try! ModelEntity.loadModel(named: "toy_robot_vintage.usdz")
            // Add gestures to the toy (only available if physicsBody mode == .kinematic)
            self.arView.installGestures(for: toyRobot)
            // Toy anchor to place the toy on the surface
            let toyAnchor = AnchorEntity(world: result.worldTransform)
            toyAnchor.addChild(toyRobot)
            // Create a "physics" model of the toy in order to add a physics mode
            guard let model = toyAnchor.children.first as? (Entity & HasPhysics)
            else { return }
            model.generateCollisionShapes(recursive: true)
            model.physicsBody = PhysicsBodyComponent(shapes: [.generateBox(size: .one)],
                                                     mass: 1.0,
                                                     material: .default,
                                                     mode: .kinematic)
            // Finally add the toy anchor to the scene
            self.arView.scene.addAnchor(toyAnchor)
        }
    }

    @objc public func didPan(_ sender: UIPanGestureRecognizer) {
        print(sender.state)
        let point = sender.location(in: self.arView)
        switch sender.state {
        case .began:
            if let entity = self.arView.hitTest(point).first?.entity {
                self.panGestureEntity = entity
            } else {
                self.panGestureEntity = nil
            }
        case .changed:
            if let entity = self.panGestureEntity {
                if let raycast = arView
                    .scene
                    .raycast(origin: .zero, direction: .one, length: 1, query: .all, mask: .all, relativeTo: entity)
                    .first
                {
                    print("hit", raycast.entity, raycast.distance)
                }
            }
        case .ended:
            self.panGestureEntity = nil
        default: break
        }
    }
}
Try downcasting to Entity & HasPhysics:
guard let model = anchor.children.first as? (Entity & HasPhysics)
else { return }

model.generateCollisionShapes(recursive: true)

model.physicsBody = PhysicsBodyComponent(shapes: [.generateBox(size: .one)],
                                         mass: 1.0,
                                         material: .default,
                                         mode: .kinematic)
Official documentation says:
For non-model entities, generateCollisionShapes(recursive:) method has no effect. Nevertheless, the method is defined for all entities so that you can call it on any entity, and have the calculation propagate recursively to all that entity’s descendants.
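A follow-up sketch under stated assumptions (it goes beyond the answer above, reusing toyRobot and model from the question): with mode: .kinematic the gestures move the entity directly, and a kinematic body is not pushed back by contacts, so it can slide through the scene-understanding mesh. If the robot should be stopped by a scanned wall, one option is to hand the body over to the physics engine by switching it to .dynamic once placement is done.
// assumption-laden sketch: convex shape of the robot plus a dynamic body,
// so the engine resolves contacts against the LiDAR mesh
// (requires .physics in arView.environment.sceneUnderstanding.options, set above)
let robotShape = ShapeResource.generateConvex(from: toyRobot.model!.mesh)
model.collision = CollisionComponent(shapes: [robotShape])
model.physicsBody = PhysicsBodyComponent(shapes: [robotShape],
                                         mass: 1.0,
                                         material: .default,
                                         mode: .dynamic) // dynamic bodies respond to collisions
Note that, as the question's own comment says, installGestures only works while the body is kinematic, so the mode would have to be toggled back to .kinematic while a drag is in flight.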

Gesture recognizer is not working on UIView

I'm trying to get a few gesture recognizers to work on a UIView. The UIView is in this case a retrieved SVG image; the library that I use is SwiftSVG.
But the added image is itself a UIView, so I think that is not the problem?
override func viewDidLoad() {
    super.viewDidLoad()
    let svgURL = URL(string: "https://openclipart.org/download/181651/manhammock.svg")!
    let hammock = UIView(SVGURL: svgURL) { (svgLayer) in
        svgLayer.fillColor = UIColor(red: 0.8, green: 0.16, blue: 0.32, alpha: 1.00).cgColor
        svgLayer.resizeToFit(self.v2imageview.bounds)
    }
    hammock.isUserInteractionEnabled = true
    let tap = UITapGestureRecognizer(target: self, action: #selector(self.handleTap(_:)))
    hammock.addGestureRecognizer(tap)
    self.view.addSubview(hammock)
}
// function that is triggered when the tap is recognized
@objc func handleTap(_ sender: UITapGestureRecognizer) {
    print("Hello World")
}
How can I make the recognizer work?
Thanks
You need to set a frame:
hammock.frame = ///
or constraints.
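A minimal sketch of that fix with a hypothetical hard-coded size; a zero-sized view is never hit-tested, so any non-zero frame (or constraints) will do:
// hypothetical frame; without a non-zero frame the view cannot receive touches
hammock.frame = CGRect(x: 0, y: 0, width: 300, height: 300)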

why is bringSubviewToFront() only working for panning, not tapping?

view.bringSubviewToFront(tappedView)
is working when I drag (pan) views but not when I tap them. My issue is that when I have one view layered over the other, I want the bottom view to come to the front when tapped. Currently it will only come to the front when dragged. Do I need some additional code to do this? Thanks.
Here's an excerpt from my code for more context:
@objc func didPan(_ recognizer: UIPanGestureRecognizer) {
    let location = recognizer.location(in: self.view)
    switch recognizer.state {
    case .began:
        currentTappedView = moviePieceView.filter { pieceView -> Bool in
            let convertedLocation = view.convert(location, to: pieceView)
            return pieceView.point(inside: convertedLocation, with: nil)
        }.first
        currentTargetView = movieTargetView.filter { $0.pieceView == currentTappedView }.first
    case .changed:
        guard let tappedView = currentTappedView else { return }
        let translation = recognizer.translation(in: self.view)
        tappedView.center = CGPoint(x: tappedView.center.x + translation.x, y: tappedView.center.y + translation.y)
        recognizer.setTranslation(.zero, in: view)
        view.bringSubviewToFront(tappedView)
    default:
        break
    }
}
I had this same problem, I managed to solve it this way:
Add a gestureRecognizer to your view and put this in your code. Don't forget to set the delegate as well when attaching this to the code!
@IBAction func tappedView1(recognizer: UITapGestureRecognizer) {
    view.bringSubviewToFront(myView)
}
Put this in your viewDidLoad:
let tap = UITapGestureRecognizer(target: self, action: #selector(tappedView1))
tap.cancelsTouchesInView = false
self.view.addGestureRecognizer(tap)
If you want to do that from your panGesture check my question.
Thank you all. It worked when I wrote a second function in addition to didPan().
This is the additional code:
override func viewDidLoad() {
    super.viewDidLoad()
    configureLabel()
    let panGestureRecognizer = UIPanGestureRecognizer(target: self, action: #selector(didPan(_:)))
    view.addGestureRecognizer(panGestureRecognizer)
    let tapGestureRecognizer = UITapGestureRecognizer(target: self, action: #selector(didTap(_:)))
    view.addGestureRecognizer(tapGestureRecognizer)
}

@objc func didTap(_ recognizer: UITapGestureRecognizer) {
    let location = recognizer.location(in: self.view)
    currentTappedView = moviePieceView.filter { pieceView -> Bool in
        let convertedLocation = view.convert(location, to: pieceView)
        return pieceView.point(inside: convertedLocation, with: nil)
    }.first
    guard let tappedView = currentTappedView else { return }
    view.bringSubviewToFront(tappedView)
}
HOWEVER, I found out you can also just tick the "User Interaction Enabled" box in the Image View if you don't want to do it programmatically.
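For reference, the programmatic equivalent of that checkbox is a one-liner (the image view name here is hypothetical); UIImageView has user interaction disabled by default:
// same effect as ticking "User Interaction Enabled" in Interface Builder
pieceImageView.isUserInteractionEnabled = true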

Function not called with selector action

I'm trying to handle a tap gesture in a class external to my UIViewController, but my function is never called. Here is my code:
import SceneKit
import UIKit

class SceneManager
{
    private let assetFolder: String
    private let mainCamera: SCNNode
    private let view: SCNView
    private let scene: SCNScene

    init(view: SCNView, assetFolder: String, sceneFile: String, mainCameraName: String, backgroundColor: UIColor) {
        self.assetFolder = assetFolder
        self.scene = SCNScene(named: (self.assetFolder + "/scene/" + sceneFile))!
        self.mainCamera = self.scene.rootNode.childNodeWithName(mainCameraName, recursively: true)!
        self.view = view
        self.view.backgroundColor = backgroundColor
        self.view.allowsCameraControl = false
        self.view.pointOfView = self.mainCamera
        self.view.scene = self.scene
        // PROBLEM BELOW
        let gesture = UITapGestureRecognizer(target: self, action: #selector(self.handleTap(_:)))
        self.view.addGestureRecognizer(gesture)
    }

    @objc func handleTap(sender: UITapGestureRecognizer) {
        print("hello")
    }
}
Here is my ViewController class:
import UIKit
import QuartzCore
import SceneKit

class ViewController: UIViewController
{
    override func viewDidLoad()
    {
        super.viewDidLoad()
        let view = self.view as! SCNView
        view.showsStatistics = true
        _ = SceneManager(view: view, assetFolder: "art.scnassets", sceneFile: "EURO_COPTER.dae", mainCameraName: "camera", backgroundColor: UIColor.blackColor())
    }
}
If someone has an idea, thanks in advance.
Try the following code, it may help:
let gesture = UITapGestureRecognizer(target: self, action: #selector(SceneManager.handleTap(_:)))
Try it this way:
When you declare your class, add UIGestureRecognizerDelegate after the class it subclasses. Here's what that looks like in my case:
let gesture = UITapGestureRecognizer(target: self, action: #selector(self.handleTap(_:)))
self.view.userInteractionEnabled = true
gesture.delegate = self
self.view.addGestureRecognizer(gesture)
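Something else worth checking, offered as an assumption about the root cause rather than a confirmed fix: a UITapGestureRecognizer does not retain its target, and the SceneManager above is created as _ = SceneManager(...) inside viewDidLoad, so it is deallocated as soon as viewDidLoad returns and the action message has no live target. Keeping a strong reference may be enough:
class ViewController: UIViewController
{
    // keep a strong reference so the gesture target outlives viewDidLoad
    private var sceneManager: SceneManager?

    override func viewDidLoad()
    {
        super.viewDidLoad()
        let view = self.view as! SCNView
        view.showsStatistics = true
        self.sceneManager = SceneManager(view: view, assetFolder: "art.scnassets", sceneFile: "EURO_COPTER.dae", mainCameraName: "camera", backgroundColor: UIColor.blackColor())
    }
}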

AVPlayerViewController - Adding Overlay Subviews

Depending upon the playback controls' visibility, how do I show and hide the custom subview added to contentOverlayView?
I want to do it the way the YouTube tvOS app does.
I tried UIPressesEvent, but it is not giving me the exact touch events. It is giving me these:
override func pressesBegan(presses: Set<UIPress>, withEvent event: UIPressesEvent?) {
    for item in presses {
        switch item.type {
        case .Menu:
            self.customViews.alpha = 0
        case .PlayPause:
            self.player?.pause()
            self.customViews.alpha = 0
        default:
            self.setVisibilityToPreviewView()
        }
    }
}

func setVisibilityToPreviewView() { // This won't work in all cases.
    if self.previewView.alpha == 1 {
        self.previewView.alpha = 0
    } else {
        self.previewView.alpha = 1
    }
}
But with these touch events I can only show and hide the subviews.
They should be hidden when the playback controls are hidden.
If I could get the playback controls' visibility value, I wouldn't need to worry about hiding these subviews myself.
Apple is using AVNowPlayingPlaybackControlsViewController, which is not open to developers.
So I need to find some other, better way to do this.
Please guide me on how to do it.
You can register a tap gesture recognizer and then set its allowedPressTypes property to UIPressType.Select, something like this:
let tapRecognizer = UITapGestureRecognizer(target: self, action: "onSelect:")
tapRecognizer.allowedPressTypes = [NSNumber(integer: UIPressType.Select.rawValue)];
self.view.addGestureRecognizer(tapRecognizer)
And inside your action method, show or hide your custom overlays.
Example: add this code inside a view controller, and on tap (pressing the touch area at the front of the remote) you will see a message in the console.
override func viewDidLoad() {
    super.viewDidLoad()
    let tapGesture = UITapGestureRecognizer(target: self, action: "onSelect")
    tapGesture.allowedPressTypes = [NSNumber(integer: UIPressType.Select.rawValue)]
    self.view.addGestureRecognizer(tapGesture)
}

func onSelect() {
    print("this is select gesture handler method")
}
Update: below is the code which creates an AVPlayerViewController and registers the tapGestureRecognizer on playerVC.view.
override func viewDidAppear(animated: Bool) {
    super.viewDidAppear(animated)
    playContent()
}

func playContent() {
    let urlString = "<contentURL>"
    guard let url = NSURL(string: urlString) else {
        return
    }
    let avPlayer = AVPlayer(URL: url)
    let playerVC = AVPlayerViewController()
    playerVC.player = avPlayer
    self.playerObj = playerVC
    let tapGesture = UITapGestureRecognizer(target: self, action: "onSelect")
    tapGesture.allowedPressTypes = [NSNumber(integer: UIPressType.Select.rawValue)]
    // attach the recognizer to the player's view only;
    // a gesture recognizer can belong to a single view at a time
    playerVC.view.addGestureRecognizer(tapGesture)
    self.presentViewController(playerVC, animated: true, completion: nil)
}
Give this a try, I think it should work fine.