Pass value from SwiftUI to ARKit dynamically - swift

I'm currently working on a personal project using ARKit with Swift.
In this app, a skeleton appears over a detected body and tracks the body's motion.
What I want to know is how to change the position of the skeleton dynamically by dragging a slider.
The position of the skeleton is defined in the ARKit class ARDelegateHandler as self.characterOffset = [1, 0, 0], which means 1 m to the right of the detected body.
I want to change characterOffset's x-axis value with a slider in SwiftUI (something like self.characterOffset = [x, 0, 0]).
Here is my code.
ARViewContainer.swift
import SwiftUI
import RealityKit
import ARKit
import Combine
struct ARViewContainer: UIViewRepresentable {
    let characterAnchor = AnchorEntity()
    @Binding var offsetValue: Float
    @ObservedObject var offsetInstance = Offset()

    func makeUIView(context: Context) -> ARView {
        let arView = ARView(frame: .zero)
        arView.session.delegate = context.coordinator
        return arView
    }

    func updateUIView(_ uiView: ARView, context: Context) {
        let configuration = ARBodyTrackingConfiguration()
        uiView.session.run(configuration)
        uiView.scene.addAnchor(characterAnchor)
        print(offsetValue)
    }

    func makeCoordinator() -> ARDelegateHandler {
        ARDelegateHandler(self, anchor: characterAnchor)
    }

    class ARDelegateHandler: NSObject, ARSessionDelegate {
        var arVC: ARViewContainer
        let characterAnchor: AnchorEntity
        var character: BodyTrackedEntity?
        var characterOffset: SIMD3<Float>

        init(_ control: ARViewContainer, anchor: AnchorEntity) {
            self.arVC = control
            self.characterAnchor = anchor
            self.characterOffset = [1, 0, 0]
            super.init()
            setSkeleton()
        }

        func setSkeleton() {
            var cancellable: AnyCancellable? = nil
            cancellable = Entity.loadBodyTrackedAsync(named: "character/robot").sink(
                receiveCompletion: { completion in
                    if case let .failure(error) = completion {
                        print("Error: Unable to load model: \(error.localizedDescription)")
                    }
                    cancellable?.cancel()
                }, receiveValue: { (character: Entity) in
                    if let character = character as? BodyTrackedEntity {
                        character.scale = [1, 1, 1]
                        self.character = character
                        cancellable?.cancel()
                    } else {
                        print("Error: Unable to load model as BodyTrackedEntity")
                    }
                })
        }

        func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) {
            for anchor in anchors {
                guard let bodyAnchor = anchor as? ARBodyAnchor else { continue }
                let bodyPosition = simd_make_float3(bodyAnchor.transform.columns.3)
                characterAnchor.position = bodyPosition + characterOffset
                characterAnchor.orientation = Transform(matrix: bodyAnchor.transform).rotation
                if let character = character, character.parent == nil {
                    characterAnchor.addChild(character)
                }
            }
        }
    }
}
ContentView.swift (the user is expected to move the slider, which changes offsetValue; SlideView is already implemented)
import SwiftUI
import RealityKit
import ARKit
import Combine
struct ContentView : View {
    @State var offsetValue: Float = 0.0
    @ObservedObject var offsetInstance = Offset()

    var body: some View {
        VStack {
            Button(action: { self.offsetInstance.setOffset(offset: 1.0) },
                   label: { Text("check") })
            ZStack(alignment: .bottom) {
                ARViewContainer(offsetValue: $offsetValue)
                SlideView(offSetValue: $offsetValue)
            }
        }
    }
}
Thank you very much for your help!
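One way to get the slider value into the coordinator (a sketch against the code above, not a confirmed fix): let updateUIView forward the bound value, since the coordinator persists across SwiftUI updates. Moving the session setup into makeUIView also avoids re-running the configuration on every update:

func makeUIView(context: Context) -> ARView {
    let arView = ARView(frame: .zero)
    arView.session.delegate = context.coordinator
    arView.session.run(ARBodyTrackingConfiguration())   // configure once
    arView.scene.addAnchor(characterAnchor)
    return arView
}

func updateUIView(_ uiView: ARView, context: Context) {
    // The coordinator outlives this struct, so the next
    // session(_:didUpdate:) callback sees the new offset.
    context.coordinator.characterOffset = [offsetValue, 0, 0]
}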

Related

How to record ARView Camera Position and Rotation over time and save it to a file

I have been trying for over two days now to create an ARView that can record the position of the camera in space over time and then save it to a keyframe file. Basically, I want to create an app that lets you record virtual camera movements that can then be used in 3D applications like Autodesk Maya or Cinema 4D to drive a camera. Preferred file outputs would be anything that can hold a camera object and animate it over time (alternatively, an object that moves over time that I can then parent my camera to).
Here is the code I have, sorry for it being a bit chaotic, I have tried a LOT of different things... Basically, I try to record the device position and rotation and then save it to an MDL object, but somehow it doesn't animate. I have also tried multiple different file types (some of those didn't support keyframe animation, so that didn't help, but from what I understand, Alembic does).
import SwiftUI
import ARKit
import RealityKit
import ModelIO
struct ARViewContainer: UIViewRepresentable {
    let session = ARSession()
    let delegate = MySessionDelegate()

    func makeUIView(context: Context) -> ARView {
        // Set up the ARView with session
        let arView = ARView(frame: .zero)
        let boxAnchor = try! Experience.loadBox()
        arView.scene.anchors.append(boxAnchor)
        arView.session.delegate = delegate // assign delegate to the session
        return arView
    }

    func updateUIView(_ uiView: ARView, context: Context) {
        // Update the ARView if needed
    }

    func startARSession() {
        // Start the ARSession
        let configuration = ARWorldTrackingConfiguration()
        configuration.planeDetection = [.horizontal, .vertical]
        session.run(configuration, options: [])
    }

    func stopARSession() {
        // Stop the ARSession
        session.pause()
    }
}
class MySessionDelegate: NSObject, ARSessionDelegate {
    var object: MDLMesh?
    let asset = MDLAsset()
    let cameraTransform = MDLTransform()
    var documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!

    func session(_ session: ARSession, didUpdate frame: ARFrame) {
        // Get the camera position and orientation for the current frame
        let transform = frame.camera.transform
        let rotation = frame.camera.eulerAngles
        let position = transform.columns.3
        let elapsedTime = frame.timestamp
        cameraTransform.setTranslation(position[SIMD3(0,1,2)], forTime: elapsedTime)
        cameraTransform.setRotation(rotation, forTime: elapsedTime)
        print("Camera Transform: \(cameraTransform.matrix)")
    }
}
struct Camera: View {
    var body: some View {
        VStack {
            ARViewContainer().onAppear(perform: ARViewContainer().startARSession)
                .onDisappear(perform: ARViewContainer().stopARSession)
            Button("Export Recording") {
                // Create an MDLAsset with a box representing the camera transform
                let object = MDLMesh(boxWithExtent: .init(0.1, 0.1, 0.1), segments: .init(10, 10, 10), inwardNormals: false, geometryType: .triangles, allocator: nil)
                object.name = "Camera Transform"
                object.transform = MySessionDelegate().cameraTransform
                let asset = MDLAsset()
                asset.add(object)
                // Export the MDLAsset to a file
                let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!
                let fileURL = documentsDirectory.appendingPathComponent("recording.abc")
                try! asset.export(to: fileURL)
            }
        }
    }
}
If there is a completely different way of doing it, please also share, I thank everybody in advance for any help!
Recording sixty 4x4 Transform Matrices per second
To write the data to a text file, I used JSON encoding of the full 4x4 transformation matrices (i.e. a complete transform, not just position). After tapping the Record Transform Values button, the data immediately begins to be written to the toMaya.txt file. For convenience, I put all 16 values of the matrix on the screen (I tested this on an iPad, so get a device with a bigger screen).
The nested lists in the toMaya.txt file can be easily read using regular Python or MEL scripting. Take a look at what the nesting looks like: each of the 16 matrix values is of type Float.
[x0, y0, z0, w0] is the first matrix column, [x1, y1, z1, w1] is the second matrix column, etc.
Here's the code:
import SwiftUI
import RealityKit
import Combine
struct ARViewContainer : UIViewRepresentable {
    @Binding var arView: ARView

    func makeUIView(context: Context) -> ARView { return arView }
    func updateUIView(_ view: ARView, context: Context) { }
}
struct ContentView : View {
    @State private var arView = ARView(frame: .zero)
    @State private var subs: [AnyCancellable] = []
    @State private var array: [[[Float]]] = [[ [1,0,0,0], [0,1,0,0],
                                               [0,0,1,0], [0,0,0,1] ]]
    let url = FileManager.default.urls(for: .documentDirectory,
                                       in: .userDomainMask)[0]
        .appendingPathComponent("toMaya.txt")

    var body: some View {
        ZStack {
            ARViewContainer(arView: $arView).ignoresSafeArea()
            HStack {
                VStack {
                    Text("\(array.last![0][0])").foregroundColor(.white)
                    Text("\(array.last![0][1])").foregroundColor(.white)
                    Text("\(array.last![0][2])").foregroundColor(.white)
                    Text("\(array.last![0][3])").foregroundColor(.white)
                }
                VStack {
                    Text("\(array.last![1][0])").foregroundColor(.white)
                    Text("\(array.last![1][1])").foregroundColor(.white)
                    Text("\(array.last![1][2])").foregroundColor(.white)
                    Text("\(array.last![1][3])").foregroundColor(.white)
                }
                VStack {
                    Text("\(array.last![2][0])").foregroundColor(.white)
                    Text("\(array.last![2][1])").foregroundColor(.white)
                    Text("\(array.last![2][2])").foregroundColor(.white)
                    Text("\(array.last![2][3])").foregroundColor(.white)
                }
                VStack {
                    Text("\(array.last![3][0])").foregroundColor(.white)
                    Text("\(array.last![3][1])").foregroundColor(.white)
                    Text("\(array.last![3][2])").foregroundColor(.white)
                    Text("\(array.last![3][3])").foregroundColor(.white)
                }
            }
            VStack {
                Button("Record Transform Values") {
                    DispatchQueue.main.async {
                        arView.scene.subscribe(to: SceneEvents.Update.self) { _ in
                            let col = arView.cameraTransform.matrix.columns
                            let mtx: [[Float]] = [
                                [col.0.x, col.0.y, col.0.z, col.0.w],
                                [col.1.x, col.1.y, col.1.z, col.1.w],
                                [col.2.x, col.2.y, col.2.z, col.2.w],
                                [col.3.x, col.3.y, col.3.z, col.3.w]
                            ]
                            array.append(mtx)
                            if let data = try? JSONEncoder().encode(self.array) {
                                guard let str = String(data: data, encoding: .ascii)
                                else { return }
                                do {
                                    try str.write(to: url,
                                                  atomically: true,
                                                  encoding: .ascii)
                                    print(url)
                                } catch {
                                    print(error.localizedDescription)
                                }
                            }
                        }.store(in: &subs)
                    }
                }
                Spacer()
            }
            VStack {
                Spacer()
                Text("\(array.count)").foregroundColor(.white)
            }
        }
    }
}
My toMaya.txt file is waiting for me in the following debugging directory:
file:///var/mobile/Containers/Data/Application/7C675F52-C78B-4252-98B5-3EBD37A3F832/Documents/toMaya.txt
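If you want to sanity-check the nesting from the Swift side (the Python or MEL scripts just parse the same JSON), here is a minimal decoding sketch, mirroring the [[[Float]]] type used above:

import Foundation

// Read the recorded matrices back from toMaya.txt.
// Each element is one 4x4 transform stored as four columns of four Floats.
let url = FileManager.default.urls(for: .documentDirectory,
                                   in: .userDomainMask)[0]
    .appendingPathComponent("toMaya.txt")

if let data = try? Data(contentsOf: url),
   let matrices = try? JSONDecoder().decode([[[Float]]].self, from: data) {
    print("Recorded \(matrices.count) transforms")
    // The fourth column [x3, y3, z3, w3] carries the camera translation.
    if let last = matrices.last {
        print("Last camera position:", last[3][0], last[3][1], last[3][2])
    }
}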

Can I use .playAudio() to resume playback after stopping?

Following up on If I assign a sound in Reality Composer, can I stop it programmatically in RealityKit?, I would like to use a method to resume playback after stopping the music.
Can I do that?
For now, I use this command in my stopAudio function to stop the music:
func stopAudio() {
    if arView.scene.anchors.count > 0 {
        if arView.scene.anchors[0].isAnchored {
            arView.scene.anchors[0].children[0].stopAllAudio()
        }
    }
}
If I want arView.scene.anchors[0] to replay the music again, which command should I use?
Audio Playback Controller
Since RealityKit 2.0 isn't able to control the parameters of Reality Composer's behaviors, the best strategy for controlling audio is to create a programmatic AudioPlaybackController. To feed your audio file to the controller, export the .rcproject scene to .usdz format and use the unzipping trick to extract the .aiff, .caf or .mp3 sound file. When loading the audio for playback, you can choose between a spatial and a non-spatial audio experience.
UIKit version
import UIKit
import RealityKit
extension ViewController {
    private func loadAudio() {
        do {
            let resource = try AudioFileResource.load(
                named: "planetarium07.caf",
                in: nil,
                inputMode: .spatial,
                loadingStrategy: .preload,
                shouldLoop: true)
            self.controller = entity.prepareAudio(resource)
            self.controller?.speed = 0.9
            self.controller?.fade(to: .infinity, duration: 2)
        } catch {
            print(error.localizedDescription)
        }
    }
}
ViewController.
class ViewController : UIViewController {
    @IBOutlet var uiView: UIView!     // when using @IBAction buttons
    @IBOutlet var arView: ARView!
    private var entity = Entity()
    private var controller: AudioPlaybackController? = nil

    override func viewDidLoad() {
        super.viewDidLoad()
        uiView.backgroundColor = .systemCyan
        let boxScene = try! Experience.loadBox()
        arView.scene.anchors.append(boxScene)
        let anchor = boxScene.anchor
        anchor?.addChild(entity)
        self.loadAudio()
    }

    @IBAction func playMusic(_ sender: UIButton) {
        self.controller?.play()
    }

    @IBAction func stopMusic(_ sender: UIButton) {
        self.controller?.pause()      // pause() keeps the position, so play() resumes
        // self.controller?.stop()    // stop() would restart playback from the beginning
    }
}
SwiftUI version
import SwiftUI
import RealityKit
struct ContentView : View {
    @State var arView = ARView(frame: .zero)
    @State var controller: AudioPlaybackController? = nil
    @State var entity = Entity()

    var body: some View {
        ZStack {
            ARViewContainer(arView: $arView,
                            entity: $entity).ignoresSafeArea()
            VStack {
                Spacer()
                Button("Play") { loadSound(); controller?.play() }
                Button("Stop") { controller?.stop() }
            }
        }
    }

    func loadSound() {
        do {
            let resource = try AudioFileResource.load(
                named: "planetarium07.caf",
                in: nil,
                inputMode: .spatial,
                loadingStrategy: .preload,
                shouldLoop: true)
            self.controller = entity.prepareAudio(resource)
        } catch {
            print(error.localizedDescription)
        }
    }
}
ARViewContainer.
struct ARViewContainer: UIViewRepresentable {
    @Binding var arView: ARView
    @Binding var entity: Entity

    func makeUIView(context: Context) -> ARView {
        let boxScene = try! Experience.loadBox()
        arView.scene.anchors.append(boxScene)
        let anchor = boxScene.anchor
        anchor?.addChild(entity)
        return arView
    }

    func updateUIView(_ view: ARView, context: Context) { }
}

How to open a SwiftUI view by tapping an entity in RealityKit?

I am using SwiftUI with RealityKit. As displayed in the code below, I have a plane entity that when tapped simply prints the name of the entity. What approach should I take toward navigating to a new view when I tap the entity? It would be preferable to navigate as with a navigation link in a normal view, but if that is not possible then perhaps a fullScreenCover?
ARViewContainer.swift:
class Coordinator: NSObject {
    weak var view: ARView?

    @objc func handleTap(_ recognizer: UITapGestureRecognizer) {
        guard let view = self.view else { return }
        let tapLocation = recognizer.location(in: view)
        if let entity = view.entity(at: tapLocation) as? ModelEntity {
            print(entity.name)
        }
    }
}
struct ARViewContainer: UIViewRepresentable {
    typealias UIViewType = ARView

    func makeUIView(context: Context) -> ARView {
        let arView = ARView(frame: .zero, cameraMode: .ar, automaticallyConfigureSession: true)
        context.coordinator.view = arView
        arView.addGestureRecognizer(UITapGestureRecognizer(target: context.coordinator,
                                                           action: #selector(Coordinator.handleTap)))
        arView.scene.anchors.removeAll()
        let anchor = AnchorEntity()
        let plane = MeshResource.generatePlane(width: 1, height: 1)
        var material = UnlitMaterial()
        material.color = .init(tint: .white,
                               texture: .init(try! .load(named: "instagram")))
        let planeEntity = ModelEntity(mesh: plane, materials: [material])
        planeEntity.generateCollisionShapes(recursive: true)
        planeEntity.name = "Plane Entity"
        planeEntity.position.z -= 1.0
        planeEntity.setParent(anchor)
        arView.scene.addAnchor(anchor)
        return arView
    }

    func updateUIView(_ uiView: ARView, context: Context) {
    }

    func makeCoordinator() -> Coordinator {
        Coordinator()
    }
}
ContentView.swift
struct ContentView: View {
    @State var open = false

    var body: some View {
        NavigationView {
            ZStack {
                ARViewContainer()
                    .ignoresSafeArea(.all)
            }
        }
    }
}
View I want to navigate to:
struct TestView : View {
    var body : some View {
        VStack {
            Text("Test View")
        }
    }
}
Manage the state of the view in an observable object and modify it from your AR view.
struct ContentView: View {
    @ObservedObject var settings = Settings.shared

    var body: some View {
        NavigationView {
            ZStack {
                ARViewContainer()
                    .ignoresSafeArea(.all)
                NavigationLink("", isActive: $settings.shouldOpenDetailsView) {
                    TestView()
                }
            }
        }
    }
}

class Settings: ObservableObject {
    static let shared = Settings()
    @Published var shouldOpenDetailsView = false
}
class Coordinator: NSObject {
    weak var view: ARView?

    @objc func handleTap(_ recognizer: UITapGestureRecognizer) {
        guard let view = self.view else { return }
        let tapLocation = recognizer.location(in: view)
        if let entity = view.entity(at: tapLocation) as? ModelEntity {
            Settings.shared.shouldOpenDetailsView = true
        }
    }
}
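If the destination view also needs to know which entity was tapped, the same shared object can carry that along. This is a hypothetical extension of the answer's Settings class, not part of the original answer:

class Settings: ObservableObject {
    static let shared = Settings()
    @Published var shouldOpenDetailsView = false
    @Published var tappedEntityName = ""   // hypothetical extra field

}

// In Coordinator.handleTap, before flipping the flag:
// Settings.shared.tappedEntityName = entity.name
// Settings.shared.shouldOpenDetailsView = true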

Method `enumerateChildNodes` is not finding nodes

I'm developing an ARKit app that has 2 buttons: "Play" and "Reset". When I click Play, a pyramid node is displayed. When I click Reset, all pyramid nodes should be deleted from the scene.
Another requirement is that I want to disable the Play button once I have clicked it.
Here is my ContentView code:
import ARKit
struct ContentView: View {
    @State var node = SCNNode()
    @State var play: Bool = false
    @State var reset: Bool = false
    @State var isDisabled: Bool = false
    @State var showBanner: Bool = true

    var body: some View {
        ZStack {
            SceneKitView(node: $node, play: $play, reset: $reset, isDisabled: $isDisabled)
            VStack {
                Spacer()
                HStack {
                    Button(action: {
                        play = true
                    }) {
                        Image("Play")
                    }.padding()
                    .disabled(isDisabled) /// <<<<< THIS DISABLES THE BUTTON
                    Spacer()
                    Button(action: {
                        play = false
                        reset = true
                    }) {
                        Image("Reset")
                    }.padding()
                }
            }
        }
    }
}

struct ContentView_Previews: PreviewProvider {
    static var previews: some View {
        ContentView()
    }
}
To disable the Play button, I'm using .disabled with the Boolean isDisabled.
My code changes the value of isDisabled to true in a Coordinator:
import ARKit
import SwiftUI
struct SceneKitView: UIViewRepresentable {
    let arView = ARSCNView(frame: .zero)
    let config = ARWorldTrackingConfiguration()
    @Binding var node: SCNNode
    @Binding var play: Bool
    @Binding var reset: Bool
    @Binding var isDisabled: Bool

    func makeCoordinator() -> Coordinator {
        Coordinator(self)
    }

    final class Coordinator: NSObject, ARSCNViewDelegate {
        var control: SceneKitView

        init(_ control: SceneKitView) {
            self.control = control
        }

        func renderer(_ renderer: SCNSceneRenderer, willRenderScene scene: SCNScene, atTime time: TimeInterval) {
            DispatchQueue.main.async {
                if self.control.play {
                    self.control.addNode()
                    self.control.play = false
                    self.control.isDisabled = true // HERE I UPDATE THE VALUE !!!!!!!!
                    self.control.reset = false
                } else {
                }
            }
        }
    }

    func removeNode() {
        self.arView.scene.rootNode.enumerateChildNodes { (node, _) in
            print("existing nodes = \(node)")
            node.removeFromParentNode()
        }
    }

    func updateUIView(_ uiView: ARSCNView,
                      context: Context) {
        if self.reset {
            self.removeNode()
            print("game reseted")
        }
    }

    func makeUIView(context: Context) -> ARSCNView {
        arView.scene = SCNScene()
        arView.delegate = context.coordinator
        arView.autoenablesDefaultLighting = true
        //arView.debugOptions = [ARSCNDebugOptions.showFeaturePoints, ARSCNDebugOptions.showWorldOrigin]
        arView.showsStatistics = true
        arView.session.run(self.config)
        return arView
    }

    func addNode() {
        let pyramid = SCNNode(geometry: SCNPyramid(width: 0.1, height: 0.1, length: 0.1))
        pyramid.geometry?.firstMaterial?.diffuse.contents = UIColor.red
        pyramid.geometry?.firstMaterial?.specular.contents = UIColor.white
        pyramid.position = SCNVector3(0, 0, -0.5)
        self.arView.scene.rootNode.addChildNode(pyramid)
    }
}
Problem: When the app is running and I click RESET, the removeNode method is invoked. This method uses enumerateChildNodes to find the nodes and delete them with removeFromParentNode, but the pyramid node is not removed! :(
The crazy thing (for me) is that if I don't change the value of isDisabled in the Coordinator, i.e. I comment out that line, removeNode works and the node is removed!!!!!
Any comment or suggestion is welcome :)
This solves the issue. The likely reason: changing isDisabled makes SwiftUI re-create the SceneKitView struct, and with it a fresh, never-installed ARSCNView, so self.removeNode() inside updateUIView enumerates the wrong scene. Going through context.coordinator.control reaches the instance whose ARSCNView is actually on screen, and DispatchQueue.main.async defers the state change until after the current update pass:
func updateUIView(_ uiView: ARSCNView, context: Context) {
    if self.reset {
        DispatchQueue.main.async {
            context.coordinator.control.removeNode()
            context.coordinator.control.isDisabled = false
        }
        print("Game Reset")
    }
}

How can I update the Observable Object from an ARView extension in SwiftUI?

I am trying to update the globalDataTransfer class from the ARView extension and reflect the changes in my view. Below is the globalDataTransfer class:
class globalDataTransfer: ObservableObject {
    @Published var val: String = "No Value"

    required init(any: String) {
        self.val = any
    }

    func check() -> String {
        return self.val
    }
}
My ARViewContainer
struct ARViewContainer: UIViewRepresentable {
    typealias UIViewType = ARView

    func makeUIView(context: UIViewRepresentableContext<ARViewContainer>) -> ARView {
        let arView = ARView(frame: .zero, cameraMode: .ar, automaticallyConfigureSession: true)
        arView.setupForBodyTracking()
        arView.scene.addAnchor(bodySkeletonAnchor)
        return arView
    }

    func updateUIView(_ uiView: ARView, context: Context) {
    }
}
My ARView extension is
extension ARView: ARSessionDelegate {
    func setupForBodyTracking() {
        let config = ARBodyTrackingConfiguration()
        self.session.run(config)
        self.session.delegate = self
    }

    public func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) {
        for anchor in anchors {
            if let bodyAnchor = anchor as? ARBodyAnchor {
                if let skeleton = bodySkeleton {
                    skeleton.update(with: bodyAnchor)
                } else {
                    let skeleton = BodySkeleton(for: bodyAnchor)
                    bodySkeleton = skeleton
                    bodySkeletonAnchor.addChild(skeleton)
                }
                var Val: String = calcAngle(anchor: bodyAnchor) // Function that returns joint angles and lengths
            }
        }
    }
}
ContentView:
struct ArView: View {
    @ObservedObject var forGlobalValue: globalDataTransfer = globalDataTransfer(any: "No Value")

    var body: some View {
        ZStack {
            ARViewContainer()
            VStack {
                Text(forGlobalValue.val)
                Text(" This is \(test(any: forGlobalValue))")
            }
        }
    }
}
Can anyone let me know how to pass this observable object to the ARView, or is there any way to get the Val variable out of the ARView extension and update it regularly in my view's Text?
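Since an extension can't add stored properties to ARView, one common workaround is to publish through a shared instance, along the lines of the Settings.shared pattern in the earlier answer. A minimal sketch, where GlobalDataTransfer.shared is a hypothetical rename of the question's class and calcAngle is assumed to be the question's own helper:

class GlobalDataTransfer: ObservableObject {
    static let shared = GlobalDataTransfer()
    @Published var val: String = "No Value"
}

extension ARView: ARSessionDelegate {
    func setupForBodyTracking() {
        let config = ARBodyTrackingConfiguration()
        self.session.run(config)
        self.session.delegate = self
    }

    public func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) {
        for anchor in anchors {
            guard let bodyAnchor = anchor as? ARBodyAnchor else { continue }
            let angle = calcAngle(anchor: bodyAnchor)   // the question's own helper
            DispatchQueue.main.async {                  // @Published must change on the main thread
                GlobalDataTransfer.shared.val = angle
            }
        }
    }
}

// The view then observes the shared instance instead of creating its own:
// @ObservedObject var forGlobalValue = GlobalDataTransfer.shared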