Is there any way to load a .usdz model into a custom entity class?
I tried to cast the returned ModelEntity to my custom class but it didn't work out.
// Per the question: this forced downcast crashes/fails — presumably because
// load(named:) does not vend a CustomEntity instance — TODO confirm.
let entity: CustomEntity = try! CustomEntity.load(named: name) as! CustomEntity
UIKit version
import UIKit
import RealityKit
// Wraps a .usdz asset (loaded as a ModelEntity) under an anchor owned by
// a custom Entity subclass.
class CustomClass: Entity, HasModel {

    let modelName: String? = "gramophone"
    let myAnchor = AnchorEntity()

    /// Loads the named .usdz as a ModelEntity, parents it to `myAnchor`,
    /// and returns that anchor for insertion into an ARView's scene.
    func loader() -> AnchorEntity {
        guard let assetName = modelName else { return myAnchor }
        // `try!` assumes the asset ships in the bundle; it crashes if missing.
        let loadedModel = try! CustomClass.loadModel(named: assetName)
        myAnchor.addChild(loadedModel)
        return myAnchor
    }
}
class ViewController: UIViewController {

    // Fixed: the outlet attribute is spelled `@IBOutlet`, not `#IBOutlet`.
    @IBOutlet var arView: ARView!

    let modelName: String? = "gramophone"

    override func viewDidLoad() {
        super.viewDidLoad()
        // Anchor containing the gramophone model, built by the custom entity class.
        let usdz = CustomClass().loader()
        arView.scene.anchors.append(usdz)
    }
}
SwiftUI version:
import SwiftUI
import RealityKit
// Minimal custom Entity subclass; printer() confirms at runtime that the
// custom class is actually in use (called from ARViewContainer.updateUIView).
class CustomClass: Entity, HasModel {
// Prints a marker line to the console.
func printer() {
print("I'm inside CustomClass...")
}
}
// Demonstrates the `typealias` workaround: `CustomEntity` is merely another
// name for ModelEntity, so loadModel(named:) can populate it directly.
struct ARViewContainer: UIViewRepresentable {

    let modelName: String? = "gramophone"

    func makeUIView(context: Context) -> ARView {
        let arView = ARView(frame: .zero)

        // Alias only — under the hood this is still a ModelEntity.
        typealias CustomEntity = ModelEntity
        var loadedModel = CustomEntity()

        if let assetName = modelName {
            // `try!` assumes the .usdz asset is part of the bundle.
            loadedModel = try! CustomClass.loadModel(named: assetName)
            let modelAnchor = AnchorEntity()
            modelAnchor.addChild(loadedModel)
            arView.scene.anchors.append(modelAnchor)
        }
        return arView
    }

    func updateUIView(_ uiView: ARView, context: Context) {
        // A fresh instance is created on every SwiftUI update — it only
        // prints the marker line.
        CustomClass().printer()
    }
}
Related
Following up on the question "If I assign a sound in Reality Composer, can I stop it programmatically in RealityKit?", I would like a method to resume playback after stopping the music.
Can I do that?
Now, I use this command in stopAudio function to stop the music.
/// Stops every audio resource playing on the scene's first anchored entity.
/// Replaces the nested count/index checks with a single guard; also avoids
/// the out-of-bounds crash the original `children[0]` caused when the anchor
/// had no children.
func stopAudio() {
    guard let firstAnchor = arView.scene.anchors.first,
          firstAnchor.isAnchored,
          let firstChild = firstAnchor.children.first else { return }
    firstChild.stopAllAudio()
}
If I want arView.scene.anchors[0] to replay the music again, which command should I use?
Audio Playback Controller
Since RealityKit 2.0 isn't able to control parameters of Reality Composer's behaviors, the best strategy for controlling audio is to create a programmatic AudioPlaybackController. To feed your audio file to the controller, export .rcproject scene to .usdz format and use unzipping trick to extract the .aiff, .caf or .mp3 sound file. When loading audio for playback, you can choose between spatial and non-spatial audio experience.
UIKit version
import UIKit
import RealityKit
extension ViewController {

    /// Prepares (but does not start) playback of the bundled audio file and
    /// stores the resulting controller in `self.controller`.
    /// Errors are reported to the console rather than thrown.
    private func loadAudio() {
        do {
            let audioResource = try AudioFileResource.load(named: "planetarium07.caf",
                                                           in: nil,
                                                           inputMode: .spatial,
                                                           loadingStrategy: .preload,
                                                           shouldLoop: true)
            let playback = entity.prepareAudio(audioResource)
            playback.speed = 0.9
            playback.fade(to: .infinity, duration: 2)
            controller = playback
        } catch {
            print(error.localizedDescription)
        }
    }
}
ViewController.
class ViewController : UIViewController {

    // Fixed: outlet/action attributes are spelled with `@`, not `#`.
    @IBOutlet var uiView: UIView!   // when using @IBAction buttons
    @IBOutlet var arView: ARView!

    // Entity that hosts the prepared audio; controller drives playback.
    private var entity = Entity()
    private var controller: AudioPlaybackController? = nil

    override func viewDidLoad() {
        super.viewDidLoad()
        uiView.backgroundColor = .systemCyan

        let boxScene = try! Experience.loadBox()
        arView.scene.anchors.append(boxScene)

        // Parent the audio-hosting entity to the Reality Composer anchor.
        let anchor = boxScene.anchor
        anchor?.addChild(entity)

        self.loadAudio()
    }

    @IBAction func playMusic(_ sender: UIButton) {
        self.controller?.play()
    }

    @IBAction func stopMusic(_ sender: UIButton) {
        // pause() keeps the position so play() resumes; stop() would reset.
        self.controller?.pause()
        // self.controller?.stop()
    }
}
SwiftUI version
import SwiftUI
import RealityKit
struct ContentView : View {

    // Fixed: property wrappers are spelled `@State`, not `#State`.
    @State var arView = ARView(frame: .zero)
    @State var controller: AudioPlaybackController? = nil
    @State var entity = Entity()

    var body: some View {
        ZStack {
            ARViewContainer(arView: $arView,
                            entity: $entity).ignoresSafeArea()
            VStack {
                Spacer()
                Button("Play") { loadSound(); controller?.play() }
                Button("Stop") { controller?.stop() }
            }
        }
    }

    /// Loads the bundled audio file and prepares a playback controller on `entity`.
    /// Errors are printed rather than thrown.
    func loadSound() {
        do {
            let resource = try AudioFileResource.load(
                named: "planetarium07.caf",
                in: nil,
                inputMode: .spatial,
                loadingStrategy: .preload,
                shouldLoop: true)
            self.controller = entity.prepareAudio(resource)
        } catch {
            print(error.localizedDescription)
        }
    }
}
ARViewContainer.
struct ARViewContainer: UIViewRepresentable {

    // Fixed: property wrappers are spelled `@Binding`, not `#Binding`.
    @Binding var arView: ARView
    @Binding var entity: Entity

    func makeUIView(context: Context) -> ARView {
        let boxScene = try! Experience.loadBox()
        arView.scene.anchors.append(boxScene)

        // Parent the audio entity to the Reality Composer scene's anchor.
        let anchor = boxScene.anchor
        anchor?.addChild(entity)

        return arView
    }

    func updateUIView(_ view: ARView, context: Context) { }
}
Simply trying to print a hello when I tap on an object in the Reality file I made in Reality Composer. Not able to set the link between Notify and in-app actions.
import SwiftUI
import RealityKit
// Overlays a "Level 1" label on top of the AR view.
struct ContentView: View {
var body: some View {
ZStack{
ARViewContainer()
Text("Level 1")
}
}
}
struct ARViewContainer : UIViewRepresentable {

    func makeUIView(context: Context) -> ARView {
        let arView = ARView(frame: .zero)

        // Fixed: declare the local handler before it is referenced, and fix the
        // parameter — the original `(_entity: Entity?)` never bound a local
        // called `entity`, so the guard inside did not compile.
        func handleTap(_ entity: Entity?) {
            guard entity != nil else { return }
            print("Hello")
        }

        let yellowEntity = try! ModelEntity.load(named: "Yellow")
        let anchorEntity = AnchorEntity(plane: .horizontal)
        anchorEntity.addChild(yellowEntity)
        arView.scene.addAnchor(anchorEntity)

        // NOTE(review): `.actions` is code-generated only for .rcproject scenes;
        // a plain entity loaded via ModelEntity.load(named:) has no such
        // property — TODO confirm (the answer below uses Experience.loadBox()).
        yellowEntity.actions.Yellowtapped.onAction = handleTap(_:)

        return arView
    }

    func updateUIView(_ uiView: ARView, context: Context) {
    }
}
If you use .rcproject all works perfectly:
// Working variant: with an .rcproject scene, Reality Composer generates the
// `actions` proxy, so the Notify trigger can be wired to Swift code.
struct ARViewContainer: UIViewRepresentable {

    func makeUIView(context: Context) -> ARView {
        let arView = ARView(frame: .zero)
        let boxScene = try! Experience.loadBox()

        // Wire the scene's "notifier" action to our callback.
        boxScene.actions.notifier.onAction = printer

        let planeAnchor = AnchorEntity(plane: .horizontal)
        planeAnchor.addChild(boxScene)
        arView.scene.addAnchor(planeAnchor)
        return arView
    }

    func updateUIView(_ uiView: ARView, context: Context) { }

    // Invoked when the Reality Composer behavior fires its notification.
    func printer(_ entity: Entity?) -> Void { print("Hello") }
}
P.S.
Do not forget to merge 2 actions together (in Reality Composer).
really stuck on this.
I'm trying to pass ARView from MakeUIView to makeCoordinator
I really need this to use ARView inside of @objc func handleTap.
struct ARViewContainer: UIViewRepresentable{

    func makeUIView(context: Context) -> ARView {
        let myARView = ARView(frame: .zero)
        //...config and things….
        let tapGesture = UITapGestureRecognizer(target: context.coordinator,
                                                action: #selector(context.coordinator.handleTap(_:)))
        myARView.addGestureRecognizer(tapGesture)
        return myARView
    }

    func makeCoordinator() -> Coordinator {
        // The problem being asked about: the ARView only exists inside
        // makeUIView, so there is nothing to pass here — hence the placeholder.
        Coordinator("whatshouldiusehere", self.$focusObject, self.$focusName)
    }

    class Coordinator: NSObject {
        private let view: ARView
        private var object: Binding<Entity?>
        private var objectname: Binding<String?>

        init(_ view: ARView, _ obj: Binding<Entity?>, _ objname: Binding<String?>) {
            self.objectname = objname
            self.object = obj
            self.view = view
            super.init()
        }

        // Fixed: the attribute is spelled `@objc`, not `#objc`.
        @objc func handleTap(_ sender: UIGestureRecognizer? = nil) {
            guard let touchInView = sender?.location(in: view) else {
                return
            }
            guard let hitEntity = view.entity(at: touchInView) else { return }
            // doing something with object here, assigning to @Binding for example
        }
    }
}
I can't move myARView = ARView(frame: .zero) outside of makeUIView, because I'm using SwiftUI and the struct is re-initialized every time a variable changes.
But how I can pass it in any way?
Or any other option to access Binding with ARView same time.
A coordinator is available via context, so you can inject it via property, like
// Answer: create the ARView first, then hand it to the coordinator through a
// mutable property — the coordinator is always reachable via `context`.
struct ARViewContainer: UIViewRepresentable{

    func makeUIView(context: Context) -> ARView {
        let myARView = ARView(frame: .zero)
        //...config and things….
        let tapGesture = UITapGestureRecognizer(target: context.coordinator,
                                                action: #selector(context.coordinator.handleTap(_:)))
        myARView.addGestureRecognizer(tapGesture)
        context.coordinator.view = myARView // << inject here !!
        return myARView
    }

    func makeCoordinator() -> Coordinator {
        Coordinator(self.$focusObject, self.$focusName)
    }

    class Coordinator: NSObject {
        // Optional initially; makeUIView fills it in right after creating the view.
        var view: ARView?
        private var object: Binding<Entity?>
        private var objectname: Binding<String?>

        init(_ obj: Binding<Entity?>, _ objname: Binding<String?>) {
            self.objectname = objname
            self.object = obj
            super.init()
        }

        // ... other code update accordingly
    }
}
I am trying to update the globalDataTransfer class from the ARView extension and reflect the changes onto ArView. Below is the globalDataTransfer class.
// NOTE(review): type names should be UpperCamelCase (GlobalDataTransfer), but
// the name is kept so existing call sites keep compiling.
class globalDataTransfer: ObservableObject{

    // Fixed: the property wrapper is spelled `@Published`, not `#Published`.
    @Published var val: String = "No Value"

    required init(any: String) {
        self.val = any
    }

    // Returns the current published value.
    func check() -> String {
        return self.val
    }
}
My ARViewContainer
// Hosts the body-tracking ARView inside SwiftUI.
struct ARViewContainer: UIViewRepresentable {
    typealias UIViewType = ARView

    func makeUIView(context: UIViewRepresentableContext<ARViewContainer>) -> ARView {
        let trackedView = ARView(frame: .zero,
                                 cameraMode: .ar,
                                 automaticallyConfigureSession: true)
        // Starts the body-tracking session (defined in the ARView extension)
        // and attaches the shared skeleton anchor.
        trackedView.setupForBodyTracking()
        trackedView.scene.addAnchor(bodySkeletonAnchor)
        return trackedView
    }

    func updateUIView(_ uiView: ARView, context: Context) {
    }
}
My ARView extension is
extension ARView: ARSessionDelegate{

    /// Runs a body-tracking configuration with this view as the session delegate.
    func setupForBodyTracking() {
        let config = ARBodyTrackingConfiguration()
        self.session.run(config)
        self.session.delegate = self
    }

    public func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) {
        for anchor in anchors {
            guard let bodyAnchor = anchor as? ARBodyAnchor else { continue }

            if let skeleton = bodySkeleton {
                // Subsequent frames: move the existing skeleton.
                // Fixed: removed a stray dangling `var` token here that broke compilation.
                skeleton.update(with: bodyAnchor)
            } else {
                // First frame with a detected body: create and attach the skeleton.
                let skeleton = BodySkeleton(for: bodyAnchor)
                bodySkeleton = skeleton
                bodySkeletonAnchor.addChild(skeleton)
            }

            // Function that returns joint angles and lengths.
            // NOTE(review): this value is computed but never used — it still needs
            // to be pushed into the ObservableObject (the subject of the question).
            let val: String = calcAngle(anchor: bodyAnchor)
            _ = val
        }
    }
}
Content View :
struct ArView: View {

    // Fixed: the property wrapper is spelled `@ObservedObject`, not `#ObservedObject`.
    @ObservedObject var forGlobalValue: globalDataTransfer = globalDataTransfer(any: "No Value")

    var body: some View{
        ZStack{
            ARViewContainer()
            VStack{
                Text(forGlobalValue.val)
                // NOTE(review): `test(any:)` is not defined in the visible code — TODO confirm.
                Text(" This is \(test(any: forGlobalValue))")
            }
        }
    }
}
Can anyone let me know how to pass this observable object to the ARView, or whether there is any way of getting the Val variable from the ARView extension and updating my view's Text regularly?
I'm currently engaging personal project using ARKit with Swift.
In this app, a skeleton is appearing by detected body and it tracks body's motion.
What I want to know is, how to change the position of skeleton by touching a slider dynamically.
The position of skeleton is defined in ARKit class ARDelegateHandler such as self.characterOffset = [1, 0, 0] which means 1m right from a detected body.
I want to change the characterOffset's x-axis value by slider in SwiftUI.(something like self.characterOffset = [x, 0, 0])
Here is my code.
ARViewContainer.swift
import SwiftUI
import RealityKit
import ARKit
import Combine
struct ARViewContainer: UIViewRepresentable {

    let characterAnchor = AnchorEntity()

    // Fixed: property wrappers are spelled `@Binding` / `@ObservedObject`, not `#...`.
    @Binding var offsetValue: Float
    @ObservedObject var offsetInstance = Offset()

    func makeUIView(context: Context) -> ARView {
        let arView = ARView(frame: .zero)
        arView.session.delegate = context.coordinator
        return arView
    }

    func updateUIView(_ uiView: ARView, context: Context) {
        // NOTE(review): this re-runs the configuration and re-adds the anchor on
        // every SwiftUI update; consider moving both into makeUIView — TODO confirm.
        let configuration = ARBodyTrackingConfiguration()
        uiView.session.run(configuration)
        uiView.scene.addAnchor(characterAnchor)
        print(offsetValue)
    }

    func makeCoordinator() -> ARDelegateHandler {
        ARDelegateHandler(self, anchor: characterAnchor)
    }

    class ARDelegateHandler: NSObject, ARSessionDelegate {

        var arVC: ARViewContainer
        let characterAnchor: AnchorEntity
        var character: BodyTrackedEntity?
        // Offset added to the tracked body position (the value the slider should drive).
        var characterOffset: SIMD3<Float>

        init(_ control: ARViewContainer, anchor: AnchorEntity) {
            self.arVC = control
            self.characterAnchor = anchor
            self.characterOffset = [1, 0, 0]   // 1 m to the right of the detected body
            super.init()
            setSkeleton()
        }

        /// Asynchronously loads the robot model and stores it in `character`.
        func setSkeleton() {
            var cancellable: AnyCancellable? = nil
            cancellable = Entity.loadBodyTrackedAsync(named: "character/robot").sink(
                receiveCompletion: { completion in
                    if case let .failure(error) = completion {
                        print("Error: Unable to load model: \(error.localizedDescription)")
                    }
                    cancellable?.cancel()
                }, receiveValue: { (character: Entity) in
                    if let character = character as? BodyTrackedEntity {
                        character.scale = [1, 1, 1]
                        self.character = character
                        cancellable?.cancel()
                    } else {
                        print("Error: Unable to load model as BodyTrackedEntity")
                    }
                })
        }

        func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) {
            for anchor in anchors {
                guard let bodyAnchor = anchor as? ARBodyAnchor else { continue }
                // Place the character at the body position plus the configured offset.
                let bodyPosition = simd_make_float3(bodyAnchor.transform.columns.3)
                characterAnchor.position = bodyPosition + characterOffset
                characterAnchor.orientation = Transform(matrix: bodyAnchor.transform).rotation
                // Attach the loaded character once, on the first update after loading.
                if let character = character, character.parent == nil {
                    characterAnchor.addChild(character)
                }
            }
        }
    }
}
ContentView.swift (User is expected to touch slider and offsetValue would be changed. SlideView has been already implemented)
import SwiftUI
import RealityKit
import ARKit
import Combine
struct ContentView : View {

    // Fixed: property wrappers are spelled `@State` / `@ObservedObject`, not `#...`.
    @State var offsetValue: Float = 0.0
    @ObservedObject var offsetInstance = Offset()

    var body: some View {
        VStack{
            Button(action: { self.offsetInstance.setOffset(offset: 1.0) },
                   label: { Text("check") })
            ZStack(alignment: .bottom) {
                ARViewContainer(offsetValue: $offsetValue)
                SlideView(offSetValue: $offsetValue)
            }
        }
    }
}
Thank you very much for your help!