Can I use .playAudio() to resume playback after stopping?

Following on from the question If I assign a sound in Reality Composer, can I stop it programmatically in RealityKit?, I would like to use a method to resume playback after stopping the music.
Can I do that?
Currently, I use this command in a stopAudio() function to stop the music.
func stopAudio() {
    if arView.scene.anchors.count > 0 {
        if arView.scene.anchors[0].isAnchored {
            arView.scene.anchors[0].children[0].stopAllAudio()
        }
    }
}
If I want arView.scene.anchors[0] to play the music again, which command should I use?

Audio Playback Controller
Since RealityKit 2.0 can't control the parameters of Reality Composer's behaviors, the best strategy for controlling audio is to create a programmatic AudioPlaybackController. To feed an audio file to the controller, export the .rcproject scene to .usdz format and use the unzipping trick (a .usdz file is a zip archive, so rename it to .zip and decompress it) to extract the .aiff, .caf or .mp3 sound file. When loading audio for playback, you can choose between a spatial and a non-spatial audio experience.
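To answer the title question directly: on an AudioPlaybackController, pause() preserves the playhead position, so a subsequent play() resumes where the audio left off, whereas stop() rewinds to the beginning. A minimal sketch, assuming controller came from prepareAudio(_:):

controller.play()    // start playback
controller.pause()   // playhead position is preserved
controller.play()    // resumes from the paused position
controller.stop()    // resets; the next play() starts over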
UIKit version
import UIKit
import RealityKit

extension ViewController {

    private func loadAudio() {
        do {
            let resource = try AudioFileResource.load(
                named: "planetarium07.caf",
                in: nil,
                inputMode: .spatial,
                loadingStrategy: .preload,
                shouldLoop: true)

            self.controller = entity.prepareAudio(resource)
            self.controller?.speed = 0.9
            self.controller?.fade(to: .infinity, duration: 2)
        } catch {
            print(error.localizedDescription)
        }
    }
}
ViewController.
class ViewController : UIViewController {

    @IBOutlet var uiView: UIView!      // when using @IBAction buttons
    @IBOutlet var arView: ARView!
    private var entity = Entity()
    private var controller: AudioPlaybackController? = nil

    override func viewDidLoad() {
        super.viewDidLoad()
        uiView.backgroundColor = .systemCyan

        let boxScene = try! Experience.loadBox()
        arView.scene.anchors.append(boxScene)
        let anchor = boxScene.anchor
        anchor?.addChild(entity)

        self.loadAudio()
    }

    @IBAction func playMusic(_ sender: UIButton) {
        self.controller?.play()
    }

    @IBAction func stopMusic(_ sender: UIButton) {
        self.controller?.pause()
        // self.controller?.stop()
    }
}
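If you don't need to prepare the controller ahead of time, Entity also offers playAudio(_:), which prepares the resource and starts playback immediately. A one-line sketch (inside loadAudio(), in place of the prepareAudio(_:) call):

self.controller = entity.playAudio(resource)   // prepares and plays in one step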
SwiftUI version
import SwiftUI
import RealityKit

struct ContentView : View {

    @State var arView = ARView(frame: .zero)
    @State var controller: AudioPlaybackController? = nil
    @State var entity = Entity()

    var body: some View {
        ZStack {
            ARViewContainer(arView: $arView,
                            entity: $entity).ignoresSafeArea()
            VStack {
                Spacer()
                Button("Play") { loadSound(); controller?.play() }
                Button("Stop") { controller?.stop() }
            }
        }
    }

    func loadSound() {
        do {
            let resource = try AudioFileResource.load(
                named: "planetarium07.caf",
                in: nil,
                inputMode: .spatial,
                loadingStrategy: .preload,
                shouldLoop: true)

            self.controller = entity.prepareAudio(resource)
        } catch {
            print(error.localizedDescription)
        }
    }
}
ARViewContainer.
struct ARViewContainer: UIViewRepresentable {

    @Binding var arView: ARView
    @Binding var entity: Entity

    func makeUIView(context: Context) -> ARView {
        let boxScene = try! Experience.loadBox()
        arView.scene.anchors.append(boxScene)
        let anchor = boxScene.anchor
        anchor?.addChild(entity)
        return arView
    }

    func updateUIView(_ view: ARView, context: Context) { }
}
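Note that the "Stop" button here calls stop(), so the next play() starts the track over. To resume mid-track, as the question asks, swap it for pause():

Button("Pause") { controller?.pause() }   // a later play() resumes from here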

Related

Can I control Reality Composer behaviors in RealityKit?

I would like to make a button using SwiftUI. When the button is pressed, the model should hide. I have already read the tutorial at this link (Creating a Trigger), but I don't know how to control it programmatically.
Here is my code:
struct VocabView : View {

    @State private var arView = ARView(frame: .zero)

    var body: some View {
        ZStack {
            ARViewContainer(arView: $arView)
                .ignoresSafeArea()
            VStack {
                Button("hide") {
                    hide()
                }
            }
        }
    }

    func hide() {
        let demoScene = try! Experience1.loadDemo()
        if arView.scene.anchors.count > 0 {
            if arView.scene.anchors[0].isAnchored {
                demoScene.notifications.hide.post()
            }
        }
    }
}
struct ARViewContainer2: UIViewRepresentable {

    @Binding var arView: ARView

    func makeUIView(context: Context) -> ARView {
        let demoScene = try! Experience1.loadDemo()
        DispatchQueue.main.async {
            arView.scene.anchors.append(demoScene)
        }
        return arView
    }
}
Here is the configuration in Reality Composer:
You are loading your model twice: first in the makeUIView() method and again in the hide() method. Try my version.
import SwiftUI
import RealityKit

struct ContentView : View {

    @State private var arView = ARView(frame: .zero)
    @State private var scene = try! Experience.loadBox()

    var body: some View {
        ZStack {
            ARViewContainer(arView: $arView, scene: $scene)
                .ignoresSafeArea()
            VStack {
                Spacer()
                Button("Hide Model") { hideModel() }
            }
        }
    }

    private func hideModel() {
        if arView.scene.anchors.count > 0 {
            if arView.scene.anchors[0].isAnchored {
                scene.notifications.notify.post()
            }
        }
    }
}
struct ARViewContainer : UIViewRepresentable {

    @Binding var arView: ARView
    @Binding var scene: Experience.Box

    func makeUIView(context: Context) -> ARView {
        arView.scene.anchors.append(scene)
        return arView
    }

    func updateUIView(_ view: ARView, context: Context) { }
}
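In the generated Experience code, each Reality Composer notification trigger is exposed on scene.notifications under the trigger's identifier, which is why the question's scene posts hide while this one posts notify. A short sketch (the "show" identifier is hypothetical):

scene.notifications.notify.post()    // trigger whose identifier is "notify"
// scene.notifications.show.post()   // a second, hypothetical trigger "show"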

AVPlayer audio only works when ringer is on

With SwiftUI, I have a custom AVPlayer that auto-plays and loops the video. The problem is that whether or not I specifically tell the AVPlayer to unmute, it stays muted. The physical volume buttons have no effect. The only way to toggle mute on/off is to physically switch the ringer to silent (muted) or not silent (unmuted).
Here is the parent view:
struct VideoCacheView: View {

    @State private var avPlayer: AVPlayer? = nil
    public let url: String
    public let thumbnailURL: String

    var body: some View {
        if self.avPlayer != nil {
            CustomVideoPlayer(player: Binding(self.$avPlayer)!)
                .onAppear {
                    self.avPlayer?.isMuted = false
                    self.avPlayer?.play()
                }
        }
    }
}
and the child:
struct CustomVideoPlayer: UIViewControllerRepresentable {

    @EnvironmentObject var cvm: CameraViewModel
    @Binding var player: AVPlayer

    func makeCoordinator() -> Coordinator {
        return Coordinator(self)
    }

    func makeUIViewController(context: Context) -> AVPlayerViewController {
        let controller = AVPlayerViewController()
        controller.player = self.player
        controller.showsPlaybackControls = false
        controller.videoGravity = self.cvm.videoGravity
        player.actionAtItemEnd = .none
        NotificationCenter.default.addObserver(
            context.coordinator,
            selector: #selector(context.coordinator.restartPlayback),
            name: .AVPlayerItemDidPlayToEndTime,
            object: player.currentItem)
        return controller
    }

    func updateUIViewController(_ uiViewController: AVPlayerViewController, context: Context) { }

    class Coordinator: NSObject {
        public var parent: CustomVideoPlayer

        init(_ parent: CustomVideoPlayer) {
            self.parent = parent
        }

        @objc func restartPlayback() {
            self.parent.player.seek(to: .zero)
        }
    }
}
Why is the physical silent switch the only volume control my AVPlayer has?
https://developer.apple.com/documentation/avfoundation/avplayer/1390127-volume
It turns out that the volume is set to 0.0 when the ringer is in silent mode. By setting the volume to 1.0 by default, there is volume all the time.
I added this:
self.player?.volume = 1.0
inside the child view, below the videoGravity line.
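If setting the volume alone doesn't fix it, the silent-switch behavior is usually governed by the app's audio session category: the default .ambient category is muted by the ringer switch, while .playback is not. A minimal sketch of that alternative (an addition on my part, not part of the original answer), run once, e.g. at launch:

import AVFoundation

do {
    // .playback keeps audio audible regardless of the silent switch
    try AVAudioSession.sharedInstance().setCategory(.playback)
    try AVAudioSession.sharedInstance().setActive(true)
} catch {
    print(error.localizedDescription)
}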

SwiftUI AVPlayer and AVPlayerLayer during view re-creation

I'm creating a macOS app using SwiftUI.
I have a NSViewRepresentable that wraps the AVPlayer:
class PlaybackModel {
    var player: AVPlayer!
    var playerLayer: AVPlayerLayer!

    func createPlayerLayer() -> AVPlayerLayer {
        let result = AVPlayerLayer(player: self.player)
        result.videoGravity = .resizeAspect
        result.needsDisplayOnBoundsChange = true
        result.backgroundColor = .black
        self.playerLayer = result
        return result
    }
}
struct PlayerViewAdapter: NSViewRepresentable {

    let playbackModel: PlaybackModel!

    func makeNSView(context: Context) -> BasePlayerView {
        return BasePlayerView(playbackModel)
    }

    func updateNSView(_ nsView: BasePlayerView, context: Context) { }
}
and the BasePlayerView uses the AVPlayerLayer as a backing layer:
class BasePlayerView: NSView {

    private var playbackModel: PlaybackModel!

    init(_ playbackModel: PlaybackModel) {
        self.playbackModel = playbackModel
        super.init(frame: .zero)
        wantsLayer = true
    }

    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    override func makeBackingLayer() -> CALayer {
        return playbackModel.playerLayer
    }
}
Initially it works fine and I can see the video playing inside the view.
Alas, there seems to be an issue with the SwiftUI re-render process: when the PlayerViewAdapter.makeNSView() method gets called again for the same PlaybackModel (due to state changes), the AVPlayerLayer is not rendered in the newly created BasePlayerView.
Do I need to perform some additional action during this re-creation of the player views?
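One likely culprit: a CALayer instance can only live in one layer tree at a time, so returning the same cached AVPlayerLayer from the new view's makeBackingLayer() doesn't re-attach it cleanly. A minimal sketch of a possible fix, assuming it's acceptable to build a fresh layer per view with the createPlayerLayer() method above (multiple AVPlayerLayers can share one AVPlayer, so playback state survives):

override func makeBackingLayer() -> CALayer {
    // Create a fresh AVPlayerLayer for each newly created view;
    // the shared AVPlayer keeps the playback position
    return playbackModel.createPlayerLayer()
}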

SwiftUI - AVPlayerViewController Full Screen on tvOS

I am able to present an AVPlayerViewController from SwiftUI but there is some padding around the video and I would like for it to be full-screen.
From the SwiftUI portion there is the following:
var body: some View {
    NavigationView {
        List {
            ForEach(topicsArray) { topic in
                Section(header: Text(topic.title)) {
                    ForEach(0..<topic.shows.count) { index in
                        NavigationLink(destination: PlayerView(showID: topic.shows[index])) {
                            ShowCell(showID: topic.shows[index])
                        }
                        .navigationBarTitle("")
                        .navigationBarHidden(true)
                    }
                }
            }
        }
        .listStyle(GroupedListStyle())
        .padding()
    }.onAppear(perform: initialDataLoad)
}
The code being called from the NavigationLink that shows the player is:
struct PlayerView: UIViewControllerRepresentable {

    var showID: Int

    func makeUIViewController(context: Context) -> AVPlayerViewController {
        let pv = PlayerViewController()
        pv.showID = showID
        return pv
    }

    func updateUIViewController(_ viewController: AVPlayerViewController, context: Context) { }
}

class PlayerViewController: AVPlayerViewController {

    var showID: Int! {
        didSet {
            setup()
        }
    }

    private var videoLaunch: VideoLaunch!

    private func setup() {
        videoLaunch = VideoLaunch(showID: showID,
                                  season: nil,
                                  episodeID: nil,
                                  selectedIndex: IndexPath(row: 0, section: 0),
                                  showType: .single,
                                  dataStructure: topics as Any,
                                  screenType: .live)
        playVideo()
    }

    private func playVideo() {
        guard let videoURL = self.videoLaunch.getMediaURL() else {
            print("Problem getting media URL")
            return
        }
        self.player = AVPlayer(url: videoURL)
        self.videoGravity = .resizeAspectFill
        self.player?.play()
    }
}
I have tried setting the bounds and using modalPresentationStyle for full screen, but neither had any impact. There is still what looks like a 10-point border around the video.
I was able to solve the issue by inserting the following within the PlayerViewController class.
override func viewDidLayoutSubviews() {
    super.viewDidLayoutSubviews()
    self.view.bounds = UIScreen.main.bounds
}

Pass value from SwiftUI to ARKit dynamically

I'm currently working on a personal project using ARKit with Swift.
In this app, a skeleton appears over the detected body and tracks the body's motion.
What I want to know is how to change the position of the skeleton dynamically by touching a slider.
The position of the skeleton is defined in the delegate class ARDelegateHandler, e.g. self.characterOffset = [1, 0, 0], which means 1 m to the right of the detected body.
I want to change characterOffset's x-axis value with a slider in SwiftUI (something like self.characterOffset = [x, 0, 0]).
Here is my code.
ARViewContainer.swift
import SwiftUI
import RealityKit
import ARKit
import Combine

struct ARViewContainer: UIViewRepresentable {

    let characterAnchor = AnchorEntity()
    @Binding var offsetValue: Float
    @ObservedObject var offsetInstance = Offset()

    func makeUIView(context: Context) -> ARView {
        let arView = ARView(frame: .zero)
        arView.session.delegate = context.coordinator
        return arView
    }

    func updateUIView(_ uiView: ARView, context: Context) {
        let configuration = ARBodyTrackingConfiguration()
        uiView.session.run(configuration)
        uiView.scene.addAnchor(characterAnchor)
        print(offsetValue)
    }

    func makeCoordinator() -> ARDelegateHandler {
        ARDelegateHandler(self, anchor: characterAnchor)
    }

    class ARDelegateHandler: NSObject, ARSessionDelegate {

        var arVC: ARViewContainer
        let characterAnchor: AnchorEntity
        var character: BodyTrackedEntity?
        var characterOffset: SIMD3<Float>

        init(_ control: ARViewContainer, anchor: AnchorEntity) {
            self.arVC = control
            self.characterAnchor = anchor
            self.characterOffset = [1, 0, 0]
            super.init()
            setSkeleton()
        }

        func setSkeleton() {
            var cancellable: AnyCancellable? = nil
            cancellable = Entity.loadBodyTrackedAsync(named: "character/robot").sink(
                receiveCompletion: { completion in
                    if case let .failure(error) = completion {
                        print("Error: Unable to load model: \(error.localizedDescription)")
                    }
                    cancellable?.cancel()
                }, receiveValue: { (character: Entity) in
                    if let character = character as? BodyTrackedEntity {
                        character.scale = [1, 1, 1]
                        self.character = character
                        cancellable?.cancel()
                    } else {
                        print("Error: Unable to load model as BodyTrackedEntity")
                    }
                })
        }

        func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) {
            for anchor in anchors {
                guard let bodyAnchor = anchor as? ARBodyAnchor else { continue }
                let bodyPosition = simd_make_float3(bodyAnchor.transform.columns.3)
                characterAnchor.position = bodyPosition + characterOffset
                characterAnchor.orientation = Transform(matrix: bodyAnchor.transform).rotation
                if let character = character, character.parent == nil {
                    characterAnchor.addChild(character)
                }
            }
        }
    }
}
ContentView.swift (the user is expected to touch the slider, which changes offsetValue; SlideView has already been implemented)
import SwiftUI
import RealityKit
import ARKit
import Combine

struct ContentView : View {

    @State var offsetValue: Float = 0.0
    @ObservedObject var offsetInstance = Offset()

    var body: some View {
        VStack {
            Button(action: { self.offsetInstance.setOffset(offset: 1.0) },
                   label: { Text("check") })
            ZStack(alignment: .bottom) {
                ARViewContainer(offsetValue: $offsetValue)
                SlideView(offSetValue: $offsetValue)
            }
        }
    }
}
Thank you very much for your help!
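One way to feed the slider value into the delegate (a minimal sketch under the structure above, not a confirmed answer from the thread) is to forward the binding to the coordinator in updateUIView(), which SwiftUI calls whenever offsetValue changes:

func updateUIView(_ uiView: ARView, context: Context) {
    // Forward the latest slider value to the session delegate;
    // the next session(_:didUpdate:) call will use the new offset
    context.coordinator.characterOffset = [offsetValue, 0, 0]
    print(offsetValue)
}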