Rotation of UIViewRepresentable View using AVFoundation - swift

I am using CameraPreview (a UIViewRepresentable) as a bridge between UIKit and SwiftUI. CameraPreview is configured with CameraModel and uses AVCaptureVideoPreviewLayer. CameraModel configures the AVCaptureSession and AVCaptureDeviceInput. Everything works fine, except when I rotate the device on iPad: the CameraPreview is not rotated properly and keeps using the input from the previous orientation. I can detect the device rotation in SwiftUI (based on this article), but I do not know how to reconfigure the rotation of CameraPreview and AVCaptureVideoPreviewLayer. Any idea how to trigger a layout update in UIKit after I find out that the device was rotated?
Code below:
struct CameraView: View {
    @StateObject var camera = CameraModel()

    var body: some View {
        CameraPreview(camera: camera)
            .ignoresSafeArea(.all, edges: .all)
    }
}
struct CameraPreview: UIViewRepresentable {
    @ObservedObject var camera: CameraModel

    func makeUIView(context: Context) -> UIView {
        let view = UIView(frame: UIScreen.main.bounds)
        camera.preview = AVCaptureVideoPreviewLayer(session: camera.session)
        camera.preview.frame = view.frame
        // your own properties
        camera.preview.videoGravity = .resizeAspectFill
        view.layer.addSublayer(camera.preview)
        // starting session
        camera.session.startRunning()
        return view
    }

    func updateUIView(_ uiView: UIViewType, context: Context) { }
}
class CameraModel: NSObject, ObservableObject, AVCapturePhotoCaptureDelegate {
    @Published var session = AVCaptureSession()
    @Published var preview: AVCaptureVideoPreviewLayer!
    @Published var output = AVCapturePhotoOutput()

    func setUp() {
        do {
            self.session.beginConfiguration()
            let device = AVCaptureDevice.default(for: .video)
            let input = try AVCaptureDeviceInput(device: device!)
            if self.session.canAddInput(input) {
                self.session.addInput(input)
            }
            if self.session.canAddOutput(self.output) {
                self.session.addOutput(self.output)
            }
            self.session.commitConfiguration()
        } catch {
            print(error.localizedDescription)
        }
    }
}

I solved the issue by doing the following:
Adapting the preview's frame to the view's bounds instead of its frame.
When I detect a change of device orientation in some parent View, I change the value of the deviceRotation variable, which automatically triggers the updateUIView function.
struct CameraPreview: UIViewRepresentable {
    @ObservedObject var camera: CameraModel
    var deviceRotation: UIDeviceOrientation

    func makeUIView(context: Context) -> UIView {
        let view = UIView(frame: UIScreen.main.bounds)
        camera.preview = AVCaptureVideoPreviewLayer(session: camera.session)
        camera.preview.frame = view.bounds
        // your own properties
        camera.preview.videoGravity = .resizeAspectFill
        view.layer.addSublayer(camera.preview)
        // starting session
        camera.session.startRunning()
        return view
    }
    func updateUIView(_ uiView: UIViewType, context: Context) {
        // Re-fit the preview layer whenever deviceRotation changes.
        camera.preview.frame = UIScreen.main.bounds
    }
}
Manually setting AVCaptureVideoOrientation on the AVCaptureConnection to the correct device orientation. I do this every time I detect an orientation change (a sketch of the full handling follows the snippet below):
previewConnection.videoPreviewLayer?.connection?.videoOrientation = orientation.videoOrientation
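For context, here is a minimal sketch of how the orientation handling could be wired up. The onReceive on UIDevice.orientationDidChangeNotification and the videoOrientation mapping extension are my own assumptions and are not part of the original answer:

import SwiftUI
import AVFoundation

extension UIDeviceOrientation {
    // Device and video orientations are mirrored for landscape.
    var videoOrientation: AVCaptureVideoOrientation {
        switch self {
        case .portraitUpsideDown: return .portraitUpsideDown
        case .landscapeLeft:      return .landscapeRight
        case .landscapeRight:     return .landscapeLeft
        default:                  return .portrait
        }
    }
}

struct CameraView: View {
    @StateObject var camera = CameraModel()
    @State private var deviceRotation = UIDevice.current.orientation

    var body: some View {
        CameraPreview(camera: camera, deviceRotation: deviceRotation)
            .ignoresSafeArea(.all, edges: .all)
            .onReceive(NotificationCenter.default.publisher(
                for: UIDevice.orientationDidChangeNotification)) { _ in
                deviceRotation = UIDevice.current.orientation
                // Keep the preview connection in sync with the device orientation.
                camera.preview?.connection?.videoOrientation = deviceRotation.videoOrientation
            }
    }
}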

Can I use .playAudio() to resume playback after stopping?

Following up on the question If I assign a sound in Reality Composer, can I stop it programmatically in RealityKit?, I would like to use a method to resume playback after stopping the music.
Can I do that?
Right now, I use this command in the stopAudio function to stop the music.
func stopAudio() {
    if arView.scene.anchors.count > 0 {
        if arView.scene.anchors[0].isAnchored {
            arView.scene.anchors[0].children[0].stopAllAudio()
        }
    }
}
If I want arView.scene.anchors[0] to replay the music again, which command should I use?
Audio Playback Controller
Since RealityKit 2.0 isn't able to control the parameters of Reality Composer's behaviors, the best strategy for controlling audio is to create a programmatic AudioPlaybackController. To feed your audio file to the controller, export the .rcproject scene to .usdz format and use the unzipping trick to extract the .aiff, .caf or .mp3 sound file. When loading audio for playback, you can choose between a spatial and a non-spatial audio experience.
UIKit version
import UIKit
import RealityKit

extension ViewController {
    private func loadAudio() {
        do {
            let resource = try AudioFileResource.load(
                named: "planetarium07.caf",
                in: nil,
                inputMode: .spatial,
                loadingStrategy: .preload,
                shouldLoop: true)
            self.controller = entity.prepareAudio(resource)
            self.controller?.speed = 0.9
            self.controller?.fade(to: .infinity, duration: 2)
        } catch {
            print(error.localizedDescription)
        }
    }
}
ViewController.
class ViewController: UIViewController {
    @IBOutlet var uiView: UIView!      // when using @IBAction buttons
    @IBOutlet var arView: ARView!
    private var entity = Entity()
    private var controller: AudioPlaybackController? = nil

    override func viewDidLoad() {
        super.viewDidLoad()
        uiView.backgroundColor = .systemCyan
        let boxScene = try! Experience.loadBox()
        arView.scene.anchors.append(boxScene)
        let anchor = boxScene.anchor
        anchor?.addChild(entity)
        self.loadAudio()
    }

    @IBAction func playMusic(_ sender: UIButton) {
        self.controller?.play()
    }

    @IBAction func stopMusic(_ sender: UIButton) {
        self.controller?.pause()
        // self.controller?.stop()
    }
}
SwiftUI version
import SwiftUI
import RealityKit

struct ContentView: View {
    @State var arView = ARView(frame: .zero)
    @State var controller: AudioPlaybackController? = nil
    @State var entity = Entity()

    var body: some View {
        ZStack {
            ARViewContainer(arView: $arView,
                            entity: $entity).ignoresSafeArea()
            VStack {
                Spacer()
                Button("Play") { loadSound(); controller?.play() }
                Button("Stop") { controller?.stop() }
            }
        }
    }

    func loadSound() {
        do {
            let resource = try AudioFileResource.load(
                named: "planetarium07.caf",
                in: nil,
                inputMode: .spatial,
                loadingStrategy: .preload,
                shouldLoop: true)
            self.controller = entity.prepareAudio(resource)
        } catch {
            print(error.localizedDescription)
        }
    }
}
ARViewContainer.
struct ARViewContainer: UIViewRepresentable {
    @Binding var arView: ARView
    @Binding var entity: Entity

    func makeUIView(context: Context) -> ARView {
        let boxScene = try! Experience.loadBox()
        arView.scene.anchors.append(boxScene)
        let anchor = boxScene.anchor
        anchor?.addChild(entity)
        return arView
    }

    func updateUIView(_ view: ARView, context: Context) { }
}

AVPlayer audio only works when ringer is on

With SwiftUI, I have a custom AVPlayer that autoplays and loops a video. The problem is that whether or not I explicitly tell the AVPlayer to unmute, it stays muted. The physical volume buttons have no effect. The only way to toggle mute on/off is to physically switch the ringer to silent (muted) or not silent (unmuted).
Here is the parent view:
struct VideoCacheView: View {
    @State private var avPlayer: AVPlayer? = nil
    public let url: String
    public let thumbnailURL: String

    var body: some View {
        if self.avPlayer != nil {
            CustomVideoPlayer(player: Binding(self.$avPlayer)!)
                .onAppear {
                    self.avPlayer?.isMuted = false
                    self.avPlayer?.play()
                }
        }
    }
}
and the child:
struct CustomVideoPlayer: UIViewControllerRepresentable {
    @EnvironmentObject var cvm: CameraViewModel
    @Binding var player: AVPlayer

    func makeCoordinator() -> Coordinator {
        return Coordinator(self)
    }

    func makeUIViewController(context: Context) -> AVPlayerViewController {
        let controller = AVPlayerViewController()
        controller.player = self.player
        controller.showsPlaybackControls = false
        controller.videoGravity = self.cvm.videoGravity
        player.actionAtItemEnd = .none
        NotificationCenter.default.addObserver(
            context.coordinator,
            selector: #selector(context.coordinator.restartPlayback),
            name: .AVPlayerItemDidPlayToEndTime,
            object: player.currentItem)
        return controller
    }

    func updateUIViewController(_ uiViewController: AVPlayerViewController, context: Context) { }

    class Coordinator: NSObject {
        public var parent: CustomVideoPlayer

        init(_ parent: CustomVideoPlayer) {
            self.parent = parent
        }

        @objc func restartPlayback() {
            self.parent.player.seek(to: .zero)
        }
    }
}
Why is the physical silent switch the only volume control my AVPlayer responds to?
https://developer.apple.com/documentation/avfoundation/avplayer/1390127-volume
Turns out that the volume is set to 0.0 when the ringer is in silent mode. By setting the volume to 1.0 by default, there is audio all the time.
Added this:
self.player.volume = 1.0
inside the child view, below the videoGravity line.
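For reference, a minimal sketch of where that line would sit in the child's makeUIViewController(context:), with everything else unchanged from the code above:

func makeUIViewController(context: Context) -> AVPlayerViewController {
    let controller = AVPlayerViewController()
    controller.player = self.player
    controller.showsPlaybackControls = false
    controller.videoGravity = self.cvm.videoGravity
    self.player.volume = 1.0   // keep audio audible regardless of the ringer switch
    player.actionAtItemEnd = .none
    // NotificationCenter observer registration as before
    return controller
}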

SwiftUI AVPlayer and AVPlayerLayer during view re-creation

I'm creating a macOS app using SwiftUI.
I have an NSViewRepresentable that wraps the AVPlayer:
class PlaybackModel {
    var player: AVPlayer!
    var playerLayer: AVPlayerLayer!

    func createPlayerLayer() -> AVPlayerLayer {
        let result = AVPlayerLayer(player: self.player)
        result.videoGravity = .resizeAspect
        result.needsDisplayOnBoundsChange = true
        result.backgroundColor = .black
        self.playerLayer = result
        return result
    }
}
struct PlayerViewAdapter: NSViewRepresentable {
    let playbackModel: PlaybackModel!

    func makeNSView(context: Context) -> BasePlayerView {
        return BasePlayerView(playbackModel)
    }

    func updateNSView(_ nsView: BasePlayerView, context: Context) { }
}
and the BasePlayerView uses the AVPlayerLayer as a backing layer:
class BasePlayerView: NSView {
    private var playbackModel: PlaybackModel!

    init(_ playbackModel: PlaybackModel) {
        self.playbackModel = playbackModel
        super.init(frame: .zero)
        wantsLayer = true
    }

    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    override func makeBackingLayer() -> CALayer {
        return playbackModel.playerLayer
    }
}
Initially it works fine and I can see the video playing inside the view.
Alas, there seems to be an issue during the SwiftUI re-render process: when the PlayerViewAdapter.makeNSView() method gets called again for the same PlaybackModel (due to state changes), the AVPlayerLayer is not rendered in the newly created BasePlayerView.
Do I need to perform some additional action during this re-creation of the player views?

SwiftUI - AVPlayerViewController Full Screen on tvOS

I am able to present an AVPlayerViewController from SwiftUI, but there is some padding around the video and I would like it to be full screen.
From the SwiftUI portion there is the following:
var body: some View {
    NavigationView {
        List {
            ForEach(topicsArray) { topic in
                Section(header: Text(topic.title)) {
                    ForEach(0..<topic.shows.count) { index in
                        NavigationLink(destination: PlayerView(showID: topic.shows[index])) {
                            ShowCell(showID: topic.shows[index])
                        }
                        .navigationBarTitle("")
                        .navigationBarHidden(true)
                    }
                }
            }
        }
        .listStyle(GroupedListStyle())
        .padding()
    }.onAppear(perform: initialDataLoad)
}
The code being called from the NavigationLink that shows the player is:
struct PlayerView: UIViewControllerRepresentable {
    var showID: Int

    func makeUIViewController(context: Context) -> AVPlayerViewController {
        let pv = PlayerViewController()
        pv.showID = showID
        return pv
    }

    func updateUIViewController(_ viewController: AVPlayerViewController, context: Context) { }
}
class PlayerViewController: AVPlayerViewController {
    var showID: Int! {
        didSet {
            setup()
        }
    }
    private var videoLaunch: VideoLaunch!

    private func setup() {
        videoLaunch = VideoLaunch(showID: showID,
                                  season: nil,
                                  episodeID: nil,
                                  selectedIndex: IndexPath(row: 0, section: 0),
                                  showType: .single,
                                  dataStructure: topics as Any,
                                  screenType: .live)
        playVideo()
    }

    private func playVideo() {
        guard let videoURL = self.videoLaunch.getMediaURL() else {
            print("Problem getting media URL")
            return
        }
        self.player = AVPlayer(url: videoURL)
        self.videoGravity = .resizeAspectFill
        self.player?.play()
    }
}
I have tried setting the bounds and using modalPresentationStyle for full screen, but neither had any impact. There is still what looks like a 10-point border around the video.
I was able to solve the issue by inserting the following within the PlayerViewController class.
override func viewDidLayoutSubviews() {
    super.viewDidLayoutSubviews()
    self.view.bounds = UIScreen.main.bounds
}

SwiftUI and Unity

I'm trying to integrate a Unity view in SwiftUI. I have the code below, but when I run the app I get no output. I know SpriteKit and SceneKit are possible, and my Unity view runs in a standard Swift app, so I'm wondering whether SwiftUI is possible.
struct ContentView: View {
    var body: some View {
        UnityUIView()
    }
}
struct UnityUIView: UIViewRepresentable {
    func makeUIView(context: Context) -> UIView {
        let appDelegate = UIApplication.shared.delegate as? AppDelegate
        appDelegate?.startUnity()
        return UnityGetGLView()!
    }

    func updateUIView(_ view: UIView, context: Context) { }
}
I've tried to create a UIViewControllerRepresentable but get the same thing: the screen flashes once and then disappears. I think it's the splash screen, as I changed the colour for debugging; no dice.
struct ContentView: View {
    var body: some View {
        TestUnityViewController()
    }
}
struct TestUnityViewController: UIViewControllerRepresentable {
    func makeUIViewController(context: Context) -> UIViewController {
        let vc = UIViewController()
        let appDelegate = UIApplication.shared.delegate as! AppDelegate
        appDelegate.startUnity()
        let unityView = UnityGetGLView()!
        vc.view.backgroundColor = .red
        vc.view!.addSubview(unityView)
        return vc
    }

    func updateUIViewController(_ viewController: UIViewController, context: Context) { }
}
If I add a delay to the UIViewControllerRepresentable, it works... interesting.
struct TestUnityViewController: UIViewControllerRepresentable {
    func makeUIViewController(context: Context) -> UIViewController {
        let vc = UIViewController()
        vc.view.backgroundColor = .red
        DispatchQueue.main.asyncAfter(deadline: .now() + 2.5) {
            let appDelegate = UIApplication.shared.delegate as! AppDelegate
            appDelegate.startUnity()
            let unityView = UnityGetGLView()!
            vc.view!.addSubview(unityView)
        }
        return vc
    }

    func updateUIViewController(_ viewController: UIViewController, context: Context) { }
}
For people still having the issue: I haven't investigated yet, but if you are using the new Unity example that integrates Unity as a framework, the delay indeed fixes the issue with SwiftUI.
You can create a SwiftUI View to which the Unity view will be added:
import SwiftUI

struct TestUnityViewController: UIViewControllerRepresentable {
    func makeUIViewController(context: Context) -> UIViewController {
        let vc = UIViewController()
        UnityBridge.showUnity()
        DispatchQueue.main.asyncAfter(deadline: .now() + 5.0) {
            let unityView = UnityBridge.getAppController().rootView!
            vc.view!.addSubview(unityView)
        }
        return vc
    }

    func updateUIViewController(_ viewController: UIViewController, context: Context) { }
}

struct ContentView: View {
    var body: some View {
        TestUnityViewController()
    }
}

struct ContentView_Previews: PreviewProvider {
    static var previews: some View {
        ContentView()
    }
}
Here, UnityBridge is a wrapper around the UnityFramework instantiation, similar to the Objective-C version from the repository.
The methods:
showUnity() creates the UnityFramework instance and then calls the showUnityWindow method.
getAppController() calls the appController method on the UnityFramework object.
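Since UnityBridge is only described in prose here, the following is a rough sketch of what such a wrapper could look like, based on Unity's "Unity as a Library" iOS sample. The bundle path, the setDataBundleId value, and the omission of setExecuteHeader and listener registration are assumptions that may need adjusting for your Unity version:

import UnityFramework

enum UnityBridge {
    private static var ufw: UnityFramework?

    static func showUnity() {
        if ufw == nil {
            // Load UnityFramework.framework embedded in the app bundle.
            let path = Bundle.main.bundlePath + "/Frameworks/UnityFramework.framework"
            guard let bundle = Bundle(path: path) else { return }
            if !bundle.isLoaded { bundle.load() }
            guard let framework = (bundle.principalClass as? UnityFramework.Type)?.getInstance() else { return }
            framework.setDataBundleId("com.unity3d.framework")
            framework.runEmbedded(withArgc: CommandLine.argc,
                                  argv: CommandLine.unsafeArgv,
                                  appLaunchOpts: nil)
            ufw = framework
        }
        ufw?.showUnityWindow()
    }

    static func getAppController() -> UnityAppController {
        // Force-unwrapped to mirror the usage above; guard in real code.
        return ufw!.appController()
    }
}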
Until I find a better solution, this will do. I imagine there is a better way to do it, maybe an event triggered to let you know when Unity's view is fully ready.
EDIT:
I created an example repository to show how to integrate Unity in a SwiftUI project: https://github.com/DavidPeicho/unity-swiftui-example