Adding a continuously looped video to macOS SwiftUI app - swift

I want to add a few looped videos to a macOS app written in SwiftUI, but when I run it the video doesn't play.
I'm not sure if I've over-engineered this or have a bug I can't spot, so hopefully someone can help!
I want to call VideoTutorialView(videoName:), passing in a different video each time.
import SwiftUI
import AVFoundation
struct VideoTutorialView: View {
@State private var player = AVQueuePlayer()
let center = NotificationCenter.default
let videoName: String
var body: some View {
PlayerView(videoName: videoName, player: player)
.aspectRatio(16.0 / 9.0, contentMode: .fill) // 16 / 9 would be integer division, i.e. 1
.frame(height: 140)
.background(Color.gray.opacity(0.3))
.cornerRadius(.roundedCorner)
.onAppear { player.play() }
.onDisappear { player.pause() }
.onReceive(center.publisher(for: NSApplication.willResignActiveNotification)) { _ in
player.pause()
}
.onReceive(center.publisher(for: NSApplication.didBecomeActiveNotification)) { _ in
player.play()
}
}
}
struct PlayerView: NSViewRepresentable {
private let videoName: String
private let player: AVQueuePlayer
init(videoName: String, player: AVQueuePlayer) {
self.videoName = videoName
self.player = player
}
func updateNSView(_ nsView: NSView, context: NSViewRepresentableContext<PlayerView>) {}
func makeNSView(context: Context) -> NSView {
return LoopingPlayerUIView(videoName: videoName, player: player)
}
}
class LoopingPlayerUIView: NSView {
private let playerLayer = AVPlayerLayer()
private var playerLooper: AVPlayerLooper?
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
init(videoName: String,
player: AVQueuePlayer,
videoGravity: AVLayerVideoGravity = .resizeAspectFill) {
super.init(frame: .zero)
guard let fileUrl = Bundle.main.url(forResource: videoName, withExtension: "mp4") else { return }
let asset = AVAsset(url: fileUrl)
let item = AVPlayerItem(asset: asset)
player.isMuted = true
playerLayer.player = player
playerLayer.videoGravity = videoGravity
layer?.addSublayer(playerLayer)
playerLooper = AVPlayerLooper(player: player, templateItem: item)
}
override func layout() {
super.layout()
playerLayer.frame = bounds
}
}

super.init(frame: .zero) looks suspicious. You should try giving the view frame and layer values that are consistent with your SwiftUI view, so the player layer actually has something to render into.
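If I had to guess at the actual failure: on macOS an NSView is not layer-backed by default, so layer is nil inside your initializer and layer?.addSublayer(playerLayer) silently does nothing. A minimal sketch of the same view with wantsLayer set before the sublayer is attached (assuming the mp4 really is in the bundle):
class LoopingPlayerUIView: NSView {
    private let playerLayer = AVPlayerLayer()
    private var playerLooper: AVPlayerLooper?

    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    init(videoName: String,
         player: AVQueuePlayer,
         videoGravity: AVLayerVideoGravity = .resizeAspectFill) {
        super.init(frame: .zero)
        // NSView has no backing layer by default; without this line,
        // `layer` is nil and addSublayer(_:) is silently skipped.
        wantsLayer = true
        guard let fileUrl = Bundle.main.url(forResource: videoName, withExtension: "mp4") else { return }
        let item = AVPlayerItem(asset: AVAsset(url: fileUrl))
        player.isMuted = true
        playerLayer.player = player
        playerLayer.videoGravity = videoGravity
        layer?.addSublayer(playerLayer)
        playerLooper = AVPlayerLooper(player: player, templateItem: item)
    }

    override func layout() {
        super.layout()
        // Keep the player layer pinned to the view's bounds.
        playerLayer.frame = bounds
    }
}
The rest of the setup (AVPlayerLooper with a template item on an AVQueuePlayer) is the standard looping recipe and should work once the layer is actually attached.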

Related

Pausing Animation in SceneKit

I loaded a .usdz file that has an animation attached to it into SceneKit. I want to stop this animation from playing, but I can't find the right way.
I load the .usdz asset file with the following method:
func loadIdle() {
let urlfile = Bundle.main.path(forResource: "armi_idle",
ofType: "usdz",
inDirectory: "Asset.scnassets")!
let scene = try! SCNScene(url: URL(string: urlfile)!)
guard let findNode = scene.rootNode.childNode(withName: "armi_idle",
recursively: true)
else {
print("err finde idle")
return
}
// try to pause, but it doesn't work
findNode.isPaused = true
// add to main scene
self.scene.rootNode.addChildNode(findNode)
}
(A picture of the asset was attached here; image omitted.)
Declare the following properties:
import SceneKit
@IBOutlet var sceneView: SCNView!
var notWalking: Bool = true
var animations = [String: CAAnimation]()
var node = SCNNode()
override func viewDidLoad() {
super.viewDidLoad()
sceneView.scene = SCNScene()
self.animation()
}
Then try the following logic (you need Idle and Walking files):
fileprivate func loadAnimation(withKey: String, scene: String, id: String) {
let url = Bundle.main.url(forResource: scene, withExtension: "dae")!
let source = SCNSceneSource(url: url, options: nil)
guard let character = source?.entryWithIdentifier(id,
withClass: CAAnimation.self) else { return }
character.fadeInDuration = 0.5
character.fadeOutDuration = 0.5
self.animations[withKey] = character
}
then:
fileprivate func animation() {
let standStill = SCNScene(named: "art.scnassets/Idle")!
for childNode in standStill.rootNode.childNodes {
self.node.addChildNode(childNode)
}
sceneView.scene.rootNode.addChildNode(self.node)
self.loadAnimation(withKey: "walking", scene: "art.scnassets/Walking",
id: "Walking-1")
}
and then:
func playWalking(key: String) {
sceneView.scene.rootNode.addAnimation(animations[key]!, forKey: key)
}
func stopWalking(key: String) {
sceneView.scene.rootNode.removeAnimation(forKey: key,
blendOutDuration: 0.75)
}
and at last:
@IBAction func pressed(_ sender: UIButton) {
notWalking ? playWalking(key: "walking") : stopWalking(key: "walking")
notWalking.toggle()
}

SwiftUI UIViewRepresentable AVPlayer crashing due to "periodicTimeObserver"

I have a SwiftUI application which has a carousel of videos. I'm using an AVPlayer with UIViewRepresentable and I'm creating the carousel with a ForEach loop of my custom UIViewRepresentable view. I want to have a "periodicTimeObserver" on the active AVPlayer, but it crashes and says
"An instance of AVPlayer cannot remove a time observer that was added
by a different instance of AVPlayer SwiftUI"
My question is: how can I remove the periodicTimeObserver of an AVPlayer inside a UIView wrapped in a UIViewRepresentable, without causing the app to crash?
Here is my code:
ForEach(videosArray.indices, id: \.self) { i in
let videoURL = videosArray[i]
ZStack {
VStack {
VideoView.init(viewModel: viewModel, videoURL: URL(string: videoURL)!, videoIndex: i)
}
}
}
struct VideoView: UIViewRepresentable {
@ObservedObject var viewModel = ViewModel()
var videoURL: URL
var previewLength: Double?
var videoIndex: Int
func makeUIView(context: Context) -> UIView {
return PlayerView.init(frame: .zero, url: videoURL, previewLength: previewLength ?? 6)
}
func updateUIView(_ uiView: UIView, context: Context) {
if videoIndex == viewModel.currentIndexSelected {
if let playerView = uiView as? PlayerView {
if !viewModel.isPlaying {
playerView.pause()
} else {
playerView.play(customStartTime: viewModel.newStartTime, customEndTime: viewModel.newEndTime)
}
}
} else {
if let playerView = uiView as? PlayerView {
playerView.pause()
}
}
}
}
public class ViewModel: ObservableObject {
@Published public var currentIndexSelected: Int = 0
@Published public var isPlaying: Bool = true
@Published public var newStartTime = 0.0
@Published public var newEndTime = 30.0
}
class PlayerView: UIView {
private let playerLayer = AVPlayerLayer()
private var previewTimer: Timer?
var previewLength: Double
var player: AVPlayer?
var timeObserver: Any? = nil
init(frame: CGRect, url: URL, previewLength:Double) {
self.previewLength = previewLength
super.init(frame: frame)
player = AVPlayer(url: url)
player!.volume = 0
player!.play()
playerLayer.player = player
playerLayer.videoGravity = .resizeAspectFill
playerLayer.backgroundColor = UIColor.black.cgColor
layer.addSublayer(playerLayer)
}
required init?(coder: NSCoder) {
self.previewLength = 15
super.init(coder: coder)
}
override func layoutSubviews() {
super.layoutSubviews()
playerLayer.frame = bounds
}
func pause() {
if let timeObserver = timeObserver {
self.player?.removeTimeObserver(timeObserver)
self.timeObserver = nil
}
player?.pause()
}
@objc func replayFinishedItem(noti: NSNotification) {
print("REPLAY FINISHED NOTIIIII: \(noti)")
if let timeDict = noti.object as? [String: Any], let startTime = timeDict["startTime"] as? Double, let endTime = timeDict["endTime"] as? Double/*, let player = timeDict["player"] as? AVPlayer, let observer = timeDict["timeObserver"]*/ {
self.removeTheTimeObserver()
self.play(customStartTime: startTime, customEndTime: endTime)
}
}
@objc func removeTheTimeObserver() {
print("ATTEMPT TO REMOVE IT!")
if let timeObserver = timeObserver {
self.player?.removeTimeObserver(timeObserver)
self.timeObserver = nil
}
}
func play(at playPosition: Double = 0.0, customStartTime: Double = 0.0, customEndTime: Double = 15.0) {
var startTime = customStartTime
var endTime = customEndTime
if customStartTime > customEndTime {
startTime = customEndTime
endTime = customStartTime
}
if playPosition != 0.0 {
player?.seek(to: CMTime(seconds: playPosition, preferredTimescale: CMTimeScale(1)))
} else {
player?.seek(to: CMTime(seconds: startTime, preferredTimescale: CMTimeScale(1)))
}
player?.play()
var timeDict: [String: Any] = ["startTime": startTime, "endTime": endTime]
NotificationCenter.default.addObserver(self, selector: #selector(self.replayFinishedItem(noti:)), name: .customAVPlayerShouldReplayNotification, object: nil)
self.timeObserver = self.player?.addPeriodicTimeObserver(forInterval: CMTime.init(value: 1, timescale: 100), queue: DispatchQueue.main, using: { [weak self] time in
guard let strongSelf = self else {
return
}
let currentTime = CMTimeGetSeconds(strongSelf.player!.currentTime())
let currentTimeStr = String(currentTime)
if let currentTimeDouble = Double(currentTimeStr) {
let userDefaults = UserDefaults.standard
userDefaults.set(currentTimeDouble, forKey: "currentTimeDouble")
NotificationCenter.default.post(name: .currentTimeDouble, object: currentTimeDouble)
if currentTimeDouble >= endTime {
if let timeObserver = strongSelf.timeObserver {
strongSelf.player?.removeTimeObserver(timeObserver)
strongSelf.timeObserver = nil
}
strongSelf.player?.pause()
NotificationCenter.default.post(name: .customAVPlayerShouldReplayNotification, object: timeDict)
} else if let currentItem = strongSelf.player?.currentItem {
let seconds = currentItem.duration.seconds
if currentTimeDouble >= seconds {
if let timeObserver = strongSelf.timeObserver {
strongSelf.player?.removeTimeObserver(timeObserver)
strongSelf.timeObserver = nil
}
NotificationCenter.default.post(name: .customAVPlayerShouldReplayNotification, object: timeDict)
}
}
}
})
}
}
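The crash message usually means the token was handed to removeTimeObserver(_:) on a different AVPlayer instance than the one that registered it, which is easy to hit when SwiftUI recreates the representable and, with it, a fresh PlayerView and AVPlayer. One defensive pattern is to bind the token to the player that created it and only ever remove it there; a rough sketch, not the poster's exact code:
import AVFoundation

final class TimeObserverBox {
    // Weak: if the player is gone, there is nothing to remove the token from.
    private weak var owner: AVPlayer?
    private var token: Any?

    // Register a periodic observer and remember which player owns the token.
    func observe(_ player: AVPlayer,
                 interval: CMTime = CMTime(value: 1, timescale: 100),
                 queue: DispatchQueue = .main,
                 handler: @escaping (CMTime) -> Void) {
        remove() // never hold two tokens at once
        owner = player
        token = player.addPeriodicTimeObserver(forInterval: interval, queue: queue, using: handler)
    }

    // Always remove the token from the same player that added it.
    func remove() {
        if let token = token, let owner = owner {
            owner.removeTimeObserver(token)
        }
        token = nil
        owner = nil
    }

    deinit { remove() }
}
PlayerView would then hold one TimeObserverBox, call observe(_:handler:) from play and remove() from pause and deinit, instead of touching player?.removeTimeObserver directly in several places.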

ARSession CurrentFrame is missing the AR interpretation Model Entities

I have the following ARView:
import SwiftUI
import UIKit
import RealityKit
import ARKit
struct ARViewContainer: UIViewRepresentable {
@EnvironmentObject var selectedFood: SelectedFood
@EnvironmentObject var arSession: ARSessionObservable
func makeCoordinator() -> Coordinator {
Coordinator(self)
}
func makeUIView(context: Context) -> ARView {
let arView = ARView(frame: .zero)
let config = ARWorldTrackingConfiguration()
config.planeDetection = [.vertical, .horizontal]
config.environmentTexturing = .automatic
if ARWorldTrackingConfiguration.supportsSceneReconstruction(.mesh) {
config.sceneReconstruction = .mesh
}
arView.session.delegate = context.coordinator
arView.session.run(config)
arSession.session = arView.session
return arView
}
func updateUIView(_ uiView: ARView, context: Context) {
if (!selectedFood.food.image.isEmpty) {
let data = try! Data(contentsOf: URL(string: self.selectedFood.food.image)!)
let fileURL = FileManager.default.temporaryDirectory.appendingPathComponent(UUID().uuidString)
try! data.write(to: fileURL)
do {
let texture = try TextureResource.load(contentsOf: fileURL)
var material = SimpleMaterial()
material.baseColor = MaterialColorParameter.texture(texture)
material.tintColor = UIColor.white.withAlphaComponent(0.99)
let entity = ModelEntity(mesh: .generatePlane(width: 0.1, height: 0.1), materials: [material])
let anchor = AnchorEntity(.plane(.any, classification: .any, minimumBounds: .zero))
anchor.addChild(entity)
uiView.scene.addAnchor(anchor)
} catch {
print(error.localizedDescription)
}
}
}
class Coordinator: NSObject, ARSessionDelegate, ARSCNViewDelegate {
var arVC: ARViewContainer
init(_ arViewContainer: ARViewContainer) {
self.arVC = arViewContainer
}
func session(_ session: ARSession, didUpdate frame: ARFrame) {
}
func session(_ session: ARSession, didAdd anchors: [ARAnchor]) {
}
}
}
And in HomeView I have the following two variables:
@StateObject var arSession: ARSessionObservable = ARSessionObservable()
@State private var capturedImage: UIImage = UIImage()
The following button with action:
Button {
if let capturedFrame = arSession.session.currentFrame {
let ciimg = CIImage(cvPixelBuffer: capturedFrame.capturedImage)
if let cgImage = convertCIImageToCGImage(inputImage: ciimg) {
capturedImage = UIImage(cgImage: cgImage).rotate(radians: .pi / 2)
self.isShowingMail = true
}
}
} label: {
Image("ShareScreen")
.resizable()
.aspectRatio(contentMode:.fit)
.frame(width: 66, height: 66, alignment: .center)
}
This takes the currentFrame from the session and opens a mail-sharing modal with the image attached:
.sheet(isPresented: $isShowingMail) {
MailComposeViewController(toRecipients: [], mailBody: nil, imageAttachment: capturedImage) {
self.isShowingMail = false
}
}
The mail sharing:
func makeUIViewController(context: UIViewControllerRepresentableContext<MailComposeViewController>) -> MFMailComposeViewController {
let mail = MFMailComposeViewController()
mail.mailComposeDelegate = context.coordinator
mail.setToRecipients(self.toRecipients)
if let body = mailBody {
mail.setMessageBody(body, isHTML: true)
}
if let image = imageAttachment {
if let imageData = image.pngData() {
mail.addAttachmentData(imageData, mimeType: "image/png", fileName: "image.png")
}
}
return mail
}
The problem is that the model entities are present in the on-screen preview, but when I press share, the model is missing from the frame shown in the mail preview (screenshots omitted).
I managed to make it work by moving arView: ARView! outside the ARViewContainer
var arView: ARView!
struct ARViewContainer: UIViewRepresentable {
func makeUIView(context: Context) -> ARView {
arView = ARView(frame: .zero)
let config = ARWorldTrackingConfiguration()
config.planeDetection = [.vertical, .horizontal]
config.environmentTexturing = .automatic
if ARWorldTrackingConfiguration.supportsSceneReconstruction(.mesh) {
config.sceneReconstruction = .mesh
}
arView.session.delegate = context.coordinator
arView.session.run(config)
return arView
}
}
And then calling the snapshot function on arView in the other view:
Button {
arView.snapshot(saveToHDR: false) { image in
let image = UIImage(data: (image?.pngData())!)
capturedImage = image!
self.isShowingMail = true
}
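Two notes on this. First, the original behaviour is expected: ARFrame.capturedImage is the raw camera pixel buffer, before RealityKit composites any entities, so an attachment built from it can never contain the models, while arView.snapshot(saveToHDR:) captures the rendered output. Second, if the global var arView: ARView! feels fragile, one alternative is to store the view on the observable object that is already injected; a rough sketch, where the arView property is an assumed addition to the poster's ARSessionObservable:
import SwiftUI
import ARKit
import RealityKit

final class ARSessionObservable: ObservableObject {
    var session = ARSession()
    // Assumed addition: set from makeUIView alongside the session.
    weak var arView: ARView?
}

// In ARViewContainer.makeUIView(context:):
//     arSession.session = arView.session
//     arSession.arView = arView
//
// In the button action:
//     arSession.arView?.snapshot(saveToHDR: false) { image in
//         if let image = image {
//             capturedImage = image
//             isShowingMail = true
//         }
//     }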

How to use realtime camera streaming in SwiftUI?

I built a StreamingView like this:
struct StreamingView: UIViewRepresentable {
func updateUIView(_ uiView: UIView, context: UIViewRepresentableContext<StreamingView>) {
//
}
func makeUIView(context: UIViewRepresentableContext<StreamingView>) -> UIView {
let view = UIView()
let captureSession = AVCaptureSession()
captureSession.sessionPreset = .photo
guard let captureDevice = AVCaptureDevice.default(for: .video) else { return view}
guard let input = try? AVCaptureDeviceInput(device: captureDevice) else { return view}
captureSession.addInput(input)
captureSession.startRunning()
let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
view.layer.addSublayer(previewLayer)
previewLayer.frame = view.frame
return view
}
}
but it didn't work. How could I build a pure SwiftUI view for streaming?
Try the demo code below.
Note: make sure all preparations are done (Camera enabled in capabilities, NSCameraUsageDescription added to Info.plist, and so on), and note that the camera can only be tested on a real device.
import SwiftUI
import UIKit
import AVFoundation
class PreviewView: UIView {
private var captureSession: AVCaptureSession?
init() {
super.init(frame: .zero)
var allowedAccess = false
let blocker = DispatchGroup()
blocker.enter()
AVCaptureDevice.requestAccess(for: .video) { flag in
allowedAccess = flag
blocker.leave()
}
blocker.wait()
if !allowedAccess {
print("!!! NO ACCESS TO CAMERA")
return
}
// setup session
let session = AVCaptureSession()
session.beginConfiguration()
let videoDevice = AVCaptureDevice.default(.builtInWideAngleCamera,
for: .video, position: .unspecified) //alternate AVCaptureDevice.default(for: .video)
guard videoDevice != nil, let videoDeviceInput = try? AVCaptureDeviceInput(device: videoDevice!), session.canAddInput(videoDeviceInput) else {
print("!!! NO CAMERA DETECTED")
return
}
session.addInput(videoDeviceInput)
session.commitConfiguration()
self.captureSession = session
}
override class var layerClass: AnyClass {
AVCaptureVideoPreviewLayer.self
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
var videoPreviewLayer: AVCaptureVideoPreviewLayer {
return layer as! AVCaptureVideoPreviewLayer
}
override func didMoveToSuperview() {
super.didMoveToSuperview()
if nil != self.superview {
self.videoPreviewLayer.session = self.captureSession
self.videoPreviewLayer.videoGravity = .resizeAspect
self.captureSession?.startRunning()
} else {
self.captureSession?.stopRunning()
}
}
}
struct PreviewHolder: UIViewRepresentable {
func makeUIView(context: UIViewRepresentableContext<PreviewHolder>) -> PreviewView {
PreviewView()
}
func updateUIView(_ uiView: PreviewView, context: UIViewRepresentableContext<PreviewHolder>) {
}
typealias UIViewType = PreviewView
}
struct DemoVideoStreaming: View {
var body: some View {
VStack {
PreviewHolder()
}.frame(minWidth: 0, maxWidth: .infinity, minHeight: 0, maxHeight: .infinity, alignment: .center)
}
}
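The key trick in this demo is overriding layerClass so that AVCaptureVideoPreviewLayer is the view's backing layer and always matches its bounds, which is exactly what the question's one-time previewLayer.frame assignment was missing. One caveat: startRunning() is a blocking call, and Apple recommends keeping it off the main thread, so a small adjustment to didMoveToSuperview may be worthwhile:
override func didMoveToSuperview() {
    super.didMoveToSuperview()
    if self.superview != nil {
        self.videoPreviewLayer.session = self.captureSession
        self.videoPreviewLayer.videoGravity = .resizeAspect
        // startRunning() blocks until the session starts (or fails),
        // so dispatch it off the main thread.
        DispatchQueue.global(qos: .userInitiated).async { [weak self] in
            self?.captureSession?.startRunning()
        }
    } else {
        self.captureSession?.stopRunning()
    }
}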

Can I get 3D models from web servers in Swift?

I'm working on an application with ARKit. There are many 3D models in my app and they are large. Can I fetch these models from another server (an outside site)? I'm new to Swift, and I can't seem to find anything on loading a 3D model from a web server.
Is it enough to change the model path there? Thank you.
func loadModel() {
guard let virtualObjectScene = SCNScene(named: "\(modelName).\(fileExtension)", inDirectory: "Models.scnassets/\(modelName)") else {
return
}
let wrapperNode = SCNNode()
for child in virtualObjectScene.rootNode.childNodes {
let defaults = UserDefaults.standard
wrapperNode.addChildNode(child)
}
self.addChildNode(wrapperNode)
}
All code:
import UIKit
import SceneKit
import ARKit
class VirtualObject: SCNNode {
var modelName: String = ""
var fileExtension: String = ""
var thumbImage: UIImage!
var title: String = ""
var viewController: ViewController?
override init() {
super.init()
self.name = "Virtual object root node"
}
init(modelName: String, fileExtension: String, thumbImageFilename: String, title: String) {
super.init()
self.name = "Virtual object root node"
self.modelName = modelName
self.fileExtension = fileExtension
self.thumbImage = UIImage(named: thumbImageFilename)
self.title = title
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
func loadModel() {
guard let virtualObjectScene = SCNScene(named: "\(modelName).\(fileExtension)", inDirectory: "Models.scnassets/\(modelName)") else {
return
}
let wrapperNode = SCNNode()
for child in virtualObjectScene.rootNode.childNodes {
let defaults = UserDefaults.standard
wrapperNode.addChildNode(child)
}
self.addChildNode(wrapperNode)
}
func unloadModel() {
self.removeFromParentNode()
for child in self.childNodes {
child.removeFromParentNode()
}
}
func translateBasedOnScreenPos(_ pos: CGPoint, instantly: Bool, infinitePlane: Bool) {
guard let controller = viewController else {
return
}
let result = controller.worldPositionFromScreenPosition(pos, objectPos: self.position, infinitePlane: infinitePlane)
controller.moveVirtualObjectToPosition(result.position, instantly, !result.hitAPlane)
}
}
extension VirtualObject {
static func isNodePartOfVirtualObject(_ node: SCNNode) -> Bool {
if node.name == "Virtual object root node" {
return true
}
if node.parent != nil {
return isNodePartOfVirtualObject(node.parent!)
}
return false
}
static let availableObjects: [VirtualObject] = [
Anatomy()
]
}
You can load an .scn file from a web server by IP address like this (I used a fake IP below):
let myURL = URL(string: "http://110.151.153.202:80/scnfiles/myfile.scn")
let scene = try! SCNScene(url: myURL!, options: nil)
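Note that SCNScene(url:options:) loads synchronously, so a large model fetched this way will stall the UI. For big files it may be safer to download first and then load from disk; a hedged sketch using URLSession (the URL handling mirrors the fake-IP example above):
func loadScene(from url: URL, completion: @escaping (SCNScene?) -> Void) {
    // Download to a temporary file, then load the scene off the main thread.
    URLSession.shared.downloadTask(with: url) { tempURL, _, error in
        guard let tempURL = tempURL, error == nil else {
            DispatchQueue.main.async { completion(nil) }
            return
        }
        // Restore the .scn extension; SceneKit uses it to pick the right loader.
        let dest = FileManager.default.temporaryDirectory
            .appendingPathComponent(UUID().uuidString)
            .appendingPathExtension("scn")
        try? FileManager.default.moveItem(at: tempURL, to: dest)
        let scene = try? SCNScene(url: dest, options: nil)
        DispatchQueue.main.async { completion(scene) }
    }.resume()
}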
Edit:
Here's a simple Swift Playground that pulls a test cube .scn file from my GitHub repo. You just tap anywhere and the cube loads.
import ARKit
import SceneKit
import PlaygroundSupport
class ViewController: NSObject {
var sceneView: ARSCNView
init(sceneView: ARSCNView) {
self.sceneView = sceneView
super.init()
self.setupWorldTracking()
self.sceneView.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(ViewController.handleTap(_:))))
}
private func setupWorldTracking() {
if ARWorldTrackingConfiguration.isSupported {
let configuration = ARWorldTrackingConfiguration()
configuration.planeDetection = .horizontal
configuration.isLightEstimationEnabled = true
self.sceneView.session.run(configuration, options: [])
}
}
@objc func handleTap(_ gesture: UITapGestureRecognizer) {
let results = self.sceneView.hitTest(gesture.location(in: gesture.view), types: ARHitTestResult.ResultType.featurePoint)
guard let result: ARHitTestResult = results.first else {
return
}
// pulls cube.scn from github repo
let myURL = NSURL(string: "https://raw.githubusercontent.com/wave-electron/scnFile/master/cube.scn")
let scene = try! SCNScene(url: myURL! as URL, options: nil)
let node = scene.rootNode.childNode(withName: "SketchUp", recursively: true)
node?.scale = SCNVector3(0.01,0.01,0.01)
let position = SCNVector3Make(result.worldTransform.columns.3.x, result.worldTransform.columns.3.y, result.worldTransform.columns.3.z)
node?.position = position
self.sceneView.scene.rootNode.addChildNode(node!)
}
}
let sceneView = ARSCNView()
let viewController = ViewController(sceneView: sceneView)
sceneView.autoenablesDefaultLighting = true
PlaygroundPage.current.needsIndefiniteExecution = true
PlaygroundPage.current.liveView = viewController.sceneView
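One caveat for newer deployments: the feature-point hitTest(_:types:) API used in the tap handler was deprecated in iOS 14 in favour of raycasting. A hedged sketch of the equivalent lookup:
@objc func handleTap(_ gesture: UITapGestureRecognizer) {
    let point = gesture.location(in: gesture.view)
    // Raycast replaces ARSCNView.hitTest(_:types:) on iOS 14+.
    guard let query = sceneView.raycastQuery(from: point,
                                             allowing: .estimatedPlane,
                                             alignment: .any),
          let result = sceneView.session.raycast(query).first else { return }
    let column = result.worldTransform.columns.3
    let position = SCNVector3(column.x, column.y, column.z)
    // ... load the remote scene and place the node at `position` as above ...
}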