I'm using AVFoundation's AVAudioEngine with SwiftUI in a Swift Playground, with MVVM as the architecture. But if I separate my structs into different files, I receive two errors: "Execution was interrupted, reason: signal SIGABRT." and "Cannot find 'AudioEngine' in scope." The only way everything works is keeping them all in the same file.
Here is the playground base code, where I call MusicCreatorView.
import SwiftUI
import PlaygroundSupport

public struct StartView: View {
    public var body: some View {
        ZStack {
            Rectangle()
                .fill(Color.white)
                .frame(width: 400, height: 400, alignment: .center)
            NavigationView {
                VStack {
                    NavigationLink("Start", destination: MusicCreatorView())
                }
            }
        }
    }
}
PlaygroundPage.current.setLiveView(StartView()) // error: Execution was interrupted, reason: signal SIGABRT.
AudioEngine can be found by StartView (if it is called from StartView, the SIGABRT error just moves from PlaygroundPage.current.setLiveView() to where it is called), but it can't be found by MusicCreatorView.
import Foundation
import AVFoundation

public struct AudioEngine {
    public var engine = AVAudioEngine()
    public var player = AVAudioPlayerNode()
    public var audioBuffer: AVAudioPCMBuffer?
    public var audioFormat: AVAudioFormat?

    public var audioFile: AVAudioFile? {
        didSet {
            if let audioFile = audioFile {
                audioFormat = audioFile.fileFormat
            }
        }
    }

    public var audioFileURL: URL? {
        didSet {
            if let audioFileURL = audioFileURL {
                audioFile = try? AVAudioFile(forReading: audioFileURL)
            }
        }
    }

    public init() {
        setupAudio()
    }

    public mutating func setupAudio() {
        audioFileURL = Bundle.main.url(forResource: "EverybodysCirculation", withExtension: "mp3")
        guard let format = audioFormat else { return }
        engine.attach(player)
        engine.connect(player, to: engine.mainMixerNode, format: format)
        engine.prepare()
        do {
            try engine.start()
        } catch {
            print(error.localizedDescription)
        }
    }

    public func scheduleAudioFile() {
        guard let audioFile = audioFile else { return }
        player.scheduleFile(audioFile, at: nil, completionHandler: nil)
    }

    public func playSound() {
        player.isPlaying ? player.pause() : player.play()
    }
}
Here is how I try to call AudioEngine from MusicCreatorView:
import Foundation
import SwiftUI
import AVFoundation

public struct MusicCreatorView: View {
    var audioEngine = AudioEngine() // Cannot find 'AudioEngine' in scope

    public init() {}

    public var body: some View {
        Text("Try to Create your own music")
        Button("play") {
            print("apertou")
            audioEngine.playSound() // audioEngine <<error type>>
        }
    }
}
Here is how my files are organized
https://i.stack.imgur.com/losWf.png
Multiple files in Sources cannot see each other. They are just independent libraries available to the actual playground. You should be developing this in a real iOS project, rather than a playground.
You just need to explicitly declare everything as public. It's annoying, and sometimes I need to close and reopen the playground for the scoping to take effect after marking things public.
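For reference, here is a minimal sketch of what that looks like, assuming a hypothetical file Sources/Counter.swift: the type, its initializer, and every member used from the playground page all have to be public.

import Foundation

// Hypothetical Sources/Counter.swift: everything the playground page touches is marked public.
public struct Counter {
    public private(set) var value = 0

    // Without an explicit public initializer, the playground page cannot construct the type.
    public init() {}

    public mutating func increment() {
        value += 1
    }
}

On the playground page itself, var counter = Counter() followed by counter.increment() should then compile once the sources have been rebuilt.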
Following up on the question "If I assign a sound in Reality Composer, can I stop it programmatically in RealityKit?", I would like to use a method to resume playback after Play Music.
Can I do that?
Right now, I use this command in the stopAudio() function to stop the music.
func stopAudio() {
    if arView.scene.anchors.count > 0 {
        if arView.scene.anchors[0].isAnchored {
            arView.scene.anchors[0].children[0].stopAllAudio()
        }
    }
}
If I want arView.scene.anchors[0] to replay the music again, which command should I use?
Audio Playback Controller
Since RealityKit 2.0 isn't able to control the parameters of Reality Composer's behaviors, the best strategy for controlling audio is to create a programmatic AudioPlaybackController. To feed your audio file to the controller, export the .rcproject scene to .usdz format and use the unzipping trick to extract the .aiff, .caf, or .mp3 sound file. When loading audio for playback, you can choose between a spatial and a non-spatial audio experience.
UIKit version
import UIKit
import RealityKit

extension ViewController {

    private func loadAudio() {
        do {
            let resource = try AudioFileResource.load(
                named: "planetarium07.caf",
                in: nil,
                inputMode: .spatial,
                loadingStrategy: .preload,
                shouldLoop: true)

            self.controller = entity.prepareAudio(resource)
            self.controller?.speed = 0.9
            self.controller?.fade(to: .infinity, duration: 2)
        } catch {
            print(error.localizedDescription)
        }
    }
}
ViewController.
class ViewController: UIViewController {

    @IBOutlet var uiView: UIView!   // when using @IBAction buttons
    @IBOutlet var arView: ARView!

    private var entity = Entity()
    private var controller: AudioPlaybackController? = nil

    override func viewDidLoad() {
        super.viewDidLoad()
        uiView.backgroundColor = .systemCyan

        let boxScene = try! Experience.loadBox()
        arView.scene.anchors.append(boxScene)
        let anchor = boxScene.anchor
        anchor?.addChild(entity)

        self.loadAudio()
    }

    @IBAction func playMusic(_ sender: UIButton) {
        self.controller?.play()
    }

    @IBAction func stopMusic(_ sender: UIButton) {
        self.controller?.pause()
        // self.controller?.stop()
    }
}
SwiftUI version
import SwiftUI
import RealityKit

struct ContentView: View {

    @State var arView = ARView(frame: .zero)
    @State var controller: AudioPlaybackController? = nil
    @State var entity = Entity()

    var body: some View {
        ZStack {
            ARViewContainer(arView: $arView,
                            entity: $entity).ignoresSafeArea()
            VStack {
                Spacer()
                Button("Play") { loadSound(); controller?.play() }
                Button("Stop") { controller?.stop() }
            }
        }
    }

    func loadSound() {
        do {
            let resource = try AudioFileResource.load(
                named: "planetarium07.caf",
                in: nil,
                inputMode: .spatial,
                loadingStrategy: .preload,
                shouldLoop: true)

            self.controller = entity.prepareAudio(resource)
        } catch {
            print(error.localizedDescription)
        }
    }
}
ARViewContainer.
struct ARViewContainer: UIViewRepresentable {

    @Binding var arView: ARView
    @Binding var entity: Entity

    func makeUIView(context: Context) -> ARView {
        let boxScene = try! Experience.loadBox()
        arView.scene.anchors.append(boxScene)
        let anchor = boxScene.anchor
        anchor?.addChild(entity)
        return arView
    }

    func updateUIView(_ view: ARView, context: Context) { }
}
I've created a media app that plays music via an HTTP live stream in SwiftUI. The code (snippet) looks like the following:
struct PlayerView: View {
    @State var audioPlayer: AVPlayer!
    @State var buttonSymbol: String = "pause.circle.fill"
    let url: URL!

    var body: some View {
        VStack {
            Text("Sound")
            HStack {
                Button(action: {
                    if self.audioPlayer.rate != 0.0 {
                        self.buttonSymbol = "play.circle.fill"
                        self.audioPlayer.pause()
                    } else {
                        self.buttonSymbol = "pause.circle.fill"
                        self.audioPlayer.play()
                    }
                }) {
                    Image(systemName: self.buttonSymbol)
                }
                .buttonStyle(PlainButtonStyle())
            }
        }
        .onAppear {
            let audioSession = AVAudioSession.sharedInstance()
            do {
                try audioSession.setCategory(AVAudioSession.Category.playback, mode: .default, policy: AVAudioSession.RouteSharingPolicy.longFormAudio, options: [])
            } catch let error {
                fatalError("*** Unable to set up the audio session: \(error.localizedDescription)")
            }
            let item = AVPlayerItem(url: self.url)
            self.audioPlayer = AVPlayer(playerItem: item)
            audioSession.activate(options: []) { (success, error) in
                guard error == nil else {
                    fatalError("An error occurred: \(error!.localizedDescription)")
                }
                self.audioPlayer.play()
            }
        }
    }
}
What I don't understand is how to receive the underlying AVPlayer/AVPlayerItem errors (for instance, when the HTTP stream is interrupted) in SwiftUI in order to show them in the UI. Which SwiftUI mechanism do I have to use, and how?
According to the documentation, AVPlayerItem posts a notification called AVPlayerItemFailedToPlayToEndTime.
One possible implementation in a view model would be:
import Combine
....

playerItemFailedToPlayToEndTimeObserver = NotificationCenter
    .default
    .publisher(for: NSNotification.Name.AVPlayerItemFailedToPlayToEndTime)
    .sink(receiveValue: { [weak self] notification in
        // handle notification here
    })
You need to hold a reference to playerItemFailedToPlayToEndTimeObserver; otherwise the subscription is deallocated as soon as the method that created it returns.
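For context, here is a minimal sketch of such a view model, assuming a hypothetical PlayerViewModel that owns the AVPlayer and publishes an errorMessage for the view to display (the names and the @Published property are illustrative, not part of the original code):

import AVFoundation
import Combine

// Hypothetical view model: owns the player and surfaces playback failures to SwiftUI.
final class PlayerViewModel: ObservableObject {

    @Published var errorMessage: String?

    let player: AVPlayer
    private var playerItemFailedToPlayToEndTimeObserver: AnyCancellable?

    init(url: URL) {
        let item = AVPlayerItem(url: url)
        player = AVPlayer(playerItem: item)

        // Stored as a property so the subscription stays alive for the view model's lifetime.
        playerItemFailedToPlayToEndTimeObserver = NotificationCenter.default
            .publisher(for: .AVPlayerItemFailedToPlayToEndTime, object: item)
            .receive(on: DispatchQueue.main)
            .sink { [weak self] notification in
                let error = notification.userInfo?[AVPlayerItemFailedToPlayToEndTimeErrorKey] as? Error
                self?.errorMessage = error?.localizedDescription ?? "Playback failed."
            }
    }
}

The view could then hold the view model (for example with @ObservedObject) and show errorMessage in an alert or a Text when it becomes non-nil.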
I have been trying to create an application in SwiftUI. The application runs properly, but as soon as I run it in the simulator I get a message in my console stating:
2022-10-21 17:18:40.571733+0530 H4XOR News[87575:1413313] [SwiftUI] Publishing changes from background threads is not allowed; make sure to publish values from the main thread (via operators like receive(on:)) on model updates.
Here's what my code looks like.
NetworkManager.swift:
import Foundation

class NetworkManager: ObservableObject {

    @Published var posts = [Post]()

    func fetchData() {
        if let url = URL(string: "https://hn.algolia.com/api/v1/search?tags=front_page") {
            let session = URLSession(configuration: .default)
            let task = session.dataTask(with: url) { (data, response, error) in
                if error == nil {
                    let decoder = JSONDecoder()
                    if let safeData = data {
                        do {
                            let results = try decoder.decode(Results.self, from: safeData)
                            self.posts = results.hits
                        } catch {
                            print(error)
                        }
                    }
                }
            }
            task.resume()
        }
    }
}
PostData.swift
import Foundation

struct Results: Decodable {
    let hits: [Post]
}

struct Post: Decodable, Identifiable {
    var id: String {
        return objectID
    }
    let objectID: String
    let points: Int
    let title: String
    let url: String?
}
ContentView.swift
import SwiftUI

struct ContentView: View {

    @ObservedObject var networkManager = NetworkManager()

    var body: some View {
        NavigationView {
            List(networkManager.posts) { post in
                Text(post.title)
            }
            .navigationBarTitle("H4XOR NEWS")
        }
        .onAppear {
            self.networkManager.fetchData()
        }
    }
}

struct ContentView_Previews: PreviewProvider {
    static var previews: some View {
        ContentView()
    }
}
Can anyone tell me why I am getting this message and how to resolve it?
As a side-effect of assigning your posts property in self.posts = results.hits, the NetworkManager is sending an event on its objectWillChange publisher (which is auto-generated as part of the ObservableObject protocol). This happens on the same thread as the caller, and SwiftUI only wants this to happen on the main thread.
The completion handler of the data task is called on a background thread. And since you assign to posts on this thread, SwiftUI complains.
Simply dispatch to the main queue before assigning to this property:
DispatchQueue.main.async {
    self.posts = results.hits
}
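Alternatively, since the warning itself points at receive(on:), the same fetch can be expressed with Combine's dataTaskPublisher, which hops delivery onto the main thread declaratively. This is only a sketch under that assumption; it reuses the Results and Post types from PostData.swift:

import Combine
import Foundation

class NetworkManager: ObservableObject {

    @Published var posts = [Post]()

    func fetchData() {
        guard let url = URL(string: "https://hn.algolia.com/api/v1/search?tags=front_page") else { return }

        URLSession.shared.dataTaskPublisher(for: url)
            .map(\.data)
            .decode(type: Results.self, decoder: JSONDecoder())
            .map(\.hits)
            .replaceError(with: [])
            .receive(on: DispatchQueue.main)   // publish on the main thread, as SwiftUI expects
            .assign(to: &$posts)               // iOS 14+: ties the subscription to the @Published property
    }
}

The assign(to:) overload keeps the subscription alive for the lifetime of the @Published property, so no separate AnyCancellable needs to be stored.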
I am making a music player in SwiftUI. Several components depend on an instance of the player (audioManager.player), so to avoid setting the same value constantly I wrapped the playback part in a conditional (if let player = audioManager.player) based on whether or not a player instance exists. But when I do this, the content strangely disappears, as if the player instance did not exist or were nil. I tried validating just one part and realized that everything works correctly, but I don't know why the content is not displayed on the canvas or in the simulator. I would appreciate a suggestion or solution; everything is welcome. ;)
import Foundation
import AVKit

final class AudioManager: ObservableObject {

    var player: AVAudioPlayer?

    func startPlayer(track: String, isPreview: Bool = false) {
        guard let url = Bundle.main.url(forResource: track, withExtension: "mp3") else {
            print("Resource not found")
            return
        }

        do {
            try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default)
            try AVAudioSession.sharedInstance().setActive(true)

            player = try AVAudioPlayer(contentsOf: url)

            if isPreview {
                player?.prepareToPlay()
            } else {
                player?.play()
            }
        } catch {
            print("Fail to initialize", error)
        }
    }
}
import SwiftUI

struct PlayerView: View {

    @EnvironmentObject var audioManager: AudioManager
    @State private var value: Double = 0.0
    @State private var isEditing: Bool = false
    var isPreview: Bool = false

    let timer = Timer
        .publish(every: 0.5, on: .main, in: .common)
        .autoconnect()

    var body: some View {
        ZStack {
            VStack(alignment: .center, spacing: 50) {
                VStack(spacing: 10) {
                    // MARK: ** THE ERROR IS HERE **
                    if let player = audioManager.player {
                        Slider(value: $value, in: 0...player.duration)
                            .foregroundColor(.white)
                            .padding(.top)
                    }
                } // VStack
                .padding(.horizontal, 30)
            } // VStack
            .foregroundColor(.white)
        }
        .onAppear {
            audioManager.startPlayer(track: "meditation1", isPreview: isPreview)
        }
        .onReceive(timer) { _ in
            guard let player = audioManager.player, !isEditing else { return }
            value = player.currentTime
        }
    }
}
struct PlayerView_Previews: PreviewProvider {
    static var previews: some View {
        Group {
            PlayerView(isPreview: true)
                .environmentObject(AudioManager())
        }
    }
}
I'm trying to check for the internet connection in my app, and currently, I have this code:
private let monitor: NWPathMonitor

monitor.pathUpdateHandler = { [weak self] path in
    print(path.status)
    self?.isConnected = path.status == .satisfied
}
However, this does not work. Specifically, the print does not print out the value in the debug console.
Could you please tell me what I have done wrong?
Thank you.
Here is my (SwiftUI, sorry) test code that works for me.
You can probably recycle some of the code for your purpose.
import SwiftUI
import Foundation
import Network

@main
struct TestApp: App {
    var body: some Scene {
        WindowGroup {
            ContentView()
        }
    }
}

struct ContentView: View {
    let monitor = NWPathMonitor()

    var body: some View {
        Text("testing")
            .onAppear {
                let queue = DispatchQueue(label: "monitoring")
                monitor.start(queue: queue)
                monitor.pathUpdateHandler = { path in
                    print(path.status)
                    if path.status == .satisfied {
                        print("-----> YES connected")
                    } else {
                        print("-----> NOT connected")
                    }
                }
            }
    }
}
You can also use a specific monitor, such as let monitor = NWPathMonitor(requiredInterfaceType: .wifi).
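If you want the isConnected flag from the original snippet, one way to adapt this is a small ObservableObject wrapper; the key point, as in the test code above, is that the monitor must be started with start(queue:) or pathUpdateHandler is never invoked. This is only a sketch, and the class and property names are illustrative:

import Foundation
import Combine
import Network

// Hypothetical wrapper: starts the monitor and publishes connectivity changes on the main thread.
final class NetworkMonitor: ObservableObject {

    @Published private(set) var isConnected = false

    private let monitor = NWPathMonitor()
    private let queue = DispatchQueue(label: "monitoring")

    init() {
        monitor.pathUpdateHandler = { [weak self] path in
            print(path.status)
            DispatchQueue.main.async {
                self?.isConnected = (path.status == .satisfied)
            }
        }
        // Without this call the handler never fires, which matches the symptom in the question.
        monitor.start(queue: queue)
    }

    deinit {
        monitor.cancel()
    }
}

A view could then hold this wrapper with @StateObject and read isConnected directly.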