AudioKit Conflict between MIDI Instrument and Mic behavior - swift

I am trying to make my app produce MIDI notes while simultaneously listening to input from the mic:
var engine = AudioEngine()
var initialDevice: Device!
var mic: AudioEngine.InputNode!
var tappableNodeA: Fader!
var tappableNodeB: Fader!
var tappableNodeC: Fader!
var silence: Fader!
var tracker: PitchTap!
private var instrument = MIDISampler(name: "Instrument 1")
func noteOn(note: MIDINoteNumber) {
    instrument.play(noteNumber: note, velocity: 90, channel: 0)
}

func noteOff(note: MIDINoteNumber) {
    instrument.stop(noteNumber: note, channel: 0)
}
override func viewDidLoad() {
    super.viewDidLoad()
    print("init started ")

    guard let input = engine.input else { fatalError() }
    guard let device = engine.inputDevice else { fatalError() }
    print("input selected")
    initialDevice = device

    engine.output = instrument

    mic = input
    tappableNodeA = Fader(mic)
    tappableNodeB = Fader(tappableNodeA)
    tappableNodeC = Fader(tappableNodeB)
    silence = Fader(tappableNodeC, gain: 0)
    engine.output = silence
    print("objects init")

    tracker = PitchTap(mic) { pitch, amp in
        DispatchQueue.main.async {
            self.update(pitch[0], amp[0])
        }
    }

    start()
    // other init that is not related
}
The start function is written below:
func start() {
    do {
        if let fileURL = Bundle.main.url(forResource: "Sounds/Sampler Instruments/sawPiano1", withExtension: "exs") {
            try instrument.loadInstrument(url: fileURL)
        } else {
            Log("Could not find file")
        }
    } catch {
        Log("Could not load instrument")
    }
    do {
        try engine.start()
        tracker.start()
    } catch let err {
        print("caught error at start")
        Log(err)
    }
}
As soon as I make the first try call to set up the instrument, I get the following error:
*** Terminating app due to uncaught exception 'com.apple.coreaudio.avfaudio', reason: 'required condition is false: _engine != nil
Why would the condition be false?

OK, so the solution was to split the calls into two functions and to move the instrument-loading call before the tap-node configuration. Presumably the sampler has to be attached to the engine (via engine.output = instrument) before loadInstrument is called; otherwise its underlying audio unit has no engine yet, which is what the exception complains about:
var engine = AudioEngine()
var initialDevice: Device!
var mic: AudioEngine.InputNode!
var tappableNodeA: Fader!
var tappableNodeB: Fader!
var tappableNodeC: Fader!
var silence: Fader!
var tracker: PitchTap!
private var instrument = MIDISampler(name: "Instrument 1")
func noteOn(note: MIDINoteNumber) {
    instrument.play(noteNumber: note, velocity: 90, channel: 0)
}

func noteOff(note: MIDINoteNumber) {
    instrument.stop(noteNumber: note, channel: 0)
}
override func viewDidLoad() {
    super.viewDidLoad()
    print("init started ")

    guard let input = engine.input else { fatalError() }
    guard let device = engine.inputDevice else { fatalError() }
    print("input selected")
    initialDevice = device

    engine.output = instrument
    start1()

    mic = input
    tappableNodeA = Fader(mic)
    tappableNodeB = Fader(tappableNodeA)
    tappableNodeC = Fader(tappableNodeB)
    silence = Fader(tappableNodeC, gain: 0)
    engine.output = silence
    print("objects init")

    tracker = PitchTap(mic) { pitch, amp in
        DispatchQueue.main.async {
            self.update(pitch[0], amp[0])
        }
    }

    start()
    // other init that is not related
}
func start1() {
    do {
        if let fileURL = Bundle.main.url(forResource: "Sounds/Sampler Instruments/sawPiano1", withExtension: "exs") {
            try instrument.loadInstrument(url: fileURL)
        } else {
            Log("Could not find file")
        }
    } catch let err {
        Log("Could not load instrument")
        Log(err)
    }
}

func start() {
    do {
        try engine.start()
        tracker.start()
    } catch let err {
        print("caught error at start")
        Log(err)
    }
}
Although the exception is now gone, there is still no sound being played for some reason.
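One likely cause of the silence: the second assignment engine.output = silence replaces engine.output = instrument, so the sampler is no longer connected to the output at all (and the mic chain deliberately ends in a zero-gain Fader). A minimal sketch of one way to keep both paths alive, assuming AudioKit 5's Mixer node (untested):

mic = input
tappableNodeA = Fader(mic)
tappableNodeB = Fader(tappableNodeA)
tappableNodeC = Fader(tappableNodeB)
silence = Fader(tappableNodeC, gain: 0) // mic is analyzed by PitchTap but not heard

// Mix the silenced mic chain with the instrument so the sampler stays audible.
engine.output = Mixer(instrument, silence)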

Related

SwiftUI: stop speech recognition recording session upon navigating in TabView

A recording session for speech recognition starts upon opening and navigating to a tab in the TabView, but it also needs to keep recording until either 'back' or 'next' is recognised, after which it can stop the session. If those words aren't recognised and the user navigates within the TabView, the session isn't stopped correctly and throws an error, which then prevents a new recording session from being started.
Error thrown:
[Utility] +[AFAggregator logDictationFailedWithError:] Error Domain=kAFAssistantErrorDomain Code=203 "Corrupt" UserInfo={NSLocalizedDescription=Corrupt, NSUnderlyingError=0x281281aa0 {Error Domain=SiriSpeechErrorDomain Code=102 "(null)"}}
I have tried implementing stopRecording() in the TabView's selection set method, right before a session is started (startRecording()), but that does not seem to work. Is there a way to stop the recording session upon navigating through the TabView? I want to eventually be able to navigate through the tabs using voice.
Content view:
struct ContentView: View {
    @State private var selectedTab = 1
    static let voiceRecogniser = VoiceRecogniser()

    var body: some View {
        VStack {
            TabView(
                selection: Binding(
                    get: { selectedTab },
                    set: {
                        selectedTab = $0
                        ContentView.voiceRecogniser.startRecording()
                    })
            ) {
                Text("Tab 1")
                    .tag(1)
                Text("Tab 2")
                    .tag(2)
                Text("Tab 3")
                    .tag(3)
            }
            .tabViewStyle(PageTabViewStyle())
        }
        .onAppear {
            ContentView.voiceRecogniser.startRecording()
        }
    }
}
VoiceRecogniser class:
class VoiceRecogniser {
    private let speechRecogniser = SFSpeechRecognizer(locale: Locale(identifier: "en-GB"))!
    private var recognitionRequest: SFSpeechAudioBufferRecognitionRequest?
    private var recognitionTask: SFSpeechRecognitionTask?
    private let audioEngine = AVAudioEngine()

    func startRecording() {
        guard speechRecogniser.isAvailable else {
            return
        }
        guard SFSpeechRecognizer.authorizationStatus() == .authorized else {
            SFSpeechRecognizer.requestAuthorization({ (status) in
            })
            return
        }

        recognitionTask?.cancel()
        self.recognitionTask = nil

        let audioSession = AVAudioSession.sharedInstance()
        do {
            try audioSession.setCategory(AVAudioSession.Category.playAndRecord, mode: .measurement, options: .duckOthers) // Ensure session can play audio as well as record
            try audioSession.setMode(AVAudioSession.Mode.measurement)
            try audioSession.setActive(true, options: .notifyOthersOnDeactivation)
        } catch {
            print("Error with audio session")
        }

        let inputNode = audioEngine.inputNode
        recognitionRequest = SFSpeechAudioBufferRecognitionRequest()
        guard let recognitionRequest = recognitionRequest else { fatalError("Unable to create a SFSpeechAudioBufferRecognitionRequest object") }
        recognitionRequest.shouldReportPartialResults = true

        self.recognitionTask = speechRecogniser.recognitionTask(with: recognitionRequest) { result, error in
            var isFinal = false
            if let result = result {
                let spokenText = result.bestTranscription.formattedString
                let voiceCommands = ["Next", "Back"]
                let string = spokenText.lowercased()
                for command in voiceCommands {
                    if (string.contains(command.lowercased())) {
                        switch command {
                        case "Next":
                            print("Go next")
                        case "Back":
                            print("Go back")
                        default:
                            print("Default")
                        }
                        isFinal = true // stop listening once a voice command was recognised
                    }
                }
                //isFinal = true // stop listening after saying anything
            }
            if error != nil || isFinal {
                self.stopRecording()
            }
        }

        let recordingFormat = inputNode.outputFormat(forBus: 0)
        inputNode.removeTap(onBus: 0)
        inputNode.installTap(onBus: 0, bufferSize: 1024, format: recordingFormat) { (buffer: AVAudioPCMBuffer, when: AVAudioTime) in
            self.recognitionRequest?.append(buffer)
        }

        audioEngine.prepare()
        do {
            try audioEngine.start()
        } catch {
            print("Error with starting audio engine")
        }
    }

    func stopRecording() {
        self.audioEngine.stop()
        self.recognitionRequest?.endAudio()
        self.recognitionRequest = nil
        self.recognitionTask = nil
        self.recognitionTask?.cancel()
    }
}
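One thing that stands out in stopRecording(): recognitionTask is set to nil before cancel() is called on it, so the in-flight task is never actually cancelled. A minimal sketch of a stricter teardown, assuming that ordering is the culprit (untested):

func stopRecording() {
    // Cancel the task first, while the reference still exists.
    recognitionTask?.cancel()
    recognitionTask = nil

    // Then finish the request and tear down the audio path.
    recognitionRequest?.endAudio()
    recognitionRequest = nil
    audioEngine.stop()
    audioEngine.inputNode.removeTap(onBus: 0)
}

With that in place, calling stopRecording() at the start of startRecording() (or in the TabView selection setter before it) should leave the session in a clean state for the next tab.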

HealthKit keeps updating the sample data on a simulator, but not the actual data on Apple Watch

I just started learning Swift using WWDC open-source examples; I'm learning how to create a watchOS workout application. When I run this on the simulator, it keeps updating the sample data, but when I run it on my Apple Watch, it doesn't keep updating the live workout data. I am fairly sure I have to deal with the code below:
extension WorkoutManager: HKLiveWorkoutBuilderDelegate {
    func workoutBuilderDidCollectEvent(_ workoutBuilder: HKLiveWorkoutBuilder) {
    }

    func workoutBuilder(_ workoutBuilder: HKLiveWorkoutBuilder, didCollectDataOf collectedTypes: Set<HKSampleType>) {
        for type in collectedTypes {
            guard let quantityType = type as? HKQuantityType else {
                return // Nothing to do.
            }
            let statistics = workoutBuilder.statistics(for: quantityType)
            // Update the published values.
            updateForStatistics(statistics)
        }
    }
}
I don't know exactly what goes on when invoking HealthKit; I took most of the code from the WWDC example.
import Foundation
import HealthKit

class WorkoutManager: NSObject, ObservableObject {
    var selectedWorkout: HKWorkoutActivityType? {
        didSet {
            guard let selectedWorkout = selectedWorkout else { return }
            startWorkout(workoutType: selectedWorkout)
        }
    }

    @Published var showingSummaryView: Bool = false {
        didSet {
            if showingSummaryView == false {
                resetWorkout()
            }
        }
    }

    let healthStore = HKHealthStore()
    var session: HKWorkoutSession?
    var builder: HKLiveWorkoutBuilder?

    func startWorkout(workoutType: HKWorkoutActivityType) {
        let configuration = HKWorkoutConfiguration()
        configuration.activityType = workoutType
        configuration.locationType = .outdoor

        // Create the session and obtain the workout builder.
        do {
            session = try HKWorkoutSession(healthStore: healthStore, configuration: configuration)
            builder = session?.associatedWorkoutBuilder()
        } catch {
            // Handle any exceptions.
            return
        }

        // Set the workout builder's data source.
        builder?.dataSource = HKLiveWorkoutDataSource(healthStore: healthStore,
                                                      workoutConfiguration: configuration)

        session?.delegate = self
        builder?.delegate = self

        // Start the workout session and begin data collection.
        let startDate = Date()
        session?.startActivity(with: startDate)
        builder?.beginCollection(withStart: startDate) { (success, error) in
            // The workout has started.
        }
    }

    func requestAuthorization() {
        // The quantity type to write to the health store.
        let typesToShare: Set = [
            HKQuantityType.workoutType()
        ]

        // The quantity types to read from the health store.
        let typesToRead: Set = [
            HKQuantityType.quantityType(forIdentifier: .heartRate)!,
            HKObjectType.activitySummaryType()
        ]

        // Request authorization for those quantity types.
        healthStore.requestAuthorization(toShare: typesToShare, read: typesToRead) { (success, error) in
            // Handle error.
        }
    }

    // MARK: - Session State Control

    // The app's workout state.
    @Published var running = false

    func togglePause() {
        if running == true {
            self.pause()
        } else {
            resume()
        }
    }

    func pause() {
        session?.pause()
    }

    func resume() {
        session?.resume()
    }

    func endWorkout() {
        session?.end()
        showingSummaryView = true
    }

    // MARK: - Workout Metrics
    @Published var averageHeartRate: Double = 0
    @Published var heartRate: Double = 0
    @Published var workout: HKWorkout?

    func updateForStatistics(_ statistics: HKStatistics?) {
        guard let statistics = statistics else { return }

        DispatchQueue.main.async {
            switch statistics.quantityType {
            case HKQuantityType.quantityType(forIdentifier: .heartRate):
                let heartRateUnit = HKUnit.count().unitDivided(by: HKUnit.minute())
                self.heartRate = statistics.mostRecentQuantity()?.doubleValue(for: heartRateUnit) ?? 0
                self.averageHeartRate = statistics.averageQuantity()?.doubleValue(for: heartRateUnit) ?? 0
            default:
                return
            }
        }
    }

    func resetWorkout() {
        selectedWorkout = nil
        builder = nil
        workout = nil
        session = nil
        averageHeartRate = 0
        heartRate = 0
    }
}

extension WorkoutManager: HKWorkoutSessionDelegate {
    func workoutSession(_ workoutSession: HKWorkoutSession, didChangeTo toState: HKWorkoutSessionState,
                        from fromState: HKWorkoutSessionState, date: Date) {
        DispatchQueue.main.async {
            self.running = toState == .running
        }

        // Wait for the session to transition states before ending the builder.
        if toState == .ended {
            builder?.endCollection(withEnd: date) { (success, error) in
                self.builder?.finishWorkout { (workout, error) in
                    DispatchQueue.main.async {
                        self.workout = workout
                    }
                }
            }
        }
    }

    func workoutSession(_ workoutSession: HKWorkoutSession, didFailWithError error: Error) {
    }
}

extension WorkoutManager: HKLiveWorkoutBuilderDelegate {
    func workoutBuilderDidCollectEvent(_ workoutBuilder: HKLiveWorkoutBuilder) {
    }

    func workoutBuilder(_ workoutBuilder: HKLiveWorkoutBuilder, didCollectDataOf collectedTypes: Set<HKSampleType>) {
        for type in collectedTypes {
            guard let quantityType = type as? HKQuantityType else {
                return // Nothing to do.
            }
            let statistics = workoutBuilder.statistics(for: quantityType)
            // Update the published values.
            updateForStatistics(statistics)
        }
    }
}
Do you have the Workout processing background mode enabled in the Info.plist?
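For reference, enabling that capability (target > Signing & Capabilities > Background Modes > "Workout processing") should add an entry like the following to the WatchKit extension's Info.plist:

<key>WKBackgroundModes</key>
<array>
    <string>workout-processing</string>
</array>

Without it the app is suspended as soon as it leaves the foreground on the watch, which would match the "updates in the simulator but not on the device" symptom.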

Query real time heart rate variability and heart rate values

I'm trying to read out real-time heart rate and heart rate variability data from the Apple Watch.
I am able to read the heartRate value, but I don't know how to implement the reading of the HRV. Is it possible to have them read out simultaneously?
class HealthKitManager {
    private var healthStore = HKHealthStore()
    private var heartRateQuantity = HKUnit(from: "count/min")
    private var heartRateVariability = HKUnit(from: "count/min")
    private var activeQueries = [HKQuery]()

    @Published var heartRateValues = HeartRateValues()

    func autorizeHealthKit() {
        let heartRate = HKObjectType.quantityType(forIdentifier: .heartRate)!
        let heartRateVariability = HKObjectType.quantityType(forIdentifier: .heartRateVariabilitySDNN)!
        let HKreadTypes: Set = [heartRate, heartRateVariability]

        healthStore.requestAuthorization(toShare: nil, read: HKreadTypes) { (success, error) in
            if let error = error {
                print("Error requesting health kit authorization: \(error)")
            }
        }
    }

    func fetchHeartRateData(quantityTypeIdentifier: HKQuantityTypeIdentifier) {
        let devicePredicate = HKQuery.predicateForObjects(from: [HKDevice.local()])

        let updateHandler: (HKAnchoredObjectQuery, [HKSample]?, [HKDeletedObject]?, HKQueryAnchor?, Error?) -> Void = {
            query, samples, deletedObjects, queryAnchor, error in
            guard let samples = samples as? [HKQuantitySample] else {
                return
            }
            self.process(samples, type: quantityTypeIdentifier)
        }

        let query = HKAnchoredObjectQuery(type: HKObjectType.quantityType(forIdentifier: quantityTypeIdentifier)!, predicate: devicePredicate, anchor: nil, limit: HKObjectQueryNoLimit, resultsHandler: updateHandler)
        query.updateHandler = updateHandler
        healthStore.execute(query)
        activeQueries.append(query)
    }

    private func process(_ samples: [HKQuantitySample], type: HKQuantityTypeIdentifier) {
        for sample in samples {
            if type == .heartRate {
                DispatchQueue.main.async {
                    self.heartRateValues.heartRate = sample.quantity.doubleValue(for: self.heartRateQuantity)
                }
            }
            // Not sure about this part and readings show 0.0
            else if type == .heartRateVariabilitySDNN {
                DispatchQueue.main.async {
                    self.heartRateValues.heartRateVariability = sample.quantity.doubleValue(for: self.heartRateVariability)
                }
            }
        }
    }

    func stopFetchingHeartRateData() {
        activeQueries.forEach { healthStore.stop($0) }
        activeQueries.removeAll()
        DispatchQueue.main.async {
            self.heartRateValues.heartRate = 0.0
            self.heartRateValues.heartRateVariability = 0.0
        }
    }
}
A comparable question was asked here but it didn't receive an answer:
Get Apple watch heartRateVariabilitySDNN realtime?
I managed to get the HRV reading as well; I just added:
private func process(_ samples: [HKQuantitySample], type: HKQuantityTypeIdentifier) {
    for sample in samples {
        if type == .heartRate {
            DispatchQueue.main.async {
                self.heartRateValues.heartRate = sample.quantity.doubleValue(for: self.heartRateQuantity)
            }
        } else if type == .heartRateVariabilitySDNN {
            DispatchQueue.main.async {
                self.heartRateValues.heartRateVariability = sample.quantity.doubleValue(for: self.heartRateVariabilityQuantity)
            }
        }
    }
}
And then called it with:
fetchHeartRateData(quantityTypeIdentifier: .heartRate)
fetchHeartRateData(quantityTypeIdentifier: .heartRateVariabilitySDNN)
The problem remains that the HRV value gets read once and doesn't change over time. Any ideas?
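As far as I know, watchOS only records heartRateVariabilitySDNN samples intermittently (e.g. during Breathe sessions and occasional background measurements), so an anchored query has nothing new to deliver most of the time; that would explain a value that appears once and never changes. Two things worth checking: SDNN samples are stored in milliseconds, so the matching unit is HKUnit.secondUnit(with: .milli) rather than count/min; and if you need something closer to real time, you have to compute HRV yourself from beat-to-beat intervals. A rough sketch under those assumptions (the helper name and the SDNN-over-the-whole-series simplification are mine, untested):

import HealthKit

// Hypothetical helper: fetch the most recent heartbeat series sample and
// compute SDNN manually from the beat-to-beat intervals.
// Requires read authorization for HKSeriesType.heartbeat().
func fetchLatestSDNN(healthStore: HKHealthStore,
                     completion: @escaping (Double?) -> Void) {
    let sort = [NSSortDescriptor(key: HKSampleSortIdentifierStartDate, ascending: false)]
    let sampleQuery = HKSampleQuery(sampleType: HKSeriesType.heartbeat(), predicate: nil,
                                    limit: 1, sortDescriptors: sort) { _, samples, _ in
        guard let series = samples?.first as? HKHeartbeatSeriesSample else {
            completion(nil)
            return
        }
        var beatTimes = [TimeInterval]()
        let beatQuery = HKHeartbeatSeriesQuery(heartbeatSeries: series) {
            _, timeSinceSeriesStart, precededByGap, done, error in
            if error == nil, !precededByGap {
                beatTimes.append(timeSinceSeriesStart)
            }
            if done {
                // SDNN = standard deviation of the beat-to-beat (RR) intervals, in ms.
                let rr = zip(beatTimes.dropFirst(), beatTimes).map { ($0 - $1) * 1000 }
                guard rr.count > 1 else { completion(nil); return }
                let mean = rr.reduce(0, +) / Double(rr.count)
                let variance = rr.map { ($0 - mean) * ($0 - mean) }.reduce(0, +) / Double(rr.count)
                completion(variance.squareRoot())
            }
        }
        healthStore.execute(beatQuery)
    }
    healthStore.execute(sampleQuery)
}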

Swift AVFoundation in playground not outputting sound

My Morse code translator will not output the sound as it should. I have tested the speakers and my methods without this function and everything works flawlessly, but not in the context of the rest of the program. The compiler gives me no errors and the playground does not crash; it just doesn't play sound. Volume and ringer are at full.
func speakTheCode(message: String) {
    var speaker = AVAudioPlayer()
    let longBeep = #fileLiteral(resourceName: "beep_long.mp3")
    let shortBeep = #fileLiteral(resourceName: "beep_short.mp3")
    let dash = "-"
    let dot = "."

    for character in message.characters {
        if character == dash[dash.startIndex] {
            speaker = try! AVAudioPlayer(contentsOf: longBeep)
            speaker.prepareToPlay()
            print("-")
        } else if character == dot[dot.startIndex] {
            speaker = try! AVAudioPlayer(contentsOf: shortBeep)
            speaker.prepareToPlay()
            print(".")
        }
        speaker.play()
    }
}
I've been messing around with the code for hours now and nothing is working. What (if anything) am I doing wrong?
There seem to be some playground issues with playing audio. See this thread:
Playing a sound in a Swift Playground
However, I was able to make some changes to your code and get it to work. Here's my code:
import AVFoundation
import PlaygroundSupport

class Morse: NSObject, AVAudioPlayerDelegate {
    private var message = ""
    private var dotSound: AVAudioPlayer!
    private var dashSound: AVAudioPlayer!
    private let dash = Character("-")
    private let dot = Character(".")
    private var index: String.Index!

    init(message: String) {
        super.init()
        do {
            if let url = Bundle.main.url(forResource: "beep_short", withExtension: "mp3") {
                self.dotSound = try AVAudioPlayer(contentsOf: url)
                self.dotSound.delegate = self
                self.dotSound.prepareToPlay()
            }
        } catch {
            NSLog("Error loading dot audio!")
        }
        do {
            if let url = Bundle.main.url(forResource: "beep_long", withExtension: "mp3") {
                self.dashSound = try AVAudioPlayer(contentsOf: url)
                self.dashSound.delegate = self
                self.dashSound.prepareToPlay()
            }
        } catch {
            NSLog("Error loading dash audio!")
        }
        self.message = message
        self.index = message.startIndex
    }

    func playCharacter() {
        let character = message.characters[index]
        NSLog("Character: \(character)")
        if character == dash {
            dashSound.play()
        } else if character == dot {
            dotSound.play()
        }
    }

    func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
        NSLog("Finished playing")
        if index != message.endIndex {
            self.index = message.index(after: index)
            // Only play if advancing did not run past the last character.
            if index != message.endIndex {
                playCharacter()
            }
        }
    }
}

let m = Morse(message: "...---")
m.playCharacter()
PlaygroundPage.current.needsIndefiniteExecution = true
I had to enable indefinite execution to get the code to execute at all. Also, I had some issues with the second audio file loading, but I didn't investigate further to see whether it was an issue with my test file or something else, since it mostly worked.
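As a side note, part of the reason the original speakTheCode(message:) stays silent is probably that speaker is a local variable: the function returns (and ARC releases the player) before playback gets going, and each loop iteration replaces the player anyway. A minimal sketch of the retention fix, independent of the delegate-chaining approach above (names hypothetical):

var speaker: AVAudioPlayer?   // stored property: keeps the player alive

func playBeep(_ url: URL) {
    speaker = try? AVAudioPlayer(contentsOf: url)
    speaker?.prepareToPlay()
    speaker?.play()   // the player now outlives the function call
}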
@Fahim still it is showing an error with the same code as above.

"Extensions may not contain stored properties" and "no member" problems in Swift

I am not able to understand why I am getting these errors. Can someone please help me? Here is the source code:
import UIKit
import AVFoundation

// MARK: - PlaySoundsViewController: AVAudioPlayerDelegate

extension PlaySoundsViewController: AVAudioPlayerDelegate {
    var audioEngine = AVAudioEngine()

    // MARK: Alerts

    struct Alerts {
        static let DismissAlert = "Dismiss"
        static let RecordingDisabledTitle = "Recording disabled"
        static let RecordginDisabledMessage = "Youve disabled this app from recording your microphone. Check settings"
        static let RecodingFailedTitle = "Recording failed"
        static let RecordingFailedMessage = "Something went wrong with the recording"
        static let AudioRecordedError = "Audio Recorder Error"
        static let AudioSessionError = "Audio Session Error"
        static let AudioRecordingError = "Audio Recording Error"
        static let AudioFileError = "Audio File Error"
        static let AudioEngineError = "Audio Engine Error"
    }

    // MARK: PlayingState (raw values correspond to sender tags)

    enum PlayingState { case playing, notPlaying }

    // MARK: Audio Functions

    func setupAudio() {
        // initialize (recording) audio file
        do {
            audioFile = try AVAudioFile(forReading: recordedAudioURL as URL)
        } catch {
            showAlert(Alerts.AudioFileError, message: String(describing: error))
        }
    }

    func playSound(rate: Float? = nil, pitch: Float? = nil, echo: Bool = false, reverb: Bool = false) {
        // initialize audio engine components
        audioEngine = AVAudioEngine()

        // node for playing audio
        audioPlayerNode = AVAudioPlayerNode()
        audioEngine.attach(audioPlayerNode)

        // node for adjusting rate/pitch
        let changeRatePitchNode = AVAudioUnitTimePitch()
        if let pitch = pitch {
            changeRatePitchNode.pitch = pitch
        }
        if let rate = rate {
            changeRatePitchNode.rate = rate
        }
        audioEngine.attach(changeRatePitchNode)

        // node for echo
        let echoNode = AVAudioUnitDistortion()
        echoNode.loadFactoryPreset(.multiEcho1)
        audioEngine.attach(echoNode)

        // node for reverb
        let reverbNode = AVAudioUnitReverb()
        reverbNode.loadFactoryPreset(.cathedral)
        reverbNode.wetDryMix = 50
        audioEngine.attach(reverbNode)

        // connect nodes
        if echo == true && reverb == true {
            connectAudioNodes(audioPlayerNode, changeRatePitchNode, echoNode, reverbNode, audioEngine.outputNode)
        } else if echo == true {
            connectAudioNodes(audioPlayerNode, changeRatePitchNode, echoNode, audioEngine.outputNode)
        } else if reverb == true {
            connectAudioNodes(audioPlayerNode, changeRatePitchNode, reverbNode, audioEngine.outputNode)
        } else {
            connectAudioNodes(audioPlayerNode, changeRatePitchNode, audioEngine.outputNode)
        }

        // schedule to play and start the engine!
        audioPlayerNode.stop()
        audioPlayerNode.scheduleFile(audioFile, at: nil) {
            var delayInSeconds: Double = 0
            if let lastRenderTime = self.audioPlayerNode.lastRenderTime, let playerTime = self.audioPlayerNode.playerTime(forNodeTime: lastRenderTime) {
                if let rate = rate {
                    delayInSeconds = Double(self.audioFile.length - playerTime.sampleTime) / Double(self.audioFile.processingFormat.sampleRate) / Double(rate)
                } else {
                    delayInSeconds = Double(self.audioFile.length - playerTime.sampleTime) / Double(self.audioFile.processingFormat.sampleRate)
                }
            }

            // schedule a stop timer for when audio finishes playing
            self.stopTimer = Timer(timeInterval: delayInSeconds, target: self, selector: #selector(PlaySoundsViewController.stopAudio), userInfo: nil, repeats: false)
            RunLoop.main.add(self.stopTimer!, forMode: RunLoopMode.defaultRunLoopMode)
        }

        do {
            try audioEngine.start()
        } catch {
            showAlert(Alerts.AudioEngineError, message: String(describing: error))
            return
        }

        // play the recording!
        audioPlayerNode.play()
    }

    func stopAudio() {
        if let audioPlayerNode = audioPlayerNode {
            audioPlayerNode.stop()
        }
        if let stopTimer = stopTimer {
            stopTimer.invalidate()
        }
        configureUI(.notPlaying)
        if let audioEngine = audioEngine {
            audioEngine.stop()
            audioEngine.reset()
        }
    }

    // MARK: Connect List of Audio Nodes

    func connectAudioNodes(_ nodes: AVAudioNode...) {
        for x in 0..<nodes.count-1 {
            audioEngine.connect(nodes[x], to: nodes[x+1], format: audioFile.processingFormat)
        }
    }

    // MARK: UI Functions

    func configureUI(_ playState: PlayingState) {
        switch(playState) {
        case .playing:
            setPlayButtonsEnabled(false)
            stopButton.isEnabled = true
        case .notPlaying:
            setPlayButtonsEnabled(true)
            stopButton.isEnabled = false
        }
    }

    func setPlayButtonsEnabled(_ enabled: Bool) {
        snailButton.isEnabled = enabled
        chipmunkButton.isEnabled = enabled
        rabbitButton.isEnabled = enabled
        vaderButton.isEnabled = enabled
        echoButton.isEnabled = enabled
        reverbButton.isEnabled = enabled
    }

    func showAlert(_ title: String, message: String) {
        let alert = UIAlertController(title: title, message: message, preferredStyle: .alert)
        alert.addAction(UIAlertAction(title: Alerts.DismissAlert, style: .default, handler: nil))
        self.present(alert, animated: true, completion: nil)
    }
}
The screenshot of the error is in the link below:
Screenshot
You cannot declare var audioEngine = AVAudioEngine() inside an extension. Declare it inside the PlaySoundsViewController class instead.
Extensions are meant to augment behaviours, not fundamentally change a class.
Source: Why can't you add stored properties to extensions?
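A minimal sketch of that fix, with the class body assumed and only the relevant members shown: the stored property moves into the class declaration, while methods, nested types, and computed properties may stay in the extension.

import UIKit
import AVFoundation

class PlaySoundsViewController: UIViewController {
    // Stored properties must live in the type's own declaration.
    var audioEngine = AVAudioEngine()
    var audioFile: AVAudioFile!
    var recordedAudioURL: URL!
}

// MARK: - PlaySoundsViewController: AVAudioPlayerDelegate
extension PlaySoundsViewController: AVAudioPlayerDelegate {
    // Methods, nested types, and computed properties are fine here.
    func setupAudio() {
        do {
            audioFile = try AVAudioFile(forReading: recordedAudioURL)
        } catch {
            print("Audio File Error: \(error)")
        }
    }
}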