URL is nil in AVAudioFile - swift

I am trying to play multiple audio files on top of a background audio file using AVAudioEngine.
When I try to initialize backgroundAudioFile, the app crashes with: Thread 1: Fatal error: 'try!' expression unexpectedly raised an error: Error Domain=com.apple.coreaudio.avfaudio Code=2003334207 "(null)" UserInfo={failed call=ExtAudioFileOpenURL((CFURLRef)fileURL, &_extAudioFile)}.
The url passed to the initializer is valid; it is printed out.
import Foundation
import UIKit
import AVFoundation

class AudioPlayer {

    var timeIndex = 0 // keeps track of the index of the item traversed within `timesToTransverse` by `player?.addBoundaryTimeObserver`
    var topAudioFiles: [AVAudioFile] = []
    var engine: AVAudioEngine
    var backgroundAudioNode: AVAudioPlayerNode
    var backgroundAudioFile: AVAudioFile
    var topAudioAudioNodes = [AVAudioPlayerNode]()
    var mixer: AVAudioMixerNode
    var timer: Timer!
    var urls: [URL] = []
    var player: AVPlayer!
    var timesToTransverse = [NSValue]() // contains time values in seconds, e.g. [1.54, 2.64, 67.20]
    var delays = [UInt64]()
    fileprivate var timeObserverToken: Any?

    init(url: URL, urls: [URL] = [], timesToTransverse: [NSValue]) {
        self.urls = urls
        self.timesToTransverse = timesToTransverse
        topAudioFiles = urls.map { try! AVAudioFile(forReading: $0) }
        print("the remote url is \(url)")
        // it prints the url is https://firebasestorage.googleapis.com/v0/b/salsaworld-658f3.appspot.com/o/adminAudioFiles%2F-LWv5rnKiLawXvsSsQgG.m4a?alt=media&token=19e8eac0-2b47-49e2-acd3-9a459903f84b
        // it crashes on this line
        backgroundAudioFile = try! AVAudioFile(forReading: url)
        player = AVPlayer(url: url)
        engine = AVAudioEngine()
        mixer = AVAudioMixerNode()
        engine.attach(mixer)
        engine.connect(mixer, to: engine.outputNode, format: nil)
        backgroundAudioNode = AVAudioPlayerNode()
        initTopAudioNodes()
        try! engine.start()
    }

    func initTopAudioNodes() {
        for _ in topAudioFiles {
            topAudioAudioNodes += [AVAudioPlayerNode()]
        }
        for node in topAudioAudioNodes {
            engine.attach(node)
            engine.connect(node, to: mixer, format: nil)
        }
    } // end initTopAudioNodes

    func playWithAudioPlayerAndNodes() {
        player.play()
        var i = 1
        timeObserverToken = player.addBoundaryTimeObserver(forTimes: timesToTransverse, queue: nil) {
            [weak self] in
            guard let self = self else { return }
            let index = i % self.topAudioAudioNodes.count
            let node = self.topAudioAudioNodes[index]
            node.scheduleFile(self.topAudioFiles[index], at: nil, completionHandler: nil)
            node.play()
            i += 1
            /* Because there are no time signature changes,
               we can simply increment timeIndex by 1 every time
               `addBoundaryTimeObserver`'s completion handler is called.
               Then, we subscript timesToTransverse with timeIndex
               in order to get the subsequent timeInSeconds.
            */
            guard self.timeIndex < self.timesToTransverse.count else { return }
            print("timeIndex is now \(self.timeIndex)")
            let timeElement = self.timesToTransverse[self.timeIndex]
            let timeInSeconds = CMTimeGetSeconds(timeElement.timeValue)
            // use the remainder operator to determine the beat count
            let beat = (self.timeIndex + 1) % 8 == 0 ? 8 : ((self.timeIndex + 1) % 8)
            print("Beat would be: ", beat)
            /*
             0: (0 + 1) % 8 = 1
             1: (1 + 1) % 8 = 2
             6: (6 + 1) % 8 = 7
             7: (7 + 1) % 8 = 0, so beat = 8
            */
            self.timeIndex += 1
        }
    } // end playWithAudioPlayerAndNodes
} // end class AudioPlayer

// create an instance of class AudioPlayer and call playWithAudioPlayerAndNodes
class HomeViewController: UIViewController {

    override func viewDidLoad() {
        super.viewDidLoad()
        let bundle = Bundle.main
        let one = bundle.url(forResource: "1", withExtension: "wav")!
        let two = bundle.url(forResource: "2", withExtension: "wav")!
        let three = bundle.url(forResource: "3", withExtension: "wav")!
        let five = bundle.url(forResource: "5", withExtension: "wav")!
        let six = bundle.url(forResource: "6", withExtension: "wav")!
        let seven = bundle.url(forResource: "7", withExtension: "wav")!
        // mediaArray contains string URLs downloaded from Firebase Database
        let mediaItem = mediaArray[tableIndexPath.row]
        guard let backgroundAudio = URL(string: mediaItem.mediaAudioUrlStringRepresentation ?? "") else { return }
        let audioPlayer = AudioPlayer(url: backgroundAudio,
                                      urls: [one, two, three, five, six, seven],
                                      timesToTransverse: timesToTransverse)
        // start playing
        audioPlayer.playWithAudioPlayerAndNodes()
    }
}

You cannot call AVAudioFile(forReading:) on a remote file. You need to download the binary data and parse it into packets using Audio File Stream Services. That way, you can supply the packets to a buffer and play from the buffer through the audio engine.
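If you don't need true streaming, a simpler workaround is to download the whole file to a local URL first and then open that with AVAudioFile. A minimal sketch (the function name and completion callback are placeholders, with only basic error handling):
import AVFoundation

// Sketch: download the remote audio to a local file URL, then open it
// with AVAudioFile. AVAudioFile(forReading:) only works with file URLs.
func loadRemoteAudioFile(from remoteURL: URL, completion: @escaping (AVAudioFile?) -> Void) {
    URLSession.shared.downloadTask(with: remoteURL) { tempURL, _, error in
        guard let tempURL = tempURL, error == nil else {
            completion(nil)
            return
        }
        // Move the download out of the temporary location before it is cleaned up.
        let localURL = FileManager.default.temporaryDirectory
            .appendingPathComponent(UUID().uuidString)
            .appendingPathExtension("m4a")
        do {
            try FileManager.default.moveItem(at: tempURL, to: localURL)
            completion(try AVAudioFile(forReading: localURL)) // a file URL, so this can succeed
        } catch {
            print("could not open downloaded audio: \(error)")
            completion(nil)
        }
    }.resume()
}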

you can try:
let one = try! AVAudioFile(forReading: URL(fileURLWithPath: Bundle.main.path(forResource: "1", ofType: "wav", inDirectory: "YOUKNOW!")!))
it works fine for me
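Equivalently, a shorter sketch (assuming "1.wav" sits at the top level of the bundle) that avoids the path-to-URL round trip:
// url(forResource:withExtension:) returns a file URL directly.
if let oneURL = Bundle.main.url(forResource: "1", withExtension: "wav") {
    let one = try? AVAudioFile(forReading: oneURL)
}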

Related

AVAudioEngine doesn't playback a sound

I am trying to use AVAudioEngine to play back a wav file. I tried to do it in a few different ways, but nothing works.
Try 1
...
private var audioEngine: AVAudioEngine = AVAudioEngine()
private var mixer: AVAudioMixerNode = AVAudioMixerNode()
private var audioFilePlayer: AVAudioPlayerNode = AVAudioPlayerNode()

func Play1() {
    guard let filePath = Bundle.main.url(forResource: "testwav", withExtension: "wav", subdirectory: "res") else {
        print("file not found")
        return
    }
    print("\(filePath)")
    guard let audioFile = try? AVAudioFile(forReading: filePath) else { return }
    let audioFormat = audioFile.processingFormat
    let audioFrameCount = UInt32(audioFile.length)
    guard let audioFileBuffer = AVAudioPCMBuffer(pcmFormat: audioFormat, frameCapacity: audioFrameCount) else { return }
    do {
        try audioFile.read(into: audioFileBuffer)
    } catch {
        print("over")
    }
    let mainMixer = audioEngine.mainMixerNode
    audioEngine.attach(audioFilePlayer)
    audioEngine.connect(audioFilePlayer, to: mainMixer, format: audioFileBuffer.format)
    audioEngine.connect(mainMixer, to: audioEngine.outputNode, format: audioFileBuffer.format)
    try? audioEngine.start()
    audioFilePlayer.play()
    audioFilePlayer.scheduleBuffer(audioFileBuffer, at: nil, options: AVAudioPlayerNodeBufferOptions.loops)
}
...
Try 2
...
private var audioEngine: AVAudioEngine = AVAudioEngine()
private var mixer: AVAudioMixerNode = AVAudioMixerNode()
private var audioFilePlayer: AVAudioPlayerNode = AVAudioPlayerNode()

func Play2() {
    DispatchQueue.global(qos: .background).async {
        self.audioEngine.attach(self.mixer)
        self.audioEngine.connect(self.mixer, to: self.audioEngine.outputNode, format: nil)
        // !important - start the engine *before* setting up the player nodes
        try! self.audioEngine.start()
        let audioPlayer = AVAudioPlayerNode()
        self.audioEngine.attach(audioPlayer)
        // Notice the output is the mixer in this case
        self.audioEngine.connect(audioPlayer, to: self.mixer, format: nil)
        guard let fileUrl = Bundle.main.url(forResource: "testwav", withExtension: "wav", subdirectory: "res") else {
            // guard let url = Bundle.main.url(forResource: "audiotest", withExtension: "mp3", subdirectory: "res") else {
            print("mp3 not found")
            return
        }
        do {
            let file = try AVAudioFile(forReading: fileUrl)
            audioPlayer.scheduleFile(file, at: nil, completionHandler: nil)
            audioPlayer.play(at: nil)
        } catch let error {
            print(error.localizedDescription)
        }
    }
}
...
...
private var audioEngine: AVAudioEngine = AVAudioEngine()
private var mixer: AVAudioMixerNode = AVAudioMixerNode()
private var audioFilePlayer: AVAudioPlayerNode = AVAudioPlayerNode()

func Play3() {
    DispatchQueue.global(qos: .background).async {
        self.audioEngine = AVAudioEngine()
        _ = self.audioEngine.mainMixerNode
        self.audioEngine.prepare()
        do {
            try self.audioEngine.start()
        } catch {
            print(error)
        }
        guard let url = Bundle.main.url(forResource: "testwav", withExtension: "wav", subdirectory: "res") else {
            // guard let url = Bundle.main.url(forResource: "audiotest", withExtension: "mp3", subdirectory: "res") else {
            print("mp3 not found")
            return
        }
        let player = AVAudioPlayerNode()
        player.volume = 1.0
        do {
            let audioFile = try AVAudioFile(forReading: url)
            let format = audioFile.processingFormat
            print(format)
            self.audioEngine.attach(player)
            self.audioEngine.connect(player, to: self.audioEngine.mainMixerNode, format: format)
            player.scheduleFile(audioFile, at: nil, completionHandler: nil)
        } catch let error {
            print(error.localizedDescription)
        }
        player.play()
    }
}
...
It should also be mentioned that there are no errors; while debugging I can see that all the methods are executed and everything is OK, but I don't hear any sound playback...
What am I doing wrong?
Try to activate your audio session with the following method:
func setActive(_ active: Bool, options: AVAudioSession.SetActiveOptions = []) throws
Please note that if another active audio session has higher priority than yours (for example, a phone call), and neither audio session allows mixing, attempting to activate your audio session fails. Deactivating an audio session that has running audio objects stops them, deactivates the session, and returns an AVAudioSession.ErrorCode.isBusy error.
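For example, a minimal sketch (assuming the playback category fits your app) of activating the session before starting the engine:
import AVFoundation

// Sketch: configure and activate the shared audio session
// before starting the AVAudioEngine.
do {
    let session = AVAudioSession.sharedInstance()
    try session.setCategory(.playback, mode: .default, options: [])
    try session.setActive(true)
} catch {
    print("could not activate audio session: \(error)")
}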

How to play sequence MP3 files? [duplicate]

This question already has answers here:
Expected declaration error for making high score [duplicate]
(2 answers)
Closed 3 years ago.
I'm trying to play a sequence of mp3 files using an array, a for loop, and AVQueuePlayer. I see this error: Expected declaration.
for number in myIndex..<arr.count {
    var queuePlayer: AVQueuePlayer = {
        let url1 = Bundle.main.url(forResource: String(myIndex + number), withExtension: "mp3")!
        let item1 = AVPlayerItem(url: url1)
        let queue = AVQueuePlayer(items: [item1])
        return queue
    }()
}
Expected declaration
The compiler reports "Expected declaration" because the for loop sits at class scope; statements have to live inside a function. Move the loop into a method and build all the items before creating a single AVQueuePlayer:
@IBAction func autoplay(_ sender: Any) {
    var items: [AVPlayerItem] = []
    for number in myIndex..<arr.count {
        let url = Bundle.main.url(forResource: String(number), withExtension: "mp3")!
        items.append(AVPlayerItem(url: url))
    }
    let queue = AVQueuePlayer(items: items)
    queue.play()
}
You need one AVQueuePlayer, so try it like this:
lazy var queue: AVQueuePlayer = {
    return AVQueuePlayer()
}()

var items: [AVPlayerItem] = []
for number in myIndex..<arr.count {
    let url = Bundle.main.url(forResource: String(myIndex + number), withExtension: "mp3")!
    items.append(AVPlayerItem(url: url))
}
queue = AVQueuePlayer(items: items)
Then you can simply use queue.play(), queue.advanceToNextItem(), and so on.
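If you also need to know when each queued item finishes (a sketch, assuming you keep the `queue` built above), you can observe AVPlayerItemDidPlayToEndTime:
// Sketch: get notified as each queued item finishes playing.
NotificationCenter.default.addObserver(forName: .AVPlayerItemDidPlayToEndTime,
                                       object: nil,
                                       queue: .main) { notification in
    if let finishedItem = notification.object as? AVPlayerItem {
        print("finished item: \(finishedItem)")
    }
}
queue.play()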

How to play multiple Audio Files simultaneously using AVPlayer?

I am trying to play multiple audio files using 2 AVPlayer instances, but one of the players stops for a fraction of a second rather than playing all audio files simultaneously.
The logic of the program is as follows:
var player: AVPlayer? will stream an audio file from my database. On its own it plays perfectly.
fileprivate var countPlayer: AVPlayer? plays the count number of the current item being played by var player. The count is a sequence of 1 to 8, and for each digit I am storing/sandboxing a .wav file locally, such as 1.wav, 2.wav...8.wav.
When the current time of var player reaches a certain time, countPlayer is triggered and plays one of the local files 1.wav, 2.wav..etc.
The problem is that when countPlayer starts playing, it causes the background AVPlayer, namely var player, to stop for a fraction of a second, similar to what's described in this comment:
Play multiple Audio Files with AVPlayer
var player: AVPlayer? // plays the song
fileprivate var countPlayer: AVPlayer? // plays the count number of the song

private func addBoundaryTimeObserver(tableIndexPath: IndexPath) {
    let mediaItem = mediaArray[tableIndexPath.row]
    guard let url = URL(string: mediaItem.mediaAudioUrlStringRepresentation ?? "") else { return }
    let playerItem = AVPlayerItem(url: url)
    player = AVPlayer(playerItem: playerItem)
    var timesToTransverse = [NSValue]()
    // convert the string representation of the times to an array
    let timesRecorded: [String] = mediaItem.timesRecorded.components(separatedBy: ",")
    // Build boundary times from arrayOfBeats keys
    let timeDoubles: [Double] = timesRecorded.compactMap { timeString in
        if let second = Double("\(timeString)") {
            return second
        }
        return nil
    }
    guard timeDoubles.count > 0 else { return } // unexpected
    timesToTransverse = timeDoubles.map { second in
        let cmtime = CMTime(seconds: second, preferredTimescale: CMTimeScale(NSEC_PER_SEC))
        return NSValue(time: cmtime)
    }
    guard timesToTransverse.count != 0 else { return }
    guard let playerCell = tableView.cellForRow(at: IndexPath(row: 0, section: 0)) as? PlayerCell else { return }
    startTime = Date().timeIntervalSinceReferenceDate
    timeIndex = 0
    player?.play()
    player?.rate = Float(initialPlaybackRate)
    // find the index of time
    // reset timeObserverToken
    // call a function with the new times sorted
    // Queue on which to invoke the callback
    let mainQueue = DispatchQueue.main
    // Add time observer
    timeObserverToken =
        player?.addBoundaryTimeObserver(forTimes: timesToTransverse, queue: mainQueue) {
            [weak self] in
            // because there are no time signature changes, we can simply increment timeIndex by 1 every time the `addBoundaryTimeObserver` completion handler is called, and subscript timesToTransverse with timeIndex in order to get the subsequent timeInSeconds
            guard let strongSelf = self, strongSelf.timeIndex < timesToTransverse.count else { return }
            let timeElement = timesToTransverse[strongSelf.timeIndex]
            strongSelf.timeInSeconds = CMTimeGetSeconds(timeElement.timeValue)
            // show progress in progressView
            let duration = CMTimeGetSeconds(playerItem.duration)
            let cmtimeSeconds = CMTime(seconds: strongSelf.timeInSeconds, preferredTimescale: CMTimeScale(NSEC_PER_SEC))
            // Total time since the timer started, in seconds
            strongSelf.timeInSeconds = Date().timeIntervalSinceReferenceDate - strongSelf.startTime
            let timeString = String(format: "%.2f", strongSelf.timeInSeconds)
            strongSelf.timeString = timeString
            // use the remainder operator to determine the beat count
            let beat = (strongSelf.timeIndex + 1) % 8 == 0 ? 8 : ((strongSelf.timeIndex + 1) % 8)
            // play the beat count: 1, 2, ...8
            strongSelf.preapareToPlayAudio(beatCount: beat)
            /*
             0: (0 + 1) % 8 = 1
             1: (1 + 1) % 8 = 2
             6: (6 + 1) % 8 = 7
             7: (7 + 1) % 8 = 0, so beat = 8
            */
            strongSelf.timeIndex += 1
        }
} // end addBoundaryTimeObserver

// determine which wav file to play
private func preapareToPlayAudio(beatCount: Int) {
    switch beatCount {
    case 1:
        guard let url = Bundle.main.url(forResource: "1", withExtension: "wav") else { return }
        playWith(beatCountURL: url)
    // 7 more cases go here .....
    default: print("unexpected case here")
    }
} // end preapareToPlayAudio(beatCount:)

private func playWith(beatCountURL: URL) {
    let playerItem = AVPlayerItem(url: beatCountURL)
    countPlayer = AVPlayer(playerItem: playerItem)
    countPlayer?.play()
}
You would be better off using AVAudioPlayerNode, AVAudioMixerNode and AVAudioEngine. Using these classes you won't have the problems you have right now. It's also not that difficult to set up.
You can check out my gist; in order to play the sounds in your Playground you need to put the audio files into the Resources folder in the Project Navigator:
https://gist.github.com/standinga/24342d23acfe70dc08cbcc994895f32b
The code works without stopping the background audio when the top sounds are triggered.
Here's the same code inline:
import AVFoundation
import PlaygroundSupport

PlaygroundPage.current.needsIndefiniteExecution = true

class AudioPlayer {
    var backgroundAudioFile: AVAudioFile
    var topAudioFiles: [AVAudioFile] = []
    var engine: AVAudioEngine
    var backgroundAudioNode: AVAudioPlayerNode
    var topAudioAudioNodes = [AVAudioPlayerNode]()
    var mixer: AVAudioMixerNode
    var timer: Timer!
    var urls: [URL] = []

    init(_ url: URL, urls: [URL] = []) {
        backgroundAudioFile = try! AVAudioFile(forReading: url)
        topAudioFiles = urls.map { try! AVAudioFile(forReading: $0) }
        engine = AVAudioEngine()
        mixer = AVAudioMixerNode()
        engine.attach(mixer)
        engine.connect(mixer, to: engine.outputNode, format: nil)
        self.urls = urls
        backgroundAudioNode = AVAudioPlayerNode()
        for _ in topAudioFiles {
            topAudioAudioNodes += [AVAudioPlayerNode()]
        }
    }

    func start() {
        engine.attach(backgroundAudioNode)
        engine.connect(backgroundAudioNode, to: mixer, format: nil)
        backgroundAudioNode.scheduleFile(backgroundAudioFile, at: nil, completionHandler: nil)
        try! engine.start()
        backgroundAudioNode.play()
        for node in topAudioAudioNodes {
            engine.attach(node)
            engine.connect(node, to: mixer, format: nil)
            try! engine.start()
        }
        // simulate rescheduling files played on top of the background audio
        DispatchQueue.global().async { [unowned self] in
            for i in 0..<1000 {
                sleep(2)
                let index = i % self.topAudioAudioNodes.count
                let node = self.topAudioAudioNodes[index]
                node.scheduleFile(self.topAudioFiles[index], at: nil, completionHandler: nil)
                node.play()
            }
        }
    }
}

let bundle = Bundle.main
let beepLow = bundle.url(forResource: "beeplow", withExtension: "wav")!
let beepMid = bundle.url(forResource: "beepmid", withExtension: "wav")!
let backgroundAudio = bundle.url(forResource: "backgroundAudio", withExtension: "wav")!
let audioPlayer = AudioPlayer(backgroundAudio, urls: [beepLow, beepMid])
audioPlayer.start()

Mac - Swift 3 - queuing audio files and playing

I would like to write an app in Swift 3 that plays queued audio files without any gap, crack or noise when passing from one to another.
My first try was using AVAudioPlayer and AVAudioPlayerDelegate (AVAudioPlayer using array to queue audio files - Swift), but I don't know how to preload the next song to avoid a gap. Even if I knew how to do it, I am not certain it would be the best way to achieve my goal.
AVQueuePlayer seems to be a better candidate for the job, it is made for that purpose, but I can't find any examples to help me out.
Maybe it is only a problem of preloading or buffering? I am a bit lost in this ocean of possibilities.
Any suggestion is welcome.
It is far from perfect, especially if you want to do it twice or more (a "file exists" error), but it can serve as a base.
What it does is take two files (mine are .aif samples of about 4 seconds), encode them into one file and play the resulting file. If you have hundreds of them, assembled randomly or not, it can make for great fun.
All credit for the mergeAudioFiles function goes to @Peyman and @Pigeon_39. Concatenate two audio files in Swift and play them
Swift 3
import Cocoa
import AVFoundation

var action = AVAudioPlayer()
let path = Bundle.main.path(forResource: "audiofile1.aif", ofType: nil)!
let url = URL(fileURLWithPath: path)
let path2 = Bundle.main.path(forResource: "audiofile2.aif", ofType: nil)!
let url2 = URL(fileURLWithPath: path2)
let array1 = NSMutableArray(array: [url, url2])

class ViewController: NSViewController, AVAudioPlayerDelegate {

    @IBOutlet weak var LanceStop: NSButton!

    override func viewDidLoad() {
        super.viewDidLoad()
    }

    override var representedObject: Any? {
        didSet {
            // Update the view, if already loaded.
        }
    }

    @IBAction func Lancer(_ sender: NSButton) {
        mergeAudioFiles(audioFileUrls: array1)
        let url3 = NSURL(string: "/Users/ADDUSERNAMEHERE/Documents/FinalAudio.m4a")
        do {
            action = try AVAudioPlayer(contentsOf: url3 as! URL)
            action.delegate = self
            action.numberOfLoops = 0
            action.prepareToPlay()
            action.volume = 1
            action.play()
        } catch { print("error") }
    }

    func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
        if flag == true {
        }
    }

    var mergeAudioURL = NSURL()

    func mergeAudioFiles(audioFileUrls: NSArray) {
        //audioFileUrls.adding(url)
        //audioFileUrls.adding(url2)
        let composition = AVMutableComposition()
        for i in 0 ..< audioFileUrls.count {
            let compositionAudioTrack: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
            let asset = AVURLAsset(url: (audioFileUrls[i] as! NSURL) as URL)
            let track = asset.tracks(withMediaType: AVMediaTypeAudio)[0]
            let timeRange = CMTimeRange(start: CMTimeMake(0, 600), duration: track.timeRange.duration)
            try! compositionAudioTrack.insertTimeRange(timeRange, of: track, at: composition.duration)
        }
        let documentDirectoryURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first! as NSURL
        self.mergeAudioURL = documentDirectoryURL.appendingPathComponent("FinalAudio.m4a")! as URL as NSURL
        let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)
        assetExport?.outputFileType = AVFileTypeAppleM4A
        assetExport?.outputURL = mergeAudioURL as URL
        assetExport?.exportAsynchronously(completionHandler: {
            switch assetExport!.status {
            case AVAssetExportSessionStatus.failed:
                print("failed \(assetExport?.error)")
            case AVAssetExportSessionStatus.cancelled:
                print("cancelled \(assetExport?.error)")
            case AVAssetExportSessionStatus.unknown:
                print("unknown \(assetExport?.error)")
            case AVAssetExportSessionStatus.waiting:
                print("waiting \(assetExport?.error)")
            case AVAssetExportSessionStatus.exporting:
                print("exporting \(assetExport?.error)")
            default:
                print("Audio Concatenation Complete")
            }
        })
    }
}
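Since the question mentions AVQueuePlayer: here is a minimal sketch (assuming an array of local file URLs) of the queue-based alternative, which preloads upcoming items for you and is often enough to avoid audible gaps:
import AVFoundation

// Sketch: AVQueuePlayer preloads upcoming items itself.
let fileURLs: [URL] = [url, url2] // the same sample URLs as above
let items = fileURLs.map { AVPlayerItem(url: $0) }
let queuePlayer = AVQueuePlayer(items: items)
queuePlayer.play()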

Generate AVAudioPCMBuffer with AVAudioRecorder

Along with iOS 10, Apple released a new framework that recognizes speech. Data can be passed to this framework either by appending AVAudioPCMBuffers or by giving it the URL of an m4a file. Currently, speech recognition works using the latter, but this is only possible after somebody has finished speaking, so it is not in real time. Here is the code for that:
let audioSession = AVAudioSession.sharedInstance()
var audioRecorder: AVAudioRecorder!
var soundURLGlobal: URL!

func setUp() {
    let recordSettings = [AVSampleRateKey: NSNumber(value: Float(44100.0)),
                          AVFormatIDKey: NSNumber(value: Int32(kAudioFormatMPEG4AAC)),
                          AVNumberOfChannelsKey: NSNumber(value: 1),
                          AVEncoderAudioQualityKey: NSNumber(value: Int32(AVAudioQuality.medium.rawValue))]
    let fileManager = FileManager.default
    let urls = fileManager.urls(for: .documentDirectory, in: .userDomainMask)
    let documentDirectory = urls[0] as NSURL
    let soundURL = documentDirectory.appendingPathComponent("sound.m4a")
    soundURLGlobal = soundURL
    do {
        try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord)
        try audioRecorder = AVAudioRecorder(url: soundURL!, settings: recordSettings)
        audioRecorder.prepareToRecord()
    } catch {}
}

func start() {
    do {
        try audioSession.setActive(true)
        audioRecorder.record()
    } catch {}
}

func stop() {
    audioRecorder.stop()
    let request = SFSpeechURLRecognitionRequest(url: soundURLGlobal!)
    let recognizer = SFSpeechRecognizer()
    recognizer?.recognitionTask(with: request, resultHandler: { (result, error) in
        if result!.isFinal {
            print(result?.bestTranscription.formattedString)
        }
    })
}
I am trying to convert this, but I cannot find where to get an AVAudioPCMBuffer.
Thanks,
Hi B Person, good topic.
Here is a topic with a solution: Tap Mic Input Using AVAudioEngine in Swift.
Also see the WWDC 2014 session 502 - AVAudioEngine in Practice:
capturing the microphone => at around 20 min
creating a buffer with the tap code => at around 21:50
Here is the Swift 3 code:
@IBAction func button01Pressed(_ sender: Any) {
    let inputNode = audioEngine.inputNode
    let bus = 0
    inputNode?.installTap(onBus: bus, bufferSize: 2048, format: inputNode?.inputFormat(forBus: bus)) {
        (buffer: AVAudioPCMBuffer!, time: AVAudioTime!) -> Void in
        let theLength = Int(buffer.frameLength)
        print("theLength = \(theLength)")
        var samplesAsDoubles: [Double] = []
        for i in 0 ..< Int(buffer.frameLength) {
            let theSample = Double((buffer.floatChannelData?.pointee[i])!)
            samplesAsDoubles.append(theSample)
        }
        print("samplesAsDoubles.count = \(samplesAsDoubles.count)")
    }
    audioEngine.prepare()
    try! audioEngine.start()
}
To stop the audio:
func stopAudio() {
    let inputNode = audioEngine.inputNode
    let bus = 0
    inputNode?.removeTap(onBus: bus)
    self.audioEngine.stop()
}
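To connect this back to the original question: the buffers delivered by the tap can be appended to an SFSpeechAudioBufferRecognitionRequest for near-real-time recognition. A minimal sketch (assuming speech recognition authorization has already been granted):
import Speech

// Sketch: feed the tap's AVAudioPCMBuffers straight to the recognizer
// instead of writing an m4a file first.
let recognizer = SFSpeechRecognizer()
let request = SFSpeechAudioBufferRecognitionRequest()
let engine = AVAudioEngine()
let inputNode = engine.inputNode

inputNode.installTap(onBus: 0, bufferSize: 2048,
                     format: inputNode.inputFormat(forBus: 0)) { buffer, _ in
    request.append(buffer) // hand each buffer to the recognition request
}

do {
    engine.prepare()
    try engine.start()
} catch {
    print("could not start engine: \(error)")
}

// Keep a reference to the task if you need to cancel it later.
_ = recognizer?.recognitionTask(with: request) { result, _ in
    if let result = result {
        print(result.bestTranscription.formattedString)
    }
}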