How to play multiple Audio Files simultaneously using AVPlayer? - swift

I am trying to play multiple audio files using two AVPlayer instances, but one of the players stops for a fraction of a second instead of all the audio files playing simultaneously.
The logic of the program is as follows:
var player: AVPlayer? streams an audio file from my database. On its own it plays perfectly.
fileprivate var countPlayer: AVPlayer? plays the count number of the current item being played by var player. The count is a sequence from 1 to 8, and for each digit I am storing/sandboxing a .wav file locally, such as 1.wav, 2.wav ... 8.wav.
When the current time of var player reaches a certain time, countPlayer is triggered and plays one of the local files 1.wav, 2.wav, etc.
The problem is that when countPlayer starts playing, it causes the background AVPlayer, namely var player, to stop for a fraction of a second, similar to what's described in this comment:
Play multiple Audio Files with AVPlayer
var player: AVPlayer? // plays the song
fileprivate var countPlayer: AVPlayer? // plays the count number of the song

private func addBoundaryTimeObserver(tableIndexPath: IndexPath) {
    let mediaItem = mediaArray[tableIndexPath.row]
    guard let url = URL(string: mediaItem.mediaAudioUrlStringRepresentation ?? "") else { return }
    let playerItem = AVPlayerItem(url: url)
    player = AVPlayer(playerItem: playerItem)

    // Convert the string representation of the recorded times to an array of Doubles
    let timesRecorded: [String] = mediaItem.timesRecorded.components(separatedBy: ",")
    let timeDoubles: [Double] = timesRecorded.compactMap { Double($0) }
    guard !timeDoubles.isEmpty else { return } // unexpected

    // Build boundary times from the recorded seconds
    let timesToTransverse: [NSValue] = timeDoubles.map { second in
        let cmtime = CMTime(seconds: second, preferredTimescale: CMTimeScale(NSEC_PER_SEC))
        return NSValue(time: cmtime)
    }

    guard let playerCell = tableView.cellForRow(at: IndexPath(row: 0, section: 0)) as? PlayerCell else { return }

    startTime = Date().timeIntervalSinceReferenceDate
    timeIndex = 0
    player?.play()
    player?.rate = Float(initialPlaybackRate)

    // Queue on which to invoke the callback
    let mainQueue = DispatchQueue.main

    // Add the boundary time observer
    timeObserverToken = player?.addBoundaryTimeObserver(forTimes: timesToTransverse, queue: mainQueue) { [weak self] in
        // Because there are no time signature changes, we can simply increment
        // timeIndex by 1 every time the completion handler is called and subscript
        // timesToTransverse with timeIndex to get the subsequent time in seconds.
        guard let strongSelf = self, strongSelf.timeIndex < timesToTransverse.count else { return }

        let timeElement = timesToTransverse[strongSelf.timeIndex]
        strongSelf.timeInSeconds = CMTimeGetSeconds(timeElement.timeValue)

        // Show progress in progressView
        let duration = CMTimeGetSeconds(playerItem.duration)
        let cmtimeSeconds = CMTime(seconds: strongSelf.timeInSeconds, preferredTimescale: CMTimeScale(NSEC_PER_SEC))

        // Total time since the timer started, in seconds
        strongSelf.timeInSeconds = Date().timeIntervalSinceReferenceDate - strongSelf.startTime
        strongSelf.timeString = String(format: "%.2f", strongSelf.timeInSeconds)

        // Use the remainder operator to determine the beat count:
        //   0: (0 + 1) % 8 = 1
        //   1: (1 + 1) % 8 = 2
        //   6: (6 + 1) % 8 = 7
        //   7: (7 + 1) % 8 = 0 -> mapped to 8 by the ternary below
        let beat = (strongSelf.timeIndex + 1) % 8 == 0 ? 8 : ((strongSelf.timeIndex + 1) % 8)

        // Play the beat count: 1, 2, ... 8
        strongSelf.prepareToPlayAudio(beatCount: beat)

        strongSelf.timeIndex += 1
    }
} // end addBoundaryTimeObserver

// Determine which .wav file to play
private func prepareToPlayAudio(beatCount: Int) {
    switch beatCount {
    case 1:
        guard let url = Bundle.main.url(forResource: "1", withExtension: "wav") else { return }
        playWith(beatCountURL: url)
    // 7 more cases go here ...
    default: print("unexpected case here")
    }
} // end prepareToPlayAudio(beatCount:)

private func playWith(beatCountURL: URL) {
    let playerItem = AVPlayerItem(url: beatCountURL)
    countPlayer = AVPlayer(playerItem: playerItem)
    countPlayer?.play()
}
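A side note before the answer: playWith(beatCountURL:) allocates a brand-new AVPlayer and AVPlayerItem on every beat, which is relatively costly work on the main thread. A hedged sketch of one mitigation, preloading the eight bundled count sounds into AVAudioPlayer instances once and reusing them (this is an assumption, not the approach the answer below recommends):

import AVFoundation

// Hypothetical preloading helper: one low-latency AVAudioPlayer per count sound.
final class BeatCountPlayer {
    private var players: [Int: AVAudioPlayer] = [:]

    init() {
        for beat in 1...8 {
            // Assumes 1.wav ... 8.wav are bundled with the app.
            guard let url = Bundle.main.url(forResource: "\(beat)", withExtension: "wav"),
                  let player = try? AVAudioPlayer(contentsOf: url) else { continue }
            player.prepareToPlay() // preload buffers so play() starts immediately
            players[beat] = player
        }
    }

    func play(beatCount: Int) {
        guard let player = players[beatCount] else { return }
        player.currentTime = 0 // rewind in case this sound already played
        player.play()
    }
}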

You would be better off using AVAudioPlayerNode, AVAudioMixerNode, and AVAudioEngine. With these classes you won't have the problems you have right now, and they are not that difficult to set up.
You can check out my gist; to play the sounds in your Playground you need to put the audio files into the Resources folder in the Project Navigator:
https://gist.github.com/standinga/24342d23acfe70dc08cbcc994895f32b
The code plays the background audio without stopping when the top sounds are triggered.
Here's the same code:
import AVFoundation
import PlaygroundSupport

PlaygroundPage.current.needsIndefiniteExecution = true

class AudioPlayer {
    var backgroundAudioFile: AVAudioFile
    var topAudioFiles: [AVAudioFile] = []
    var engine: AVAudioEngine
    var backgroundAudioNode: AVAudioPlayerNode
    var topAudioAudioNodes = [AVAudioPlayerNode]()
    var mixer: AVAudioMixerNode
    var timer: Timer!
    var urls: [URL] = []

    init(_ url: URL, urls: [URL] = []) {
        backgroundAudioFile = try! AVAudioFile(forReading: url)
        topAudioFiles = urls.map { try! AVAudioFile(forReading: $0) }
        engine = AVAudioEngine()
        mixer = AVAudioMixerNode()
        engine.attach(mixer)
        engine.connect(mixer, to: engine.outputNode, format: nil)
        self.urls = urls
        backgroundAudioNode = AVAudioPlayerNode()
        for _ in topAudioFiles {
            topAudioAudioNodes += [AVAudioPlayerNode()]
        }
    }

    func start() {
        engine.attach(backgroundAudioNode)
        engine.connect(backgroundAudioNode, to: mixer, format: nil)
        backgroundAudioNode.scheduleFile(backgroundAudioFile, at: nil, completionHandler: nil)
        try! engine.start()
        backgroundAudioNode.play()

        for node in topAudioAudioNodes {
            engine.attach(node)
            engine.connect(node, to: mixer, format: nil)
            try! engine.start() // starting an already-running engine is a no-op
        }

        // Simulate rescheduling files played on top of the background audio
        DispatchQueue.global().async { [unowned self] in
            for i in 0..<1000 {
                sleep(2)
                let index = i % self.topAudioAudioNodes.count
                let node = self.topAudioAudioNodes[index]
                node.scheduleFile(self.topAudioFiles[index], at: nil, completionHandler: nil)
                node.play()
            }
        }
    }
}

let bundle = Bundle.main
let beepLow = bundle.url(forResource: "beeplow", withExtension: "wav")!
let beepMid = bundle.url(forResource: "beepmid", withExtension: "wav")!
let backgroundAudio = bundle.url(forResource: "backgroundAudio", withExtension: "wav")!
let audioPlayer = AudioPlayer(backgroundAudio, urls: [beepLow, beepMid])
audioPlayer.start()
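One caveat, as an assumption not covered by the gist: when running this on an iOS device rather than in a macOS playground, the shared AVAudioSession may need to be configured before starting the engine, roughly like this:

import AVFoundation

// Assumed iOS-only setup; not needed in a macOS playground.
do {
    try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default)
    try AVAudioSession.sharedInstance().setActive(true)
} catch {
    print("Failed to configure the audio session: \(error)")
}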

Related

Unable to play downloaded HLS content while internet is not available

I am working on downloading and playing HLS content. To download the HLS I am using the following code:
func downloadTask() {
    let videoUrl = URL(string: "https://bitdash-a.akamaihd.net/content/MI201109210084_1/m3u8s/f08e80da-bf1d-4e3d-8899-f0f6155f6efa.m3u8")!
    configuration = URLSessionConfiguration.background(withIdentifier: downloadIdentifier)
    downloadSession = AVAssetDownloadURLSession(configuration: configuration!, assetDownloadDelegate: self, delegateQueue: OperationQueue.main)

    let documentsDirectoryURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!
    destinationUrl = documentsDirectoryURL.appendingPathComponent(videoUrl.lastPathComponent)

    var urlComponents = URLComponents(
        url: videoUrl,
        resolvingAgainstBaseURL: false
    )!
    urlComponents.scheme = "https"

    // AVURLAsset(url:) does not throw, so no do/try/catch is needed here
    let asset = AVURLAsset(url: urlComponents.url!)
    asset.resourceLoader.setDelegate(self, queue: DispatchQueue(label: "com.example.AssetResourceLoaderDelegateQueue"))

    if #available(iOS 10.0, *) {
        assetDownloadTask = downloadSession!
            .makeAssetDownloadTask(
                asset: asset,
                assetTitle: "RG-TVVideo",
                assetArtworkData: nil,
                options: nil
            )
        APP_DELEGATE.isProgressRunning = true
        assetDownloadTask?.resume()
    } else {
        // Fallback on earlier versions
    }
}
Download finished:
func urlSession(_ session: URLSession, assetDownloadTask: AVAssetDownloadTask, didFinishDownloadingTo location: URL) {
    if #available(iOS 11.0, *) {
        let storageManager = AVAssetDownloadStorageManager.shared()
        let newPolicy = AVMutableAssetDownloadStorageManagementPolicy()
        newPolicy.expirationDate = Date()
        newPolicy.priority = .important

        let baseURL = URL(fileURLWithPath: NSHomeDirectory())
        let assetURL = baseURL.appendingPathComponent(location.relativePath)
        storageManager.setStorageManagementPolicy(newPolicy, for: assetURL)

        UserDefaults.standard.set(location.relativePath, forKey: "videoPath")
        strDownloadStatus = "5"
        let dictVideoInfo = ["strDownloadStatus": "5", "VideoID": self.strID]

        // Here I am storing the downloaded location in the database
        DBManager.shared.updateVideoStatus(strVideoID: APP_DELEGATE.arrTempVideoIds.object(at: 0) as! String, strStatus: "5", strSavePath: location.relativePath) { (status) in }

        DispatchQueue.main.async {
            NotificationCenter.default.post(name: NSNotification.Name.init("UpdateProgress"), object: self.percentageComplete, userInfo: dictVideoInfo)
        }
    }
}
Now I am trying to get the video path from the location stored in the database and play it offline (without internet) using the following code:
func setLocalPlayer(strDownloadPath: String) {
    // Getting the path from the database
    // (must be `var` so it can be reassigned in the closure; note that if
    // getDownloadedPath is asynchronous, strDownloadPath may still be empty below)
    var strDownloadPath = ""
    DBManager.shared.getDownloadedPath(videoID: VideoID) { (strPath) in
        strDownloadPath = strPath
    }

    activityIndicator.isHidden = false
    let baseURL = URL(fileURLWithPath: NSHomeDirectory())
    let assetURL = baseURL.appendingPathComponent(strDownloadPath)
    let asset = AVURLAsset(url: assetURL)
    // if let cache = asset.assetCache, cache.isPlayableOffline {
    //     let videoAsset = AVURLAsset(url: assetURL)
    asset.resourceLoader.preloadsEligibleContentKeys = true
    asset.resourceLoader.setDelegate(self, queue: DispatchQueue(label: "com.example.AssetResourceLoaderDelegateQueue"))

    let playerItem = AVPlayerItem(asset: asset)
    avPlayer = AVPlayer(playerItem: playerItem)
    avPlayerLayer = AVPlayerLayer()
    avPlayerLayer.frame = CGRect(x: 0, y: 0, width: playerContainer.frame.width, height: playerContainer.frame.height)
    avPlayerLayer.videoGravity = .resize
    avPlayerLayer.player = avPlayer
    playerContainer.layer.addSublayer(avPlayerLayer)

    let interval = CMTime(seconds: 0.01, preferredTimescale: CMTimeScale(NSEC_PER_SEC))
    timeObserver = avPlayer?.addPeriodicTimeObserver(forInterval: interval, queue: DispatchQueue.main, using: { elapsedTime in
        self.updateVideoPlayerState()
        if self.avPlayer != nil {
            self.bufferState()
        }
    })

    self.slider.setThumbImage(UIImage(named: "slider_dot"), for: UIControl.State.normal)
    resetTimer()
    avPlayer.play()
    isPlaying = true
    // }
}
NOTE: This code works fine when the internet is on.
I have referred to the following links:
https://developer.apple.com/library/archive/documentation/AudioVideo/Conceptual/MediaPlaybackGuide/Contents/Resources/en.lproj/HTTPLiveStreaming/HTTPLiveStreaming.html
https://assist-software.net/snippets/how-play-encrypted-http-live-streams-offline-avfoundation-ios-using-swift-4
Downloading and playing offline HLS Content - iOS 10
Please advise on what I am doing wrong.
Thanks
Well, I don't know if it's your error, but for further reading:
Don't do newPolicy.expirationDate = Date(); it's a mistake. According to the Advances in HTTP Live Streaming WWDC 2017 session, it will delete your file as soon as possible.
Before playing your offline asset, you can check that it's still on your device in Settings -> General -> Storage -> MyApp.
The expiration date property is there in case your asset at some point becomes no longer eligible to be played. For instance, you may find yourself in a situation where a particular show is leaving your catalog and you no longer have rights to stream it. If that's the case, you can set the expiration date and it will be sort of bumped up in the deletion queue. So, using it is fairly straightforward.
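Following that advice, a minimal sketch of a policy that does not mark the download for immediate deletion (an illustration; `location` is the URL from the download delegate as in the question's code, and the one-year window is an arbitrary assumption):

if #available(iOS 11.0, *) {
    let storageManager = AVAssetDownloadStorageManager.shared()
    let policy = AVMutableAssetDownloadStorageManagementPolicy()
    // Keep the asset around: expire one year from now instead of immediately.
    policy.expirationDate = Date(timeIntervalSinceNow: 365 * 24 * 60 * 60)
    policy.priority = .important
    let assetURL = URL(fileURLWithPath: NSHomeDirectory()).appendingPathComponent(location.relativePath)
    storageManager.setStorageManagementPolicy(policy, for: assetURL)
}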

URL is nil in AVAudioFile

I am trying to play multiple audio files on top of a background audio file using AVAudioEngine.
When I try to initialize backgroundAudioFile, the app crashes with: Thread 1: Fatal error: 'try!' expression unexpectedly raised an error: Error Domain=com.apple.coreaudio.avfaudio Code=2003334207 "(null)" UserInfo={failed call=ExtAudioFileOpenURL((CFURLRef)fileURL, &_extAudioFile)}.
The URL passed to the initializer is valid; it is printed out.
import Foundation
import AVFoundation

class AudioPlayer {
    // Keeps track of the index of the item traversed within `timesToTransverse`
    // by `player?.addBoundaryTimeObserver`
    var timeIndex = 0
    var topAudioFiles: [AVAudioFile] = []
    var engine: AVAudioEngine
    var backgroundAudioNode: AVAudioPlayerNode
    var backgroundAudioFile: AVAudioFile
    var topAudioAudioNodes = [AVAudioPlayerNode]()
    var mixer: AVAudioMixerNode
    var timer: Timer!
    var urls: [URL] = []
    var player: AVPlayer!
    var timesToTransverse = [NSValue]() // contains time values in seconds such as [1.54, 2.64, 67.20]
    var delays = [UInt64]()
    fileprivate var timeObserverToken: Any?

    init(url: URL, urls: [URL] = [], timesToTransverse: [NSValue]) {
        self.urls = urls
        self.timesToTransverse = timesToTransverse
        topAudioFiles = urls.map { try! AVAudioFile(forReading: $0) }
        print("the remote url is \(url)")
        // It prints: the remote url is https://firebasestorage.googleapis.com/v0/b/salsaworld-658f3.appspot.com/o/adminAudioFiles%2F-LWv5rnKiLawXvsSsQgG.m4a?alt=media&token=19e8eac0-2b47-49e2-acd3-9a459903f84b
        // It crashes on this line:
        backgroundAudioFile = try! AVAudioFile(forReading: url)
        player = AVPlayer(url: url)
        engine = AVAudioEngine()
        mixer = AVAudioMixerNode()
        engine.attach(mixer)
        engine.connect(mixer, to: engine.outputNode, format: nil)
        backgroundAudioNode = AVAudioPlayerNode()
        initTopAudioNodes()
        try! engine.start()
    }

    func initTopAudioNodes() {
        for _ in topAudioFiles {
            topAudioAudioNodes += [AVAudioPlayerNode()]
        }
        for node in topAudioAudioNodes {
            engine.attach(node)
            engine.connect(node, to: mixer, format: nil)
        }
    } // end initTopAudioNodes

    func playWithAudioPlayerAndNodes() {
        player.play()
        var i = 1
        timeObserverToken = player.addBoundaryTimeObserver(forTimes: timesToTransverse, queue: nil) { [weak self] in
            guard let self = self else { return }
            let index = i % self.topAudioAudioNodes.count
            let node = self.topAudioAudioNodes[index]
            node.scheduleFile(self.topAudioFiles[index], at: nil, completionHandler: nil)
            node.play()
            i += 1

            /* Because there are no time signature changes,
               we can simply increment timeIndex by 1 every time
               `addBoundaryTimeObserver`'s completion handler is called.
               Then we subscript timesToTransverse with timeIndex
               in order to get the subsequent time in seconds.
            */
            guard self.timeIndex < self.timesToTransverse.count else { return }
            print("timeIndex is now \(self.timeIndex)")
            let timeElement = self.timesToTransverse[self.timeIndex]
            let timeInSeconds = CMTimeGetSeconds(timeElement.timeValue)

            // Use the remainder operator to determine the beat count:
            //   0: (0 + 1) % 8 = 1
            //   1: (1 + 1) % 8 = 2
            //   6: (6 + 1) % 8 = 7
            //   7: (7 + 1) % 8 = 0
            let beat = (self.timeIndex + 1) % 8 == 0 ? 8 : ((self.timeIndex + 1) % 8)
            print("Beat would be: ", beat)

            self.timeIndex += 1
        }
    } // end playWithAudioPlayerAndNodes
} // end class AudioPlayer

// Create an instance of AudioPlayer and call playWithAudioPlayerAndNodes
class HomeViewController: UIViewController {
    override func viewDidLoad() {
        super.viewDidLoad()
        let bundle = Bundle.main
        let one = bundle.url(forResource: "1", withExtension: "wav")!
        let two = bundle.url(forResource: "2", withExtension: "wav")!
        let three = bundle.url(forResource: "3", withExtension: "wav")!
        let five = bundle.url(forResource: "5", withExtension: "wav")!
        let six = bundle.url(forResource: "6", withExtension: "wav")!
        let seven = bundle.url(forResource: "7", withExtension: "wav")!

        // mediaArray contains string URLs downloaded from the Firebase database
        let mediaItem = mediaArray[tableIndexPath.row]
        guard let backgroundAudio = URL(string: mediaItem.mediaAudioUrlStringRepresentation ?? "") else { return }

        let audioPlayer = AudioPlayer(url: backgroundAudio,
                                      urls: [one, two, three, five, six, seven],
                                      timesToTransverse: timesToTransverse)
        // Start playing
        audioPlayer.playWithAudioPlayerAndNodes()
    }
}
You cannot call AVAudioFile(forReading:) on a remote file. You need to download the binary data and parse it into packets using Audio File Stream Services. That way, you can supply the packets to a buffer and play from the buffer through the audio engine.
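If true streaming is not required, a simpler workaround than Audio File Stream Services is to download the whole file first and open the local copy; a minimal sketch, assuming the file is small enough to fetch fully before playback (the helper name is hypothetical):

import AVFoundation

// Hypothetical helper: fetch a remote audio file to a local URL, then open it.
func loadRemoteAudioFile(from remoteURL: URL, completion: @escaping (AVAudioFile?) -> Void) {
    let task = URLSession.shared.downloadTask(with: remoteURL) { tempURL, _, error in
        guard let tempURL = tempURL, error == nil else {
            completion(nil)
            return
        }
        // Move the download out of the temporary location before it is cleaned up.
        let localURL = FileManager.default.temporaryDirectory
            .appendingPathComponent(UUID().uuidString)
            .appendingPathExtension(remoteURL.pathExtension)
        do {
            try FileManager.default.moveItem(at: tempURL, to: localURL)
            // AVAudioFile(forReading:) works here because localURL is a file URL.
            completion(try AVAudioFile(forReading: localURL))
        } catch {
            completion(nil)
        }
    }
    task.resume()
}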
You can try:
let one = try! AVAudioFile(forReading: URL(fileURLWithPath: (Bundle.main.path(forResource: "1", ofType: "wav", inDirectory: "YOUKNOW!")!)))
It works fine for me.

Mac - Swift 3 - queuing audio files and playing

I would like to write an app in Swift 3 that plays queued audio files without any gap, crack, or noise when passing from one to the next.
My first try was using AVAudioPlayer and AVAudioPlayerDelegate (AVAudioPlayer using array to queue audio files - Swift), but I don't know how to preload the next song to avoid a gap. Even if I knew how to do it, I am not certain it is the best way to achieve my goal.
AVQueuePlayer seems to be a better candidate for the job; it is made for that purpose, but I can't find any example to help me out.
Maybe it is only a problem of preloading or buffering? I am a bit lost in this ocean of possibilities.
Any suggestion is welcome.
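For reference, a minimal AVQueuePlayer sketch, assuming two local files audiofile1.aif and audiofile2.aif in the bundle (the file names are assumptions); AVQueuePlayer preloads the upcoming AVPlayerItem while the current one plays, which is what makes near-gapless transitions possible:

import AVFoundation

// Assumed bundled files; replace with your own resources.
let names = ["audiofile1", "audiofile2"]
let items: [AVPlayerItem] = names.compactMap { name in
    guard let url = Bundle.main.url(forResource: name, withExtension: "aif") else { return nil }
    return AVPlayerItem(url: url)
}

// AVQueuePlayer advances through its items automatically,
// buffering the next one while the current one plays.
let queuePlayer = AVQueuePlayer(items: items)
queuePlayer.play()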
It is far from perfect, especially if you want to run it twice or more (a "file exists" error), but it can serve as a base.
What it does is take two files (mine are AIFF samples of about 4 seconds), encode them into one file, and play the resulting file. If you have hundreds of them, assembled randomly or not, it can be great fun.
All credit for the mergeAudioFiles function goes to @Peyman and @Pigeon_39: Concatenate two audio files in Swift and play them
Swift 3
import Cocoa
import AVFoundation

var action = AVAudioPlayer()
let path = Bundle.main.path(forResource: "audiofile1.aif", ofType: nil)!
let url = URL(fileURLWithPath: path)
let path2 = Bundle.main.path(forResource: "audiofile2.aif", ofType: nil)!
let url2 = URL(fileURLWithPath: path2)
let array1 = NSMutableArray(array: [url, url2])

class ViewController: NSViewController, AVAudioPlayerDelegate {
    @IBOutlet weak var LanceStop: NSButton!

    override func viewDidLoad() {
        super.viewDidLoad()
    }

    override var representedObject: Any? {
        didSet {
            // Update the view, if already loaded.
        }
    }

    @IBAction func Lancer(_ sender: NSButton) {
        mergeAudioFiles(audioFileUrls: array1)
        let url3 = NSURL(string: "/Users/ADDUSERNAMEHERE/Documents/FinalAudio.m4a")
        do {
            action = try AVAudioPlayer(contentsOf: url3 as! URL)
            action.delegate = self
            action.numberOfLoops = 0
            action.prepareToPlay()
            action.volume = 1
            action.play()
        } catch { print("error") }
    }

    func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
        if flag == true {
        }
    }

    var mergeAudioURL = NSURL()

    func mergeAudioFiles(audioFileUrls: NSArray) {
        //audioFileUrls.adding(url)
        //audioFileUrls.adding(url2)
        let composition = AVMutableComposition()

        for i in 0 ..< audioFileUrls.count {
            let compositionAudioTrack: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
            let asset = AVURLAsset(url: (audioFileUrls[i] as! NSURL) as URL)
            let track = asset.tracks(withMediaType: AVMediaTypeAudio)[0]
            let timeRange = CMTimeRange(start: CMTimeMake(0, 600), duration: track.timeRange.duration)
            try! compositionAudioTrack.insertTimeRange(timeRange, of: track, at: composition.duration)
        }

        let documentDirectoryURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first! as NSURL
        self.mergeAudioURL = documentDirectoryURL.appendingPathComponent("FinalAudio.m4a")! as URL as NSURL

        let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)
        assetExport?.outputFileType = AVFileTypeAppleM4A
        assetExport?.outputURL = mergeAudioURL as URL
        assetExport?.exportAsynchronously(completionHandler: {
            switch assetExport!.status {
            case AVAssetExportSessionStatus.failed:
                print("failed \(assetExport?.error)")
            case AVAssetExportSessionStatus.cancelled:
                print("cancelled \(assetExport?.error)")
            case AVAssetExportSessionStatus.unknown:
                print("unknown \(assetExport?.error)")
            case AVAssetExportSessionStatus.waiting:
                print("waiting \(assetExport?.error)")
            case AVAssetExportSessionStatus.exporting:
                print("exporting \(assetExport?.error)")
            default:
                print("Audio Concatenation Complete")
            }
        })
    }
}
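About the "file exists" error mentioned above: AVAssetExportSession fails when its outputURL already points to an existing file, so a second run fails. A minimal fix (an assumption about the cause, but consistent with the error) is to delete any previous export at the top of mergeAudioFiles, before creating the export session:

// Remove a previous export so AVAssetExportSession can write the file again.
try? FileManager.default.removeItem(at: mergeAudioURL as URL)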

Generate AVAudioPCMBuffer with AVAudioRecorder

Along with iOS 10, Apple released a new framework which recognizes speech. Data can be passed to this framework either by appending AVAudioPCMBuffers or by giving a URL to an m4a. Currently, speech recognition works using the latter, but this is only possible after somebody has finished speaking and is not in real time. Here is the code for that:
let audioSession = AVAudioSession.sharedInstance()
var audioRecorder: AVAudioRecorder!
var soundURLGlobal: URL!

func setUp() {
    let recordSettings = [AVSampleRateKey: NSNumber(value: Float(44100.0)),
                          AVFormatIDKey: NSNumber(value: Int32(kAudioFormatMPEG4AAC)),
                          AVNumberOfChannelsKey: NSNumber(value: 1),
                          AVEncoderAudioQualityKey: NSNumber(value: Int32(AVAudioQuality.medium.rawValue))]

    let fileManager = FileManager.default
    let urls = fileManager.urls(for: .documentDirectory, in: .userDomainMask)
    let documentDirectory = urls[0] as NSURL
    let soundURL = documentDirectory.appendingPathComponent("sound.m4a")
    soundURLGlobal = soundURL

    do {
        try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord)
        try audioRecorder = AVAudioRecorder(url: soundURL!, settings: recordSettings)
        audioRecorder.prepareToRecord()
    } catch {}
}

func start() {
    do {
        try audioSession.setActive(true)
        audioRecorder.record()
    } catch {}
}

func stop() {
    audioRecorder.stop()
    let request = SFSpeechURLRecognitionRequest(url: soundURLGlobal!)
    let recognizer = SFSpeechRecognizer()
    recognizer?.recognitionTask(with: request, resultHandler: { (result, error) in
        if result!.isFinal {
            print(result?.bestTranscription.formattedString)
        }
    })
}
I am trying to convert this, but I cannot find where to get an AVAudioPCMBuffer.
Thanks,
Good topic.
Hi B Person, here is a topic with a solution:
Tap Mic Input Using AVAudioEngine in Swift
See the WWDC 2014 lecture 502 - AVAudioEngine in Practice: capturing the microphone is covered at around 20:00, and creating a buffer with a tap at around 21:50.
Here is the Swift 3 code:
@IBAction func button01Pressed(_ sender: Any) {
    let inputNode = audioEngine.inputNode
    let bus = 0
    inputNode?.installTap(onBus: bus, bufferSize: 2048, format: inputNode?.inputFormat(forBus: bus)) {
        (buffer: AVAudioPCMBuffer!, time: AVAudioTime!) -> Void in

        let theLength = Int(buffer.frameLength)
        print("theLength = \(theLength)")

        var samplesAsDoubles: [Double] = []
        for i in 0 ..< Int(buffer.frameLength) {
            let theSample = Double((buffer.floatChannelData?.pointee[i])!)
            samplesAsDoubles.append(theSample)
        }
        print("samplesAsDoubles.count = \(samplesAsDoubles.count)")
    }

    audioEngine.prepare()
    try! audioEngine.start()
}
To stop audio:
func stopAudio() {
    let inputNode = audioEngine.inputNode
    let bus = 0
    inputNode?.removeTap(onBus: bus)
    self.audioEngine.stop()
}
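To tie the tap back to the question's real-time goal, a hedged sketch (assuming Speech framework authorization and microphone permission are already granted): append each tapped buffer to an SFSpeechAudioBufferRecognitionRequest instead of recording an m4a first.

import AVFoundation
import Speech

let audioEngine = AVAudioEngine()
let recognizer = SFSpeechRecognizer()
let request = SFSpeechAudioBufferRecognitionRequest()
var recognitionTask: SFSpeechRecognitionTask?

func startLiveTranscription() throws {
    // Feed every captured buffer straight into the recognition request.
    let inputNode = audioEngine.inputNode
    let format = inputNode.outputFormat(forBus: 0)
    inputNode.installTap(onBus: 0, bufferSize: 2048, format: format) { buffer, _ in
        request.append(buffer)
    }

    recognitionTask = recognizer?.recognitionTask(with: request) { result, _ in
        if let result = result {
            // Partial results arrive continuously, which is what makes this real time.
            print(result.bestTranscription.formattedString)
        }
    }

    audioEngine.prepare()
    try audioEngine.start()
}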

Play sound with a little delay

I have a sound in my app that starts automatically when the view appears; but, as the title says, I'd like this sound to start with a little delay, about half a second after the view appears. I tried to use playAtTime, but either it does not work or I have set something up wrong...
This is my code:
var player: AVAudioPlayer?

override func viewDidLoad() {
    super.viewDidLoad()
    playAudioWithDelay()
}

func playAudioWithDelay() {
    let file = NSBundle.mainBundle().URLForResource("PR1", withExtension: "wav")
    player = AVAudioPlayer(contentsOfURL: file, error: nil)
    player!.volume = 0.5
    player!.numberOfLoops = -1
    player!.playAtTime(/* I tried with 0.5 but it doesn't work */)
    player!.prepareToPlay()
    player!.play()
}
You can try using this:
let seconds = 1.0 // time to delay
let delay = seconds * Double(NSEC_PER_SEC) // nanoseconds per second
let dispatchTime = dispatch_time(DISPATCH_TIME_NOW, Int64(delay))
dispatch_after(dispatchTime, dispatch_get_main_queue(), {
    // Play sound here
})
Full code:
func playAudioWithDelay() {
    let file = NSBundle.mainBundle().URLForResource("PR1", withExtension: "wav")
    player = AVAudioPlayer(contentsOfURL: file, error: nil)
    player!.volume = 0.5
    player!.numberOfLoops = -1
    player!.prepareToPlay()

    let seconds = 1.0 // time to delay
    let delay = seconds * Double(NSEC_PER_SEC) // nanoseconds per second
    let dispatchTime = dispatch_time(DISPATCH_TIME_NOW, Int64(delay))
    dispatch_after(dispatchTime, dispatch_get_main_queue(), {
        player!.play()
    })
}
Try the following function, implemented in Swift 3.0:
var player: AVAudioPlayer?

func playAudioWithDelay() {
    let file = Bundle.main.url(forResource: "PR1", withExtension: "wav")
    do {
        player = try AVAudioPlayer(contentsOf: file!)
        player?.volume = 0.5
        player?.numberOfLoops = -1
        player?.prepareToPlay()
    } catch let error as NSError {
        print("error: \(error.localizedDescription)")
    }

    let seconds = 1.0 // time to delay
    let when = DispatchTime.now() + seconds
    DispatchQueue.main.asyncAfter(deadline: when) {
        self.play()
    }
}

func play() {
    if player?.isPlaying == false {
        player?.play()
    }
}
Swift 5 audio delay version:
var player: AVAudioPlayer?

// `extension` is a reserved word in Swift, so give the parameter an
// internal name (fileExtension) and keep `extension` as the label.
func playAudio(soundName: String, extension fileExtension: String, delay: Double) {
    let file = Bundle.main.url(forResource: soundName, withExtension: fileExtension)
    do {
        player = try AVAudioPlayer(contentsOf: file!)
        player?.volume = 0.5
        player?.numberOfLoops = -1
        player?.prepareToPlay()
    } catch let error as NSError {
        print("error: \(error.localizedDescription)")
    }

    let seconds = delay // time to delay
    let when = DispatchTime.now() + seconds
    DispatchQueue.main.asyncAfter(deadline: when) {
        player?.play() // player is optional, so optional-chain here
    }
}
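For completeness, play(atTime:) itself can work; the likely reason the original attempt failed is that it expects an absolute time based on the player's deviceCurrentTime, not a plain offset like 0.5. A minimal sketch of that reading of the API:

import AVFoundation

var player: AVAudioPlayer?

func playAudioDelayed(by delay: TimeInterval) {
    guard let file = Bundle.main.url(forResource: "PR1", withExtension: "wav"),
          let audioPlayer = try? AVAudioPlayer(contentsOf: file) else { return }
    player = audioPlayer
    audioPlayer.prepareToPlay()
    // play(atTime:) takes an absolute device time, so add the delay
    // to deviceCurrentTime instead of passing the delay directly.
    audioPlayer.play(atTime: audioPlayer.deviceCurrentTime + delay)
}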