How to play a sequence of MP3 files? [duplicate] - swift

This question already has answers here:
Expected declaration error for making high score [duplicate]
(2 answers)
Closed 3 years ago.
I'm trying to play a sequence of MP3 files using an array, a for loop, and AVQueuePlayer. I get the error Expected declaration.
for number in myIndex..<arr.count {
    var queuePlayer: AVQueuePlayer = {
        let url1 = Bundle.main.url(forResource: String(myIndex + number), withExtension: "mp3")!
        let item1 = AVPlayerItem(url: url1)
        let queue = AVQueuePlayer(items: [item1])
        return queue
    }()
}
You get Expected declaration because the loop sits directly in the class body, where only declarations are allowed. Move the code into a method, build all the items first, and use a single player:
@IBAction func autoplay(_ sender: Any) {
    var items: [AVPlayerItem] = []
    for number in myIndex..<arr.count {
        let url = Bundle.main.url(forResource: String(number), withExtension: "mp3")!
        items.append(AVPlayerItem(url: url))
    }
    let queue = AVQueuePlayer(items: items)
    queue.play()
}

You need a single AVQueuePlayer, so try it like this:
lazy var queue: AVQueuePlayer = {
    return AVQueuePlayer()
}()

var items: [AVPlayerItem] = []
for number in myIndex..<arr.count {
    let url = Bundle.main.url(forResource: String(myIndex + number), withExtension: "mp3")!
    items.append(AVPlayerItem(url: url))
}
queue = AVQueuePlayer(items: items)
Then you can simply call queue.play(), queue.advanceToNextItem(), and so on.
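For instance, wired to two buttons (a minimal sketch; the action names are hypothetical):

    @IBAction func playTapped(_ sender: Any) {
        queue.play() // starts or resumes the current item
    }

    @IBAction func skipTapped(_ sender: Any) {
        queue.advanceToNextItem() // jumps to the next AVPlayerItem in the queue
    }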

Related

How to play two audio files at once with Swift using AudioKit

I have two short WAV audio files that I'm trying to play at the same time. Using AudioKit, I have an AudioEngine(), and I'm assuming I should use a MultiSegmentAudioPlayer() as the output along with scheduleSegments().
Here is what I have:
class AudioPlayClass: ObservableObject {
    var player = MultiSegmentAudioPlayer()
    let engine = AudioEngine()

    init() {}

    func playFiles() {
        self.engine.output = player
        do {
            try AVAudioSession.sharedInstance().setCategory(.playAndRecord, mode: .default, options: .defaultToSpeaker)
            try AVAudioSession.sharedInstance().setActive(true)
            try engine.start()

            guard let url = Bundle.main.url(forResource: note1, withExtension: "wav", subdirectory: instrumentDirectory) else { return }
            guard let url2 = Bundle.main.url(forResource: note2, withExtension: "wav", subdirectory: instrumentDirectory) else { return }

            let audioFile = try AVAudioFile(forReading: url)
            let audioFile2 = try AVAudioFile(forReading: url2)

            let fileSampleRate = audioFile.processingFormat.sampleRate
            let file2SampleRate = audioFile2.processingFormat.sampleRate
            let fileNumberOfSamples = audioFile.length
            let file2NumberOfSamples = audioFile2.length
            let audioFileEndTime = Double(fileNumberOfSamples) / fileSampleRate
            let audioFile2EndTime = Double(file2NumberOfSamples) / file2SampleRate

            let segment1 = segment(audioFile: audioFile,
                                   playbackStartTime: 0.0, fileStartTime: 0, fileEndTime: audioFileEndTime)
            let segment2 = segment(audioFile: audioFile2,
                                   playbackStartTime: 0.0, fileStartTime: 0, fileEndTime: audioFile2EndTime)
            player.scheduleSegments(audioSegments: [segment1, segment2]) // was "player2" in the original, but the property is named "player"
            player.play()
        } catch {
            print(error.localizedDescription)
        }
    }
}
public struct segment: StreamableAudioSegment {
    public var audioFile: AVAudioFile
    public var playbackStartTime: TimeInterval
    public var fileStartTime: TimeInterval
    public var fileEndTime: TimeInterval
    public var completionHandler: AVAudioNodeCompletionHandler?
}
I just have a basic understanding of playing audio in Swift and using AudioKit so any feedback would be greatly appreciated. Thanks!
I think MultiSegmentAudioPlayer is mostly for playing sounds sequentially. You probably just want two AudioPlayer() instances and play them both at the same time.
Solved this by creating two separate instances of AudioEngine(), each with its own AudioPlayer(), and loading and playing them immediately one after the other.
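A minimal sketch of the two-player idea suggested above, assuming AudioKit 5. Here both players feed a single Mixer on one engine (rather than two engines), and the resource names noteA/noteB are placeholders:

    import AudioKit
    import AVFoundation

    class TwoNotePlayer {
        let engine = AudioEngine()
        let player1 = AudioPlayer()
        let player2 = AudioPlayer()

        func play() throws {
            // Mix both players into one output so they can sound together.
            engine.output = Mixer(player1, player2)
            // "noteA"/"noteB" are hypothetical bundled resources.
            guard let urlA = Bundle.main.url(forResource: "noteA", withExtension: "wav"),
                  let urlB = Bundle.main.url(forResource: "noteB", withExtension: "wav") else { return }
            try player1.load(url: urlA)
            try player2.load(url: urlB)
            try engine.start()
            // Starting the players back to back plays the two files simultaneously.
            player1.play()
            player2.play()
        }
    }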

URL is nil in AVAudioFile

I am trying to play multiple audio files on top of a background audio file using AVAudioEngine.
When I try to initialize backgroundAudioFile, the app crashes with: Thread 1: Fatal error: 'try!' expression unexpectedly raised an error: Error Domain=com.apple.coreaudio.avfaudio Code=2003334207 "(null)" UserInfo={failed call=ExtAudioFileOpenURL((CFURLRef)fileURL, &_extAudioFile)}.
The url passed to the initializer is valid. It is printed out.
import Foundation
import AVFoundation
class AudioPlayer {
    var timeIndex = 0 // keeps track of the index of the item traversed within `timesToTransverse` by `player?.addBoundaryTimeObserver`
    var topAudioFiles: [AVAudioFile] = []
    var engine: AVAudioEngine
    var backgroundAudioNode: AVAudioPlayerNode
    var backgroundAudioFile: AVAudioFile
    var topAudioAudioNodes = [AVAudioPlayerNode]()
    var mixer: AVAudioMixerNode
    var timer: Timer!
    var urls: [URL] = []
    var player: AVPlayer!
    var timesToTransverse = [NSValue]() // contains time values in seconds such as [1.54, 2.64, 67.20]
    var delays = [UInt64]()
    fileprivate var timeObserverToken: Any?

    init(url: URL, urls: [URL] = [], timesToTransverse: [NSValue]) {
        self.urls = urls
        self.timesToTransverse = timesToTransverse
        topAudioFiles = urls.map { try! AVAudioFile(forReading: $0) }
        print("the remote url is \(url)")
        // it prints: the url is https://firebasestorage.googleapis.com/v0/b/salsaworld-658f3.appspot.com/o/adminAudioFiles%2F-LWv5rnKiLawXvsSsQgG.m4a?alt=media&token=19e8eac0-2b47-49e2-acd3-9a459903f84b
        // it crashes on this line
        backgroundAudioFile = try! AVAudioFile(forReading: url)
        player = AVPlayer(url: url)
        engine = AVAudioEngine()
        mixer = AVAudioMixerNode()
        engine.attach(mixer)
        engine.connect(mixer, to: engine.outputNode, format: nil)
        backgroundAudioNode = AVAudioPlayerNode()
        initTopAudioNodes()
        try! engine.start()
    }
    func initTopAudioNodes() {
        for _ in topAudioFiles {
            topAudioAudioNodes += [AVAudioPlayerNode()]
        }
        for node in topAudioAudioNodes {
            engine.attach(node)
            engine.connect(node, to: mixer, format: nil)
        }
    } //end initTopAudioNodes
    func playWithAudioPlayerAndNodes() {
        player.play()
        var i = 1
        timeObserverToken = player.addBoundaryTimeObserver(forTimes: timesToTransverse, queue: nil) {
            [weak self] in
            guard let self = self else { return }
            let index = i % self.topAudioAudioNodes.count
            let node = self.topAudioAudioNodes[index]
            node.scheduleFile(self.topAudioFiles[index], at: nil, completionHandler: nil)
            node.play()
            i += 1
            /* Because there are no time signature changes,
               we can simply increment timeIndex by 1 every time
               `addBoundaryTimeObserver`'s completion handler is called.
               Then we subscript timesToTransverse with timeIndex
               to get the subsequent timeInSeconds. */
            guard self.timeIndex < self.timesToTransverse.count else { return }
            print("timeIndex is now \(self.timeIndex)")
            let timeElement = self.timesToTransverse[self.timeIndex]
            let timeInSeconds = CMTimeGetSeconds(timeElement.timeValue)
            // use the remainder operator to determine the beat count
            let beat = (self.timeIndex + 1) % 8 == 0 ? 8 : ((self.timeIndex + 1) % 8)
            print("Beat would be: ", beat)
            /*
             0: (0 + 1) % 8 = 1
             1: (1 + 1) % 8 = 2
             6: (6 + 1) % 8 = 7
             7: (7 + 1) % 8 = 0
             */
            self.timeIndex += 1
        }
    } //end playWithAudioPlayerAndNodes
} //end class AudioPlayer
// Create an instance of AudioPlayer and call playWithAudioPlayerAndNodes.
class HomeViewController: UIViewController {
    override func viewDidLoad() {
        super.viewDidLoad()
        let bundle = Bundle.main
        let one = bundle.url(forResource: "1", withExtension: "wav")!
        let two = bundle.url(forResource: "2", withExtension: "wav")!
        let three = bundle.url(forResource: "3", withExtension: "wav")!
        let five = bundle.url(forResource: "5", withExtension: "wav")!
        let six = bundle.url(forResource: "6", withExtension: "wav")!
        let seven = bundle.url(forResource: "7", withExtension: "wav")!
        // mediaArray contains string URLs downloaded from Firebase Database
        let mediaItem = mediaArray[tableIndexPath.row]
        guard let backgroundAudio = URL(string: mediaItem.mediaAudioUrlStringRepresentation ?? "") else { return }
        let audioPlayer = AudioPlayer(url: backgroundAudio,
                                      urls: [one, two, three, five, six, seven],
                                      timesToTransverse: timesToTransverse)
        // start playing
        audioPlayer.playWithAudioPlayerAndNodes()
    }
}
You cannot call AVAudioFile(forReading:) on a remote file. You need to download the binary data and parse it into packets using Audio File Stream Services; that way you can supply the packets to a buffer and play from the buffer through the audio engine.
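If you don't need true streaming, a simpler workaround (a minimal sketch, not part of the original answer) is to download the file to disk first and only then open it with AVAudioFile:

    import AVFoundation

    // Downloads the remote audio to a local file, then opens it with AVAudioFile.
    // The completion handler is invoked on a background queue.
    func loadRemoteAudioFile(from remoteURL: URL,
                             completion: @escaping (AVAudioFile?) -> Void) {
        let task = URLSession.shared.downloadTask(with: remoteURL) { tempURL, _, error in
            guard let tempURL = tempURL, error == nil else {
                completion(nil)
                return
            }
            do {
                // Move the download out of the temp location before the system deletes it.
                let localURL = FileManager.default.temporaryDirectory
                    .appendingPathComponent(UUID().uuidString)
                    .appendingPathExtension(remoteURL.pathExtension)
                try FileManager.default.moveItem(at: tempURL, to: localURL)
                completion(try AVAudioFile(forReading: localURL))
            } catch {
                completion(nil)
            }
        }
        task.resume()
    }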
You can try:
    let one = try! AVAudioFile(forReading: URL(fileURLWithPath: Bundle.main.path(forResource: "1", ofType: "wav", inDirectory: "YOUKNOW!")!))
It works fine for me.

How to play multiple Audio Files simultaneously using AVPlayer?

I am trying to play multiple audio files using two AVPlayer instances, but one of the players stops for a fraction of a second rather than playing all audio files simultaneously.
The logic of the program is as follows:
var player: AVPlayer? streams an audio file from my database. On its own it plays perfectly.
fileprivate var countPlayer: AVPlayer? plays the count number of the current item being played by var player. The count is a sequence of 1 to 8, and for each digit I am storing/sandboxing a .wav file locally, such as 1.wav, 2.wav ... 8.wav.
When the current time of var player reaches certain times, countPlayer is triggered and plays one of the local files 1.wav, 2.wav, etc.
The problem is that when countPlayer starts playing, it causes the background AVPlayer, namely var player, to stop for a fraction of a second, similar to what's described in this comment:
Play multiple Audio Files with AVPlayer
var player: AVPlayer? // plays the song
fileprivate var countPlayer: AVPlayer? // plays the count number of the song

private func addBoundaryTimeObserver(tableIndexPath: IndexPath) {
    let mediaItem = mediaArray[tableIndexPath.row]
    guard let url = URL(string: mediaItem.mediaAudioUrlStringRepresentation ?? "") else { return }
    let playerItem = AVPlayerItem(url: url)
    player = AVPlayer(playerItem: playerItem)

    var timesToTransverse = [NSValue]()
    // convert the string representation of times to an array
    let timesRecorded: [String] = mediaItem.timesRecorded.components(separatedBy: ",")
    // build boundary times from arrayOfBeats keys
    let timeDoubles: [Double] = timesRecorded.compactMap { timeString in
        if let second = Double("\(timeString)") {
            return second
        }
        return nil
    }
    guard timeDoubles.count > 0 else { return } // unexpected
    timesToTransverse = timeDoubles.map { second in
        let cmtime = CMTime(seconds: second, preferredTimescale: CMTimeScale(NSEC_PER_SEC))
        return NSValue(time: cmtime)
    }
    guard timesToTransverse.count != 0 else { return }
    guard let playerCell = tableView.cellForRow(at: IndexPath(row: 0, section: 0)) as? PlayerCell else { return }

    startTime = Date().timeIntervalSinceReferenceDate
    timeIndex = 0
    player?.play()
    player?.rate = Float(initialPlaybackRate)

    // find the index of time
    // reset timeObserverToken
    // call a function with the new times sorted
    // Queue on which to invoke the callback
    let mainQueue = DispatchQueue.main
    // Add time observer
    timeObserverToken =
        player?.addBoundaryTimeObserver(forTimes: timesToTransverse, queue: mainQueue) {
            [weak self] in
            // Because there are no time signature changes, we can simply increment timeIndex
            // by 1 every time the completion handler is called, and subscript timesToTransverse
            // with timeIndex to get the subsequent timeInSeconds.
            guard let strongSelf = self, strongSelf.timeIndex < timesToTransverse.count else { return }
            let timeElement = timesToTransverse[strongSelf.timeIndex]
            strongSelf.timeInSeconds = CMTimeGetSeconds(timeElement.timeValue)

            // show progress in progressView
            let duration = CMTimeGetSeconds(playerItem.duration)
            let cmtimeSeconds = CMTime(seconds: strongSelf.timeInSeconds, preferredTimescale: CMTimeScale(NSEC_PER_SEC))

            // total time since the timer started, in seconds
            strongSelf.timeInSeconds = Date().timeIntervalSinceReferenceDate - strongSelf.startTime
            let timeString = String(format: "%.2f", strongSelf.timeInSeconds)
            strongSelf.timeString = timeString

            // use the remainder operator to determine the beat count
            let beat = (strongSelf.timeIndex + 1) % 8 == 0 ? 8 : ((strongSelf.timeIndex + 1) % 8)
            // play the beat count: 1, 2, ... 8
            strongSelf.prepareToPlayAudio(beatCount: beat) // was `self.preapareToPlayAudio`, which doesn't compile inside a weak-self closure
            /*
             0: (0 + 1) % 8 = 1
             1: (1 + 1) % 8 = 2
             6: (6 + 1) % 8 = 7
             7: (7 + 1) % 8 = 0
             */
            strongSelf.timeIndex += 1
        }
} //end addBoundaryTimeObserver
// determine which wav file to play
private func prepareToPlayAudio(beatCount: Int) {
    switch beatCount {
    case 1:
        guard let url = Bundle.main.url(forResource: "1", withExtension: "wav") else { return }
        playWith(beatCountURL: url)
    // 7 more cases go here .....
    default: print("unexpected case here")
    }
} //end prepareToPlayAudio(beatCount:)

private func playWith(beatCountURL: URL) {
    let playerItem = AVPlayerItem(url: beatCountURL)
    countPlayer = AVPlayer(playerItem: playerItem)
    countPlayer?.play()
}
You would be better off using AVAudioPlayerNode, AVAudioMixerNode, and AVAudioEngine. With these classes you won't have the problems you're seeing now, and they're not that difficult to set up.
You can check out my gist; to play the sounds in a Playground you need to put the audio files into the Resources folder in the Project Navigator:
https://gist.github.com/standinga/24342d23acfe70dc08cbcc994895f32b
The code works without stopping background audio when top sounds are triggered.
Here's also the same code:
import AVFoundation
import PlaygroundSupport

PlaygroundPage.current.needsIndefiniteExecution = true

class AudioPlayer {
    var backgroundAudioFile: AVAudioFile
    var topAudioFiles: [AVAudioFile] = []
    var engine: AVAudioEngine
    var backgroundAudioNode: AVAudioPlayerNode
    var topAudioAudioNodes = [AVAudioPlayerNode]()
    var mixer: AVAudioMixerNode
    var timer: Timer!
    var urls: [URL] = []

    init(_ url: URL, urls: [URL] = []) {
        backgroundAudioFile = try! AVAudioFile(forReading: url)
        topAudioFiles = urls.map { try! AVAudioFile(forReading: $0) }
        engine = AVAudioEngine()
        mixer = AVAudioMixerNode()
        engine.attach(mixer)
        engine.connect(mixer, to: engine.outputNode, format: nil)
        self.urls = urls
        backgroundAudioNode = AVAudioPlayerNode()
        for _ in topAudioFiles {
            topAudioAudioNodes += [AVAudioPlayerNode()]
        }
    }

    func start() {
        engine.attach(backgroundAudioNode)
        engine.connect(backgroundAudioNode, to: mixer, format: nil)
        backgroundAudioNode.scheduleFile(backgroundAudioFile, at: nil, completionHandler: nil)
        try! engine.start()
        backgroundAudioNode.play()
        for node in topAudioAudioNodes {
            engine.attach(node)
            engine.connect(node, to: mixer, format: nil)
            try! engine.start()
        }
        // simulate rescheduling files played on top of the background audio
        DispatchQueue.global().async { [unowned self] in
            for i in 0..<1000 {
                sleep(2)
                let index = i % self.topAudioAudioNodes.count
                let node = self.topAudioAudioNodes[index]
                node.scheduleFile(self.topAudioFiles[index], at: nil, completionHandler: nil)
                node.play()
            }
        }
    }
}
let bundle = Bundle.main
let beepLow = bundle.url(forResource: "beeplow", withExtension: "wav")!
let beepMid = bundle.url(forResource: "beepmid", withExtension: "wav")!
let backgroundAudio = bundle.url(forResource: "backgroundAudio", withExtension: "wav")!
let audioPlayer = AudioPlayer(backgroundAudio, urls: [beepLow, beepMid])
audioPlayer.start()

"advanceToNextItem()" doesn't allow to loop through ALL videos

I've got an auto-playing sequence of video clips with looping behaviour built on KVO. In addition to auto-playback, I'd like to advance through the clips with an NSButton: if I press the Next Video button while the sequence is auto-playing, it should advance to the next clip in the queue. But it doesn't work properly.
The problem: when the queue comes to an end, it doesn't loop through all the clips. Only the first clip loops (if I press the Next Video button 10 times, only the first clip loops 10 times); after that, all clips auto-play normally.
Question: when advancing with the NSButton, how do I loop through all the videos?
Here's my code:
import Cocoa
import AVFoundation
import AVKit

class ViewController: NSViewController {

    @IBOutlet weak var avPlayerView: AVPlayerView!
    @IBOutlet weak var nextVideoButton: NSButton!

    @objc private let player = AVQueuePlayer()
    let clips = ["One", "Two", "Three", "Four"]
    private var token: NSKeyValueObservation?

    override func viewDidAppear() {
        super.viewDidAppear()
        addAllVideosToPlayer()
    }

    func addAllVideosToPlayer() {
        avPlayerView.player = player
        for clip in clips {
            let urlPath = Bundle.main.path(forResource: clip, ofType: "mov")!
            let url = URL(fileURLWithPath: urlPath)
            let playerItem = AVPlayerItem(url: url)
            player.insert(playerItem, after: player.items().last)
            token = player.observe(\.currentItem) { [weak self] player, _ in
                if self!.player.items().count == 1 { self?.addAllVideosToPlayer() }
            }
            player.play()
            avPlayerView.controlsStyle = .none
        }
    }

    @IBAction func nextVideo(_ sender: Any) {
        var number: Int = 0
        let clip = clips[number]
        let urlPath = Bundle.main.path(forResource: clip, ofType: "mov")!
        let url = URL(fileURLWithPath: urlPath)
        let playerItem = AVPlayerItem(url: url)
        player.insert(playerItem, after: player.items().last)
        player.advanceToNextItem()
        number += 1
        if number >= clips.count {
            number = 0
            player.advanceToNextItem()
            number += 1
        }
    }
}
You always access the first video here:
    var number: Int = 0
    let clip = clips[number]
number is always zero. Keep it as a member of the class and update it where needed.
Here's the solution to my problem (thanks to @giorashc):
class ViewController: NSViewController {

    var number: Int = 0 // class member

    // ... the rest of the class is unchanged ...

    @IBAction func nextVideo(_ sender: Any) {
        let clip = clips[number % 4] // modulo 4, the number of clips
        let urlPath = Bundle.main.path(forResource: clip, ofType: "mov")!
        let url = URL(fileURLWithPath: urlPath)
        let playerItem = AVPlayerItem(url: url)
        player.insert(playerItem, after: player.items().last)
        player.advanceToNextItem()
        number += 1
    }
}

Mac - Swift 3 - queuing audio files and playing

I would like to write an app in Swift 3 that plays queued audio files without any gap, crackle or noise when passing from one to the next.
My first try used AVAudioPlayer and AVAudioPlayerDelegate (AVAudioPlayer using array to queue audio files - Swift), but I don't know how to preload the next song to avoid the gap. Even if I knew how, I'm not certain it would be the best way to achieve my goal.
AVQueuePlayer seems to be a better candidate for the job, since it is made for that purpose, but I can't find any example to help me out.
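For reference, a minimal AVQueuePlayer sketch (the resource names are hypothetical). The queue preloads upcoming items, which usually avoids audible gaps, though truly gapless playback is not guaranteed:

    import AVFoundation

    // Build one AVQueuePlayer from all files so upcoming items are preloaded in order.
    // "audiofile1"/"audiofile2" are placeholder resource names.
    let names = ["audiofile1", "audiofile2"]
    let items: [AVPlayerItem] = names.compactMap { name in
        guard let url = Bundle.main.url(forResource: name, withExtension: "aif") else { return nil }
        return AVPlayerItem(url: url)
    }
    let queuePlayer = AVQueuePlayer(items: items)
    queuePlayer.play()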
Maybe it is only a matter of preloading or buffering? I am a bit lost in this ocean of possibilities.
Any suggestion is welcome.
It is far from perfect, especially if you run it twice or more (you get a "file exists" error), but it can serve as a base.
What it does is take two files (mine are AIFF samples of about 4 seconds), encode them into a single file and play the result. If you have hundreds of them, assembled randomly or not, it can be great fun.
All credit for the mergeAudioFiles function goes to @Peyman and @Pigeon_39: Concatenate two audio files in Swift and play them.
Swift 3
import Cocoa
import AVFoundation

var action = AVAudioPlayer()
let path = Bundle.main.path(forResource: "audiofile1.aif", ofType: nil)!
let url = URL(fileURLWithPath: path)
let path2 = Bundle.main.path(forResource: "audiofile2.aif", ofType: nil)!
let url2 = URL(fileURLWithPath: path2)
let array1 = NSMutableArray(array: [url, url2])

class ViewController: NSViewController, AVAudioPlayerDelegate {

    @IBOutlet weak var LanceStop: NSButton!

    override func viewDidLoad() {
        super.viewDidLoad()
    }

    override var representedObject: Any? {
        didSet {
            // Update the view, if already loaded.
        }
    }

    @IBAction func Lancer(_ sender: NSButton) {
        mergeAudioFiles(audioFileUrls: array1)
        // NB: the export above is asynchronous, so on the very first run
        // playback may start before FinalAudio.m4a exists.
        let url3 = URL(fileURLWithPath: "/Users/ADDUSERNAMEHERE/Documents/FinalAudio.m4a") // was NSURL(string:) with a force cast; a plain path needs fileURLWithPath
        do {
            action = try AVAudioPlayer(contentsOf: url3)
            action.delegate = self
            action.numberOfLoops = 0
            action.prepareToPlay()
            action.volume = 1
            action.play()
        } catch { print("error") }
    }

    func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
        if flag == true {
        }
    }
    var mergeAudioURL = NSURL()

    func mergeAudioFiles(audioFileUrls: NSArray) {
        let composition = AVMutableComposition()
        for i in 0 ..< audioFileUrls.count {
            let compositionAudioTrack: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
            let asset = AVURLAsset(url: (audioFileUrls[i] as! NSURL) as URL)
            let track = asset.tracks(withMediaType: AVMediaTypeAudio)[0]
            let timeRange = CMTimeRange(start: CMTimeMake(0, 600), duration: track.timeRange.duration)
            try! compositionAudioTrack.insertTimeRange(timeRange, of: track, at: composition.duration)
        }
        let documentDirectoryURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first! as NSURL
        self.mergeAudioURL = documentDirectoryURL.appendingPathComponent("FinalAudio.m4a")! as URL as NSURL
        // Remove any previous output, otherwise the export fails with a
        // "file exists" error on the second run.
        try? FileManager.default.removeItem(at: mergeAudioURL as URL)
        let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)
        assetExport?.outputFileType = AVFileTypeAppleM4A
        assetExport?.outputURL = mergeAudioURL as URL
        assetExport?.exportAsynchronously(completionHandler: {
            switch assetExport!.status {
            case AVAssetExportSessionStatus.failed:
                print("failed \(assetExport?.error)")
            case AVAssetExportSessionStatus.cancelled:
                print("cancelled \(assetExport?.error)")
            case AVAssetExportSessionStatus.unknown:
                print("unknown \(assetExport?.error)")
            case AVAssetExportSessionStatus.waiting:
                print("waiting \(assetExport?.error)")
            case AVAssetExportSessionStatus.exporting:
                print("exporting \(assetExport?.error)")
            default:
                print("Audio Concatenation Complete")
            }
        })
    }
}