"Extension may not contain stored properties" and "no member" errors in Swift

I am not able to understand why I am getting these errors. Can someone please help me? Here is the source code:
import UIKit
import AVFoundation
// MARK: - PlaySoundsViewController: AVAudioPlayerDelegate
extension PlaySoundsViewController: AVAudioPlayerDelegate {
var audioEngine = AVAudioEngine()
// MARK: Alerts
struct Alerts {
static let DismissAlert = "Dismiss"
static let RecordingDisabledTitle = "Recording disabled"
static let RecordingDisabledMessage = "You've disabled this app from recording your microphone. Check Settings."
static let RecordingFailedTitle = "Recording failed"
static let RecordingFailedMessage = "Something went wrong with the recording"
static let AudioRecordedError = "Audio Recorder Error"
static let AudioSessionError = "Audio Session Error"
static let AudioRecordingError = "Audio Recording Error"
static let AudioFileError = "Audio File Error"
static let AudioEngineError = "Audio Engine Error"
}
// MARK: PlayingState (raw values correspond to sender tags)
enum PlayingState { case playing, notPlaying }
// MARK: Audio Functions
func setupAudio() {
// initialize (recording) audio file
do {
audioFile = try AVAudioFile(forReading: recordedAudioURL as URL)
} catch {
showAlert(Alerts.AudioFileError, message: String(describing: error))
}
}
func playSound(rate: Float? = nil, pitch: Float? = nil, echo: Bool = false, reverb: Bool = false) {
// initialize audio engine components
audioEngine = AVAudioEngine()
// node for playing audio
audioPlayerNode = AVAudioPlayerNode()
audioEngine.attach(audioPlayerNode)
// node for adjusting rate/pitch
let changeRatePitchNode = AVAudioUnitTimePitch()
if let pitch = pitch {
changeRatePitchNode.pitch = pitch
}
if let rate = rate {
changeRatePitchNode.rate = rate
}
audioEngine.attach(changeRatePitchNode)
// node for echo
let echoNode = AVAudioUnitDistortion()
echoNode.loadFactoryPreset(.multiEcho1)
audioEngine.attach(echoNode)
// node for reverb
let reverbNode = AVAudioUnitReverb()
reverbNode.loadFactoryPreset(.cathedral)
reverbNode.wetDryMix = 50
audioEngine.attach(reverbNode)
// connect nodes
if echo == true && reverb == true {
connectAudioNodes(audioPlayerNode, changeRatePitchNode, echoNode, reverbNode, audioEngine.outputNode)
} else if echo == true {
connectAudioNodes(audioPlayerNode, changeRatePitchNode, echoNode, audioEngine.outputNode)
} else if reverb == true {
connectAudioNodes(audioPlayerNode, changeRatePitchNode, reverbNode, audioEngine.outputNode)
} else {
connectAudioNodes(audioPlayerNode, changeRatePitchNode, audioEngine.outputNode)
}
// schedule to play and start the engine!
audioPlayerNode.stop()
audioPlayerNode.scheduleFile(audioFile, at: nil) {
var delayInSeconds: Double = 0
if let lastRenderTime = self.audioPlayerNode.lastRenderTime, let playerTime = self.audioPlayerNode.playerTime(forNodeTime: lastRenderTime) {
if let rate = rate {
delayInSeconds = Double(self.audioFile.length - playerTime.sampleTime) / Double(self.audioFile.processingFormat.sampleRate) / Double(rate)
} else {
delayInSeconds = Double(self.audioFile.length - playerTime.sampleTime) / Double(self.audioFile.processingFormat.sampleRate)
}
}
// schedule a stop timer for when audio finishes playing
self.stopTimer = Timer(timeInterval: delayInSeconds, target: self, selector: #selector(PlaySoundsViewController.stopAudio), userInfo: nil, repeats: false)
RunLoop.main.add(self.stopTimer!, forMode: RunLoopMode.defaultRunLoopMode)
}
do {
try audioEngine.start()
} catch {
showAlert(Alerts.AudioEngineError, message: String(describing: error))
return
}
// play the recording!
audioPlayerNode.play()
}
func stopAudio() {
if let audioPlayerNode = audioPlayerNode {
audioPlayerNode.stop()
}
if let stopTimer = stopTimer {
stopTimer.invalidate()
}
configureUI(.notPlaying)
if let audioEngine = audioEngine {
audioEngine.stop()
audioEngine.reset()
}
}
// MARK: Connect List of Audio Nodes
func connectAudioNodes(_ nodes: AVAudioNode...) {
for x in 0..<nodes.count-1 {
audioEngine.connect(nodes[x], to: nodes[x+1], format: audioFile.processingFormat)
}
}
// MARK: UI Functions
func configureUI(_ playState: PlayingState) {
switch(playState) {
case .playing:
setPlayButtonsEnabled(false)
stopButton.isEnabled = true
case .notPlaying:
setPlayButtonsEnabled(true)
stopButton.isEnabled = false
}
}
func setPlayButtonsEnabled(_ enabled: Bool) {
snailButton.isEnabled = enabled
chipmunkButton.isEnabled = enabled
rabbitButton.isEnabled = enabled
vaderButton.isEnabled = enabled
echoButton.isEnabled = enabled
reverbButton.isEnabled = enabled
}
func showAlert(_ title: String, message: String) {
let alert = UIAlertController(title: title, message: message, preferredStyle: .alert)
alert.addAction(UIAlertAction(title: Alerts.DismissAlert, style: .default, handler: nil))
self.present(alert, animated: true, completion: nil)
}
}

You cannot declare var audioEngine = AVAudioEngine() inside an extension. Declare it inside the PlaySoundsViewController class instead.
Extensions are meant to augment behaviours, not fundamentally change a class.
Source: Why can't you add stored properties to extensions?
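For clarity, a minimal sketch of that split (the rest of the class is assumed to live in its own file): stored properties go in the class declaration, while the delegate conformance and methods stay in the extension.
class PlaySoundsViewController: UIViewController {
    var audioEngine = AVAudioEngine() // stored property: must live in the class body
    var audioPlayerNode: AVAudioPlayerNode!
    // outlets, recordedAudioURL, audioFile, stopTimer, ...
}
extension PlaySoundsViewController: AVAudioPlayerDelegate {
    // computed properties are fine in an extension; stored ones are not
    var isAudioEngineRunning: Bool { return audioEngine.isRunning }
    // setupAudio(), playSound(...), stopAudio(), ... as in the question
}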

Related

AudioKit Conflict between Midi Instrument and Mic behavior

I am trying to make my app produce MIDI notes while at the same time listening to the input from the mic:
var engine = AudioEngine()
var initialDevice: Device!
var mic: AudioEngine.InputNode!
var tappableNodeA: Fader!
var tappableNodeB: Fader!
var tappableNodeC: Fader!
var silence: Fader!
var tracker: PitchTap!
private var instrument = MIDISampler(name: "Instrument 1")
func noteOn(note: MIDINoteNumber) {
instrument.play(noteNumber: note, velocity: 90, channel: 0)
}
func noteOff(note: MIDINoteNumber) {
instrument.stop(noteNumber: note, channel: 0)
}
override func viewDidLoad() {
super.viewDidLoad()
print("init started ")
guard let input = engine.input else { fatalError() }
guard let device = engine.inputDevice else { fatalError() }
print("input selected")
initialDevice = device
engine.output = instrument
mic = input
tappableNodeA = Fader(mic)
tappableNodeB = Fader(tappableNodeA)
tappableNodeC = Fader(tappableNodeB)
silence = Fader(tappableNodeC, gain: 0)
engine.output = silence
print("objects init")
tracker = PitchTap(mic) { pitch, amp in
DispatchQueue.main.async {
self.update(pitch[0], amp[0])
}
}
start()
// other init that are not related
}
The start function is written below:
func start() {
do {
if let fileURL = Bundle.main.url(forResource: "Sounds/Sampler Instruments/sawPiano1", withExtension: "exs") {
try instrument.loadInstrument(url: fileURL)
} else {
Log("Could not find file")
}
} catch {
Log("Could not load instrument")
}
do {
try engine.start()
tracker.start()
} catch let err {
print("caught error at start")
Log(err)
}
}
As soon as I make the first try call to set up the instrument, I get the following error:
*** Terminating app due to uncaught exception 'com.apple.coreaudio.avfaudio', reason: 'required condition is false: _engine != nil
Why would the condition be false?
OK, so the solution was to separate the calls into two functions and position the first call before the tap node configuration (presumably because loadInstrument needs the sampler to already be attached to the engine, which engine.output = instrument provides, while the later engine.output = silence reassignment replaces that connection):
var engine = AudioEngine()
var initialDevice: Device!
var mic: AudioEngine.InputNode!
var tappableNodeA: Fader!
var tappableNodeB: Fader!
var tappableNodeC: Fader!
var silence: Fader!
var tracker: PitchTap!
private var instrument = MIDISampler(name: "Instrument 1")
func noteOn(note: MIDINoteNumber) {
instrument.play(noteNumber: note, velocity: 90, channel: 0)
}
func noteOff(note: MIDINoteNumber) {
instrument.stop(noteNumber: note, channel: 0)
}
override func viewDidLoad() {
super.viewDidLoad()
print("init started ")
guard let input = engine.input else { fatalError() }
guard let device = engine.inputDevice else { fatalError() }
print("input selected")
initialDevice = device
engine.output = instrument
start1()
mic = input
tappableNodeA = Fader(mic)
tappableNodeB = Fader(tappableNodeA)
tappableNodeC = Fader(tappableNodeB)
silence = Fader(tappableNodeC, gain: 0)
engine.output = silence
print("objects init")
tracker = PitchTap(mic) { pitch, amp in
DispatchQueue.main.async {
self.update(pitch[0], amp[0])
}
}
start()
// other init that are not related
}
func start1(){
do {
if let fileURL = Bundle.main.url(forResource: "Sounds/Sampler Instruments/sawPiano1", withExtension: "exs") {
try instrument.loadInstrument(url: fileURL)
} else {
Log("Could not find file")
}
} catch let err {
Log("Could not load instrument")
Log(err)
}
}
func start() {
do {
try engine.start()
tracker.start()
} catch let err {
print("caught error at start")
Log(err)
}
}
Although the exception is now gone, there is still no sound being played for some reason.
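One likely reason for the remaining silence: the second engine.output = silence assignment replaces engine.output = instrument, so the sampler ends up disconnected from the output. A hedged sketch of one fix, assuming AudioKit 5's Mixer node:
// Mix the zero-gain mic chain with the instrument so both stay attached
// to the engine's output (a sketch, not the poster's confirmed fix).
engine.output = Mixer(instrument, silence)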

List all available audio devices

I want to list all available audio devices in Swift to provide a selection for input and output. My application should listen on an audio channel and "write" to another. I do not want the system default!
let devices = AVCaptureDevice.devices(for: .audio)
print(devices.count)
for device in devices {
print(device.localizedName)
}
The code lists 0 devices, but I expect at least the internal output.
Some links to CoreAudio, AudioToolbox and AVFoundation that explain the audio source selection would be nice.
Here's some Swift 5 code that will enumerate all the audio devices.
You can use the uid with AVAudioPlayer's currentDevice property to output to a specific device.
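For example (a hedged sketch; someFileURL and deviceUID stand in for your own values):
// macOS: route an AVAudioPlayer to a specific output device by uid.
let player = try AVAudioPlayer(contentsOf: someFileURL) // someFileURL is assumed
player.currentDevice = deviceUID // a uid string from AudioDevice.uid below
player.play()
The enumeration code itself: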
import Cocoa
import AVFoundation
class AudioDevice {
var audioDeviceID:AudioDeviceID
init(deviceID:AudioDeviceID) {
self.audioDeviceID = deviceID
}
var hasOutput: Bool {
get {
var address:AudioObjectPropertyAddress = AudioObjectPropertyAddress(
mSelector:AudioObjectPropertySelector(kAudioDevicePropertyStreamConfiguration),
mScope:AudioObjectPropertyScope(kAudioDevicePropertyScopeOutput),
mElement:0)
var propsize:UInt32 = UInt32(MemoryLayout<CFString?>.size);
var result:OSStatus = AudioObjectGetPropertyDataSize(self.audioDeviceID, &address, 0, nil, &propsize);
if (result != 0) {
return false;
}
let bufferList = UnsafeMutablePointer<AudioBufferList>.allocate(capacity:Int(propsize))
result = AudioObjectGetPropertyData(self.audioDeviceID, &address, 0, nil, &propsize, bufferList);
if (result != 0) {
return false
}
let buffers = UnsafeMutableAudioBufferListPointer(bufferList)
for bufferNum in 0..<buffers.count {
if buffers[bufferNum].mNumberChannels > 0 {
return true
}
}
return false
}
}
var uid:String? {
get {
var address:AudioObjectPropertyAddress = AudioObjectPropertyAddress(
mSelector:AudioObjectPropertySelector(kAudioDevicePropertyDeviceUID),
mScope:AudioObjectPropertyScope(kAudioObjectPropertyScopeGlobal),
mElement:AudioObjectPropertyElement(kAudioObjectPropertyElementMaster))
var name:CFString? = nil
var propsize:UInt32 = UInt32(MemoryLayout<CFString?>.size)
let result:OSStatus = AudioObjectGetPropertyData(self.audioDeviceID, &address, 0, nil, &propsize, &name)
if (result != 0) {
return nil
}
return name as String?
}
}
var name:String? {
get {
var address:AudioObjectPropertyAddress = AudioObjectPropertyAddress(
mSelector:AudioObjectPropertySelector(kAudioDevicePropertyDeviceNameCFString),
mScope:AudioObjectPropertyScope(kAudioObjectPropertyScopeGlobal),
mElement:AudioObjectPropertyElement(kAudioObjectPropertyElementMaster))
var name:CFString? = nil
var propsize:UInt32 = UInt32(MemoryLayout<CFString?>.size)
let result:OSStatus = AudioObjectGetPropertyData(self.audioDeviceID, &address, 0, nil, &propsize, &name)
if (result != 0) {
return nil
}
return name as String?
}
}
}
class AudioDeviceFinder {
static func findDevices() {
var propsize:UInt32 = 0
var address:AudioObjectPropertyAddress = AudioObjectPropertyAddress(
mSelector:AudioObjectPropertySelector(kAudioHardwarePropertyDevices),
mScope:AudioObjectPropertyScope(kAudioObjectPropertyScopeGlobal),
mElement:AudioObjectPropertyElement(kAudioObjectPropertyElementMaster))
var result:OSStatus = AudioObjectGetPropertyDataSize(AudioObjectID(kAudioObjectSystemObject), &address, UInt32(MemoryLayout<AudioObjectPropertyAddress>.size), nil, &propsize)
if (result != 0) {
print("Error \(result) from AudioObjectGetPropertyDataSize")
return
}
let numDevices = Int(propsize / UInt32(MemoryLayout<AudioDeviceID>.size))
var devids = [AudioDeviceID]()
for _ in 0..<numDevices {
devids.append(AudioDeviceID())
}
result = AudioObjectGetPropertyData(AudioObjectID(kAudioObjectSystemObject), &address, 0, nil, &propsize, &devids);
if (result != 0) {
print("Error \(result) from AudioObjectGetPropertyData")
return
}
for i in 0..<numDevices {
let audioDevice = AudioDevice(deviceID:devids[i])
if (audioDevice.hasOutput) {
if let name = audioDevice.name,
let uid = audioDevice.uid {
print("Found device \"\(name)\", uid=\(uid)")
}
}
}
}
}
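To run the enumeration (a trivial usage sketch):
// Prints every device that reports at least one output channel.
AudioDeviceFinder.findDevices()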
The code you posted works perfectly fine for audio input devices when I paste it into an Xcode Playground.
Note, however, that the AVCaptureDevice API does not list audio output devices, as they are not capture devices but playback devices. If a device supports both input and output, you can still use the device's uniqueID in an output context, for example with AVPlayer's audioOutputDeviceUniqueID.
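A hedged sketch of that output-side routing (macOS; someMediaURL is an assumption):
// Route an AVPlayer to a specific output device by its uniqueID.
let player = AVPlayer(url: someMediaURL)
player.audioOutputDeviceUniqueID = deviceUID // uid from the enumeration above
player.play()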
(Also note that if you want your code to work on iOS as well, devices(for:) has been deprecated since iOS 11 and you should move to AVCaptureDevice.DiscoverySession instead.)
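On iOS, the replacement looks roughly like this (a sketch, iOS 10+):
// Enumerate audio capture devices via DiscoverySession.
let discovery = AVCaptureDevice.DiscoverySession(
    deviceTypes: [.builtInMicrophone],
    mediaType: .audio,
    position: .unspecified)
for device in discovery.devices {
    print(device.localizedName)
}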
Regarding your request for additional info on Core Audio and AudioToolbox, this SO question has some pretty comprehensive answers on the matter. The question asks for input devices, but the answers provide enough context to let you understand handling of the output side as well. There's even an answer with some (dated) Swift code. On a personal note, I have to say calling the Core Audio API from Swift is often more pain than gain. Because of that it might be faster, although a bit less safe, to wrap those portions of code in Objective-C or plain C and expose them via the Swift bridging header, if your project allows it.
If you want something like an action sheet and need to switch between audio devices seamlessly, use this code.
Code
import Foundation
import AVFoundation
import UIKit
@objc class AudioDeviceHandler: NSObject {
@objc static let shared = AudioDeviceHandler()
/// Present audio device selection alert
/// - Parameters:
/// - presenterViewController: viewController where the alert need to present
/// - sourceView: alertController source view in case of iPad
@objc func presentAudioOutput(_ presenterViewController: UIViewController, _ sourceView: UIView) {
let speakerTitle = "Speaker"
let headphoneTitle = "Headphones"
let deviceTitle = (UIDevice.current.userInterfaceIdiom == .pad) ? "iPad" : "iPhone"
let cancelTitle = "Cancel"
var deviceAction = UIAlertAction()
var headphonesExist = false
let optionMenu = UIAlertController(title: nil, message: nil, preferredStyle: .actionSheet)
guard let availableInputs = AVAudioSession.sharedInstance().availableInputs else {
print("No inputs available ")
return
}
for audioPort in availableInputs {
switch audioPort.portType {
case .bluetoothA2DP, .bluetoothHFP, .bluetoothLE :
let bluetoothAction = UIAlertAction(title: audioPort.portName, style: .default) { _ in
self.setPreferredInput(port: audioPort)
}
if isCurrentOutput(portType: audioPort.portType) {
bluetoothAction.setValue(true, forKey: "checked")
}
optionMenu.addAction(bluetoothAction)
case .builtInMic, .builtInReceiver:
deviceAction = UIAlertAction(title: deviceTitle, style: .default, handler: { _ in
self.setToDevice(port: audioPort)
})
case .headphones, .headsetMic:
headphonesExist = true
let headphoneAction = UIAlertAction(title: headphoneTitle, style: .default) { _ in
self.setPreferredInput(port: audioPort)
}
if isCurrentOutput(portType: .headphones) || isCurrentOutput(portType: .headsetMic) {
headphoneAction.setValue(true, forKey: "checked")
}
optionMenu.addAction(headphoneAction)
case .carAudio:
let carAction = UIAlertAction(title: audioPort.portName, style: .default) { _ in
self.setPreferredInput(port: audioPort)
}
if isCurrentOutput(portType: audioPort.portType) {
carAction.setValue(true, forKey: "checked")
}
optionMenu.addAction(carAction)
default:
break
}
}
// device actions only required if no headphone available
if !headphonesExist {
if (isCurrentOutput(portType: .builtInReceiver) ||
isCurrentOutput(portType: .builtInMic)) {
deviceAction.setValue(true, forKey: "checked")
}
optionMenu.addAction(deviceAction)
}
// configure speaker action
let speakerAction = UIAlertAction(title: speakerTitle, style: .default) { _ in
self.setOutputToSpeaker()
}
if isCurrentOutput(portType: .builtInSpeaker) {
speakerAction.setValue(true, forKey: "checked")
}
optionMenu.addAction(speakerAction)
// configure cancel action
let cancelAction = UIAlertAction(title: cancelTitle, style: .cancel)
optionMenu.addAction(cancelAction)
optionMenu.modalPresentationStyle = .popover
if let presenter = optionMenu.popoverPresentationController {
presenter.sourceView = sourceView
presenter.sourceRect = sourceView.bounds
}
presenterViewController.present(optionMenu, animated: true, completion: nil)
// auto dismiss after 5 seconds
DispatchQueue.main.asyncAfter(deadline: .now() + 5.0) {
optionMenu.dismiss(animated: true, completion: nil)
}
}
@objc func setOutputToSpeaker() {
do {
try AVAudioSession.sharedInstance().overrideOutputAudioPort(AVAudioSession.PortOverride.speaker)
} catch let error as NSError {
print("audioSession error turning on speaker: \(error.localizedDescription)")
}
}
fileprivate func setPreferredInput(port: AVAudioSessionPortDescription) {
do {
try AVAudioSession.sharedInstance().setPreferredInput(port)
} catch let error as NSError {
print("audioSession error change to input: \(port.portName) with error: \(error.localizedDescription)")
}
}
fileprivate func setToDevice(port: AVAudioSessionPortDescription) {
do {
// remove speaker if needed
try AVAudioSession.sharedInstance().overrideOutputAudioPort(AVAudioSession.PortOverride.none)
// set new input
try AVAudioSession.sharedInstance().setPreferredInput(port)
} catch let error as NSError {
print("audioSession error change to input: \(AVAudioSession.PortOverride.none.rawValue) with error: \(error.localizedDescription)")
}
}
@objc func isCurrentOutput(portType: AVAudioSession.Port) -> Bool {
AVAudioSession.sharedInstance().currentRoute.outputs.contains(where: { $0.portType == portType })
}
}
How to use
class ViewController: UIViewController {
@IBOutlet weak var audioButton: UIButton!
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view.
}
@IBAction func selectAudio(_ sender: Any) {
// present audio device selection action sheet
AudioDeviceHandler.shared.presentAudioOutput(self, audioButton)
}
}
It is possible to list input and output devices. This is a simplification of stevex's answer.
For output devices:
if (audioDevice.hasOutput) {
if let name = audioDevice.name,
let uid = audioDevice.uid {
print("Found device \"\(name)\", uid=\(uid)")
}
}
For input devices:
if (!audioDevice.hasOutput) {
if let name = audioDevice.name,
let uid = audioDevice.uid {
print("Found device \"\(name)\", uid=\(uid)")
}
}
(Notice the ! before audioDevice.hasOutput.)

Error with Apple's SFSpeechRecognition when app in Background Mode (phone locked)

I am attempting to use Apple's SFSpeechRecognition (Xcode 9, iOS 11, Swift 4). The following code uses a timer to start a new request for speech recognition. I have background capability on, microphone usage granted, and speech recognition granted. If the phone is unlocked, everything works as expected. But when the phone is locked, I receive the following error:
2018-02-20 22:24:47.522562-0500 Speech-Recognition-Demo[3505:1234188] [Utility] +[AFAggregator logDictationFailedWithError:] Error Domain=kAFAssistantErrorDomain Code=1700 "(null)"
According to this link, speech recognition doesn't seem to work when the app is in background mode, but this information is old. I'm hoping someone has solved this or found a workaround.
Has anyone solved this problem or can anyone suggest something to try? My alternative is to require the Apple Watch for my app and I'd REALLY like to avoid that...
import UIKit
import Speech
class SpeechDetectionViewController: UIViewController,
SFSpeechRecognizerDelegate {
@IBOutlet weak var detectedTextLabel: UILabel!
@IBOutlet weak var colorView: UIView!
@IBOutlet weak var startButton: UIButton!
let audioEngine = AVAudioEngine()
let speechRecognizer: SFSpeechRecognizer? = SFSpeechRecognizer()
var request: SFSpeechAudioBufferRecognitionRequest?
var recognitionTask: SFSpeechRecognitionTask?
var isRecording = false
// timers
var timer = Timer()
let timerInterval = 5.0
var secondsElapsed = 0
// toggle for taking commands
var takeCommands = true
override func viewDidLoad() {
super.viewDidLoad()
self.requestSpeechAuthorization()
timer = Timer.scheduledTimer(timeInterval: timerInterval, target: self, selector: #selector(timerAction(timer:)), userInfo: nil, repeats: true)
}
@objc func timerAction(timer: Timer) {
/* if takeCommands {
startRecording()
} else {
stopRecording()
}
takeCommands = !takeCommands
print("takeCommands: \(takeCommands)")
*/
startRecording()
}
//MARK: IBActions and Cancel
@IBAction func startButtonTapped(_ sender: UIButton) {
startRecording()
}
func startRecording(){
if isRecording {
print("STOP talking.")
request?.endAudio() // Added line to mark end of recording
request = nil
//audioEngine.stop()
if let node = audioEngine.inputNode {
node.removeTap(onBus: 0)
}
recognitionTask?.cancel()
isRecording = false
startButton.backgroundColor = UIColor.gray
} else {
print("START talking.")
self.recordAndRecognizeSpeech()
isRecording = true
startButton.backgroundColor = UIColor.red
}
}
func stopRecording() {
//audioEngine.stop()
if let node = audioEngine.inputNode {
node.removeTap(onBus: 0)
}
recognitionTask?.cancel()
isRecording = false
startButton.backgroundColor = UIColor.gray
}
//MARK: - Recognize Speech
func recordAndRecognizeSpeech() {
request = SFSpeechAudioBufferRecognitionRequest()
guard let node = audioEngine.inputNode else { return }
let recordingFormat = node.outputFormat(forBus: 0)
node.installTap(onBus: 0, bufferSize: 1024, format: recordingFormat) { buffer, _ in
self.request?.append(buffer)
}
if !audioEngine.isRunning {
audioEngine.prepare()
do {
try audioEngine.start()
} catch {
self.sendAlert(message: "There has been an audio engine error.")
return print(error)
}
}
guard let myRecognizer = SFSpeechRecognizer() else {
self.sendAlert(message: "Speech recognition is not supported for your current locale.")
return
}
if !myRecognizer.isAvailable {
self.sendAlert(message: "Speech recognition is not currently available. Check back at a later time.")
// Recognizer is not available right now
return
}
recognitionTask = speechRecognizer?.recognitionTask(with: request!, resultHandler: { result, error in
if result != nil { // check to see if result is empty (i.e. no speech found)
if let result = result {
let bestString = result.bestTranscription.formattedString
self.detectedTextLabel.text = bestString
var lastString: String = ""
for segment in result.bestTranscription.segments {
let indexTo = bestString.index(bestString.startIndex, offsetBy: segment.substringRange.location)
lastString = bestString.substring(from: indexTo)
}
self.checkForColorsSaid(resultString: lastString)
} else if let error = error {
self.sendAlert(message: "There has been a speech recognition error")
print(error)
}
}
})
}
//MARK: - Check Authorization Status
func requestSpeechAuthorization() {
SFSpeechRecognizer.requestAuthorization { authStatus in
OperationQueue.main.addOperation {
switch authStatus {
case .authorized:
self.startButton.isEnabled = true
case .denied:
self.startButton.isEnabled = false
self.detectedTextLabel.text = "User denied access to speech recognition"
case .restricted:
self.startButton.isEnabled = false
self.detectedTextLabel.text = "Speech recognition restricted on this device"
case .notDetermined:
self.startButton.isEnabled = false
self.detectedTextLabel.text = "Speech recognition not yet authorized"
}
}
}
}
//MARK: - UI / Set view color.
func checkForColorsSaid(resultString: String) {
switch resultString {
case "red":
colorView.backgroundColor = UIColor.red
case "orange":
colorView.backgroundColor = UIColor.orange
case "yellow":
colorView.backgroundColor = UIColor.yellow
case "green":
colorView.backgroundColor = UIColor.green
case "blue":
colorView.backgroundColor = UIColor.blue
case "purple":
colorView.backgroundColor = UIColor.purple
case "black":
colorView.backgroundColor = UIColor.black
case "white":
colorView.backgroundColor = UIColor.white
case "gray":
colorView.backgroundColor = UIColor.gray
default: break
}
}
//MARK: - Alert
func sendAlert(message: String) {
let alert = UIAlertController(title: "Speech Recognizer Error", message: message, preferredStyle: UIAlertControllerStyle.alert)
alert.addAction(UIAlertAction(title: "OK", style: UIAlertActionStyle.default, handler: nil))
self.present(alert, animated: true, completion: nil)
}
}

How to assign a different audio URL for each reusable cell in Swift

I have 2 (or more) reusable collection view cells, and each one has to play a different audio file. My problem is that when audio1 finishes, audio2 starts playing in the same cell as audio1. If I play each cell manually there's no problem, but if I want to play all the audio files automatically one after the other, they all play in the same cell. How can I start the next audio in the next cell if the cell has not yet been created?
Here is how I append to the array:
func appendToArray() {
for (_, page) in self.resources.enumerate() {
for (index,resource) in page.enumerate() {
print("Passa di qui") // Qui passa
if resource.fileType() == .Audio {
S3Client.sharedInstance.downloadResource(resourceKey: resource.value, completion: { (success, file) in
// let files = String(file)
self.audioURLs.append(file)
/**
if self.audioURLs.count == self.resources.count {
// print("audioURLs \(self.audioURLs[index])")
MediaAudioPlayer.sharedInstance.queueTrack(self.audioURLs)
}
*/
})
}
}
}
}
This is the cellForItemAtIndexPath:
func collectionView(collectionView: UICollectionView, cellForItemAtIndexPath indexPath: NSIndexPath) -> UICollectionViewCell {
case .Audio:
let cell = collectionView.dequeueReusableCellWithReuseIdentifier(MediaAudioCell.kCellIdentifier, forIndexPath: indexPath) as! MediaAudioCell
cell.activityIndicator.startAnimating()
cell.activityIndicator.hidden = false
S3Client.sharedInstance.downloadResource(resourceKey: resource.value, completion: { (success, file) in
if success == true && file != nil {
cell.activityIndicator.stopAnimating()
cell.activityIndicator.hidden = true
cell.audioURL = file!
// Make the slider independent from one cell to another
cell.sliderAudio.tag = indexPath.row
cell.sliderAudio.addTarget(self, action: "sliderChange:", forControlEvents: .ValueChanged)
// print("ArrayURL: \(file)")
// print("CiaoCell : \(self.audioURLs.count)")
// print("Ciaoself.resources.countCell : \(self.resources.count)")
/**
if self.audioURLs.count == self.resources.count {
// print("audioURLs \(self.audioURLs[index])")
let item = self.audioURLs[indexPath.row] print("item: \(item)")
}
if self.audioURLs.count == self.resources.count {
// print("audioURLs \(self.audioURLs[index])")
// MediaAudioPlayer.sharedInstance.queueTrack(self.audioURLs)
}
*/
// Display total audio length
let asset = AVURLAsset(URL: file!, options: nil)
let audios = asset.tracksWithMediaType(AVMediaTypeAudio)
if let audios: AVAssetTrack = audios[0] {
let audioDuration:CMTime = audios.timeRange.duration
let seconds:Float64 = CMTimeGetSeconds(audioDuration)
cell.labelAudio.text = cell.stringFromTimeInterval(NSTimeInterval(seconds)) as String
}
}
})
return cell
}
This is part of the cell's class:
override func awakeFromNib() {
super.awakeFromNib()
// Automatic start after 2 seconds, if Accessibility is ON
let delayTime = dispatch_time(DISPATCH_TIME_NOW, Int64(2 * Double(NSEC_PER_SEC)))
dispatch_after(delayTime, dispatch_get_main_queue()) {
if self.defaults.boolForKey("AutomaticStart") == true && self.defaults.boolForKey("goBackPressed") == false {
if let audioURL = self.audioURL {
// Set AVAudioSession for recording and playing at the same time
let session = AVAudioSession.sharedInstance()
do {
try session.setCategory(AVAudioSessionCategoryPlayback)
try session.setActive(true)
} catch _ {}
// If audio is playing, do not move on to the next one when a cell is created; continue playing.
if MediaAudioPlayer.sharedInstance.player?.playing == true { // If I set this to 'false' and remove the 'else', it does not start automatically.
} else {
MediaVideoPlayer.sharedInstance.stop()
MediaAudioPlayer.sharedInstance.playPauseAudio(audioURL: audioURL, delegate: self)
}
}
}
}
}
And this is the player class:
class MediaAudioPlayer: NSObject, AVAudioPlayerDelegate {
static let sharedInstance = MediaAudioPlayer()
private var delegate: MediaAudioPlayerDelegate?
var player: AVAudioPlayer?
private var lastURL: NSURL?
private var timer: NSTimer?
internal var sliderTouched: Bool = false
var tracks = Array<NSURL?>()
var currentTrackIndex = 0
override init() {
super.init()
}
// MARK: Setup
func playPauseAudio(audioURL url: NSURL, delegate: MediaAudioPlayerDelegate) {
self.delegate?.playing = true // Set default play button on last delegate
self.delegate = delegate // Save delegate
self.sliderTouched = false
// Setup as new only when this audio has not been already set up
if (self.lastURL == nil) || (url != self.lastURL) {
self.lastURL = url
self.setupAudioSession(category: AVAudioSessionCategoryPlayback)
do { // Setup Player
self.player = try AVAudioPlayer(contentsOfURL: url)
} catch _ {}
self.player?.delegate = self
self.player?.prepareToPlay()
timer = NSTimer.scheduledTimerWithTimeInterval(0.4, target: self, selector: #selector(MediaAudioPlayer.update), userInfo: nil, repeats: true)
}
// Play - Pause
if self.player?.playing == true {
self.player?.pause()
self.delegate?.playing = true
} else {
self.player?.play()
self.delegate?.playing = false
}
}
// Convert seconds to a minutes:seconds string
func stringFromTimeInterval(interval: NSTimeInterval) -> NSString {
let ti = NSInteger(interval)
let seconds = ti % 60
let minutes = (ti / 60) % 60
return NSString(format: "%0.2d:%0.2d", minutes, seconds)
}
// MARK: Audio Session
private func setupAudioSession(category category: String) {
let session = AVAudioSession.sharedInstance()
do {
try session.setCategory(category)
try session.setActive(true)
} catch _ {}
}
// MARK: Stop
func stop() {
self.player?.stop()
self.player = nil // Deinit player
self.delegate?.playing = true
self.delegate = nil // Deinit delegate
self.timer?.invalidate(); self.timer = nil
self.lastURL = nil
}
// MARK: Playing
internal func playing() -> Bool {
if player != nil {
return player?.rate == 1.0 ? true : false
}
return false
}
// MARK: Seek
func seekToPosition(position position: Float) {
if let duration = self.player?.duration {
player?.currentTime = Double(position) * duration
self.delegate?.currentTimeAudio = stringFromTimeInterval((player?.currentTime)!) as String
}
}
func update() {
if sliderTouched == false {
if let currentTime = self.player?.currentTime, duration = player?.duration {
let time = Float(currentTime) / Float(duration)
self.delegate?.sliderPosition = time
self.delegate?.currentTimeAudio = stringFromTimeInterval((player?.currentTime)!) as String
}
}
}
// MARK: Delegate
var counter = 0
func audioPlayerDidFinishPlaying(player: AVAudioPlayer, successfully flag: Bool) {
print("Called")
self.lastURL = nil
self.delegate?.playing = true
/**
if flag == true {
nextSong(true)
}*/
/**
if ((counter + 1) == tracks.count) {
counter = 0
self.delegate?.playing = false
nextSong(false)
} else {
self.delegate?.playing = true
nextSong(true)
}
*/
}
}
Thank you!!
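One approach, sketched against the MediaAudioPlayer class above: let the shared player own the queue and advance it in audioPlayerDidFinishPlaying, then tell the controller to scroll to and update the next cell. onTrackChanged is a hypothetical callback, not part of the original code.
// Sketch only (Swift 2 era, matching the question's code).
var onTrackChanged: ((Int) -> Void)? // hypothetical hook for the controller
func audioPlayerDidFinishPlaying(player: AVAudioPlayer, successfully flag: Bool) {
    guard flag && currentTrackIndex + 1 < tracks.count else { return }
    currentTrackIndex += 1
    if let nextURL = tracks[currentTrackIndex] {
        self.player = try? AVAudioPlayer(contentsOfURL: nextURL)
        self.player?.delegate = self
        self.player?.play()
        onTrackChanged?(currentTrackIndex) // controller scrolls to this index
    }
}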

UISlider to control AVAudioPlayer

I'm trying to implement a little function in my app. I am currently playing sounds as AVAudioPlayers and that works fine. What I would like to add is control of the sound's position (currentTime) with a UISlider: is there a simple way to do it?
I looked at an Apple project but it was quite messy... have you got samples or suggestions?
Thanks to everyone in advance
Shouldn't be a problem - just set the slider to continuous and set the max value to your player's duration after loading your sound file.
Edit
I just did this and it works for me...
- (IBAction)slide {
player.currentTime = slider.value;
}
- (void)updateTime:(NSTimer *)timer {
slider.value = player.currentTime;
}
- (IBAction)play:(id)sender {
NSURL *url = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"sound.caf" ofType:nil]];
NSError *error;
player = [[AVAudioPlayer alloc] initWithContentsOfURL:url error:&error];
if (!player) NSLog(@"Error: %@", error);
[player prepareToPlay];
slider.maximumValue = [player duration];
slider.value = 0.0;
[NSTimer scheduledTimerWithTimeInterval:1.0 target:self selector:@selector(updateTime:) userInfo:nil repeats:YES];
[player play];
}
The slider is configured in IB, as is a button to start playing.
Swift 3.0 Update:
var player: AVAudioPlayer!
var sliderr: UISlider!
@IBAction func play(_ sender: Any) {
let url = URL(fileURLWithPath: Bundle.main.path(forResource: "sound.caf", ofType: nil)!)
do {
player = try AVAudioPlayer(contentsOf: url)
} catch {
print("Error: \(error)")
return
}
player.prepareToPlay()
sliderr.maximumValue = Float(player.duration)
sliderr.value = 0.0
Timer.scheduledTimer(timeInterval: 1.0, target: self, selector: #selector(self.updateTime), userInfo: nil, repeats: true)
player.play()
}
func updateTime(_ timer: Timer) {
sliderr.value = Float(player.currentTime)
}
@IBAction func slide(_ slider: UISlider) {
player.currentTime = TimeInterval(slider.value)
}
To extend on paull's answer, you'd set the slider to be continuous with a maximum value of your audio player's duration, then add some object of yours (probably the view controller) as a target for the slider's UIControlEventValueChanged event; when you receive the action message, you'd then set the AVAudioPlayer's currentTime property to the slider's value.
You might also want to use an NSTimer to update the slider's value as the audio player plays; +scheduledTimerWithTimeInterval:target:selector:userInfo:repeats: is the easiest way to do that.
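In Swift, the same timer hookup might look like this (a sketch; player and slider stand in for your own properties):
// Keep the slider tracking playback once per second (iOS 10+ block-based timer).
Timer.scheduledTimer(withTimeInterval: 1.0, repeats: true) { [weak self] _ in
    guard let self = self, let player = self.player else { return }
    self.slider.value = Float(player.currentTime)
}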
I needed to adapt the above answer a bit to get it to work. The issue is that using
slider.maximumValue = [player duration];
slider.value = player.currentTime;
player.currentTime = slider.value;
do not work, because the slider expects a float while this player's currentTime and duration return CMTime (that is the case for AVPlayer; AVAudioPlayer uses TimeInterval instead). To make these work, I adapted them to read:
slider.maximumValue = CMTimeGetSeconds([player duration]);
slider.value = CMTimeGetSeconds(player.currentTime);
player.currentTime = CMTimeMakeWithSeconds((int)slider.value,1);
If you don't need any data during the drag, then you should simply set:
mySlider.isContinuous = false
Otherwise, try the code below to handle each phase of the touch.
// audio slider bar
private lazy var slider: UISlider = {
let slider = UISlider()
slider.translatesAutoresizingMaskIntoConstraints = false
slider.minimumTrackTintColor = .red
slider.maximumTrackTintColor = .white
slider.setThumbImage(UIImage(named: "sliderThumb"), for: .normal)
slider.addTarget(self, action: #selector(onSliderValChanged(slider:event:)), for: .valueChanged)
// slider.isContinuous = false
return slider
}()
@objc func onSliderValChanged(slider: UISlider, event: UIEvent) {
guard let player = AudioPlayer.shared.player else { return }
if let touchEvent = event.allTouches?.first {
switch touchEvent.phase {
case .began:
// handle drag began
// I would stop the timer when drag begin
timer.invalidate()
case .moved:
// handle drag moved
// Update label's text for current playing time
case .ended:
// update the player's currTime and re-create the timer when drag is done.
player.currentTime = TimeInterval(slider.value)
timer = Timer.scheduledTimer(timeInterval: 1.0, target: self, selector: #selector(updateTime(_:)), userInfo: nil, repeats: true)
default:
break
}
}
}
Problems I've faced while playing an audio file, showing the start/end time, and controlling the song with a UISlider:
1. Not playing audio directly without first downloading it to a temp folder.
2. UISlider crashing on the main thread on lower iOS versions, i.e. 12.4/13.1.
3. Smooth scrolling of the UISlider.
4. Calculating and updating the start/end time of the song.
This answer needs some editing, but it will work without any doubt.
//UISlider init
lazy var slider: UISlider = {
let progress = UISlider()
progress.minimumValue = 0.0
progress.maximumValue = 100.0
progress.tintColor = UIColor.init(named: "ApplicationColor")
return progress
}()
var audioPlayer : AVAudioPlayer?
// First I download the audio, then play it.
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
timer = Timer.scheduledTimer(timeInterval: 1, target: self, selector: #selector(trackAudio), userInfo: nil, repeats: true)
if let audioURLString = audioURL{
let urlstring = URL(string: audioURLString)!
downloadFromURL(url: urlstring) { (localURL, response, error) in
if let localURL = localURL{
self.playAudioFile(url: localURL)
}
}
}
}
override func viewDidDisappear(_ animated: Bool) {
super.viewDidDisappear(animated)
stopTimer()
}
// Stop TimeInterval After View disappear
func stopTimer() {
if timer != nil {
timer?.invalidate()
audioPlayer?.stop()
audioPlayer = nil
timer = nil
}
}
@objc func sliderSelected(_ sender: UISlider) {
if audioPlayer != nil{
if !isPlaying{
self.audioPlayer?.play()
playButton.setImage(UIImage.init(named: "AudioPause"), for: .normal)
isPlaying = true
}else{
self.audioPlayer?.currentTime = TimeInterval(Float(sender.value) * Float(self.audioPlayer!.duration) / 100.0)
if (sender.value / 100.0 == 1.0){
//Do something if audio ends while dragging the UISlider.
}
}
}
}
func downloadFromURL(url: URL, completion: @escaping ((_ downloadedURL: URL?, _ response: URLResponse?, _ error: Error?) -> Void)) {
var downloadTask:URLSessionDownloadTask
downloadTask = URLSession.shared.downloadTask(with: url) {(URL, response, error) in
if let url = URL{
completion(url,nil,nil)
}else if let response = response{
completion(nil,response,nil)
}
if let error = error{
completion(nil,nil,error)
}
}
downloadTask.resume()
}
func playAudioFile(url:URL){
do{
self.audioPlayer = try AVAudioPlayer(contentsOf: url)
self.audioPlayer?.prepareToPlay()
self.audioPlayer?.delegate = self
self.audioPlayer?.play()
let audioDuration = audioPlayer?.duration
let audioDurationSeconds = audioDuration
minutes = Int(audioDurationSeconds!/60);
seconds = Int(audioDurationSeconds!.truncatingRemainder(dividingBy: 60))
} catch{
print("AVAudioPlayer init failed")
}
}
@objc func trackAudio() {
if audioPlayer != nil{
DispatchQueue.main.async {
print("HI")
let normalizedTime = Float(self.audioPlayer!.currentTime * 100.0 / self.audioPlayer!.duration)
self.slider.setValue(normalizedTime, animated: true)
let currentTime = self.audioPlayer?.currentTime
self.currentMinutes = Int(currentTime!/60);
self.currentSeconds = Int(currentTime!.truncatingRemainder(dividingBy: 60))
self.startTimeLabel.text = String(format: "%02i:%02i", self.currentMinutes, self.currentSeconds)
self.endTimeLabel.text = String(format: "%02i:%02i", self.minutes, self.seconds)
}
}
}
If anyone is looking for a simple touch-down and touch-up on a UISlider, it turns out to be as simple as:
slider.addTarget(self, action: #selector(changeValue(_:)), for: .valueChanged)
slider.addTarget(self, action: #selector(sliderTapped), for: .touchDown)
slider.addTarget(self, action: #selector(sliderUntouched), for: .touchUpInside)
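The matching selectors aren't shown there; hypothetical stubs would look like:
@objc func changeValue(_ slider: UISlider) {
    // fires continuously while the thumb moves
}
@objc func sliderTapped() {
    // finger down: e.g. pause timer-driven slider updates
}
@objc func sliderUntouched() {
    // finger up inside: e.g. seek the player, resume updates
}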
Here's the entire setup for an AVAudioPlayer. Some of the code in handleScrubbing() and fingerLiftedFromSlider() is duplicated but whatever...
This will let you show what's on the currentTimeLabel (usually on the left) and the totalDurationLabel (usually on the right) with the scrubber/slider in the middle of them. When you slide the slider, the currentTime will update to show wherever the slider is.
There is something to be aware of. If the player was playing before you touched the slider, it keeps playing while you slide. In .began you need to check whether the player was playing, and if so pause it and set a variable like wasAudioPlayerPlayingBeforeSliderWasTouched to true, so that when your finger is lifted playback continues from wherever you lifted it. If you don't pause the player, the slider isn't going to slide smoothly.
When you lift your finger, there is a check in onFingerLiftedStartAudioPlayerIfItWasPlayingBeforeTouchesBegan() to see if the slider is at its end time. If it is, instead of playing it'll run the code in audioEndTimeStopEverything().
In the startAudioPlayer method, I used an AVURLAsset to get the actual url's duration. I got it from this answer, which has a great explanation.
I used this code with a local url, not sure how this will work with a remote url.
import UIKit
import AVFoundation
class MyAudioController: UIViewController {
lazy var currentTimeLabel ... { ... }()
lazy var totalDurationLabel ... { ... }()
lazy var pausePlayButton ... { ... }()
lazy var fastForwardButton ... { ... }()
lazy var rewindButton ... { ... }()
lazy var slider: UISlider = {
let s = UISlider()
s.translatesAutoresizingMaskIntoConstraints = false
s.isContinuous = true
s.minimumTrackTintColor = UIColor.red
s.maximumTrackTintColor = UIColor.blue
s.setThumbImage(UIImage(named: "circleIcon"), for: .normal)
s.addTarget(self, action: #selector(sliderValChanged(slider:event:)), for: .valueChanged)
return s
}()
weak var timer: Timer? // *** MAKE SURE THIS IS WEAK ***
var audioPlayer: AVAudioPlayer?
var wasAudioPlayerPlayingBeforeSliderWasTouched = false
override func viewDidLoad() {
super.viewDidLoad()
guard let myAudioUrl = URL(string: "...") else { return }
setAudioPlayer(with: myAudioUrl)
do {
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default)
try AVAudioSession.sharedInstance().setActive(true)
} catch {
print(error)
}
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
stopAudioPlayer()
}
// 1. init your AVAudioPlayer here
func setAudioPlayer(with audioTrack: URL) {
do {
stopAudioPlayer() // if something was previously playing
audioPlayer = try AVAudioPlayer(contentsOf: audioTrack)
audioPlayer?.delegate = self
audioPlayer?.prepareToPlay()
audioPlayer?.volume = audioVolume
startAudioPlayer()
} catch let err as NSError {
print(err.localizedDescription)
}
}
// 2. Audio PLAYER - start / stop funcs
func stopAudioPlayer() {
stopAudioTimer()
audioPlayer?.pause()
audioPlayer?.stop()
}
func startAudioPlayer() {
if let audioPlayer = audioPlayer, audioPlayer.isPlaying {
audioPlayer.pause()
}
audioPlayer?.currentTime = 0
audioPlayer?.play()
pausePlayButton.setImage(UIImage(named: "pauseIcon"), for: .normal)
startAudioTimer()
}
func startAudioTimer() {
stopAudioTimer()
slider.value = 0
currentTimeLabel.text = "00:00"
totalDurationLabel.text = "00:00"
guard let url = audioPlayer?.url else { return }
let assetOpts = [AVURLAssetPreferPreciseDurationAndTimingKey: true]
let asset = AVURLAsset(url: url, options: assetOpts)
let assetDuration: CMTime = asset.duration
let assetDurationInSecs: Float64 = CMTimeGetSeconds(assetDuration)
slider.maximumValue = Float(assetDurationInSecs)
totalDurationLabel.text = strFromTimeInterval(interval: TimeInterval(assetDurationInSecs))
runAudioTimer()
}
// 3. TIMER funcs
func runAudioTimer() {
if timer == nil {
timer = Timer.scheduledTimer(withTimeInterval: 0.01, repeats: true, block: { [weak self](_) in
self?.audioTimerIsRunning()
})
}
}
func audioTimerIsRunning() {
guard let audioPlayer = audioPlayer else { return }
let currentTime = audioPlayer.currentTime
if Float(currentTime) >= Float(slider.maximumValue) {
stopAudioTimer()
}
currentTimeLabel.text = strFromTimeInterval(interval: currentTime)
slider.value = Float(currentTime)
}
func stopAudioTimer() {
if timer != nil {
timer?.invalidate()
timer = nil
}
}
// slider funcs
@objc func sliderValChanged(slider: UISlider, event: UIEvent) {
if let touchEvent = event.allTouches?.first {
switch touchEvent.phase {
case .began:
checkIfAudioPlayerWasPlayingWhenSliderIsFirstTouched()
stopAudioTimer()
print("Finger Touched")
case .moved:
handleScrubbing()
print("Finger is Moving Scrubber")
case .ended:
print("Finger Lifted")
onFingerLiftedStartAudioPlayerIfItWasPlayingBeforeTouchesBegan()
fingerLiftedFromSlider()
default:
print("Something Else Happened In Slider")
}
}
}
func checkIfAudioPlayerWasPlayingWhenSliderIsFirstTouched() {
guard let audioPlayer = audioPlayer else { return }
if audioPlayer.isPlaying {
audioPlayer.pause()
wasAudioPlayerPlayingBeforeSliderWasTouched = true
}
}
func handleScrubbing() {
guard let audioPlayer = audioPlayer else { return }
let sliderValue = TimeInterval(slider.value)
currentTimeLabel.text = strFromTimeInterval(interval: sliderValue)
audioPlayer.currentTime = sliderValue
if audioPlayer.currentTime >= audioPlayer.duration {
audioEndTimeStopEverything()
}
}
func onFingerLiftedStartAudioPlayerIfItWasPlayingBeforeTouchesBegan() {
if wasAudioPlayerPlayingBeforeSliderWasTouched {
wasAudioPlayerPlayingBeforeSliderWasTouched = false
guard let audioPlayer = audioPlayer else { return }
if slider.value >= slider.maximumValue {
audioEndTimeStopEverything()
} else {
audioPlayer.play()
}
}
}
func fingerLiftedFromSlider() {
guard let audioPlayer = audioPlayer else { return }
if !audioPlayer.isPlaying { // this check is necessary because if you paused the audioPlayer and then started sliding, it should still be paused when you lift your finger up. If it's paused there is no need for the timer function to run.
let sliderValue = TimeInterval(slider.value)
currentTimeLabel.text = strFromTimeInterval(interval: sliderValue)
audioPlayer.currentTime = sliderValue
return
}
runAudioTimer()
}
func audioEndTimeStopEverything() {
stopAudioPlayer()
pausePlayButton.setImage(UIImage("named: playIcon"), for: .normal)
guard let audioPlayer = audioPlayer else { return }
// for some reason when the audioPlayer would reach its end time it kept resetting its currentTime property to zero. I don't know if that's meant to happen or a bug but the currentTime would be zero and the slider would be at the end. To rectify the issue I set them both to their end times
audioPlayer.currentTime = audioPlayer.duration
slider.value = slider.maximumValue
}
}
extension MyAudioController: AVAudioPlayerDelegate {
func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
audioEndTimeStopEverything()
}
func audioPlayerDecodeErrorDidOccur(_ player: AVAudioPlayer, error: Error?) {
if let error = error {
print(error.localizedDescription)
}
}
}
Here is the strFromTimeInterval(interval:) function that I got from here. I only used it because I didn't want to bother with milliseconds. The code above was run using audio files with minutes and seconds, not hours. If you have any problems with hours, you can also swap this function out for this answer.
extension MyAudioController {
func strFromTimeInterval(interval: TimeInterval) -> String {
let time = NSInteger(interval)
let seconds = time % 60
let minutes = (time / 60) % 60
let hours = (time / 3600)
var formatString = ""
if hours == 0 {
if (minutes < 10) {
formatString = "%2d:%0.2d"
} else {
formatString = "%0.2d:%0.2d"
}
return String(format: formatString,minutes,seconds)
} else {
formatString = "%2d:%0.2d:%0.2d"
return String(format: formatString,hours,minutes,seconds)
}
}
}
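For example, an interval of 125 seconds formats as " 2:05" (the %2d branch pads single-digit minutes), while 3725 seconds formats as " 1:02:05".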