My Morse code translator will not output the sound as it should. I have tested the speakers and my methods without this function and they work flawlessly, but not in the context of the rest of the program. The compiler gives me no errors and the playground does not crash, it just doesn't play any sound. Volume and ringer are at full.
func speakTheCode(message: String) {
    var speaker = AVAudioPlayer()
    let longBeep = #fileLiteral(resourceName: "beep_long.mp3")
    let shortBeep = #fileLiteral(resourceName: "beep_short.mp3")
    let dash = "-"
    let dot = "."
    for character in message.characters {
        if character == dash[dash.startIndex] {
            speaker = try! AVAudioPlayer(contentsOf: longBeep)
            speaker.prepareToPlay()
            print("-")
        }
        else if character == dot[dot.startIndex] {
            speaker = try! AVAudioPlayer(contentsOf: shortBeep)
            speaker.prepareToPlay()
            print(".")
        }
        speaker.play()
    }
}
I've been messing around with the code for hours now and nothing is working. What (if anything) am I doing wrong?
There seem to be some playground issues with playing audio. See this thread:
Playing a sound in a Swift Playground
However, I was able to make some changes to your code and get it to work. Here's my code:
class Morse: NSObject, AVAudioPlayerDelegate {
    private var message = ""
    private var dotSound: AVAudioPlayer!
    private var dashSound: AVAudioPlayer!
    private let dash = Character("-")
    private let dot = Character(".")
    private var index: String.Index!

    init(message: String) {
        super.init()
        do {
            if let url = Bundle.main.url(forResource: "beep_short", withExtension: "mp3") {
                self.dotSound = try AVAudioPlayer(contentsOf: url)
                self.dotSound.delegate = self
                self.dotSound.prepareToPlay()
            }
        } catch {
            NSLog("Error loading dot audio!")
        }
        do {
            if let url = Bundle.main.url(forResource: "beep_long", withExtension: "mp3") {
                self.dashSound = try AVAudioPlayer(contentsOf: url)
                self.dashSound.delegate = self
                self.dashSound.prepareToPlay()
            }
        } catch {
            NSLog("Error loading dash audio!")
        }
        self.message = message
        self.index = message.startIndex
    }

    func playCharacter() {
        let character = message.characters[index]
        NSLog("Character: \(character)")
        if character == dash {
            dashSound.play()
        } else if character == dot {
            dotSound.play()
        }
    }

    func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
        NSLog("Finished playing")
        if index != message.endIndex {
            self.index = message.index(after: index)
            playCharacter()
        }
    }
}

let m = Morse(message: "...---")
m.playCharacter()
PlaygroundPage.current.needsIndefiniteExecution = true
I had to enable indefinite execution to get the code to execute at all. Also, I had some issues with the second audio file loading but I didn't investigate further to see if it was an issue with my test file or something else since it mostly worked.
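For completeness, a playground running the snippet above presumably also needs these imports at the top (they are not shown in the original answer):

import AVFoundation      // AVAudioPlayer, AVAudioPlayerDelegate
import PlaygroundSupport // PlaygroundPage.current.needsIndefiniteExecution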
@Fahim still it is showing an error
I am trying to make my app produce MIDI notes while simultaneously listening to the input from the mic:
var engine = AudioEngine()
var initialDevice: Device!
var mic: AudioEngine.InputNode!
var tappableNodeA: Fader!
var tappableNodeB: Fader!
var tappableNodeC: Fader!
var silence: Fader!
var tracker: PitchTap!
private var instrument = MIDISampler(name: "Instrument 1")

func noteOn(note: MIDINoteNumber) {
    instrument.play(noteNumber: note, velocity: 90, channel: 0)
}

func noteOff(note: MIDINoteNumber) {
    instrument.stop(noteNumber: note, channel: 0)
}

override func viewDidLoad() {
    super.viewDidLoad()
    print("init started ")
    guard let input = engine.input else { fatalError() }
    guard let device = engine.inputDevice else { fatalError() }
    print("input selected")
    initialDevice = device

    engine.output = instrument

    mic = input
    tappableNodeA = Fader(mic)
    tappableNodeB = Fader(tappableNodeA)
    tappableNodeC = Fader(tappableNodeB)
    silence = Fader(tappableNodeC, gain: 0)
    engine.output = silence
    print("objects init")

    tracker = PitchTap(mic) { pitch, amp in
        DispatchQueue.main.async {
            self.update(pitch[0], amp[0])
        }
    }
    start()
    // other init that are not related
}
The start function is written below:
func start() {
    do {
        if let fileURL = Bundle.main.url(forResource: "Sounds/Sampler Instruments/sawPiano1", withExtension: "exs") {
            try instrument.loadInstrument(url: fileURL)
        } else {
            Log("Could not find file")
        }
    } catch {
        Log("Could not load instrument")
    }
    do {
        try engine.start()
        tracker.start()
    } catch let err {
        print("caught error at start")
        Log(err)
    }
}
As long as I make the first try call to set up the instrument, I get the following error:
*** Terminating app due to uncaught exception 'com.apple.coreaudio.avfaudio', reason: 'required condition is false: _engine != nil
Why would the condition be false?
OK, so the solution was to separate the calls into two functions and to position the first call before the tap node configuration:
var engine = AudioEngine()
var initialDevice: Device!
var mic: AudioEngine.InputNode!
var tappableNodeA: Fader!
var tappableNodeB: Fader!
var tappableNodeC: Fader!
var silence: Fader!
var tracker: PitchTap!
private var instrument = MIDISampler(name: "Instrument 1")

func noteOn(note: MIDINoteNumber) {
    instrument.play(noteNumber: note, velocity: 90, channel: 0)
}

func noteOff(note: MIDINoteNumber) {
    instrument.stop(noteNumber: note, channel: 0)
}

override func viewDidLoad() {
    super.viewDidLoad()
    print("init started ")
    guard let input = engine.input else { fatalError() }
    guard let device = engine.inputDevice else { fatalError() }
    print("input selected")
    initialDevice = device

    engine.output = instrument
    start1()

    mic = input
    tappableNodeA = Fader(mic)
    tappableNodeB = Fader(tappableNodeA)
    tappableNodeC = Fader(tappableNodeB)
    silence = Fader(tappableNodeC, gain: 0)
    engine.output = silence
    print("objects init")

    tracker = PitchTap(mic) { pitch, amp in
        DispatchQueue.main.async {
            self.update(pitch[0], amp[0])
        }
    }
    start()
    // other init that are not related
}

func start1() {
    do {
        if let fileURL = Bundle.main.url(forResource: "Sounds/Sampler Instruments/sawPiano1", withExtension: "exs") {
            try instrument.loadInstrument(url: fileURL)
        } else {
            Log("Could not find file")
        }
    } catch let err {
        Log("Could not load instrument")
        Log(err)
    }
}

func start() {
    do {
        try engine.start()
        tracker.start()
    } catch let err {
        print("caught error at start")
        Log(err)
    }
}
Although the exception is now gone, there is still no sound being played for some reason.
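One possible explanation (an observation about the code above, not something stated in the original post) is that engine.output is assigned twice in viewDidLoad: first to the instrument and then to the silence fader, so the second assignment replaces the instrument in the output chain. A minimal sketch of keeping both connected, assuming AudioKit 5's Mixer node:

// Sketch: mix the MIDI instrument and the silent mic chain into one output
// instead of overwriting engine.output a second time.
silence = Fader(tappableNodeC, gain: 0)
engine.output = Mixer(instrument, silence) // assumes AudioKit's Mixer(_ inputs: Node...)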
I am building an app that works with Janus Gateway via WebSocket and WebRTC. Everything works fine: I can send and receive voice calls successfully, but the insertDtmf method doesn't send my DTMF to the other peer.
The same account and the same code work fine on Android.
Here is where I prepare WebRTC:
private func prepareWebRtc(callbacks: PluginHandleWebRTCCallbacksDelegate) {
    if (pc != nil) {
        if (callbacks.getJsep() == nil) {
            createSdpInternal(callbacks: callbacks, isOffer: isOffer)
        } else {
            let jsep = callbacks.getJsep()!
            let sdpString: String = jsep["sdp"] as! String
            let type: RTCSdpType = RTCSessionDescription.type(for: jsep["type"] as! String)
            let sdp: RTCSessionDescription = RTCSessionDescription.init(type: type, sdp: sdpString)
            pc.setRemoteDescription(sdp) { (err) in }
        }
    } else {
        trickle = callbacks.getTrickle() != nil ? callbacks.getTrickle()! : false
        streamsDone(webRTCCallbacks: callbacks)
    }
}

private func streamsDone(webRTCCallbacks: PluginHandleWebRTCCallbacksDelegate) {
    let rtcConfig = RTCConfiguration.init()
    rtcConfig.iceServers = server.iceServers
    rtcConfig.bundlePolicy = RTCBundlePolicy.maxBundle
    rtcConfig.rtcpMuxPolicy = RTCRtcpMuxPolicy.require
    rtcConfig.continualGatheringPolicy = RTCContinualGatheringPolicy.gatherContinually
    rtcConfig.sdpSemantics = .planB

    let source: RTCAudioSource = sessionFactory.audioSource(with: audioConstraints)
    let audioTrack: RTCAudioTrack? = sessionFactory.audioTrack(with: source, trackId: AUDIO_TRACK_ID)
    let stream: RTCMediaStream? = sessionFactory.mediaStream(withStreamId: LOCAL_MEDIA_ID)
    if (audioTrack != nil) {
        stream!.addAudioTrack(audioTrack!)
        myStream = stream
    }
    if (stream != nil) {
        onLocalStream(stream: stream!)
    }
    // pc.addTrack(audioTrack, mediaStreamLabels);
    pc = sessionFactory.peerConnection(with: rtcConfig, constraints: audioConstraints, delegate: nil)
    if (myStream != nil) {
        pc.add(myStream)
    }

    if let obj: [String: Any] = webRTCCallbacks.getJsep() {
        let sdp: String = obj["sdp"] as! String
        let type: RTCSdpType = RTCSessionDescription.type(for: obj["type"] as! String)
        let sessionDescription: RTCSessionDescription = RTCSessionDescription(type: type, sdp: sdp)
        print(" STREAMS DONE, JSEP NOT NULL")
        // pc.setRemoteDescription(WebRtcObserver(webRTCCallbacks), sessionDescription);
        pc.setRemoteDescription(sessionDescription) { (err) in
        }
    } else {
        createSdpInternal(callbacks: webRTCCallbacks, isOffer: isOffer)
        print(" STREAMS DONE, JSEP NULL ")
    }
    /* } catch (Exception ex) {
        webRTCCallbacks.onCallbackError(ex.getMessage());
    } */
}
And here is where I try to send DTMF:
public func insertDTMF(_ tone: String) {
    if (pc != nil) {
        if let dtmfSender = pc.senders.first?.dtmfSender {
            dtmfSender.insertDtmf(tone, duration: 200, interToneGap: 70)
        }
        // Here the timers are in ms
    }
}
In my case, this is how I have handled insert DTMF functionality.
a - First filter out audio RTCRtpSender track:
var audioSender: RTCRtpSender?
for rtpSender in pc.senders {
    if rtpSender.track?.kind == "audio" {
        audioSender = rtpSender
    }
}
b - And then use the same filtered audioSender object to insert the tone using OperationQueue
if let audioSender = audioSender {
    let queue = OperationQueue()
    queue.addOperation({
        audioSender.dtmfSender?.insertDtmf(dtmfTone, duration: TimeInterval(0.1), interToneGap: TimeInterval(0.5))
    })
}
Note: you can modify duration and interToneGap as per your requirement.
Hope this solution works for you as well.
The original answer can be found here: https://stackoverflow.com/a/60148372/4515269
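Putting both steps together, the question's insertDTMF(_:) could be rewritten along these lines (a sketch only; pc and the surrounding types come from the question's code, and the duration values are placeholders to tune):

public func insertDTMF(_ tone: String) {
    // Pick the audio sender explicitly instead of relying on pc.senders.first.
    guard let audioSender = pc.senders.first(where: { $0.track?.kind == "audio" }) else { return }
    OperationQueue().addOperation {
        // duration and interToneGap are TimeInterval values in seconds here.
        audioSender.dtmfSender?.insertDtmf(tone, duration: 0.2, interToneGap: 0.07)
    }
}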
I want to list all available audio devices in Swift to provide a selection for input and output. My application should listen on one audio channel and "write" to another. I do not want the system default!
let devices = AVCaptureDevice.devices(for: .audio)
print(devices.count)
for device in devices {
    print(device.localizedName)
}
The code lists 0 devices, but I expect at least the internal output.
Some links to CoreAudio, AudioToolbox and AVFoundation that explain the audio source selection would be nice.
Here's some Swift 5 code that will enumerate all the audio devices.
You can use the uid with AVAudioPlayer's currentDevice property to output to a specific device.
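For instance, the routing step might look like this (a sketch; soundURL and the UID string are placeholders, and currentDevice is the macOS-only AVAudioPlayer property mentioned above):

// Hypothetical routing of playback to a specific device by its UID (macOS).
if let player = try? AVAudioPlayer(contentsOf: soundURL) {
    player.currentDevice = "AppleUSBAudioEngine:..." // a uid printed by AudioDeviceFinder below
    player.prepareToPlay()
    player.play()
}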
import Cocoa
import AVFoundation

class AudioDevice {
    var audioDeviceID: AudioDeviceID

    init(deviceID: AudioDeviceID) {
        self.audioDeviceID = deviceID
    }

    var hasOutput: Bool {
        get {
            var address: AudioObjectPropertyAddress = AudioObjectPropertyAddress(
                mSelector: AudioObjectPropertySelector(kAudioDevicePropertyStreamConfiguration),
                mScope: AudioObjectPropertyScope(kAudioDevicePropertyScopeOutput),
                mElement: 0)

            var propsize: UInt32 = UInt32(MemoryLayout<CFString?>.size)
            var result: OSStatus = AudioObjectGetPropertyDataSize(self.audioDeviceID, &address, 0, nil, &propsize)
            if (result != 0) {
                return false
            }

            let bufferList = UnsafeMutablePointer<AudioBufferList>.allocate(capacity: Int(propsize))
            result = AudioObjectGetPropertyData(self.audioDeviceID, &address, 0, nil, &propsize, bufferList)
            if (result != 0) {
                return false
            }

            let buffers = UnsafeMutableAudioBufferListPointer(bufferList)
            for bufferNum in 0..<buffers.count {
                if buffers[bufferNum].mNumberChannels > 0 {
                    return true
                }
            }
            return false
        }
    }

    var uid: String? {
        get {
            var address: AudioObjectPropertyAddress = AudioObjectPropertyAddress(
                mSelector: AudioObjectPropertySelector(kAudioDevicePropertyDeviceUID),
                mScope: AudioObjectPropertyScope(kAudioObjectPropertyScopeGlobal),
                mElement: AudioObjectPropertyElement(kAudioObjectPropertyElementMaster))

            var name: CFString? = nil
            var propsize: UInt32 = UInt32(MemoryLayout<CFString?>.size)
            let result: OSStatus = AudioObjectGetPropertyData(self.audioDeviceID, &address, 0, nil, &propsize, &name)
            if (result != 0) {
                return nil
            }
            return name as String?
        }
    }

    var name: String? {
        get {
            var address: AudioObjectPropertyAddress = AudioObjectPropertyAddress(
                mSelector: AudioObjectPropertySelector(kAudioDevicePropertyDeviceNameCFString),
                mScope: AudioObjectPropertyScope(kAudioObjectPropertyScopeGlobal),
                mElement: AudioObjectPropertyElement(kAudioObjectPropertyElementMaster))

            var name: CFString? = nil
            var propsize: UInt32 = UInt32(MemoryLayout<CFString?>.size)
            let result: OSStatus = AudioObjectGetPropertyData(self.audioDeviceID, &address, 0, nil, &propsize, &name)
            if (result != 0) {
                return nil
            }
            return name as String?
        }
    }
}

class AudioDeviceFinder {
    static func findDevices() {
        var propsize: UInt32 = 0

        var address: AudioObjectPropertyAddress = AudioObjectPropertyAddress(
            mSelector: AudioObjectPropertySelector(kAudioHardwarePropertyDevices),
            mScope: AudioObjectPropertyScope(kAudioObjectPropertyScopeGlobal),
            mElement: AudioObjectPropertyElement(kAudioObjectPropertyElementMaster))

        var result: OSStatus = AudioObjectGetPropertyDataSize(AudioObjectID(kAudioObjectSystemObject), &address, UInt32(MemoryLayout<AudioObjectPropertyAddress>.size), nil, &propsize)
        if (result != 0) {
            print("Error \(result) from AudioObjectGetPropertyDataSize")
            return
        }

        let numDevices = Int(propsize / UInt32(MemoryLayout<AudioDeviceID>.size))
        var devids = [AudioDeviceID]()
        for _ in 0..<numDevices {
            devids.append(AudioDeviceID())
        }

        result = AudioObjectGetPropertyData(AudioObjectID(kAudioObjectSystemObject), &address, 0, nil, &propsize, &devids)
        if (result != 0) {
            print("Error \(result) from AudioObjectGetPropertyData")
            return
        }

        for i in 0..<numDevices {
            let audioDevice = AudioDevice(deviceID: devids[i])
            if (audioDevice.hasOutput) {
                if let name = audioDevice.name,
                   let uid = audioDevice.uid {
                    print("Found device \"\(name)\", uid=\(uid)")
                }
            }
        }
    }
}
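For example, to run the enumeration:

// Prints every device that has at least one output channel.
AudioDeviceFinder.findDevices()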
The code you posted works perfectly fine for audio input devices when I paste it into an Xcode Playground.
Note, however, that the AVCaptureDevice API does not list audio output devices, as they are not capture devices but playback devices. If a device supports both input and output, you can still use the device's uniqueID in an output context, for example with AVPlayer's audioOutputDeviceUniqueID.
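As an illustration of that output-side usage (a sketch; mediaURL is a placeholder and device would be one of the AVCaptureDevice instances from the question's loop):

// Hypothetical: route an AVPlayer's audio to the device whose uniqueID we found (macOS only).
let player = AVPlayer(url: mediaURL)
player.audioOutputDeviceUniqueID = device.uniqueID
player.play()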
(Also note that if you want your code to work on iOS as well, devices(for:) has been deprecated since iOS 11 and you should move to AVCaptureDevice.DiscoverySession instead.)
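A minimal DiscoverySession-based replacement for the question's loop might look like this (the device types listed here are just an example):

import AVFoundation

// Enumerate audio capture devices via DiscoverySession instead of the deprecated devices(for:).
let discovery = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInMicrophone],
                                                 mediaType: .audio,
                                                 position: .unspecified)
print(discovery.devices.count)
for device in discovery.devices {
    print(device.localizedName)
}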
Regarding your request for additional info on Core Audio and AudioToolbox, this SO question has some pretty comprehensive answers on the matter. The question asks for input devices, but the answers provide enough context to let you understand handling of the output side as well. There's even an answer with some (dated) Swift code. On a personal note, I have to say that calling the Core Audio API from Swift is often more pain than gain. Because of that it might be faster, although a bit less safe, to wrap those portions of code in Objective-C or plain C and expose them via the Swift bridging header, if your project allows it.
If you want something like an action sheet and need to switch between audio devices seamlessly, use this code.
Code
import Foundation
import AVFoundation
import UIKit

@objc class AudioDeviceHandler: NSObject {

    @objc static let shared = AudioDeviceHandler()

    /// Present audio device selection alert
    /// - Parameters:
    ///   - presenterViewController: viewController where the alert need to present
    ///   - sourceView: alertController source view in case of iPad
    @objc func presentAudioOutput(_ presenterViewController: UIViewController, _ sourceView: UIView) {
        let speakerTitle = "Speaker"
        let headphoneTitle = "Headphones"
        let deviceTitle = (UIDevice.current.userInterfaceIdiom == .pad) ? "iPad" : "iPhone"
        let cancelTitle = "Cancel"

        var deviceAction = UIAlertAction()
        var headphonesExist = false

        let optionMenu = UIAlertController(title: nil, message: nil, preferredStyle: .actionSheet)

        guard let availableInputs = AVAudioSession.sharedInstance().availableInputs else {
            print("No inputs available ")
            return
        }

        for audioPort in availableInputs {
            switch audioPort.portType {
            case .bluetoothA2DP, .bluetoothHFP, .bluetoothLE:
                let bluetoothAction = UIAlertAction(title: audioPort.portName, style: .default) { _ in
                    self.setPreferredInput(port: audioPort)
                }
                if isCurrentOutput(portType: audioPort.portType) {
                    bluetoothAction.setValue(true, forKey: "checked")
                }
                optionMenu.addAction(bluetoothAction)

            case .builtInMic, .builtInReceiver:
                deviceAction = UIAlertAction(title: deviceTitle, style: .default, handler: { _ in
                    self.setToDevice(port: audioPort)
                })

            case .headphones, .headsetMic:
                headphonesExist = true
                let headphoneAction = UIAlertAction(title: headphoneTitle, style: .default) { _ in
                    self.setPreferredInput(port: audioPort)
                }
                if isCurrentOutput(portType: .headphones) || isCurrentOutput(portType: .headsetMic) {
                    headphoneAction.setValue(true, forKey: "checked")
                }
                optionMenu.addAction(headphoneAction)

            case .carAudio:
                let carAction = UIAlertAction(title: audioPort.portName, style: .default) { _ in
                    self.setPreferredInput(port: audioPort)
                }
                if isCurrentOutput(portType: audioPort.portType) {
                    carAction.setValue(true, forKey: "checked")
                }
                optionMenu.addAction(carAction)

            default:
                break
            }
        }

        // device actions only required if no headphone available
        if !headphonesExist {
            if (isCurrentOutput(portType: .builtInReceiver) ||
                isCurrentOutput(portType: .builtInMic)) {
                deviceAction.setValue(true, forKey: "checked")
            }
            optionMenu.addAction(deviceAction)
        }

        // configure speaker action
        let speakerAction = UIAlertAction(title: speakerTitle, style: .default) { _ in
            self.setOutputToSpeaker()
        }
        if isCurrentOutput(portType: .builtInSpeaker) {
            speakerAction.setValue(true, forKey: "checked")
        }
        optionMenu.addAction(speakerAction)

        // configure cancel action
        let cancelAction = UIAlertAction(title: cancelTitle, style: .cancel)
        optionMenu.addAction(cancelAction)

        optionMenu.modalPresentationStyle = .popover
        if let presenter = optionMenu.popoverPresentationController {
            presenter.sourceView = sourceView
            presenter.sourceRect = sourceView.bounds
        }

        presenterViewController.present(optionMenu, animated: true, completion: nil)

        // auto dismiss after 5 seconds
        DispatchQueue.main.asyncAfter(deadline: .now() + 5.0) {
            optionMenu.dismiss(animated: true, completion: nil)
        }
    }

    @objc func setOutputToSpeaker() {
        do {
            try AVAudioSession.sharedInstance().overrideOutputAudioPort(AVAudioSession.PortOverride.speaker)
        } catch let error as NSError {
            print("audioSession error turning on speaker: \(error.localizedDescription)")
        }
    }

    fileprivate func setPreferredInput(port: AVAudioSessionPortDescription) {
        do {
            try AVAudioSession.sharedInstance().setPreferredInput(port)
        } catch let error as NSError {
            print("audioSession error change to input: \(port.portName) with error: \(error.localizedDescription)")
        }
    }

    fileprivate func setToDevice(port: AVAudioSessionPortDescription) {
        do {
            // remove speaker if needed
            try AVAudioSession.sharedInstance().overrideOutputAudioPort(AVAudioSession.PortOverride.none)
            // set new input
            try AVAudioSession.sharedInstance().setPreferredInput(port)
        } catch let error as NSError {
            print("audioSession error change to input: \(AVAudioSession.PortOverride.none.rawValue) with error: \(error.localizedDescription)")
        }
    }

    @objc func isCurrentOutput(portType: AVAudioSession.Port) -> Bool {
        AVAudioSession.sharedInstance().currentRoute.outputs.contains(where: { $0.portType == portType })
    }
}
How to use
class ViewController: UIViewController {

    @IBOutlet weak var audioButton: UIButton!

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view.
    }

    @IBAction func selectAudio(_ sender: Any) {
        // present audio device selection action sheet
        AudioDeviceHandler.shared.presentAudioOutput(self, audioButton)
    }
}
It is possible to list input and output devices. This is a simplification of stevex's answer.
For output devices:
if (audioDevice.hasOutput) {
    if let name = audioDevice.name,
       let uid = audioDevice.uid {
        print("Found device \"\(name)\", uid=\(uid)")
    }
}
For input devices:
if (!audioDevice.hasOutput) {
    if let name = audioDevice.name,
       let uid = audioDevice.uid {
        print("Found device \"\(name)\", uid=\(uid)")
    }
}
(Notice the ! before audioDevice.hasOutput.)
NSKeyedArchiver.archiveRootObject(<#rootObject: AnyObject#>, toFile: <#String#>)
Only returns true the first time. Every next time I call it, the method returns false.
I read some SO posts that said I can't rewrite data this way. However, I tried:
NSFileManager.defaultManager().removeItemAtPath(path, error: nil)
and it still didn't help.
What I did:
Checked all my model files for the NSCoding protocol
Checked all my required init(coder aDecoder: NSCoder) and func encodeWithCoder(aCoder: NSCoder)
I am missing something, since I have done this in my last app and it worked flawlessly.
import Foundation

private let ON_DISK_DATA_DICTIONARY = "savedDataPathsOnDisk"
private let _WBMAccessDataOnDiskMShared = WBMAccessDataOnDiskM()

private var dataDirectories: NSArray! = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)
private var dataDirectoryURL: NSURL! = NSURL(fileURLWithPath: dataDirectories.objectAtIndex(0) as! String, isDirectory: true)
private var dataDirectoryPath: String! = dataDirectoryURL.path!

let FILE_FORMAT = ".archive"

class WBMAccessDataOnDiskM: NSObject
{
    class var sharedData: WBMAccessDataOnDiskM
    {
        return _WBMAccessDataOnDiskMShared
    }

    private var dataAndPathDictionary = [String: String]()

    func getDataAndPathDictionary() -> [String: String]
    {
        return self.dataAndPathDictionary
    }

    func addDataAndPathToDictionary(data: String, path: String)
    {
        if !checkIfDataAllreadyExists(data)
        {
            let fullPath = createFullDataPath(path)
            dataAndPathDictionary[data] = fullPath
            NSUserDefaults.standardUserDefaults().setObject(dataAndPathDictionary, forKey: ON_DISK_DATA_DICTIONARY)
        }
    }

    func checkIfDataIsAvailable(dataPathComponent: String) -> (Bool, String)
    {
        var paths = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0] as! String
        var dataPath = paths.stringByAppendingPathComponent(dataPathComponent)
        var checkValidation = NSFileManager.defaultManager()
        println(dataPathComponent)
        if (checkValidation.fileExistsAtPath(dataPath))
        {
            return (true, dataPath)
        }
        else
        {
            return (false, "")
        }
    }

    func checkForDataOnDisk() -> Bool
    {
        let dataDict = NSUserDefaults.standardUserDefaults().objectForKey(ON_DISK_DATA_DICTIONARY) as? [String: String]
        if dataDict == nil
        {
            return false
        }
        else
        {
            dataAndPathDictionary = dataDict!
            return true
        }
    }

    private func checkIfDataAllreadyExists(data: String) -> Bool
    {
        let keys = self.dataAndPathDictionary.keys.array
        if contains(keys, data)
        {
            return true
        }
        return false
    }

    private func createFullDataPath(path: String) -> String
    {
        var fullPathURL = dataDirectoryURL.URLByAppendingPathComponent(path + FILE_FORMAT)
        return fullPathURL.path!
    }

    func saveDataArray(data: [AnyObject], path: String)
    {
        NSFileManager.defaultManager().removeItemAtPath(path, error: nil)
        if NSKeyedArchiver.archiveRootObject(data, toFile: path)
        {
            // SAVING
            println(" Saving data ARRAY ")
        }
        else
        {
            println(" NOT saving data ARRAY ")
        }
    }

    func saveDataObject(dataObject: AnyObject, path: String)
    {
        if NSKeyedArchiver.archiveRootObject(dataObject, toFile: path)
        {
            println(" Saving data OBJECT ")
        }
        else
        {
            println(" NOT saving data OBJECT ")
        }
    }

    // dataFromDisk = NSKeyedUnarchiver.unarchiveObjectWithFile(pathForNews) as? [AnyObject]
    func loadDataArray(path: String) -> [AnyObject]?
    {
        var dataArrayFromDisk: [AnyObject]?
        dataArrayFromDisk = NSKeyedUnarchiver.unarchiveObjectWithFile(path) as? [AnyObject]
        return dataArrayFromDisk
    }

    func loadDataObject(path: String) -> AnyObject?
    {
        var dataObjectFromDisk: AnyObject?
        dataObjectFromDisk = NSKeyedUnarchiver.unarchiveObjectWithFile(path)
        return dataObjectFromDisk
    }

    func getNewsDataLanguagePath() -> String
    {
        var currentOSLanguage = LOCALIZATION.currentOsLanguage
        currentOSLanguage = currentOSLanguage.substringToIndex(2)
        if currentOSLanguage == "de"
        {
            return ON_DISK_CONTENT_DE
        }
        else if currentOSLanguage == "en"
        {
            return ON_DISK_CONTENT_ENG
        }
        return ON_DISK_CONTENT_ENG
    }
}
I am using Xcode 6.4 and Swift 1.2.
Any help & code correction is welcome.
Because the code you posted doesn't contain the calls to saveDataArray or saveDataObject, I assume you are maintaining the path of the archived object manually. This is where things went wrong: NSKeyedArchiver's archiveRootObject method can maintain the archive file path automatically.
From Apple's documentation:
Archives an object graph rooted at a given object by encoding it into a data object then atomically writes the resulting data object to a file at a given path, and returns a Boolean value that indicates whether the operation was successful.
And there is another question on SO that may help you.
I followed Apple's instructions in this good example: Persist Data
But I had the same problem you describe with my app for Apple TV. In the end I changed the Documents directory to the Caches directory and it's working well.
static let DocumentsDirectorio = NSFileManager().URLsForDirectory(.CachesDirectory, inDomains: .UserDomainMask).first!
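A hypothetical call site using that directory might look like this (Swift 1.2-era API to match the rest of the thread; the file name and myDataArray are placeholders):

// Build a concrete file path inside the Caches directory and archive to it.
let saveURL = DocumentsDirectorio.URLByAppendingPathComponent("savedData.archive")
if NSKeyedArchiver.archiveRootObject(myDataArray, toFile: saveURL.path!) {
    println(" Saving data ARRAY ")
} else {
    println(" NOT saving data ARRAY ")
}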
I am currently trying to display the album artwork for a locally stored .mp3 track in an ImageView. Does anyone know how to fetch this artwork in Swift in order to accomplish this?
I have found this solution (iOS AVFoundation: How do I fetch artwork from an mp3 file?) but the code is written in Objective C. I simply want to grab the image embedded in my MP3 and display it in my ImageView.
I've looked at the API documentation for MPMediaItemArtwork and found an example that does what I am trying to do, also in Objective C, here (http://www.codeitive.com/0zHjkUjUWX/not-able-to-get-the-uiimage-from-mpmediaitempropertyartwork.html), but cannot come up with a solution. My code is as follows:
import UIKit
import AVFoundation
import MediaPlayer

class ViewController: UIViewController {

    let audioPath: NSURL! = NSBundle.mainBundle().URLForResource("SippinOnFire", withExtension: "mp3")

    @IBOutlet var artistImage: UIImageView!
    @IBOutlet var trackLabel: UILabel!
    @IBOutlet var artistLabel: UILabel!
    @IBOutlet var sliderValue: UISlider!

    var player: AVAudioPlayer = AVAudioPlayer()

    @IBAction func play(sender: AnyObject) {
        let audioInfo = MPNowPlayingInfoCenter.defaultCenter()
        println(audioInfo)
        player.play()
        //println("Playing \(audioPath)")
        let playerItem = AVPlayerItem(URL: audioPath)
        let metadataList = playerItem.asset.metadata as! [AVMetadataItem]
        for item in metadataList {
            if let stringValue = item.value {
                println(item.commonKey)
                if item.commonKey == "title" {
                    trackLabel.text = stringValue as? String
                }
                if item.commonKey == "artist" {
                    artistLabel.text = stringValue as? String
                }
                if item.commonKey == "artwork" {
                    if let audioImage = UIImage(data: item.value as! NSData) {
                        let audioArtwork = MPMediaItemArtwork(image: audioImage)
                        println(audioImage.description)
                    }
                }
            }
        }
    }

    @IBAction func pause(sender: AnyObject) {
        player.pause()
    }

    @IBAction func stop(sender: AnyObject) {
        player.stop()
        player.currentTime = 0
    }

    @IBAction func sliderChanged(sender: AnyObject) {
        player.volume = sliderValue.value
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        var error: NSError? = nil
        player = AVAudioPlayer(contentsOfURL: audioPath!, error: &error)
        player.volume = 0.5
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }
}
Here is a screenshot of my sample .mp3 file. As you can see, there is indeed album artwork visible in the "Get Info" section of Finder. I've also opened the .mp3 in iTunes to make sure, and confirmed there is artwork in its "Get Info" section there as well as under the "Artwork" tab.
However, when trying to use the commonKey to assign the image to my imageView I find that there is no commonKey for "artwork".
Thanks
Change your snippet of code into this (I already tested it):
I added commented-out println lines in places of interest; feel free to uncomment them to see what is happening.
for item in metadataList {
    if item.commonKey == nil {
        continue
    }
    if let key = item.commonKey, let value = item.value {
        //println(key)
        //println(value)
        if key == "title" {
            trackLabel.text = value as? String
        }
        if key == "artist" {
            artistLabel.text = value as? String
        }
        if key == "artwork" {
            if let audioImage = UIImage(data: value as! NSData) {
                //println(audioImage.description)
                artistImage.image = audioImage
            }
        }
    }
}
UPDATE: A bit of clean up of this code
for item in metadataList {
    guard let key = item.commonKey, let value = item.value else {
        continue
    }
    switch key {
    case "title": trackLabel.text = value as? String
    case "artist": artistLabel.text = value as? String
    case "artwork" where value is NSData: artistImage.image = UIImage(data: value as! NSData)
    default:
        continue
    }
}
UPDATE: For Swift 4
for item in metadataList {
    guard let key = item.commonKey?.rawValue, let value = item.value else {
        continue
    }
    switch key {
    case "title": trackLabel.text = value as? String
    case "artist": artistLabel.text = value as? String
    case "artwork" where value is Data: artistImage.image = UIImage(data: value as! Data)
    default:
        continue
    }
}
edit/update Swift 4 or later:
import MediaPlayer

var nowPlayingInfo: [String: Any] = [:]
let playerItem = AVPlayerItem(url: url)
let metadataList = playerItem.asset.metadata

for item in metadataList {
    switch item.commonKey {
    case .commonKeyTitle?:
        nowPlayingInfo[MPMediaItemPropertyTitle] = item.stringValue ?? ""
    case .commonKeyType?:
        nowPlayingInfo[MPMediaItemPropertyGenre] = item.stringValue ?? ""
    case .commonKeyAlbumName?:
        nowPlayingInfo[MPMediaItemPropertyAlbumTitle] = item.stringValue ?? ""
    case .commonKeyArtist?:
        nowPlayingInfo[MPMediaItemPropertyArtist] = item.stringValue ?? ""
    case .commonKeyArtwork?:
        if let data = item.dataValue,
           let image = UIImage(data: data) {
            nowPlayingInfo[MPMediaItemPropertyArtwork] = MPMediaItemArtwork(boundsSize: image.size) { _ in image }
        }
    case .none: break
    default: break
    }
}

let audioInfo = MPNowPlayingInfoCenter.default()
audioInfo.nowPlayingInfo = nowPlayingInfo
Note: You will have to invoke beginReceivingRemoteControlEvents(), otherwise it will not work on an actual device. You will also need to set your app's Background Modes (Audio and AirPlay), set your AVAudioSession category to AVAudioSessionCategoryPlayback, and set it active:
do {
    try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default, options: [.mixWithOthers, .allowAirPlay])
    print("Playback OK")
    try AVAudioSession.sharedInstance().setActive(true)
    print("Session is Active")
} catch {
    print(error)
}
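And, per the note above, remote-control events need to be enabled somewhere appropriate, for example in viewDidLoad:

// Required on a real device so the now-playing info and remote controls are honored.
UIApplication.shared.beginReceivingRemoteControlEvents()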
Try this:
It appears that sometimes iOS 8 returns nil on the first attempt at obtaining this info:
if let audioCenter = MPNowPlayingInfoCenter.defaultCenter() {
    if let audioInfo = audioCenter.nowPlayingInfo {
        if let artwork = audioInfo[MPMediaItemPropertyArtwork] as? MPMediaItemArtwork {
            var image: UIImage? = artwork.imageWithSize(artwork.bounds.size)
            if image == nil {
                image = artwork.imageWithSize(artwork.bounds.size)
            }
            if image != nil {
                println("image loaded")
            }
        }
    }
}