List all available audio devices - Swift

I want to list all available audio devices in Swift to provide a selection for input and output. My application should listen on one audio channel and "write" to another. I do not want the system default!
let devices = AVCaptureDevice.devices(for: .audio)
print(devices.count)
for device in devices {
print(device.localizedName)
}
The code lists 0 devices, but I expect at least the internal output.
Some links to CoreAudio, AudioToolbox and AVFoundation that explain the audio source selection would be nice.

Here's some Swift 5 code that will enumerate all the audio devices.
You can use the uid with AVAudioPlayer's currentDevice property to output to a specific device.
import Cocoa
import AVFoundation
class AudioDevice {
var audioDeviceID:AudioDeviceID
init(deviceID:AudioDeviceID) {
self.audioDeviceID = deviceID
}
var hasOutput: Bool {
get {
var address:AudioObjectPropertyAddress = AudioObjectPropertyAddress(
mSelector:AudioObjectPropertySelector(kAudioDevicePropertyStreamConfiguration),
mScope:AudioObjectPropertyScope(kAudioDevicePropertyScopeOutput),
mElement:0)
var propsize:UInt32 = UInt32(MemoryLayout<CFString?>.size);
var result:OSStatus = AudioObjectGetPropertyDataSize(self.audioDeviceID, &address, 0, nil, &propsize);
if (result != 0) {
return false;
}
let bufferList = UnsafeMutablePointer<AudioBufferList>.allocate(capacity:Int(propsize))
defer { bufferList.deallocate() } // release the buffer on every return path
result = AudioObjectGetPropertyData(self.audioDeviceID, &address, 0, nil, &propsize, bufferList);
if (result != 0) {
return false
}
let buffers = UnsafeMutableAudioBufferListPointer(bufferList)
for bufferNum in 0..<buffers.count {
if buffers[bufferNum].mNumberChannels > 0 {
return true
}
}
return false
}
}
var uid:String? {
get {
var address:AudioObjectPropertyAddress = AudioObjectPropertyAddress(
mSelector:AudioObjectPropertySelector(kAudioDevicePropertyDeviceUID),
mScope:AudioObjectPropertyScope(kAudioObjectPropertyScopeGlobal),
mElement:AudioObjectPropertyElement(kAudioObjectPropertyElementMaster))
var name:CFString? = nil
var propsize:UInt32 = UInt32(MemoryLayout<CFString?>.size)
let result:OSStatus = AudioObjectGetPropertyData(self.audioDeviceID, &address, 0, nil, &propsize, &name)
if (result != 0) {
return nil
}
return name as String?
}
}
var name:String? {
get {
var address:AudioObjectPropertyAddress = AudioObjectPropertyAddress(
mSelector:AudioObjectPropertySelector(kAudioDevicePropertyDeviceNameCFString),
mScope:AudioObjectPropertyScope(kAudioObjectPropertyScopeGlobal),
mElement:AudioObjectPropertyElement(kAudioObjectPropertyElementMaster))
var name:CFString? = nil
var propsize:UInt32 = UInt32(MemoryLayout<CFString?>.size)
let result:OSStatus = AudioObjectGetPropertyData(self.audioDeviceID, &address, 0, nil, &propsize, &name)
if (result != 0) {
return nil
}
return name as String?
}
}
}
class AudioDeviceFinder {
static func findDevices() {
var propsize:UInt32 = 0
var address:AudioObjectPropertyAddress = AudioObjectPropertyAddress(
mSelector:AudioObjectPropertySelector(kAudioHardwarePropertyDevices),
mScope:AudioObjectPropertyScope(kAudioObjectPropertyScopeGlobal),
mElement:AudioObjectPropertyElement(kAudioObjectPropertyElementMaster))
var result:OSStatus = AudioObjectGetPropertyDataSize(AudioObjectID(kAudioObjectSystemObject), &address, UInt32(MemoryLayout<AudioObjectPropertyAddress>.size), nil, &propsize)
if (result != 0) {
print("Error \(result) from AudioObjectGetPropertyDataSize")
return
}
let numDevices = Int(propsize / UInt32(MemoryLayout<AudioDeviceID>.size))
var devids = [AudioDeviceID]()
for _ in 0..<numDevices {
devids.append(AudioDeviceID())
}
result = AudioObjectGetPropertyData(AudioObjectID(kAudioObjectSystemObject), &address, 0, nil, &propsize, &devids);
if (result != 0) {
print("Error \(result) from AudioObjectGetPropertyData")
return
}
for i in 0..<numDevices {
let audioDevice = AudioDevice(deviceID:devids[i])
if (audioDevice.hasOutput) {
if let name = audioDevice.name,
let uid = audioDevice.uid {
print("Found device \"\(name)\", uid=\(uid)")
}
}
}
}
}
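For reference, a minimal usage sketch on macOS (the UID string is a hypothetical value that findDevices() would print, and "sample.mp3" is assumed to be in the app bundle):
import AVFoundation

// Print every output-capable device with its UID.
AudioDeviceFinder.findDevices()

// Route playback to one of the printed UIDs (hypothetical value shown here).
if let url = Bundle.main.url(forResource: "sample", withExtension: "mp3"),
   let player = try? AVAudioPlayer(contentsOf: url) {
    player.currentDevice = "AppleUSBAudioEngine:SomeVendor:SomeDevice:1234:2"
    player.play()
}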

The code you posted works perfectly fine for audio input devices when I paste it into an Xcode Playground.
Note, however, that the AVCaptureDevice API does not list audio output devices, because they are not capture devices but playback devices. If a device supports both input and output, you can still use the device's uniqueID in an output context, for example with AVPlayer's audioOutputDeviceUniqueID.
(Also note that if you want your code to work on iOS as well, devices(for:) has been deprecated since iOS 11 and you should move to AVCaptureDevice.DiscoverySession instead.)
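As a rough sketch of both points, assuming macOS (the file path is a placeholder):
import AVFoundation

// Enumerate audio capture (input) devices without the deprecated devices(for:).
let discovery = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInMicrophone],
                                                 mediaType: .audio,
                                                 position: .unspecified)
for device in discovery.devices {
    print(device.localizedName, device.uniqueID)
}

// On macOS, AVPlayer can be pointed at a specific output device by unique ID.
let player = AVPlayer(url: URL(fileURLWithPath: "/path/to/file.mp3"))
player.audioOutputDeviceUniqueID = discovery.devices.first?.uniqueID // nil means system default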
Regarding your request for additional info on Core Audio and AudioToolbox, this SO question has some pretty comprehensive answers on the matter. The question asks about input devices, but the answers provide enough context to understand the output side as well. There's even an answer with some (dated) Swift code. On a personal note, calling the Core Audio API from Swift is often more pain than gain. Because of that it might be faster, although a bit less safe, to wrap those portions of code in Objective-C or plain C and expose them via the Swift bridging header, if your project allows it.

If you want something like an action sheet and need to switch between audio devices seamlessly, use the code below.
Code
import Foundation
import AVFoundation
import UIKit
@objc class AudioDeviceHandler: NSObject {
@objc static let shared = AudioDeviceHandler()
/// Present audio device selection alert
/// - Parameters:
/// - presenterViewController: viewController where the alert need to present
/// - sourceView: alertController source view in case of iPad
@objc func presentAudioOutput(_ presenterViewController : UIViewController, _ sourceView: UIView) {
let speakerTitle = "Speaker"
let headphoneTitle = "Headphones"
let deviceTitle = (UIDevice.current.userInterfaceIdiom == .pad) ? "iPad" : "iPhone"
let cancelTitle = "Cancel"
var deviceAction = UIAlertAction()
var headphonesExist = false
let optionMenu = UIAlertController(title: nil, message: nil, preferredStyle: .actionSheet)
guard let availableInputs = AVAudioSession.sharedInstance().availableInputs else {
print("No inputs available ")
return
}
for audioPort in availableInputs {
switch audioPort.portType {
case .bluetoothA2DP, .bluetoothHFP, .bluetoothLE :
let bluetoothAction = UIAlertAction(title: audioPort.portName, style: .default) { _ in
self.setPreferredInput(port: audioPort)
}
if isCurrentOutput(portType: audioPort.portType) {
bluetoothAction.setValue(true, forKey: "checked")
}
optionMenu.addAction(bluetoothAction)
case .builtInMic, .builtInReceiver:
deviceAction = UIAlertAction(title: deviceTitle, style: .default, handler: { _ in
self.setToDevice(port: audioPort)
})
case .headphones, .headsetMic:
headphonesExist = true
let headphoneAction = UIAlertAction(title: headphoneTitle, style: .default) { _ in
self.setPreferredInput(port: audioPort)
}
if isCurrentOutput(portType: .headphones) || isCurrentOutput(portType: .headsetMic) {
headphoneAction.setValue(true, forKey: "checked")
}
optionMenu.addAction(headphoneAction)
case .carAudio:
let carAction = UIAlertAction(title: audioPort.portName, style: .default) { _ in
self.setPreferredInput(port: audioPort)
}
if isCurrentOutput(portType: audioPort.portType) {
carAction.setValue(true, forKey: "checked")
}
optionMenu.addAction(carAction)
default:
break
}
}
// device actions only required if no headphone available
if !headphonesExist {
if (isCurrentOutput(portType: .builtInReceiver) ||
isCurrentOutput(portType: .builtInMic)) {
deviceAction.setValue(true, forKey: "checked")
}
optionMenu.addAction(deviceAction)
}
// configure speaker action
let speakerAction = UIAlertAction(title: speakerTitle, style: .default) { _ in
self.setOutputToSpeaker()
}
if isCurrentOutput(portType: .builtInSpeaker) {
speakerAction.setValue(true, forKey: "checked")
}
optionMenu.addAction(speakerAction)
// configure cancel action
let cancelAction = UIAlertAction(title: cancelTitle, style: .cancel)
optionMenu.addAction(cancelAction)
optionMenu.modalPresentationStyle = .popover
if let presenter = optionMenu.popoverPresentationController {
presenter.sourceView = sourceView
presenter.sourceRect = sourceView.bounds
}
presenterViewController.present(optionMenu, animated: true, completion: nil)
// auto dismiss after 5 seconds
DispatchQueue.main.asyncAfter(deadline: .now() + 5.0) {
optionMenu.dismiss(animated: true, completion: nil)
}
}
@objc func setOutputToSpeaker() {
do {
try AVAudioSession.sharedInstance().overrideOutputAudioPort(AVAudioSession.PortOverride.speaker)
} catch let error as NSError {
print("audioSession error turning on speaker: \(error.localizedDescription)")
}
}
fileprivate func setPreferredInput(port: AVAudioSessionPortDescription) {
do {
try AVAudioSession.sharedInstance().setPreferredInput(port)
} catch let error as NSError {
print("audioSession error change to input: \(port.portName) with error: \(error.localizedDescription)")
}
}
fileprivate func setToDevice(port: AVAudioSessionPortDescription) {
do {
// remove speaker if needed
try AVAudioSession.sharedInstance().overrideOutputAudioPort(AVAudioSession.PortOverride.none)
// set new input
try AVAudioSession.sharedInstance().setPreferredInput(port)
} catch let error as NSError {
print("audioSession error change to input: \(AVAudioSession.PortOverride.none.rawValue) with error: \(error.localizedDescription)")
}
}
@objc func isCurrentOutput(portType: AVAudioSession.Port) -> Bool {
AVAudioSession.sharedInstance().currentRoute.outputs.contains(where: { $0.portType == portType })
}
}
How to use
class ViewController: UIViewController {
@IBOutlet weak var audioButton: UIButton!
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view.
}
@IBAction func selectAudio(_ sender: Any) {
// present audio device selection action sheet
AudioDeviceHandler.shared.presentAudioOutput(self, audioButton)
}
}
Result

It is possible to list input and output devices. This is a simplification of stevex's answer.
For output devices:
if (audioDevice.hasOutput) {
if let name = audioDevice.name,
let uid = audioDevice.uid {
print("Found device \"\(name)\", uid=\(uid)")
}
}
For input devices:
if (!audioDevice.hasOutput) {
if let name = audioDevice.name,
let uid = audioDevice.uid {
print("Found device \"\(name)\", uid=\(uid)")
}
}
(Notice the ! before audioDevice.hasOutput in the second snippet.)
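A small sketch that combines both checks into one pass (it assumes the AudioDevice class and the device ID list from stevex's answer above):
import CoreAudio

// Split every device ID into input-only and output-capable devices.
func partitionDevices(_ deviceIDs: [AudioDeviceID]) -> (inputs: [AudioDevice], outputs: [AudioDevice]) {
    let devices = deviceIDs.map { AudioDevice(deviceID: $0) }
    return (inputs: devices.filter { !$0.hasOutput },
            outputs: devices.filter { $0.hasOutput })
}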

Related

Get callback when ADMOB reward ad is closed without seeing whole ad in ios swift

I am using a rewarded AdMob ad in my project with the latest SDK. How can I get a proper callback when the user closes the ad partway through? I know there is a GADFullScreenContentDelegate method, adDidDismiss, in which I run a block of code that should only execute after the whole ad has been watched, and that case works fine. But whether I watch the whole ad or close it early, this delegate method gets called either way, so there is no way to differentiate a completed ad from an incomplete one. Please help me with this.
video link
As shown in the video, the first time I do not watch the whole ad and just close it, yet the scratch card popup still appears because the delegate method is called, which I don't want. Then I watch the whole ad and get my reward, and that part works fine.
My code snippet:
enum RewardAdType {
case avatar, freeChips, scratchCard, chips250
}
typealias AD_COMPLETION_BLOCK = (_ success: Bool) -> ()
class RewardAdManager : NSObject
{
//MARK: - PROPERTIES
var rewardBasedVideoAd : GADRewardedAd? = nil
var rewardValue = ""
var type: RewardAdType? = nil
}
//MARK: - HELPERS
extension RewardAdManager
{
func loadRewardedAd(vc: UIViewController, userId: String, type: RewardAdType, imageName: String? = nil, chipsCoin: String? = nil, completion: @escaping AD_COMPLETION_BLOCK)
{
self.type = type
let adUnit = self.type == .avatar ? Constants.REWARD_AD_AVATAR_LIVE_ID : Constants.REWARD_AD_WINCHIPS_LIVE_ID
let request = GADRequest()
GADRewardedAd.load(withAdUnitID: adUnit, request: request) { [weak self] ad, error in
guard let self = self, error == nil else {
Helpers.hidehud()
self?.type = nil
self?.rewardBasedVideoAd = nil
return
}
let serverSideVerificationOptions = GADServerSideVerificationOptions()
serverSideVerificationOptions.userIdentifier = userId
if type == .scratchCard {
self.rewardValue = self.generateRandomRewardValue()
serverSideVerificationOptions.customRewardString = self.rewardValue
} else if type == .avatar {
serverSideVerificationOptions.customRewardString = imageName
} else if type == .freeChips {
serverSideVerificationOptions.customRewardString = chipsCoin
} else if type == .chips250 {
serverSideVerificationOptions.customRewardString = "250"
}
self.rewardBasedVideoAd = ad
self.rewardBasedVideoAd?.serverSideVerificationOptions = serverSideVerificationOptions
self.rewardBasedVideoAd?.fullScreenContentDelegate = self
self.showRewardedAd(viewController: vc, type: type, completion: completion)
}
}
func showRewardedAd(viewController: UIViewController, type: RewardAdType? = nil, completion: @escaping AD_COMPLETION_BLOCK)
{
Helpers.hidehud()
if let ad = self.rewardBasedVideoAd {
self.type = type
DispatchQueueHelper.delay {
ad.present(fromRootViewController: viewController) {}
}
completion(true)
} else {
self.type = nil
self.checkForSavedLanguage(viewController: viewController)
}
}
func checkForSavedLanguage(viewController: UIViewController)
{
let lang = LanguageCode(rawValue: Defaults[.LangCode]) ?? .english
viewController.showToast(msg: Constants.NO_ADS_MESSAGE.localizeString(string: lang))
}
func generateRandomRewardValue() -> String
{
var val = 0
let random = Double.random(in: 0.1...1.0)
if random < 0.20 {
val = 150
} else if random < 0.50 {
val = 200
} else if random < 0.70 {
val = 250
} else {
val = 350
}
return val.toString()
}
}
//MARK: - GADFullScreenContentDelegate
extension RewardAdManager : GADFullScreenContentDelegate {
func ad(_ ad: GADFullScreenPresentingAd, didFailToPresentFullScreenContentWithError error: Error)
{
self.type = nil
Helpers.hidehud()
let lang = LanguageCode(rawValue: Defaults[.LangCode]) ?? .english
let userInfo = ["msg":Constants.NO_ADS_MESSAGE.localizeString(string: lang)]
NotificationCaller.shared.showLeaveMsg(userInfo: userInfo)
}
func adDidDismissFullScreenContent(_ ad: GADFullScreenPresentingAd)
{
guard let type = self.type else { return }
self.rewardBasedVideoAd = nil
let userInfo: [String:RewardAdType] = ["type":type]
NotificationCaller.shared.showRewardTypePopup(userInfo: userInfo)
}
}
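For what it's worth, the SDK itself already separates the two events: the closure passed to present(fromRootViewController:userDidEarnRewardHandler:) runs only when the reward is actually earned, while adDidDismissFullScreenContent runs on every dismissal. A minimal, hypothetical sketch of that split (not the question's RewardAdManager):
import GoogleMobileAds
import UIKit

final class RewardTracker: NSObject, GADFullScreenContentDelegate {
    private var rewardEarned = false

    func show(_ ad: GADRewardedAd, from viewController: UIViewController) {
        rewardEarned = false
        ad.fullScreenContentDelegate = self
        ad.present(fromRootViewController: viewController) { [weak self] in
            self?.rewardEarned = true   // fires only when the reward was actually earned
        }
    }

    func adDidDismissFullScreenContent(_ ad: GADFullScreenPresentingAd) {
        // Fires on every dismissal, complete or not; branch on the flag set above.
        print(rewardEarned ? "reward earned" : "ad closed early")
    }
}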

Firestore pagination using MVVM architecture swift

I don't quite understand what I am doing wrong, since I am very new to MVVM; the same setup worked in an MVC architecture without any issues. I've set up my view model and am able to get the first set of results, but even that isn't working properly: I get 4 results instead of the 10 that LOADLIMIT is set to. Also, the VM function which triggers the query is called multiple times (3) instead of just once, i.e. even prior to scrolling.
Here is my VM:
enum FetchRestaurant {
case success
case error
case location
case end
}
class ListViewModel {
let restaurant: [Restaurant]?
let db = Firestore.firestore()
var restaurantArray = [Restaurant]()
var lastDocument: DocumentSnapshot?
var currentLocation: CLLocation?
typealias fetchRestaurantCallback = (_ restaurants: [Restaurant]?, _ message: String?, _ status: FetchRestaurant) -> Void
var restaurantFetched: fetchRestaurantCallback?
var fetchRestaurant: FetchRestaurant?
init(restaurant: [Restaurant]) {
self.restaurant = restaurant
}
func fetchRestaurantCallback(callback: @escaping fetchRestaurantCallback) {
self.restaurantFetched = callback
}
func fetchRestaurants(address: String) {
print("address received: \(address)")
getLocation(from: address) { location in
if let location = location {
self.currentLocation = location
self.queryGenerator(at: location)
} else {
self.restaurantFetched?(nil, nil, .location)
}
}
}
func queryGenerator(at location: CLLocation) {
var query: Query!
if restaurantArray.isEmpty {
query = db.collection("Restaurant_Data").whereField("distributionType", isLessThanOrEqualTo: 2).limit(to: Constants.Mealplan.LOADLIMIT)
} else {
print("last document:\(String(describing: lastDocument?.documentID))")
query = db.collection("Restaurant_Data").whereField("distributionType", isLessThanOrEqualTo: 2).start(afterDocument: lastDocument!).limit(to: Constants.Mealplan.LOADLIMIT)
}
batchFetch(query: query)
}
func batchFetch(query: Query) {
query.getDocuments { (querySnapshot, error) in
if let error = error {
self.restaurantFetched?(nil, error.localizedDescription, .error)
} else if querySnapshot!.isEmpty {
self.restaurantFetched?(nil, nil, .end)
} else if !querySnapshot!.isEmpty {
let queriedRestaurants = querySnapshot?.documents.compactMap { querySnapshot -> Restaurant? in
return try? querySnapshot.data(as: Restaurant.self)
}
guard let restaurants = queriedRestaurants,
let currentLocation = self.currentLocation else {
self.restaurantFetched?(nil, nil, .end)
return }
self.restaurantArray.append(contentsOf: self.applicableRestaurants(allQueriedRestaurants: restaurants, location: currentLocation))
DispatchQueue.main.asyncAfter(deadline: .now(), execute: {
self.restaurantFetched?(self.restaurantArray, nil, .success)
})
self.lastDocument = querySnapshot!.documents.last
}
}
}
func getLocation(from address: String, completionHandler: @escaping (_ location: CLLocation?) -> Void) {
let geocoder = CLGeocoder()
geocoder.geocodeAddressString(address) { (placemarks, error) in
guard let placemarks = placemarks,
let location = placemarks.first?.location else {
completionHandler(nil)
return
}
completionHandler(location)
}
}
}
And in the VC viewDidLoad:
var fetchMore = false
var reachedEnd = false
let leadingScreensForBatching: CGFloat = 5.0
var searchController = UISearchController(searchResultsController: nil)
var currentAddress : String?
var listViewModel = ListViewModel(restaurant: [Restaurant]())
override func viewDidLoad() {
super.viewDidLoad()
listViewModel.fetchRestaurantCallback { (restaurants, error, result) in
switch result {
case .success :
self.loadingShimmer.stopShimmering()
self.loadingShimmer.removeFromSuperview()
guard let fetchedRestaurants = restaurants else { return }
self.restaurantArray.append(contentsOf: fetchedRestaurants)
self.tableView.reloadData()
self.fetchMore = false
case .location :
self.showAlert(alertTitle: "No businesses nearby", message: "Try going back and changing the address")
case .error :
guard let error = error else { return }
self.showAlert(alertTitle: "Error", message: error)
case .end :
self.fetchMore = false
self.reachedEnd = true
}
}
if let currentAddress = currentAddress {
listViewModel.fetchRestaurants(address: currentAddress)
}
}
I would really appreciate links or resources for implementing MVVM in Swift with a Firestore back-end. I'm coming up short on searches here and on Google; I've even tried Medium.
EDIT
class ListViewController: UITableViewController {
lazy var loadingShimmer: UIImageView = {
let image = UIImage(named: "shimmer_background")
let imageview = UIImageView(image: image)
imageview.contentMode = .top
imageview.translatesAutoresizingMaskIntoConstraints = false
return imageview
}()
var restaurantArray = [Restaurant]()
var planDictionary = [String: Any]()
var fetchMore = false
var reachedEnd = false
let leadingScreensForBatching: CGFloat = 5.0
var searchController = UISearchController(searchResultsController: nil)
var currentAddress : String?
var listViewModel = ListViewModel(restaurant: [Restaurant]())
override func viewDidLoad() {
super.viewDidLoad()
setupTable()
}
override func viewWillAppear(_ animated: Bool) {
clearsSelectionOnViewWillAppear = false
}
func setupTable() {
navigationItem.backBarButtonItem = UIBarButtonItem(title: "Restaurant", style: .plain, target: nil, action: nil)
tableView.register(RestaurantCell.self, forCellReuseIdentifier: "Cell")
tableView.delegate = self
tableView.dataSource = self
let navigationBarHeight: CGFloat = self.navigationController!.navigationBar.frame.height
tableView.contentInset = UIEdgeInsets(top: 0, left: 0, bottom: -navigationBarHeight, right: 0)
tableView.separatorStyle = .none
tableView.showsVerticalScrollIndicator = false
tableView.addSubview(loadingShimmer)
loadingShimmer.topAnchor.constraint(equalTo: tableView.safeAreaLayoutGuide.topAnchor).isActive = true
loadingShimmer.leadingAnchor.constraint(equalTo: tableView.leadingAnchor).isActive = true
loadingShimmer.trailingAnchor.constraint(equalTo: tableView.trailingAnchor).isActive = true
loadingShimmer.startShimmering()
initialSetup()
}
func initialSetup() {
let addressOne = planDictionary["addressOne"] as! String + ", "
let city = planDictionary["city"] as! String + ", "
let postalCode = planDictionary["postalCode"] as! String
currentAddress = addressOne + city + postalCode
setupSearch()
listViewModel.fetchRestaurantCallback { (restaurants, error, result) in
switch result {
case .success :
self.loadingShimmer.stopShimmering()
self.loadingShimmer.removeFromSuperview()
guard let fetchedRestaurants = restaurants else { return }
self.restaurantArray.append(contentsOf: fetchedRestaurants)
self.tableView.reloadData()
self.fetchMore = false
case .location :
self.showAlert(alertTitle: "No businesses nearby", message: "Try going back and changing the address")
case .error :
guard let error = error else { return }
self.showAlert(alertTitle: "Error", message: error)
case .end :
self.fetchMore = false
self.reachedEnd = true
}
}
if let currentAddress = currentAddress {
listViewModel.fetchRestaurants(address: currentAddress)
}
}
override func scrollViewDidEndDecelerating(_ scrollView: UIScrollView) {
let off = scrollView.contentOffset.y
let off1 = scrollView.contentSize.height
if off > off1 - scrollView.frame.height * leadingScreensForBatching {
print("\(fetchMore), \(reachedEnd)")
if !fetchMore && !reachedEnd {
if let address = self.currentAddress {
print("address sent: \(address)")
listViewModel.fetchRestaurants(address: address)
}
}
}
}
}
That you're only getting back 4 results instead of 10 is not due to a faulty query or get-document request—those are coded properly. You're either losing documents when you parse them (some are failing Restaurant initialization), Constants.Mealplan.LOADLIMIT is wrong, or there aren't more than 4 documents in the collection itself that satisfy the query.
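One way to check the first possibility is to log decode failures instead of silently dropping them, for example with a helper like this (a sketch; the Restaurant type and FirebaseFirestoreSwift are assumed from the question):
import FirebaseFirestore
import FirebaseFirestoreSwift

// Decode a page of documents, printing any that fail Restaurant decoding.
func decodeRestaurants(from documents: [QueryDocumentSnapshot]) -> [Restaurant] {
    return documents.compactMap { document -> Restaurant? in
        do {
            return try document.data(as: Restaurant.self)
        } catch {
            print("Skipping \(document.documentID): \(error)")
            return nil
        }
    }
}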
That the query is executed 3 times instead of once is also not due to anything in this code—viewDidLoad is only called once and geocodeAddressString only returns once. You're making a fetch request elsewhere that we can't see.
In the batchFetch method, you have a guard that returns out of the function without ever calling its completion handler. This will leave the UI in a state of limbo. I'd recommend always calling the completion handler no matter why the function returns.
You never manage the document cursor. If the get-documents call returns fewer documents than the load limit, nil out the last-document cursor. That way, when you attempt to get the next page of documents, you can guard against a nil cursor and know whether there is anything left to fetch.
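Putting those two points together, a sketch of how batchFetch could look (it keeps the question's properties, callback, and LOADLIMIT, and only changes the early exits and the cursor handling):
func batchFetch(query: Query) {
    query.getDocuments { querySnapshot, error in
        if let error = error {
            self.restaurantFetched?(nil, error.localizedDescription, .error)
            return
        }
        guard let snapshot = querySnapshot, !snapshot.isEmpty,
              let currentLocation = self.currentLocation else {
            // Always report back, even on early exit, so the UI is never left waiting.
            self.restaurantFetched?(nil, nil, .end)
            return
        }
        let restaurants = snapshot.documents.compactMap { try? $0.data(as: Restaurant.self) }
        self.restaurantArray.append(contentsOf: self.applicableRestaurants(allQueriedRestaurants: restaurants, location: currentLocation))
        // A short page means there is nothing left to fetch: nil the cursor so the
        // next pagination attempt can bail out instead of force-unwrapping lastDocument.
        self.lastDocument = snapshot.documents.count < Constants.Mealplan.LOADLIMIT ? nil : snapshot.documents.last
        self.restaurantFetched?(self.restaurantArray, nil, .success)
    }
}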
There's no need to pass in an empty array and have your function fill it; simply construct and return an array of results within ListViewModel itself.
We can't see how you trigger pagination. Is it through a scroll delegate when the user reaches the bottom or through a button tap, for example? If it's through a scroll delegate, then I'd disable that for now and see how many returns you get—I suspect one, instead of 3.
What is the particular reason you've ditched MVC for MVVM here? With MVC, you can get pagination up with just a few lines of code. I think MVVM is overkill for iOS applications and would advise against using it unless you have a compelling reason.

Why does my segue not wait until completion handler finished?

I have a page based app, using RootViewController, ModelViewController, DataViewController, and a SearchViewController.
In my searchViewController, I search for an item and then add or remove that Item to an array which is contained in a Manager class(and UserDefaults), which the modelViewController uses to instantiate an instance of DataViewController with the correct information loaded using the dataObject. Depending on whether an Item was added or removed, I use a Bool to determine which segue was used, addCoin or removeCoin, so that the RootViewController(PageView) will show either the last page in the array, (when a page is added) or the first (when removed).
Everything was working fine until I ran into an error which I cannot diagnose. The problem is that when I add a page, the app crashes, giving me an "unexpectedly found nil while unwrapping an Optional value" error.
This appears to be the problem function in the SearchViewController; self.performSegue(withIdentifier: "addCoin", sender: self) seems to be called instantly, even without the dispatch queue:
@objc func addButtonAction(sender: UIButton!) {
print("Button tapped")
if Manager.shared.coins.contains(dataObject) {
Duplicate()
} else if Manager.shared.coins.count == 5 {
max()
} else {
Manager.shared.addCoin(coin: dataObject)
CGPrices.shared.getData(arr: true, completion: { (success) in
print(Manager.shared.coins)
DispatchQueue.main.async {
self.performSegue(withIdentifier: "addCoin", sender: self)
}
})
}
searchBar.text = ""
}
Meaning that in my DataViewController, this function will find nil:
func getIndex() {
let index = CGPrices.shared.coinData.index(where: { $0.id == dataObject })!
dataIndex = index
}
I can't find out why it does not wait for completion.
I also get this error about threads:
[Assert] Cannot be called with asCopy = NO on non-main thread.
which is why I try to perform the push segue using the dispatch queue.
Here is my searchViewController full code:
import UIKit
class SearchViewController: UIViewController, UISearchBarDelegate {
let selectionLabel = UILabel()
let searchBar = UISearchBar()
let addButton = UIButton()
let removeButton = UIButton()
var filteredObject: [String] = []
var dataObject = ""
var isSearching = false
//Add Button Action.
@objc func addButtonAction(sender: UIButton!) {
print("Button tapped")
if Manager.shared.coins.contains(dataObject) {
Duplicate()
} else if Manager.shared.coins.count == 5 {
max()
} else {
Manager.shared.addCoin(coin: dataObject)
CGPrices.shared.getData(arr: true, completion: { (success) in
print(Manager.shared.coins)
DispatchQueue.main.async {
self.performSegue(withIdentifier: "addCoin", sender: self)
}
})
}
searchBar.text = ""
}
//Remove button action.
@objc func removeButtonActon(sender: UIButton!) {
print("Button tapped")
if Manager.shared.coins.contains(dataObject) {
Duplicate()
} else if Manager.shared.coins.count == 5 {
max()
} else {
Manager.shared.removeCoin(coin: dataObject)
self.performSegue(withIdentifier: "addCoin", sender: self)
}
searchBar.text = ""
}
//Prepare for segue, pass removeCoinSegue Bool depending on remove or addCoin.
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
if segue.identifier == "addCoin" {
if let destinationVC = segue.destination as? RootViewController {
destinationVC.addCoinSegue = true
}
} else if segue.identifier == "addCoin" {
if let destinationVC = segue.destination as? RootViewController {
destinationVC.addCoinSegue = false
}
}
}
//Remove button action.
@objc func removeButtonAction(sender: UIButton!) {
if Manager.shared.coins.count == 1 {
removeAlert()
} else {
Manager.shared.removeCoin(coin: dataObject)
print(Manager.shared.coins)
print(dataObject)
searchBar.text = ""
self.removeButton.isHidden = true
DispatchQueue.main.async {
self.performSegue(withIdentifier: "removeCoin", sender: self)
}
}
}
//Search/Filter the struct from CGNames, display both the Symbol and the Name but use the ID as dataObject.
func filterStructForSearchText(searchText: String, scope: String = "All") {
if !searchText.isEmpty {
isSearching = true
filteredObject = CGNames.shared.coinNameData.filter {
// if you need to search key and value and include partial matches
// $0.key.contains(searchText) || $0.value.contains(searchText)
// if you need to search caseInsensitively key and value and include partial matches
$0.name.range(of: searchText, options: .caseInsensitive) != nil || $0.symbol.range(of: searchText, options: .caseInsensitive) != nil
}
.map{ $0.id }
} else {
isSearching = false
print("NoText")
}
}
//Running filter function when text changes.
func searchBar(_ searchBar: UISearchBar, textDidChange searchText: String) {
filterStructForSearchText(searchText: searchText)
if isSearching == true && filteredObject.count > 0 {
addButton.isHidden = false
dataObject = filteredObject[0]
selectionLabel.text = dataObject
if Manager.shared.coins.contains(dataObject) {
removeButton.isHidden = false
addButton.isHidden = true
} else {
removeButton.isHidden = true
addButton.isHidden = false
}
} else {
addButton.isHidden = true
removeButton.isHidden = true
selectionLabel.text = "e.g. btc/bitcoin"
}
}
override func viewDidLoad() {
super.viewDidLoad()
//Setup the UI.
self.view.backgroundColor = .gray
setupView()
}
override func viewDidLayoutSubviews() {
}
//Hide keyboard
override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
self.view.endEditing(true)
}
//Alerts
func removeAlert() {
let alertController = UIAlertController(title: "Can't Remove", message: "\(dataObject) can't be deleted, add another to delete \(dataObject)", preferredStyle: .alert)
alertController.addAction(UIAlertAction(title: "Okay", style: .default, handler: nil))
self.present(alertController, animated: true, completion: nil)
}
func Duplicate() {
let alertController = UIAlertController(title: "Duplicate", message: "\(dataObject) is already in your pages!", preferredStyle: .alert)
alertController.addAction(UIAlertAction(title: "Okay", style: .default, handler: nil))
self.present(alertController, animated: true, completion: nil)
}
func max() {
let alertController = UIAlertController(title: "Maximum Reached", message: "\(dataObject) can't be added, you have reached the maximum of 5 coins. Please delete a coin to add another.", preferredStyle: .alert)
alertController.addAction(UIAlertAction(title: "Okay", style: .default, handler: nil))
self.present(alertController, animated: true, completion: nil)
}
}
and here is the DataViewController
import UIKit
class DataViewController: UIViewController {
#IBOutlet weak var dataLabel: UILabel!
//Variables and Objects.
//The dataObject carries the chosen cryptocurrencies ID from the CoinGecko API to use to get the correct data to load on each object.
var dataObject = String()
//The DefaultCurrency (gbp, eur...) chosen by the user.
var defaultCurrency = ""
//The Currency Unit taken from the exchange section of the API.
var currencyUnit = CGExchange.shared.exchangeData[0].rates.gbp.unit
var secondaryUnit = CGExchange.shared.exchangeData[0].rates.eur.unit
var tertiaryUnit = CGExchange.shared.exchangeData[0].rates.usd.unit
//Index of the dataObject
var dataIndex = Int()
//Objects
let cryptoLabel = UILabel()
let cryptoIconImage = UIImageView()
let secondaryPriceLabel = UILabel()
let mainPriceLabel = UILabel()
let tertiaryPriceLabel = UILabel()
//Custom Fonts.
let customFont = UIFont(name: "AvenirNext-Heavy", size: UIFont.labelFontSize)
let secondFont = UIFont(name: "AvenirNext-BoldItalic" , size: UIFont.labelFontSize)
//Setup Functions
//Get the index of the dataObject
func getIndex() {
let index = CGPrices.shared.coinData.index(where: { $0.id == dataObject })!
dataIndex = index
}
//Label
func setupLabels() {
//cryptoLabel from dataObject as name.
cryptoLabel.text = CGPrices.shared.coinData[dataIndex].name
//Prices from btc Exchange rate.
let btcPrice = CGPrices.shared.coinData[dataIndex].current_price!
let dcExchangeRate = CGExchange.shared.exchangeData[0].rates.gbp.value
let secondaryExchangeRate = CGExchange.shared.exchangeData[0].rates.eur.value
let tertiaryExchangeRate = CGExchange.shared.exchangeData[0].rates.usd.value
let realPrice = (btcPrice * dcExchangeRate)
let secondaryPrice = (btcPrice * secondaryExchangeRate)
let tertiaryPrice = (btcPrice * tertiaryExchangeRate)
secondaryPriceLabel.text = "\(secondaryUnit)\(String((round(1000 * secondaryPrice) / 1000)))"
mainPriceLabel.text = "\(currencyUnit)\(String((round(1000 * realPrice) /1000)))"
tertiaryPriceLabel.text = "\(tertiaryUnit)\(String((round(1000 * tertiaryPrice) / 1000)))"
}
//Image
func getIcon() {
let chosenImage = CGPrices.shared.coinData[dataIndex].image
let remoteImageUrl = URL(string: chosenImage)
guard let url = remoteImageUrl else { return }
URLSession.shared.dataTask(with: url) { (data, response, err) in
guard let data = data else { return }
do {
DispatchQueue.main.async {
self.cryptoIconImage.image = UIImage(data: data)
}
}
}.resume()
}
override func viewDidLoad() {
super.viewDidLoad()
// for family in UIFont.familyNames.sorted() {
// let names = UIFont.fontNames(forFamilyName: family)
// print("Family: \(family) Font names: \(names)")
// }
// Do any additional setup after loading the view, typically from a nib.
self.setupLayout()
self.getIndex()
self.setupLabels()
self.getIcon()
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
self.dataLabel!.text = dataObject
view.backgroundColor = .lightGray
}
}
Edit: CGPrices Class with getData method:
import Foundation
class CGPrices {
struct Coins: Decodable {
let id: String
let name: String
let symbol: String
let image: String
let current_price: Double?
let low_24h: Double?
//let price_change_24h: Double?
}
var coinData = [Coins]()
var defaultCurrency = ""
var coins = Manager.shared.coins
var coinsEncoded = ""
static let shared = CGPrices()
func encode() {
for i in 0..<coins.count {
coinsEncoded += coins[i]
if (i + 1) < coins.count { coinsEncoded += "%2C" }
}
print("encoded")
}
func getData(arr: Bool, completion: @escaping (Bool) -> ()) {
encode()
let urlJSON = "https://api.coingecko.com/api/v3/coins/markets?vs_currency=btc&ids=\(coinsEncoded)"
guard let url = URL(string: urlJSON) else { return }
URLSession.shared.dataTask(with: url) { (data, response, err) in
guard let data = data else { return }
do {
let coinsData = try JSONDecoder().decode([Coins].self, from: data)
self.coinData = coinsData
completion(arr)
} catch let jsonErr {
print("error serializing json: \(jsonErr)")
print(data)
}
}.resume()
}
func refresh(completion: () -> ()) {
defaultCurrency = UserDefaults.standard.string(forKey: "DefaultCurrency")!
completion()
}
}
I figured it out.
The problem was inside my getData method: I was not updating the coins array:
var coinData = [Coins]()
var defaultCurrency = ""
var coins = Manager.shared.coins
var coinsEncoded = ""
static let shared = CGPrices()
func encode() {
for i in 0..<coins.count {
coinsEncoded += coins[i]
if (i+1)<coins.count { coinsEncoded+="%2C" }
}
print("encoded")
}
I needed to add this line in getData:
func getData(arr: Bool, completion: @escaping (Bool) -> ()) {
//Adding this line to update the array so that the URL is appended correctly.
coins = Manager.shared.coins
encode()
let urlJSON = "https://api.coingecko.com/api/v3/coins/markets?vs_currency=btc&ids=\(coinsEncoded)"
This fixed finding nil in the DataViewController, but the app would still crash due to updating UI elements on a background thread, because the segue was performed inside the completion handler of the getData method. To fix this, I wrapped the performSegue call in DispatchQueue.main.async inside that completion handler in the addButton action, to ensure that everything is updated on the main thread, like so:
@objc func addButtonAction(sender: UIButton!) {
print("Button tapped")
if Manager.shared.coins.contains(dataObject) {
Duplicate()
} else if Manager.shared.coins.count == 5 {
max()
} else {
Manager.shared.addCoin(coin: dataObject)
print("starting")
CGPrices.shared.getData(arr: true) { (arr) in
print("complete")
print(CGPrices.shared.coinData)
//Here making sure it is updated on main thread.
DispatchQueue.main.async {
self.performSegue(withIdentifier: "addCoin", sender: self)
}
}
}
searchBar.text = ""
}
Thanks for all the comments as they helped me to figure this out, and I learned a lot in doing so. Hopefully this can help someone else in their thought process when debugging, as one can get so caught up in one area of a problem, and forget to take a step back and look to other areas.

extension may not contain stored properties and no member problems in swift

I am not able to understand why I am getting these errors. Could someone please help me? Here is the source code:
import UIKit
import AVFoundation
// MARK: - PlaySoundsViewController: AVAudioPlayerDelegate
extension PlaySoundsViewController: AVAudioPlayerDelegate {
var audioEngine = AVAudioEngine()
// MARK: Alerts
struct Alerts {
static let DismissAlert = "Dismiss"
static let RecordingDisabledTitle = "Recording disabled"
static let RecordginDisabledMessage = "Youve disabled this app from recording your microphone. Check settings"
static let RecodingFailedTitle = "Recording failed"
static let RecordingFailedMessage = "Something went wrong with the recording"
static let AudioRecordedError = "Audio Recorder Error"
static let AudioSessionError = "Audio Session Error"
static let AudioRecordingError = "Audio Recording Error"
static let AudioFileError = "Audio File Error"
static let AudioEngineError = "Audio Engine Error"
}
// MARK: PlayingState (raw values correspond to sender tags)
enum PlayingState { case playing, notPlaying }
// MARK: Audio Functions
func setupAudio() {
// initialize (recording) audio file
do {
audioFile = try AVAudioFile(forReading: recordedAudioURL as URL)
} catch {
showAlert(Alerts.AudioFileError, message: String(describing: error))
}
}
func playSound(rate: Float? = nil, pitch: Float? = nil, echo: Bool = false, reverb: Bool = false) {
// initialize audio engine components
audioEngine = AVAudioEngine()
// node for playing audio
audioPlayerNode = AVAudioPlayerNode()
audioEngine.attach(audioPlayerNode)
// node for adjusting rate/pitch
let changeRatePitchNode = AVAudioUnitTimePitch()
if let pitch = pitch {
changeRatePitchNode.pitch = pitch
}
if let rate = rate {
changeRatePitchNode.rate = rate
}
audioEngine.attach(changeRatePitchNode)
// node for echo
let echoNode = AVAudioUnitDistortion()
echoNode.loadFactoryPreset(.multiEcho1)
audioEngine.attach(echoNode)
// node for reverb
let reverbNode = AVAudioUnitReverb()
reverbNode.loadFactoryPreset(.cathedral)
reverbNode.wetDryMix = 50
audioEngine.attach(reverbNode)
// connect nodes
if echo == true && reverb == true {
connectAudioNodes(audioPlayerNode, changeRatePitchNode, echoNode, reverbNode, audioEngine.outputNode)
} else if echo == true {
connectAudioNodes(audioPlayerNode, changeRatePitchNode, echoNode, audioEngine.outputNode)
} else if reverb == true {
connectAudioNodes(audioPlayerNode, changeRatePitchNode, reverbNode, audioEngine.outputNode)
} else {
connectAudioNodes(audioPlayerNode, changeRatePitchNode, audioEngine.outputNode)
}
// schedule to play and start the engine!
audioPlayerNode.stop()
audioPlayerNode.scheduleFile(audioFile, at: nil) {
var delayInSeconds: Double = 0
if let lastRenderTime = self.audioPlayerNode.lastRenderTime, let playerTime = self.audioPlayerNode.playerTime(forNodeTime: lastRenderTime) {
if let rate = rate {
delayInSeconds = Double(self.audioFile.length - playerTime.sampleTime) / Double(self.audioFile.processingFormat.sampleRate) / Double(rate)
} else {
delayInSeconds = Double(self.audioFile.length - playerTime.sampleTime) / Double(self.audioFile.processingFormat.sampleRate)
}
}
// schedule a stop timer for when audio finishes playing
self.stopTimer = Timer(timeInterval: delayInSeconds, target: self, selector: #selector(PlaySoundsViewController.stopAudio), userInfo: nil, repeats: false)
RunLoop.main.add(self.stopTimer!, forMode: RunLoopMode.defaultRunLoopMode)
}
do {
try audioEngine.start()
} catch {
showAlert(Alerts.AudioEngineError, message: String(describing: error))
return
}
// play the recording!
audioPlayerNode.play()
}
func stopAudio() {
if let audioPlayerNode = audioPlayerNode {
audioPlayerNode.stop()
}
if let stopTimer = stopTimer {
stopTimer.invalidate()
}
configureUI(.notPlaying)
if let audioEngine = audioEngine {
audioEngine.stop()
audioEngine.reset()
}
}
// MARK: Connect List of Audio Nodes
func connectAudioNodes(_ nodes: AVAudioNode...) {
for x in 0..<nodes.count-1 {
audioEngine.connect(nodes[x], to: nodes[x+1], format: audioFile.processingFormat)
}
}
// MARK: UI Functions
func configureUI(_ playState: PlayingState) {
switch(playState) {
case .playing:
setPlayButtonsEnabled(false)
stopButton.isEnabled = true
case .notPlaying:
setPlayButtonsEnabled(true)
stopButton.isEnabled = false
}
}
func setPlayButtonsEnabled(_ enabled: Bool) {
snailButton.isEnabled = enabled
chipmunkButton.isEnabled = enabled
rabbitButton.isEnabled = enabled
vaderButton.isEnabled = enabled
echoButton.isEnabled = enabled
reverbButton.isEnabled = enabled
}
func showAlert(_ title: String, message: String) {
let alert = UIAlertController(title: title, message: message, preferredStyle: .alert)
alert.addAction(UIAlertAction(title: Alerts.DismissAlert, style: .default, handler: nil))
self.present(alert, animated: true, completion: nil)
}
}
The screenshot of the error is in the link below.
Screenshot!
You cannot declare var audioEngine = AVAudioEngine() inside an extension. Declare it inside the PlaySoundsViewController class instead.
Extensions are meant to augment behaviours, not fundamentally change a class.
Source: Why can't you add stored properties to extensions?
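A short sketch of that split (audioPlayerNode, audioFile, and stopTimer are assumed from the question's code):
import UIKit
import AVFoundation

class PlaySoundsViewController: UIViewController {
    // Stored properties must live in the type declaration itself.
    var audioEngine = AVAudioEngine()
    var audioPlayerNode: AVAudioPlayerNode!
    var audioFile: AVAudioFile!
    var stopTimer: Timer?
}

extension PlaySoundsViewController: AVAudioPlayerDelegate {
    // Extensions may add methods and computed properties, but no stored state.
    var engineIsRunning: Bool { return audioEngine.isRunning }
}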

Displaying Artwork for .MP3 file

I am currently trying to display the album artwork for a locally stored .MP3 track in a UIImageView. Does anyone know how to fetch this artwork in Swift in order to accomplish this?
I have found this solution (iOS AVFoundation: How do I fetch artwork from an mp3 file?), but the code is written in Objective-C. I simply want to grab the image embedded in my MP3 and display it in my UIImageView.
I've also looked at the API documentation for MPMediaItemArtwork and found an example that accomplishes the same thing in Objective-C here (http://www.codeitive.com/0zHjkUjUWX/not-able-to-get-the-uiimage-from-mpmediaitempropertyartwork.html), but I cannot come up with a solution. My code is as follows:
import UIKit
import AVFoundation
import MediaPlayer
class ViewController: UIViewController {
let audioPath:NSURL! = NSBundle.mainBundle().URLForResource("SippinOnFire", withExtension: "mp3")
@IBOutlet var artistImage: UIImageView!
@IBOutlet var trackLabel: UILabel!
@IBOutlet var artistLabel: UILabel!
@IBOutlet var sliderValue: UISlider!
var player:AVAudioPlayer = AVAudioPlayer()
@IBAction func play(sender: AnyObject) {
let audioInfo = MPNowPlayingInfoCenter.defaultCenter()
println(audioInfo)
player.play()
//println("Playing \(audioPath)")
let playerItem = AVPlayerItem(URL: audioPath)
let metadataList = playerItem.asset.metadata as! [AVMetadataItem]
for item in metadataList {
if let stringValue = item.value {
println(item.commonKey)
if item.commonKey == "title" {
trackLabel.text = stringValue as? String
}
if item.commonKey == "artist" {
artistLabel.text = stringValue as? String
}
if item.commonKey == "artwork" {
if let audioImage = UIImage(data: item.value as! NSData) {
let audioArtwork = MPMediaItemArtwork(image: audioImage)
println(audioImage.description)
}
}
}
}
}
@IBAction func pause(sender: AnyObject) {
player.pause()
}
@IBAction func stop(sender: AnyObject) {
player.stop()
player.currentTime = 0;
}
@IBAction func sliderChanged(sender: AnyObject) {
player.volume = sliderValue.value
}
override func viewDidLoad() {
super.viewDidLoad()
var error:NSError? = nil
player = AVAudioPlayer(contentsOfURL: audioPath!, error: &error)
player.volume = 0.5;
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
}
Here is a screenshot of my sample .mp3 file. As you can see, there is indeed album artwork visible in the "Get Info" panel in Finder. I've also opened the .mp3 in iTunes to make sure, and have confirmed there is artwork in its "Get Info" section there as well, under the "Artwork" tab.
However, when trying to use the commonKey to assign the image to my imageView I find that there is no commonKey for "artwork".
Thanks
Change your snippet of code into this (I already tested it):
I added commented-out println lines at places of interest; feel free to uncomment them to see what is happening.
for item in metadataList {
if item.commonKey == nil{
continue
}
if let key = item.commonKey, let value = item.value {
//println(key)
//println(value)
if key == "title" {
trackLabel.text = value as? String
}
if key == "artist" {
artistLabel.text = value as? String
}
if key == "artwork" {
if let audioImage = UIImage(data: value as! NSData) {
//println(audioImage.description)
artistImage.image = audioImage
}
}
}
}
UPDATE: A bit of clean up of this code
for item in metadataList {
guard let key = item.commonKey, let value = item.value else{
continue
}
switch key {
case "title" : trackLabel.text = value as? String
case "artist": artistLabel.text = value as? String
case "artwork" where value is NSData : artistImage.image = UIImage(data: value as! NSData)
default:
continue
}
}
UPDATE: For Swift 4
for item in metadataList {
guard let key = item.commonKey?.rawValue, let value = item.value else{
continue
}
switch key {
case "title" : trackLabel.text = value as? String
case "artist": artistLabel.text = value as? String
case "artwork" where value is Data : artistImage.image = UIImage(data: value as! Data)
default:
continue
}
}
edit/update Swift 4 or later:
import MediaPlayer
var nowPlayingInfo: [String: Any] = [:]
let playerItem = AVPlayerItem(url: url)
let metadataList = playerItem.asset.metadata
for item in metadataList {
switch item.commonKey {
case .commonKeyTitle?:
nowPlayingInfo[MPMediaItemPropertyTitle] = item.stringValue ?? ""
case .commonKeyType?:
nowPlayingInfo[MPMediaItemPropertyGenre] = item.stringValue ?? ""
case .commonKeyAlbumName?:
nowPlayingInfo[MPMediaItemPropertyAlbumTitle] = item.stringValue ?? ""
case .commonKeyArtist?:
nowPlayingInfo[MPMediaItemPropertyArtist] = item.stringValue ?? ""
case .commonKeyArtwork?:
if let data = item.dataValue,
let image = UIImage(data: data) {
nowPlayingInfo[MPMediaItemPropertyArtwork] = MPMediaItemArtwork(boundsSize: image.size) { _ in image }
}
case .none: break
default: break
}
}
let audioInfo = MPNowPlayingInfoCenter.default()
audioInfo.nowPlayingInfo = nowPlayingInfo
Note: You will have to invoke beginReceivingRemoteControlEvents(), otherwise it will not work on an actual device. You will also need to enable your app's Background Modes capability (Audio and AirPlay), set your AVAudioSession category to .playback, and set the session active:
do {
try AVAudioSession.sharedInstance().setCategory(.playback, mode: .default, options: [.mixWithOthers, .allowAirPlay])
print("Playback OK")
try AVAudioSession.sharedInstance().setActive(true)
print("Session is Active")
} catch {
print(error)
}
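And the remote-control part mentioned above might look like this (a sketch; call it once, for example when playback starts):
import UIKit

// Needed so the nowPlayingInfo set above (including artwork) is picked up on a real device.
UIApplication.shared.beginReceivingRemoteControlEvents()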
Try this:
It appears that sometimes iOS 8 returns nil on the first attempt to obtain this info:
if let audioCenter = MPNowPlayingInfoCenter.defaultCenter(){
if let audioInfo = audioCenter.nowPlayingInfo{
if let artwork = audioInfo[MPMediaItemPropertyArtwork] as? MPMediaItemArtwork
{
var image: UIImage? = artwork.imageWithSize(artwork.bounds.size)
if image == nil {
image = artwork.imageWithSize(artwork.bounds.size);
}
if image != nil{
println("image loaded")
}
}
}
}