I've been trying to use the microphone with AudioKit.
The code compiles and runs, and it also requests permission to access the microphone, but the frequency and amplitude readings are all 0.
Here is the class I wrote to dispatch microphone readings as events.
I've seen other questions that seemed relevant and tried their suggestions, such as changing the input, but no luck.
My guess is that either I'm not understanding the AudioKit lifecycle correctly, or the @objc attributes change the behaviour.
This code used to work on older iOS versions, but on 13.3 something seems to have changed. (The reason for @objc is that I need to use this class with the React Native bridge.)
I thought this could be related to Info.plist, but I have already configured the microphone privacy entry there with a usage string, and still no luck.
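For reference, the Info.plist entry I mean is the standard NSMicrophoneUsageDescription key, with something like:
<key>NSMicrophoneUsageDescription</key>
<string>This app uses the microphone to measure pitch and loudness.</string>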
Am I missing something?
Thanks
Please check the code here:
import Foundation
import AudioKit
@objc(AudioKitWrapper)
class AudioKitWrapper: NSObject {
@objc var mic: AKMicrophone!
@objc var timer: Timer!
@objc var tracker: AKFrequencyTracker!
override init(){
super.init()
do {
try AKSettings.setSession(category: .playAndRecord)
} catch {
AKLog("Could not set session category.")
}
AKSettings.defaultToSpeaker = true
if let inputs = AudioKit.inputDevices {
do {
print(inputs)
try AudioKit.setInputDevice(inputs[0])
AKSettings.audioInputEnabled = true
mic = AKMicrophone()
try mic?.setDevice(inputs[0])
}
catch {
print("microphone not supported")
}
}
try? AudioKit.start()
mic?.stop()
self.tracker = AKFrequencyTracker.init(mic, hopSize: 4_096, peakCount: 20)
timer = Timer.scheduledTimer(timeInterval: 0.1, target: self, selector: #selector(self.loop), userInfo: nil, repeats: true)
}
@objc func start() {
if (mic?.isStarted == true) {
print("stop")
mic?.stop()
try! AudioKit.stop()
}
else {
print("start")
mic?.start()
// try! AudioKit.start()
var silence = AKBooster(tracker, gain: 0)
AudioKit.output = silence
}
}
@objc static func requiresMainQueueSetup() -> Bool {
return false
}
@objc func loop() {
if (mic?.isStarted == true){
print(self.tracker.amplitude, self.tracker.frequency)
EventEmitter.sharedInstance.dispatch(name: "onChange",
body: ["amplitude": tracker.amplitude, "frequency":tracker.frequency])
}
}
}
I watched the videos on going from sandbox to production and changed a few things.
I realised the main issue in my code was how AKBooster was used: I changed the variable declarations to what the videos showed, and it started to work.
Here is the Swift code that works:
import Foundation
import AudioKit
@objc(AudioKitWrapper)
class AudioKitWrapper: NSObject {
@objc var mic: AKMicrophone!
@objc var timer: Timer!
@objc var tracker: AKFrequencyTracker!
@objc var silence: AKBooster!
@objc var micCopy1: AKBooster!
@objc override init(){
super.init()
mic = AKMicrophone()
micCopy1 = AKBooster(mic)
do {
try AKSettings.setSession(category: .playAndRecord)
AKSettings.defaultToSpeaker = true
} catch {
print("Could not set session category.")
return
}
if let inputs = AudioKit.inputDevices {
do {
try AudioKit.setInputDevice(inputs[0])
try mic.setDevice(inputs[0])
}
catch {
print("microphone not supported")
return
}
}
do {
tracker = AKFrequencyTracker.init(micCopy1, hopSize: 4_096, peakCount: 20)
silence = AKBooster(tracker, gain: 0)
AudioKit.output = silence
try AudioKit.start()
mic.stop()
}
catch {
print("AudioKit did not start")
}
timer = Timer.scheduledTimer(timeInterval: 0.1, target: self, selector: #selector(loop), userInfo: nil, repeats: true)
}
@objc func start() {
print("started?", mic?.isStarted == true)
if (mic?.isStarted == true) {
print("stop")
mic?.stop()
}
else {
print("start")
mic.start()
}
}
@objc static func requiresMainQueueSetup() -> Bool {
return true
}
@objc func loop() {
if (mic.isStarted == true){
print("dispatch", tracker.frequency, tracker.amplitude)
EventEmitter.sharedInstance.dispatch(name: "onChange",
body: ["amplitude": self.tracker.amplitude, "frequency": self.tracker.frequency])
}
}
}
Also, for those interested in making AudioKit work with React Native, here is the Objective-C bridge code:
AudioKitBridge.m
#import <Foundation/Foundation.h>
#import "React/RCTBridgeModule.h"
@interface RCT_EXTERN_MODULE(AudioKitWrapper, NSObject)
RCT_EXTERN_METHOD(start)
@end
AudioKitBridge.h
#ifndef AudioKitBridge_h
#define AudioKitBridge_h
#import <React/RCTBridgeModule.h>
@interface AudioKitBridge: NSObject
@end
#endif /* AudioKitBridge_h */
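The EventEmitter.sharedInstance used in loop() above is not part of AudioKit or React Native; it is a small RCTEventEmitter subclass along these lines (a sketch with my own names such as sharedInstance and dispatch; it also has to be exposed with RCT_EXTERN_MODULE, just like AudioKitWrapper):
EventEmitter.swift
import Foundation
// RCTEventEmitter comes from React Native (e.g. via the bridging header).
@objc(EventEmitter)
class EventEmitter: RCTEventEmitter {
    // Keeps a reference to the bridge-created instance so native code can reach it.
    public static var sharedInstance: EventEmitter!
    override init() {
        super.init()
        EventEmitter.sharedInstance = self
    }
    // Names of the events this module may send to JavaScript.
    override func supportedEvents() -> [String]! {
        return ["onChange"]
    }
    // Thin wrapper around RCTEventEmitter's sendEvent(withName:body:).
    func dispatch(name: String, body: Any?) {
        sendEvent(withName: name, body: body)
    }
}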
Related
I am trying to calculate the download and upload speed for my network adapter using Swift.
The code seems to work, however, the values are off compared to speed tests.
Here is my code:
@main
class AppDelegate: NSObject, NSApplicationDelegate
{
var timer: Timer?
var old: Int64 = 0
func applicationDidFinishLaunching(_ aNotification: Notification)
{
self.timer = Timer.scheduledTimer(timeInterval: 1.0, target: self, selector: #selector(self.refresh), userInfo: nil, repeats: true)
}
func applicationWillTerminate(_ aNotification: Notification)
{
self.timer?.invalidate()
self.timer = nil
}
func applicationSupportsSecureRestorableState(_ app: NSApplication) -> Bool {
return true
}
private func getBytesInfo(_ pointer: UnsafeMutablePointer<ifaddrs>) -> (upload: Int64, download: Int64)?
{
guard pointer.pointee.ifa_addr.pointee.sa_family == UInt8(AF_LINK) else { return nil }
let data: UnsafeMutablePointer<if_data>? = unsafeBitCast(pointer.pointee.ifa_data, to: UnsafeMutablePointer<if_data>.self)
return (upload: Int64(data?.pointee.ifi_obytes ?? 0), download: Int64(data?.pointee.ifi_ibytes ?? 0))
}
@objc fileprivate func refresh()
{
var interfaceAddresses: UnsafeMutablePointer<ifaddrs>? = nil
guard getifaddrs(&interfaceAddresses) == 0 else { return }
var pointer = interfaceAddresses
while pointer != nil
{
defer { pointer = pointer?.pointee.ifa_next }
if String(cString: pointer!.pointee.ifa_name) != "en1" // Please adjust and use your primary network adapter.
{
continue
}
if let info = getBytesInfo(pointer!)
{
if Double(info.download) - Double(self.old) > 0.0
{
let value = UInt64(info.download - self.old)
Swift.print(Double(value) / 1024.0 / 1024.0) // MB/s
Swift.print(Double(value * 8) / 1000.0 / 1000.0) // Mb/s
Swift.print()
}
self.old = Int64(info.download)
}
}
freeifaddrs(interfaceAddresses)
}
}
Here is the output (I have tested this multiple times, there are no other processes running that would explain such a big difference):
I am testing on macOS Monterey. I have tested two machines, both show the same behaviour.
Is my code wrong?
I am not using any VPN. I have checked both Wi-Fi and Ethernet.
I'm using AudioKit 4.10.
I use this simple code to sample frequency and amplitude from a microphone:
import AudioKit
protocol MicAnalyzerDelegate {
/**
* The MicAnalyzer calls this delegate function.
*/
func micAnalyzerInfos(infos: (frequency : Double, amplitude : Double)?)
}
class MicAnalyzer: NSObject {
// MARK: - Properties
private static var micAnalyzer: MicAnalyzer = MicAnalyzer()
var delegate: MicAnalyzerDelegate?
fileprivate var mic: AKMicrophone!
fileprivate var tracker: AKFrequencyTracker!
fileprivate var silence: AKBooster!
fileprivate var timer: Timer?
// MARK: - Initializations
private override init() {
super.init()
AKSettings.audioInputEnabled = true
self.initMicTracker()
}
private func initMicTracker(){
/* Add the built-in microphone. */
mic = AKMicrophone()
/* Add a tracker */
tracker = AKFrequencyTracker(mic)
silence = AKBooster(tracker, gain: 0)
}
// MARK: - Accessors
class func shared() -> MicAnalyzer {
return micAnalyzer
}
func startMonitoring() {
/* Start the microphone and analyzer. */
AudioKit.output = silence
do {
try AudioKit.start()
} catch {
AKLog("AudioKit did not start!")
}
/* Initialize and schedule a new run loop timer. */
timer = Timer.scheduledTimer(timeInterval: 0.1,
target: self,
selector: #selector(MicAnalyzer.tick),
userInfo: nil,
repeats: true)
}
// Stopped as described here: https://github.com/AudioKit/AudioKit/issues/1716
func stopMonitoring() {
do {
AudioKit.disconnectAllInputs()
try AudioKit.stop()
try AudioKit.shutdown()
} catch {
print("AudioKit did not stop")
}
AudioKit.output = nil
// Interrupts polling on tick infos
timer?.invalidate()
timer = nil
}
var infos: (frequency : Double, amplitude : Double)? {
if(!tracker.isStopped){
let frequency = tracker.frequency
let amplitude = tracker.amplitude
return (frequency: frequency, amplitude: amplitude)
} else {
return nil
}
}
/* Call the delegate. */
@objc func tick() {
self.delegate?.micAnalyzerInfos(infos: infos)
}
}
Calling the following code in this order, I get this situation:
MicAnalyzer.shared().startMonitoring() // --> Everything works good
MicAnalyzer.shared().stopMonitoring() // --> I think it is stopped correctly
MicAnalyzer.shared().startMonitoring() // --> From now on the delegate is called but I get always 0 as frequency and amplitude
Why, after calling AudioKit.start() again (inside MicAnalyzer.shared().startMonitoring()), can't I sample frequency and amplitude anymore? How should I restart the whole thing?
If I re-initialize the variables mic, tracker and silence each time, the memory grows, as they are not really deallocated.
This is a second version of the same code idea:
import AudioKit
protocol MicAnalyzerDelegate : class {
/**
* The tuner calls this delegate function
*/
func micAnalyzerInfos(infos: (frequency : Double, amplitude : Double)?)
}
// https://audiokit.io/examples/MicrophoneAnalysis/
class MicAnalyzer: NSObject {
// MARK: - Properties
private weak var delegate: MicAnalyzerDelegate?
fileprivate var mic: AKMicrophone!
fileprivate var tracker: AKFrequencyTracker!
fileprivate var silence: AKBooster!
fileprivate var timer: Timer?
// MARK: - Initializations
init(delegate: MicAnalyzerDelegate) {
print("Init called!!!")
self.delegate = delegate
super.init()
}
deinit {
print("Deinit called!!!")
}
// MARK: - Accessors
func startMonitoring() {
AKSettings.audioInputEnabled = true
/* Add the built-in microphone. */
mic = AKMicrophone()
/* Add a tracker */
tracker = AKFrequencyTracker(mic)
silence = AKBooster(tracker, gain: 0)
/* Start the microphone and analyzer. */
AudioKit.output = silence
do {
try AudioKit.start()
} catch {
AKLog("AudioKit did not start!")
}
/* Initialize and schedule a new run loop timer. */
timer = Timer.scheduledTimer(timeInterval: 1,
target: self,
selector: #selector(MicAnalyzer.tick),
userInfo: nil,
repeats: true)
}
func stopMonitoring() {
do {
AudioKit.disconnectAllInputs()
try AudioKit.stop()
try AudioKit.shutdown()
} catch {
print("AudioKit did not stop")
}
AudioKit.output = nil
// Interrupts polling on tick infos
timer?.invalidate()
timer = nil
silence.stop()
silence = nil
tracker.stop()
tracker = nil
mic.stop()
mic = nil
}
var infos: (frequency : Double, amplitude : Double)? {
if(!tracker.isStopped){
let frequency = tracker.frequency
let amplitude = tracker.amplitude
return (frequency: frequency, amplitude: amplitude)
} else {
return nil
}
}
/* Call the delegate. */
@objc func tick() {
print(infos?.frequency ?? "0.0")
//self.delegate.micAnalyzerInfos(infos: infos)
}
}
And then I call that second version like this, in order:
override func viewDidAppear() {
super.viewDidAppear()
print("viewDidAppear!!!")
tuner = MicAnalyzer(delegate: self)
tuner?.startMonitoring()
}
override func viewDidDisappear() {
super.viewDidDisappear()
print("viewDidDisappear!!!")
tuner?.stopMonitoring()
tuner = nil
}
You need to make sure the AKFrequencyTracker is part of the signal chain, as that's an AVAudioEngine requirement.
This means that if you do:
do {
AudioKit.disconnectAllInputs()
try AudioKit.stop()
try AudioKit.shutdown()
} catch {
// error handler
}
AudioKit.output = nil
you have to call initMicTracker again to put the AKFrequencyTracker back in the signal chain. So, to restart you'd do:
initMicTracker
startMonitoring
This is because your stopMonitoring called AudioKit.disconnectAllInputs(), which effectively stops and tears down the mic/tracker chain.
Since you know you already created instances of AKMicrophone, AKFrequencyTracker and AKBooster, you can also free them from memory:
func stopMonitoring() {
...
self.mic = nil
self.tracker = nil
self.silence = nil
}
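Putting the restart together, a minimal sketch (assuming the first version of MicAnalyzer above, with its initMicTracker() method; restartMonitoring is my own name, not an AudioKit API) could look like this:
// Hypothetical convenience method on MicAnalyzer.
func restartMonitoring() {
    // Rebuild the mic -> tracker -> silence chain that stopMonitoring() tore down.
    initMicTracker()
    // Sets AudioKit.output = silence, starts the engine and reschedules the timer.
    startMonitoring()
}
With that in place, startMonitoring() → stopMonitoring() → restartMonitoring() should keep the tracker in the signal chain on each restart.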
After calling AudioKit.stop(), and then subsequently calling AudioKit.start(), I'm unable to reconnect my AKMixer/AKPlayer to AudioKit. It appears that the engine starts successfully but no sound is produced when I call player.play(). I'm not quite sure what I'm missing. I've boiled it all down to a simple demo project and I've included the most important sections of code below.
View controller
class ViewController: UIViewController {
public let tickSound = MySound(url: "tick.wav")
@IBAction func stopAudioKit(_ sender: Any) {
try! AudioKit.stop()
print("AudioKit stopped")
}
@IBAction func playSound(_ sender: Any) {
tickSound.play()
}
}
Sound Object
class MySound: NSObject {
private static var mixer: AKMixer = AKMixer()
private var player: AKPlayer?
init(url: String) {
super.init()
if let file = try? AKAudioFile(readFileName: url) {
self.player = AKPlayer(audioFile: file)
self.player?.buffering = .always
MySound.mixer.connect(input: self.player)
}
}
func play() {
if AudioKit.output !== MySound.mixer {
AudioKit.output = MySound.mixer
}
if !AudioKit.engine.isRunning {
do {
try AudioKit.start()
} catch {
assert(false)
}
}
if AudioKit.engine.isRunning {
player?.play()
} else {
assert(false)
}
}
}
Github demo project: https://github.com/rednebmas/AudioKitDemo
Thank you!
I have created the following coin toss program, which plays an audio file with each coin toss every second. I'd like to know how to change it so that the audio only plays when the result changes from heads to tails or vice versa, not every second as it does now. Any help is greatly appreciated.
import UIKit
import AVFoundation
class ViewController: UIViewController {
var times: Timer!
var timer: Timer!
var coinFlip : Int = 0
var coinResult : String = ""
var audioPlayer : AVAudioPlayer!
let soundArray = ["note1", "note7"]
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view, typically from a nib.
times = Timer.scheduledTimer(timeInterval: 1, target: self, selector: #selector(fiftyFifty), userInfo: nil, repeats: true)
timer = Timer.scheduledTimer(timeInterval: 1, target: self, selector: #selector(result), userInfo: nil, repeats: true)
result()
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
func fiftyFifty() -> Int {
coinFlip = Int(arc4random_uniform(2))
return coinFlip
}
func result() -> String {
if coinFlip == 1 {
coinResult = "Heads"
print("Heads")
playSound(soundFileName: soundArray[0])
}
else {
coinResult = "Tails"
print("Tails")
playSound(soundFileName: soundArray[1])
}
}
func playSound(soundFileName : String) {
let soundURL = Bundle.main.url(forResource: soundFileName, withExtension: "wav")
do {
audioPlayer = try AVAudioPlayer(contentsOf: soundURL!)
}
catch {
print(error)
}
audioPlayer.play()
}
}
I've now changed it to add a new variable, coinResult, which result() sets as its output. Not sure if this helps, but hopefully it might.
If I understood correctly, you want to play a sound when the coin flip result changes; in this case you can simply change your coinFlip declaration to this:
var coinFlip : Int = 0 {
didSet {
// didSet runs on every assignment, so only react when the value actually changed.
if coinFlip != oldValue {
result()
}
}
}
didSet is a property observer that lets you respond to changes in a property's value; it is called immediately after the new value is stored.
Note that it fires on every assignment, even when the new value equals the old one, which is why the oldValue comparison above matters.
You can read more about property observers in Apple's documentation.
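As a small, standalone illustration (a made-up Score type, not from your code):
struct Score {
    var points: Int = 0 {
        didSet {
            // oldValue is the value the property had before the assignment.
            if points != oldValue {
                print("points changed from \(oldValue) to \(points)")
            }
        }
    }
}

var score = Score()
score.points = 10   // prints: points changed from 0 to 10
score.points = 10   // didSet still runs, but the value did not change, so nothing prints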
EDIT: Your code will be like this:
import UIKit
import AVFoundation
class ViewController: UIViewController {
var times: Timer!
var timer: Timer!
// Change this
var coinFlip : Int = 0 {
didSet {
// Only react when the flip result actually changed.
if coinFlip != oldValue {
result()
}
}
}
var audioPlayer : AVAudioPlayer!
let soundArray = ["note1", "note7"]
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view, typically from a nib.
times = Timer.scheduledTimer(timeInterval: 1, target: self, selector: #selector(fiftyFifty), userInfo: nil, repeats: true)
// The second timer is no longer needed: result() is now called from didSet when the flip changes.
result()
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
func fiftyFifty() -> Int {
coinFlip = Int(arc4random_uniform(2))
return coinFlip
}
func result() {
if coinFlip == 1 {
print("Heads")
playSound(soundFileName: soundArray[0])
}
else {
print("Tails")
playSound(soundFileName: soundArray[1])
}
}
func playSound(soundFileName : String) {
let soundURL = Bundle.main.url(forResource: soundFileName, withExtension: "wav")
do {
audioPlayer = try AVAudioPlayer(contentsOf: soundURL!)
}
catch {
print(error)
}
audioPlayer.play()
}
}
I want to create a DirectoryWatcher class that detects whether a directory's first-level content has changed and then calls a delegate. This will also run on another thread.
Here is my code:
protocol DirectoryWatcherDelegate {
func directoryDidChange(path: String)
}
class DirectoryWatcher {
private(set) var path = ""
private let timer: NSTimer
private let fm = NSFileManager.defaultManager()
private var previousContent: [String]
let delegate: DirectoryWatcherDelegate?
init(initPath: String) {
path = initPath
timer = NSTimer.scheduledTimerWithTimeInterval(10, target: self, selector: Selector("checkDir"), userInfo: nil, repeats: true)
}
func start() {
previousContent = getContentAtPath()
timer.fire()
}
func stop() {
timer.invalidate()
}
private func checkDir() {
if previousContent != getContentAtPath() {
dispatch_async(dispatch_get_main_queue(), { () -> Void in
if let delegate = self.delegate {
delegate.directoryDidChange(self.path)
} else {
print("No delegate has been assigned")
}
})
}
}
private func getContentAtPath() -> [String] {
var content: [String]
do {
content = try fm.contentsOfDirectoryAtPath(path)
} catch {
content = []
}
return content
}
}
My problem is in the init method, where I initialize the NSTimer. Because this is the init method, I am not able to use self, because it is not yet fully initialized.
How can I fix this?
BTW: The error messages I am getting are:
The variable self.timer used before initialized
and
Return from initializer without initializing all stored properties
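For what it's worth, a common way around this (just a sketch using the question's Swift 2 era APIs, not a definitive fix) is to make the timer an optional var and only create it in start(), so self is fully initialized before it is used as the timer's target. Note that the class also needs to inherit from NSObject for the selector-based NSTimer to find checkDir at runtime:
import Foundation

class DirectoryWatcher: NSObject {
    private(set) var path = ""
    private var timer: NSTimer?                    // optional, so init has nothing to set up
    private let fm = NSFileManager.defaultManager()
    private var previousContent: [String] = []

    init(initPath: String) {
        path = initPath
        super.init()                               // self is not used before this point
    }

    func start() {
        previousContent = getContentAtPath()
        // self is fully initialized here, so it can safely be the timer's target.
        timer = NSTimer.scheduledTimerWithTimeInterval(10, target: self, selector: Selector("checkDir"), userInfo: nil, repeats: true)
    }

    func stop() {
        timer?.invalidate()
        timer = nil
    }

    @objc private func checkDir() {
        // Same comparison and delegate call as in the question (delegate omitted here for brevity).
    }

    private func getContentAtPath() -> [String] {
        return (try? fm.contentsOfDirectoryAtPath(path)) ?? []
    }
}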