How Do I Replay Oscillator With Envelope in AudioKit? - swift

I have the player below that makes a short beep. However, `Player.shared.play()` plays only the first time;
I can't trigger the sound again on demand.
Any help would be appreciated.
import Foundation
import AudioKit
// Question code: a singleton AKOscillator -> AKAmplitudeEnvelope -> AKPanner chain.
// The beep plays once at init, but play() cannot retrigger it (see answer below).
class Player {
static let shared = Player()
let osc = AKOscillator()
let env:AKAmplitudeEnvelope
var panner = AKPanner()
init() {
osc.amplitude = 0.3
env = AKAmplitudeEnvelope(osc)
// Very short attack/decay with ZERO sustain: the tone is audible only during
// the ~0.02 s attack+decay window, i.e. a click-like beep.
env.attackDuration = 0.01
env.decayDuration = 0.01
env.sustainLevel = 0.0
env.releaseDuration = 0.01
panner = AKPanner(env)
AudioKit.output = panner
// NOTE(review): `try!` crashes if the engine fails to start.
try! AudioKit.start()
osc.start()
env.start()
panner.start()
}
func play() {
// Stopping and immediately restarting gives the nodes no time to render;
// per the answer below, a short delay between start and stop is required,
// which is why this only sounds the first time.
osc.stop()
osc.start()
env.stop()
env.start()
}
}

@jl303,
For some reason, you have to add a delay between the starting and stopping of the oscillator and the envelope. I added it with a DispatchQueue delay like this:
// Retriggerable beep: start the oscillator and envelope, then stop them after a
// short delay so the envelope's attack/decay phase can actually be heard.
func playOsc() {
osc.start()
env.start()
// The delay between start and stop is what makes retriggering work; stopping
// immediately (as in the original play()) silences every call after the first.
// [unowned self] is acceptable for a long-lived singleton — presumably Player
// outlives the 0.1 s callback; verify if Player can ever be deallocated.
DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) { [unowned self] in
self.osc.stop()
self.env.stop()
}
}
Here's the revised Player class in context:
import Foundation
import AudioKit
/// Singleton audio player: AKOscillator -> AKAmplitudeEnvelope -> AKPanner.
/// Produces a short, retriggerable beep via `playOsc()`.
class Player {
    static let sharedInstance = Player()
    let osc = AKOscillator()
    let env: AKAmplitudeEnvelope
    var panner = AKPanner()

    init() {
        AKSettings.bufferLength = .medium
        AKSettings.playbackWhileMuted = true
        do {
            try AKSettings.setSession(category: .playAndRecord, with: [.defaultToSpeaker, .allowBluetooth, .mixWithOthers])
        } catch {
            AKLog("Could not set session category.")
        }
        osc.amplitude = 0.3
        env = AKAmplitudeEnvelope(osc)
        // Short attack/decay with zero sustain: the tone is audible only during
        // the ~0.02 s attack+decay window, i.e. a click-like beep.
        env.attackDuration = 0.01
        env.decayDuration = 0.01
        env.sustainLevel = 0.0
        env.releaseDuration = 0.01
        panner = AKPanner(env)
        AudioKit.output = panner
        // Handle a start failure the same way as the session-category error
        // above instead of crashing via `try!`.
        do {
            try AudioKit.start()
        } catch {
            AKLog("AudioKit did not start.")
        }
    }

    /// Plays one beep. The delayed stop gives the envelope time to render its
    /// attack/decay; stopping immediately after starting silences the retrigger.
    func playOsc() {
        osc.start()
        env.start()
        // [unowned self] is safe here: the shared singleton outlives the
        // pending 50 ms callback.
        DispatchQueue.main.asyncAfter(deadline: .now() + 0.05) { [unowned self] in
            self.osc.stop()
            self.env.stop()
        }
    }
}
Here's the ViewController to trigger the Oscillator sound on command:
import UIKit
/// Hosts the button that triggers the oscillator beep.
class ViewController: UIViewController {
    var player = Player.sharedInstance

    // Was garbled as `#IBOutlet` in the original paste; Swift attributes use `@`.
    @IBOutlet weak var oscPlayStopButton: UIButton!

    override func viewDidLoad() {
        super.viewDidLoad()
        setUpButtonStyle()
    }

    /// Rounded, blue-bordered look for the play button.
    fileprivate func setUpButtonStyle() {
        oscPlayStopButton.layer.cornerRadius = 12.0
        oscPlayStopButton.layer.borderColor = UIColor.systemBlue.cgColor
        oscPlayStopButton.layer.borderWidth = 1.0
    }

    @IBAction func playOscAction(_ sender: UIButton) {
        player.playOsc()
    }
}
You can download the Xcode project here:
https://github.com/markjeschke/osc-player

Related

How can I monitor currentPosition of AVMIDIPlayer, please?

I'm trying to make a simple music player. I want the label to display the current position of the AVMIDIPlayer. With my code the label is only updated once at the very beginning:
import UIKit
import AVFoundation
/// Question code: plays a MIDI file and shows the current position.
/// The label is only set once, in viewDidLoad — the subject of the question.
class ViewController: UIViewController {
    var player: AVMIDIPlayer = AVMIDIPlayer()
    // didSet mirrors the value into the label whenever playerTime changes.
    var playerTime: Double = 999 {
        didSet {
            label.text = String(playerTime)
        }
    }

    // Was garbled as `#IBOutlet`/`#IBAction` in the original paste.
    @IBOutlet var label: UILabel!

    @IBAction func Play(_ sender: Any) {
        player.play()
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view.
        do {
            // Straight quotes here — the paste used typographic quotes
            // (“song”), which do not compile.
            let audioPath = Bundle.main.path(forResource: "song", ofType: "mid")
            try player = AVMIDIPlayer(contentsOf: NSURL(fileURLWithPath: audioPath!) as URL, soundBankURL: nil)
            // Runs exactly once, which is why the label never updates afterwards.
            playerTime = player.currentPosition
        }
        catch {
        }
    }
}
What have I missed, please? Thank you. Scarlett
The reason the label isn’t updating is you’re setting the text in viewDidLoad which is called only once. Use a Timer to update the label.
import UIKit
import AVFoundation
/// Answer code: a repeating Timer updates the position label during playback.
class ViewController: UIViewController {
    var player: AVMIDIPlayer = AVMIDIPlayer()
    var playerTime: Double = 999 {
        didSet {
            label.text = String(playerTime)
        }
    }
    var timer = Timer()

    @IBOutlet var label: UILabel!

    @IBAction func Play(_ sender: Any) {
        player.play()
        // Fires every 0.1 seconds so the label tracks the playback position.
        timer = Timer.scheduledTimer(withTimeInterval: 0.1, repeats: true) { _ in
            self.playerTime += 0.1
            // Convert to Int for the %02d format specifiers — the original
            // snippet had an unbalanced parenthesis and passed Doubles here,
            // which breaks String(format:).
            let min = Int(self.playerTime.truncatingRemainder(dividingBy: 3600) / 60)
            let sec = Int(self.playerTime.truncatingRemainder(dividingBy: 60))
            self.label.text = String(format: "%02d:%02d", min, sec)
            // NOTE(review): accumulating += 0.1 drifts over time; reading
            // player.currentPosition each tick would be more accurate.
        }
    }

    /// Call when playback stops to cancel the repeating timer.
    func stop() {
        timer.invalidate()
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view.
        do {
            let audioPath = Bundle.main.path(forResource: "song", ofType: "mid")
            try player = AVMIDIPlayer(contentsOf: NSURL(fileURLWithPath: audioPath!) as URL, soundBankURL: nil)
            playerTime = player.currentPosition
        }
        catch {
        }
    }
}

IBAction stops working when I connect IBOutlet

I have an IBAction wired up to a button in my storyboard that plays a sound. This works just fine. However, when I wire up an IBOutlet to the same button, the code for the outlet takes over and the IBAction stops working. In this case, the IBOutlet serves the purpose of making the button pulse with animation. Here is my code:
import UIKit
import Firebase
import AVFoundation
/// Question code: the button has both an IBAction (sound) and a tap gesture
/// recognizer (pulse animation); only the recognizer fires.
class Page1: UIViewController, AVAudioPlayerDelegate, UIAlertViewDelegate {
    // Were garbled as `#IBOutlet` in the original paste.
    @IBOutlet weak var scrollView: UIScrollView!
    @IBOutlet weak var pageControl: UIPageControl!
    @IBOutlet weak var testpulse: UIButton!

    // Declaration was missing from the pasted snippet; `ahh` is assigned in
    // viewDidLoad and played from the IBAction below.
    var ahh: AVAudioPlayer!

    override func viewDidLoad() {
        super.viewDidLoad()
        let url = Bundle.main.url(forResource: "ahh", withExtension: "mp3")
        do {
            ahh = try AVAudioPlayer(contentsOf: url!)
            ahh.prepareToPlay()
        }
        catch let error as NSError {
            print(error.debugDescription)
        }
        testpulse.isUserInteractionEnabled = true
        // NOTE(review): this tap recognizer intercepts the touch, so the
        // button's IBAction no longer fires — the subject of the question.
        // The answer below removes it and calls addPulse() from the IBAction.
        let tapGestureRecognizer = UITapGestureRecognizer(target: self, action: #selector(Page1.addPulse))
        tapGestureRecognizer.numberOfTapsRequired = 1
        testpulse.addGestureRecognizer(tapGestureRecognizer)
    }

    /// Inserts a one-shot purple pulse ring behind the button.
    @objc func addPulse() {
        let pulse = Pulsing(numberOfPulses: 1, radius: 110, position: testpulse.center)
        pulse.animationDuration = 0.8
        pulse.backgroundColor = UIColor.purple.cgColor
        self.view.layer.insertSublayer(pulse, below: testpulse.layer)
    }

    @IBAction func ahh(_ sender: Any) {
        ahh.play()
    }
}
And for my Swift animation file I have this:
import UIKit
/// A CALayer that emits an expanding, fading "pulse" ring.
class Pulsing: CALayer {
    var animationGroup = CAAnimationGroup()
    var initialPulseScale: Float = 0
    var nextPulseAfter: TimeInterval = 0
    var animationDuration: TimeInterval = 1.5
    var radius: CGFloat = 200
    var numberOfPulses: Float = Float.infinity

    // NOTE(review): this presentation-copy initializer does not copy the
    // custom properties above — confirm whether that matters for your use.
    override init(layer: Any) {
        super.init(layer: layer)
    }

    required init?(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)
    }

    init(numberOfPulses: Float = Float.infinity, radius: CGFloat, position: CGPoint) {
        super.init()
        self.backgroundColor = UIColor.black.cgColor
        self.contentsScale = UIScreen.main.scale
        self.opacity = 0
        self.radius = radius
        self.numberOfPulses = numberOfPulses
        self.position = position
        self.bounds = CGRect(x: 0, y: 0, width: radius * 2, height: radius * 2)
        //self.cornerRadius = radius
        // Build the animation group off the main queue, then attach it on the
        // main queue. The original also built and attached it a second time,
        // synchronously, right after this block; that redundant duplicate
        // (same "pulse" key, so the second add replaced the first) is removed.
        DispatchQueue.global(qos: DispatchQoS.QoSClass.default).async {
            self.setupAnimationGroup()
            DispatchQueue.main.async {
                self.add(self.animationGroup, forKey: "pulse")
            }
        }
    }

    /// Scale from `initialPulseScale` up to full size over `animationDuration`.
    func createScaleAnimation() -> CABasicAnimation {
        let scaleAnimation = CABasicAnimation(keyPath: "transform.scale.xy")
        scaleAnimation.fromValue = NSNumber(value: initialPulseScale)
        scaleAnimation.toValue = NSNumber(value: 1)
        scaleAnimation.duration = animationDuration
        return scaleAnimation
    }

    /// Fade in quickly, then out by the end of the pulse.
    /// (Optional return kept for interface compatibility; it never returns nil.)
    func createOpacityAnimation() -> CAKeyframeAnimation? {
        let opacityAnimation = CAKeyframeAnimation(keyPath: "opacity")
        opacityAnimation.duration = animationDuration
        opacityAnimation.values = [0.4, 0.8, 0]
        opacityAnimation.keyTimes = [0, 0.2, 1]
        return opacityAnimation
    }

    /// Assembles the scale + opacity animations into `animationGroup`.
    func setupAnimationGroup() {
        self.animationGroup = CAAnimationGroup()
        self.animationGroup.duration = animationDuration + nextPulseAfter
        self.animationGroup.repeatCount = numberOfPulses
        let defaultCurve = CAMediaTimingFunction(name: kCAMediaTimingFunctionDefault)
        self.animationGroup.timingFunction = defaultCurve
        // Unwrap safely instead of force-unwrapping as the original did.
        guard let opacityAnimation = createOpacityAnimation() else { return }
        self.animationGroup.animations = [createScaleAnimation(), opacityAnimation]
    }
}
When I connect the IBAction to my button, the sound works. However when I connect the IBOutlet, the animation works but the IBAction doesn't. What could be causing this? It's still connected in the storyboard.
import UIKit
import Firebase
import AVFoundation
/// Answer code: the tap recognizer is removed; the IBAction both plays the
/// sound and triggers the pulse animation, so the two no longer conflict.
class Page1: UIViewController, AVAudioPlayerDelegate, UIAlertViewDelegate {
    // Were garbled as `#IBOutlet` in the original paste.
    @IBOutlet weak var scrollView: UIScrollView!
    @IBOutlet weak var pageControl: UIPageControl!
    @IBOutlet weak var testpulse: UIButton!

    // Declaration was missing from the pasted snippet.
    var ahh: AVAudioPlayer!

    override func viewDidLoad() {
        super.viewDidLoad()
        let url = Bundle.main.url(forResource: "ahh", withExtension: "mp3")
        do {
            ahh = try AVAudioPlayer(contentsOf: url!)
            ahh.prepareToPlay()
        }
        catch let error as NSError {
            print(error.debugDescription)
        }
        testpulse.isUserInteractionEnabled = true
    }

    /// Inserts a one-shot purple pulse ring behind the button.
    @objc func addPulse() {
        let pulse = Pulsing(numberOfPulses: 1, radius: 110, position: testpulse.center)
        pulse.animationDuration = 0.8
        pulse.backgroundColor = UIColor.purple.cgColor
        self.view.layer.insertSublayer(pulse, below: testpulse.layer)
    }

    @IBAction func ahh(_ sender: Any) {
        ahh.play()
        addPulse()
    }
}

AudioKit did not start

I'm a Chinese developer, so my English is not good. I have run into some trouble with AudioKit when configuring background audio playback in the AppDelegate.
I wrote some code to play audio in the background:
import UIKit
import MediaPlayer
// Was garbled as `#UIApplicationMain` in the original paste.
@UIApplicationMain
class AppDelegate: UIResponder, UIApplicationDelegate {
    var window: UIWindow?

    func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplicationLaunchOptionsKey: Any]?) -> Bool {
        // Configure the audio session for background playback.
        // Set the category BEFORE activating the session — the documented
        // order; the original activated first and then set the category.
        do {
            try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayback)
            try AVAudioSession.sharedInstance().setActive(true)
        } catch {
            // NOTE(review): swallowing this hides session failures; consider
            // logging the error.
        }
        // Receive remote-control (lock screen / Control Center) events.
        UIApplication.shared.beginReceivingRemoteControlEvents()
        return true
    }
}
Then a homeViewController is set as the rootViewController; it can push the recordViewController:
import UIKit
/// Home screen: pushes the recorder screen.
class ViewController: UIViewController {
    // Was garbled as `#IBOutlet` in the original paste.
    @IBOutlet weak var toRecordButton: UIButton!

    override func viewDidLoad() {
        super.viewDidLoad()
        self.title = "首页"
    }

    @IBAction func toRecordAction(_ sender: Any) {
        let vc = RecorderViewController()
        self.navigationController?.pushViewController(vc, animated: true)
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }
}
Next is the code of the recordViewController:
import UIKit
import AudioKitUI
import SnapKit
import MediaPlayer
class RecorderViewController: UIViewController {
private var fileForm = ".m4a"
private lazy var inputPlot:AKNodeOutputPlot = {
let plot = AKNodeOutputPlot.init(frame: CGRect.init(x: 20, y: 20, width: UIScreen.main.bounds.width - 40, height: 150))
return plot
}()
var micMixer: AKMixer!
var recorder: AKNodeRecorder!
var player: AKAudioPlayer!//播放背景音乐的播放器
var testPlayer:AKPlayer!//试听的播放器
var tape: AKAudioFile!
var micBooster: AKBooster!
var moogLadder: AKMoogLadder!
var delay: AKDelay!
var mainMixer: AKMixer!
let mic = AKMicrophone()//麦克风对象
fileprivate var micTracker:AKMicrophoneTracker?//话筒输入追踪器
fileprivate var playerTracker:AKAmplitudeTracker?//其他的追踪器
// Builds the UI first, then configures the AudioKit recording graph.
override func viewDidLoad() {
super.viewDidLoad()
self.view.backgroundColor = UIColor.white
self.title = "录音"
self.setupViews()
self.stetupConfigs()
}
/// Lays out the waveform plot and the record/stop toggle button.
private func setupViews() {
    self.view.addSubview(inputPlot)

    let recordButton = UIButton()
    self.view.addSubview(recordButton)
    recordButton.setTitle("开始录音", for: .normal)
    recordButton.setTitle("停止录音", for: .selected)
    recordButton.setTitleColor(UIColor.black, for: .normal)
    recordButton.setTitleColor(UIColor.black, for: .selected)
    recordButton.addTarget(self, action: #selector(buttonClicked(sender:)), for: .touchUpInside)
    // Centered, fixed 80x50, via SnapKit.
    recordButton.snp.makeConstraints { (make) in
        make.center.equalTo(self.view.snp.center)
        make.size.equalTo(CGSize(width: 80, height: 50))
    }
}
// Configures the AudioKit session and node graph:
//   mic -> micMixer -> micBooster ─────────────┐
//   recorder taps micMixer; its file feeds      ├─> mainMixer -> AudioKit.output
//   testPlayer -> moogLadder ──────────────────┘
private func stetupConfigs(){
// Clean tempFiles !
AKAudioFile.cleanTempDirectory()
// Session settings
AKSettings.bufferLength = .medium
do {
try AKSettings.setSession(category: .playAndRecord, with: .allowBluetoothA2DP)
} catch {
AKLog("Could not set session category.")
}
AKSettings.defaultToSpeaker = true
// Patching
inputPlot.node = mic
micMixer = AKMixer(mic)
micBooster = AKBooster(micMixer)
// Will set the level of microphone monitoring
micBooster.gain = 0
// NOTE(review): if AKNodeRecorder's init throws, `recorder` stays nil and the
// next line crashes on the implicitly-unwrapped `recorder`; likewise
// `testPlayer` stays nil and `testPlayer.isLooping` crashes. Guard these.
recorder = try? AKNodeRecorder(node: micMixer)
if let file = recorder.audioFile {
testPlayer = AKPlayer(audioFile: file)
}
testPlayer.isLooping = true
moogLadder = AKMoogLadder(testPlayer)
mainMixer = AKMixer(moogLadder, micBooster)
AudioKit.output = mainMixer
do {
try AudioKit.start()
} catch {
AKLog("AudioKit did not start!")
}
micBooster.gain = 0
// NOTE(review): the -10851/-10875 errors in the question's log suggest the
// session activated in the AppDelegate conflicts with AudioKit's own session
// configuration here — confirm by removing the AppDelegate activation.
}
// Toggles recording. Was garbled as `#objc` in the paste; the target/action
// selector requires the `@objc` attribute.
@objc private func buttonClicked(sender:UIButton){
    sender.isSelected = !sender.isSelected
    if sender.isSelected {
        do {
            try recorder.record()
        } catch { print("Errored recording.") }
    } else {
        recorder.stop()
    }
}
// Standard UIKit memory-warning hook; nothing to release here yet.
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
When this code runs, it prints the following:
2018-03-20 14:37:34.101644+0800 RecorderTest[18970:5568244]
[DYMTLInitPlatform] platform initialization successful
AKMicrophone.swift:init():45:Mixer inputs 8
2018-03-20 14:37:50.045692+0800 RecorderTest[18970:5568082] [aurioc]
918: failed: -10851 (enable 1, outf< 2 ch, 0 Hz, Float32, non-
inter> inf< 2 ch, 0 Hz, Float32, non-inter>)
AKAudioFile.swift:deleteFileWithFileName:139:"334A33E7-1237-447A-8A6A-
AA5F57CA5871.caf" deleted.
AKAudioFile.swift:cleanTempDirectory():157:1 files deleted
2018-03-20 14:37:50.482190+0800 RecorderTest[18970:5568082] [mcmx] 338:
input bus 0 sample rate is 0
2018-03-20 14:37:50.482772+0800 RecorderTest[18970:5568082] [avae]
AVAEInternal.h:103:_AVAE_CheckNoErr:
[AVAudioEngineGraph.mm:1266:Initialize: (err =
AUGraphParser::InitializeActiveNodesInOutputChain(ThisGraph,
kOutputChainOptimizedTraversal, *GetOutputNode(),
isOutputChainActive)): error -10875
2018-03-20 14:37:50.482822+0800 RecorderTest[18970:5568082] [avae]
AVAudioEngine.mm:149:-[AVAudioEngine prepare]: Engine#0x1038b0460:
could not initialize, error = -10875
2018-03-20 14:37:50.572746+0800 RecorderTest[18970:5568082] [mcmx] 338:
input bus 0 sample rate is 0
2018-03-20 14:37:50.572815+0800 RecorderTest[18970:5568082] [avae]
AVAEInternal.h:103:_AVAE_CheckNoErr:
[AVAudioEngineGraph.mm:1266:Initialize: (err =
AUGraphParser::InitializeActiveNodesInOutputChain(ThisGraph,
kOutputChainOptimizedTraversal, *GetOutputNode(),
isOutputChainActive)): error -10875
RecorderViewController.swift:stetupConfigs():90:AudioKit did not start!

Retain cycle suspected in closure

I suspect that the following function, which I use in my GameScene class in order to manage the accelerometer's input, is keeping my scene from deinitializing when I transition to another scene:
class GameScene: SKScene {
let motionManager = CMMotionManager()
var xAcceleration = CGFloat(0)
// Some stuff
// override func didMove(to: ....
// Starts accelerometer updates and low-pass filters the x axis into
// xAcceleration (75% new sample, 25% previous value).
func setupCoreMotion() {
motionManager.accelerometerUpdateInterval = 0.2
let queue = OperationQueue()
// The handler below captures `self` strongly; while updates are running the
// handler is retained, so the scene cannot deinitialize — the suspected
// retain path. Stopping updates (stopAccelerometerUpdates) or using a
// weak/unowned capture releases it.
motionManager.startAccelerometerUpdates(to: queue,
withHandler:
{
accelerometerData, error in
guard let accelerometerData = accelerometerData else {
return
}
let acceleration = accelerometerData.acceleration
// NOTE(review): this write happens on the background OperationQueue;
// confirm xAcceleration is only read where that is safe.
self.xAcceleration = (CGFloat(acceleration.x) * 0.75) +
(self.xAcceleration * 0.25)
})
}
}
It may be because of the self capture, but if that's the case, I have no clue where to put the "[unowned self] in" capture list.
You should put it before the closure's parameters, like this: `{ [unowned self] accelerometerData, error in … }`. (Note that `[unowned self]` will crash if the handler fires after the scene is deallocated; `[weak self]` is the safer choice here, and remember to call `stopAccelerometerUpdates()` when leaving the scene.)

Swift SpriteKit: Thread 1: EXC_BAD_ACCESS on addChild? What am I doing wrong?

I have a game where, when you die, you click and the game should restart itself, but whenever I click to restart it gives me this (see above) error. Thanks in advance(:
Here is my code:
/// Question code (Swift 2 era): spawns fruit on a repeating NSTimer.
class MCTFruitGen: SKSpriteNode {
    var generationTimer: NSTimer!
    var fruits = [MCTFruit]()
    var fruitTracker = [MCTFruit]()

    /// Spawns a fruit every `seconds` via a repeating NSTimer.
    /// NOTE(review): the EXC_BAD_ACCESS on restart is likely the old timer
    /// still firing at a discarded generator — invalidate it before replacing
    /// the scene (the answer below switches to SKAction for this reason).
    func startGeneratingFruitEvery(seconds: NSTimeInterval) {
        generationTimer = NSTimer.scheduledTimerWithTimeInterval(seconds, target: self, selector: "generateFruit", userInfo: nil, repeats: true)
    }

    func stopGenerating() {
        generationTimer?.invalidate()
    }

    func generateFruit() {
        // Randomly place the fruit above or below the ground line.
        var scale: CGFloat
        let rand = arc4random_uniform(2)
        if rand == 0 {
            scale = -1.0
        } else {
            scale = 1.0
        }
        let strawberry = MCTFruit()
        strawberry.position.x = size.width/2 + strawberry.size.width/2
        strawberry.position.y = scale * (NMCGroundHeight/160 + strawberry.size.height)
        self.fruits.append(strawberry)
        // Fixed: the original appended the whole array (`append(fruits)`),
        // which does not type-check for a [MCTFruit]; track the new fruit.
        fruitTracker.append(strawberry)
        addChild(strawberry)
    }

    func stopFruit() {
        stopGenerating()
        for fruit in fruits {
            fruit.stopFruitMoving()
        }
    }
}
Try using SKAction instead of NSTimer for generating the fruit or any timing events in SpriteKit. For example
/// Answer code (Swift 2 era): SKAction-based spawning, which pauses and stops
/// with the scene automatically, unlike NSTimer.
class MCTFruitGen: SKSpriteNode {
    var fruits = [MCTFruit]()
    var fruitTracker = [MCTFruit]()

    /// Spawns a fruit every `seconds` using a repeating SKAction.
    func startGeneratingFruitEvery(seconds: NSTimeInterval) {
        // Capture self unowned: a strong capture creates a node -> action ->
        // closure -> node retain cycle for as long as the forever-repeating
        // action exists; the closure only runs while the action is attached
        // to this node, so self is guaranteed alive when it fires.
        let callGenerateAction = SKAction.runBlock { [unowned self] () -> Void in
            self.generateFruit()
        }
        let waitOneSecond = SKAction.waitForDuration(seconds)
        let sequenceAction = SKAction.sequence([callGenerateAction, waitOneSecond])
        let fruitGeneratorAction = SKAction.repeatActionForever(sequenceAction)
        self.runAction(fruitGeneratorAction, withKey: "fruitGenerator")
    }

    /// Cancels the spawning action.
    func stopGenerating() {
        self.removeActionForKey("fruitGenerator")
    }

    func generateFruit() {
        // Your Code
    }

    func stopFruit() {
        stopGenerating()
        // Your Code
    }
}