Thread 1: EXC_BAD_INSTRUCTION (code=EXC_I386_INVOP, subcode=0x0) - swift

My code is posted below, not sure what the issue is but the return errors are listed as above along with fatal error:
unexpectedly found nil while unwrapping an Optional value
I have seen other answers to similar errors, but all of those involve things such as if let, which is not where my error seems to be occurring; the error message is connected to a line near the top that says "audioFile = try AVAudioFile(forReading: recordedAudioURL as URL)"
//
// PlaySoundsViewController+Audio.swift
// PitchPerfect
//
// Copyright © 2016 Udacity. All rights reserved.
//
import UIKit
import AVFoundation
extension PlaySoundsViewController: AVAudioPlayerDelegate {

    /// User-facing alert titles and messages for recording/playback failures.
    struct Alerts {
        static let DismissAlert = "Dismiss"
        static let RecordingDisabledTitle = "Recording Disabled"
        static let RecordingDisabledMessage = "You've disabled this app from recording your microphone. Check Settings."
        static let RecordingFailedTitle = "Recording Failed"
        static let RecordingFailedMessage = "Something went wrong with your recording."
        static let AudioRecorderError = "Audio Recorder Error"
        static let AudioSessionError = "Audio Session Error"
        static let AudioRecordingError = "Audio Recording Error"
        static let AudioFileError = "Audio File Error"
        static let AudioEngineError = "Audio Engine Error"
    }

    // raw values correspond to sender tags
    enum PlayingState { case Playing, NotPlaying }

    // MARK: Audio Functions

    /// Opens the recorded file at `recordedAudioURL` into `audioFile`.
    /// FIX: returns early when the load fails so the success message is only
    /// printed on success — previously it printed either way, masking the failure
    /// that later surfaces when `playSound` schedules a nil `audioFile`.
    func setupAudio() {
        // initialize (recording) audio file
        do {
            audioFile = try AVAudioFile(forReading: recordedAudioURL as URL)
        } catch {
            showAlert(title: Alerts.AudioFileError, message: String(describing: error))
            return
        }
        print("Audio has been setup")
    }

    /// Plays `audioFile` through a freshly built AVAudioEngine graph with the
    /// requested effects, and schedules a timer that resets the UI when
    /// playback finishes.
    /// - Parameters:
    ///   - rate: optional playback rate applied to the time-pitch node
    ///   - pitch: optional pitch shift applied to the time-pitch node
    ///   - echo: when true, inserts a multi-echo distortion node
    ///   - reverb: when true, inserts a cathedral reverb node at 50% wet/dry
    func playSound(rate: Float? = nil, pitch: Float? = nil, echo: Bool = false, reverb: Bool = false) {
        // initialize audio engine components
        audioEngine = AVAudioEngine()
        // node for playing audio
        audioPlayerNode = AVAudioPlayerNode()
        audioEngine.attach(audioPlayerNode)
        // node for adjusting rate/pitch
        let changeRatePitchNode = AVAudioUnitTimePitch()
        if let pitch = pitch {
            changeRatePitchNode.pitch = pitch
        }
        if let rate = rate {
            changeRatePitchNode.rate = rate
        }
        audioEngine.attach(changeRatePitchNode)
        // node for echo
        let echoNode = AVAudioUnitDistortion()
        echoNode.loadFactoryPreset(.multiEcho1)
        audioEngine.attach(echoNode)
        // node for reverb
        let reverbNode = AVAudioUnitReverb()
        reverbNode.loadFactoryPreset(.cathedral)
        reverbNode.wetDryMix = 50
        audioEngine.attach(reverbNode)
        // connect nodes: player -> rate/pitch -> (echo) -> (reverb) -> output
        if echo == true && reverb == true {
            connectAudioNodes(nodes: audioPlayerNode, changeRatePitchNode, echoNode, reverbNode, audioEngine.outputNode)
        } else if echo == true {
            connectAudioNodes(nodes: audioPlayerNode, changeRatePitchNode, echoNode, audioEngine.outputNode)
        } else if reverb == true {
            connectAudioNodes(nodes: audioPlayerNode, changeRatePitchNode, reverbNode, audioEngine.outputNode)
        } else {
            connectAudioNodes(nodes: audioPlayerNode, changeRatePitchNode, audioEngine.outputNode)
        }
        // schedule to play and start the engine!
        audioPlayerNode.stop()
        audioPlayerNode.scheduleFile(audioFile, at: nil) {
            // Estimate the remaining playback time (scaled by rate, if any) so
            // the stop timer fires roughly when audible playback ends.
            var delayInSeconds: Double = 0
            if let lastRenderTime = self.audioPlayerNode.lastRenderTime, let playerTime = self.audioPlayerNode.playerTime(forNodeTime: lastRenderTime) {
                if let rate = rate {
                    delayInSeconds = Double(self.audioFile.length - playerTime.sampleTime) / Double(self.audioFile.processingFormat.sampleRate) / Double(rate)
                } else {
                    delayInSeconds = Double(self.audioFile.length - playerTime.sampleTime) / Double(self.audioFile.processingFormat.sampleRate)
                }
            }
            // schedule a stop timer for when audio finishes playing
            self.stopTimer = Timer(timeInterval: delayInSeconds, target: self, selector: #selector(PlaySoundsViewController.stopAudio), userInfo: nil, repeats: false)
            RunLoop.main.add(self.stopTimer!, forMode: RunLoopMode.defaultRunLoopMode)
        }
        do {
            try audioEngine.start()
        } catch {
            showAlert(title: Alerts.AudioEngineError, message: String(describing: error))
            return
        }
        // play the recording!
        audioPlayerNode.play()
    }

    // MARK: Connect List of Audio Nodes

    /// Connects each node in the variadic list to the next, using the loaded
    /// audio file's processing format throughout the chain.
    func connectAudioNodes(nodes: AVAudioNode...) {
        for x in 0..<nodes.count-1 {
            audioEngine.connect(nodes[x], to: nodes[x+1], format: audioFile.processingFormat)
        }
    }

    /// Stops playback: invalidates the stop timer, halts the player node,
    /// tears down the engine and restores the not-playing UI.
    func stopAudio() {
        if let stopTimer = stopTimer {
            stopTimer.invalidate()
        }
        configureUI(playState: .NotPlaying)
        if let audioPlayerNode = audioPlayerNode {
            audioPlayerNode.stop()
        }
        if let audioEngine = audioEngine {
            audioEngine.stop()
            audioEngine.reset()
        }
    }

    // MARK: UI Functions

    /// Enables or disables the effect buttons and the stop button for `playState`.
    func configureUI(playState: PlayingState) {
        switch(playState) {
        case .Playing:
            setPlayButtonsEnabled(enabled: false)
            stopplaybackButton.isEnabled = true
        case .NotPlaying:
            setPlayButtonsEnabled(enabled: true)
            stopplaybackButton.isEnabled = false
        }
    }

    /// Toggles all six effect buttons together.
    func setPlayButtonsEnabled(enabled: Bool) {
        snailButton.isEnabled = enabled
        chipmunkButton.isEnabled = enabled
        rabbitButton.isEnabled = enabled
        vaderButton.isEnabled = enabled
        echoButton.isEnabled = enabled
        reverbButton.isEnabled = enabled
    }

    /// Presents a dismissable alert with the given title and message.
    func showAlert(title: String, message: String) {
        let alert = UIAlertController(title: title, message: message, preferredStyle: .alert)
        alert.addAction(UIAlertAction(title: Alerts.DismissAlert, style: .default, handler: nil))
        self.present(alert, animated: true, completion: nil)
    }
}

I was working on the same Udacity course, and I believe this is because the mic is not supported in the iOS 8.0+ simulator. One potential work-around is to drop this line into the viewDidLoad() function in the PlaySoundsViewController:
recordedAudioURL = Bundle.main.url(forResource: "yourSound", withExtension: "mp3")
...and then drag an mp3 named "yourSound.mp3" into your PitchPerfect project. It's a hack but it will stop the crash and allow you to test the audio modification buttons on that mp3, though you need to use an actual iOS device if you want to record your own samples in the app.
EDIT: I went through my entire codebase again and checked my audio settings. On the codebase, I had a method incorrectly placed (prepare for segue) in the PlaySoundsViewController, and for the audio settings, once I set the mic to my cinema display audio, the recording worked in the simulator. Good luck.

I had this same problem and it was because prepare didn't have correct syntax and the audioURL was not being propagated to the PlaySoundViewController.
/// Hands the recorded audio URL to the destination controller when the
/// "stopRecording" segue fires; other segues are ignored.
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
    guard segue.identifier == "stopRecording" else { return }
    let destination = segue.destination as! PlaySoundsViewController
    destination.recordedAudioURL = (sender as! URL)
    print("setting URL in PlaySoundsViewController")
}
fixed it for me.

Related

AvAudioEngine playing and recording possible?

I am trying to use the AVAudioEngine to play a button sound. But unfortunately the sound file is played only once.
The Idea is, that the user taps on the button, a sound plays and the recording starts. After the user taps on the button again, a second sound should be playing indicating, that the recording session has been ended.
So far the first sound appears, and the recording starts.
Unfortunately, the second sound (the ending sound) won't play.
And I have found out that when I am using the same AudioEngine as the recording function, the sound won't play at all.
As I am completely new to the AVFoundation Framework, I am not sure what the issue here is.
Thanks in advance.
// Dedicated engine + player node for the scene-1 button sounds, presumably kept
// separate from the speech-recognition engine (`audioEngine`, declared elsewhere).
// NOTE(review): lowerCamelCase (`startSoundEngineScene1`) would match Swift convention.
var StartSoundEngineScene1 = AVAudioEngine()
var StartSoundNodeScene1 = AVAudioPlayerNode()
/// Attaches `SoundNode` to `AudioEngine`, wires it to the main mixer using the
/// sound file's processing format, and pre-allocates engine resources.
/// Bails out silently when the file at `FileURL` cannot be opened.
/// FIX: removed an unused local — `AVAudioSession.sharedInstance()` was fetched
/// into a constant and never used.
func SetupAudio(AudioEngine: AVAudioEngine, SoundNode: AVAudioPlayerNode, FileURL: URL) {
    guard let AudioFile = try? AVAudioFile(forReading: FileURL) else { return }
    AudioEngine.attach(SoundNode)
    AudioEngine.connect(SoundNode, to: AudioEngine.mainMixerNode, format: AudioFile.processingFormat)
    AudioEngine.prepare()
}
override func viewDidLoad() {
    super.viewDidLoad()
    // Wire up the button-sound engine once at load so taps can play immediately.
    // NOTE(review): `StartRecSound` is declared elsewhere — presumably the URL of
    // a bundled cue sound; confirm it resolves before shipping.
    SetupAudio(AudioEngine: StartSoundEngineScene1, SoundNode: StartSoundNodeScene1, FileURL: StartRecSound)
}
/// Starts `AudioEngine` if possible, then schedules and plays the sound at
/// `FileURL` on `SoundNode` at a fixed low volume. Does nothing when the
/// file cannot be read.
func ButtonSound (AudioEngine: AVAudioEngine, SoundNode: AVAudioPlayerNode, FileURL: URL) {
    try? AudioEngine.start() // best-effort: start errors are deliberately ignored
    guard let soundFile = try? AVAudioFile(forReading: FileURL) else { return }
    SoundNode.scheduleFile(soundFile, at: nil, completionHandler: nil)
    SoundNode.volume = 0.16
    SoundNode.play()
}
/// Plays the "start recording" cue, then after a 0.7 s delay toggles speech
/// recognition: stops the engine when it is already running, otherwise
/// configures the audio session and begins live transcription.
func StartRecording(){
    ButtonSound(AudioEngine: StartSoundEngineScene1, SoundNode: StartSoundNodeScene1, FileURL: StartRecSound)
    Timer.scheduledTimer(withTimeInterval: 0.7, repeats: false) { timer in
        // NOTE(review): `audioEngine` here is a different property (the speech
        // engine, declared elsewhere) from `StartSoundEngineScene1` above.
        if audioEngine.isRunning {
            // Already recording: stop and finalize the recognition request.
            audioEngine.stop()
            recognitionRequest?.endAudio()
        } else {
            print("Rercording Started")
            // Cancel any in-flight recognition task before starting a new one.
            if let recognitionTask = self.recognitionTask {
                recognitionTask.cancel()
                self.recognitionTask = nil
            }
            self.recordedMessage = ""
            // Configure the shared session for measurement-mode recording.
            // NOTE(review): `.record` silences playback routes — use `.playAndRecord`
            // if cue sounds must be audible while recording.
            let audioSession = AVAudioSession.sharedInstance()
            do {
                try audioSession.setCategory(AVAudioSession.Category.record)
                try audioSession.setMode(AVAudioSession.Mode.measurement)
            }catch {
                print(error)
            }
            recognitionRequest = SFSpeechAudioBufferRecognitionRequest()
            guard let recognitionRequest = self.recognitionRequest else {
                fatalError("Unable to create a speech audio buffer")
            }
            recognitionRequest.shouldReportPartialResults = true
            recognitionRequest.requiresOnDeviceRecognition = true
            // Stream partial transcriptions into `recordedMessage`; tear down on
            // completion or error.
            recognitionTask = speechRecognizer?.recognitionTask(with: recognitionRequest, resultHandler: { (result, error) in
                var isFinal = false
                if let result = result {
                    let sentence = result.bestTranscription.formattedString
                    self.recordedMessage = sentence
                    print (self.recordedMessage)
                    isFinal = result.isFinal
                }
                if error != nil || isFinal {
                    self.audioEngine.stop()
                    self.audioEngine.inputNode.removeTap(onBus: 0)
                    self.recognitionRequest = nil
                    self.recognitionTask = nil
                    self.RecordBtn.isEnabled = true
                }
            })
            // Feed microphone buffers from the input node into the request.
            let recordingFormat = audioEngine.inputNode.outputFormat(forBus: 0)
            audioEngine.inputNode.installTap(onBus: 0, bufferSize: 1024, format: recordingFormat) { (buffer, when) in
                self.recognitionRequest?.append(buffer)
            }
            audioEngine.prepare()
            do{
                try audioEngine.start()
            }catch {
                print(error)
            }
        }
    }
}
/// Ends an active recording session: stops the speech engine, plays the
/// "stop" cue, finalizes the recognition request and removes the input tap.
/// No-op when the engine is idle.
func StopRecording(){
    guard audioEngine.isRunning else { return }
    audioEngine.stop()
    ButtonSound(AudioEngine: StartSoundEngineScene1, SoundNode: StartSoundNodeScene1, FileURL: StopRecSound)
    recognitionRequest?.endAudio()
    audioEngine.inputNode.removeTap(onBus: 0)
}
You set the AVAudioSessionCategory as record.
try audioSession.setCategory(AVAudioSession.Category.record)
If you want to play and record concurrently, you should set the category to playAndRecord.
And... if you change the AVAudioSession while playing or recording, AVAudioEngine's configuration changes and it fires an AVAudioEngineConfigurationChange notification.

App crashes when trying to access API with alamofire

I have this code that I use to read barcode from a record vinyl and after, I try to get information based on the result of the barcode on a website called Discogs. I've created an app at this web site which is required but my app crashes every time when it finishes reading the barcode.
I can read the barcode (get the number out of it) and it dismisses my view controller back to the main VC, where it should show the result, but my app crashes before that...
What should I do ? I am using alamofire
/// Configures the capture session for EAN-13 barcode scanning, attaches the
/// preview layer and starts the session.
/// FIX: no longer force-unwraps the default video device — on hardware without
/// a camera (e.g. the simulator) `AVCaptureDevice.default` returns nil and the
/// original `videoCaptureDevice!` crashed.
func setupCamera(){
    session = AVCaptureSession()
    guard let videoCaptureDevice = AVCaptureDevice.default(for: AVMediaType.video) else {
        scanningNotPossible()
        return
    }
    let videoInput: AVCaptureDeviceInput
    do {
        videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
    } catch {
        return
    }
    if (session.canAddInput(videoInput)) {
        session.addInput(videoInput)
    } else {
        scanningNotPossible()
    }
    // Create output object.
    let metadataOutput = AVCaptureMetadataOutput()
    // Add output to the session.
    if (session.canAddOutput(metadataOutput)) {
        session.addOutput(metadataOutput)
        // Send captured data to the delegate object via a serial queue.
        metadataOutput.setMetadataObjectsDelegate(self, queue: .main)
        // Set barcode type for which to scan: EAN-13.
        metadataOutput.metadataObjectTypes = [AVMetadataObject.ObjectType.ean13]
    } else {
        scanningNotPossible()
    }
    previewLayer = AVCaptureVideoPreviewLayer(session: session)
    previewLayer.frame = view.layer.bounds
    previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
    view.layer.addSublayer(previewLayer)
    // Begin the capture session.
    session.startRunning()
}
/// Metadata delegate callback: halts the session, vibrates, and forwards the
/// first machine-readable code's string payload to `found(code:)`.
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
    session.stopRunning()
    guard let readable = metadataObjects.first as? AVMetadataMachineReadableCodeObject,
          let payload = readable.stringValue else { return }
    AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
    found(code: payload)
    // dismiss(animated: true)
}
/// Presents an alert with the scanned code; on "Search", normalizes the code
/// (strips whitespace, and drops a single leading "0" that marks a UPC wrapped
/// in EAN-13 form), hands it to `DataService.searchAPI`, and pops back.
/// FIX: removed the pointless re-interpolation `"\(trimmedCode)"` and the
/// intermediate `trimmedCodeNoZero` variable — behavior is unchanged.
func barcodeDetected(code: String) {
    // Let the user know we've found something.
    let alert = UIAlertController(title: "Found a Barcode!", message: code, preferredStyle: UIAlertController.Style.alert)
    let theAction = UIAlertAction(title: "Search", style: .default) { (action: UIAlertAction!) in
        // Remove the spaces.
        let trimmedCode = code.trimmingCharacters(in: .whitespaces)
        // EAN or UPC? Check for added "0" at beginning of code.
        if trimmedCode.hasPrefix("0") && trimmedCode.count > 1 {
            // Send the doctored UPC to DataService.searchAPI()
            DataService.searchAPI(codeNumber: String(trimmedCode.dropFirst()))
        } else {
            // Send the doctored EAN to DataService.searchAPI()
            DataService.searchAPI(codeNumber: trimmedCode)
        }
        print("popopop")
        self.navigationController?.popViewController(animated: true)
    }
    alert.addAction(theAction)
    self.present(alert, animated: true, completion: nil)
}
import Foundation
import Alamofire
import SwiftyJSON
class DataService {
    static let dataService = DataService()

    // Most recent album title / release year fetched from Discogs
    // (read-only outside this class).
    private(set) var ALBUM_FROM_DISCOGS = ""
    private(set) var YEAR_FROM_DISCOGS = ""

    /// Queries the Discogs search API for `codeNumber` (a barcode), caches the
    /// first result's title and year, then posts "AlbumNotification".
    /// FIX: previously force-unwrapped `response.result.value`, which crashed on
    /// any transport error or empty body (a likely cause of the reported crash);
    /// failures are now logged and skipped.
    static func searchAPI(codeNumber: String) {
        // The URL we will use to get our album data from Discogs
        let discogsURL = "\(DISCOGS_AUTH_URL)\(codeNumber)&?barcode&key=\(DISCOGS_KEY)&secret=\(DISCOGS_SECRET)"
        Alamofire.request(discogsURL)
            .responseJSON { response in
                guard let value = response.result.value else {
                    print("Discogs request failed: \(String(describing: response.error))")
                    return
                }
                let json = JSON(value)
                let albumArtistTitle = "\(json["results"][0]["title"])"
                let albumYear = "\(json["results"][0]["year"])"
                self.dataService.ALBUM_FROM_DISCOGS = albumArtistTitle
                self.dataService.YEAR_FROM_DISCOGS = albumYear
                // Post a notification to let AlbumDetailsViewController know we have some data.
                NotificationCenter.default.post(name: NSNotification.Name(rawValue: "AlbumNotification"), object: nil)
            }
    }
}
log error:
> 2019-04-26 17:35:21.791189-0300 Discogs Barcode Example[1852:598955] -[Discogs_Barcode_Example.AlbumDetaisViewController setLabels]: unrecognized selector sent to instance 0x10230a460
2019-04-26 17:35:21.793519-0300 Discogs Barcode Example[1852:598955] *** Terminating app due to uncaught exception 'NSInvalidArgumentException', reason: '-[Discogs_Barcode_Example.AlbumDetaisViewController setLabels]: unrecognized selector sent to instance 0x10230a460'
*** First throw call stack:
(0x2016c3518 0x20089e9f8 0x2015e0278 0x22d83bef8 0x2016c8d60 0x2016ca9fc 0x2016345bc 0x201634588 0x201633a7c 0x201633728 0x2015ad524 0x2016331d8 0x20201b814 0x100f1cb38 0x101358d78 0x101325708 0x10209b6f0 0x10209cc74 0x1020aa6fc 0x201654ec0 0x20164fdf8 0x20164f354 0x20384f79c 0x22d810b68 0x100f1b03c 0x2011158e0)
libc++abi.dylib: terminating with uncaught exception of type NSException
[Discogs_Barcode_Example.AlbumDetaisViewController setLabels]:
It seems that AlbumDetaisViewController was hooked up to an action method named setLabels, and you renamed it — so either go back to the original name, or clear the connection in Interface Builder and reconnect it to the new name.

Getting image from AVCaptureMovieFileOutput without switching

I would like to prevent lags when the app switches between video-recording and photo-taking: by using only AVCaptureMovieFileOutput and getting a snapshot from it when captured an image.
Just like how SnapChat does.
Is it possible somehow? I haven't found any related articles about this.
I don't want to switch between outputs, because it lags
The code:
// FIX: restored the `@` prefix on the outlet attributes below — they appeared
// as `#IBOutlet`, which is not valid Swift (likely a copy/paste artifact).
@IBOutlet var cameraView: UIView!
@IBOutlet var cameraSwitchButton: UIButton!
@IBOutlet var captureButtonView: CaptureButton!
@IBOutlet var cameraFlashButton: UIButton!

// Capture pipeline state: one session, a single movie-file output used for
// recording, the active camera input and its video connection.
var captureSession = AVCaptureSession()
let movieOutput = AVCaptureMovieFileOutput()
var activeInput: AVCaptureDeviceInput!
var previewLayer = AVCaptureVideoPreviewLayer()
var outputURL: URL! // destination of the in-flight recording; nil when idle
var connection: AVCaptureConnection!
/// Sets up the capture pipeline and the tap/long-press gesture recognizers.
/// FIX: calls `super.viewDidLoad()` (previously omitted) and replaces the
/// force-unwrap `(connection?.isVideoStabilizationSupported)!` — which crashed
/// when no video connection existed — with a conditional bind.
override func viewDidLoad() {
    super.viewDidLoad()
    if setupSession() {
        setupPreview()
        startSession()
        connection = movieOutput.connection(with: AVMediaType.video)
        if let connection = connection, connection.isVideoStabilizationSupported {
            connection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.off
        }
    }
    // Tap = photo, long-press = timed video recording.
    let tapGesture = UITapGestureRecognizer(target: self, action: #selector(captureButtonTapped))
    let longGesture = UILongPressGestureRecognizer(target: self, action: #selector(captureButtonLongPressed))
    tapGesture.numberOfTapsRequired = 1
    captureButtonView.addGestureRecognizer(tapGesture)
    captureButtonView.addGestureRecognizer(longGesture)
}
// NOTE(review): `#objc` should read `@objc`, and the body below is the
// question's own placeholder, not compilable Swift — the accepted answer fills
// it in by triggering a capture on an added AVCapturePhotoOutput.
#objc func captureButtonTapped(){
    ?? TAKE PHOTO HERE ??
}
var isRecordingVideo : Bool = false // true while a long-press video recording is in flight
/// Long-press handler: begins a 10-second capped video recording on `.began`,
/// and finalizes/clears it when the gesture ends, fails, or is cancelled.
/// FIX: restored the `@objc` attribute (it appeared garbled as `#objc`).
@objc func captureButtonLongPressed(sender : UILongPressGestureRecognizer){
    if sender.state == .began {
        isRecordingVideo = true
        startRecording()
        captureButtonView.startTimer(duration: 10.0)
    }
    if sender.state == .ended || sender.state == .failed || sender.state == .cancelled {
        captureButtonView.clear()
        isRecordingVideo = false
        stopRecording()
    }
}
/// Builds the live camera preview layer, fits it to the camera view and
/// attaches it as a sublayer.
func setupPreview() {
    let layer = AVCaptureVideoPreviewLayer(session: captureSession)
    layer.videoGravity = AVLayerVideoGravity.resizeAspectFill
    layer.frame = cameraView.bounds
    previewLayer = layer
    cameraView.layer.addSublayer(previewLayer)
}
//MARK:- Setup Camera

/// Builds the capture session: camera and microphone inputs plus the
/// movie-file output.
/// - Returns: true when the video input was configured successfully.
/// FIX: the default video/audio devices are no longer force-unwrapped — a
/// missing camera (e.g. simulator) now returns false instead of crashing, and
/// a missing microphone is tolerated.
func setupSession() -> Bool {
    captureSession.sessionPreset = AVCaptureSession.Preset.high
    // Setup Camera
    guard let camera = AVCaptureDevice.default(for: AVMediaType.video) else {
        print("No video capture device available")
        return false
    }
    do {
        let input = try AVCaptureDeviceInput(device: camera)
        if captureSession.canAddInput(input) {
            captureSession.addInput(input)
            activeInput = input
        }
    } catch {
        print("Error setting device video input: \(error)")
        return false
    }
    // Setup Microphone
    if let microphone = AVCaptureDevice.default(for: AVMediaType.audio) {
        do {
            let micInput = try AVCaptureDeviceInput(device: microphone)
            if captureSession.canAddInput(micInput) {
                captureSession.addInput(micInput)
            }
        } catch {
            print("Error setting device audio input: \(error)")
            return false
        }
    }
    // Movie output
    if captureSession.canAddOutput(movieOutput) {
        captureSession.addOutput(movieOutput)
    }
    return true
}
/// Placeholder for switching capture modes; intentionally empty in this sample.
func setupCaptureMode(_ mode: Int) {
}
//MARK:- Camera Session

/// Starts the capture session on the video queue unless it is already running.
func startSession() {
    guard !captureSession.isRunning else { return }
    videoQueue().async {
        self.captureSession.startRunning()
    }
}
/// Stops a running capture session asynchronously on the video queue.
func stopSession() {
    guard captureSession.isRunning else { return }
    videoQueue().async {
        self.captureSession.stopRunning()
    }
}
/// Queue used for session start/stop work; currently the main queue.
func videoQueue() -> DispatchQueue {
    return .main
}
/// Maps the current device orientation to a capture video orientation.
/// Note the deliberate mirroring: device landscapeRight maps to video
/// landscapeLeft; unknown/face orientations fall back to landscapeRight.
func currentVideoOrientation() -> AVCaptureVideoOrientation {
    switch UIDevice.current.orientation {
    case .portrait:
        return .portrait
    case .landscapeRight:
        return .landscapeLeft
    case .portraitUpsideDown:
        return .portraitUpsideDown
    default:
        return .landscapeRight
    }
}
/// Convenience entry point; simply begins a file recording.
func startCapture() {
    startRecording()
}
/// Builds a unique `.mp4` file URL inside the temporary directory.
/// - Returns: a fresh file URL, or nil when the temp directory is unavailable.
func tempURL() -> URL? {
    let tempDir = NSTemporaryDirectory()
    guard !tempDir.isEmpty else { return nil }
    let path = (tempDir as NSString).appendingPathComponent(NSUUID().uuidString + ".mp4")
    return URL(fileURLWithPath: path)
}
/// Starts a movie-file recording to a fresh temp URL, or stops the current
/// recording when one is already in progress.
/// FIX: the orientation check no longer force-unwraps an optional chain —
/// `(connection?.isVideoOrientationSupported)!` crashed when `connection`
/// was nil.
func startRecording() {
    if movieOutput.isRecording == false {
        if let connection = connection, connection.isVideoOrientationSupported {
            connection.videoOrientation = currentVideoOrientation()
        }
        // Smooth autofocus can cause visible focus hunting in recordings.
        let device = activeInput.device
        if (device.isSmoothAutoFocusSupported) {
            do {
                try device.lockForConfiguration()
                device.isSmoothAutoFocusEnabled = false
                device.unlockForConfiguration()
            } catch {
                print("Error setting configuration: \(error)")
            }
        }
        outputURL = tempURL()
        movieOutput.startRecording(to: outputURL, recordingDelegate: self)
    }
    else {
        stopRecording()
    }
}
/// Ends the active movie-file recording, if any.
func stopRecording() {
    guard movieOutput.isRecording else { return }
    movieOutput.stopRecording()
}
/// Recording-delegate callback: saves the finished movie to the photo album on
/// success, then clears the recording URL.
/// FIX: dropped the no-op statement `_ = outputURL as URL`.
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
    if let error = error {
        print("Error recording movie: \(error.localizedDescription)")
    } else {
        UISaveVideoAtPathToSavedPhotosAlbum(outputURL.path, nil, nil, nil)
    }
    outputURL = nil
}
I wasn't able to find a way using only AVCaptureMovieFileOutput, however you can add an additional photo output and trigger photos without having to switch between the outputs.
I'm short on time at the moment but this should get you going till I can edit with more info.
(See EDIT with full implementation below, and limited force unwrapping)
First off setup an additional var for a photo output in your view controller
// declare an additional camera output var
var cameraOutput = AVCapturePhotoOutput()

// do this in your 'setupSession' func where you setup your movie output
// (enables full-resolution stills alongside the movie output)
cameraOutput.isHighResolutionCaptureEnabled = true
captureSession.addOutput(cameraOutput)
Declare a function to capture your photo using the cameraOutput:
/// Triggers a still capture on `cameraOutput`, with a preview sized to the
/// screen. The delegate receives the result.
/// FIX: guards `availablePreviewPhotoPixelFormatTypes.first` instead of
/// force-unwrapping it (consistent with the full implementation in the EDIT).
func capturePhoto() {
    // create settings for your photo capture
    let settings = AVCapturePhotoSettings()
    guard let previewPixelType = settings.availablePreviewPhotoPixelFormatTypes.first else {
        print("No preview photo pixel format types available")
        return
    }
    let previewFormat = [
        kCVPixelBufferPixelFormatTypeKey as String: previewPixelType,
        kCVPixelBufferWidthKey as String: UIScreen.main.bounds.size.width,
        kCVPixelBufferHeightKey as String: UIScreen.main.bounds.size.height
    ] as [String : Any]
    settings.previewPhotoFormat = previewFormat
    cameraOutput.capturePhoto(with: settings, delegate: self)
}
and conform to the AVCapturePhotoCaptureDelegate.
I created a separate class called VideoFeed to manage the video capture session, so this sample is an extension of that class. I'll update with more info on this later.
The loadImage(data: Data) function calls a delegate with the image. You can ignore that call if you put this directly in your view controller, and save or do whatever you like with the generated photo:
// MARK: - AVCapturePhotoCaptureDelegate (pre-iOS 11 sample-buffer API)
extension VideoFeed: AVCapturePhotoCaptureDelegate {

    /// Converts the captured sample buffers into JPEG data and hands it to `loadImage`.
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photoSampleBuffer: CMSampleBuffer?, previewPhoto previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) {
        guard error == nil else {
            print("Photo Error: \(String(describing: error))")
            return
        }
        guard let sampleBuffer = photoSampleBuffer,
              let previewBuffer = previewPhotoSampleBuffer,
              let outputData = AVCapturePhotoOutput
                  .jpegPhotoDataRepresentation(forJPEGSampleBuffer: sampleBuffer, previewPhotoSampleBuffer: previewBuffer) else {
            print("Oops, unable to create jpeg image")
            return
        }
        print("captured photo...")
        loadImage(data: outputData)
    }

    /// Decodes JPEG data into a UIImage and forwards it to the delegate.
    /// FIX: the CGDataProvider/CGImage creation is no longer force-unwrapped;
    /// undecodable data is logged and skipped instead of crashing.
    func loadImage(data: Data) {
        guard let dataProvider = CGDataProvider(data: data as CFData),
              let cgImageRef = CGImage(jpegDataProviderSource: dataProvider, decode: nil, shouldInterpolate: true, intent: .defaultIntent) else {
            print("Oops, unable to decode jpeg image")
            return
        }
        let image = UIImage(cgImage: cgImageRef, scale: 1.0, orientation: UIImageOrientation.right)
        // do whatever you like with the generated image here...
        delegate?.processVideoSnapshot(image)
    }
}
EDIT:
Here's the complete implementation I used in my test project.
First I moved all the AVFoundation specific code into it's own VideoFeed class and created some callbacks to the view controller.
This separates concerns and limits the view controllers responsibilities to:
Adding the preview layer to the view
Triggering and handling the captured image/screenshot
Starting/stopping video file recording.
Here's the ViewController implementation:
ViewController.swift
import UIKit
import AVFoundation
class ViewController: UIViewController, VideoFeedDelegate {
    // FIX: restored the `@` prefix on the attributes in this class — they
    // appeared as `#IBOutlet`/`#IBAction`, which is not valid Swift.
    @IBOutlet var cameraView: UIView!

    /// Owns the capture pipeline; created lazily once camera access is granted.
    var videoFeed: VideoFeed?

    override func viewDidLoad() {
        super.viewDidLoad()
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        // end session
        videoFeed?.stopSession()
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        // request camera access
        AVCaptureDevice.requestAccess(for: AVMediaType.video) { [weak self] granted in
            guard granted else {
                // TODO: show UI stating camera cannot be used, update in settings app...
                print("Camera access denied")
                return
            }
            // The access callback may arrive off-main; feed/UI setup belongs on main.
            DispatchQueue.main.async {
                if self?.videoFeed == nil {
                    // video access was enabled so setup video feed
                    self?.videoFeed = VideoFeed(delegate: self)
                } else {
                    // video feed already available, restart session...
                    self?.videoFeed?.startSession()
                }
            }
        }
    }

    // MARK: VideoFeedDelegate

    /// Sizes and attaches the preview layer produced by the video feed.
    func videoFeedSetup(with layer: AVCaptureVideoPreviewLayer) {
        layer.frame = cameraView.layer.bounds
        cameraView.layer.addSublayer(layer)
    }

    /// Receives a captured snapshot and shows it in a lightbox controller.
    func processVideoSnapshot(_ image: UIImage?) {
        // validate
        guard let image = image else {
            return
        }
        // SAVE IMAGE HERE IF DESIRED
        // for now just showing in a lightbox/detail view controller
        let storyboard = UIStoryboard(name: "Main", bundle: Bundle(for: AppDelegate.self))
        let vc = storyboard.instantiateViewController(withIdentifier: "LightboxViewController") as! LightboxViewController
        vc.previewImage = image
        navigationController?.pushViewController(vc, animated: true)
    }

    /// Triggers a photo capture from the video feed; the result arrives via
    /// the `processVideoSnapshot` callback above.
    @IBAction func captureButtonTapped(_ sender: Any){
        videoFeed?.capturePhoto()
    }
}
Here's the full implementation of the VideoFeed class.
Using this approach allows you to reuse the video functionality in other projects more easily without having it tightly coupled to the view controller.
VideoFeed.swift
import UIKit
import AVFoundation
/// Defines callbacks associated with the VideoFeed class. Notifies delegate of significant events.
protocol VideoFeedDelegate: class {
    /// Callback triggered when the preview layer for this class has been created and configured. Conforming objects should set and maintain a strong reference to this layer otherwise it will be set to nil when the calling function finishes execution.
    ///
    /// - Parameter layer: The video preview layer associated with the active captureSession in the VideoFeed class.
    func videoFeedSetup(with layer: AVCaptureVideoPreviewLayer)

    /// Callback triggered when a snapshot of the video feed has been generated.
    ///
    /// - Parameter image: The captured snapshot as a UIImage, or nil if no image could be produced.
    func processVideoSnapshot(_ image: UIImage?)
}
class VideoFeed: NSObject {
// MARK: Variables
/// The capture session to be used in this class.
var captureSession = AVCaptureSession()
/// The preview layer associated with this session. This class has a
/// weak reference to this layer, the delegate (usually a ViewController
/// instance) should add this layer as a sublayer to its preview UIView.
/// The delegate will have the strong reference to this preview layer.
weak var previewLayer: AVCaptureVideoPreviewLayer?
/// The output that handles saving the video stream to a file.
var fileOutput: AVCaptureMovieFileOutput?
/// A reference to the active video input
var activeInput: AVCaptureDeviceInput?
/// Output for capturing frame grabs of video feed
var cameraOutput = AVCapturePhotoOutput()
/// Delegate to receive callbacks about significant events triggered by this class.
weak var delegate: VideoFeedDelegate?
/// The capture connection associated with the fileOutput.
/// Set when fileOutput is created.
var connection : AVCaptureConnection?
// MARK: Public accessors
/// Public initializer. Accepts a delegate to receive callbacks with the preview layer and any snapshot images.
///
/// - Parameter delegate: A reference to an object conforming to VideoFeedDelegate
/// to receive callbacks for significant events in this class.
init(delegate: VideoFeedDelegate?) {
self.delegate = delegate
super.init()
setupSession()
}
/// Public accessor to begin a capture session.
public func startSession() {
guard captureSession.isRunning == false else {
return
}
captureSession.startRunning()
}
/// Public accessor to end the current capture session.
public func stopSession() {
// validate
guard captureSession.isRunning else {
return
}
// end file recording if the session ends and we're currently recording a video to file
if let isRecording = fileOutput?.isRecording, isRecording {
stopRecording()
}
captureSession.stopRunning()
}
/// Public accessor to begin file recording.
public func startRecording() {
guard fileOutput?.isRecording == false else {
stopRecording()
return
}
configureVideoOrientation()
disableSmoothAutoFocus()
guard let url = tempURL() else {
print("Unable to start file recording, temp url generation failed.")
return
}
fileOutput?.startRecording(to: url, recordingDelegate: self)
}
/// Public accessor to end file recording.
public func stopRecording() {
guard fileOutput?.isRecording == true else {
return
}
fileOutput?.stopRecording()
}
/// Public accessor to trigger snapshot capture of video stream.
public func capturePhoto() {
// create settings object
let settings = AVCapturePhotoSettings()
// verify that we have a pixel format type available
guard let previewPixelType = settings.availablePreviewPhotoPixelFormatTypes.first else {
print("Unable to configure photo capture settings, 'availablePreviewPhotoPixelFormatTypes' has no available options.")
return
}
let screensize = UIScreen.main.bounds.size
// setup format configuration dictionary
let previewFormat: [String : Any] = [
kCVPixelBufferPixelFormatTypeKey as String: previewPixelType,
kCVPixelBufferWidthKey as String: screensize.width,
kCVPixelBufferHeightKey as String: screensize.height
]
settings.previewPhotoFormat = previewFormat
// trigger photo capture
cameraOutput.capturePhoto(with: settings, delegate: self)
}
// MARK: Setup functions
/// Handles configuration and setup of the session, inputs, video preview layer and outputs.
/// If all are setup and configured it starts the session.
internal func setupSession() {
captureSession.sessionPreset = AVCaptureSession.Preset.high
guard setupInputs() else {
return
}
setupOutputs()
setupVideoLayer()
startSession()
}
/// Sets up capture inputs for this session.
///
/// - Returns: Returns true if inputs are successfully setup, else false.
internal func setupInputs() -> Bool {
// only need access to this functionality within this function, so declare as sub-function
func addInput(input: AVCaptureInput) {
guard captureSession.canAddInput(input) else {
return
}
captureSession.addInput(input)
}
do {
if let camera = AVCaptureDevice.default(for: AVMediaType.video) {
let input = try AVCaptureDeviceInput(device: camera)
addInput(input: input)
activeInput = input
}
// Setup Microphone
if let microphone = AVCaptureDevice.default(for: AVMediaType.audio) {
let micInput = try AVCaptureDeviceInput(device: microphone)
addInput(input: micInput)
}
return true
} catch {
print("Error setting device video input: \(error)")
return false
}
}
internal func setupOutputs() {
// only need access to this functionality within this function, so declare as sub-function
func addOutput(output: AVCaptureOutput) {
if captureSession.canAddOutput(output) {
captureSession.addOutput(output)
}
}
// file output
let fileOutput = AVCaptureMovieFileOutput()
captureSession.addOutput(fileOutput)
if let connection = fileOutput.connection(with: .video), connection.isVideoStabilizationSupported {
connection.preferredVideoStabilizationMode = .off
self.connection = connection
}
cameraOutput.isHighResolutionCaptureEnabled = true
captureSession.addOutput(cameraOutput)
}
/// Builds a preview layer for the session and hands it to the delegate
/// so the UI can display the live camera feed; keeps a reference in
/// `previewLayer`.
internal func setupVideoLayer() {
    let videoLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    videoLayer.videoGravity = .resizeAspectFill
    delegate?.videoFeedSetup(with: videoLayer)
    previewLayer = videoLayer
}
// MARK: Helper functions
/// Creates a url in the temporary directory for file recording.
///
/// - Returns: A file url if successful, else nil.
internal func tempURL() -> URL? {
    // Idiomatic replacement for the NSString + NSTemporaryDirectory()
    // bridging in the original. `temporaryDirectory` always yields a
    // valid directory URL, so this effectively always succeeds; the
    // optional return type is kept so existing callers are unaffected.
    return FileManager.default
        .temporaryDirectory
        .appendingPathComponent(UUID().uuidString + ".mp4")
}
/// Disables smooth autofocus functionality on the active device,
/// if the active device is set and 'isSmoothAutoFocusSupported'
/// is supported for the currently set active device.
internal func disableSmoothAutoFocus() {
    guard let captureDevice = activeInput?.device else { return }
    guard captureDevice.isSmoothAutoFocusSupported else { return }
    do {
        // Configuration changes require the device lock.
        try captureDevice.lockForConfiguration()
        captureDevice.isSmoothAutoFocusEnabled = false
        captureDevice.unlockForConfiguration()
    } catch {
        print("Error disabling smooth autofocus: \(error)")
    }
}
/// Sets the current AVCaptureVideoOrientation on the currently active
/// connection if it's supported, deriving the orientation from the
/// status bar orientation (their raw values match by design).
internal func configureVideoOrientation() {
    guard let connection = connection, connection.isVideoOrientationSupported else {
        return
    }
    let rawOrientation = UIApplication.shared.statusBarOrientation.rawValue
    guard let orientation = AVCaptureVideoOrientation(rawValue: rawOrientation) else {
        return
    }
    connection.videoOrientation = orientation
}
}
// MARK: AVCapturePhotoCaptureDelegate
extension VideoFeed: AVCapturePhotoCaptureDelegate {

    /// iOS 11+ capture path: unpacks the photo's file data and forwards
    /// it for image creation.
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        guard error == nil, let data = photo.fileDataRepresentation() else {
            print("Photo Error: \(String(describing: error))")
            return
        }
        print("captured photo...")
        loadImage(data: data)
    }

    /// Pre-iOS 11 capture path: builds JPEG data from the sample and
    /// preview buffers. On iOS 11+ this is intentionally a no-op because
    /// the AVCapturePhoto-based callback above handles the photo.
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photoSampleBuffer: CMSampleBuffer?, previewPhoto previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) {
        if #available(iOS 11.0, *) {
            // Handled by the callback above — nothing to do here.
        } else {
            guard error == nil else {
                print("Photo Error: \(String(describing: error))")
                return
            }
            guard let buffer = photoSampleBuffer,
                let preview = previewPhotoSampleBuffer,
                let data = AVCapturePhotoOutput
                    .jpegPhotoDataRepresentation(forJPEGSampleBuffer: buffer, previewPhotoSampleBuffer: preview) else {
                print("Image creation from sample buffer/preview buffer failed.")
                return
            }
            print("captured photo...")
            loadImage(data: data)
        }
    }

    /// Creates a UIImage from Data object received from AVCapturePhotoOutput
    /// delegate callback and sends to the VideoFeedDelegate for handling.
    ///
    /// - Parameter data: Image data.
    internal func loadImage(data: Data) {
        guard let provider = CGDataProvider(data: data as CFData) else {
            return
        }
        guard let cgImage: CGImage = CGImage(jpegDataProviderSource: provider, decode: nil, shouldInterpolate: true, intent: .defaultIntent) else {
            return
        }
        // Forced to .right because the sensor's native orientation is
        // rotated relative to portrait UI.
        let snapshot = UIImage(cgImage: cgImage, scale: 1.0, orientation: UIImageOrientation.right)
        delegate?.processVideoSnapshot(snapshot)
    }
}
// MARK: AVCaptureFileOutputRecordingDelegate
extension VideoFeed: AVCaptureFileOutputRecordingDelegate {

    /// Called when movie recording starts; logs the destination URL.
    func fileOutput(_ output: AVCaptureFileOutput, didStartRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) {
        print("Video recording started: \(fileURL.absoluteString)")
    }

    /// Called when movie recording finishes; on success, saves the movie
    /// to the user's photo album.
    func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
        if error != nil {
            print("Error recording movie: \(String(describing: error))")
            return
        }
        UISaveVideoAtPathToSavedPhotosAlbum(outputFileURL.path, nil, nil, nil)
    }
}
For anyone else making use of this, don't forget to add permissions to your info.plist for access to the camera, photo library and microphone.
<key>NSCameraUsageDescription</key>
<string>Let us use your camera</string>
<key>NSPhotoLibraryAddUsageDescription</key>
<string>save to images</string>
<key>NSMicrophoneUsageDescription</key>
<string>for sound in video</string>

Reset AVAudioEngine

I am creating a basic soundboard app. I have two switches: one that, when activated, makes the audio slower and lower-pitched, and one that makes it faster and higher-pitched. I have an if/else-if statement that looks at those switches and then plays the audio accordingly; however, when I try to play a sound a second time — either the same sound or a different one — it crashes.
I'm about 99% sure that this is due to the AVAudioEngine needing to be reset or having the nodes themselves reset, but I am way past being out of my league here. I've searched high and low, but the answer I seem to be finding relate to resetting the player when using different buttons to make the high or low sounds. Any thoughts?
class ViewController: UIViewController {

    // FIX: the original used `#IBOutlet`/`#IBAction`, which does not
    // compile — Swift attributes begin with `@`.
    @IBOutlet weak var sassSwitch: UISwitch!
    @IBOutlet weak var chipSwitch: UISwitch!

    /// The two effect switches are mutually exclusive: turning one on
    /// turns the other off.
    @IBAction func sassAction(_ sender: UISwitch) {
        chipSwitch.setOn(false, animated: true)
    }
    @IBAction func chipSwitch(_ sender: UISwitch) {
        sassSwitch.setOn(false, animated: true)
    }

    ///Playback Engine
    private let audioEngine = AVAudioEngine()
    ///Player's Nodes
    private let pitchPlayer = AVAudioPlayerNode()
    private let timePitch = AVAudioUnitTimePitch()
    ///Audio Files to be played
    private var audioFile1 = AVAudioFile()
    private var audioFile2 = AVAudioFile()

    override func viewDidLoad() {
        super.viewDidLoad()
        // Load the two bundled sound files up front.
        if let filePath = Bundle.main.path(forResource: "PeteNope", ofType:
            "mp3") {
            let filePathURL = URL(fileURLWithPath: filePath)
            setPlayerFile(filePathURL)
        }
        if let filePath2 = Bundle.main.path(forResource: "Law_WOW", ofType:
            "mp3") {
            let filePath2URL = URL(fileURLWithPath: filePath2)
            setPlayerFile2(filePath2URL)
        }
    }

    /// Opens `fileURL` for reading into `audioFile1`.
    private func setPlayerFile(_ fileURL: URL) {
        do {
            let file = try AVAudioFile(forReading: fileURL)
            self.audioFile1 = file
        } catch {
            fatalError("Could not create AVAudioFile instance. error: \(error).")
        }
    }

    /// Opens `fileURL` for reading into `audioFile2`.
    private func setPlayerFile2(_ fileURL: URL) {
        do {
            let file = try AVAudioFile(forReading: fileURL)
            self.audioFile2 = file
        } catch {
            fatalError("Could not create AVAudioFile instance. error: \(error).")
        }
    }

    /// Plays sound 1 with the effect selected by the switches.
    /// FIX: the original triplicated the attach/connect/schedule/start
    /// sequence in every branch; only pitch and rate differ, so the
    /// shared work is factored into `play(_:pitch:rate:)`.
    @IBAction func sound1Play(_ sender: UIButton) {
        if sassSwitch.isOn {
            play(audioFile1, pitch: -300, rate: 0.5)
        } else if chipSwitch.isOn {
            play(audioFile1, pitch: 500, rate: 2.0)
        } else {
            play(audioFile1, pitch: 0, rate: 1.0)
        }
    }

    /// Configures the time-pitch effect, wires the node graph
    /// (attach/connect are idempotent for already-attached nodes),
    /// schedules the file and starts playback.
    ///
    /// - Parameters:
    ///   - file: The audio file to schedule.
    ///   - pitch: Pitch shift in cents.
    ///   - rate: Playback rate multiplier.
    private func play(_ file: AVAudioFile, pitch: Float, rate: Float) {
        timePitch.pitch = pitch
        timePitch.rate = rate
        audioEngine.attach(pitchPlayer)
        audioEngine.attach(timePitch)
        audioEngine.connect(pitchPlayer, to: timePitch, format: file.processingFormat)
        audioEngine.connect(timePitch, to: audioEngine.outputNode, format: file.processingFormat)
        pitchPlayer.scheduleFile(file, at: nil, completionHandler: nil)
        // Start the engine.
        do {
            try audioEngine.start()
        } catch {
            fatalError("Could not start engine. error: \(error).")
        }
        pitchPlayer.play()
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }
}
It looks like you might just need to move the attach, connect, and engine.start functions out of the sound1Play function, and put them into viewDidLoad. Currently you're connecting the nodes every action, and restarting the engine multiple times as well.

Record And Play Voice in Separate Class (Swift3)

I have tried many code samples for recording and playing voice, but most of them are not in Swift 3 and they don't work in my app.
This code works, but I want to create a class, separate from the view controller, that does the recording and playing of voices. Also, the mentioned GitHub code is complex, and I'm searching for simplified code.
Update:
After recording, when I check for the existence of the recorded file, the file doesn't exist, and an EXC_BAD_ACCESS error is raised on the AppDelegate.
What's wrong?
Any suggestions would be appreciated.
Try to record audio by writing the line:
let isRec = AudioManager.shared.record(fileName: "rec")
if isRec returned true then recording is happening else not.
To finish recording use : let recordedURL = AudioManager.shared.finishRecording()
To play recorded file send above url to setupUpPlayer() function in manager class
Don't forget to use the extension code snippets given below the main code snippet; they are the delegate functions of AVAudioRecorder and AVAudioPlayer.
import Foundation
import AVFoundation
/// Singleton that records microphone audio to the documents directory
/// and plays recorded files back.
class AudioManager: NSObject {

    static let shared = AudioManager()

    var recordingSession: AVAudioSession?
    var recorder: AVAudioRecorder?
    var meterTimer: Timer?
    // Latest channel-0 metering values, refreshed by `meterTimer`.
    var recorderApc0: Float = 0
    var recorderPeak0: Float = 0
    //PLayer
    var player: AVAudioPlayer?
    var savedFileURL: URL?

    /// Configures the shared audio session for play-and-record (routed
    /// to the speaker) and requests microphone permission.
    func setup() {
        recordingSession = AVAudioSession.sharedInstance()
        do {
            try recordingSession?.setCategory(AVAudioSessionCategoryPlayAndRecord, with: .defaultToSpeaker)
            try recordingSession?.setActive(true)
            recordingSession?.requestRecordPermission({ (allowed) in
                if allowed {
                    print("Mic Authorised")
                } else {
                    print("Mic not Authorised")
                }
            })
        } catch {
            print("Failed to set Category", error.localizedDescription)
        }
    }

    /// Starts recording to `<documents>/<fileName>.m4a`.
    ///
    /// - Parameter fileName: Base name of the file, without extension.
    /// - Returns: true when recording started, false on failure.
    func record(fileName: String) -> Bool {
        setup()
        let url = getUserPath().appendingPathComponent(fileName + ".m4a")
        let audioURL = URL.init(fileURLWithPath: url.path)
        let recordSettings: [String: Any] = [AVFormatIDKey: kAudioFormatMPEG4AAC,
                                             AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue,
                                             AVNumberOfChannelsKey: 2,
                                             AVSampleRateKey: 44100.0]
        do {
            recorder = try AVAudioRecorder.init(url: audioURL, settings: recordSettings)
            recorder?.delegate = self
            recorder?.isMeteringEnabled = true
            recorder?.prepareToRecord()
            recorder?.record()
            // FIX: capture self weakly — the original closure retained
            // the manager strongly for as long as the timer kept firing.
            self.meterTimer = Timer.scheduledTimer(withTimeInterval: 0.1, repeats: true, block: { [weak self] (timer: Timer) in
                //Update Recording Meter Values so we can track voice loudness
                guard let strongSelf = self, let recorder = strongSelf.recorder else { return }
                recorder.updateMeters()
                strongSelf.recorderApc0 = recorder.averagePower(forChannel: 0)
                strongSelf.recorderPeak0 = recorder.peakPower(forChannel: 0)
            })
            savedFileURL = url
            print("Recording")
            return true
        } catch {
            print("Error Handling", error.localizedDescription)
            return false
        }
    }

    /// - Returns: The user's documents directory URL.
    func getUserPath() -> URL {
        return FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
    }

    /// Stops recording and metering.
    ///
    /// - Returns: The recorded file's URL as a string.
    func finishRecording() -> String {
        recorder?.stop()
        self.meterTimer?.invalidate()
        var fileURL: String?
        if let url: URL = recorder?.url {
            fileURL = String(describing: url)
        }
        // NOTE(review): `/` is a project-defined prefix operator, not
        // standard Swift — presumably it unwraps the optional with a
        // default value; confirm its definition before relying on it.
        return /fileURL
    }

    //Player

    /// Creates a player for `url` and starts playback at full volume.
    func setupPlayer(_ url: URL) {
        do {
            try player = AVAudioPlayer.init(contentsOf: url)
        } catch {
            print("Error1", error.localizedDescription)
        }
        player?.prepareToPlay()
        // FIX: assign delegate and volume *before* starting playback so
        // a very short file cannot finish before the delegate is set.
        player?.delegate = self
        player?.volume = 1.0
        player?.play()
    }
}
//MARK:- Audio Recorder Delegate
extension AudioManager: AVAudioRecorderDelegate {
/// Called by AVAudioRecorder when a recording ends (successfully or not).
func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
print("AudioManager Finish Recording")
}
/// Called when the recorder hits an encoding error mid-recording.
/// NOTE(review): `/` is a project-defined prefix operator, not standard
/// Swift — presumably it unwraps the optional with a default; confirm.
func audioRecorderEncodeErrorDidOccur(_ recorder: AVAudioRecorder, error: Error?) {
print("Encoding Error", /error?.localizedDescription)
}
}
//MARK:- Audio Player Delegates
extension AudioManager: AVAudioPlayerDelegate {
/// Called when playback finishes; explicitly stops the player.
func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer,
successfully flag: Bool) {
player.stop()
print("Finish Playing")
}
/// Called when the player hits a decoding error.
/// NOTE(review): `/` is a project-defined prefix operator, not standard
/// Swift — presumably it unwraps the optional with a default; confirm.
func audioPlayerDecodeErrorDidOccur(_ player: AVAudioPlayer,
error: Error?) {
print(/error?.localizedDescription)
}
}