Record And Play Voice in Separate Class (Swift 3)

I have tried many code samples for recording and playing voice, but most of them are not in Swift 3 and don't work in my app.
This code works, but I want to move the recording and playback logic out of the view controller into a separate class. The GitHub code mentioned is also complex, and I'm looking for something simpler.
Update:
After recording, when I check for the recorded file, the file doesn't exist, and an EXC_BAD_ACCESS error is raised on the AppDelegate.
What's wrong?
Any suggestions would be appreciated.
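For reference, here is a quick way to check whether the recorded file actually landed on disk (this assumes the Documents-directory path and the "rec.m4a" file name used in the answer below):
// Hedged check: "rec.m4a" matches the fileName used with record(fileName:) below.
let docs = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
let fileURL = docs.appendingPathComponent("rec.m4a")
print("Recorded file exists:", FileManager.default.fileExists(atPath: fileURL.path))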

Try recording audio by writing the line:
let isRec = AudioManager.shared.record(fileName: "rec")
If isRec returns true, recording is in progress; otherwise it is not.
To finish recording, use: let recordedURL = AudioManager.shared.finishRecording()
To play the recorded file, pass that URL to the setupPlayer() function in the manager class.
Don't forget the extension snippets given below the main class; they are the delegate functions of AVAudioRecorder and AVAudioPlayer.
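For instance, a minimal usage sketch from a view controller might look like this (the button actions are assumptions; the manager calls are the ones defined below):
@IBAction func recordTapped(_ sender: UIButton) {
    let isRec = AudioManager.shared.record(fileName: "rec")
    print(isRec ? "Recording started" : "Recording failed to start")
}
@IBAction func stopTapped(_ sender: UIButton) {
    // finishRecording() returns the file URL as a String (e.g. "file:///...")
    let recordedURL = AudioManager.shared.finishRecording()
    if let url = URL(string: recordedURL) {
        AudioManager.shared.setupPlayer(url) // plays back immediately
    }
}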
import Foundation
import AVFoundation

class AudioManager: NSObject {
    static let shared = AudioManager()
    var recordingSession: AVAudioSession?
    var recorder: AVAudioRecorder?
    var meterTimer: Timer?
    var recorderApc0: Float = 0
    var recorderPeak0: Float = 0
    // Player
    var player: AVAudioPlayer?
    var savedFileURL: URL?

    func setup() {
        recordingSession = AVAudioSession.sharedInstance()
        do {
            try recordingSession?.setCategory(AVAudioSessionCategoryPlayAndRecord, with: .defaultToSpeaker)
            try recordingSession?.setActive(true)
            recordingSession?.requestRecordPermission({ (allowed) in
                if allowed {
                    print("Mic Authorised")
                } else {
                    print("Mic not Authorised")
                }
            })
        } catch {
            print("Failed to set Category", error.localizedDescription)
        }
    }

    func record(fileName: String) -> Bool {
        setup()
        let url = getUserPath().appendingPathComponent(fileName + ".m4a")
        let audioURL = URL.init(fileURLWithPath: url.path)
        let recordSettings: [String: Any] = [AVFormatIDKey: kAudioFormatMPEG4AAC,
                                             AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue,
                                             AVNumberOfChannelsKey: 2,
                                             AVSampleRateKey: 44100.0]
        do {
            recorder = try AVAudioRecorder.init(url: audioURL, settings: recordSettings)
            recorder?.delegate = self
            recorder?.isMeteringEnabled = true
            recorder?.prepareToRecord()
            recorder?.record()
            self.meterTimer = Timer.scheduledTimer(withTimeInterval: 0.1, repeats: true, block: { (timer: Timer) in
                // Update recording meter values so we can track voice loudness
                if let recorder = self.recorder {
                    recorder.updateMeters()
                    self.recorderApc0 = recorder.averagePower(forChannel: 0)
                    self.recorderPeak0 = recorder.peakPower(forChannel: 0)
                }
            })
            savedFileURL = url
            print("Recording")
            return true
        } catch {
            print("Error Handling", error.localizedDescription)
            return false
        }
    }

    func getUserPath() -> URL {
        return FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
    }

    func finishRecording() -> String {
        recorder?.stop()
        self.meterTimer?.invalidate()
        var fileURL: String?
        if let url: URL = recorder?.url {
            fileURL = String(describing: url)
        }
        // The original used a custom `/` unwrap operator here; a standard
        // nil-coalescing default keeps the code compilable without that dependency.
        return fileURL ?? ""
    }

    // Player
    func setupPlayer(_ url: URL) {
        do {
            try player = AVAudioPlayer.init(contentsOf: url)
        } catch {
            print("Error1", error.localizedDescription)
        }
        player?.prepareToPlay()
        player?.play()
        player?.volume = 1.0
        player?.delegate = self
    }
}

// MARK: - Audio Recorder Delegate
extension AudioManager: AVAudioRecorderDelegate {
    func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
        print("AudioManager Finish Recording")
    }
    func audioRecorderEncodeErrorDidOccur(_ recorder: AVAudioRecorder, error: Error?) {
        print("Encoding Error", error?.localizedDescription ?? "unknown")
    }
}

// MARK: - Audio Player Delegates
extension AudioManager: AVAudioPlayerDelegate {
    func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer,
                                     successfully flag: Bool) {
        player.stop()
        print("Finish Playing")
    }
    func audioPlayerDecodeErrorDidOccur(_ player: AVAudioPlayer,
                                        error: Error?) {
        print(error?.localizedDescription ?? "unknown decode error")
    }
}
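A likely cause of the EXC_BAD_ACCESS mentioned in the question's update, worth checking: on iOS 10 and later, the app is terminated if it accesses the microphone without an NSMicrophoneUsageDescription entry in Info.plist. For example:
<key>NSMicrophoneUsageDescription</key>
<string>This app records your voice</string>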

Related

AudioKit crashes when try record after playing in AKPlayer

Here's the problem I have. I have an app that loads sounds from the web and plays them afterwards. At the same time, the app has a feature to record voice and play it back. I use two different view controllers for these two features, and for these tasks I've created a singleton.
When I launch the app and go directly to the Record View Controller first - everything works fine (I can record the sound, play it, then I can go to my second View Controller with my sounds and can play it with no problems too).
However, if I launch the app and play the sounds FIRST (using my second view controller) and after this go to the Record View Controller and try to record something, I get this crash: Terminating app due to uncaught exception 'com.apple.coreaudio.avfaudio', reason: '[[busArray objectAtIndexedSubscript:(NSUInteger)element] setFormat:format error:&nsErr]: returned false, error Error Domain=NSOSStatusErrorDomain Code=-10865 "(null)"'
This crash happens when I tap recordButton, on the line try recorder.record():
func startRecord() {
if AKSettings.headPhonesPlugged {
micBooster.gain = 1
}
micBooster.gain = 0
do {
try recorder.record()
} catch {
print("Can't record because: \(error)")
}
}
This crash happens if I set category .playAndRecord
try audioSession.setCategory(.playAndRecord, mode: .default, options: .mixWithOthers)
My singleton class:
class AudioKitSingleton {
var mic: AKMicrophone!
var micMixer: AKMixer!
var recorder: AKNodeRecorder!
var tape: AKAudioFile!
var player: AKPlayer!
var micBooster: AKBooster!
var mainMixer: AKMixer!
var url: URL?
var name: String?
var artist: String?
var uploader: String?
var performer: String?
var load: Bool = false
var isPlay: Bool = false
var categoryID: Int? = nil
static let shared = AudioKitSingleton()
func initMic() {
stopAudioKitEngine()
AKAudioFile.cleanTempDirectory()
AKSettings.bufferLength = .medium
AKSettings.defaultToSpeaker = true
setupRecordSession()
mic = AKMicrophone()
micMixer = AKMixer(mic)
micBooster = AKBooster(micMixer)
micBooster.gain = 0
recorder = try? AKNodeRecorder(node: micMixer)
if let file = recorder.audioFile {
player = AKPlayer(audioFile: file)
}
player.isLooping = false
mainMixer = AKMixer(player, micBooster)
AudioKit.output = mainMixer
startAudioKitEngine()
}
func deinitMic() {
stopAudioKitEngine()
mic = nil
micMixer = nil
recorder = nil
tape = nil
player = nil
micBooster = nil
mainMixer = nil
}
func startRecord() {
if AKSettings.headPhonesPlugged {
micBooster.gain = 1
}
micBooster.gain = 0
do {
try recorder.record()
} catch {
print("Can't record because: \(error)")
}
}
func stopRecord() {
micBooster.gain = 0
tape = recorder.audioFile!
player.load(audioFile: tape)
player.prepare()
if let _ = player.audioFile?.duration {
recorder.stop()
}
}
func resetRecorder() {
do {
try recorder.reset()
} catch {
print("Can't reset recorder because: \(error)")
}
}
func playerPlayRecord() {
player.play()
}
func playerStopRecord() {
player.stop()
}
func setupRecordSession() {
do {
try audioSession.setCategory(.record, mode: .default, options: .mixWithOthers)
} catch {
print(error)
}
}
func startAudioKitEngine() {
do {
try AudioKit.start()
} catch {
AKLog("AudioKit did not start because: \(error)")
}
}
func stopAudioKitEngine() {
if AudioKit.engine.isRunning {
do {
try AudioKit.stop()
} catch {
AKLog("AudioKit did not start because: \(error)")
}
}
}
func setupPlayer(url: URL) {
if AudioKit.engine.isRunning {
stopAudioKitEngine()
}
player = try AKPlayer(url: url)
AudioKit.output = player
startAudioKitEngine()
}
func setupPlayer(mixloop: AVAudioFile) {
if AudioKit.engine.isRunning {
stopAudioKitEngine()
}
player = try AKPlayer(audioFile: mixloop)
AudioKit.output = player
startAudioKitEngine()
}
func play() {
try player?.play()
}
func resume() {
try player?.resume()
}
func stop() {
player?.stop()
}
func pause() {
player?.pause()
}
func remove() {
if AudioKit.engine.isRunning {
try? AudioKit.stop()
player?.stop()
player = nil
recorder = nil
AudioKit.output = nil
url = nil
name = nil
artist = nil
uploader = nil
load = false
isPlay = false
}
}
}
This crash happens on all iPhones except iPhone 5s, iOS12. Need help.
I'm pretty sure this will be solved in today's AudioKit 4.5.2 release, uploading now.
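In the meantime, one workaround worth trying (an assumption, not a confirmed fix) is to fully deactivate the shared AVAudioSession before switching categories between the playback and recording screens:
// Hedged sketch using only AVAudioSession API; whether this avoids the
// -10865 error in combination with AudioKit is an assumption.
func reconfigureSessionForRecording() {
    let session = AVAudioSession.sharedInstance()
    do {
        try session.setActive(false, options: .notifyOthersOnDeactivation)
        try session.setCategory(.playAndRecord, mode: .default, options: [.defaultToSpeaker, .mixWithOthers])
        try session.setActive(true)
    } catch {
        print("Failed to reconfigure session: \(error)")
    }
}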

Getting image from AVCaptureMovieFileOutput without switching

I would like to prevent lag when the app switches between video recording and photo taking, by using only AVCaptureMovieFileOutput and getting a snapshot from it when an image is captured.
Just like Snapchat does.
Is this possible somehow? I haven't found any related articles about it.
I don't want to switch between outputs, because that causes the lag.
The code:
@IBOutlet var cameraView: UIView!
@IBOutlet var cameraSwitchButton: UIButton!
@IBOutlet var captureButtonView: CaptureButton!
@IBOutlet var cameraFlashButton: UIButton!
var captureSession = AVCaptureSession()
let movieOutput = AVCaptureMovieFileOutput()
var activeInput: AVCaptureDeviceInput!
var previewLayer = AVCaptureVideoPreviewLayer()
var outputURL: URL!
var connection : AVCaptureConnection!
override func viewDidLoad() {
if setupSession() {
setupPreview()
startSession()
connection = movieOutput.connection(with: AVMediaType.video)
if (connection?.isVideoStabilizationSupported)! {
connection?.preferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.off
}
}
let tapGesture = UITapGestureRecognizer(target: self, action: #selector(captureButtonTapped))
let longGesture = UILongPressGestureRecognizer(target: self, action: #selector(captureButtonLongPressed))
tapGesture.numberOfTapsRequired = 1
captureButtonView.addGestureRecognizer(tapGesture)
captureButtonView.addGestureRecognizer(longGesture)
}
@objc func captureButtonTapped(){
?? TAKE PHOTO HERE ??
}
var isRecordingVideo : Bool = false
@objc func captureButtonLongPressed(sender : UILongPressGestureRecognizer){
if sender.state == .began {
isRecordingVideo = true
startRecording()
captureButtonView.startTimer(duration: 10.0)
}
if sender.state == .ended || sender.state == .failed || sender.state == .cancelled {
captureButtonView.clear()
isRecordingVideo = false
stopRecording()
}
}
func setupPreview() {
// Configure previewLayer
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
previewLayer.frame = cameraView.bounds
previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
cameraView.layer.addSublayer(previewLayer)
}
//MARK:- Setup Camera
func setupSession() -> Bool {
captureSession.sessionPreset = AVCaptureSession.Preset.high
// Setup Camera
let camera = AVCaptureDevice.default(for: AVMediaType.video)
do {
let input = try AVCaptureDeviceInput(device: camera!)
if captureSession.canAddInput(input) {
captureSession.addInput(input)
activeInput = input
}
} catch {
print("Error setting device video input: \(error)")
return false
}
// Setup Microphone
let microphone = AVCaptureDevice.default(for: AVMediaType.audio)
do {
let micInput = try AVCaptureDeviceInput(device: microphone!)
if captureSession.canAddInput(micInput) {
captureSession.addInput(micInput)
}
} catch {
print("Error setting device audio input: \(error)")
return false
}
// Movie output
if captureSession.canAddOutput(movieOutput) {
captureSession.addOutput(movieOutput)
}
return true
}
func setupCaptureMode(_ mode: Int) {
}
//MARK:- Camera Session
func startSession() {
if !captureSession.isRunning {
videoQueue().async {
self.captureSession.startRunning()
}
}
}
func stopSession() {
if captureSession.isRunning {
videoQueue().async {
self.captureSession.stopRunning()
}
}
}
func videoQueue() -> DispatchQueue {
return DispatchQueue.main
}
func currentVideoOrientation() -> AVCaptureVideoOrientation {
var orientation: AVCaptureVideoOrientation
switch UIDevice.current.orientation {
case .portrait:
orientation = AVCaptureVideoOrientation.portrait
case .landscapeRight:
orientation = AVCaptureVideoOrientation.landscapeLeft
case .portraitUpsideDown:
orientation = AVCaptureVideoOrientation.portraitUpsideDown
default:
orientation = AVCaptureVideoOrientation.landscapeRight
}
return orientation
}
func startCapture() {
startRecording()
}
func tempURL() -> URL? {
let directory = NSTemporaryDirectory() as NSString
if directory != "" {
let path = directory.appendingPathComponent(NSUUID().uuidString + ".mp4")
return URL(fileURLWithPath: path)
}
return nil
}
func startRecording() {
if movieOutput.isRecording == false {
if (connection?.isVideoOrientationSupported)! {
connection?.videoOrientation = currentVideoOrientation()
}
let device = activeInput.device
if (device.isSmoothAutoFocusSupported) {
do {
try device.lockForConfiguration()
device.isSmoothAutoFocusEnabled = false
device.unlockForConfiguration()
} catch {
print("Error setting configuration: \(error)")
}
}
outputURL = tempURL()
movieOutput.startRecording(to: outputURL, recordingDelegate: self)
}
else {
stopRecording()
}
}
func stopRecording() {
if movieOutput.isRecording == true {
movieOutput.stopRecording()
}
}
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
if (error != nil) {
print("Error recording movie: \(error!.localizedDescription)")
} else {
UISaveVideoAtPathToSavedPhotosAlbum(outputURL.path, nil, nil, nil)
_ = outputURL as URL
}
outputURL = nil
}
I wasn't able to find a way using only AVCaptureMovieFileOutput, however you can add an additional photo output and trigger photos without having to switch between the outputs.
I'm short on time at the moment but this should get you going till I can edit with more info.
(See EDIT with full implementation below, and limited force unwrapping)
First off, set up an additional var for a photo output in your view controller:
// declare an additional camera output var
var cameraOutput = AVCapturePhotoOutput()
// do this in your 'setupSession' func where you setup your movie output
cameraOutput.isHighResolutionCaptureEnabled = true
captureSession.addOutput(cameraOutput)
Declare a function to capture your photo using the cameraOutput:
func capturePhoto() {
// create settings for your photo capture
let settings = AVCapturePhotoSettings()
let previewPixelType = settings.availablePreviewPhotoPixelFormatTypes.first!
let previewFormat = [
kCVPixelBufferPixelFormatTypeKey as String: previewPixelType,
kCVPixelBufferWidthKey as String: UIScreen.main.bounds.size.width,
kCVPixelBufferHeightKey as String: UIScreen.main.bounds.size.height
] as [String : Any]
settings.previewPhotoFormat = previewFormat
cameraOutput.capturePhoto(with: settings, delegate: self)
}
and conform to the AVCapturePhotoCaptureDelegate.
I created a separate class called VideoFeed to manage the video capture session, so this sample is an extension of that class. I'll update with more info on this later.
The loadImage(data: Data) function calls a delegate with the image. You can ignore that call if you put this directly in your view controller, and save or do whatever you like with the generated photo:
extension VideoFeed: AVCapturePhotoCaptureDelegate {
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photoSampleBuffer: CMSampleBuffer?, previewPhoto previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) {
guard error == nil else {
print("Photo Error: \(String(describing: error))")
return
}
guard let sampleBuffer = photoSampleBuffer,
let previewBuffer = previewPhotoSampleBuffer,
let outputData = AVCapturePhotoOutput
.jpegPhotoDataRepresentation(forJPEGSampleBuffer: sampleBuffer, previewPhotoSampleBuffer: previewBuffer) else {
print("Oops, unable to create jpeg image")
return
}
print("captured photo...")
loadImage(data: outputData)
}
func loadImage(data: Data) {
let dataProvider = CGDataProvider(data: data as CFData)
let cgImageRef: CGImage! = CGImage(jpegDataProviderSource: dataProvider!, decode: nil, shouldInterpolate: true, intent: .defaultIntent)
let image = UIImage(cgImage: cgImageRef, scale: 1.0, orientation: UIImageOrientation.right)
// do whatever you like with the generated image here...
delegate?.processVideoSnapshot(image)
}
}
EDIT:
Here's the complete implementation I used in my test project.
First I moved all the AVFoundation-specific code into its own VideoFeed class and created some callbacks to the view controller.
This separates concerns and limits the view controller's responsibilities to:
Adding the preview layer to the view
Triggering and handling the captured image/screenshot
Starting/stopping video file recording.
Here's the ViewController implementation:
ViewController.swift
import UIKit
import AVFoundation
class ViewController: UIViewController, VideoFeedDelegate {
#IBOutlet var cameraView: UIView!
var videoFeed: VideoFeed?
override func viewDidLoad() {
super.viewDidLoad()
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
// end session
videoFeed?.stopSession()
}
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
// request camera access
AVCaptureDevice.requestAccess(for: AVMediaType.video) { [weak self] granted in
guard granted != false else {
// TODO: show UI stating camera cannot be used, update in settings app...
print("Camera access denied")
return
}
DispatchQueue.main.async {
if self?.videoFeed == nil {
// video access was enabled so setup video feed
self?.videoFeed = VideoFeed(delegate: self)
} else {
// video feed already available, restart session...
self?.videoFeed?.startSession()
}
}
}
}
// MARK: VideoFeedDelegate
func videoFeedSetup(with layer: AVCaptureVideoPreviewLayer) {
// set the layer size
layer.frame = cameraView.layer.bounds
// add to view
cameraView.layer.addSublayer(layer)
}
func processVideoSnapshot(_ image: UIImage?) {
// validate
guard let image = image else {
return
}
// SAVE IMAGE HERE IF DESIRED
// for now just showing in a lightbox/detail view controller
let storyboard = UIStoryboard(name: "Main", bundle: Bundle(for: AppDelegate.self))
let vc = storyboard.instantiateViewController(withIdentifier: "LightboxViewController") as! LightboxViewController
vc.previewImage = image
navigationController?.pushViewController(vc, animated: true)
}
@IBAction func captureButtonTapped(_ sender: Any){
// trigger photo capture from video feed...
// this will trigger a callback to the function above with the captured image
videoFeed?.capturePhoto()
}
}
Here's the full implementation of the VideoFeed class.
Using this approach allows you to reuse the video functionality in other projects more easily without having it tightly coupled to the view controller.
VideoFeed.swift
import UIKit
import AVFoundation
/// Defines callbacks associated with the VideoFeed class. Notifies delegate of significant events.
protocol VideoFeedDelegate: class {
/// Callback triggered when the preview layer for this class has been created and configured. Conforming objects should set and maintain a strong reference to this layer otherwise it will be set to nil when the calling function finishes execution.
///
/// - Parameter layer: The video preview layer associated with the active captureSession in the VideoFeed class.
func videoFeedSetup(with layer: AVCaptureVideoPreviewLayer)
/// Callback triggered when a snapshot of the video feed has been generated.
///
/// - Parameter image: The snapshot image generated from the video feed, or nil if generation failed.
func processVideoSnapshot(_ image: UIImage?)
}
class VideoFeed: NSObject {
// MARK: Variables
/// The capture session to be used in this class.
var captureSession = AVCaptureSession()
/// The preview layer associated with this session. This class has a
/// weak reference to this layer, the delegate (usually a ViewController
/// instance) should add this layer as a sublayer to its preview UIView.
/// The delegate will have the strong reference to this preview layer.
weak var previewLayer: AVCaptureVideoPreviewLayer?
/// The output that handles saving the video stream to a file.
var fileOutput: AVCaptureMovieFileOutput?
/// A reference to the active video input
var activeInput: AVCaptureDeviceInput?
/// Output for capturing frame grabs of video feed
var cameraOutput = AVCapturePhotoOutput()
/// Delegate to receive callbacks about significant events triggered by this class.
weak var delegate: VideoFeedDelegate?
/// The capture connection associated with the fileOutput.
/// Set when fileOutput is created.
var connection : AVCaptureConnection?
// MARK: Public accessors
/// Public initializer. Accepts a delegate to receive callbacks with the preview layer and any snapshot images.
///
/// - Parameter delegate: A reference to an object conforming to VideoFeedDelegate
/// to receive callbacks for significant events in this class.
init(delegate: VideoFeedDelegate?) {
self.delegate = delegate
super.init()
setupSession()
}
/// Public accessor to begin a capture session.
public func startSession() {
guard captureSession.isRunning == false else {
return
}
captureSession.startRunning()
}
/// Public accessor to end the current capture session.
public func stopSession() {
// validate
guard captureSession.isRunning else {
return
}
// end file recording if the session ends and we're currently recording a video to file
if let isRecording = fileOutput?.isRecording, isRecording {
stopRecording()
}
captureSession.stopRunning()
}
/// Public accessor to begin file recording.
public func startRecording() {
guard fileOutput?.isRecording == false else {
stopRecording()
return
}
configureVideoOrientation()
disableSmoothAutoFocus()
guard let url = tempURL() else {
print("Unable to start file recording, temp url generation failed.")
return
}
fileOutput?.startRecording(to: url, recordingDelegate: self)
}
/// Public accessor to end file recording.
public func stopRecording() {
guard fileOutput?.isRecording == true else {
return
}
fileOutput?.stopRecording()
}
/// Public accessor to trigger snapshot capture of video stream.
public func capturePhoto() {
// create settings object
let settings = AVCapturePhotoSettings()
// verify that we have a pixel format type available
guard let previewPixelType = settings.availablePreviewPhotoPixelFormatTypes.first else {
print("Unable to configure photo capture settings, 'availablePreviewPhotoPixelFormatTypes' has no available options.")
return
}
let screensize = UIScreen.main.bounds.size
// setup format configuration dictionary
let previewFormat: [String : Any] = [
kCVPixelBufferPixelFormatTypeKey as String: previewPixelType,
kCVPixelBufferWidthKey as String: screensize.width,
kCVPixelBufferHeightKey as String: screensize.height
]
settings.previewPhotoFormat = previewFormat
// trigger photo capture
cameraOutput.capturePhoto(with: settings, delegate: self)
}
// MARK: Setup functions
/// Handles configuration and setup of the session, inputs, video preview layer and outputs.
/// If all are setup and configured it starts the session.
internal func setupSession() {
captureSession.sessionPreset = AVCaptureSession.Preset.high
guard setupInputs() else {
return
}
setupOutputs()
setupVideoLayer()
startSession()
}
/// Sets up capture inputs for this session.
///
/// - Returns: Returns true if inputs are successfully setup, else false.
internal func setupInputs() -> Bool {
// only need access to this functionality within this function, so declare as sub-function
func addInput(input: AVCaptureInput) {
guard captureSession.canAddInput(input) else {
return
}
captureSession.addInput(input)
}
do {
if let camera = AVCaptureDevice.default(for: AVMediaType.video) {
let input = try AVCaptureDeviceInput(device: camera)
addInput(input: input)
activeInput = input
}
// Setup Microphone
if let microphone = AVCaptureDevice.default(for: AVMediaType.audio) {
let micInput = try AVCaptureDeviceInput(device: microphone)
addInput(input: micInput)
}
return true
} catch {
print("Error setting device video input: \(error)")
return false
}
}
internal func setupOutputs() {
// only need access to this functionality within this function, so declare as sub-function
func addOutput(output: AVCaptureOutput) {
if captureSession.canAddOutput(output) {
captureSession.addOutput(output)
}
}
// file output
let fileOutput = AVCaptureMovieFileOutput()
addOutput(output: fileOutput)
// keep a reference on the class, otherwise the 'fileOutput?.isRecording' guards never pass
self.fileOutput = fileOutput
if let connection = fileOutput.connection(with: .video), connection.isVideoStabilizationSupported {
connection.preferredVideoStabilizationMode = .off
self.connection = connection
}
cameraOutput.isHighResolutionCaptureEnabled = true
addOutput(output: cameraOutput)
}
internal func setupVideoLayer() {
let layer = AVCaptureVideoPreviewLayer(session: captureSession)
layer.videoGravity = AVLayerVideoGravity.resizeAspectFill
delegate?.videoFeedSetup(with: layer)
previewLayer = layer
}
// MARK: Helper functions
/// Creates a url in the temporary directory for file recording.
///
/// - Returns: A file url if successful, else nil.
internal func tempURL() -> URL? {
let directory = NSTemporaryDirectory() as NSString
if directory != "" {
let path = directory.appendingPathComponent(NSUUID().uuidString + ".mp4")
return URL(fileURLWithPath: path)
}
return nil
}
/// Disables smooth autofocus functionality on the active device,
/// if the active device is set and 'isSmoothAutoFocusSupported'
/// is supported for the currently set active device.
internal func disableSmoothAutoFocus() {
guard let device = activeInput?.device, device.isSmoothAutoFocusSupported else {
return
}
do {
try device.lockForConfiguration()
device.isSmoothAutoFocusEnabled = false
device.unlockForConfiguration()
} catch {
print("Error disabling smooth autofocus: \(error)")
}
}
/// Sets the current AVCaptureVideoOrientation on the currently active connection if it's supported.
internal func configureVideoOrientation() {
guard let connection = connection, connection.isVideoOrientationSupported,
let currentOrientation = AVCaptureVideoOrientation(rawValue: UIApplication.shared.statusBarOrientation.rawValue) else {
return
}
connection.videoOrientation = currentOrientation
}
}
// MARK: AVCapturePhotoCaptureDelegate
extension VideoFeed: AVCapturePhotoCaptureDelegate {
// iOS 11+ processing
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
guard error == nil, let outputData = photo.fileDataRepresentation() else {
print("Photo Error: \(String(describing: error))")
return
}
print("captured photo...")
loadImage(data: outputData)
}
// iOS < 11 processing
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photoSampleBuffer: CMSampleBuffer?, previewPhoto previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) {
if #available(iOS 11.0, *) {
// use iOS 11-only feature
// nothing to do here as iOS 11 uses the callback above
} else {
guard error == nil else {
print("Photo Error: \(String(describing: error))")
return
}
guard let sampleBuffer = photoSampleBuffer,
let previewBuffer = previewPhotoSampleBuffer,
let outputData = AVCapturePhotoOutput
.jpegPhotoDataRepresentation(forJPEGSampleBuffer: sampleBuffer, previewPhotoSampleBuffer: previewBuffer) else {
print("Image creation from sample buffer/preview buffer failed.")
return
}
print("captured photo...")
loadImage(data: outputData)
}
}
/// Creates a UIImage from Data object received from AVCapturePhotoOutput
/// delegate callback and sends to the VideoFeedDelegate for handling.
///
/// - Parameter data: Image data.
internal func loadImage(data: Data) {
guard let dataProvider = CGDataProvider(data: data as CFData), let cgImageRef: CGImage = CGImage(jpegDataProviderSource: dataProvider, decode: nil, shouldInterpolate: true, intent: .defaultIntent) else {
return
}
let image = UIImage(cgImage: cgImageRef, scale: 1.0, orientation: UIImageOrientation.right)
delegate?.processVideoSnapshot(image)
}
}
extension VideoFeed: AVCaptureFileOutputRecordingDelegate {
func fileOutput(_ output: AVCaptureFileOutput, didStartRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) {
print("Video recording started: \(fileURL.absoluteString)")
}
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
guard error == nil else {
print("Error recording movie: \(String(describing: error))")
return
}
UISaveVideoAtPathToSavedPhotosAlbum(outputFileURL.path, nil, nil, nil)
}
}
For anyone else making use of this, don't forget to add permissions to your info.plist for access to the camera, photo library and microphone.
<key>NSCameraUsageDescription</key>
<string>Let us use your camera</string>
<key>NSPhotoLibraryAddUsageDescription</key>
<string>save to images</string>
<key>NSMicrophoneUsageDescription</key>
<string>for sound in video</string>

AVAudioRecorder never records

I am using an AVAudioRecorder to record human voice. My code is as follows:
// Property of Class
var recorder:AVAudioRecorder?
func recordButtonTapped() {
let audioSession = AVAudioSession.sharedInstance()
do {
try audioSession.setCategory(AVAudioSessionCategoryRecord)
try audioSession.setActive(true)
if audioSession.recordPermission() != .granted {
audioSession.requestRecordPermission({ (success) in
self.startRecording()
})
}
else {
self.startRecording()
}
} catch {
print("Unable To Set Category")
}
}
func startRecording() {
// libraryPathWith(media) just gets the path to the documents directory
// Like so: Documents/MediaLibrary/Audio/<mediaID>.<mediaExtension>
if let path = MMFileManager.libraryPathWith(media: self.media) {
isRecording = true
do {
let settings = [
AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
AVSampleRateKey: 44100,
AVNumberOfChannelsKey: 1,
AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue
]
recorder = try AVAudioRecorder(url: path, settings: settings)
recorder?.delegate = self
if recorder!.prepareToRecord() {
recorder?.record()
}
}
catch {
isRecording = false
}
}
}
func stopRecording() {
self.recordingLabel.text = "Recording Complete"
self.recordingLabel.textColor = UIColor.white
if let rec = recorder {
rec.stop()
recorder = nil
}
isRecording = false
}
AVAudioRecorderDelegate
func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
print("RECORDED AUDIO SUCCESSFULLY \(flag)")
}
func audioRecorderEncodeErrorDidOccur(_ recorder: AVAudioRecorder, error: Error?) {
print("AUDIO RECORDER ERROR \(error?.localizedDescription)")
}
After I call stop on the AVAudioRecorder, the audioRecorderEncodeErrorDidOccur function never gets called, but the audioRecorderDidFinishRecording function does, and the flag is always false. It prints out "RECORDED AUDIO SUCCESSFULLY false".
QUESTION
When I record using the code above, it does save a file to my documents directory at the location specified. But this file is not something that I can play. It writes a text file, not an audio file, even though I specify the extension to be .aac.
Why does the AVAudioRecorder not record audio? And how do I get it to do so?
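One thing worth noting in recordButtonTapped above, as an aside: the requestRecordPermission callback starts recording even when the user denies access. A hedged sketch that honors the result:
// Hedged sketch: only start recording once permission is actually granted.
audioSession.requestRecordPermission({ (granted) in
    DispatchQueue.main.async {
        if granted {
            self.startRecording()
        } else {
            print("Microphone permission denied")
        }
    }
})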
This is how I did it. First, import
AVFoundation
and add the AVAudioRecorderDelegate to your ViewController:
class RecordViewController: UIViewController, AVAudioRecorderDelegate
then create a global instance of the AVAudioRecorder:
var audioRecorder : AVAudioRecorder!
Then I created a record button that starts the recording:
@IBAction func playButton(_ sender: Any) {
let dirPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] as String
let recordingName = "voiceRecording.wav"
let pathArray = [dirPath, recordingName]
let filePath = URL(string: pathArray.joined(separator: "/"))
let session = AVAudioSession.sharedInstance()
try! session.setCategory(AVAudioSessionCategoryPlayAndRecord, with: .defaultToSpeaker)
try! audioRecorder = AVAudioRecorder(url: filePath!, settings: [:])
audioRecorder.delegate = self
audioRecorder.isMeteringEnabled = true
audioRecorder.prepareToRecord()
audioRecorder.record()
}
dirPath finds the directory where the recording will be stored.
recordingName sets the name of the actual recorded file.
filePath combines the directory and recordingName into the final location.
The rest is pretty much self-explanatory.
Then create a pause button which is simpler:
@IBAction func pauseButton(_ sender: Any) {
audioRecorder.stop()
let audioSession = AVAudioSession.sharedInstance()
try! audioSession.setActive(false)
}
This should solve how you record the audio.
If you've verified your url has the "aac" extension, then I suspect you simply forgot to call stop() on your recorder. This results in an un-finalized file.
Also print your errors in the catch block.
do{
try throwingFunc()
} catch {
print(error)
}
The issue was with a line in the stopRecording() function. Immediately below the call to stop() the recorder, I assign the AVAudioRecorder instance to nil. This deallocates and ends the AVAudioRecorder before the post-processing that creates the finalized .aac file can complete.
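A hedged sketch of that fix: keep the recorder alive until the delegate confirms the file has been finalized, and only then release it.
func stopRecording() {
    self.recordingLabel.text = "Recording Complete"
    self.recordingLabel.textColor = UIColor.white
    recorder?.stop()    // triggers finalization; the delegate fires when done
    isRecording = false
    // do NOT nil out the recorder here
}

func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
    print("RECORDED AUDIO SUCCESSFULLY \(flag)")
    self.recorder = nil // safe to release now that the file is written
}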

Swift capture video to different paths

I'm trying to capture video using Swift. I want to capture the video into a new file every x seconds.
Here is my current code:
var i = 0
override func viewDidLoad() {
super.viewDidLoad()
if setupSession() {
setupPreview()
startSession()
}
}
func setupSession() -> Bool {
...
return true
}
func startSession() {
...
}
func tempURL() -> URL? {
let directory = NSTemporaryDirectory() as NSString
if directory != "" {
i = i+1
old_cur_videoID = cur_videoID_system
cur_videoID_system = "\(cur_videoID)_\(i).mp4"
let path = directory.appendingPathComponent(cur_videoID_system)
return URL(fileURLWithPath: path)
}
return nil
}
func startRecording() {
let delayTime = DispatchTime.now() + 2
DispatchQueue.main.asyncAfter(deadline: delayTime) {
self.startRecording()
}
outputURL = tempURL()
movieOutput.startRecording(toOutputFileURL: outputURL, recordingDelegate: self)
}
And the two delegate functions:
func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {
}
func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
if (error != nil) {
print("Error recording movie: \(error!.localizedDescription)")
} else {
if(i > 1){
saveOnServer()
}
}
outputURL = nil
}
If I try to run the code, I get the following error message:
Error recording movie: Movie recording cannot be started
Can somebody help me with my problem? If more code is necessary or some parts are unclear, please feel free to ask.
Thanks a lot!
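One plausible cause, for what it's worth: startRecording() re-arms itself every two seconds and calls movieOutput.startRecording(toOutputFileURL:) while the previous segment is still being written, which AVCaptureMovieFileOutput rejects. A hedged sketch of chaining segments from the delegate instead, reusing the names from the code above:
var shouldKeepSegmenting = true

func startNextSegment() {
    guard !movieOutput.isRecording else { return }
    outputURL = tempURL()
    movieOutput.startRecording(toOutputFileURL: outputURL, recordingDelegate: self)
    // stop this segment after x seconds; the delegate starts the next one
    DispatchQueue.main.asyncAfter(deadline: .now() + 2) {
        self.movieOutput.stopRecording()
    }
}

func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
    if error != nil {
        print("Error recording movie: \(error!.localizedDescription)")
    } else if i > 1 {
        saveOnServer() // upload the finished segment (as in the question)
    }
    if shouldKeepSegmenting {
        startNextSegment() // begin the next segment only after this one is finalized
    }
}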

AVPlayer audio buffering in swift 3 source disconnected observer

I have an app that plays an AAC audio stream. Everything works fine, but when I disconnect the stream and connect again after one second, audio stops playing after half a minute. When I don't reconnect, I get an error after one to two minutes. To reconnect I must stop the AVPlayer and start it again. I want to reconnect the stream or show a message immediately after the player stops playing music. How can I do that? Moreover, I have another question: I converted my code to Swift 3 and I have a problem with one line:
fileprivate var playerItem = AVPlayerItem?()
error: cannot invoke initializer without argument
How can I fix that? Maybe this is the problem?
My Radio Player class:
import Foundation
import AVFoundation
import UIKit
protocol errorMessageDelegate {
func errorMessageChanged(_ newVal: String)
}
protocol sharedInstanceDelegate {
func sharedInstanceChanged(_ newVal: Bool)
}
class RadioPlayer : NSObject {
static let sharedInstance = RadioPlayer()
var instanceDelegate:sharedInstanceDelegate? = nil
var sharedInstanceBool = false {
didSet {
if let delegate = self.instanceDelegate {
delegate.sharedInstanceChanged(self.sharedInstanceBool)
}
}
}
fileprivate var player = AVPlayer(url: URL(string: Globals.radioURL)!)
// fileprivate var playerItem = AVPlayerItem?()
fileprivate var isPlaying = false
var errorDelegate:errorMessageDelegate? = nil
var errorMessage = "" {
didSet {
if let delegate = self.errorDelegate {
delegate.errorMessageChanged(self.errorMessage)
}
}
}
override init() {
super.init()
errorMessage = ""
let asset: AVURLAsset = AVURLAsset(url: URL(string: Globals.radioURL)!, options: nil)
let statusKey = "tracks"
asset.loadValuesAsynchronously(forKeys: [statusKey], completionHandler: {
var error: NSError? = nil
DispatchQueue.main.async(execute: {
let status: AVKeyValueStatus = asset.statusOfValue(forKey: statusKey, error: &error)
if status == AVKeyValueStatus.loaded{
let playerItem = AVPlayerItem(asset: asset)
self.player = AVPlayer(playerItem: playerItem)
self.sharedInstanceBool = true
} else {
self.errorMessage = error!.localizedDescription
print(error!)
}
})
})
NotificationCenter.default.addObserver(
forName: NSNotification.Name.AVPlayerItemFailedToPlayToEndTime,
object: nil,
queue: nil,
using: { notification in
print("Status: Failed to continue")
self.errorMessage = NSLocalizedString("STREAM_INTERUPT", comment:"Stream was interrupted")
})
print("Initializing new player")
}
func resetPlayer() {
errorMessage = ""
let asset: AVURLAsset = AVURLAsset(url: URL(string: Globals.radioURL)!, options: nil)
let statusKey = "tracks"
asset.loadValuesAsynchronously(forKeys: [statusKey], completionHandler: {
var error: NSError? = nil
DispatchQueue.main.async(execute: {
let status: AVKeyValueStatus = asset.statusOfValue(forKey: statusKey, error: &error)
if status == AVKeyValueStatus.loaded{
let playerItem = AVPlayerItem(asset: asset)
// playerItem.addObserver(self, forKeyPath: "status", options: NSKeyValueObservingOptions.New, context: &ItemStatusContext)
self.player = AVPlayer(playerItem: playerItem)
self.sharedInstanceBool = true
} else {
self.errorMessage = error!.localizedDescription
print(error!)
}
})
})
}
func bufferFull() -> Bool {
return bufferAvailableSeconds() > 45.0
}
func bufferAvailableSeconds() -> TimeInterval {
// Check if there is a player instance
if ((player.currentItem) != nil) {
// Get current AVPlayerItem
let item: AVPlayerItem = player.currentItem!
if (item.status == AVPlayerItemStatus.readyToPlay) {
let timeRangeArray: NSArray = item.loadedTimeRanges as NSArray
if timeRangeArray.count < 1 { return(CMTimeGetSeconds(kCMTimeInvalid)) }
let aTimeRange: CMTimeRange = (timeRangeArray.object(at: 0) as AnyObject).timeRangeValue
// let startTime = CMTimeGetSeconds(aTimeRange.end)
let loadedDuration = CMTimeGetSeconds(aTimeRange.duration)
return (TimeInterval)(loadedDuration);
}
else {
return(CMTimeGetSeconds(kCMTimeInvalid))
}
}
else {
return(CMTimeGetSeconds(kCMTimeInvalid))
}
}
func play() {
player.play()
isPlaying = true
print("Radio is \(isPlaying ? "" : "not ")playing")
}
func pause() {
player.pause()
isPlaying = false
print("Radio is \(isPlaying ? "" : "not ")playing")
}
func currentlyPlaying() -> Bool {
return isPlaying
}
}
I will be grateful for help ;)
For the second issue, fileprivate var playerItem = AVPlayerItem?():
write it as fileprivate var playerItem: AVPlayerItem? and it should work.
For the first issue:
when I disconnect the stream and connect again after one second, audio
stops playing after half a minute. When I don't reconnect, I get an error
after one to two minutes. To reconnect I must stop the AVPlayer and start
it again. I want to reconnect the stream or show a message immediately
after the player stops playing music. How can I do that?
I don't get what's wrong. You pause the player by pressing the button, then you press the button again, and after one to two minutes it stops by itself?
I have tested the same class today and it works just fine, even after the connection to the server is lost (when the connection resumes you can click the play button and it will play).
I'll leave you my code here, give it a try:
import Foundation
import AVFoundation
import UIKit
protocol errorMessageDelegate {
func errorMessageChanged(newVal: String)
}
protocol sharedInstanceDelegate {
func sharedInstanceChanged(newVal: Bool)
}
class RadioPlayer : NSObject {
static let sharedInstance = RadioPlayer()
var instanceDelegate:sharedInstanceDelegate? = nil
var sharedInstanceBool = false {
didSet {
if let delegate = self.instanceDelegate {
delegate.sharedInstanceChanged(newVal: self.sharedInstanceBool)
}
}
}
private var player = AVPlayer(url: NSURL(string: "<# YOUR STREAM HERE #>")! as URL)
private var playerItem: AVPlayerItem?
private var isPlaying = false
var errorDelegate:errorMessageDelegate? = nil
var errorMessage = "" {
didSet {
if let delegate = self.errorDelegate {
delegate.errorMessageChanged(newVal: self.errorMessage)
}
}
}
override init() {
super.init()
errorMessage = ""
let asset: AVURLAsset = AVURLAsset(url: NSURL(string: "<# YOUR STREAM HERE #>")! as URL, options: nil)
let statusKey = "tracks"
asset.loadValuesAsynchronously(forKeys: [statusKey], completionHandler: {
var error: NSError? = nil
DispatchQueue.main.async(execute: {
let status: AVKeyValueStatus = asset.statusOfValue(forKey: statusKey, error: &error)
if status == AVKeyValueStatus.loaded{
let playerItem = AVPlayerItem(asset: asset)
self.player = AVPlayer(playerItem: playerItem)
self.sharedInstanceBool = true
} else {
self.errorMessage = error!.localizedDescription
print(error!)
}
})
})
NotificationCenter.default.addObserver(
forName: NSNotification.Name.AVPlayerItemFailedToPlayToEndTime,
object: nil,
queue: nil,
using: { notification in
print("Status: Failed to continue")
self.errorMessage = "Stream was interrupted"
})
print("Initializing new player")
}
func resetPlayer() {
errorMessage = ""
let asset: AVURLAsset = AVURLAsset(url: NSURL(string: "<# YOUR STREAM HERE #>")! as URL, options: nil)
let statusKey = "tracks"
asset.loadValuesAsynchronously(forKeys: [statusKey], completionHandler: {
var error: NSError? = nil
DispatchQueue.main.async(execute: {
let status: AVKeyValueStatus = asset.statusOfValue(forKey: statusKey, error: &error)
if status == AVKeyValueStatus.loaded{
let playerItem = AVPlayerItem(asset: asset)
//playerItem.addObserver(self, forKeyPath: "status", options: NSKeyValueObservingOptions.New, context: &ItemStatusContext)
self.player = AVPlayer(playerItem: playerItem)
self.sharedInstanceBool = true
} else {
self.errorMessage = error!.localizedDescription
print(error!)
}
})
})
}
func bufferFull() -> Bool {
return bufferAvailableSeconds() > 45.0
}
func bufferAvailableSeconds() -> TimeInterval {
// Check if there is a player instance
if ((player.currentItem) != nil) {
// Get current AVPlayerItem
let item: AVPlayerItem = player.currentItem!
if (item.status == AVPlayerItemStatus.readyToPlay) {
let timeRangeArray: NSArray = item.loadedTimeRanges as NSArray
if timeRangeArray.count < 1 { return(CMTimeGetSeconds(kCMTimeInvalid)) }
let aTimeRange: CMTimeRange = (timeRangeArray.object(at: 0) as AnyObject).timeRangeValue
//let startTime = CMTimeGetSeconds(aTimeRange.end)
let loadedDuration = CMTimeGetSeconds(aTimeRange.duration)
return (TimeInterval)(loadedDuration)
}
else {
return(CMTimeGetSeconds(kCMTimeInvalid))
}
}
else {
return(CMTimeGetSeconds(kCMTimeInvalid))
}
}
func play() {
player.play()
isPlaying = true
print("Radio is \(isPlaying ? "" : "not ")playing")
}
func pause() {
player.pause()
isPlaying = false
print("Radio is \(isPlaying ? "" : "not ")playing")
}
func currentlyPlaying() -> Bool {
return isPlaying
}
}
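For the first issue, detecting the stop immediately: one option, as a hedged sketch, is to also observe AVPlayerItemPlaybackStalled (in addition to the failed-to-play notification above) and reset the player or surface a message as soon as the stream stalls:
// Hedged sketch: react as soon as playback stalls instead of waiting
// for the failure notification one to two minutes later.
NotificationCenter.default.addObserver(
    forName: NSNotification.Name.AVPlayerItemPlaybackStalled,
    object: nil,
    queue: nil,
    using: { notification in
        print("Status: Playback stalled")
        // reset or show a message here, e.g.:
        RadioPlayer.sharedInstance.resetPlayer()
    })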