Audio from MPMoviePlayerController continues playing after segue to next view controller - Swift

After adding a video file, the audio from the video preview keeps playing even after I tap the button that segues to the next view controller.
Is there any way to stop the audio once the segue is triggered?
var objMoviePlayerController: MPMoviePlayerController = MPMoviePlayerController()
var urlVideo: NSURL = NSURL()

@IBOutlet weak var videoprofileView: UIImageView!
@IBOutlet weak var addvideoBtn: UIButton!

override func viewDidLoad() {
    super.viewDidLoad()
    var theWidth = view.frame.size.width
    var theHeight = view.frame.size.height
    videoprofileView.frame = CGRectMake(0, 50, theWidth, theWidth)
    addvideoBtn.center = CGPointMake(theWidth/2, self.videoprofileView.frame.maxY + 50)
}

@IBAction func addvideoBtn_click(sender: AnyObject) {
    var ipcVideo = UIImagePickerController()
    ipcVideo.delegate = self
    ipcVideo.sourceType = UIImagePickerControllerSourceType.PhotoLibrary
    ipcVideo.allowsEditing = true
    ipcVideo.videoMaximumDuration = 15
    var kUTTypeMovieAnyObject: AnyObject = kUTTypeMovie as AnyObject
    ipcVideo.mediaTypes = [kUTTypeMovieAnyObject]
    self.presentViewController(ipcVideo, animated: true, completion: nil)
}

func imagePickerController(picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [NSObject : AnyObject]) {
    urlVideo = (info as NSDictionary).objectForKey(UIImagePickerControllerMediaURL) as! NSURL
    self.dismissViewControllerAnimated(true, completion: nil)
    objMoviePlayerController = MPMoviePlayerController(contentURL: urlVideo)
    objMoviePlayerController.movieSourceType = MPMovieSourceType.Unknown
    objMoviePlayerController.view.frame = self.videoprofileView.bounds
    objMoviePlayerController.scalingMode = MPMovieScalingMode.AspectFill
    objMoviePlayerController.controlStyle = MPMovieControlStyle.None
    objMoviePlayerController.shouldAutoplay = true
    videoprofileView.addSubview(objMoviePlayerController.view)
    objMoviePlayerController.prepareToPlay()
    objMoviePlayerController.play()
}

@IBAction func next_click(sender: AnyObject) {
    let data: NSData = NSData(contentsOfURL: urlVideo)!
    let file = PFFile(name: "video.mp4", data: data)
    var currentUser = PFUser.currentUser()!
    currentUser["video"] = file
    currentUser.saveInBackgroundWithBlock({
        (succeeded: Bool, error: NSError?) -> Void in
        if error == nil {
            println("video saved")
        } else {
            println("couldn't save video")
        }
    })
}

Tell your video to stop before segueing to the next view controller.
override func prepareForSegue(segue: UIStoryboardSegue?, sender: AnyObject?) {
    objMoviePlayerController.stop()
}
stop() is documented in the MPMediaPlayback Protocol Reference.
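As a hedged follow-up (not part of the original answer): if the user can leave this screen by any route other than that segue, it may be safer to also stop the player whenever the view disappears and to remove its view so nothing keeps playing off-screen. A minimal sketch in the same Swift 1.x/2 style as the question:
override func viewWillDisappear(animated: Bool) {
    super.viewWillDisappear(animated)
    // Stop playback and tear down the preview no matter how the user leaves.
    objMoviePlayerController.stop()
    objMoviePlayerController.view.removeFromSuperview()
}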

Related

Recording and playing back audio working on simulator but not on real iPhone device [duplicate]

On the "soundPlayer.play()" line below I get an error:
Fatal error: Unexpectedly found nil while implicitly unwrapping an Optional value.
However, this only happens when pressing the play button on a real device.
Also please ignore that I named my button "plat" button instead of play button, hah.
class ViewController: UIViewController, UIImagePickerControllerDelegate, UINavigationControllerDelegate, AVAudioRecorderDelegate, AVAudioPlayerDelegate {

    @IBOutlet weak var recordBTN: UIButton!
    @IBOutlet weak var platBTN: UIButton!

    var soundRecorder: AVAudioRecorder!
    var soundPlayer: AVAudioPlayer!
    var fileName: String = "audioFile.m4a"
    var player: AVAudioPlayer!
    var bannerView: GADBannerView!

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view.
        bannerView = GADBannerView(adSize: kGADAdSizeBanner)
        addBannerViewToView(bannerView)
        bannerView.adUnitID = "ca-app-pub-3940256099942544/2934735716"
        //ca-app-pub-9351248624194777/2551009478 real
        bannerView.rootViewController = self
        bannerView.load(GADRequest())
        setupRecorder()
        platBTN.isEnabled = false
    }

    func getDocumentsDirectory() -> URL {
        let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
        return paths[0]
    }

    func setupRecorder() {
        let audioFilename = getDocumentsDirectory().appendingPathComponent(fileName)
        let recordSetting = [AVFormatIDKey: kAudioFormatAppleLossless,
                             AVEncoderAudioQualityKey: AVAudioQuality.max.rawValue,
                             AVEncoderBitRateKey: 320000,
                             AVNumberOfChannelsKey: 2,
                             AVSampleRateKey: 44100.2] as [String: Any]
        do {
            soundRecorder = try AVAudioRecorder(url: audioFilename, settings: recordSetting)
            soundRecorder.delegate = self
            soundRecorder.prepareToRecord()
        } catch {
            print(error)
        }
    }

    func setupPlayer() {
        let audioFilename = getDocumentsDirectory().appendingPathComponent(fileName)
        do {
            soundPlayer = try AVAudioPlayer(contentsOf: audioFilename)
            soundPlayer.delegate = self
            soundPlayer.prepareToPlay()
            soundPlayer.volume = 1.0
        } catch {
            print(error)
        }
    }

    func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
        platBTN.isEnabled = true
    }

    func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
        recordBTN.isEnabled = true
        platBTN.setTitle("Play", for: .normal)
    }

    @IBAction func recordAct(_ sender: UIButton) {
        if recordBTN.titleLabel?.text == "Record" {
            soundRecorder.record()
            recordBTN.setTitle("Stop", for: .normal)
            platBTN.isEnabled = false
        } else {
            soundRecorder.stop()
            recordBTN.setTitle("Record", for: .normal)
            platBTN.isEnabled = false
        }
    }

    @IBAction func playAct(_ sender: UIButton) {
        if platBTN.titleLabel?.text == "Play" {
            platBTN.setTitle("Stop", for: .normal)
            recordBTN.isEnabled = false
            setupPlayer()
            soundPlayer.play() //Thread 1: Fatal error: Unexpectedly found nil while implicitly unwrapping an Optional value
        } else {
            soundPlayer.stop()
            platBTN.setTitle("Play", for: .normal)
            recordBTN.isEnabled = false
        }
    }

    @IBAction func playBtnPressed(_ sender: UIButton) {
        playSound(soundName: "4chordsong3")
    }

    @IBAction func pausePressed(_ sender: UIButton) {
        if player?.play() != nil {
            player.pause()
        }
    }
When I press the "play" button, the program immediately crashes and I get the error shown above. Is there any way I can fix this?
I found the solution. I simply added this to the setupRecorder method:
let session = AVAudioSession.sharedInstance()
try! session.setCategory(AVAudioSession.Category.playAndRecord)
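That likely explains the device-only crash: without a record-capable audio session category, recording on a real device produces no usable file, AVAudioPlayer(contentsOf:) then throws inside setupPlayer(), and soundPlayer stays nil when play() is called. A hedged, more complete sketch of setupRecorder follows; the AAC format and the microphone-permission handling are my assumptions, not part of the original answer, and NSMicrophoneUsageDescription must also be present in Info.plist.
func setupRecorder() {
    let audioFilename = getDocumentsDirectory().appendingPathComponent(fileName)

    // Configure and activate the shared audio session before creating the recorder.
    let session = AVAudioSession.sharedInstance()
    do {
        try session.setCategory(.playAndRecord, mode: .default, options: [])
        try session.setActive(true)
    } catch {
        print("Audio session error: \(error)")
    }

    // Recording silently fails on a device if microphone access was never granted.
    session.requestRecordPermission { granted in
        if !granted { print("Microphone permission denied") }
    }

    // AAC settings that match the .m4a file extension (assumption: lossless is not required).
    let recordSetting: [String: Any] = [
        AVFormatIDKey: kAudioFormatMPEG4AAC,
        AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue,
        AVNumberOfChannelsKey: 2,
        AVSampleRateKey: 44100.0
    ]

    do {
        soundRecorder = try AVAudioRecorder(url: audioFilename, settings: recordSetting)
        soundRecorder.delegate = self
        soundRecorder.prepareToRecord()
    } catch {
        print(error)
    }
}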

How to implement custom camera preview in iOS?

I am trying to implement a custom camera effect like the one shown in the attached image.
I thought it could be achieved the way described below.
This type of functionality is already implemented in an app available on the App Store (the link was given in the original post).
I want to replicate that app's camera functionality.
I have already implemented something similar.
I am using the code below to achieve it.
In the ViewController.swift class:
import UIKit
import AVFoundation

@available(iOS 10.0, *)
class ViewController: UIViewController {

    @IBOutlet weak var vc: UIView!
    @IBOutlet weak var img: UIImageView!

    override func viewDidLoad() {
        super.viewDidLoad()
        setupCamera()
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        session.startRunning()
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        session.stopRunning()
    }

    @IBOutlet fileprivate var previewView: PreviewView! {
        didSet {
            previewView.videoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
            previewView.layer.cornerRadius = previewView.layer.frame.size.width/2
            previewView.clipsToBounds = true
        }
    }

    @IBOutlet fileprivate var imageView: UIImageView! {
        didSet {
            imageView.layer.cornerRadius = imageView.layer.frame.size.width/2
            imageView.clipsToBounds = true
        }
    }

    fileprivate let session: AVCaptureSession = {
        let session = AVCaptureSession()
        session.sessionPreset = AVCaptureSessionPresetPhoto
        return session
    }()

    fileprivate let output = AVCaptureStillImageOutput()
}

@available(iOS 10.0, *)
extension ViewController {
    func setupCamera() {
        let backCamera = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
        guard let input = try? AVCaptureDeviceInput(device: backCamera) else {
            fatalError("back camera not functional.")
        }
        session.addInput(input)
        session.addOutput(output)
        previewView.session = session
    }
}

// MARK: - IBActions
@available(iOS 10.0, *)
private extension ViewController {
    @IBAction func capturePhoto() {
        if let videoConnection = output.connection(withMediaType: AVMediaTypeVideo) {
            output.captureStillImageAsynchronously(from: videoConnection, completionHandler: { (sampleBuffer, error) in
                if let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer) {
                    if let cameraImage = UIImage(data: imageData) {
                        self.imageView.image = cameraImage
                        UIImageWriteToSavedPhotosAlbum(cameraImage, nil, nil, nil)
                    }
                }
            })
        }
    }
}
I also created a PreviewView class and assigned it to a UIView in the storyboard.
With the code above I get the result shown in the screenshot.
I now need to add an image layer of an arbitrary shape as a frame over the UIView, but I have no idea how to achieve this.
So basically my task is: how do I add an image layer of any shape on top of the UIView, and, after capturing, how do I save the photo together with that image layer, like the final image shown in the clue?
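One possible direction (a hedged sketch only, not a tested answer; previewView and imageView are the outlets from the code above, while the "frameOverlay" asset name and the ellipse shape are assumptions): mask the live preview with a CAShapeLayer to give it a custom shape, place a UIImageView with the frame artwork on top of it, and, after capturing, draw the overlay onto the camera image before saving.
// Give the preview a custom shape and put the frame artwork on top of it.
func applyShapedOverlay() {
    let mask = CAShapeLayer()
    mask.path = UIBezierPath(ovalIn: previewView.bounds).cgPath
    previewView.layer.mask = mask

    let overlay = UIImageView(image: UIImage(named: "frameOverlay"))
    overlay.frame = previewView.bounds
    overlay.contentMode = .scaleAspectFit
    previewView.addSubview(overlay)
}

// Composite the overlay onto the captured photo so the saved image includes the frame.
func composite(cameraImage: UIImage, overlay: UIImage) -> UIImage {
    let renderer = UIGraphicsImageRenderer(size: cameraImage.size)
    return renderer.image { _ in
        cameraImage.draw(in: CGRect(origin: .zero, size: cameraImage.size))
        overlay.draw(in: CGRect(origin: .zero, size: cameraImage.size))
    }
}
In capturePhoto, assigning self.imageView.image = composite(cameraImage: cameraImage, overlay: UIImage(named: "frameOverlay")!) before calling UIImageWriteToSavedPhotosAlbum would then save the combined picture.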

AVAudioPlayer not resetting on viewDidAppear

The idea is simple, and I do not think this question has been asked before. I want to build a simple MP3 player:
- some songs are displayed in a collection view and the user selects one;
- the app segues to another view with options to play, pause, or stop.
The only issue is when you go back to the home screen to select a new song while the current one is still playing: it is impossible to deactivate the current player, so when you play the second song, both songs play at the same time.
I have tried a lot of things:
- creating a new player instance (player = AVAudioPlayer())
- calling player.pause() and player.play()
I do not see what I am doing wrong, really.
This is my code:
import UIKit
import AVFoundation

class LecteurViewController: UIViewController {

    var chansonSelected: Chanson? = nil
    var lecteur: AVAudioPlayer = AVAudioPlayer()
    var timer1 = Timer()
    var timer2 = Timer()

    @IBOutlet weak var dureeChansonSlider: UISlider!
    @IBOutlet weak var chansonImageView: UIImageView!
    @IBOutlet weak var chansonVolumeSlider: UISlider!
    @IBOutlet weak var debutLabel: UILabel!
    @IBOutlet weak var finLabel: UILabel!

    @IBAction func stopMusicAction(_ sender: UIBarButtonItem) {
        var player = AVAudioPlayer()
        lecteur.stop()
        LecteurManager.isActive = false
    }

    @IBAction func pauseMusicAction(_ sender: UIBarButtonItem) {
        var player = AVAudioPlayer()
        lecteur.pause()
        LecteurManager.isActive = false
    }

    @IBAction func jouerMusicAction(_ sender: UIButton) {
        if LecteurManager.isActive {
            changeSong()
            print("lecteur déjà en cours")
        } else {
            var player = AVAudioPlayer()
            lecteur.play()
        }
        print(LecteurManager.isActive)
        LecteurManager.isActive = true
    }

    func changeSong() {
        lecteur.stop()
        //lecteur = AVAudioPlayer()
        jouerLecteurMp3()
        print(chansonSelected!)
        lecteur.play()
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        configureView()
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        jouerLecteurMp3()
    }

    func configureView() {
        self.title = (chansonSelected!.titre!).capitalized
        chansonImageView.image = UIImage(named: "\(chansonSelected!.image).jpgs")
        // configure the 'back' button
        let backBtn = UIBarButtonItem(title: "< Playlist", style: .plain, target: self, action: #selector(LecteurViewController.reset(_sender:)))
        self.navigationItem.leftBarButtonItem = backBtn
        self.navigationController?.navigationBar.tintColor = UIColor.white
        // control the song volume
        chansonVolumeSlider.addTarget(self, action: #selector(LecteurViewController.ajusterVolume(_:)), for: UIControlEvents.valueChanged)
        // control the song position
        dureeChansonSlider.addTarget(self, action: #selector(LecteurViewController.ajusterDurée(_:)), for: UIControlEvents.valueChanged)
        updateUI()
    }

    func updateUI() {
        // track the song position
        timer1 = Timer.scheduledTimer(timeInterval: 1, target: self, selector: #selector(LecteurViewController.mettreAJourDurée), userInfo: nil, repeats: true)
        // display the song duration
        timer2 = Timer.scheduledTimer(timeInterval: 1, target: self, selector: #selector(LecteurViewController.afficherDurée), userInfo: nil, repeats: true)
    }

    func reset(_sender: UIBarButtonItem) {
        self.navigationController?.popViewController(animated: true)
    }

    func ajusterVolume(_ sender: UISlider) {
        //print("volume ajusté \(chansonVolumeSlider.value)")
        lecteur.volume = chansonVolumeSlider.value
    }

    func ajusterDurée(_ sender: UISlider) {
        lecteur.currentTime = TimeInterval(dureeChansonSlider.value)
    }

    func mettreAJourDurée() {
        dureeChansonSlider.value = Float(lecteur.currentTime)
    }

    func afficherDurée() {
        print("durée actuelle: \(lecteur.duration - lecteur.currentTime)")
        debutLabel.text = retournerPositionActuelle()
        finLabel.text = retournerDureeTotal()
    }

    func retournerPositionActuelle() -> String {
        let seconds = Int(lecteur.currentTime) % 60
        let minutes = (Int(lecteur.currentTime) / 60) % 60
        return String(format: "%0.2i:%0.2i", minutes, seconds)
    }

    func retournerDureeTotal() -> String {
        let seconds = Int(lecteur.currentTime) % 60
        let minutes = (Int(lecteur.currentTime) / 60) % 60
        return String(format: "%0.2i:%0.2i", minutes, seconds)
    }

    func jouerLecteurMp3() {
        let chanson = "bensound-\(chansonSelected!.titre!)"
        let fichierMp3 = Bundle.main.path(forResource: chanson, ofType: "mp3")
        do {
            try lecteur = AVAudioPlayer(contentsOf: URL(string: fichierMp3!)!)
            dureeChansonSlider.maximumValue = Float(lecteur.duration)
        } catch {
            print("erreur lecture mp3")
        }
    }
}
Try this:
func reset(_sender: UIBarButtonItem) {
    self.navigationController?.popViewController(animated: true)
    lecteur.stop()
}
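A small hedged addition (not part of the original answer): stopping the player only in the custom back-button handler misses other ways of leaving the screen, such as the interactive swipe-back gesture, and the repeating timers keep firing after the pop. Stopping everything whenever the view disappears covers both cases:
override func viewWillDisappear(_ animated: Bool) {
    super.viewWillDisappear(animated)
    // Stop playback and the UI timers no matter how the user leaves this screen.
    lecteur.stop()
    timer1.invalidate()
    timer2.invalidate()
}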

Write data to Firebase not working - Cannot convert value of type 'user' to expected argument type 'AnyObject?'

I am trying to save my registered data to Firebase but get a "Cannot convert value of type 'user' to expected argument type 'AnyObject?'" error. I have tried many things but I can't seem to figure it out. Can someone help me? How do I save it?
import UIKit
import Firebase
import FirebaseDatabaseUI

class EventViewController: UIViewController, UIImagePickerControllerDelegate, UINavigationControllerDelegate {

    // outlets for text & image
    @IBOutlet weak var photoImageView: UIImageView!
    @IBOutlet weak var eventName: UITextField!
    @IBOutlet weak var eventDate: UITextField!
    @IBOutlet weak var eventDes: UITextView!

    // Database connection
    let rootref = FIRDatabase().reference()

    var imagePicker: UIImagePickerController = UIImagePickerController()

    override func viewDidLoad() {
        super.viewDidLoad()
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
    }

    @IBAction func submitEvent(sender: AnyObject) {
        let name = eventName.text
        let date = eventDate.text
        let text = eventDes.text
        var data: NSData = NSData()
        if let image = photoImageView.image {
            data = UIImageJPEGRepresentation(image, 0.1)!
        }
        let base64String = data.base64EncodedStringWithOptions(NSDataBase64EncodingOptions.Encoding64CharacterLineLength)
        if let unwrappedName = name, unwrappedDate = date, unwrappedText = text {
            let user: NSDictionary = ["name": unwrappedName, "date": unwrappedDate, "text": unwrappedText, "photoBase64": base64String]
        }
        // Add firebase child node
        let event = FIRDatabase().reference().child(name!)
        // Write data to Firebase
        event.setValue(user.init())
        navigationController?.popViewControllerAnimated(true)
    }

    override func touchesBegan(touches: Set<UITouch>, withEvent event: UIEvent?) {
        view.endEditing(true)
        super.touchesBegan(touches, withEvent: event)
    }

    // UIImagePickerControllerDelegate methods
    func imagePickerController(picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [String : AnyObject]) {
        imagePicker.dismissViewControllerAnimated(true, completion: nil)
        photoImageView.image = info[UIImagePickerControllerOriginalImage] as? UIImage
    }

    func imagePickerControllerDidCancel(picker: UIImagePickerController) {
        dismissViewControllerAnimated(true, completion: nil)
    }

    @IBAction func addPicture(sender: AnyObject) {
        if UIImagePickerController.isSourceTypeAvailable(UIImagePickerControllerSourceType.Camera) {
            imagePicker = UIImagePickerController()
            imagePicker.delegate = self
            imagePicker.sourceType = .Camera
            presentViewController(imagePicker, animated: true, completion: nil)
        } else {
            imagePicker.allowsEditing = false
            imagePicker.sourceType = .PhotoLibrary
            imagePicker.delegate = self
            presentViewController(imagePicker, animated: true, completion: nil)
        }
    }
}
You have already created a reference to your database root in rootref, so there is no need to create another reference. Also, you declared user: NSDictionary inside the if let block, so it cannot be accessed outside that scope. Declare user above the block and then use it. Try this code:
let name = eventName.text
let date = eventDate.text
let text = eventDes.text
var data: NSData = NSData()
var user = NSDictionary() // declare here
if let image = photoImageView.image {
    data = UIImageJPEGRepresentation(image, 0.1)!
}
let base64String = data.base64EncodedStringWithOptions(NSDataBase64EncodingOptions.Encoding64CharacterLineLength)
if let unwrappedName = name, unwrappedDate = date, unwrappedText = text {
    // use your declared dictionary
    user = ["name": unwrappedName, "date": unwrappedDate, "text": unwrappedText, "photoBase64": base64String]
}
// Add Firebase child node
//let event = FIRDatabase().reference().child(name!)
// Do not create one more reference to the database
rootref.child(name!).setValue(user)
//rootref.setValue(user)
// Write data to Firebase
//event.setValue(user.init())
navigationController?.popViewControllerAnimated(true)
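One more hedged note (my addition, not part of the original answer): in that generation of the Firebase iOS SDK, the root reference is normally obtained from the shared database instance rather than by constructing FIRDatabase() directly, so the property at the top of the class would usually look like this:
// Assumed correction to the rootref property declaration.
let rootref = FIRDatabase.database().reference()
With that reference, rootref.child(name!).setValue(user) writes the dictionary under a child node named after the event.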

Cannot play sound in Swift

I'm having some trouble playing a sound which is attached to a button/IBAction.
When I do the exact same thing for iOS in Xcode, it works perfectly. However, when I do this for OS X, it doesn't work. Any ideas?
import Cocoa
import AVFoundation

class ViewController: NSViewController, NSSpeechRecognizerDelegate {

    var pingAudioPlayer: AVAudioPlayer?
    var sr = NSSpeechRecognizer()

    @IBOutlet var output: NSTextView?

    func playPing() {
        let pingSound = NSURL(fileURLWithPath: NSBundle.mainBundle().pathForResource("ping", ofType: "mp3")!)
        pingAudioPlayer = AVAudioPlayer(contentsOfURL: pingSound, error: nil)
        pingAudioPlayer!.prepareToPlay()
        pingAudioPlayer!.currentTime = 0
        pingAudioPlayer!.play()
    }

    @IBAction func soundTest(sender: AnyObject) {
        playPing()
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view.
        sr.delegate = self
        sr.commands = ["Ping", "Ping Mac"]
        sr.startListening()
    }

    func speechRecognizer(sender: NSSpeechRecognizer, didRecognizeCommand command: AnyObject?) {
        output!.string! += "\(command)\n"
        playPing()
    }

    override var representedObject: AnyObject? {
        didSet {
            // Update the view, if already loaded.
        }
    }
}
The main problem was the speechRecognizer delegate method: it did not have the right signature.
import Cocoa
import AVFoundation

class ViewController: NSViewController, NSSpeechRecognizerDelegate {

    var pingAudioPlayer: AVAudioPlayer?
    var sr = NSSpeechRecognizer()

    @IBOutlet var output: NSTextView?

    func playPing() {
        let pingSound = NSURL(fileURLWithPath: NSBundle.mainBundle().pathForResource("ping", ofType: "mp3")!)
        pingAudioPlayer = try? AVAudioPlayer(contentsOfURL: pingSound)
        pingAudioPlayer?.prepareToPlay()
        pingAudioPlayer?.currentTime = 0
        pingAudioPlayer?.play()
    }

    @IBAction func soundTest(sender: AnyObject) {
        playPing()
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view.
        sr?.delegate = self
        sr?.commands = ["Ping", "Ping Mac"]
        sr?.startListening()
    }

    func speechRecognizer(sender: NSSpeechRecognizer, didRecognizeCommand command: String) {
        output?.string! += "\(command)\n"
        playPing()
    }
}
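A final hedged note (my addition): even with the corrected signature, playPing() still force-unwraps the result of pathForResource, which crashes if ping.mp3 is not included in the target's Copy Bundle Resources. A slightly defensive variant in the same Swift 2 style:
func playPing() {
    // Bail out gracefully if the resource is missing instead of force-unwrapping.
    guard let path = NSBundle.mainBundle().pathForResource("ping", ofType: "mp3") else {
        print("ping.mp3 not found in the app bundle")
        return
    }
    pingAudioPlayer = try? AVAudioPlayer(contentsOfURL: NSURL(fileURLWithPath: path))
    pingAudioPlayer?.prepareToPlay()
    pingAudioPlayer?.currentTime = 0
    pingAudioPlayer?.play()
}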