How to execute the delegate method after calling capturePhoto(with:delegate:) - Swift

This is my code.
When I click the captureButton it executes without error.
The purpose is to save a CIImage in captureProcessor.image
import UIKit
import AVFoundation
class ViewController: UIViewController {
let captureProcessor = captureProcess()
let session = AVCaptureSession()
let capturedPhotoOutPut = AVCapturePhotoOutput()
@IBOutlet weak var previewOfCamera: UIView!
@IBOutlet weak var imageShow: UIImageView!
@IBAction func captureButton(_ sender: Any) {
captureProcessor.capturePhoto(capturedPhotoOutPut)
// getCGRectInfo()
// code above is to get CGRect info from captureProcessor.image
}
override func viewDidLoad() {
super.viewDidLoad()
configurePreview()
}
func configureTheSession () -> Void {
var videoInput:AVCaptureDeviceInput!
let videoDevice = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: .video, position: .back).devices.first
videoInput = try! AVCaptureDeviceInput(device: videoDevice!)
session.beginConfiguration()
session.sessionPreset = .hd1280x720
session.addInput(videoInput)
session.addOutput(capturedPhotoOutPut)
session.commitConfiguration()
DispatchQueue.global(qos: .userInitiated).async {
self.session.startRunning()
}
}
}
And here is the other class mentioned above:
import Foundation
import AVFoundation
class captureProcess: NSObject, AVCapturePhotoCaptureDelegate{
var image:CIImage!
func capturePhoto (_ photoOutput: AVCapturePhotoOutput) {
let settings = AVCapturePhotoSettings()
settings.flashMode = .off
photoOutput.capturePhoto(with: settings, delegate: self)
}
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
let imageData = photo.fileDataRepresentation()
image = CIImage(data: imageData!)
}
}
But if I add another call in the @IBAction, e.g. getCGRectInfo() (the comment in the first chunk of code), the program does not wait for capturePhoto: it runs that line, but the delegate method has not executed yet, and getCGRectInfo() runs first. At that point the image variable is still nil, so the program crashes.
Is there any way to have the delegate method execute once the program reaches the capturePhoto line, so that I can read the CIImage properly?
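The delegate callback is asynchronous: capturePhoto(with:delegate:) returns immediately, and photoOutput(_:didFinishProcessingPhoto:error:) only runs once the photo has been processed, which is why getCGRectInfo() sees a nil image. One common pattern is to pass a completion closure into the capture call and invoke it from the delegate method. The sketch below is illustrative only; the CaptureProcessor name and the completion property are assumptions, not the code above:

import AVFoundation
import CoreImage

class CaptureProcessor: NSObject, AVCapturePhotoCaptureDelegate {
    // Held until the delegate callback fires (illustrative name).
    private var completion: ((CIImage?) -> Void)?

    func capturePhoto(_ photoOutput: AVCapturePhotoOutput,
                      completion: @escaping (CIImage?) -> Void) {
        self.completion = completion
        let settings = AVCapturePhotoSettings()
        settings.flashMode = .off
        photoOutput.capturePhoto(with: settings, delegate: self)
    }

    func photoOutput(_ output: AVCapturePhotoOutput,
                     didFinishProcessingPhoto photo: AVCapturePhoto,
                     error: Error?) {
        // Runs asynchronously, after the capture has finished.
        guard error == nil, let data = photo.fileDataRepresentation() else {
            completion?(nil)
            return
        }
        completion?(CIImage(data: data))
        completion = nil
    }
}

The button action then does its follow-up work inside the closure instead of right after the call:

@IBAction func captureButton(_ sender: Any) {
    captureProcessor.capturePhoto(capturedPhotoOutPut) { image in
        guard image != nil else { return }
        // Safe to read the CIImage (e.g. call getCGRectInfo()) here.
    }
}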

Related

How to implement custom camera preview in iOS?

I am trying to implement a custom camera effect like this: Image
I thought it could be achieved this way.
This type of functionality is already implemented in an app available on the App Store; here is the link: enter link description here
I want to copy this app's camera functionality.
I have already implemented something like this.
I am using the code below to achieve the above functionality, in the ViewController.swift class:
import UIKit
import AVFoundation
@available(iOS 10.0, *)
class ViewController: UIViewController
{
@IBOutlet weak var vc: UIView!
@IBOutlet weak var img: UIImageView!
override func viewDidLoad() {
super.viewDidLoad()
setupCamera()
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
session.startRunning()
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
session.stopRunning()
}
@IBOutlet fileprivate var previewView: PreviewView! {
didSet {
previewView.videoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
previewView.layer.cornerRadius = previewView.layer.frame.size.width/2
previewView.clipsToBounds = true
}
}
@IBOutlet fileprivate var imageView: UIImageView! {
didSet {
imageView.layer.cornerRadius = imageView.layer.frame.size.width/2
imageView.clipsToBounds = true
}
}
fileprivate let session: AVCaptureSession = {
let session = AVCaptureSession()
session.sessionPreset = AVCaptureSessionPresetPhoto
return session
}()
fileprivate let output = AVCaptureStillImageOutput()
}
@available(iOS 10.0, *)
extension ViewController {
func setupCamera() {
let backCamera = AVCaptureDevice.defaultDevice(withMediaType:
AVMediaTypeVideo)
guard let input = try? AVCaptureDeviceInput(device: backCamera) else {
fatalError("back camera not functional.") }
session.addInput(input)
session.addOutput(output)
previewView.session = session
}
}
// MARK: - IBActions
@available(iOS 10.0, *)
private extension ViewController {
@IBAction func capturePhoto() {
if let videoConnection = output.connection(withMediaType: AVMediaTypeVideo) {
output.captureStillImageAsynchronously(from: videoConnection, completionHandler: { (CMSampleBuffer, Error) in
if let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(CMSampleBuffer) {
if let cameraImage = UIImage(data: imageData) {
self.imageView.image = cameraImage
UIImageWriteToSavedPhotosAlbum(cameraImage, nil, nil, nil)
}
}
})
}
}
}
Also, create a PreviewView class and set this class on a UIView in the storyboard file.
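For reference, a PreviewView of this kind is usually a UIView whose backing layer is an AVCaptureVideoPreviewLayer (the pattern used in Apple's AVCam sample). A minimal sketch, assuming the same Swift 3-era project:

import UIKit
import AVFoundation

class PreviewView: UIView {
    // Back the view with an AVCaptureVideoPreviewLayer instead of a plain CALayer.
    override class var layerClass: AnyClass {
        return AVCaptureVideoPreviewLayer.self
    }

    var videoPreviewLayer: AVCaptureVideoPreviewLayer {
        return layer as! AVCaptureVideoPreviewLayer
    }

    // Assigning the running AVCaptureSession here makes the live preview appear.
    var session: AVCaptureSession? {
        get { return videoPreviewLayer.session }
        set { videoPreviewLayer.session = newValue }
    }
}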
With the code above I have achieved this image.
I need to add an image layer of any shape as a frame on the UIView, but I have no idea how to achieve this type of functionality.
So basically my task is: how to add an image layer of any shape to a UIView, and, after capturing the image, how to save the image together with the image layer, like the Final Image clue image.
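For the shaped frame itself, one common approach (a sketch only; the function name and inset are illustrative, not the App Store app's actual implementation) is to add a CAShapeLayer on top of the preview whose path covers the whole view with the desired shape punched out via the even-odd fill rule:

import UIKit

// Dims everything outside a circular "window" on top of the given view.
func addCircularFrame(over view: UIView) {
    let overlay = CAShapeLayer()
    let path = UIBezierPath(rect: view.bounds)
    // Punch a circular hole in the middle; the 20-point inset is arbitrary.
    path.append(UIBezierPath(ovalIn: view.bounds.insetBy(dx: 20, dy: 20)))
    overlay.path = path.cgPath
    overlay.fillRule = kCAFillRuleEvenOdd   // Swift 3-era constant name
    overlay.fillColor = UIColor.black.withAlphaComponent(0.5).cgColor
    view.layer.addSublayer(overlay)
}

To save the captured photo together with such an overlay, the usual route is to draw both the photo and the overlay into a single graphics context and save the resulting UIImage, rather than saving the raw camera image alone.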

URL validation not working in Swift

I started learning Swift two weeks ago, with no previous programming experience, and I can't for the life of me figure out why this wouldn't work to check for nil. It just crashes when trying to load a web page if the user enters an invalid URL. This is the ENTIRETY of the code.
import UIKit; import WebKit
class ViewController: UIViewController {
@IBOutlet weak var adressBar: UITextField!
@IBOutlet weak var webView: WKWebView!
override func viewDidLoad() {
super.viewDidLoad()
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
}
@IBAction func returnPressed(_ sender: Any) {
if let adressBarText = adressBar.text {
if let myURL = URL(string: adressBarText) {
let myRequest = URLRequest(url: myURL)
webView.load(myRequest)
adressBar.resignFirstResponder()
print("EYYYYY")
} else {
print("BOOOO")
}
}
}
}
Try this method:
func verifyUrl(urlString: String?) -> Bool {
// Check for nil
if let urlString = urlString {
// Create a URL instance
if let url = URL(string: urlString) {
// Check whether your application can open the URL
return UIApplication.shared.canOpenURL(url)
}
}
return false
}
https://stackoverflow.com/a/30130535/8069241
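For example, assuming the adressBar and webView outlets from the question, the check can be wired into returnPressed like this (a sketch):

@IBAction func returnPressed(_ sender: Any) {
    guard let adressBarText = adressBar.text,
        verifyUrl(urlString: adressBarText),
        let myURL = URL(string: adressBarText) else {
        print("BOOOO")   // nil, malformed, or unopenable URL
        return
    }
    webView.load(URLRequest(url: myURL))
    adressBar.resignFirstResponder()
}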

Cannot play sound - Swift

I'm having some trouble playing a sound which is attached to a button/IBAction.
When I do the exact same thing for iOS in Xcode, it works perfectly. However, when I do this for OS X, it doesn't work. Any ideas?
import Cocoa
import AVFoundation
class ViewController: NSViewController, NSSpeechRecognizerDelegate {
var pingAudioPlayer : AVAudioPlayer?
var sr = NSSpeechRecognizer()
@IBOutlet var output: NSTextView?
func playPing(){
let pingSound = NSURL(fileURLWithPath: NSBundle.mainBundle().pathForResource("ping", ofType: "mp3")!)
pingAudioPlayer = AVAudioPlayer(contentsOfURL: pingSound, error: nil)
pingAudioPlayer!.prepareToPlay()
pingAudioPlayer!.currentTime = 0
pingAudioPlayer!.play()
}
@IBAction func soundTest(sender: AnyObject) {
playPing()
}
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view.
sr.delegate = self
sr.commands = ["Ping", "Ping Mac"]
sr.startListening()
}
func speechRecognizer(sender: NSSpeechRecognizer, didRecognizeCommand command: AnyObject?) {
output!.string! += "\(command)\n"
playPing()
}
override var representedObject: AnyObject? {
didSet {
// Update the view, if already loaded.
}
}
}
The main problem was the speechRecognizer method; it didn't have the right signature.
import Cocoa
import AVFoundation
class ViewController: NSViewController, NSSpeechRecognizerDelegate {
var pingAudioPlayer : AVAudioPlayer?
var sr = NSSpeechRecognizer()
@IBOutlet var output: NSTextView?
func playPing(){
let pingSound = NSURL(fileURLWithPath: NSBundle.mainBundle().pathForResource("ping", ofType: "mp3")!)
pingAudioPlayer = try? AVAudioPlayer(contentsOfURL: pingSound)
pingAudioPlayer?.prepareToPlay()
pingAudioPlayer?.currentTime = 0
pingAudioPlayer?.play()
}
@IBAction func soundTest(sender: AnyObject) {
playPing()
}
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view.
sr?.delegate = self
sr?.commands = ["Ping", "Ping Mac"]
sr?.startListening()
}
func speechRecognizer(sender: NSSpeechRecognizer, didRecognizeCommand command: String) {
output?.string! += "\(command)\n"
playPing()
}
}

Trigger sound play() with NSSpeechRecognizer Swift

I'm trying to trigger a sound after I say a word. The speech recognizer recognizes the word when I say it and I've set it up so it puts out a string each time I say the command. What I'd like to do is trigger a sound after I say that specific word. This is what I have so far.
import Cocoa
import AVFoundation
class ViewController: NSViewController, NSSpeechRecognizerDelegate {
var ping = NSURL(fileURLWithPath: NSBundle.mainBundle().pathForResource("ping", ofType: "mp3")!)
var pingAudioPlayer = AVAudioPlayer()
var sr = NSSpeechRecognizer()
@IBOutlet var output: NSTextView?
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view.
pingAudioPlayer = AVAudioPlayer(contentsOfURL: ping, error: nil)
sr.delegate = self
sr.commands = ["Ping", "Ping Mac"]
sr.startListening()
}
func speechRecognizer(sender: NSSpeechRecognizer, didRecognizeCommand command: AnyObject?) {
output!.string! += "\(command)\n"
pingAudioPlayer.play()
}
override var representedObject: AnyObject? {
didSet {
// Update the view, if already loaded.
}
}
}
UPDATE:
import Cocoa
import AVFoundation
class ViewController: NSViewController, NSSpeechRecognizerDelegate {
var ping = NSURL(fileURLWithPath: NSBundle.mainBundle().pathForResource("ping", ofType: "mp3")!)
let pingAudioPlayer = AVAudioPlayer()
var sr = NSSpeechRecognizer()
@IBOutlet var output: NSTextView?
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view.
sr.delegate = self
sr.commands = ["Ping", "Ping Mac"]
sr.startListening()
}
func speechRecognizer(sender: NSSpeechRecognizer, didRecognizeCommand command: AnyObject?) {
output!.string! += "\(command)\n"
var pingAudioPlayer = AVAudioPlayer(contentsOfURL: ping, error: nil)
pingAudioPlayer.prepareToPlay()
pingAudioPlayer.play()
}
override var representedObject: AnyObject? {
didSet {
// Update the view, if already loaded.
}
}
}
Not sure why the audio player is not playing the sound once the word is recognized. Any ideas?
You aren't telling the audio player which sound to play. Try this:
func speechRecognizer(sender: NSSpeechRecognizer, didRecognizeCommand command: AnyObject?) {
do{
let pingAudioPlayer = try AVAudioPlayer(contentsOfURL:ping)
pingAudioPlayer.prepareToPlay()
pingAudioPlayer.play()
}catch {
print("Error getting the audio file")
}
}
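Because the player in the snippet above is only held by a local constant, it may be deallocated before the sound finishes. A variant that keeps it alive (a sketch, assuming the existing pingAudioPlayer property is declared as an optional) stores the player in the property instead:

var pingAudioPlayer: AVAudioPlayer?   // property, so the player outlives the delegate call

func speechRecognizer(sender: NSSpeechRecognizer, didRecognizeCommand command: AnyObject?) {
    output!.string! += "\(command)\n"
    do {
        pingAudioPlayer = try AVAudioPlayer(contentsOfURL: ping)
        pingAudioPlayer?.prepareToPlay()
        pingAudioPlayer?.play()
    } catch {
        print("Error getting the audio file")
    }
}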

FBLoginManager undeclared type

I installed FacebookSDK using CocoaPods. According to Terminal, I have installed FacebookSDK 4.8.0 (CoreKit, ShareKit and LoginKit). I imported the .h files in my BH-File.h and already initialized everything in my AppDelegate.
For some reason, when trying to log in using a custom button, when I initialize FBLoginManager I get the error Use of undeclared type "FBLoginManager".
This is my code:
if (FBSDKAccessToken.currentAccessToken() == nil)
{
let fbLoginManager : FBSDKLoginManager = FBSDKLoginManager()
fbLoginManager.logInWithReadPermissions(["public_profile", "email"], fromViewController: self, handler: { (loginResult, error) -> Void in
if error == nil {
print (FBSDKAccessToken.currentAccessToken().tokenString)
}
else {
print ("ERROR*****: \(error)")
}
})
}
What fixed it for me was adding import FBSDKCoreKit and import FBSDKLoginKit to my class; for some reason it is not enough to add them in the BH-File.h.
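With those imports in place, the custom-button code from the question compiles. A minimal sketch using the same FBSDK 4.x calls (the class and action names here are illustrative):

import UIKit
import FBSDKCoreKit
import FBSDKLoginKit

class LoginViewController: UIViewController {
    @IBAction func customLoginTapped(sender: AnyObject) {
        if FBSDKAccessToken.currentAccessToken() == nil {
            let fbLoginManager = FBSDKLoginManager()
            fbLoginManager.logInWithReadPermissions(["public_profile", "email"], fromViewController: self) { loginResult, error in
                if error == nil {
                    print(FBSDKAccessToken.currentAccessToken().tokenString)
                } else {
                    print("ERROR*****: \(error)")
                }
            }
        }
    }
}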
Try something like this. I just checked the code and it works (it's not exactly what you're looking for, but I'm sure you can modify it as needed).
import UIKit
import FBSDKCoreKit
import FBSDKLoginKit
class ProfileViewController: UIViewController, FBSDKLoginButtonDelegate {
// @IBOutlet weak var nameLabel: UILabel!
@IBOutlet weak var nameLabel: UILabel!
@IBOutlet weak var imageView: UIImageView!
@IBOutlet weak var nextButton: UIButton!
@IBOutlet weak var fbLoginButton: FBSDKLoginButton!
override func viewDidLoad() {
super.viewDidLoad()
self.fbLoginButton.delegate = self
self.fbLoginButton.readPermissions = ["public_profile"]
self.fbLoginButton.publishPermissions = ["publish_actions"]
NSNotificationCenter.defaultCenter().addObserver(
self,
selector: "fbProfileChanged:",
name: FBSDKProfileDidChangeNotification,
object: nil)
FBSDKProfile.enableUpdatesOnAccessTokenChange(true)
// If we have a current Facebook access token, force the profile change handler
if ((FBSDKAccessToken.currentAccessToken()) != nil)
{
self.fbProfileChanged(self)
} }
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
override func prefersStatusBarHidden() -> Bool {
return true
}
//facebooks functions
func loginButton(loginButton: FBSDKLoginButton!, didCompleteWithResult result: FBSDKLoginManagerLoginResult!, error: NSError!) {
if (error != nil)
{
print( "\(error.localizedDescription)" )
}
else if (result.isCancelled)
{
// Logged out?
print( "Login Cancelled")
}
else
{
// Logged in?
print( "Logged in, segue now")
self.performSegueWithIdentifier("showHome", sender: self)
}
}
func loginButtonDidLogOut(loginButton: FBSDKLoginButton!) {
}
//see bitfountain
func fbProfileChanged(sender: AnyObject!) {
let fbProfile = FBSDKProfile.currentProfile()
if (fbProfile != nil)
{
// Fetch & format the profile picture
let strProfilePicURL = fbProfile.imagePathForPictureMode(FBSDKProfilePictureMode.Square, size: imageView.frame.size)
let url = NSURL(string: strProfilePicURL, relativeToURL: NSURL(string: "http://graph.facebook.com/"))
let imageData = NSData(contentsOfURL: url!)
let image = UIImage(data: imageData!)
self.nameLabel.text = fbProfile.name
self.imageView.image = image
self.nameLabel.hidden = false
self.imageView.hidden = false
self.nextButton.hidden = false
}
else
{
self.nameLabel.text = ""
self.imageView.image = UIImage(named: "")
self.nameLabel.hidden = true
self.imageView.hidden = true
}
}
@IBAction func nextButtonPressed(sender: UIButton) {
self.performSegueWithIdentifier("showHome", sender: self)
}
}