Accessing the value of a variable in an if statement - Swift

I need to access the value of a variable in an if statement. This is my code:
var codeError : Int?

@IBAction func mySwitch(_ sender: UISwitch) {
    if sender.isOn {
        codeError = 1
    } else {
        codeError = 2
    }
}

override func viewWillAppear(_ animated: Bool) {
    super.viewWillAppear(animated)
    if codeError == 1 {
        front()
    } else if codeError == 2 {
        back()
    }
}
These are my two functions. This is my front() function:
func front() {
    let deviceDiscoverySession = AVCaptureDeviceDiscoverySession(deviceTypes: [AVCaptureDeviceType.builtInDualCamera, AVCaptureDeviceType.builtInTelephotoCamera, AVCaptureDeviceType.builtInWideAngleCamera], mediaType: AVMediaTypeVideo, position: AVCaptureDevicePosition.unspecified)
    for device in (deviceDiscoverySession?.devices)! {
        if device.position == AVCaptureDevicePosition.front {
            do {
                let input = try AVCaptureDeviceInput(device: device)
                if captureSession.canAddInput(input) {
                    captureSession.addInput(input)
                    if captureSession.canAddOutput(photoOutput) {
                        captureSession.addOutput(photoOutput)
                        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                        previewLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
                        previewLayer?.connection.videoOrientation = AVCaptureVideoOrientation.portrait
                        cameraView.layer.addSublayer(previewLayer!)
                    }
                }
            } catch {
                print("Error")
            }
        }
    }
    captureSession.startRunning()
    print("😇")
}
And the back() function:
func back() {
    let deviceDiscoverySession = AVCaptureDeviceDiscoverySession(deviceTypes: [AVCaptureDeviceType.builtInDualCamera, AVCaptureDeviceType.builtInTelephotoCamera, AVCaptureDeviceType.builtInWideAngleCamera], mediaType: AVMediaTypeVideo, position: AVCaptureDevicePosition.unspecified)
    for device in (deviceDiscoverySession?.devices)! {
        if device.position == AVCaptureDevicePosition.back {
            do {
                let input = try AVCaptureDeviceInput(device: device)
                if cS.canAddInput(input) {
                    cS.addInput(input)
                    if cS.canAddOutput(photoOutput) {
                        cS.addOutput(photoOutput)
                        previewLayer = AVCaptureVideoPreviewLayer(session: cS)
                        previewLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
                        previewLayer?.connection.videoOrientation = AVCaptureVideoOrientation.portrait
                        cameraView.layer.addSublayer(previewLayer!)
                    }
                }
            } catch {
                print("Error")
            }
        }
    }
    cS.startRunning()
    print("🙃")
}
The problem is that when I run the app, I get a nil value for codeError when I should get 1 or 2.
How do I solve this?

I'm not sure why you're reading, in viewWillAppear, a variable that is set when the user interacts with the view. I suggest spending some time studying the UIViewController life cycle.
But if you just want to print the error code when the switch changes value, this should do it.
var codeError : Int?

@IBAction func mySwitch(_ sender: UISwitch) {
    if sender.isOn {
        codeError = 1
    } else {
        codeError = 2
    }
}

override func viewWillAppear(_ animated: Bool) {
    super.viewWillAppear(animated)
    // will print nothing since codeError has not been set yet
    printCodeError()
}

func printCodeError() {
    if let codeError = self.codeError {
        print("\(codeError)")
    }
}

It is working like your original code, but you read it inside viewWillAppear, and at that point it has not yet been set from your UISwitch.
One solution: give it a default value:
var codeError: Int = 1
or, shorter:
var codeError = 1
Note:
viewWillAppear(_:)
Notifies the view controller that its view is about to be added to a view hierarchy.
See https://developer.apple.com/reference/uikit/uiviewcontroller/1621510-viewwillappear
But it is called on the view controller your UISwitch is on, so for your main goal (hidden in the comments on the other answer) it is the wrong place to read the variable.
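A minimal sketch of that idea (my own suggestion, not from either answer, and it assumes front(), back() and the capture sessions exist exactly as in the question): switch the camera directly from the switch action instead of reading codeError later in viewWillAppear:

@IBAction func mySwitch(_ sender: UISwitch) {
    // Sketch only: a real implementation would also remove the inputs/outputs
    // added by the previous configuration before reconfiguring the session.
    if sender.isOn {
        front()   // configure and start the front camera now
    } else {
        back()    // configure and start the back camera now
    }
}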

Related

How to make a UIViewController class reusable to pass data back to the view controller that calls it

I was using the code from the following site:
https://www.hackingwithswift.com/example-code/media/how-to-scan-a-qr-code
The code works perfectly and can be viewed at the link above.
It is code that captures a QR code/barcode from the camera and converts it to a string.
The part of the code that shows the string is:
func found(code: String) {
print(code)
}
After the code string is "printed", the code calls "dismiss" and returns to the previous UIViewController.
I want to get the "code" string and pass the data back to the previous UIViewController.
The only way that I am able to do that right now is with the following code:
func found(code: String) {
print("code: \(code)")
ResenhaEquideoIdentificaAnimal1Controller.shared.microchipAnimalTextField.text = code
}
But this code only works if the code is called by the "ResenhaEquideoIdentificaAnimal1Controller" class.
I use the following code to call the new UIViewController inside the "ResenhaEquideoIdentificaAnimal1Controller" class using a UIButton.
let myScannerViewController = MyScannerViewController()
present(myScannerViewController, animated: true, completion: nil)
How can I make this class reusable, so that I can call the "MyScannerViewController" class
and send data back to the view that calls it?
You want to use a "delegate pattern"; that is, when the code is found or something goes wrong, you delegate the functionality to some other party to deal with it.
For example, you could modify the existing example to add support for a simple delegate...
import AVFoundation
import UIKit
protocol ScannerDelegate: AnyObject {
func scanner(_ controller: ScannerViewController, didDiscoverCode code: String)
func failedToScanner(_ controller: ScannerViewController)
}
class ScannerViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
var captureSession: AVCaptureSession!
var previewLayer: AVCaptureVideoPreviewLayer!
weak var scannerDelegate: ScannerDelegate?
override func viewDidLoad() {
super.viewDidLoad()
view.backgroundColor = UIColor.black
captureSession = AVCaptureSession()
guard let videoCaptureDevice = AVCaptureDevice.default(for: .video) else { return }
let videoInput: AVCaptureDeviceInput
do {
videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
} catch {
return
}
if (captureSession.canAddInput(videoInput)) {
captureSession.addInput(videoInput)
} else {
failed()
return
}
let metadataOutput = AVCaptureMetadataOutput()
if (captureSession.canAddOutput(metadataOutput)) {
captureSession.addOutput(metadataOutput)
metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
metadataOutput.metadataObjectTypes = [.qr]
} else {
failed()
return
}
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
previewLayer.frame = view.layer.bounds
previewLayer.videoGravity = .resizeAspectFill
view.layer.addSublayer(previewLayer)
captureSession.startRunning()
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
if (captureSession?.isRunning == false) {
captureSession.startRunning()
}
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
if (captureSession?.isRunning == true) {
captureSession.stopRunning()
}
}
private func failed() {
captureSession = nil
scannerDelegate?.failedToScanner(self)
}
private func didFind(code: String) {
scannerDelegate?.scanner(self, didDiscoverCode: code)
}
override var prefersStatusBarHidden: Bool {
return true
}
override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
return .portrait
}
// MARK: AVCaptureMetadataOutputObjectsDelegate
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
captureSession.stopRunning()
if let metadataObject = metadataObjects.first {
guard let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject else { return }
guard let stringValue = readableObject.stringValue else { return }
AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
didFind(code: stringValue)
}
}
}
When you want to scan something, your calling view controller could adopt the protocol...
extension ViewController: ScannerDelegate {
func failedToScanner(_ controller: ScannerViewController) {
controller.dismiss(animated: true) {
let ac = UIAlertController(title: "Scanning not supported", message: "Your device does not support scanning a code from an item. Please use a device with a camera.", preferredStyle: .alert)
ac.addAction(UIAlertAction(title: "OK", style: .default))
self.present(ac, animated: true)
}
}
func scanner(_ controller: ScannerViewController, didDiscoverCode code: String) {
codeLabel.text = code
controller.dismiss(animated: true)
}
}
and when you wanted to present the scanner view controller, you would simply set the view controller as the delegate...
let controller = ScannerViewController()
controller.scannerDelegate = self
present(controller, animated: true)
The great thing about this is that you could easily reject a code you weren't interested in simply by modifying the delegate workflow.
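For example (my own sketch, not part of the answer above; the "PRODUCT-" prefix is a made-up rule), the delegate could keep scanning when a code does not interest it, using the captureSession property the controller already exposes:

func scanner(_ controller: ScannerViewController, didDiscoverCode code: String) {
    // Hypothetical validation: only accept codes with a known prefix,
    // otherwise restart the session so the user can scan again.
    guard code.hasPrefix("PRODUCT-") else {
        controller.captureSession.startRunning()
        return
    }
    codeLabel.text = code
    controller.dismiss(animated: true)
}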

How do I pass a scanned barcode ID from the first view controller to the second view controller's UILabel?

This is the barcode scanning tutorial I used in my program, so that you have a lot more context when you read my code: Link
Here is what my program does so far: essentially, when I scan an item's barcode with my phone, a UIAlert pops up with the barcode ID displayed and a button prompting the user to open the "Results" page. This is all fine and good, but how do I pass that same scanned barcode ID into a label on the Results page? I have been stuck on this for 2 days now, even though it seems like such an easy task.
Any help is much appreciated <3
Here is my relevant code:
ProductCatalog.plist ->
Link to Image
Scanner_ViewController.swift (first View Controller) ->
import UIKit
import AVFoundation
class Scanner_ViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate, ScannerDelegate
{
private var scanner: Scanner?
override func viewDidLoad()
{
super.viewDidLoad()
self.scanner = Scanner(withDelegate: self)
guard let scanner = self.scanner else
{
return
}
scanner.requestCaptureSessionStartRunning()
}
override func didReceiveMemoryWarning()
{
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
// MARK: - AVFoundation delegate methods
public func metadataOutput(_ output: AVCaptureMetadataOutput,
didOutput metadataObjects: [AVMetadataObject],
from connection: AVCaptureConnection)
{
guard let scanner = self.scanner else
{
return
}
scanner.metadataOutput(output,
didOutput: metadataObjects,
from: connection)
}
// MARK: - Scanner delegate methods
func cameraView() -> UIView
{
return self.view
}
func delegateViewController() -> UIViewController
{
return self
}
func scanCompleted(withCode code: String)
{
print(code)
showAlert_Success(withTitle: (code))
}
private func showAlert_Success(withTitle title: String)
{
let alertController = UIAlertController(title: title, message: "Product has been successfully scanned", preferredStyle: .alert)
// programmatically segue to the next view controller when the UIAlert pops up
alertController.addAction(UIAlertAction(title:"Get Results", style: .default, handler:{ action in self.performSegue(withIdentifier: "toAnalysisPage", sender: self) }))
present(alertController, animated: true)
}
}
Scanner.Swift (accompanies Scanner_ViewController.swift)->
import Foundation
import UIKit
import AVFoundation
protocol ScannerDelegate: class
{
func cameraView() -> UIView
func delegateViewController() -> UIViewController
func scanCompleted(withCode code: String)
}
class Scanner: NSObject
{
public weak var delegate: ScannerDelegate?
private var captureSession : AVCaptureSession?
init(withDelegate delegate: ScannerDelegate)
{
self.delegate = delegate
super.init()
self.scannerSetup()
}
private func scannerSetup()
{
guard let captureSession = self.createCaptureSession()
else
{
return
}
self.captureSession = captureSession
guard let delegate = self.delegate
else
{
return
}
let cameraView = delegate.cameraView()
let previewLayer = self.createPreviewLayer(withCaptureSession: captureSession,
view: cameraView)
cameraView.layer.addSublayer(previewLayer)
}
private func createCaptureSession() -> AVCaptureSession?
{
do
{
let captureSession = AVCaptureSession()
guard let captureDevice = AVCaptureDevice.default(for: .video) else
{
return nil
}
let deviceInput = try AVCaptureDeviceInput(device: captureDevice)
let metaDataOutput = AVCaptureMetadataOutput()
// add device input
if captureSession.canAddInput(deviceInput) && captureSession.canAddOutput(metaDataOutput)
{
captureSession.addInput(deviceInput)
captureSession.addOutput(metaDataOutput)
guard let delegate = self.delegate,
let viewController = delegate.delegateViewController() as? AVCaptureMetadataOutputObjectsDelegate else
{
return nil
}
metaDataOutput.setMetadataObjectsDelegate(viewController,
queue: DispatchQueue.main)
metaDataOutput.metadataObjectTypes = self.metaObjectTypes()
return captureSession
}
}
catch
{
// handle error
}
return nil
}
private func createPreviewLayer(withCaptureSession captureSession: AVCaptureSession,
view: UIView) -> AVCaptureVideoPreviewLayer
{
let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
previewLayer.frame = view.layer.bounds
previewLayer.videoGravity = .resizeAspectFill
return previewLayer
}
private func metaObjectTypes() -> [AVMetadataObject.ObjectType]
{
return [.qr,
.code128,
.code39,
.code39Mod43,
.code93,
.ean13,
.ean8,
.interleaved2of5,
.itf14,
.pdf417,
.upce
]
}
public func metadataOutput(_ output: AVCaptureMetadataOutput,
didOutput metadataObjects: [AVMetadataObject],
from connection: AVCaptureConnection)
{
self.requestCaptureSessionStopRunning()
guard let metadataObject = metadataObjects.first,
let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject,
let scannedValue = readableObject.stringValue,
let delegate = self.delegate
else
{
return
}
delegate.scanCompleted(withCode: scannedValue)
}
public func requestCaptureSessionStartRunning()
{
self.toggleCaptureSessionRunningState()
}
public func requestCaptureSessionStopRunning()
{
self.toggleCaptureSessionRunningState()
}
private func toggleCaptureSessionRunningState()
{
guard let captureSession = self.captureSession
else
{
return
}
if !captureSession.isRunning
{
captureSession.startRunning()
}
else
{
captureSession.stopRunning()
}
}
}
Analysis_ViewController.swift (second view controller) ->
Right now, forKey: has been hard-coded to item ID 8710908501708 because I have no idea how to actually pass camera-scanned IDs into the second view controller :/
import UIKit
class Analysis_ViewController: UIViewController
{
@IBOutlet weak var productTitle: UILabel!
func getData()
{
let path = Bundle.main.path(forResource:"ProductCatalog", ofType: "plist")
let dict:NSDictionary = NSDictionary(contentsOfFile: path!)!
if (dict.object(forKey: "8710908501708" as Any) != nil)
{
if let levelDict:[String : Any] = dict.object(forKey: "8710908501708" as Any) as? [String : Any]
{
// use a for loop to iterate through all the keys and values inside the "Levels" dictionary
for (key, value) in levelDict
{
// if we find a key named whatever we care about, we can print out the value
if (key == "name")
{
productTitle.text = (value as! String)
}
}
}
}
}
// listing the better options that are safer in comparison to the scanned product image
override func viewDidLoad()
{
super.viewDidLoad()
getData()
}
}
Do you have a variable to hold the scanned ID in your view controllers? If not, you can add var itemID: String? to both Scanner_ViewController and Analysis_ViewController.
Then in your func where you get the scanned code, you can set it to the variable.
func scanCompleted(withCode code: String) {
print(code)
itemID = code // Saves the scanned code to your var
showAlert_Success(withTitle: (code))
}
For passing data to another view controller via segue, you might want to look into this UIViewController method for segues: documentation here. This answer also might help.
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
if segue.identifier == "toAnalysisPage" {
if let viewController = segue.destination as? Analysis_ViewController {
viewController.itemID = itemID
}
}
}
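A related sketch (my own suggestion, not required by the answer above): since performSegue(withIdentifier:sender:) accepts any object as the sender, the alert action could pass the scanned code itself and prepare(for:sender:) could read it back without storing it in a property. Recall that showAlert_Success(withTitle:) is called with the scanned code as its title:

private func showAlert_Success(withTitle title: String) {
    let alertController = UIAlertController(title: title, message: "Product has been successfully scanned", preferredStyle: .alert)
    alertController.addAction(UIAlertAction(title: "Get Results", style: .default, handler: { _ in
        // pass the scanned code (the alert title) along as the sender
        self.performSegue(withIdentifier: "toAnalysisPage", sender: title)
    }))
    present(alertController, animated: true)
}

override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
    if segue.identifier == "toAnalysisPage",
        let viewController = segue.destination as? Analysis_ViewController,
        let code = sender as? String {
        viewController.itemID = code
    }
}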

AVCaptureMetadataObjectDelegate not receiving callback

I am making a QR scanner. My code works when all of it is written in one place inside the ViewController, but when I modularised it I stopped getting the callback inside AVCaptureMetadataOutputObjectsDelegate.
import Foundation
import UIKit
import AVFoundation
class CameraSource : NSObject {
private var session : AVCaptureSession?
private var inputDevice : AVCaptureDeviceInput?
private var videoPreviewLayer : AVCaptureVideoPreviewLayer?
private var captureMetadataOutput : AVCaptureMetadataOutput?
func setCaptureMetadataOutput() {
self.captureMetadataOutput = nil
self.captureMetadataOutput = AVCaptureMetadataOutput()
}
func getCaptureMetadataOutput() -> AVCaptureMetadataOutput? {
return self.captureMetadataOutput
}
func setInputDevice(inputDevice : AVCaptureDeviceInput?) {
self.inputDevice = inputDevice
}
func getInputDevice() -> AVCaptureDeviceInput? {
return self.inputDevice
}
func setSession(session : AVCaptureSession?) {
self.session = session
}
func getSession() -> AVCaptureSession? {
return self.session
}
func setMetadataObjects(metaObjects : [AVMetadataObject.ObjectType], delegate : AVCaptureMetadataOutputObjectsDelegate) {
assert(self.captureMetadataOutput != nil)
self.captureMetadataOutput!.setMetadataObjectsDelegate(delegate, queue: DispatchQueue.main)
self.captureMetadataOutput!.metadataObjectTypes = metaObjects
}
func initViewoPreviewLayer(videoGravity : AVLayerVideoGravity, orientation : AVCaptureVideoOrientation) {
assert(session != nil)
videoPreviewLayer = AVCaptureVideoPreviewLayer(session: session!)
videoPreviewLayer!.videoGravity = videoGravity
videoPreviewLayer!.connection!.videoOrientation = orientation
}
func addVideoLayerToImageView(imageView : UIImageView) {
assert(self.videoPreviewLayer != nil)
imageView.layer.addSublayer(self.videoPreviewLayer!)
self.videoPreviewLayer!.frame = imageView.bounds
}
func startSession() {
assert(session != nil)
self.session!.startRunning()
}
/*==========================================================================
STATIC FUNCTIONS
==========================================================================*/
static func getBackCamera() -> AVCaptureDevice {
return AVCaptureDevice.default(.builtInWideAngleCamera, for: AVMediaType.video, position: .back)!
}
static func getFrontCamera() -> AVCaptureDevice {
return AVCaptureDevice.default(.builtInWideAngleCamera, for: AVMediaType.video, position: .front)!
}
static func isCameraAvailable() -> Bool {
if #available(iOS 10.0, *) {
let count : Int = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera],
mediaType: AVMediaType.video,
position: .unspecified).devices.count
if count > 0 { return true }
}
else {
let count = AVCaptureDevice.devices(for: AVMediaType.video).count
if count > 0 { return true }
}
return false
}
/*==========================================================================
CAMERA BUILDER CLASS
==========================================================================*/
class Builder {
var cameraSource : CameraSource
init() {
cameraSource = CameraSource()
}
func createSession() -> Builder {
if (cameraSource.getSession() != nil) {
cameraSource.setSession(session: nil)
}
cameraSource.setSession(session: AVCaptureSession())
return self
}
func setSessionPreset(preset : AVCaptureSession.Preset) -> Builder {
assert(cameraSource.getSession() != nil)
cameraSource.getSession()!.sessionPreset = preset
return self
}
func attachInputDevice(camera : AVCaptureDevice) throws -> Builder {
try self.prepareInputDevice(camera: camera)
try self.addInputToSession()
assert(cameraSource.inputDevice != nil)
return self
}
func addOutputToSessionForMetaData() throws -> CameraSource {
cameraSource.setCaptureMetadataOutput()
assert(cameraSource.getSession() != nil && cameraSource.getCaptureMetadataOutput() != nil)
if !cameraSource.getSession()!.canAddOutput(cameraSource.getCaptureMetadataOutput()!) {
throw AppErrorCode.cameraError("Unable to attach output to camera session")
}
cameraSource.getSession()!.addOutput(cameraSource.getCaptureMetadataOutput()!)
return self.cameraSource
}
/*==========================================================================
BUILDER PRIVATE FUNCTIONS
==========================================================================*/
private func prepareInputDevice(camera : AVCaptureDevice) throws {
do {
let inputDevice = try AVCaptureDeviceInput(device: camera)
cameraSource.setInputDevice(inputDevice: inputDevice)
} catch let error as NSError {
print(error.localizedDescription)
throw AppErrorCode.cameraError("Unable to attach input to camera session")
}
}
private func addInputToSession() throws {
if(cameraSource.getSession() == nil) {
throw AppErrorCode.cameraError("Unable to create camera session")
}
assert(cameraSource.getInputDevice() != nil && cameraSource.getSession()!.canAddInput(cameraSource.getInputDevice()!))
cameraSource.getSession()!.addInput(cameraSource.getInputDevice()!)
}
}
}
My QR scanner code looks like this:
import UIKit
import Foundation
import AVFoundation
protocol QRScannerDelegate {
func scannedData(_ scannedString : String)
}
class QRScanner : NSObject {
private var cameraSource : CameraSource?
var delegate : QRScannerDelegate?
func prepareCamera (delegate : QRScannerDelegate) throws -> QRScanner {
do {
self.delegate = delegate
self.cameraSource = try CameraSource
.Builder()
.createSession()
.setSessionPreset(preset: .photo)
.attachInputDevice(camera: CameraSource.getBackCamera())
.addOutputToSessionForMetaData()
self.cameraSource!.setMetadataObjects(metaObjects: [.qr], delegate: self as AVCaptureMetadataOutputObjectsDelegate)
} catch let err as NSError {
print(err.localizedDescription)
self.cameraSource = nil
throw AppErrorCode.cameraError("Unable to process camera with one or more issue")
}
return self
}
func initViewoPreviewLayer(videoGravity : AVLayerVideoGravity, orientation : AVCaptureVideoOrientation) -> QRScanner{
assert(cameraSource != nil)
self.cameraSource!.initViewoPreviewLayer(videoGravity: videoGravity, orientation: orientation)
return self
}
func addVideoLayerToImageView(imageView : UIImageView) -> QRScanner{
assert(cameraSource != nil)
self.cameraSource!.addVideoLayerToImageView(imageView: imageView)
return self
}
func startSession() {
assert(cameraSource != nil)
self.cameraSource!.startSession()
}
}
extension QRScanner : AVCaptureMetadataOutputObjectsDelegate {
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
print("Delegate called")
if metadataObjects.count == 0 {
self.delegate?.scannedData("No Data")
} else {
let metadataObj = metadataObjects[0] as! AVMetadataMachineReadableCodeObject
if metadataObj.type == AVMetadataObject.ObjectType.qr {
if metadataObj.stringValue != nil {
print("Scanner Getting data: \(metadataObj.stringValue!)")
self.delegate?.scannedData(metadataObj.stringValue!)
}
}
}
}
}
I have implemented QRScannerDelegate in my ViewController, but I am not getting anything there. Moreover, I am not even getting the callback inside AVCaptureMetadataOutputObjectsDelegate.
When I tried passing the ViewController instance as the AVCaptureMetadataOutputObjectsDelegate, I did get the callback with the scanned info.
So my question is: why is this happening?
1) When I pass a normal class as the AVCaptureMetadataOutputObjectsDelegate, I am not getting the callback. But:
2) When I pass a UIViewController instance as the AVCaptureMetadataOutputObjectsDelegate, I am able to get the callback.
UPDATE
This is how I am calling prepareCamera from my view controller:
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
do {
try QRScanner().prepareCamera(delegate: self)
.initViewoPreviewLayer(videoGravity: .resizeAspectFill, orientation: .portrait)
.addVideoLayerToImageView(imageView: self.qrScannerImageView)
.startSession()
} catch {
print("Some Camera Error")
}
self.createOverlay()
}
It's hard to say for sure without knowing how you called prepareCamera, as that is what triggers setMetadataObjectsDelegate, but to me it looks like you may not be keeping a strong reference to QRScanner in your ViewController (i.e. instantiating it as an instance variable). That would explain why the callback is hit when your ViewController is your AVCaptureMetadataOutputObjectsDelegate, since the ViewController is still in memory.
It's also worth noting that if the ViewController is your QRScannerDelegate, you will want to define the delegate as weak var delegate : QRScannerDelegate? to prevent a memory leak.
EDIT:
Change
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
do {
try QRScanner().prepareCamera(delegate: self)
.initViewoPreviewLayer(videoGravity: .resizeAspectFill, orientation: .portrait)
.addVideoLayerToImageView(imageView: self.qrScannerImageView)
.startSession()
} catch {
print("Some Camera Error")
}
self.createOverlay()
}
to
var qrScanner = QRScanner()
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
do {
try self.qrScanner.prepareCamera(delegate: self)
.initViewoPreviewLayer(videoGravity: .resizeAspectFill, orientation: .portrait)
.addVideoLayerToImageView(imageView: self.qrScannerImageView)
.startSession()
} catch {
print("Some Camera Error")
}
self.createOverlay()
}
and change
protocol QRScannerDelegate {
func scannedData(_ scannedString : String)
}
to
protocol QRScannerDelegate: class {
func scannedData(_ scannedString : String)
}
to allow a weak delegate.
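And, to make the property match the advice above, declare the delegate inside QRScanner as weak:

weak var delegate : QRScannerDelegate?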
AVCaptureMetadataOutputObjectsDelegate is tough, but you can do some really cool stuff with it! So keep at it.
I pulled some QRScanner code I wrote a while ago and put it into a gist for you, if you want to check it out. It's a bit more stripped down than what you have, but you may find it helpful.
https://gist.github.com/aChase55/733ea89af1bfa80c65971d3bc691f0b2

Button Click Twice

As we all know, to avoid a double tap we can set the code below in the tap method and show a HUD such as SVProgressHUD.show():
isUserInteractionEnabled = false
After the network request completes, set it back to true and call SVProgressHUD.dismiss().
I wonder if there is a way to extract this behaviour for every button that needs to send a request. I have thought about using method swizzling and adding it to a UIButton extension; the code is below, but it doesn't seem good. Do you have better ways to extract this, using inheritance, a protocol, or something else?
extension UIButton {
private struct AssociatedKeys {
static var cp_submitComplete = "cp_submitComplete"
static var cp_defaultMessage:String = NSLocalizedString("Loading", comment: "prompt")
static var cp_customMessage = "cp_customMessage"
}
var submitNotComplete: Bool {
get {
let objc_Get = objc_getAssociatedObject(self, &AssociatedKeys.cp_submitComplete)
if objc_Get != nil {
if let objc_Get = objc_Get as? Bool, objc_Get == true {
return true
}
return false
} else {
return false
}
}
set {
objc_setAssociatedObject(self, &AssociatedKeys.cp_submitComplete, newValue as Bool, .OBJC_ASSOCIATION_RETAIN_NONATOMIC)
if !newValue {
isUserInteractionEnabled = true
SVProgressHUD.dismiss()
}
}
}
var customMessage: String {
get {
let cp_customMessage = objc_getAssociatedObject(self, &AssociatedKeys.cp_customMessage)
if let message = cp_customMessage {
return message as! String
} else {
return AssociatedKeys.cp_defaultMessage
}
}
set {
objc_setAssociatedObject(self, &AssociatedKeys.cp_customMessage, newValue as String, .OBJC_ASSOCIATION_RETAIN_NONATOMIC)
}
}
override open class func initialize() {
if self == UIButton.self {
DispatchQueue.once(NSUUID().uuidString, block: {
let systemSel = #selector(UIButton.sendAction(_:to:for:))
let swizzSel = #selector(UIButton.cpSendAction(_:to:for:))
let systemMethod = class_getInstanceMethod(self, systemSel)
let swizzMethod = class_getInstanceMethod(self, swizzSel)
let isAdd = class_addMethod(self, systemSel, method_getImplementation(swizzMethod), method_getTypeEncoding(swizzMethod))
if isAdd {
class_replaceMethod(self, swizzSel, method_getImplementation(systemMethod), method_getTypeEncoding(systemMethod));
} else {
method_exchangeImplementations(systemMethod, swizzMethod);
}
})
}
}
private dynamic func cpSendAction(_ action: Selector, to target: Any?, for event: UIEvent?) {
cpSendAction(action, to: target, for: event)
if submitNotComplete {
//begin submit
isUserInteractionEnabled = false
SVProgressHUD.show(withStatus: customMessage)
}
}
}
I think it's a bad idea to handle this kind of logic in UIButton. I would rather make the view controller responsible for enabling/disabling the button.
func handleTap(_ sender: UIButton) {
sender.isEnabled = false
SVProgressHUD.show(withStatus: customMessage)
doSomeTaskAsync(withCompletion: {
sender.isEnabled = true
SVProgressHUD.dismiss()
})
}
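If you do want something reusable without swizzling, one option is a small protocol extension on UIViewController (a sketch of my own; SingleSubmitting and submit(from:message:task:) are made-up names, and it assumes SVProgressHUD is available and that the completion is called on the main queue):

protocol SingleSubmitting {}

extension SingleSubmitting where Self: UIViewController {
    // Disables the button and shows a HUD until the task calls its completion.
    func submit(from button: UIButton,
                message: String = NSLocalizedString("Loading", comment: "prompt"),
                task: (@escaping () -> Void) -> Void) {
        button.isEnabled = false
        SVProgressHUD.show(withStatus: message)
        task {
            button.isEnabled = true
            SVProgressHUD.dismiss()
        }
    }
}

A conforming view controller could then wrap the request from the example above like this:

func handleTap(_ sender: UIButton) {
    submit(from: sender) { finished in
        doSomeTaskAsync(withCompletion: finished)
    }
}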

Swift 2: TKTransitionSubmitButton. How do I stop a button from transitioning/animating when the user login/signup is incorrect?

I have tried a number of times, and the best I get is an animation that never cancels or stops, regardless of the command I use.
After following @Mattias' example, I updated my code and it now looks something like this:
// DESIGN ANIMATION... TKTRANSITIONSUBMITBUTTON
@IBOutlet weak var btnFromNib: TKTransitionSubmitButton!
@IBAction func onTapButton(sender: AnyObject) {
btnFromNib.startLoadingAnimation()
if let email = self.emailField.text where email != "", let password = self.passwordField.text where password != "" {
DataService.ds.REF_BASE.authUser(email, password: password, withCompletionBlock: { error, authData in
if error != nil {
self.btnFromNib.returnToOriginalState()
if error.code == STATUS_ACCOUNT_NONEXIST {
self.showErrorAlert("This User does not exist", msg: "Please Sign Up")
} else {
}
} else {
self.btnFromNib.startFinishAnimation(1, completion: {
let myTabbarController = self.storyboard?.instantiateViewControllerWithIdentifier("myTabbarController") as! UITabBarController
let appDelegate = UIApplication.sharedApplication().delegate as! AppDelegate
appDelegate.window?.rootViewController = myTabbarController
myTabbarController.transitioningDelegate = self
})
}
})
}
}
The button still keeps spinning/animating without stopping. After checking the custom animation class the button inherits from, I found these functions:
public func startLoadingAnimation() {
self.cachedTitle = titleForState(.Normal)
self.setTitle("", forState: .Normal)
self.shrink()
NSTimer.schedule(delay: shrinkDuration - 0.25) { timer in
self.spiner.animation()
}
}
public func startFinishAnimation(delay: NSTimeInterval, completion:(()->())?) {
NSTimer.schedule(delay: delay) { timer in
self.didEndFinishAnimation = completion
self.expand()
self.spiner.stopAnimation()
}
}
public func animate(duration: NSTimeInterval, completion:(()->())?) {
startLoadingAnimation()
startFinishAnimation(duration, completion: completion)
}
public override func animationDidStop(anim: CAAnimation, finished flag: Bool) {
let a = anim as! CABasicAnimation
if a.keyPath == "transform.scale" {
didEndFinishAnimation?()
NSTimer.schedule(delay: 1) { timer in
self.returnToOriginalState()
}
}
}
func returnToOriginalState() {
self.layer.removeAllAnimations()
self.setTitle(self.cachedTitle, forState: .Normal)
}
I noticed it has a public override func animationDidStop(anim: CAAnimation, finished flag: Bool), which appears to be the function that stops the animation. But when I use it, I get this error!
How do I get this to work properly? ...
Thanks in Advance
** UPDATED QUESTION **
I checked the code of TKTransitionSubmitButton and there are public methods called startLoadingAnimation(), returnToOriginalState() and startFinishAnimation().
I suggest:
Button tapped
startLoadingAnimation()
Check credentials
If wrong/error: returnToOriginalState()
If correct: startFinishAnimation()
Transition, from TKTransitionSubmitButton documentation:
btn.startFinishAnimation {
//Your Transition
let secondVC = SecondViewController()
secondVC.transitioningDelegate = self
self.presentViewController(secondVC, animated: true, completion: nil)
}
Edit: As far as I can see .animate() of the class calls both the start and finish animation, and that's why you can't cancel it.
EDIT 2: This one works as intended for me (however, I'm not sure about the hard-coded corner radius).
import UIKit
class ViewController: UIViewController {
@IBOutlet weak var submitButton: TKTransitionSubmitButton!
override func viewDidLoad() {
super.viewDidLoad()
submitButton.layer.cornerRadius = 15
// Do any additional setup after loading the view, typically from a nib.
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
@IBAction func buttonPressed(sender: AnyObject) {
submitButton.startLoadingAnimation()
delay(2, closure: {
self.checkCredentials()
})
}
func checkCredentials()
{
//FAKING WRONG CREDENTIALS
let userAndPasswordCorrect = false
if !userAndPasswordCorrect
{
submitButton.returnToOriginalState()
//Alert or whatever
}
}
//Faking network delay
func delay(delay:Double, closure:()->()) {
dispatch_after(
dispatch_time(
DISPATCH_TIME_NOW,
Int64(delay * Double(NSEC_PER_SEC))
),
dispatch_get_main_queue(), closure)
}
}
I checked the code of TKTransitionSubmitButton.
I resolved this issue. Please try to stop the animation inside DispatchQueue.main.async; it works well.
func apicallforLogin() {
    let urlString = "http://"
    guard let requestUrl = URL(string: urlString) else { return }
    let request = URLRequest(url: requestUrl)
    let task = URLSession.shared.dataTask(with: request) { (data, response, error) in
        if error == nil, let usableData = data {
            DispatchQueue.main.async {
                self.btnLogin.startFinishAnimation(0.1, completion: {
                    let HomeVC = self.storyboard?.instantiateViewController(withIdentifier: "HomeViewController") as! HomeViewController
                    HomeVC.transitioningDelegate = self
                    self.navigationController?.pushViewController(HomeVC, animated: false)
                })
            }
            print(usableData)
        } else {
            DispatchQueue.main.async {
                self.btnLogin.returnToOriginalState()
            }
        }
    }
    task.resume()
}
Well, you might have got the answer by now, but I would still like to give my answer. You can just copy the code below and everything will work fine. I have made the change and created a pull request for this issue.
import Foundation
import UIKit
@IBDesignable
open class TKTransitionSubmitButton : UIButton, UIViewControllerTransitioningDelegate, CAAnimationDelegate {
lazy var spiner: SpinerLayer! = {
let s = SpinerLayer(frame: self.frame)
return s
}()
@IBInspectable open var spinnerColor: UIColor = UIColor.white {
didSet {
spiner.spinnerColor = spinnerColor
}
}
open var didEndFinishAnimation : (()->())? = nil
let springGoEase = CAMediaTimingFunction(controlPoints: 0.45, -0.36, 0.44, 0.92)
let shrinkCurve = CAMediaTimingFunction(name: kCAMediaTimingFunctionLinear)
let expandCurve = CAMediaTimingFunction(controlPoints: 0.95, 0.02, 1, 0.05)
let shrinkDuration: CFTimeInterval = 0.1
@IBInspectable open var normalCornerRadius:CGFloat = 0.0 {
didSet {
self.layer.cornerRadius = normalCornerRadius
}
}
var cachedTitle: String?
var isAnimating = false
public override init(frame: CGRect) {
super.init(frame: frame)
self.setup()
}
public required init!(coder aDecoder: NSCoder) {
super.init(coder: aDecoder)!
self.setup()
}
func setup() {
self.clipsToBounds = true
spiner.spinnerColor = spinnerColor
}
open func startLoadingAnimation() {
self.isAnimating = true
self.cachedTitle = title(for: UIControlState())
self.setTitle("", for: UIControlState())
self.layer.addSublayer(spiner)
// Animate
self.cornerRadius()
self.shrink()
_ = Timer.schedule(delay: self.shrinkDuration - 0.25) { timer in
self.spiner.animation()
}
}
open func startFinishAnimation(_ delay: TimeInterval,_ animation: CAMediaTimingFunction, completion:(()->())?) {
self.isAnimating = true
_ = Timer.schedule(delay: delay) { timer in
self.didEndFinishAnimation = completion
self.expand(animation)
self.spiner.stopAnimation()
}
}
open func animate(_ duration: TimeInterval,_ animation: CAMediaTimingFunction, completion:(()->())?) {
startLoadingAnimation()
startFinishAnimation(duration, animation, completion: completion)
}
open func setOriginalState() {
self.returnToOriginalState()
self.spiner.stopAnimation()
}
public func animationDidStop(_ anim: CAAnimation, finished flag: Bool) {
let a = anim as! CABasicAnimation
if a.keyPath == "transform.scale" {
didEndFinishAnimation?()
_ = Timer.schedule(delay: 1) { timer in
self.returnToOriginalState()
}
}
}
open func returnToOriginalState() {
self.spiner.removeFromSuperlayer()
self.layer.removeAllAnimations()
self.setTitle(self.cachedTitle, for: UIControlState())
self.spiner.stopAnimation()
self.isAnimating = false
}
func cornerRadius() {
let cornerRadiusAnim = CABasicAnimation(keyPath: "cornerRadius")
// cornerRadiusAnim.fromValue = frame.width
cornerRadiusAnim.toValue = frame.height/2
cornerRadiusAnim.duration = shrinkDuration
cornerRadiusAnim.timingFunction = shrinkCurve
cornerRadiusAnim.fillMode = kCAFillModeForwards
cornerRadiusAnim.isRemovedOnCompletion = false
layer.add(cornerRadiusAnim, forKey: cornerRadiusAnim.keyPath)
}
func shrink() {
let shrinkAnim = CABasicAnimation(keyPath: "bounds.size.width")
shrinkAnim.beginTime = CACurrentMediaTime() + 0.1
shrinkAnim.fromValue = frame.width
shrinkAnim.toValue = frame.height
shrinkAnim.duration = shrinkDuration
shrinkAnim.timingFunction = shrinkCurve
shrinkAnim.fillMode = kCAFillModeForwards
shrinkAnim.isRemovedOnCompletion = false
layer.add(shrinkAnim, forKey: shrinkAnim.keyPath)
}
func expand(_ animation: CAMediaTimingFunction) {
let expandAnim = CABasicAnimation(keyPath: "transform.scale")
expandAnim.fromValue = 1.0
expandAnim.toValue = 26.0
expandAnim.timingFunction = animation
expandAnim.duration = 0.3
expandAnim.delegate = self
expandAnim.fillMode = kCAFillModeForwards
expandAnim.isRemovedOnCompletion = false
layer.add(expandAnim, forKey: expandAnim.keyPath)
}
}
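For reference, a usage sketch against this modified class (my own example; loginRequest is a made-up asynchronous call): startFinishAnimation now also takes a CAMediaTimingFunction, and setOriginalState() stops the spinner when the login fails:

btnFromNib.startLoadingAnimation()
loginRequest { success in
    DispatchQueue.main.async {
        if success {
            self.btnFromNib.startFinishAnimation(1, CAMediaTimingFunction(name: kCAMediaTimingFunctionEaseIn)) {
                // present the next screen here
            }
        } else {
            self.btnFromNib.setOriginalState()
        }
    }
}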