How to show a scanned QR code user's profile in my app in Swift

This is my first time working with QR codes.
In our app I need to show the scanned user's profile when I scan that user's QR code. I have written the code to scan the QR code; once a valid QR code is scanned, how do I show that person's profile in my app?
Code for the QR code scanner:
import UIKit
import AVFoundation

class QRCodeViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {

    var video = AVCaptureVideoPreviewLayer()
    let session = AVCaptureSession()

    @IBOutlet weak var qrCode: UIImageView!

    override func viewDidLoad() {
        super.viewDidLoad()

        // Add the camera as input to the capture session
        guard let captureDevice = AVCaptureDevice.default(for: AVMediaType.video) else { return }
        do {
            let input = try AVCaptureDeviceInput(device: captureDevice)
            session.addInput(input)
        } catch {
            print(error.localizedDescription)
        }

        // Add a metadata output restricted to QR codes
        let output = AVCaptureMetadataOutput()
        session.addOutput(output)
        output.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
        output.metadataObjectTypes = [AVMetadataObject.ObjectType.qr]

        // Show the live camera preview behind the overlay image
        video = AVCaptureVideoPreviewLayer(session: session)
        video.frame = view.layer.bounds
        view.layer.addSublayer(video)
        self.view.bringSubviewToFront(qrCode)

        session.startRunning()
    }

    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        if !metadataObjects.isEmpty {
            if let object = metadataObjects[0] as? AVMetadataMachineReadableCodeObject {
                if object.type == AVMetadataObject.ObjectType.qr {
                    let alert = UIAlertController(title: "QR Code", message: object.stringValue, preferredStyle: .alert)
                    alert.addAction(UIAlertAction(title: "Retake", style: .default, handler: nil))
                    alert.addAction(UIAlertAction(title: "Confirm", style: .default, handler: { _ in
                        // self.scannedCode = object.stringValue!
                        self.session.stopRunning()
                    }))
                    present(alert, animated: true, completion: nil)
                }
            }
        }
    }
}
With the code above, when I scan a valid user QR code, how do I show that person's profile on my screen? Please suggest an approach.
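For illustration, here is a minimal sketch of one way the Confirm action could hand off to a profile screen. Everything below (UserProfile, ProfileViewController, showProfile) is hypothetical scaffolding, not part of the original code; in a real app the placeholder profile would come from your backend, looked up with the scanned ID.

import UIKit

// Placeholder model and screen – assumptions for illustration only.
struct UserProfile {
    let id: String
    let name: String
}

final class ProfileViewController: UIViewController {
    private let profile: UserProfile

    init(profile: UserProfile) {
        self.profile = profile
        super.init(nibName: nil, bundle: nil)
    }

    required init?(coder: NSCoder) { fatalError("init(coder:) has not been implemented") }

    override func viewDidLoad() {
        super.viewDidLoad()
        view.backgroundColor = .white
        title = profile.name
        // Lay out avatar, name, and other profile details here.
    }
}

// Call this from the scanner's "Confirm" handler after session.stopRunning().
// The locally built profile stands in for what a backend lookup would return.
func showProfile(forScannedCode code: String, from presenter: UIViewController) {
    let profile = UserProfile(id: code, name: "Scanned user \(code)")
    let profileVC = ProfileViewController(profile: profile)

    if let nav = presenter.navigationController {
        nav.pushViewController(profileVC, animated: true)
    } else {
        presenter.present(profileVC, animated: true, completion: nil)
    }
}

In the Confirm handler you would call something like showProfile(forScannedCode: object.stringValue ?? "", from: self); in a real app you would replace the placeholder profile with a request to your backend using the scanned ID.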

Related

Unsupported type found - use -availableMetadataObjectTypes issue

I am trying to create a QR reader. However, when I open the window with the scanner, it crashes with the error "Terminating app due to uncaught exception 'NSInvalidArgumentException', reason: '*** -[AVCaptureMetadataOutput setMetadataObjectTypes:] Unsupported type found - use -availableMetadataObjectTypes'".
This is my code:
import UIKit
import AVFoundation
import Alamofire
import SwiftyJSON

class CameraTwoViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {

    @IBOutlet weak var square: UIImageView!
    var video = AVCaptureVideoPreviewLayer()

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.

        //Creating session
        let session = AVCaptureSession()

        //Define capture device
        let captureDevice = AVCaptureDevice.default(for: .video)
        do {
            let input = try AVCaptureDeviceInput(device: captureDevice!)
        } catch {
            print("ERROR")
        }

        let output = AVCaptureMetadataOutput()
        session.addOutput(output)
        output.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
        output.metadataObjectTypes = [AVMetadataObject.ObjectType.qr]
        //output.metadataObjectTypes = [AVMetadataObject.availableMetadataObjectTypes.qr]

        video = AVCaptureVideoPreviewLayer(session: session)
        video.frame = view.layer.bounds
        view.layer.addSublayer(video)
        self.view.bringSubview(toFront: square)

        session.startRunning()
    }

    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        if metadataObjects != nil && metadataObjects.count != 0 {
            if let object = metadataObjects[0] as? AVMetadataMachineReadableCodeObject {
                if object.type == AVMetadataObject.ObjectType.qr {
                    let alert = UIAlertController(title: "Your code is:", message: object.stringValue, preferredStyle: .alert)
                    alert.addAction(UIAlertAction(title: "Retake", style: .default, handler: nil))
                    alert.addAction(UIAlertAction(title: "Copy", style: .default, handler: { (nil) in
                        UIPasteboard.general.string = object.stringValue
                    }))
                    present(alert, animated: true, completion: nil)
                }
            }
        }
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }
}
Thank you in advance!
Try adding the input to the session before you add the output. The output's availableMetadataObjectTypes stays empty until the output belongs to a session with a camera input, so setting metadataObjectTypes = [.qr] first raises exactly this exception. Something like this:
func configureScanner() {
    guard let captureDevice = AVCaptureDevice.default(for: .video) else {
        return
    }

    var input: AVCaptureDeviceInput?
    do {
        input = try AVCaptureDeviceInput(device: captureDevice)
    } catch let error {
        print(error.localizedDescription)
    }
    guard let indeedInput = input else {
        return
    }

    // Add the input first, then the output, then restrict the metadata types
    captureSession = AVCaptureSession()
    captureSession!.addInput(indeedInput)

    let captureMetadataOutput = AVCaptureMetadataOutput()
    captureSession!.addOutput(captureMetadataOutput)
    captureMetadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
    captureMetadataOutput.metadataObjectTypes = [AVMetadataObject.ObjectType.qr]
    ...
}
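For completeness, a rough sketch of the setup that typically follows the ... above, mirroring the preview-layer code already in the question (the captureSession, video, and square properties are assumed to exist on the view controller, and current Swift method names are used):

// Sketch only: attach the preview layer and start the session.
func startPreview() {
    guard let captureSession = captureSession else { return }

    video = AVCaptureVideoPreviewLayer(session: captureSession)
    video.frame = view.layer.bounds
    view.layer.addSublayer(video)
    view.bringSubviewToFront(square)   // keep the overlay image on top

    captureSession.startRunning()
}

It would be called right after configureScanner() from viewDidLoad().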

swift4 qr reader working but no result

My Swift 4 QR code reader is supposed to pop up an alert, but the alert never shows. The session starts running properly, but captureOutput() does not seem to be triggered when a QR code is scanned.
My code:
import UIKit
import AVFoundation

class ScannerViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {

    @IBOutlet weak var square: UIImageView!
    //Creating session
    var video = AVCaptureVideoPreviewLayer()

    override func viewDidLoad() {
        super.viewDidLoad()

        //Creating session
        let session = AVCaptureSession()

        //Define capture device
        let captureDevice = AVCaptureDevice.default(for: AVMediaType.video)
        do {
            let input = try AVCaptureDeviceInput(device: captureDevice!)
            session.addInput(input)
        } catch {
            print("ERROR")
        }

        let output = AVCaptureMetadataOutput()
        session.addOutput(output)
        output.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
        output.metadataObjectTypes = [AVMetadataObject.ObjectType.qr]

        video = AVCaptureVideoPreviewLayer(session: session)
        video.frame = view.layer.bounds
        view.layer.addSublayer(video)
        self.view.bringSubview(toFront: square)

        session.startRunning()
    }

    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [Any]!, from connection: AVCaptureConnection!) {
        if metadataObjects != nil && metadataObjects.count != 0 {
            if let object = metadataObjects[0] as? AVMetadataMachineReadableCodeObject {
                if object.type == AVMetadataObject.ObjectType.qr {
                    let alert = UIAlertController(title: "QR Code", message: object.stringValue, preferredStyle: .alert)
                    alert.addAction(UIAlertAction(title: "Retake", style: .default, handler: nil))
                    alert.addAction(UIAlertAction(title: "Copy", style: .default, handler: { (nil) in
                        UIPasteboard.general.string = object.stringValue
                    }))
                    present(alert, animated: true, completion: nil)
                }
            }
        }
    }
}
Your scanning check needs to be in the following delegate method. In Swift 4 the AVCaptureMetadataOutputObjectsDelegate callback was renamed from captureOutput(_:didOutputMetadataObjects:from:) to:
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection)
so your old method is never called. Your code will look like this:
import UIKit
import AVFoundation

class ScannerViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {

    @IBOutlet weak var square: UIImageView!
    //Creating session
    var video = AVCaptureVideoPreviewLayer()

    override func viewDidLoad() {
        super.viewDidLoad()

        //Creating session
        let session = AVCaptureSession()

        //Define capture device
        let captureDevice = AVCaptureDevice.default(for: AVMediaType.video)
        do {
            let input = try AVCaptureDeviceInput(device: captureDevice!)
            session.addInput(input)
        } catch {
            print("ERROR")
        }

        let output = AVCaptureMetadataOutput()
        session.addOutput(output)
        output.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
        output.metadataObjectTypes = [AVMetadataObject.ObjectType.qr]

        video = AVCaptureVideoPreviewLayer(session: session)
        video.frame = view.layer.bounds
        view.layer.addSublayer(video)
        self.view.bringSubview(toFront: square)

        session.startRunning()
    }

    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        if metadataObjects != nil && metadataObjects.count != 0 {
            if let object = metadataObjects[0] as? AVMetadataMachineReadableCodeObject {
                if object.type == AVMetadataObject.ObjectType.qr {
                    let alert = UIAlertController(title: "QR Code", message: object.stringValue, preferredStyle: .alert)
                    alert.addAction(UIAlertAction(title: "Retake", style: .default, handler: nil))
                    alert.addAction(UIAlertAction(title: "Copy", style: .default, handler: { (nil) in
                        UIPasteboard.general.string = object.stringValue
                    }))
                    present(alert, animated: true, completion: nil)
                }
            }
        }
    }
}

Opening a URL scanned from a QR code in SFSafariViewController

I'm trying to make my app open the URL scanned from QR codes. I made a QR code scanner, but it only copies the value of the scanned QR code to the pasteboard; I want to open it in an SFSafariViewController instead.
Instead of the "Copy" option, I want an "Open" option that actually opens the scanned URL.
Here's my code:
func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [Any]!, from connection: AVCaptureConnection!) {
    if metadataObjects != nil && metadataObjects.count != 0 {
        if let object = metadataObjects[0] as? AVMetadataMachineReadableCodeObject {
            if object.type == AVMetadataObjectTypeQRCode {
                let alert = UIAlertController(title: "QR Code", message: object.stringValue, preferredStyle: .alert)
                alert.addAction(UIAlertAction(title: "Retake", style: .default, handler: nil))
                alert.addAction(UIAlertAction(title: "Copy", style: .default, handler: { (nil) in
                    UIPasteboard.general.string = object.stringValue
                }))
                present(alert, animated: true, completion: nil)
            }
        }
    }
}
Using SFSafariViewController is pretty straightforward: first import SafariServices, then present the controller with a URL.
The basic code for this is:
import SafariServices

func loadSafari(url: String) {
    guard let url = URL(string: url) else { return }
    let safariController = SFSafariViewController(url: url)
    present(safariController, animated: true, completion: nil)
}
Place this code in your class and call the function inside your capture output:
func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [Any]!, from connection: AVCaptureConnection!) {
    if metadataObjects != nil && metadataObjects.count != 0 {
        if let object = metadataObjects[0] as? AVMetadataMachineReadableCodeObject {
            if object.type == AVMetadataObjectTypeQRCode, let scannedString = object.stringValue {
                UIPasteboard.general.string = scannedString
                loadSafari(url: scannedString)
            }
        }
    }
}
Tapping Done dismisses the SFSafariViewController and takes the user back to the previous view controller.
I hope this helps. Let me know how it goes.
Your code shows no sign of attempting to use SFSafariViewController. Have you tried something like the code below?
import SafariServices

if let scannedString = object.stringValue, let url = URL(string: scannedString) {
    let browser = SFSafariViewController(url: url)
}
This won't make it magically appear; there is still some work you need to do yourself to present it to the user, such as:
present(browser, animated: true, completion: nil)
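Putting the two answers together, here is a minimal sketch of what the whole flow might look like. It is written against the current (Swift 4+) metadata delegate rather than the question's older signature, and the class name and session property are placeholders, not part of the original code:

import UIKit
import AVFoundation
import SafariServices

// Sketch only: a scanner view controller that opens scanned http(s) URLs in Safari
// and resumes scanning when the browser is dismissed. The camera/session setup is
// the same as in the question and is omitted here.
final class URLScannerViewController: UIViewController,
                                      AVCaptureMetadataOutputObjectsDelegate,
                                      SFSafariViewControllerDelegate {

    let session = AVCaptureSession()

    func metadataOutput(_ output: AVCaptureMetadataOutput,
                        didOutput metadataObjects: [AVMetadataObject],
                        from connection: AVCaptureConnection) {
        guard let object = metadataObjects.first as? AVMetadataMachineReadableCodeObject,
              object.type == .qr,
              let scannedString = object.stringValue else { return }
        openScannedURL(scannedString)
    }

    func openScannedURL(_ string: String) {
        // SFSafariViewController only accepts http/https URLs and throws otherwise.
        guard let url = URL(string: string),
              url.scheme == "http" || url.scheme == "https" else {
            print("Scanned value is not an http(s) URL: \(string)")
            return
        }

        session.stopRunning()                      // pause scanning while browsing
        let browser = SFSafariViewController(url: url)
        browser.delegate = self
        present(browser, animated: true, completion: nil)
    }

    // Called when the user taps Done in Safari; resume scanning.
    func safariViewControllerDidFinish(_ controller: SFSafariViewController) {
        session.startRunning()
    }
}

The http/https guard matters because SFSafariViewController rejects other URL schemes with an exception, and stopping the session before presenting avoids scanning the same code repeatedly while the browser is on screen.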

Share extension causes Safari to hang on iPhone

I am developing an iOS app that lets users save URLs, similar to the Pocket app. The app has a share extension that simply saves the URL into NSUserDefaults for the shared app group. For some reason the share extension causes Mobile Safari to hang (become unresponsive) after selecting it. The code for the share extension is so simple that I am wondering what could have caused this. When debugging in Xcode, it also seems the function in the share extension is never called at all. Any clues? This is running on iOS 9.3.
Here is the code:
//
//  ShareViewController.swift
//  intrafeedappShare
//
//  Created by Dicky Johan on 5/21/16.
//  Copyright © 2016 Dicky Johan. All rights reserved.
//

import UIKit
import Social
import MobileCoreServices

class ShareViewController: UIViewController {

    var selectedURL: String?

    override func viewDidLoad() {
        super.viewDidLoad()

        let contentType = kUTTypeURL as String

        guard let item = self.extensionContext?.inputItems.first as? NSExtensionItem else {
            fatalError()
        }

        for attachment in item.attachments as! [NSItemProvider] {
            if attachment.hasItemConformingToTypeIdentifier(contentType) {
                attachment.loadItemForTypeIdentifier(kUTTypeURL as String, options: nil) { url, error in
                    if error == nil {
                        guard let url = url as? NSURL else {
                            self.extensionContext?.cancelRequestWithError(NSError(domain: "Url is empty", code: -1, userInfo: nil))
                            return
                        }

                        self.selectedURL = url.absoluteString

                        let defaults = NSUserDefaults(suiteName: Constants.Settings.sharedAppGroup)
                        if let arrUrls = defaults!.objectForKey(Constants.Settings.sharedURLS) {
                            // append to the existing list
                            arrUrls.appendString(url.absoluteString)
                        } else {
                            let newArrUrl = [url.absoluteString]
                            defaults!.setObject(newArrUrl, forKey: Constants.Settings.sharedURLS)
                        }
                        defaults!.synchronize()

                        self.extensionContext?.completeRequestReturningItems(nil, completionHandler: nil)

                        let alert = UIAlertController(title: "Success", message: "Added url to intrafeed", preferredStyle: .Alert)
                        let action = UIAlertAction(title: "Done", style: .Default) { _ in
                            self.dismissViewControllerAnimated(true, completion: nil)
                        }
                        alert.addAction(action)
                        self.presentViewController(alert, animated: true, completion: nil)
                    } else {
                        self.extensionContext?.cancelRequestWithError(error)
                        let alert = UIAlertController(title: "Error", message: "Error loading url", preferredStyle: .Alert)
                        let action = UIAlertAction(title: "Error", style: .Cancel) { _ in
                            self.dismissViewControllerAnimated(true, completion: nil)
                        }
                        alert.addAction(action)
                        self.presentViewController(alert, animated: true, completion: nil)
                    }
                }
            }
        }
    }
}
OK, apparently there was a crash in the code, which caused Safari to freeze. I found the issue by debugging the extension in Xcode.
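The self-answer does not say what the crash was, but the append path looks suspect: objectForKey returns an opaque object, and appendString is an NSMutableString method, not an array method. For context, a minimal sketch (modern Swift, with hypothetical suite and key names) of appending a URL to an array stored in shared UserDefaults:

import Foundation

// Hypothetical names; substitute your own app group identifier and key.
let appGroupID = "group.com.example.intrafeed"
let savedURLsKey = "sharedURLS"

func appendSharedURL(_ urlString: String) {
    guard let defaults = UserDefaults(suiteName: appGroupID) else { return }

    // Read the existing list (or start a new one), append, and write it back.
    var urls = defaults.stringArray(forKey: savedURLsKey) ?? []
    urls.append(urlString)
    defaults.set(urls, forKey: savedURLsKey)
}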

Improve reading QR and PDF417 codes with AVFoundation in Swift

I'm trying to read PDF417 and QR codes. My code works, but when there is less light or the image is blurred it does not, so I'm looking for ways to improve reading.
I tried changing the camera settings: I increased the AVCaptureSession resolution and the AVCaptureDevice videoZoomFactor, but apparently I'm not doing it correctly.
I hope you can help me.
import UIKit
import AVFoundation

class ViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {

    let session: AVCaptureSession = AVCaptureSession()
    var previewLayer: AVCaptureVideoPreviewLayer!
    var highlightView: UIView = UIView()

    override func viewDidLoad() {
        super.viewDidLoad()

        self.highlightView.autoresizingMask = UIViewAutoresizing.FlexibleTopMargin |
            UIViewAutoresizing.FlexibleBottomMargin |
            UIViewAutoresizing.FlexibleLeftMargin |
            UIViewAutoresizing.FlexibleRightMargin

        self.highlightView.layer.borderColor = UIColor.greenColor().CGColor
        self.highlightView.layer.borderWidth = 3
        self.view.addSubview(self.highlightView)

        let device = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)

        var error: NSError? = nil
        let input: AVCaptureDeviceInput? = AVCaptureDeviceInput.deviceInputWithDevice(device, error: &error) as? AVCaptureDeviceInput

        if input != nil {
            session.addInput(input)
        } else {
            println(error)
        }

        let output = AVCaptureMetadataOutput()
        output.setMetadataObjectsDelegate(self, queue: dispatch_get_main_queue())
        session.addOutput(output)
        output.metadataObjectTypes = output.availableMetadataObjectTypes

        previewLayer = AVCaptureVideoPreviewLayer.layerWithSession(session) as! AVCaptureVideoPreviewLayer
        previewLayer.frame = self.view.bounds
        previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
        self.view.layer.addSublayer(previewLayer)

        session.startRunning()
    }

    func captureOutput(captureOutput: AVCaptureOutput!, didOutputMetadataObjects metadataObjects: [AnyObject]!, fromConnection connection: AVCaptureConnection!) {
        var highlightViewRect = CGRectZero
        var barCodeObject: AVMetadataMachineReadableCodeObject!
        var detectionString: String!
        let barCodeTypes = [
            AVMetadataObjectTypePDF417Code,
            AVMetadataObjectTypeQRCode
        ]

        for metadata in metadataObjects {
            for barcodeType in barCodeTypes {
                if metadata.type == barcodeType {
                    barCodeObject = self.previewLayer.transformedMetadataObjectForMetadataObject(metadata as! AVMetadataMachineReadableCodeObject) as! AVMetadataMachineReadableCodeObject
                    highlightViewRect = barCodeObject.bounds
                    detectionString = (metadata as! AVMetadataMachineReadableCodeObject).stringValue
                    self.session.stopRunning()
                    self.alert(detectionString)
                    break
                }
            }
        }

        println(detectionString)
        self.highlightView.frame = highlightViewRect
        self.view.bringSubviewToFront(self.highlightView)
    }

    func alert(Code: String) {
        let actionSheet: UIAlertController = UIAlertController(title: "Barcode", message: "\(Code)", preferredStyle: UIAlertControllerStyle.Alert)
        let firstAlertAction: UIAlertAction = UIAlertAction(title: "OK", style: UIAlertActionStyle.Default, handler: { (alertAction: UIAlertAction!) in
            self.session.startRunning()
        })
        actionSheet.addAction(firstAlertAction)
        self.presentViewController(actionSheet, animated: true, completion: nil)
    }
}
I struggled with inconsistent reading of QR codes; what worked for me was to get the QR code as proportional as possible (if reading from another iPhone/iPad screen). If that doesn't help, the tutorial at http://www.appcoda.com/qr-code-reader-swift/ helped me out.
Cheers
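Since the question specifically mentions raising the session resolution and videoZoomFactor, here is a minimal sketch (written in current Swift; the preset and zoom values are illustrative assumptions to tune for your use case) of the camera configuration that usually helps with small or poorly lit codes:

import AVFoundation

// Sketch: configure the session and device for better small-code scanning.
func tuneForBarcodeScanning(session: AVCaptureSession, device: AVCaptureDevice) {
    // A higher-resolution preset gives the detector more pixels to work with.
    if session.canSetSessionPreset(.hd1920x1080) {
        session.sessionPreset = .hd1920x1080
    }

    do {
        try device.lockForConfiguration()

        // A modest zoom effectively enlarges small codes; stay within the supported range.
        let desiredZoom: CGFloat = 2.0
        device.videoZoomFactor = min(desiredZoom, device.activeFormat.videoMaxZoomFactor)

        // Bias autofocus toward near objects, which is where codes usually are.
        if device.isAutoFocusRangeRestrictionSupported {
            device.autoFocusRangeRestriction = .near
        }
        if device.isFocusModeSupported(.continuousAutoFocus) {
            device.focusMode = .continuousAutoFocus
        }

        device.unlockForConfiguration()
    } catch {
        print("Could not lock device for configuration: \(error)")
    }
}

Restricting metadataObjectTypes to just the types you need (for example .qr and .pdf417) instead of availableMetadataObjectTypes, and setting the output's rectOfInterest to the on-screen scanning area, also tends to speed up and stabilize detection.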